diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index 89b0620085d3..5c5ec32d9402 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -1288,7 +1288,7 @@ jobs:
uses: docker/setup-buildx-action@v3
with:
driver: docker-container
- - uses: actions/setup-node@v4.0.3
+ - uses: actions/setup-node@v4.0.4
with:
node-version: ${{ matrix.node }}
cache: "npm"
@@ -2359,7 +2359,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
- - uses: actions/setup-node@v4.0.3
+ - uses: actions/setup-node@v4.0.4
with:
node-version: ${{ matrix.node }}
cache: "npm"
diff --git a/.pylintrc b/.pylintrc
index 9454261215a1..9f0e88f06ef7 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -466,6 +466,8 @@ max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
+# Maximum number of positional arguments for function / method (see R0917).
+max-positional-arguments=12
[EXCEPTIONS]
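
For context: pylint 3.3.0 (bumped later in this PR) introduces R0917 (too-many-positional-arguments), which the option above configures. A minimal sketch of what the checker flags and a common fix, with hypothetical function names and an illustrative limit of 5 instead of the 12 configured here:

    # Flagged: six positional parameters exceed the illustrative limit of 5.
    def create_node(name, key, version, host, port, basepath):  # R0917
        ...

    # Common fix: make trailing parameters keyword-only, since
    # keyword-only parameters do not count toward the positional limit.
    def create_node_fixed(name, key, *, version, host, port, basepath):
        ...
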
diff --git a/api/tests/conftest.py b/api/tests/conftest.py
index 1f0319393b78..8be5481a3c58 100644
--- a/api/tests/conftest.py
+++ b/api/tests/conftest.py
@@ -1,5 +1,6 @@
-# pylint: disable=unused-argument
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-positional-arguments
+# pylint: disable=unused-argument
# pylint: disable=unused-variable
import logging
diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt
index ec2da14450da..04cf811ff2af 100644
--- a/api/tests/requirements.txt
+++ b/api/tests/requirements.txt
@@ -11,7 +11,7 @@ attrs==24.2.0
# aiohttp
# jsonschema
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c ../../requirements/constraints.txt
# requests
@@ -25,7 +25,7 @@ frozenlist==1.4.1
# via
# aiohttp
# aiosignal
-idna==3.7
+idna==3.10
# via
# requests
# yarl
@@ -50,13 +50,13 @@ lazy-object-proxy==1.10.0
# via openapi-spec-validator
markupsafe==2.1.5
# via werkzeug
-more-itertools==10.4.0
+more-itertools==10.5.0
# via openapi-core
-multidict==6.0.5
+multidict==6.1.0
# via
# aiohttp
# yarl
-openapi-core==0.19.3
+openapi-core==0.19.4
# via -r requirements.in
openapi-schema-validator==0.6.2
# via
@@ -74,7 +74,7 @@ pathable==0.4.3
# via jsonschema-path
pluggy==1.5.0
# via pytest
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements.in
# pytest-asyncio
@@ -114,11 +114,11 @@ six==1.16.0
# rfc3339-validator
termcolor==2.4.0
# via pytest-sugar
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c ../../requirements/constraints.txt
# requests
-werkzeug==3.0.3
+werkzeug==3.0.4
# via openapi-core
-yarl==1.9.4
+yarl==1.12.1
# via aiohttp
diff --git a/package.json b/package.json
index 8533c589643a..c50867df4cee 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,7 @@
{
"scripts": {
- "linter": "npx eslint ./services/*/client/source/class/*/"
+ "linter": "npx eslint ./services/*/client/source/class/*/",
+ "linter-fix": "npx eslint ./services/*/client/source/class/*/ --fix"
},
"devDependencies": {
"babel-eslint": "^10.1.0",
diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt
index 1c4e64828a39..79052f3c4d91 100644
--- a/packages/aws-library/requirements/_base.txt
+++ b/packages/aws-library/requirements/_base.txt
@@ -10,7 +10,7 @@ aiocache==0.12.2
# -r requirements/_base.in
aiodebug==2.3.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-aiodocker==0.22.2
+aiodocker==0.23.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
aiofiles==24.1.0
# via
@@ -28,13 +28,13 @@ aiohttp==3.10.5
# -c requirements/../../../requirements/constraints.txt
# aiobotocore
# aiodocker
-aioitertools==0.11.0
+aioitertools==0.12.0
# via aiobotocore
-aiormq==6.8.0
+aiormq==6.8.1
# via aio-pika
aiosignal==1.3.1
# via aiohttp
-anyio==4.4.0
+anyio==4.6.0
# via
# fast-depends
# faststream
@@ -56,9 +56,9 @@ botocore==1.34.131
# aiobotocore
# boto3
# s3transfer
-botocore-stubs==1.35.2
+botocore-stubs==1.35.25
# via types-aiobotocore
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -81,9 +81,9 @@ dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
-fast-depends==2.4.8
+fast-depends==2.4.11
# via faststream
-faststream==0.5.18
+faststream==0.5.23
# via -r requirements/../../../packages/service-library/requirements/_base.in
frozenlist==1.4.1
# via
@@ -93,15 +93,15 @@ googleapis-common-protos==1.65.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-grpcio==1.66.0
+grpcio==1.66.1
# via opentelemetry-exporter-otlp-proto-grpc
-idna==3.7
+idna==3.10
# via
# anyio
# email-validator
# requests
# yarl
-importlib-metadata==8.0.0
+importlib-metadata==8.4.0
# via opentelemetry-api
jmespath==1.0.1
# via
@@ -117,11 +117,11 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-multidict==6.0.5
+multidict==6.1.0
# via
# aiohttp
# yarl
-opentelemetry-api==1.26.0
+opentelemetry-api==1.27.0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -130,35 +130,35 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
# opentelemetry-semantic-conventions
-opentelemetry-exporter-otlp==1.26.0
+opentelemetry-exporter-otlp==1.27.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-opentelemetry-exporter-otlp-proto-common==1.26.0
+opentelemetry-exporter-otlp-proto-common==1.27.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-grpc==1.26.0
+opentelemetry-exporter-otlp-proto-grpc==1.27.0
# via opentelemetry-exporter-otlp
-opentelemetry-exporter-otlp-proto-http==1.26.0
+opentelemetry-exporter-otlp-proto-http==1.27.0
# via opentelemetry-exporter-otlp
-opentelemetry-instrumentation==0.47b0
+opentelemetry-instrumentation==0.48b0
# via opentelemetry-instrumentation-requests
-opentelemetry-instrumentation-requests==0.47b0
+opentelemetry-instrumentation-requests==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-opentelemetry-proto==1.26.0
+opentelemetry-proto==1.27.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.26.0
+opentelemetry-sdk==1.27.0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-semantic-conventions==0.47b0
+opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
-opentelemetry-util-http==0.47b0
+opentelemetry-util-http==0.48b0
# via opentelemetry-instrumentation-requests
orjson==3.10.7
# via
@@ -172,13 +172,13 @@ orjson==3.10.7
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
pamqp==3.3.0
# via aiormq
-protobuf==4.25.4
+protobuf==4.25.5
# via
# googleapis-common-protos
# opentelemetry-proto
psutil==6.0.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -195,7 +195,7 @@ pydantic==1.10.17
# fast-depends
pygments==2.18.0
# via rich
-pyinstrument==4.7.2
+pyinstrument==4.7.3
# via -r requirements/../../../packages/service-library/requirements/_base.in
python-dateutil==2.9.0.post0
# via
@@ -226,7 +226,7 @@ referencing==0.29.3
# jsonschema-specifications
requests==2.32.3
# via opentelemetry-exporter-otlp-proto-http
-rich==13.7.1
+rich==13.8.1
# via
# -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/_base.in
@@ -237,7 +237,7 @@ rpds-py==0.20.0
# referencing
s3transfer==0.10.2
# via boto3
-setuptools==74.0.0
+setuptools==75.1.0
# via opentelemetry-instrumentation
sh==2.0.7
# via -r requirements/_base.in
@@ -253,22 +253,21 @@ toolz==0.12.1
# via -r requirements/../../../packages/service-library/requirements/_base.in
tqdm==4.66.5
# via -r requirements/../../../packages/service-library/requirements/_base.in
-typer==0.12.4
+typer==0.12.5
# via
# -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/_base.in
- # faststream
-types-aiobotocore==2.13.2
+types-aiobotocore==2.15.1
# via -r requirements/_base.in
-types-aiobotocore-ec2==2.13.2
+types-aiobotocore-ec2==2.15.1
# via types-aiobotocore
-types-aiobotocore-s3==2.13.2
+types-aiobotocore-s3==2.15.1
# via types-aiobotocore
-types-aiobotocore-ssm==2.13.2
+types-aiobotocore-ssm==2.15.1
# via types-aiobotocore
-types-awscrt==0.21.2
+types-awscrt==0.21.5
# via botocore-stubs
-types-python-dateutil==2.9.0.20240821
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
@@ -281,7 +280,7 @@ typing-extensions==4.12.2
# types-aiobotocore-ec2
# types-aiobotocore-s3
# types-aiobotocore-ssm
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -296,10 +295,10 @@ wrapt==1.16.0
# aiobotocore
# deprecated
# opentelemetry-instrumentation
-yarl==1.9.4
+yarl==1.12.1
# via
# aio-pika
# aiohttp
# aiormq
-zipp==3.20.1
+zipp==3.20.2
# via importlib-metadata
diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt
index 53608f2e480b..68df09cd6f43 100644
--- a/packages/aws-library/requirements/_test.txt
+++ b/packages/aws-library/requirements/_test.txt
@@ -7,7 +7,7 @@ attrs==24.2.0
# -c requirements/_base.txt
# jsonschema
# referencing
-aws-sam-translator==1.89.0
+aws-sam-translator==1.91.0
# via cfn-lint
aws-xray-sdk==2.14.0
# via moto
@@ -18,7 +18,7 @@ boto3==1.34.131
# -c requirements/_base.txt
# aws-sam-translator
# moto
-boto3-stubs==1.35.2
+boto3-stubs==1.35.25
# via types-boto3
botocore==1.34.131
# via
@@ -27,20 +27,20 @@ botocore==1.34.131
# boto3
# moto
# s3transfer
-botocore-stubs==1.35.2
+botocore-stubs==1.35.25
# via
# -c requirements/_base.txt
# boto3-stubs
# types-aioboto3
# types-aiobotocore
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
-cfn-lint==1.10.3
+cfn-lint==1.15.0
# via moto
charset-normalizer==3.3.2
# via
@@ -54,30 +54,30 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
# moto
docker==7.1.0
# via moto
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
flexcache==0.3
# via pint
flexparser==0.3.1
# via pint
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
icdiff==2.0.7
# via pytest-icdiff
-idna==3.7
+idna==3.10
# via
# -c requirements/_base.txt
# requests
@@ -97,7 +97,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -124,7 +124,7 @@ markupsafe==2.1.5
# via
# jinja2
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
@@ -150,18 +150,18 @@ pprintpp==0.4.0
# via pytest-icdiff
py-cpuinfo==9.0.0
# via pytest-benchmark
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -213,7 +213,7 @@ referencing==0.29.3
# jsonschema
# jsonschema-path
# jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -235,7 +235,7 @@ s3transfer==0.10.2
# via
# -c requirements/_base.txt
# boto3
-setuptools==74.0.0
+setuptools==75.1.0
# via
# -c requirements/_base.txt
# moto
@@ -244,23 +244,23 @@ six==1.16.0
# -c requirements/_base.txt
# python-dateutil
# rfc3339-validator
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
termcolor==2.4.0
# via pytest-sugar
types-aioboto3==13.1.1
# via -r requirements/_test.in
-types-aiobotocore==2.13.2
+types-aiobotocore==2.15.1
# via
# -c requirements/_base.txt
# types-aioboto3
-types-awscrt==0.21.2
+types-awscrt==0.21.5
# via
# -c requirements/_base.txt
# botocore-stubs
types-boto3==1.0.2
# via -r requirements/_test.in
-types-s3transfer==0.10.1
+types-s3transfer==0.10.2
# via
# boto3-stubs
# types-aioboto3
@@ -276,7 +276,7 @@ typing-extensions==4.12.2
# pydantic
# types-aioboto3
# types-aiobotocore
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
@@ -284,7 +284,7 @@ urllib3==2.2.2
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/packages/aws-library/requirements/_tools.txt b/packages/aws-library/requirements/_tools.txt
index 36623b0c7093..861338d5b7f7 100644
--- a/packages/aws-library/requirements/_tools.txt
+++ b/packages/aws-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -47,14 +47,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -66,9 +66,9 @@ pyyaml==6.0.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==74.0.0
+setuptools==75.1.0
# via
# -c requirements/_base.txt
# -c requirements/_test.txt
@@ -80,7 +80,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/aws-library/tests/test_ec2_client.py b/packages/aws-library/tests/test_ec2_client.py
index 2b1d8fca3768..625555e9f5d1 100644
--- a/packages/aws-library/tests/test_ec2_client.py
+++ b/packages/aws-library/tests/test_ec2_client.py
@@ -130,7 +130,7 @@ async def test_get_ec2_instance_capabilities_empty_list_returns_all_options(
instance_types = await simcore_ec2_api.get_ec2_instance_capabilities(set())
assert instance_types
# NOTE: this might need adaptation when moto is updated
- assert 700 < len(instance_types) < 807
+ assert 700 < len(instance_types) < 828
async def test_get_ec2_instance_capabilities_with_invalid_type_raises(
diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt
index 327a04c56789..ff32942482a6 100644
--- a/packages/dask-task-models-library/requirements/_base.txt
+++ b/packages/dask-task-models-library/requirements/_base.txt
@@ -13,21 +13,21 @@ cloudpickle==3.0.0
# via
# dask
# distributed
-dask==2024.8.1
+dask==2024.9.0
# via
# -r requirements/_base.in
# distributed
-distributed==2024.8.1
+distributed==2024.9.0
# via dask
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
-fsspec==2024.6.1
+fsspec==2024.9.0
# via dask
-idna==3.7
+idna==3.10
# via email-validator
-importlib-metadata==8.4.0
+importlib-metadata==8.5.0
# via dask
jinja2==3.1.4
# via
@@ -49,7 +49,7 @@ markupsafe==2.1.5
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-msgpack==1.0.8
+msgpack==1.1.0
# via distributed
orjson==3.10.7
# via
@@ -65,7 +65,7 @@ partd==1.4.2
# via dask
psutil==6.0.0
# via distributed
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -88,7 +88,7 @@ referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
-rich==13.7.1
+rich==13.8.1
# via
# -r requirements/../../../packages/settings-library/requirements/_base.in
# typer
@@ -111,15 +111,15 @@ toolz==0.12.1
# partd
tornado==6.4.1
# via distributed
-typer==0.12.4
+typer==0.12.5
# via -r requirements/../../../packages/settings-library/requirements/_base.in
-types-python-dateutil==2.9.0.20240821
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
# pydantic
# typer
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -127,5 +127,5 @@ urllib3==2.2.2
# distributed
zict==3.0.0
# via distributed
-zipp==3.20.0
+zipp==3.20.2
# via importlib-metadata
diff --git a/packages/dask-task-models-library/requirements/_test.txt b/packages/dask-task-models-library/requirements/_test.txt
index 521d13265d9b..b05932129396 100644
--- a/packages/dask-task-models-library/requirements/_test.txt
+++ b/packages/dask-task-models-library/requirements/_test.txt
@@ -4,7 +4,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flexcache==0.3
# via pint
@@ -25,7 +25,7 @@ pluggy==1.5.0
# via pytest
pprintpp==0.4.0
# via pytest-icdiff
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
diff --git a/packages/dask-task-models-library/requirements/_tools.txt b/packages/dask-task-models-library/requirements/_tools.txt
index b9ee0a3c96da..779105b3894d 100644
--- a/packages/dask-task-models-library/requirements/_tools.txt
+++ b/packages/dask-task-models-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -47,14 +47,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -66,9 +66,9 @@ pyyaml==6.0.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -77,7 +77,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt
index f900e7715f00..d21b94e9cb78 100644
--- a/packages/models-library/requirements/_base.txt
+++ b/packages/models-library/requirements/_base.txt
@@ -8,7 +8,7 @@ dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
-idna==3.7
+idna==3.10
# via email-validator
jsonschema==4.23.0
# via -r requirements/_base.in
@@ -18,7 +18,7 @@ orjson==3.10.7
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
@@ -34,7 +34,7 @@ rpds-py==0.20.0
# referencing
six==1.16.0
# via python-dateutil
-types-python-dateutil==2.9.0.20240821
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via pydantic
diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt
index 1cdf223883d7..b0e97313b9f2 100644
--- a/packages/models-library/requirements/_test.txt
+++ b/packages/models-library/requirements/_test.txt
@@ -8,7 +8,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flexcache==0.3
# via pint
@@ -16,13 +16,13 @@ flexparser==0.3.1
# via pint
icdiff==2.0.7
# via pytest-icdiff
-idna==3.7
+idna==3.10
# via
# -c requirements/_base.txt
# yarl
iniconfig==2.0.0
# via pytest
-multidict==6.0.5
+multidict==6.1.0
# via yarl
packaging==24.1
# via
@@ -36,7 +36,7 @@ pprintpp==0.4.0
# via pytest-icdiff
psutil==6.0.0
# via -r requirements/_test.in
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -87,7 +87,7 @@ termcolor==2.4.0
# via pytest-sugar
types-jsonschema==4.23.0.20240813
# via -r requirements/_test.in
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.12.2
# via
@@ -95,5 +95,5 @@ typing-extensions==4.12.2
# flexcache
# flexparser
# pint
-yarl==1.9.4
+yarl==1.12.1
# via -r requirements/_test.in
diff --git a/packages/models-library/requirements/_tools.txt b/packages/models-library/requirements/_tools.txt
index 0efdb1139714..c6baceac3546 100644
--- a/packages/models-library/requirements/_tools.txt
+++ b/packages/models-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -31,7 +31,7 @@ mccabe==0.7.0
# via pylint
mdurl==0.1.2
# via markdown-it-py
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -50,7 +50,7 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
@@ -59,7 +59,7 @@ pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
pygments==2.18.0
# via rich
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -70,17 +70,17 @@ pyyaml==6.0.2
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_test.txt
# pre-commit
-rich==13.7.1
+rich==13.8.1
# via typer
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
shellingham==1.5.4
# via typer
tomlkit==0.13.2
# via pylint
-typer==0.12.4
+typer==0.12.5
# via -r requirements/_tools.in
typing-extensions==4.12.2
# via
@@ -88,7 +88,7 @@ typing-extensions==4.12.2
# -c requirements/_test.txt
# mypy
# typer
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py
new file mode 100644
index 000000000000..89a493a56cce
--- /dev/null
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/socketio.py
@@ -0,0 +1,3 @@
+from typing import Final
+
+SOCKET_IO_SERVICE_STATUS_EVENT: Final[str] = "serviceStatus"
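
A hedged sketch of how such an event name is typically emitted with python-socketio; the server instance, room, and payload below are assumptions for illustration, not part of this change:

    import socketio  # python-socketio

    from models_library.api_schemas_dynamic_scheduler.socketio import (
        SOCKET_IO_SERVICE_STATUS_EVENT,
    )

    sio = socketio.AsyncServer()  # assumed to be created and wired elsewhere

    async def notify_service_status(room: str, payload: dict) -> None:
        # broadcasts the "serviceStatus" event to every client in `room`
        await sio.emit(SOCKET_IO_SERVICE_STATUS_EVENT, data=payload, room=room)
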
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py
new file mode 100644
index 000000000000..5863b53b2bc6
--- /dev/null
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py
@@ -0,0 +1,35 @@
+from enum import auto
+
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.utils.enums import StrAutoEnum
+from pydantic import BaseModel
+
+
+class OutputStatus(StrAutoEnum):
+ UPLOAD_STARTED = auto()
+ UPLOAD_WAS_ABORTED = auto()
+ UPLOAD_FINISHED_SUCCESSFULLY = auto()
+ UPLOAD_FINISHED_WITH_ERROR = auto()
+
+
+class InputStatus(StrAutoEnum):
+ DOWNLOAD_STARTED = auto()
+ DOWNLOAD_WAS_ABORTED = auto()
+ DOWNLOAD_FINISHED_SUCCESSFULLY = auto()
+ DOWNLOAD_FINISHED_WITH_ERROR = auto()
+
+
+class _PortStatusCommon(BaseModel):
+ project_id: ProjectID
+ node_id: NodeID
+ port_key: ServicePortKey
+
+
+class OutputPortStatus(_PortStatusCommon):
+ status: OutputStatus
+
+
+class InputPortStatus(_PortStatusCommon):
+ status: InputStatus
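
A minimal usage sketch for the new models, assuming the pydantic v1 API pinned elsewhere in this PR; the identifiers below are placeholders:

    from uuid import uuid4

    from models_library.api_schemas_dynamic_sidecar.ports import (
        OutputPortStatus,
        OutputStatus,
    )

    port_status = OutputPortStatus(
        project_id=uuid4(),   # ProjectID is a UUID
        node_id=uuid4(),      # NodeID is a UUID
        port_key="output_1",  # placeholder ServicePortKey
        status=OutputStatus.UPLOAD_STARTED,
    )
    print(port_status.json())  # pydantic v1 serialization
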
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py
index 054b0834bc4d..93e34a1682bb 100644
--- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/socketio.py
@@ -1,3 +1,5 @@
from typing import Final
SOCKET_IO_SERVICE_DISK_USAGE_EVENT: Final[str] = "serviceDiskUsage"
+SOCKET_IO_STATE_OUTPUT_PORTS_EVENT: Final[str] = "stateOutputPorts"
+SOCKET_IO_STATE_INPUT_PORTS_EVENT: Final[str] = "stateInputPorts"
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
index 25a6f5fb0dd8..0c2bdd07c7fd 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
@@ -93,19 +93,36 @@ class NodeGet(OutputSchema):
class Config:
schema_extra: ClassVar[dict[str, Any]] = {
- "example": {
- "published_port": 30000,
- "entrypoint": "/the/entry/point/is/here",
- "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
- "service_key": "simcore/services/comp/itis/sleeper",
- "service_version": "1.2.3",
- "service_host": "jupyter_E1O2E-LAH",
- "service_port": 8081,
- "service_basepath": "/x/E1O2E-LAH",
- "service_state": "pending",
- "service_message": "no suitable node (insufficient resources on 1 node)",
- "user_id": 123,
- }
+ "examples": [
+ # computational
+ {
+ "published_port": 30000,
+ "entrypoint": "/the/entry/point/is/here",
+ "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
+ "service_key": "simcore/services/comp/itis/sleeper",
+ "service_version": "1.2.3",
+ "service_host": "jupyter_E1O2E-LAH",
+ "service_port": 8081,
+ "service_basepath": "/x/E1O2E-LAH",
+ "service_state": "pending",
+ "service_message": "no suitable node (insufficient resources on 1 node)",
+ "user_id": 123,
+ },
+ # dynamic
+ {
+ "published_port": 30000,
+ "entrypoint": "/the/entry/point/is/here",
+ "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
+ "service_key": "simcore/services/dynamic/some-dynamic-service",
+ "service_version": "1.2.3",
+ "service_host": "jupyter_E1O2E-LAH",
+ "service_port": 8081,
+ "service_basepath": "/x/E1O2E-LAH",
+ "service_state": "pending",
+ "service_message": "no suitable node (insufficient resources on 1 node)",
+ "user_id": 123,
+ },
+ ]
}
diff --git a/packages/models-library/src/models_library/services_enums.py b/packages/models-library/src/models_library/services_enums.py
index 50a83313482e..ec5414218e3c 100644
--- a/packages/models-library/src/models_library/services_enums.py
+++ b/packages/models-library/src/models_library/services_enums.py
@@ -11,14 +11,18 @@ class ServiceBootType(str, Enum):
@functools.total_ordering
@unique
class ServiceState(Enum):
+ FAILED = "failed"
+
PENDING = "pending"
PULLING = "pulling"
STARTING = "starting"
RUNNING = "running"
- COMPLETE = "complete"
- FAILED = "failed"
+
STOPPING = "stopping"
+ COMPLETE = "complete"
+ IDLE = "idle"
+
def __lt__(self, other):
if self.__class__ is other.__class__:
comparison_order = ServiceState.comparison_order()
@@ -39,6 +43,7 @@ def comparison_order() -> dict["ServiceState", int]:
ServiceState.RUNNING: 4,
ServiceState.STOPPING: 5,
ServiceState.COMPLETE: 6,
+ ServiceState.IDLE: 7,
}
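
Since ServiceState is decorated with functools.total_ordering and __lt__ delegates to comparison_order(), the new IDLE member sorts above COMPLETE. A small sketch of the resulting behavior; the ranks of the members not shown in this hunk are assumed to follow the declaration order above:

    from models_library.services_enums import ServiceState

    assert ServiceState.IDLE > ServiceState.COMPLETE
    # e.g. picking the "least advanced" state across a project's services:
    states = [ServiceState.RUNNING, ServiceState.IDLE, ServiceState.PULLING]
    assert min(states) is ServiceState.PULLING
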
diff --git a/packages/models-library/tests/conftest.py b/packages/models-library/tests/conftest.py
index 9169e570b510..8bf433b901d7 100644
--- a/packages/models-library/tests/conftest.py
+++ b/packages/models-library/tests/conftest.py
@@ -9,6 +9,7 @@
import pytest
pytest_plugins = [
+ "pytest_simcore.faker_projects_data",
"pytest_simcore.pydantic_models",
"pytest_simcore.pytest_global_environs",
"pytest_simcore.repository_paths",
diff --git a/packages/models-library/tests/test_utils_nodes.py b/packages/models-library/tests/test_utils_nodes.py
index 47465ce236d3..b4634770a97e 100644
--- a/packages/models-library/tests/test_utils_nodes.py
+++ b/packages/models-library/tests/test_utils_nodes.py
@@ -16,12 +16,6 @@
from models_library.utils.nodes import compute_node_hash
from pydantic import AnyUrl, parse_obj_as
-
-@pytest.fixture()
-def node_id() -> NodeID:
- return uuid4()
-
-
ANOTHER_NODE_ID = uuid4()
ANOTHER_NODE_OUTPUT_KEY = "the_output_link"
ANOTHER_NODE_PAYLOAD = {"outputs": {ANOTHER_NODE_OUTPUT_KEY: 36}}
diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt
index b7e4320e9e01..abc242615c5a 100644
--- a/packages/notifications-library/requirements/_base.txt
+++ b/packages/notifications-library/requirements/_base.txt
@@ -2,7 +2,7 @@ aiofiles==24.1.0
# via -r requirements/_base.in
aiosmtplib==3.0.2
# via -r requirements/_base.in
-alembic==1.13.2
+alembic==1.13.3
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
arrow==1.3.0
# via -r requirements/../../../packages/models-library/requirements/_base.in
@@ -20,9 +20,9 @@ dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
-greenlet==3.0.3
+greenlet==3.1.1
# via sqlalchemy
-idna==3.7
+idna==3.10
# via
# email-validator
# yarl
@@ -52,7 +52,7 @@ markupsafe==2.1.5
# mako
mdurl==0.1.2
# via markdown-it-py
-multidict==6.0.5
+multidict==6.1.0
# via yarl
orjson==3.10.7
# via
@@ -63,7 +63,7 @@ orjson==3.10.7
# -r requirements/../../../packages/models-library/requirements/_base.in
psycopg2-binary==2.9.9
# via sqlalchemy
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -80,7 +80,7 @@ referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
-rich==13.7.1
+rich==13.8.1
# via
# -r requirements/../../../packages/settings-library/requirements/_base.in
# typer
@@ -92,7 +92,7 @@ shellingham==1.5.4
# via typer
six==1.16.0
# via python-dateutil
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -100,14 +100,14 @@ sqlalchemy==1.4.53
# -c requirements/../../../requirements/constraints.txt
# -r requirements/../../../packages/postgres-database/requirements/_base.in
# alembic
-typer==0.12.4
+typer==0.12.5
# via -r requirements/../../../packages/settings-library/requirements/_base.in
-types-python-dateutil==2.9.0.20240821
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
# alembic
# pydantic
# typer
-yarl==1.9.4
+yarl==1.12.1
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt
index 25211dd50f26..15f7a5075508 100644
--- a/packages/notifications-library/requirements/_test.txt
+++ b/packages/notifications-library/requirements/_test.txt
@@ -1,4 +1,4 @@
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# requests
@@ -10,21 +10,21 @@ coverage==7.6.1
# pytest-cov
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
-greenlet==3.0.3
+greenlet==3.1.1
# via
# -c requirements/_base.txt
# sqlalchemy
icdiff==2.0.7
# via pytest-icdiff
-idna==3.7
+idna==3.10
# via
# -c requirements/_base.txt
# requests
iniconfig==2.0.0
# via pytest
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -36,7 +36,7 @@ pluggy==1.5.0
# via pytest
pprintpp==0.4.0
# via pytest-icdiff
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -77,7 +77,7 @@ six==1.16.0
# via
# -c requirements/_base.txt
# python-dateutil
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
@@ -95,7 +95,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# mypy
# sqlalchemy2-stubs
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# docker
diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt
index 8204f34a33c9..fa8bee59633c 100644
--- a/packages/notifications-library/requirements/_tools.txt
+++ b/packages/notifications-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -49,14 +49,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -67,9 +67,9 @@ pyyaml==6.0.2
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -78,7 +78,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt
index aaf19732f53f..bded83bb4f90 100644
--- a/packages/postgres-database/requirements/_base.txt
+++ b/packages/postgres-database/requirements/_base.txt
@@ -1,12 +1,12 @@
-alembic==1.13.2
+alembic==1.13.3
# via -r requirements/_base.in
async-timeout==4.0.3
# via asyncpg
asyncpg==0.29.0
# via sqlalchemy
-greenlet==3.0.3
+greenlet==3.1.1
# via sqlalchemy
-idna==3.7
+idna==3.10
# via yarl
mako==1.3.5
# via
@@ -14,15 +14,15 @@ mako==1.3.5
# alembic
markupsafe==2.1.5
# via mako
-multidict==6.0.5
+multidict==6.1.0
# via yarl
psycopg2-binary==2.9.9
# via sqlalchemy
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
@@ -31,5 +31,5 @@ typing-extensions==4.12.2
# via
# alembic
# pydantic
-yarl==1.9.4
+yarl==1.12.1
# via -r requirements/_base.in
diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt
index 914d0820310f..a0dd4d6577f2 100644
--- a/packages/postgres-database/requirements/_migration.txt
+++ b/packages/postgres-database/requirements/_migration.txt
@@ -1,8 +1,8 @@
-alembic==1.13.2
+alembic==1.13.3
# via
# -c requirements/_base.txt
# -r requirements/_migration.in
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# requests
@@ -12,11 +12,11 @@ click==8.1.7
# via -r requirements/_migration.in
docker==7.1.0
# via -r requirements/_migration.in
-greenlet==3.0.3
+greenlet==3.1.1
# via
# -c requirements/_base.txt
# sqlalchemy
-idna==3.7
+idna==3.10
# via
# -c requirements/_base.txt
# requests
@@ -31,7 +31,7 @@ markupsafe==2.1.5
# mako
requests==2.32.3
# via docker
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
@@ -42,7 +42,7 @@ typing-extensions==4.12.2
# via
# -c requirements/_base.txt
# alembic
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_migration.in
diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt
index 8bd80b78b95b..91705ca63c53 100644
--- a/packages/postgres-database/requirements/_test.txt
+++ b/packages/postgres-database/requirements/_test.txt
@@ -10,16 +10,16 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
-greenlet==3.0.3
+greenlet==3.1.1
# via
# -c requirements/_base.txt
# -c requirements/_migration.txt
# sqlalchemy
iniconfig==2.0.0
# via pytest
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -32,7 +32,7 @@ psycopg2-binary==2.9.9
# -c requirements/_base.txt
# aiopg
# sqlalchemy
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -59,7 +59,7 @@ pyyaml==6.0.2
# -r requirements/_test.in
six==1.16.0
# via python-dateutil
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
@@ -68,11 +68,11 @@ sqlalchemy==1.4.53
# aiopg
sqlalchemy2-stubs==0.0.2a38
# via sqlalchemy
-types-docker==7.1.0.20240821
+types-docker==7.1.0.20240827
# via -r requirements/_test.in
types-psycopg2==2.9.21.20240819
# via -r requirements/_test.in
-types-requests==2.32.0.20240712
+types-requests==2.32.0.20240914
# via types-docker
typing-extensions==4.12.2
# via
@@ -80,7 +80,7 @@ typing-extensions==4.12.2
# -c requirements/_migration.txt
# mypy
# sqlalchemy2-stubs
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_migration.txt
diff --git a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt
index 9247bc4b1a97..6d01f81c8e17 100644
--- a/packages/postgres-database/requirements/_tools.txt
+++ b/packages/postgres-database/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -16,9 +16,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -26,7 +26,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -48,14 +48,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -66,9 +66,9 @@ pyyaml==6.0.2
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -77,7 +77,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py
new file mode 100644
index 000000000000..16bfc82acd8c
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/10729e07000d_improve_foreign_key_dependencies.py
@@ -0,0 +1,110 @@
+"""improve foreign key dependencies
+
+Revision ID: 10729e07000d
+Revises: 47ca7335e146
+Create Date: 2024-09-24 07:52:20.253076+00:00
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "10729e07000d"
+down_revision = "47ca7335e146"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_foreign_key(
+ "fk_rut_pricing_plan_to_service_key_and_version",
+ "resource_tracker_pricing_plan_to_service",
+ "services_meta_data",
+ ["service_key", "service_version"],
+ ["key", "version"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ op.drop_index(
+ "ix_resource_tracker_pricing_plans_product_name",
+ table_name="resource_tracker_pricing_plans",
+ )
+ op.create_foreign_key(
+ "fk_rut_pricing_plans_product_name",
+ "resource_tracker_pricing_plans",
+ "products",
+ ["product_name"],
+ ["name"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ op.create_foreign_key(
+ "fk_resource_tracker_pricing_units_costs_pricing_plan_id",
+ "resource_tracker_pricing_unit_costs",
+ "resource_tracker_pricing_plans",
+ ["pricing_plan_id"],
+ ["pricing_plan_id"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ op.create_foreign_key(
+ "fk_resource_tracker_pricing_units_costs_pricing_unit_id",
+ "resource_tracker_pricing_unit_costs",
+ "resource_tracker_pricing_units",
+ ["pricing_unit_id"],
+ ["pricing_unit_id"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ op.create_foreign_key(
+ "fk_wallets_product_name",
+ "wallets",
+ "products",
+ ["product_name"],
+ ["name"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ op.create_foreign_key(
+ "fk_workspaces_product_name",
+ "workspaces",
+ "products",
+ ["product_name"],
+ ["name"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint("fk_workspaces_product_name", "workspaces", type_="foreignkey")
+ op.drop_constraint("fk_wallets_product_name", "wallets", type_="foreignkey")
+ op.drop_constraint(
+ "fk_resource_tracker_pricing_units_costs_pricing_unit_id",
+ "resource_tracker_pricing_unit_costs",
+ type_="foreignkey",
+ )
+ op.drop_constraint(
+ "fk_resource_tracker_pricing_units_costs_pricing_plan_id",
+ "resource_tracker_pricing_unit_costs",
+ type_="foreignkey",
+ )
+ op.drop_constraint(
+ "fk_rut_pricing_plans_product_name",
+ "resource_tracker_pricing_plans",
+ type_="foreignkey",
+ )
+ op.create_index(
+ "ix_resource_tracker_pricing_plans_product_name",
+ "resource_tracker_pricing_plans",
+ ["product_name"],
+ unique=False,
+ )
+ op.drop_constraint(
+ "fk_rut_pricing_plan_to_service_key_and_version",
+ "resource_tracker_pricing_plan_to_service",
+ type_="foreignkey",
+ )
+ # ### end Alembic commands ###
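
As with any Alembic revision, the migration above is applied with `alembic upgrade head` and reverted one step with `alembic downgrade -1`. Note that the auto-generated foreign keys all use ondelete="CASCADE", so deleting a product, pricing plan, or pricing unit will now also delete the rows that reference it.
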
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py
new file mode 100644
index 000000000000..63fb1a299231
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/47ca7335e146_remove_old_folders.py
@@ -0,0 +1,169 @@
+"""remove old folders
+
+Revision ID: 47ca7335e146
+Revises: 9f381dcb9b95
+Create Date: 2024-09-17 11:54:39.600025+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "47ca7335e146"
+down_revision = "9f381dcb9b95"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table("folders_to_projects")
+ op.drop_table("folders_access_rights")
+ op.drop_table("folders")
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "folders",
+ sa.Column(
+ "id",
+ sa.BIGINT(),
+ server_default=sa.text("nextval('folders_id_seq'::regclass)"),
+ autoincrement=True,
+ nullable=False,
+ ),
+ sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
+ sa.Column(
+ "description",
+ sa.VARCHAR(),
+ server_default=sa.text("''::character varying"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.Column("created_by", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column(
+ "created",
+ postgresql.TIMESTAMP(timezone=True),
+ server_default=sa.text("now()"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.Column(
+ "modified",
+ postgresql.TIMESTAMP(timezone=True),
+ server_default=sa.text("now()"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.Column("product_name", sa.VARCHAR(), autoincrement=False, nullable=False),
+ sa.ForeignKeyConstraint(
+ ["created_by"],
+ ["groups.gid"],
+ name="fk_folders_to_groups_gid",
+ ondelete="SET NULL",
+ ),
+ sa.ForeignKeyConstraint(
+ ["product_name"],
+ ["products.name"],
+ name="fk_folders_to_products_name",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint("id", name="folders_pkey"),
+ postgresql_ignore_search_path=False,
+ )
+ op.create_table(
+ "folders_access_rights",
+ sa.Column("folder_id", sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column("gid", sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column(
+ "traversal_parent_id", sa.BIGINT(), autoincrement=False, nullable=True
+ ),
+ sa.Column(
+ "original_parent_id", sa.BIGINT(), autoincrement=False, nullable=True
+ ),
+ sa.Column("read", sa.BOOLEAN(), autoincrement=False, nullable=False),
+ sa.Column("write", sa.BOOLEAN(), autoincrement=False, nullable=False),
+ sa.Column("delete", sa.BOOLEAN(), autoincrement=False, nullable=False),
+ sa.Column(
+ "created",
+ postgresql.TIMESTAMP(timezone=True),
+ server_default=sa.text("now()"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.Column(
+ "modified",
+ postgresql.TIMESTAMP(timezone=True),
+ server_default=sa.text("now()"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.ForeignKeyConstraint(
+ ["folder_id"],
+ ["folders.id"],
+ name="fk_folders_access_rights_to_folders_id",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
+ sa.ForeignKeyConstraint(
+ ["gid"],
+ ["groups.gid"],
+ name="fk_folders_access_rights_to_groups_gid",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
+ sa.ForeignKeyConstraint(
+ ["original_parent_id"],
+ ["folders.id"],
+ name="fk_folders_to_folders_id_via_original_parent_id",
+ ondelete="SET NULL",
+ ),
+ sa.ForeignKeyConstraint(
+ ["traversal_parent_id"],
+ ["folders.id"],
+ name="fk_folders_to_folders_id_via_traversal_parent_id",
+ ondelete="SET NULL",
+ ),
+ sa.PrimaryKeyConstraint("folder_id", "gid", name="folders_access_rights_pk"),
+ )
+ op.create_table(
+ "folders_to_projects",
+ sa.Column("folder_id", sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column("project_uuid", sa.VARCHAR(), autoincrement=False, nullable=False),
+ sa.Column(
+ "created",
+ postgresql.TIMESTAMP(timezone=True),
+ server_default=sa.text("now()"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.Column(
+ "modified",
+ postgresql.TIMESTAMP(timezone=True),
+ server_default=sa.text("now()"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.ForeignKeyConstraint(
+ ["folder_id"],
+ ["folders.id"],
+ name="fk_folders_to_projects_to_folders_id",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
+ sa.ForeignKeyConstraint(
+ ["project_uuid"],
+ ["projects.uuid"],
+ name="fk_folders_to_projects_to_projects_uuid",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint(
+ "folder_id", "project_uuid", name="projects_to_folder_pk"
+ ),
+ )
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/folders.py b/packages/postgres-database/src/simcore_postgres_database/models/folders.py
deleted file mode 100644
index 82f3ef1a6c4f..000000000000
--- a/packages/postgres-database/src/simcore_postgres_database/models/folders.py
+++ /dev/null
@@ -1,183 +0,0 @@
-import sqlalchemy as sa
-
-from ._common import (
- column_created_datetime,
- column_modified_datetime,
- register_modified_datetime_auto_update_trigger,
-)
-from .base import metadata
-
-folders = sa.Table(
- "folders",
- metadata,
- sa.Column(
- "id",
- sa.BigInteger,
- nullable=False,
- autoincrement=True,
- primary_key=True,
- doc="Primary key",
- ),
- sa.Column(
- "name",
- sa.String,
- nullable=False,
- doc="name of the folder",
- ),
- sa.Column(
- "description",
- sa.String,
- nullable=False,
- server_default="",
- doc="user provided description for the folder",
- ),
- sa.Column(
- "product_name",
- sa.String,
- sa.ForeignKey(
- "products.name",
- onupdate="CASCADE",
- ondelete="CASCADE",
- name="fk_folders_to_products_name",
- ),
- nullable=False,
- doc="product identifier",
- ),
- sa.Column(
- "created_by",
- sa.BigInteger,
- sa.ForeignKey(
- "groups.gid",
- name="fk_folders_to_groups_gid",
- ondelete="SET NULL",
- ),
- nullable=True,
- doc="traces who created the folder",
- ),
- column_created_datetime(timezone=True),
- column_modified_datetime(timezone=True),
-)
-
-
-register_modified_datetime_auto_update_trigger(folders)
-
-folders_access_rights = sa.Table(
- "folders_access_rights",
- metadata,
- sa.Column(
- "folder_id",
- sa.BigInteger,
- sa.ForeignKey(
- "folders.id",
- name="fk_folders_access_rights_to_folders_id",
- onupdate="CASCADE",
- ondelete="CASCADE",
- ),
- ),
- sa.Column(
- "gid",
- sa.BigInteger,
- sa.ForeignKey(
- "groups.gid",
- name="fk_folders_access_rights_to_groups_gid",
- onupdate="CASCADE",
- ondelete="CASCADE",
- ),
- ),
- sa.Column(
- "traversal_parent_id",
- sa.BigInteger,
- sa.ForeignKey(
- "folders.id",
- name="fk_folders_to_folders_id_via_traversal_parent_id",
- ondelete="SET NULL",
- ),
- doc=(
- "used for listing the contes of the folders, "
- "can be changed by the user by moving the folder"
- ),
- ),
- sa.Column(
- "original_parent_id",
- sa.BigInteger,
- sa.ForeignKey(
- "folders.id",
- name="fk_folders_to_folders_id_via_original_parent_id",
- ondelete="SET NULL",
- ),
- doc=(
- "initially equal the same as `traversal_parent_id`, "
- "keeps track of the original parent, "
- "can never be changed once insteted"
- ),
- ),
- sa.Column(
- "read",
- sa.Boolean(),
- nullable=False,
- doc=(
- "if True can: "
- "view folders inside current folder "
- "view projects inside current folder"
- ),
- ),
- sa.Column(
- "write",
- sa.Boolean(),
- nullable=False,
- doc=(
- "if True can: "
- "create folder inside current folder, "
- "add project to folder"
- ),
- ),
- sa.Column(
- "delete",
- sa.Boolean(),
- nullable=False,
- doc=(
- "if True can: "
- "share folder, "
- "rename folder, "
- "edit folder description, "
- "delete folder, "
- "delete project form folder"
- ),
- ),
- column_created_datetime(timezone=True),
- column_modified_datetime(timezone=True),
- sa.PrimaryKeyConstraint("folder_id", "gid", name="folders_access_rights_pk"),
-)
-
-register_modified_datetime_auto_update_trigger(folders_access_rights)
-
-
-folders_to_projects = sa.Table(
- "folders_to_projects",
- metadata,
- sa.Column(
- "folder_id",
- sa.BigInteger,
- sa.ForeignKey(
- "folders.id",
- name="fk_folders_to_projects_to_folders_id",
- onupdate="CASCADE",
- ondelete="CASCADE",
- ),
- ),
- sa.Column(
- "project_uuid",
- sa.String,
- sa.ForeignKey(
- "projects.uuid",
- name="fk_folders_to_projects_to_projects_uuid",
- onupdate="CASCADE",
- ondelete="CASCADE",
- ),
- ),
- column_created_datetime(timezone=True),
- column_modified_datetime(timezone=True),
- sa.PrimaryKeyConstraint("folder_id", "project_uuid", name="projects_to_folder_pk"),
-)
-
-register_modified_datetime_auto_update_trigger(folders_to_projects)
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py
index b0040d93ae66..820ec42fc506 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plan_to_service.py
@@ -45,4 +45,11 @@
doc="Option to mark default pricing plan for the service (ex. when there are more pricing plans for the same service)",
),
# ---------------------------
+ sa.ForeignKeyConstraint(
+ ["service_key", "service_version"],
+ ["services_meta_data.key", "services_meta_data.version"],
+ name="fk_rut_pricing_plan_to_service_key_and_version",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
)
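
For reference, a composite foreign key must be declared as a table-level ForeignKeyConstraint, since a column-level ForeignKey cannot span two columns; the (service_key, service_version) pair is matched against (key, version) as a unit. A minimal standalone sketch with demo table names:

    import sqlalchemy as sa

    metadata = sa.MetaData()

    parent = sa.Table(
        "services_meta_data_demo",
        metadata,
        sa.Column("key", sa.String, primary_key=True),
        sa.Column("version", sa.String, primary_key=True),
    )

    child = sa.Table(
        "pricing_plan_to_service_demo",
        metadata,
        sa.Column("service_key", sa.String, nullable=False),
        sa.Column("service_version", sa.String, nullable=False),
        # both columns are listed together so the pair is enforced atomically
        sa.ForeignKeyConstraint(
            ["service_key", "service_version"],
            ["services_meta_data_demo.key", "services_meta_data_demo.version"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
    )
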
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py
index 8ec50b0f206a..81d98ebcac10 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_plans.py
@@ -33,9 +33,14 @@ class PricingPlanClassification(str, enum.Enum):
sa.Column(
"product_name",
sa.String,
+ sa.ForeignKey(
+ "products.name",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ name="fk_rut_pricing_plans_product_name",
+ ),
nullable=False,
- doc="Product name",
- index=True,
+ doc="Products unique name",
),
sa.Column(
"display_name",
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py
index defaf49eb4a5..460315323877 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/resource_tracker_pricing_unit_costs.py
@@ -22,8 +22,14 @@
sa.Column(
"pricing_plan_id",
sa.BigInteger,
+ sa.ForeignKey(
+ "resource_tracker_pricing_plans.pricing_plan_id",
+ name="fk_resource_tracker_pricing_units_costs_pricing_plan_id",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
nullable=False,
- doc="Parent pricing plan",
+ doc="Foreign key to pricing plan",
index=True,
),
sa.Column(
@@ -35,8 +41,14 @@
sa.Column(
"pricing_unit_id",
sa.BigInteger,
+ sa.ForeignKey(
+ "resource_tracker_pricing_units.pricing_unit_id",
+ name="fk_resource_tracker_pricing_units_costs_pricing_unit_id",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ ),
nullable=False,
- doc="Parent pricing unit",
+ doc="Foreign key to pricing unit",
index=True,
),
sa.Column(
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/wallets.py b/packages/postgres-database/src/simcore_postgres_database/models/wallets.py
index e26545f1f4ac..3c7655299761 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/wallets.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/wallets.py
@@ -50,7 +50,18 @@ class WalletStatus(str, enum.Enum):
),
column_created_datetime(timezone=True),
column_modified_datetime(timezone=True),
- sa.Column("product_name", sa.String, nullable=False, doc="Product name"),
+ sa.Column(
+ "product_name",
+ sa.String,
+ sa.ForeignKey(
+ "products.name",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ name="fk_wallets_product_name",
+ ),
+ nullable=False,
+ doc="Products unique name",
+ ),
)
# ------------------------ TRIGGERS
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py b/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py
index f4b76812a6c1..998c7676761b 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/workspaces.py
@@ -34,7 +34,18 @@
nullable=False,
doc="Identifier of the group that owns this workspace (Should be just PRIMARY GROUP)",
),
- sa.Column("product_name", sa.String, nullable=False, doc="Product name"),
+ sa.Column(
+ "product_name",
+ sa.String,
+ sa.ForeignKey(
+ "products.name",
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ name="fk_workspaces_product_name",
+ ),
+ nullable=False,
+ doc="Products unique name",
+ ),
column_created_datetime(timezone=True),
column_modified_datetime(timezone=True),
)
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py
deleted file mode 100644
index e0f59cdcfd23..000000000000
--- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py
+++ /dev/null
@@ -1,1155 +0,0 @@
-import re
-import uuid
-from collections.abc import Iterable
-from dataclasses import dataclass
-from datetime import datetime
-from enum import Enum
-from functools import reduce
-from typing import Any, ClassVar, Final, TypeAlias, cast
-
-import sqlalchemy as sa
-from aiopg.sa.connection import SAConnection
-from aiopg.sa.result import RowProxy
-from pydantic import (
- BaseModel,
- ConstrainedStr,
- Field,
- NonNegativeInt,
- PositiveInt,
- ValidationError,
- parse_obj_as,
-)
-from pydantic.errors import PydanticErrorMixin
-from simcore_postgres_database.utils_ordering import OrderByDict
-from sqlalchemy import Column, func
-from sqlalchemy.dialects import postgresql
-from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
-from sqlalchemy.sql.elements import ColumnElement, Label
-from sqlalchemy.sql.selectable import CTE
-
-from .models.folders import folders, folders_access_rights, folders_to_projects
-from .models.groups import GroupType, groups
-from .utils_ordering import OrderDirection
-
-_ProductName: TypeAlias = str
-_ProjectID: TypeAlias = uuid.UUID
-_GroupID: TypeAlias = PositiveInt
-_FolderID: TypeAlias = PositiveInt
-
-###
-### ERRORS
-###
-
-
-"""Errors hierarchy
-
-FoldersError
- * InvalidFolderNameError
- * FolderAccessError
- * FolderNotFoundError
- * FolderNotSharedWithGidError
- * InsufficientPermissionsError
- * NoAccessForGroupsFoundError
- * BaseCreateFolderError
- * FolderAlreadyExistsError
- * ParentFolderIsNotWritableError
- * CouldNotCreateFolderError
- * GroupIdDoesNotExistError
- * RootFolderRequiresAtLeastOnePrimaryGroupError
- * BaseMoveFolderError
- * CannotMoveFolderSharedViaNonPrimaryGroupError
- * BaseAddProjectError
- * ProjectAlreadyExistsInFolderError
-"""
-
-
-class FoldersError(PydanticErrorMixin, RuntimeError):
- pass
-
-
-class InvalidFolderNameError(FoldersError):
- msg_template = "Provided folder name='{name}' is invalid: {reason}"
-
-
-class FolderAccessError(FoldersError):
- pass
-
-
-class FolderNotFoundError(FolderAccessError):
- msg_template = "no entry found for folder_id={folder_id}, gids={gids} and product_name={product_name}"
-
-
-class FolderNotSharedWithGidError(FolderAccessError):
- msg_template = "folder_id={folder_id} was not shared with gids={gids}"
-
-
-class InsufficientPermissionsError(FolderAccessError):
- msg_template = "could not find a parent for folder_id={folder_id} and gids={gids}, with permissions={permissions}"
-
-
-class NoAccessForGroupsFoundError(FolderAccessError):
- msg_template = "No parent found for folder_id={folder_id} and gids={gids}, with permissions={permissions}"
-
-
-class BaseCreateFolderError(FoldersError):
- pass
-
-
-class FolderAlreadyExistsError(BaseCreateFolderError):
- msg_template = "A folder='{folder}' with parent='{parent}' in product_name={product_name} already exists"
-
-
-class ParentFolderIsNotWritableError(BaseCreateFolderError):
- msg_template = "Cannot create any sub-folders inside folder_id={parent_folder_id} since it is not writable for gid={gid}."
-
-
-class CouldNotCreateFolderError(BaseCreateFolderError):
- msg_template = "Could not create folder='{folder}' and parent='{parent}'"
-
-
-class NoGroupIDFoundError(BaseCreateFolderError):
- msg_template = "None of the provided gids='{gids}' was found"
-
-
-class RootFolderRequiresAtLeastOnePrimaryGroupError(BaseCreateFolderError):
- msg_template = (
- "No parent={parent} defined and groupIDs={gids} did not contain a PRIMARY group. "
- "Cannot create a folder isnide the 'root' wihtout using the user's group."
- )
-
-
-class BaseMoveFolderError(FoldersError):
- pass
-
-
-class CannotMoveFolderSharedViaNonPrimaryGroupError(BaseMoveFolderError):
- msg_template = (
- "deltected group_type={group_type} for gid={gid} which is not allowed"
- )
-
-
-class BaseAddProjectError(FoldersError):
- pass
-
-
-class ProjectAlreadyExistsInFolderError(BaseAddProjectError):
- msg_template = (
- "project_id={project_uuid} in folder_id={folder_id} is already present"
- )
-
-
-###
-### UTILS ACCESS LAYER
-###
-
-
-class FolderAccessRole(Enum):
- """Used by the frontend to indicate a role in a simple manner"""
-
- NO_ACCESS = 0
- VIEWER = 1
- EDITOR = 2
- OWNER = 3
-
-
-@dataclass(frozen=True)
-class _FolderPermissions:
- read: bool
- write: bool
- delete: bool
-
- def to_dict(self, *, include_only_true: bool = False) -> dict[str, bool]:
- data: dict[str, bool] = {
- "read": self.read,
- "write": self.write,
- "delete": self.delete,
- }
- if include_only_true:
- for key_to_remove in [k for k, v in data.items() if not v]:
- data.pop(key_to_remove)
-
- return data
-
-
-def _make_permissions(
- *, r: bool = False, w: bool = False, d: bool = False, description: str = ""
-) -> "_FolderPermissions":
- _ = description
- return _FolderPermissions(read=r, write=w, delete=d)
-
-
-def _only_true_permissions(permissions: _FolderPermissions) -> dict:
- return permissions.to_dict(include_only_true=True)
-
-
-def _or_reduce(x: _FolderPermissions, y: _FolderPermissions) -> _FolderPermissions:
- return _FolderPermissions(
- read=x.read or y.read, write=x.write or y.write, delete=x.delete or y.delete
- )
-
-
-def _or_dicts_list(dicts: Iterable[_FolderPermissions]) -> _FolderPermissions:
- if not dicts:
- return _make_permissions()
- return reduce(_or_reduce, dicts)
-
-
-class _BasePermissions:
- GET_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(r=True)
- LIST_FOLDERS: ClassVar[_FolderPermissions] = _make_permissions(r=True)
-
- CREATE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(w=True)
- ADD_PROJECT_TO_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(w=True)
-
- SHARE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True)
- UPDATE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True)
- DELETE_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True)
- REMOVE_PROJECT_FROM_FOLDER: ClassVar[_FolderPermissions] = _make_permissions(d=True)
-
- _MOVE_PROJECT_FROM_FOLDER_SOURCE: ClassVar[_FolderPermissions] = _make_permissions(
- d=True,
- description="apply to folder where the project is",
- )
- _MOVE_PROJECT_FROM_FOLDER_DESTINATION: ClassVar[
- _FolderPermissions
- ] = _make_permissions(
- w=True, description="apply on the folder receiving the project"
- )
- MOVE_PROJECT_FROM_FOLDER: ClassVar[_FolderPermissions] = _or_dicts_list(
- [_MOVE_PROJECT_FROM_FOLDER_SOURCE, _MOVE_PROJECT_FROM_FOLDER_DESTINATION]
- )
-
- _MOVE_FOLDER_SOURCE: ClassVar[_FolderPermissions] = _make_permissions(
- d=True,
- description="apply to folder providing the data",
- )
- _MOVE_FOLDER_DESTINATION: ClassVar[_FolderPermissions] = _make_permissions(
- w=True, description="apply on the folder receiving the data"
- )
- MOVE_FOLDER: ClassVar[_FolderPermissions] = _or_dicts_list(
- [_MOVE_FOLDER_SOURCE, _MOVE_FOLDER_DESTINATION]
- )
-
-
-NO_ACCESS_PERMISSIONS: _FolderPermissions = _make_permissions()
-
-VIEWER_PERMISSIONS: _FolderPermissions = _or_dicts_list(
- [
- _BasePermissions.LIST_FOLDERS,
- ]
-)
-EDITOR_PERMISSIONS: _FolderPermissions = _or_dicts_list(
- [
- VIEWER_PERMISSIONS,
- _BasePermissions.CREATE_FOLDER,
- _BasePermissions.ADD_PROJECT_TO_FOLDER,
- ]
-)
-OWNER_PERMISSIONS: _FolderPermissions = _or_dicts_list(
- [
- EDITOR_PERMISSIONS,
- _BasePermissions.SHARE_FOLDER,
- _BasePermissions.UPDATE_FOLDER,
- _BasePermissions.DELETE_FOLDER,
- _BasePermissions.REMOVE_PROJECT_FROM_FOLDER,
- _BasePermissions.MOVE_FOLDER,
- ]
-)
-
-_ROLE_TO_PERMISSIONS: dict[FolderAccessRole, _FolderPermissions] = {
- FolderAccessRole.NO_ACCESS: NO_ACCESS_PERMISSIONS,
- FolderAccessRole.VIEWER: VIEWER_PERMISSIONS,
- FolderAccessRole.EDITOR: EDITOR_PERMISSIONS,
- FolderAccessRole.OWNER: OWNER_PERMISSIONS,
-}
-
-
-def _get_permissions_from_role(role: FolderAccessRole) -> _FolderPermissions:
- return _ROLE_TO_PERMISSIONS[role]
-
-
-def _requires(*permissions: _FolderPermissions) -> _FolderPermissions:
- if len(permissions) == 0:
- return _make_permissions()
- return _or_dicts_list(permissions)
-
-
-def _get_filter_for_enabled_permissions(
- permissions: _FolderPermissions, table: sa.Table | CTE
-) -> ColumnElement | bool:
- clauses: list[ColumnElement] = []
-
- if permissions.read:
- clauses.append(table.c.read.is_(True))
- if permissions.write:
- clauses.append(table.c.write.is_(True))
- if permissions.delete:
- clauses.append(table.c.delete.is_(True))
-
- return sa.and_(*clauses) if clauses else True
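-
-# NOTE: illustrative rendering of the filter above: for EDITOR_PERMISSIONS
-# (read + write) against folders_access_rights it compiles to roughly
-#
-#     folders_access_rights.read IS true AND folders_access_rights.write IS true
-#
-# while for all-false permissions it degenerates to the Python literal `True`,
-# i.e. no filtering at all.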
-
-
-###
-### UTILS
-###
-
-
-class FolderName(ConstrainedStr):
- regex = re.compile(
- r'^(?!.*[<>:"/\\|?*\]])(?!.*\b(?:LPT9|COM1|LPT1|COM2|LPT3|LPT4|CON|COM5|COM3|COM4|AUX|PRN|LPT2|LPT5|COM6|LPT7|NUL|COM8|LPT6|COM9|COM7|LPT8)\b).+$',
- re.IGNORECASE,
- )
- min_length = 1
- max_length = 255
-
-
-class FolderEntry(BaseModel):
- id: _FolderID
- parent_folder: _FolderID | None = Field(alias="traversal_parent_id")
- name: str
- description: str
- owner: _GroupID = Field(alias="created_by")
- created: datetime = Field(alias="access_created")
- modified: datetime = Field(alias="access_modified")
- my_access_rights: _FolderPermissions
- access_rights: dict[_GroupID, _FolderPermissions]
-
- class Config:
- orm_mode = True
-
-
-class _ResolvedAccessRights(BaseModel):
- folder_id: _FolderID
- gid: _GroupID
- traversal_parent_id: _FolderID | None
- original_parent_id: _FolderID | None
- read: bool
- write: bool
- delete: bool
- level: int
-
- class Config:
- orm_mode = True
-
-
-async def _get_resolved_access_rights(
- connection: SAConnection,
- folder_id: _FolderID,
- gid: _GroupID,
- *,
- permissions: _FolderPermissions | None,
-) -> _ResolvedAccessRights | None:
-
- # Define the anchor CTE
- access_rights_cte = (
- sa.select(
- folders_access_rights.c.folder_id,
- folders_access_rights.c.gid,
- folders_access_rights.c.traversal_parent_id,
- folders_access_rights.c.original_parent_id,
- folders_access_rights.c.read,
- folders_access_rights.c.write,
- folders_access_rights.c.delete,
- sa.literal_column("0").label("level"),
- )
- .where(folders_access_rights.c.folder_id == sa.bindparam("start_folder_id"))
- .cte(name="access_rights_cte", recursive=True)
- )
-
- # Define the recursive part of the CTE
- recursive = sa.select(
- folders_access_rights.c.folder_id,
- folders_access_rights.c.gid,
- folders_access_rights.c.traversal_parent_id,
- folders_access_rights.c.original_parent_id,
- folders_access_rights.c.read,
- folders_access_rights.c.write,
- folders_access_rights.c.delete,
- sa.literal_column("access_rights_cte.level + 1").label("level"),
- ).select_from(
- folders_access_rights.join(
- access_rights_cte,
- folders_access_rights.c.folder_id == access_rights_cte.c.original_parent_id,
- )
- )
-
- # Combine anchor and recursive CTE
- folder_hierarchy: CTE = access_rights_cte.union_all(recursive)
-
- # Final query to filter and order results
- query = (
- sa.select(
- folder_hierarchy.c.folder_id,
- folder_hierarchy.c.gid,
- folder_hierarchy.c.traversal_parent_id,
- folder_hierarchy.c.original_parent_id,
- folder_hierarchy.c.read,
- folder_hierarchy.c.write,
- folder_hierarchy.c.delete,
- folder_hierarchy.c.level,
- )
- .where(
- True
- if not permissions
- else _get_filter_for_enabled_permissions(permissions, folder_hierarchy)
- )
- .where(folder_hierarchy.c.original_parent_id.is_(None))
- .where(folder_hierarchy.c.gid == gid)
- .order_by(folder_hierarchy.c.level.asc())
- )
-
- result = await connection.execute(query.params(start_folder_id=folder_id))
- resolved_access_rights: RowProxy | None = await result.fetchone()
- return (
- _ResolvedAccessRights.from_orm(resolved_access_rights)
- if resolved_access_rights
- else None
- )
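-
-# NOTE: hand-written sketch of the SQL assembled above (for readability only):
-#
-#     WITH RECURSIVE access_rights_cte AS (
-#         SELECT ..., 0 AS level
-#         FROM folders_access_rights
-#         WHERE folder_id = :start_folder_id
-#       UNION ALL
-#         SELECT ..., access_rights_cte.level + 1
-#         FROM folders_access_rights
-#         JOIN access_rights_cte
-#             ON folders_access_rights.folder_id = access_rights_cte.original_parent_id
-#     )
-#     SELECT * FROM access_rights_cte
-#     WHERE original_parent_id IS NULL AND gid = :gid  -- plus optional permission filters
-#     ORDER BY level ASC
-#
-# i.e. it walks up the original-parent chain until it reaches the root of the
-# share (original_parent_id IS NULL) granted to `gid`.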
-
-
-async def _check_and_get_folder_access_by_group(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gid: _GroupID,
- *,
- error_reporting_gids: set[_GroupID],
- permissions: _FolderPermissions,
-) -> _ResolvedAccessRights:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- """
- folder_entry: int | None = await connection.scalar(
- sa.select(folders.c.id)
- .where(folders.c.id == folder_id)
- .where(folders.c.product_name == product_name)
- )
- if not folder_entry:
- raise FolderNotFoundError(
- folder_id=folder_id, gids=error_reporting_gids, product_name=product_name
- )
-
- # check if folder was shared
- resolved_access_rights_without_permissions = await _get_resolved_access_rights(
- connection,
- folder_id,
- gid,
- permissions=None,
- )
- if not resolved_access_rights_without_permissions:
- raise FolderNotSharedWithGidError(
- folder_id=folder_id, gids=error_reporting_gids
- )
-
- # check if there are permissions
- resolved_access_rights = await _get_resolved_access_rights(
- connection,
- folder_id,
- gid,
- permissions=permissions,
- )
- if resolved_access_rights is None:
- raise InsufficientPermissionsError(
- folder_id=folder_id,
- gids=error_reporting_gids,
- permissions=_only_true_permissions(permissions),
- )
-
- return resolved_access_rights
-
-
-async def _check_and_get_folder_access(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- permissions: _FolderPermissions,
-) -> _ResolvedAccessRights:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- """
- folder_access_error = None
-
- for gid in gids:
- try:
- return await _check_and_get_folder_access_by_group(
- connection,
- product_name,
- folder_id,
- gid,
- error_reporting_gids=gids,
- permissions=permissions,
- )
- except FolderAccessError as e: # noqa: PERF203
- folder_access_error = e
-
- if folder_access_error:
- raise folder_access_error
-
- raise NoAccessForGroupsFoundError(
- folder_id=folder_id,
- gids=gids,
- permissions=_only_true_permissions(permissions),
- )
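-
-# NOTE: the gids above are probed one by one and, when none grants access, the
-# *last* FolderAccessError raised is re-raised; NoAccessForGroupsFoundError is
-# only reachable when `gids` is empty.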
-
-
-###
-### API DB LAYER
-###
-
-
-async def folder_create(
- connection: SAConnection,
- product_name: _ProductName,
- name: str,
- gids: set[_GroupID],
- description: str = "",
- parent: _FolderID | None = None,
- _required_permissions: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions.CREATE_FOLDER
- ),
-) -> _FolderID:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- FolderAlreadyExistsError
- CouldNotCreateFolderError
- GroupIdDoesNotExistError
- RootFolderRequiresAtLeastOnePrimaryGroupError
- """
- try:
- parse_obj_as(FolderName, name)
- except ValidationError as exc:
- raise InvalidFolderNameError(name=name, reason=f"{exc}") from exc
-
- async with connection.begin():
- entry_exists: int | None = await connection.scalar(
- sa.select(folders.c.id)
- .select_from(
- folders.join(
- folders_access_rights,
- folders.c.id == folders_access_rights.c.folder_id,
- )
- )
- .where(folders.c.name == name)
- .where(folders.c.product_name == product_name)
- .where(folders_access_rights.c.original_parent_id == parent)
- )
- if entry_exists:
- raise FolderAlreadyExistsError(
- product_name=product_name, folder=name, parent=parent
- )
-
- # `permissions_gid` is computed as follows:
- # - if the folder has a parent: taken from the resolved access rights of the parent folder
- # - if it is a root folder (i.e. no parent): taken from the user's primary group
- permissions_gid = None
- if parent:
- resolved_access_rights = await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=parent,
- gids=gids,
- permissions=_required_permissions,
- )
- permissions_gid = resolved_access_rights.gid
-
- if permissions_gid is None:
- groups_results: list[RowProxy] | None = await (
- await connection.execute(
- sa.select(groups.c.gid, groups.c.type).where(groups.c.gid.in_(gids))
- )
- ).fetchall()
-
- if not groups_results:
- raise NoGroupIDFoundError(gids=gids)
-
- primary_gid = None
- for group in groups_results:
- if group["type"] == GroupType.PRIMARY:
- primary_gid = group["gid"]
- if primary_gid is None:
- raise RootFolderRequiresAtLeastOnePrimaryGroupError(
- parent=parent, gids=gids
- )
-
- permissions_gid = primary_gid
-
- # folder entry can now be inserted
- folder_id = await connection.scalar(
- sa.insert(folders)
- .values(
- name=name,
- description=description,
- created_by=permissions_gid,
- product_name=product_name,
- )
- .returning(folders.c.id)
- )
-
- if not folder_id:
- raise CouldNotCreateFolderError(folder=name, parent=parent)
-
- await connection.execute(
- sa.insert(folders_access_rights).values(
- folder_id=folder_id,
- gid=permissions_gid,
- traversal_parent_id=parent,
- original_parent_id=parent,
- **OWNER_PERMISSIONS.to_dict(),
- )
- )
-
- return _FolderID(folder_id)
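-
-# NOTE: usage sketch (illustrative; assumes an aiopg connection and an existing
-# primary group id, all names below are made up):
-#
-#     root_folder_id = await folder_create(
-#         connection, "my_product", "datasets", {user_primary_gid}
-#     )
-#     sub_folder_id = await folder_create(
-#         connection, "my_product", "raw", {user_primary_gid}, parent=root_folder_id
-#     )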
-
-
-async def folder_share_or_update_permissions(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- sharing_gids: set[_GroupID],
- *,
- recipient_gid: _GroupID,
- recipient_role: FolderAccessRole,
- required_permissions: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions.SHARE_FOLDER
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- """
- # NOTE: if any of the `sharing_gids` has permission to share, the folder can be shared with any `FolderAccessRole`
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=sharing_gids,
- permissions=required_permissions,
- )
-
- # update or create permissions entry
- sharing_permissions: _FolderPermissions = _get_permissions_from_role(
- recipient_role
- )
- data: dict[str, Any] = {
- "folder_id": folder_id,
- "gid": recipient_gid,
- "original_parent_id": None,
- "traversal_parent_id": None,
- **sharing_permissions.to_dict(),
- }
- insert_stmt = postgresql.insert(folders_access_rights).values(**data)
- upsert_stmt = insert_stmt.on_conflict_do_update(
- index_elements=[
- folders_access_rights.c.folder_id,
- folders_access_rights.c.gid,
- ],
- set_=data,
- )
- await connection.execute(upsert_stmt)
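-
-# NOTE: thanks to the ON CONFLICT (folder_id, gid) DO UPDATE above, re-sharing
-# with the same recipient_gid never duplicates the folders_access_rights row;
-# the existing entry is overwritten with the values derived from the new
-# recipient_role.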
-
-
-async def folder_update(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- name: str | None = None,
- description: str | None = None,
- _required_permissions: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions.UPDATE_FOLDER
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- """
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=gids,
- permissions=_required_permissions,
- )
-
- # do not update if nothing changed
- if name is None and description is None:
- return
-
- values: dict[str, str] = {}
- if name:
- values["name"] = name
- if description is not None: # Can be empty string
- values["description"] = description
-
- # update entry
- await connection.execute(
- folders.update().where(folders.c.id == folder_id).values(**values)
- )
-
-
-async def folder_delete(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- _required_permissions: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions.DELETE_FOLDER
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- """
- children_folder_ids: list[_FolderID] = []
-
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=gids,
- permissions=_required_permissions,
- )
-
- # list all children then delete
- results = await connection.execute(
- folders_access_rights.select().where(
- folders_access_rights.c.traversal_parent_id == folder_id
- )
- )
- rows = await results.fetchall()
- if rows:
- for entry in rows:
- children_folder_ids.append(entry.folder_id) # noqa: PERF401
-
- # first remove all children
- for child_folder_id in children_folder_ids:
- await folder_delete(connection, product_name, child_folder_id, gids)
-
- # as a last step remove the folder itself
- async with connection.begin():
- await connection.execute(folders.delete().where(folders.c.id == folder_id))
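-
-# NOTE: children are removed depth-first via recursion; each folder is deleted
-# in its own transaction, so a failure halfway leaves the already processed
-# children removed while the parent folder survives.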
-
-
-async def folder_move(
- connection: SAConnection,
- product_name: _ProductName,
- source_folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- destination_folder_id: _FolderID | None,
- required_permissions_source: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions._MOVE_FOLDER_SOURCE # pylint:disable=protected-access # noqa: SLF001
- ),
- required_permissions_destination: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions._MOVE_FOLDER_DESTINATION # pylint:disable=protected-access # noqa: SLF001
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- CannotMoveFolderSharedViaNonPrimaryGroupError
- """
- async with connection.begin():
- source_access_entry = await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=source_folder_id,
- gids=gids,
- permissions=required_permissions_source,
- )
-
- source_access_gid = source_access_entry.gid
- group_type: GroupType | None = await connection.scalar(
- sa.select(groups.c.type).where(groups.c.gid == source_access_gid)
- )
- # NOTE: the restriction to PRIMARY groups might be dropped in the future
- if group_type is None or group_type != GroupType.PRIMARY:
- raise CannotMoveFolderSharedViaNonPrimaryGroupError(
- group_type=group_type, gid=source_access_gid
- )
- if destination_folder_id:
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=destination_folder_id,
- gids=gids,
- permissions=required_permissions_destination,
- )
-
- # set the new traversal_parent_id of source_folder_id to destination_folder_id
- await connection.execute(
- folders_access_rights.update()
- .where(
- sa.and_(
- folders_access_rights.c.folder_id == source_folder_id,
- folders_access_rights.c.gid.in_(gids),
- )
- )
- .values(traversal_parent_id=destination_folder_id)
- )
-
-
-async def folder_add_project(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- project_uuid: _ProjectID,
- required_permissions=_requires( # noqa: B008
- _BasePermissions.ADD_PROJECT_TO_FOLDER
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- ProjectAlreadyExistsInFolderError
- """
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=gids,
- permissions=required_permissions,
- )
-
- # check if already added in folder
- project_in_folder_entry = await (
- await connection.execute(
- folders_to_projects.select()
- .where(folders_to_projects.c.folder_id == folder_id)
- .where(folders_to_projects.c.project_uuid == f"{project_uuid}")
- )
- ).fetchone()
- if project_in_folder_entry:
- raise ProjectAlreadyExistsInFolderError(
- project_uuid=project_uuid, folder_id=folder_id
- )
-
- # finally add project to folder
- await connection.execute(
- folders_to_projects.insert().values(
- folder_id=folder_id, project_uuid=f"{project_uuid}"
- )
- )
-
-
-async def folder_move_project(
- connection: SAConnection,
- product_name: _ProductName,
- source_folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- project_uuid: _ProjectID,
- destination_folder_id: _FolderID | None,
- _required_permissions_source: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions._MOVE_PROJECT_FROM_FOLDER_SOURCE # pylint:disable=protected-access # noqa: SLF001
- ),
- _required_permissions_destination: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions._MOVE_PROJECT_FROM_FOLDER_DESTINATION # pylint:disable=protected-access # noqa: SLF001
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- CannotMoveFolderSharedViaNonPrimaryGroupError
- """
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=source_folder_id,
- gids=gids,
- permissions=_required_permissions_source,
- )
-
- if destination_folder_id is None:
- # NOTE: As the project is moved to the root directory we will just remove it from the folders_to_projects table
- await folder_remove_project(
- connection,
- product_name,
- folder_id=source_folder_id,
- gids=gids,
- project_uuid=project_uuid,
- )
- return
-
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=destination_folder_id,
- gids=gids,
- permissions=_required_permissions_destination,
- )
-
- await connection.execute(
- folders_to_projects.delete()
- .where(folders_to_projects.c.folder_id == source_folder_id)
- .where(folders_to_projects.c.project_uuid == f"{project_uuid}")
- )
- await connection.execute(
- folders_to_projects.insert().values(
- folder_id=destination_folder_id, project_uuid=f"{project_uuid}"
- )
- )
-
-
-async def get_project_folder_without_check(
- connection: SAConnection,
- *,
- project_uuid: _ProjectID,
-) -> _FolderID | None:
- """
- This is temporary, until we discuss how to proceed. In the first version we
- assume each project uuid appears at most once in the folders_to_projects table.
- """
- async with connection.begin():
- folder_id = await connection.scalar(
- sa.select(folders_to_projects.c.folder_id).where(
- folders_to_projects.c.project_uuid == f"{project_uuid}"
- )
- )
- if folder_id:
- return _FolderID(folder_id)
- return None
-
-
-async def folder_remove_project(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- project_uuid: _ProjectID,
- required_permissions=_requires( # noqa: B008
- _BasePermissions.REMOVE_PROJECT_FROM_FOLDER
- ),
-) -> None:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- """
- async with connection.begin():
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=gids,
- permissions=required_permissions,
- )
-
- await connection.execute(
- folders_to_projects.delete()
- .where(folders_to_projects.c.folder_id == folder_id)
- .where(folders_to_projects.c.project_uuid == f"{project_uuid}")
- )
-
-
-_LIST_GROUP_BY_FIELDS: Final[tuple[Column, ...]] = (
- folders.c.id,
- folders.c.name,
- folders.c.description,
- folders.c.created_by,
- folders_access_rights.c.traversal_parent_id,
-)
-_LIST_SELECT_FIELDS: Final[tuple[Label | Column, ...]] = (
- *_LIST_GROUP_BY_FIELDS,
- # access_rights
- (
- sa.select(
- sa.func.jsonb_object_agg(
- folders_access_rights.c.gid,
- sa.func.jsonb_build_object(
- "read",
- folders_access_rights.c.read,
- "write",
- folders_access_rights.c.write,
- "delete",
- folders_access_rights.c.delete,
- ),
- ).label("access_rights"),
- )
- .where(folders_access_rights.c.folder_id == folders.c.id)
- .correlate(folders)
- .scalar_subquery()
- ).label("access_rights"),
- # my_access_rights
- func.json_build_object(
- "read",
- func.max(folders_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN),
- "write",
- func.max(folders_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN),
- "delete",
- func.max(folders_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN),
- ).label("my_access_rights"),
- # access_created
- func.max(folders_access_rights.c.created).label("access_created"),
- # access_modified
- func.max(folders_access_rights.c.modified).label("access_modified"),
-)
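-
-# NOTE: illustrative shape of the two JSON columns built above:
-#
-#     access_rights    = {"3": {"read": true, "write": true, "delete": false},
-#                         "7": {"read": true, "write": false, "delete": false}}
-#     my_access_rights = {"read": true, "write": true, "delete": false}
-#
-# `access_rights` aggregates one entry per gid the folder is shared with, while
-# `my_access_rights` ORs (max over the int-cast booleans) the rows matching the
-# caller's gids.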
-
-
-async def folder_list(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID | None,
- gids: set[_GroupID],
- *,
- offset: NonNegativeInt,
- limit: NonNegativeInt,
- order_by: OrderByDict = OrderByDict( # noqa: B008
- field="modified", direction=OrderDirection.DESC
- ),
- required_permissions: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions.LIST_FOLDERS
- ),
-) -> tuple[int, list[FolderEntry]]:
- """
- Raises:
- FolderNotFoundError
- FolderNotSharedWithGidError
- InsufficientPermissionsError
- NoAccessForGroupsFoundError
- """
- # NOTE: when `folder_id is None`, list the root-level folders of the `gids`
-
- if folder_id is not None:
- await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=gids,
- permissions=required_permissions,
- )
-
- results: list[FolderEntry] = []
-
- base_query = (
- sa.select(*_LIST_SELECT_FIELDS)
- .join(folders_access_rights, folders.c.id == folders_access_rights.c.folder_id)
- .where(folders.c.product_name == product_name)
- .where(
- folders_access_rights.c.traversal_parent_id.is_(None)
- if folder_id is None
- else folders_access_rights.c.traversal_parent_id == folder_id
- )
- .where(folders_access_rights.c.gid.in_(gids))
- .where(
- _get_filter_for_enabled_permissions(
- required_permissions, folders_access_rights
- )
- )
- .group_by(*_LIST_GROUP_BY_FIELDS)
- )
-
- # Select total count from base_query
- subquery = base_query.subquery()
- count_query = sa.select(sa.func.count()).select_from(subquery)
- count_result = await connection.execute(count_query)
- total_count = await count_result.scalar()
-
- # Ordering and pagination
- if order_by["direction"] == OrderDirection.ASC:
- list_query = base_query.order_by(sa.asc(getattr(folders.c, order_by["field"])))
- else:
- list_query = base_query.order_by(sa.desc(getattr(folders.c, order_by["field"])))
- list_query = list_query.offset(offset).limit(limit)
-
- async for entry in connection.execute(list_query):
- results.append(FolderEntry.from_orm(entry)) # noqa: PERF401
-
- return cast(int, total_count), results
-
-
-async def folder_get(
- connection: SAConnection,
- product_name: _ProductName,
- folder_id: _FolderID,
- gids: set[_GroupID],
- *,
- required_permissions: _FolderPermissions = _requires( # noqa: B008
- _BasePermissions.GET_FOLDER
- ),
-) -> FolderEntry:
- resolved_access_rights: _ResolvedAccessRights = await _check_and_get_folder_access(
- connection,
- product_name,
- folder_id=folder_id,
- gids=gids,
- permissions=required_permissions,
- )
- permissions_gid: _GroupID = resolved_access_rights.gid
-
- query = (
- sa.select(*_LIST_SELECT_FIELDS)
- .join(folders_access_rights, folders.c.id == folders_access_rights.c.folder_id)
- .where(folders_access_rights.c.folder_id == folder_id)
- .where(folders_access_rights.c.gid == permissions_gid)
- .where(
- _get_filter_for_enabled_permissions(
- required_permissions, folders_access_rights
- )
- if folder_id is None
- else True
- )
- .where(folders.c.product_name == product_name)
- .group_by(*_LIST_GROUP_BY_FIELDS)
- )
-
- query_result: RowProxy | None = await (await connection.execute(query)).fetchone()
-
- if query_result is None:
- raise FolderNotFoundError(
- folder_id=folder_id, gids=gids, product_name=product_name
- )
-
- return FolderEntry.from_orm(query_result)
-
-
-__all__ = ["OrderByDict"]
diff --git a/packages/postgres-database/tests/test_utils_folders.py b/packages/postgres-database/tests/test_utils_folders.py
deleted file mode 100644
index 8c49fd9914f5..000000000000
--- a/packages/postgres-database/tests/test_utils_folders.py
+++ /dev/null
@@ -1,2312 +0,0 @@
-# pylint:disable=redefined-outer-name
-# pylint:disable=too-many-statements
-# pylint:disable=unused-variable
-
-import itertools
-from collections.abc import AsyncIterable, Awaitable, Callable
-from copy import deepcopy
-from typing import NamedTuple
-from unittest.mock import Mock
-
-import pytest
-import sqlalchemy as sa
-from aiopg.sa.connection import SAConnection
-from aiopg.sa.result import RowProxy
-from pydantic import BaseModel, Field, NonNegativeInt
-from pytest_simcore.helpers.faker_factories import random_product
-from simcore_postgres_database.models.folders import (
- folders,
- folders_access_rights,
- folders_to_projects,
-)
-from simcore_postgres_database.models.groups import GroupType, groups
-from simcore_postgres_database.utils_folders import (
- _ROLE_TO_PERMISSIONS,
- EDITOR_PERMISSIONS,
- NO_ACCESS_PERMISSIONS,
- OWNER_PERMISSIONS,
- VIEWER_PERMISSIONS,
- CannotMoveFolderSharedViaNonPrimaryGroupError,
- FolderAccessRole,
- FolderAlreadyExistsError,
- FolderEntry,
- FolderNotFoundError,
- FolderNotSharedWithGidError,
- InsufficientPermissionsError,
- InvalidFolderNameError,
- NoGroupIDFoundError,
- RootFolderRequiresAtLeastOnePrimaryGroupError,
- _FolderID,
- _FolderPermissions,
- _get_filter_for_enabled_permissions,
- _get_permissions_from_role,
- _get_resolved_access_rights,
- _GroupID,
- _ProductName,
- _ProjectID,
- _requires,
- folder_add_project,
- folder_create,
- folder_delete,
- folder_get,
- folder_list,
- folder_move,
- folder_remove_project,
- folder_share_or_update_permissions,
- folder_update,
-)
-from simcore_postgres_database.utils_products import products
-from sqlalchemy.sql.elements import ColumnElement
-
-
-def test_permissions_integrity():
- assert set(FolderAccessRole) == set(_ROLE_TO_PERMISSIONS.keys())
-
-
-@pytest.mark.parametrize(
- "read, write, delete", list(itertools.product([True, False], repeat=3))
-)
-def test__folder_permissions_to_dict(read: bool, write: bool, delete: bool):
- folder_permissions = _FolderPermissions(read=read, write=write, delete=delete)
- assert folder_permissions.to_dict() == {
- "read": read,
- "write": write,
- "delete": delete,
- }
- only_true: dict[str, bool] = {}
- if read:
- only_true["read"] = True
- if write:
- only_true["write"] = True
- if delete:
- only_true["delete"] = True
- assert folder_permissions.to_dict(include_only_true=True) == only_true
-
-
-@pytest.mark.parametrize(
- "role, expected_permissions",
- [
- (
- FolderAccessRole.VIEWER,
- _FolderPermissions(read=True, write=False, delete=False),
- ),
- (
- FolderAccessRole.EDITOR,
- _FolderPermissions(read=True, write=True, delete=False),
- ),
- (
- FolderAccessRole.OWNER,
- _FolderPermissions(read=True, write=True, delete=True),
- ),
- ],
-)
-def test_role_permissions(
- role: FolderAccessRole, expected_permissions: _FolderPermissions
-):
- assert _get_permissions_from_role(role) == expected_permissions
-
-
-@pytest.mark.parametrize(
- "permissions, expected",
- [
- ([], _FolderPermissions(read=False, write=False, delete=False)),
- (
- [VIEWER_PERMISSIONS],
- _FolderPermissions(read=True, write=False, delete=False),
- ),
- ([EDITOR_PERMISSIONS], _FolderPermissions(read=True, write=True, delete=False)),
- (
- [EDITOR_PERMISSIONS, VIEWER_PERMISSIONS],
- _FolderPermissions(read=True, write=True, delete=False),
- ),
- ([OWNER_PERMISSIONS], _FolderPermissions(read=True, write=True, delete=True)),
- (
- [OWNER_PERMISSIONS, EDITOR_PERMISSIONS],
- _FolderPermissions(read=True, write=True, delete=True),
- ),
- (
- [OWNER_PERMISSIONS, EDITOR_PERMISSIONS, VIEWER_PERMISSIONS],
- _FolderPermissions(read=True, write=True, delete=True),
- ),
- ],
-)
-def test__requires_permissions(
- permissions: list[_FolderPermissions], expected: _FolderPermissions
-):
- assert _requires(*permissions) == expected
-
-
-@pytest.fixture
-async def create_product(
- connection: SAConnection,
-) -> AsyncIterable[Callable[[str], Awaitable[_ProductName]]]:
- created_products: list[_ProductName] = []
-
- async def _(name: str) -> _ProductName:
- assert name != "osparc", f"{name} is reserved! please choose a different one"
- result: _ProductName | None = await connection.scalar(
- products.insert()
- .values(random_product(name=name, group_id=None))
- .returning(products.c.name)
- )
- assert result is not None
- created_products.append(result)
- return result
-
- yield _
-
- for product in created_products:
- await connection.execute(products.delete().where(products.c.name == product))
-
-
-@pytest.fixture
-async def default_product_name(
- create_product: Callable[[str], Awaitable[_ProductName]]
-) -> _ProductName:
- return await create_product("test_product")
-
-
-@pytest.mark.parametrize(
- "invalid_name",
- [
- None,
- "",
- "/",
- ":",
- '"',
- "<",
- ">",
- "\\",
- "|",
- "?",
- "My/Folder",
- "MyFolder<",
- "My*Folder",
- "A" * (256),
- "CON",
- "PRN",
- "AUX",
- "NUL",
- *[f"COM{i}" for i in range(1, 10)],
- *[f"LPT{i}" for i in range(1, 10)],
- ],
-)
-async def test_folder_create_wrong_folder_name(invalid_name: str):
- with pytest.raises(InvalidFolderNameError):
- await folder_create(Mock(), "mock_product", invalid_name, Mock())
-
-
-def test__get_where_clause():
- assert isinstance(
- _get_filter_for_enabled_permissions(VIEWER_PERMISSIONS, folders_access_rights),
- ColumnElement,
- )
- assert isinstance(
- _get_filter_for_enabled_permissions(EDITOR_PERMISSIONS, folders_access_rights),
- ColumnElement,
- )
- assert isinstance(
- _get_filter_for_enabled_permissions(OWNER_PERMISSIONS, folders_access_rights),
- ColumnElement,
- )
- assert isinstance(
- _get_filter_for_enabled_permissions(
- _FolderPermissions(read=False, write=False, delete=False),
- folders_access_rights,
- ),
- bool,
- )
-
-
-async def _assert_folder_entires(
- connection: SAConnection,
- *,
- folder_count: NonNegativeInt,
- access_rights_count: NonNegativeInt | None = None,
-) -> None:
- async def _query_table(table: sa.Table, count: NonNegativeInt) -> None:
- result = await connection.execute(table.select())
- rows = await result.fetchall()
- assert rows is not None
- assert len(rows) == count
-
- await _query_table(folders, folder_count)
- await _query_table(folders_access_rights, access_rights_count or folder_count)
-
-
-async def _assert_folderpermissions_exists(
- connection: SAConnection, folder_id: _FolderID, gids: set[_GroupID]
-) -> None:
- result = await connection.execute(
- folders_access_rights.select()
- .where(folders_access_rights.c.folder_id == folder_id)
- .where(folders_access_rights.c.gid.in_(gids))
- )
- rows = await result.fetchall()
- assert rows is not None
- assert len(rows) == 1
-
-
-async def _assert_folder_permissions(
- connection: SAConnection,
- *,
- folder_id: _FolderID,
- gid: _GroupID,
- role: FolderAccessRole,
-) -> None:
- result = await connection.execute(
- sa.select(folders_access_rights.c.folder_id)
- .where(folders_access_rights.c.folder_id == folder_id)
- .where(folders_access_rights.c.gid == gid)
- .where(
- _get_filter_for_enabled_permissions(
- _get_permissions_from_role(role), folders_access_rights
- )
- )
- )
- rows = await result.fetchall()
- assert rows is not None
- assert len(rows) == 1
-
-
-async def _assert_name_and_description(
- connection: SAConnection,
- folder_id: _FolderID,
- *,
- name: str,
- description: str,
-):
- async with connection.execute(
- sa.select(folders.c.name, folders.c.description).where(
- folders.c.id == folder_id
- )
- ) as result_proxy:
- results = await result_proxy.fetchall()
- assert results
- assert len(results) == 1
- result = results[0]
- assert result["name"] == name
- assert result["description"] == description
-
-
-@pytest.fixture
-async def setup_users(
- connection: SAConnection, create_fake_user: Callable[..., Awaitable[RowProxy]]
-) -> list[RowProxy]:
- users: list[RowProxy] = []
- for _ in range(10):
- users.append(await create_fake_user(connection)) # noqa: PERF401
- return users
-
-
-@pytest.fixture
-async def setup_users_and_groups(setup_users: list[RowProxy]) -> set[_GroupID]:
- return {u.primary_gid for u in setup_users}
-
-
-@pytest.fixture
-def get_unique_gids(
- setup_users_and_groups: set[_GroupID],
-) -> Callable[[int], tuple[_GroupID, ...]]:
- def _(tuple_size: int) -> tuple[_GroupID, ...]:
- copied_groups = deepcopy(setup_users_and_groups)
- return tuple(copied_groups.pop() for _ in range(tuple_size))
-
- return _
-
-
-@pytest.fixture
-async def setup_projects_for_users(
- connection: SAConnection,
- setup_users: list[RowProxy],
- create_fake_project: Callable[..., Awaitable[RowProxy]],
-) -> set[_ProjectID]:
- projects: set[_ProjectID] = set()
- for user in setup_users:
- project = await create_fake_project(connection, user)
- projects.add(project.uuid)
- return projects
-
-
-@pytest.fixture
-def get_unique_project_uuids(
- setup_projects_for_users: set[_ProjectID],
-) -> Callable[[int], tuple[_ProjectID, ...]]:
- def _(tuple_size: int) -> tuple[_ProjectID, ...]:
- copied_projects = deepcopy(setup_projects_for_users)
- return tuple(copied_projects.pop() for _ in range(tuple_size))
-
- return _
-
-
-class MkFolder(BaseModel):
- name: str
- gid: _GroupID
- description: str = ""
- parent: _FolderID | None = None
-
- shared_with: dict[_GroupID, FolderAccessRole] = Field(default_factory=dict)
- children: set["MkFolder"] = Field(default_factory=set)
-
- def __hash__(self):
- return hash(
- (
- self.name,
- self.description,
- self.gid,
- tuple(sorted(self.shared_with.items())),
- frozenset(self.children),
- )
- )
-
- def __eq__(self, other):
- if not isinstance(other, MkFolder):
- return False
- return (
- self.name == other.name
- and self.description == other.description
- and self.gid == other.gid
- and self.shared_with == other.shared_with
- and self.children == other.children
- )
-
-
-@pytest.fixture
-def make_folders(
- connection: SAConnection, default_product_name: _ProductName
-) -> Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]]:
- async def _(
- root_folders: set[MkFolder], *, parent: _FolderID | None = None
- ) -> dict[str, _FolderID]:
- folder_names_map: dict[str, _FolderID] = {}
-
- for root in root_folders:
- # create folder
- folder_names_map[root.name] = root_folder_id = await folder_create(
- connection,
- default_product_name,
- root.name,
- {root.gid},
- description=root.description,
- parent=parent,
- )
- # share with others
- for gid, role in root.shared_with.items():
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- root_folder_id,
- sharing_gids={root.gid},
- recipient_gid=gid,
- recipient_role=role,
- )
- # create subfolders
- subfolders_names_map = await _(root.children, parent=root_folder_id)
- root_name = set(folder_names_map.keys())
- subfolder_names = set(subfolders_names_map.keys())
- if subfolder_names & root_name != set():
- msg = f"{root_name=} and {subfolder_names=} are not allowed to have common folder names"
- raise ValueError(msg)
- folder_names_map.update(subfolders_names_map)
-
- return folder_names_map
-
- return _
-
-
-async def test_folder_create(
- connection: SAConnection,
- create_product: Callable[[str], Awaitable[_ProductName]],
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-):
-
- (owner_gid,) = get_unique_gids(1)
-
- product_a = await create_product("product_a")
- product_b = await create_product("product_b")
-
- expected_folder_count: int = 0
- for product_name in (
- product_a,
- product_b, # NOTE: a different product can also define the same folder structure
- ):
-
- # 1. when GID is missing no entries should be present
- missing_gid = 10202023302
- await _assert_folder_entires(connection, folder_count=expected_folder_count)
- with pytest.raises(NoGroupIDFoundError):
- await folder_create(connection, product_name, "f1", {missing_gid})
- await _assert_folder_entires(connection, folder_count=expected_folder_count)
-
- # 2. create a folder and a subfolder of the same name
- f1_folder_id = await folder_create(connection, product_name, "f1", {owner_gid})
- expected_folder_count += 1
- await _assert_folder_entires(connection, folder_count=expected_folder_count)
- await folder_create(
- connection, product_name, "f1", {owner_gid}, parent=f1_folder_id
- )
- expected_folder_count += 1
- await _assert_folder_entires(connection, folder_count=expected_folder_count)
-
- # 3. inserting already existing folder fails
- with pytest.raises(FolderAlreadyExistsError):
- await folder_create(connection, product_name, "f1", {owner_gid})
- await _assert_folder_entires(connection, folder_count=expected_folder_count)
-
-
-async def test_folder_create_shared_via_groups(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
- create_fake_group: Callable[..., Awaitable[RowProxy]],
-):
- #######
- # SETUP
- #######
- gid_original_owner: _GroupID
- (gid_original_owner,) = get_unique_gids(1)
-
- gid_user: _GroupID = (
- await create_fake_group(connection, type=GroupType.PRIMARY)
- ).gid
- gid_everyone: _GroupID | None = await connection.scalar(
- sa.select(groups.c.gid).where(groups.c.type == GroupType.EVERYONE)
- )
- assert gid_everyone
- gid_z43: _GroupID = (
- await create_fake_group(connection, type=GroupType.STANDARD)
- ).gid
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="root",
- gid=gid_original_owner,
- shared_with={
- gid_z43: FolderAccessRole.OWNER,
- gid_everyone: FolderAccessRole.OWNER,
- },
- ),
- }
- )
-
- folder_id_root = folder_ids["root"]
-
- #######
- # TESTS
- #######
-
- # 1. can create when using one gid with permissions
- folder_id_f1 = await folder_create(
- connection,
- default_product_name,
- "f1",
- {gid_z43, gid_user},
- parent=folder_id_root,
- )
- await _assert_folderpermissions_exists(connection, folder_id_f1, {gid_z43})
-
- folder_id_f2 = await folder_create(
- connection,
- default_product_name,
- "f2",
- {gid_everyone, gid_user},
- parent=folder_id_root,
- )
- await _assert_folderpermissions_exists(connection, folder_id_f2, {gid_everyone})
-
- # 2. can create new folder when using both gids with permissions
- folder_id_f3 = await folder_create(
- connection,
- default_product_name,
- "f3",
- {gid_z43, gid_everyone, gid_user},
- parent=folder_id_root,
- )
- await _assert_folderpermissions_exists(
- connection, folder_id_f3, {gid_everyone, gid_z43}
- )
-
- # 3. cannot create a root folder without a primary group
- with pytest.raises(RootFolderRequiresAtLeastOnePrimaryGroupError):
- await folder_create(
- connection,
- default_product_name,
- "folder_in_root",
- {gid_z43, gid_everyone},
- )
-
-
-async def test__get_resolved_access_rights(
- connection: SAConnection,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
- (
- gid_owner_a,
- gid_owner_b,
- gid_owner_c,
- gid_owner_d,
- gid_editor_a,
- gid_editor_b,
- ) = get_unique_gids(6)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="root",
- gid=gid_owner_a,
- shared_with={
- gid_owner_b: FolderAccessRole.OWNER,
- gid_owner_c: FolderAccessRole.OWNER,
- gid_owner_d: FolderAccessRole.OWNER,
- gid_editor_a: FolderAccessRole.EDITOR,
- },
- children={
- MkFolder(name="b", gid=gid_owner_b),
- MkFolder(
- name="c",
- gid=gid_owner_c,
- children={
- MkFolder(
- name="d",
- gid=gid_owner_d,
- shared_with={gid_editor_b: FolderAccessRole.EDITOR},
- children={MkFolder(name="editor_a", gid=gid_editor_a)},
- )
- },
- ),
- },
- ),
- }
- )
-
- folder_id_root = folder_ids["root"]
- folder_id_b = folder_ids["b"]
- folder_id_c = folder_ids["c"]
- folder_id_d = folder_ids["d"]
- folder_id_editor_a = folder_ids["editor_a"]
-
- # check access rights resolution
- async def _assert_resolves_to(
- *,
- target_folder_id: _FolderID,
- gid: _GroupID,
- permissions: _FolderPermissions,
- expected_folder_id: _FolderID,
- expected_gids: set[_GroupID],
- ) -> None:
- resolved_parent = await _get_resolved_access_rights(
- connection,
- target_folder_id,
- gid,
- permissions=permissions,
- )
- assert resolved_parent
- assert resolved_parent.folder_id == expected_folder_id
- assert resolved_parent.gid in expected_gids
-
- #######
- # TESTS
- #######
-
- await _assert_resolves_to(
- target_folder_id=folder_id_root,
- gid=gid_owner_a,
- permissions=OWNER_PERMISSIONS,
- expected_folder_id=folder_id_root,
- expected_gids={gid_owner_a},
- )
- await _assert_resolves_to(
- target_folder_id=folder_id_b,
- gid=gid_owner_b,
- permissions=OWNER_PERMISSIONS,
- expected_folder_id=folder_id_root,
- expected_gids={gid_owner_b},
- )
- await _assert_resolves_to(
- target_folder_id=folder_id_c,
- gid=gid_owner_c,
- permissions=OWNER_PERMISSIONS,
- expected_folder_id=folder_id_root,
- expected_gids={gid_owner_c},
- )
- await _assert_resolves_to(
- target_folder_id=folder_id_d,
- gid=gid_owner_d,
- permissions=OWNER_PERMISSIONS,
- expected_folder_id=folder_id_root,
- expected_gids={gid_owner_d},
- )
- await _assert_resolves_to(
- target_folder_id=folder_id_editor_a,
- gid=gid_editor_a,
- permissions=EDITOR_PERMISSIONS,
- expected_folder_id=folder_id_root,
- expected_gids={gid_editor_a},
- )
- await _assert_resolves_to(
- target_folder_id=folder_id_editor_a,
- gid=gid_editor_b,
- permissions=EDITOR_PERMISSIONS,
- expected_folder_id=folder_id_d,
- expected_gids={gid_editor_b},
- )
-
-
-async def test_folder_share_or_update_permissions(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-):
- (
- gid_owner,
- gid_other_owner,
- gid_editor,
- gid_viewer,
- gid_no_access,
- gid_share_with_error,
- ) = get_unique_gids(6)
-
- # 1. folder does not exist
- folder_id_missing = 12313123232
- with pytest.raises(FolderNotFoundError):
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id_missing,
- sharing_gids={gid_owner},
- recipient_gid=gid_share_with_error,
- recipient_role=FolderAccessRole.OWNER,
- )
- await _assert_folder_entires(connection, folder_count=0)
-
- # 2. share existing folder with all possible roles
- folder_id = await folder_create(connection, default_product_name, "f1", {gid_owner})
- await _assert_folder_entires(connection, folder_count=1)
- await _assert_folder_permissions(
- connection, folder_id=folder_id, gid=gid_owner, role=FolderAccessRole.OWNER
- )
-
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={gid_owner},
- recipient_gid=gid_other_owner,
- recipient_role=FolderAccessRole.OWNER,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=2)
- await _assert_folder_permissions(
- connection,
- folder_id=folder_id,
- gid=gid_other_owner,
- role=FolderAccessRole.OWNER,
- )
-
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={gid_owner},
- recipient_gid=gid_editor,
- recipient_role=FolderAccessRole.EDITOR,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=3)
- await _assert_folder_permissions(
- connection, folder_id=folder_id, gid=gid_editor, role=FolderAccessRole.EDITOR
- )
-
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={gid_owner},
- recipient_gid=gid_viewer,
- recipient_role=FolderAccessRole.VIEWER,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=4)
- await _assert_folder_permissions(
- connection, folder_id=folder_id, gid=gid_viewer, role=FolderAccessRole.VIEWER
- )
-
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={gid_owner},
- recipient_gid=gid_no_access,
- recipient_role=FolderAccessRole.NO_ACCESS,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=5)
- await _assert_folder_permissions(
- connection,
- folder_id=folder_id,
- gid=gid_no_access,
- role=FolderAccessRole.NO_ACCESS,
- )
-
- # 3. roles without permissions cannot share with any role
- for recipient_role in FolderAccessRole:
- for no_access_gids in (gid_editor, gid_viewer, gid_no_access):
- with pytest.raises(InsufficientPermissionsError):
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={no_access_gids},
- recipient_gid=gid_share_with_error,
- recipient_role=recipient_role,
- )
- await _assert_folder_entires(
- connection, folder_count=1, access_rights_count=5
- )
-
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={gid_share_with_error},
- recipient_gid=gid_share_with_error,
- recipient_role=recipient_role,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=5)
-
- # 4. all users lose permission on the folder, including the issuer
- # NOTE: gid_other_owner downgrades everyone, including the original owner
- # and finally itself, to NO_ACCESS!
- for gid_to_drop_permission in (gid_owner, gid_editor, gid_viewer, gid_other_owner):
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={gid_other_owner},
- recipient_gid=gid_to_drop_permission,
- recipient_role=FolderAccessRole.NO_ACCESS,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=5)
- await _assert_folder_permissions(
- connection,
- folder_id=folder_id,
- gid=gid_to_drop_permission,
- role=FolderAccessRole.NO_ACCESS,
- )
-
-
-async def test_folder_update(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-):
- (
- owner_gid,
- other_owner_gid,
- editor_gid,
- viewer_gid,
- no_access_gid,
- share_with_error_gid,
- ) = get_unique_gids(6)
-
- # 1. folder is missing
- missing_folder_id = 1231321332
- with pytest.raises(FolderNotFoundError):
- await folder_update(
- connection, default_product_name, missing_folder_id, {owner_gid}
- )
- await _assert_folder_entires(connection, folder_count=0)
-
- # 2. owner updates the created folder
- folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid})
- await _assert_folder_entires(connection, folder_count=1)
- await _assert_name_and_description(connection, folder_id, name="f1", description="")
-
- # nothing changes
- await folder_update(connection, default_product_name, folder_id, {owner_gid})
- await _assert_name_and_description(connection, folder_id, name="f1", description="")
-
- # both changed
- await folder_update(
- connection,
- default_product_name,
- folder_id,
- {owner_gid},
- name="new_folder",
- description="new_desc",
- )
- await _assert_name_and_description(
- connection, folder_id, name="new_folder", description="new_desc"
- )
-
- # 3. another_owner can also update
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=other_owner_gid,
- recipient_role=FolderAccessRole.OWNER,
- )
- await folder_update(
- connection,
- default_product_name,
- folder_id,
- {other_owner_gid},
- name="another_owner_name",
- description="another_owner_description",
- )
- await _assert_name_and_description(
- connection,
- folder_id,
- name="another_owner_name",
- description="another_owner_description",
- )
-
- # 4. other roles have no permission to update
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=editor_gid,
- recipient_role=FolderAccessRole.EDITOR,
- )
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=viewer_gid,
- recipient_role=FolderAccessRole.VIEWER,
- )
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=no_access_gid,
- recipient_role=FolderAccessRole.NO_ACCESS,
- )
-
- for target_user_gid in (editor_gid, viewer_gid, no_access_gid):
- with pytest.raises(InsufficientPermissionsError):
- await folder_update(
- connection,
- default_product_name,
- folder_id,
- {target_user_gid},
- name="error_name",
- description="error_description",
- )
- await _assert_name_and_description(
- connection,
- folder_id,
- name="another_owner_name",
- description="another_owner_description",
- )
-
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_update(
- connection,
- default_product_name,
- folder_id,
- {share_with_error_gid},
- name="error_name",
- description="error_description",
- )
- await _assert_name_and_description(
- connection,
- folder_id,
- name="another_owner_name",
- description="another_owner_description",
- )
-
-
-async def test_folder_delete(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
-):
- (
- owner_gid,
- other_owner_gid,
- editor_gid,
- viewer_gid,
- no_access_gid,
- share_with_error_gid,
- ) = get_unique_gids(6)
-
- # 1. folder is missing
- missing_folder_id = 1231321332
- with pytest.raises(FolderNotFoundError):
- await folder_delete(
- connection, default_product_name, missing_folder_id, {owner_gid}
- )
- await _assert_folder_entires(connection, folder_count=0)
-
- # 2. owner deletes folder
- folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid})
- await _assert_folder_entires(connection, folder_count=1)
-
- await folder_delete(connection, default_product_name, folder_id, {owner_gid})
- await _assert_folder_entires(connection, folder_count=0)
-
- # 3. other owners can delete the folder
- folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid})
- await _assert_folder_entires(connection, folder_count=1)
-
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=other_owner_gid,
- recipient_role=FolderAccessRole.OWNER,
- )
-
- await folder_delete(connection, default_product_name, folder_id, {other_owner_gid})
- await _assert_folder_entires(connection, folder_count=0)
-
- # 4. non-owner users cannot delete the folder
- folder_id = await folder_create(connection, default_product_name, "f1", {owner_gid})
- await _assert_folder_entires(connection, folder_count=1)
-
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=editor_gid,
- recipient_role=FolderAccessRole.EDITOR,
- )
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=viewer_gid,
- recipient_role=FolderAccessRole.VIEWER,
- )
- await folder_share_or_update_permissions(
- connection,
- default_product_name,
- folder_id,
- sharing_gids={owner_gid},
- recipient_gid=no_access_gid,
- recipient_role=FolderAccessRole.NO_ACCESS,
- )
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=4)
-
- for non_owner_gid in (editor_gid, viewer_gid, no_access_gid):
- with pytest.raises(InsufficientPermissionsError):
- await folder_delete(
- connection, default_product_name, folder_id, {non_owner_gid}
- )
-
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_delete(
- connection, default_product_name, folder_id, {share_with_error_gid}
- )
-
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=4)
-
-
-async def test_folder_delete_nested_folders(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
- (
- gid_owner_a,
- gid_owner_b,
- gid_editor_a,
- gid_editor_b,
- gid_viewer,
- gid_no_access,
- gid_not_shared,
- ) = get_unique_gids(7)
-
- async def _setup_folders() -> _FolderID:
- await _assert_folder_entires(connection, folder_count=0)
- folder_ids = await make_folders(
- {
- MkFolder(
- name="root_folder",
- gid=gid_owner_a,
- shared_with={
- gid_owner_b: FolderAccessRole.OWNER,
- gid_editor_a: FolderAccessRole.EDITOR,
- gid_editor_b: FolderAccessRole.EDITOR,
- gid_viewer: FolderAccessRole.VIEWER,
- gid_no_access: FolderAccessRole.NO_ACCESS,
- },
- )
- }
- )
- folder_id_root_folder = folder_ids["root_folder"]
- await _assert_folder_entires(connection, folder_count=1, access_rights_count=6)
-
- GIDS_WITH_CREATE_PERMISSIONS: set[_GroupID] = {
- gid_owner_a,
- gid_owner_b,
- gid_editor_a,
- gid_editor_b,
- }
-
- previous_folder_id = folder_id_root_folder
- for i in range(100):
- previous_folder_id = await folder_create(
- connection,
- default_product_name,
- f"f{i}",
- GIDS_WITH_CREATE_PERMISSIONS,
- parent=previous_folder_id,
- )
- await _assert_folder_entires(
- connection, folder_count=101, access_rights_count=106
- )
- return folder_id_root_folder
-
- #######
- # TESTS
- #######
-
- # 1. delete via `gid_owner_a`
- folder_id_root_folder = await _setup_folders()
- await folder_delete(
- connection, default_product_name, folder_id_root_folder, {gid_owner_a}
- )
- await _assert_folder_entires(connection, folder_count=0)
-
- # 2. delete via `gid_owner_b`, with whom the folder was shared
- folder_id_root_folder = await _setup_folders()
- await folder_delete(
- connection, default_product_name, folder_id_root_folder, {gid_owner_b}
- )
- await _assert_folder_entires(connection, folder_count=0)
-
- # 3. delete is not permitted
- folder_id_root_folder = await _setup_folders()
- for no_permissions_gid in (gid_editor_a, gid_editor_b, gid_viewer):
- with pytest.raises(InsufficientPermissionsError):
- await folder_delete(
- connection,
- default_product_name,
- folder_id_root_folder,
- {no_permissions_gid},
- )
- for no_permissions_gid in (gid_not_shared,):
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_delete(
- connection,
- default_product_name,
- folder_id_root_folder,
- {no_permissions_gid},
- )
- await _assert_folder_entires(connection, folder_count=101, access_rights_count=106)
-
-
-async def test_folder_move(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
-
- (gid_sharing, gid_user_a, gid_user_b) = get_unique_gids(3)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="USER_A",
- gid=gid_user_a,
- children={MkFolder(name="f_user_a", gid=gid_user_a)},
- ),
- MkFolder(
- name="USER_B",
- gid=gid_user_b,
- children={MkFolder(name="f_user_b", gid=gid_user_b)},
- ),
- MkFolder(
- name="SHARED_AS_OWNER",
- gid=gid_sharing,
- children={
- MkFolder(
- name="f_shared_as_owner_user_a",
- gid=gid_sharing,
- shared_with={gid_user_a: FolderAccessRole.OWNER},
- ),
- MkFolder(
- name="f_shared_as_owner_user_b",
- gid=gid_sharing,
- shared_with={gid_user_b: FolderAccessRole.OWNER},
- ),
- },
- ),
- MkFolder(
- name="SHARED_AS_EDITOR",
- gid=gid_sharing,
- children={
- MkFolder(
- name="f_shared_as_editor_user_a",
- gid=gid_sharing,
- shared_with={gid_user_a: FolderAccessRole.EDITOR},
- ),
- MkFolder(
- name="f_shared_as_editor_user_b",
- gid=gid_sharing,
- shared_with={gid_user_b: FolderAccessRole.EDITOR},
- ),
- },
- ),
- MkFolder(
- name="SHARED_AS_VIEWER",
- gid=gid_sharing,
- children={
- MkFolder(
- name="f_shared_as_viewer_user_a",
- gid=gid_sharing,
- shared_with={gid_user_a: FolderAccessRole.VIEWER},
- ),
- MkFolder(
- name="f_shared_as_viewer_user_b",
- gid=gid_sharing,
- shared_with={gid_user_b: FolderAccessRole.VIEWER},
- ),
- },
- ),
- MkFolder(
- name="SHARED_AS_NO_ACCESS",
- gid=gid_sharing,
- children={
- MkFolder(
- name="f_shared_as_no_access_user_a",
- gid=gid_sharing,
- shared_with={gid_user_a: FolderAccessRole.NO_ACCESS},
- ),
- MkFolder(
- name="f_shared_as_no_access_user_b",
- gid=gid_sharing,
- shared_with={gid_user_b: FolderAccessRole.NO_ACCESS},
- ),
- },
- ),
- MkFolder(name="NOT_SHARED", gid=gid_sharing),
- }
- )
-
- folder_id_user_a = folder_ids["USER_A"]
- folder_id_f_user_a = folder_ids["f_user_a"]
- folder_id_user_b = folder_ids["USER_B"]
- folder_id_f_user_b = folder_ids["f_user_b"]
- folder_id_f_shared_as_owner_user_a = folder_ids["f_shared_as_owner_user_a"]
- folder_id_f_shared_as_owner_user_b = folder_ids["f_shared_as_owner_user_b"]
- folder_id_f_shared_as_editor_user_a = folder_ids["f_shared_as_editor_user_a"]
- folder_id_f_shared_as_editor_user_b = folder_ids["f_shared_as_editor_user_b"]
- folder_id_f_shared_as_viewer_user_a = folder_ids["f_shared_as_viewer_user_a"]
- folder_id_f_shared_as_viewer_user_b = folder_ids["f_shared_as_viewer_user_b"]
- folder_id_f_shared_as_no_access_user_a = folder_ids["f_shared_as_no_access_user_a"]
- folder_id_f_shared_as_no_access_user_b = folder_ids["f_shared_as_no_access_user_b"]
- folder_id_not_shared = folder_ids["NOT_SHARED"]
-
- async def _move_fails_not_shared_with_error(
- gid: _GroupID, *, source: _FolderID, destination: _FolderID
- ) -> None:
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_move(
- connection,
- default_product_name,
- source,
- {gid},
- destination_folder_id=destination,
- )
-
- async def _move_fails_insufficient_permissions_error(
- gid: _GroupID, *, source: _FolderID, destination: _FolderID
- ) -> None:
- with pytest.raises(InsufficientPermissionsError):
- await folder_move(
- connection,
- default_product_name,
- source,
- {gid},
- destination_folder_id=destination,
- )
-
- async def _move_back_and_forth(
- gid: _GroupID,
- *,
- source: _FolderID,
- destination: _FolderID,
- source_parent: _FolderID,
- ) -> None:
- async def _assert_folder_permissions(
- connection: SAConnection,
- *,
- folder_id: _FolderID,
- gid: _GroupID,
- parent_folder: _FolderID,
- ) -> None:
- result = await connection.execute(
- sa.select(folders_access_rights.c.folder_id)
- .where(folders_access_rights.c.folder_id == folder_id)
- .where(folders_access_rights.c.gid == gid)
- .where(folders_access_rights.c.traversal_parent_id == parent_folder)
- )
- rows = await result.fetchall()
- assert rows is not None
- assert len(rows) == 1
-
- # check: the parent should still be source_parent
- await _assert_folder_permissions(
- connection, folder_id=source, gid=gid, parent_folder=source_parent
- )
-
- await folder_move(
- connection,
- default_product_name,
- source,
- {gid},
- destination_folder_id=destination,
- )
-
- # check: the parent should now be destination
- await _assert_folder_permissions(
- connection, folder_id=source, gid=gid, parent_folder=destination
- )
-
- await folder_move(
- connection,
- default_product_name,
- source,
- {gid},
- destination_folder_id=source_parent,
- )
-
- # check: the parent should be source_parent again
- await _assert_folder_permissions(
- connection, folder_id=source, gid=gid, parent_folder=source_parent
- )
-
- #######
- # TESTS
- #######
-
- # 1. not working:
- # - `USER_A/f_user_a -> USER_B`
- await _move_fails_not_shared_with_error(
- gid_user_a, source=folder_id_f_user_a, destination=folder_id_user_b
- )
- # - `USER_B/f_user_b -> USER_A`
- await _move_fails_not_shared_with_error(
- gid_user_b, source=folder_id_f_user_b, destination=folder_id_user_a
- )
- # - `USER_A/f_user_a -> NOT_SHARED`
- await _move_fails_not_shared_with_error(
- gid_user_a, source=folder_id_f_user_a, destination=folder_id_not_shared
- )
- # - `USER_B/f_user_b -> NOT_SHARED`
- await _move_fails_not_shared_with_error(
- gid_user_b, source=folder_id_f_user_b, destination=folder_id_not_shared
- )
- # - `USER_A/f_user_a -> f_shared_as_no_access_user_a`
- await _move_fails_insufficient_permissions_error(
- gid_user_a,
- source=folder_id_f_user_a,
- destination=folder_id_f_shared_as_no_access_user_a,
- )
- # - `USER_B/f_user_b -> f_shared_as_no_access_user_b`
- await _move_fails_insufficient_permissions_error(
- gid_user_b,
- source=folder_id_f_user_b,
- destination=folder_id_f_shared_as_no_access_user_b,
- )
- # - `USER_A/f_user_a -> f_shared_as_viewer_user_a`
- await _move_fails_insufficient_permissions_error(
- gid_user_a,
- source=folder_id_f_user_a,
- destination=folder_id_f_shared_as_viewer_user_a,
- )
- # - `USER_B/f_user_b -> f_shared_as_viewer_user_b`
- await _move_fails_insufficient_permissions_error(
- gid_user_b,
- source=folder_id_f_user_b,
- destination=folder_id_f_shared_as_viewer_user_b,
- )
-
- # 2. allowed operations:
- # - `USER_A/f_user_a -> f_shared_as_editor_user_a` (& reverse)
- await _move_back_and_forth(
- gid_user_a,
- source=folder_id_f_user_a,
- destination=folder_id_f_shared_as_editor_user_a,
- source_parent=folder_id_user_a,
- )
- # - `USER_B/f_user_b -> f_shared_as_editor_user_b` (& reverse)
- await _move_back_and_forth(
- gid_user_b,
- source=folder_id_f_user_b,
- destination=folder_id_f_shared_as_editor_user_b,
- source_parent=folder_id_user_b,
- )
- # - `USER_A/f_user_a -> f_shared_as_owner_user_a` (& reverse)
- await _move_back_and_forth(
- gid_user_a,
- source=folder_id_f_user_a,
- destination=folder_id_f_shared_as_owner_user_a,
- source_parent=folder_id_user_a,
- )
- # - `USER_B/f_user_b -> f_shared_as_owner_user_b` (& reverse)
- await _move_back_and_forth(
- gid_user_b,
- source=folder_id_f_user_b,
- destination=folder_id_f_shared_as_owner_user_b,
- source_parent=folder_id_user_b,
- )
-
- # 3. allowed to move to the `root` folder
- for to_move_folder_id, to_move_gid in [
- (folder_id_f_user_a, gid_user_a),
- (folder_id_f_user_b, gid_user_b),
- (folder_id_f_shared_as_owner_user_a, gid_user_a),
- (folder_id_f_shared_as_owner_user_b, gid_user_b),
- ]:
- await folder_move(
- connection,
- default_product_name,
- to_move_folder_id,
- {to_move_gid},
- destination_folder_id=None,
- )
-
- # 4. not allowed to move to the `root` folder
- for to_move_folder_id, to_move_gid in [
- (folder_id_f_shared_as_editor_user_a, gid_user_a),
- (folder_id_f_shared_as_editor_user_b, gid_user_b),
- (folder_id_f_shared_as_viewer_user_a, gid_user_a),
- (folder_id_f_shared_as_viewer_user_b, gid_user_b),
- (folder_id_f_shared_as_no_access_user_a, gid_user_a),
- (folder_id_f_shared_as_no_access_user_b, gid_user_b),
- ]:
- with pytest.raises(InsufficientPermissionsError):
- await folder_move(
- connection,
- default_product_name,
- to_move_folder_id,
- {to_move_gid},
- destination_folder_id=None,
- )
-
- for to_move_gid in [gid_user_a, gid_user_b]:
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_move(
- connection,
- default_product_name,
- folder_id_not_shared,
- {to_move_gid},
- destination_folder_id=None,
- )
-
-
-async def test_move_only_owners_can_move(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
- (
- gid_owner,
- gid_editor,
- gid_viewer,
- gid_no_access,
- gid_not_shared,
- ) = get_unique_gids(5)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="to_move",
- gid=gid_owner,
- shared_with={
- gid_editor: FolderAccessRole.EDITOR,
- gid_viewer: FolderAccessRole.VIEWER,
- gid_no_access: FolderAccessRole.NO_ACCESS,
- },
- ),
- MkFolder(name="target_owner", gid=gid_owner),
- MkFolder(name="target_editor", gid=gid_editor),
- MkFolder(name="target_viewer", gid=gid_viewer),
- MkFolder(name="target_no_access", gid=gid_no_access),
- MkFolder(name="target_not_shared", gid=gid_not_shared),
- }
- )
-
- folder_id_to_move = folder_ids["to_move"]
- folder_id_target_owner = folder_ids["target_owner"]
- folder_id_target_editor = folder_ids["target_editor"]
- folder_id_target_viewer = folder_ids["target_viewer"]
- folder_id_target_no_access = folder_ids["target_no_access"]
- folder_id_target_not_shared = folder_ids["target_not_shared"]
-
- async def _fails_to_move(gid: _GroupID, destination_folder_id: _FolderID) -> None:
- with pytest.raises(InsufficientPermissionsError):
- await folder_move(
- connection,
- default_product_name,
- folder_id_to_move,
- {gid},
- destination_folder_id=destination_folder_id,
- )
-
- #######
- # TESTS
- #######
-
- # 1. no permissions to move
- await _fails_to_move(gid_editor, folder_id_target_editor)
- await _fails_to_move(gid_viewer, folder_id_target_viewer)
- await _fails_to_move(gid_no_access, folder_id_target_no_access)
-
- # 2. not shared with user
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_move(
- connection,
- default_product_name,
- folder_id_to_move,
- {gid_not_shared},
- destination_folder_id=folder_id_target_not_shared,
- )
-
- # 3. owner is able to move
- await folder_move(
- connection,
- default_product_name,
- folder_id_to_move,
- {gid_owner},
- destination_folder_id=folder_id_target_owner,
- )
-
-
-async def test_move_group_non_standard_groups_raise_error(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
- create_fake_group: Callable[..., Awaitable[RowProxy]],
-):
- #######
- # SETUP
- #######
- gid_original_owner: _GroupID
- (gid_original_owner,) = get_unique_gids(1)
- gid_primary: _GroupID = (
- await create_fake_group(connection, type=GroupType.PRIMARY)
- ).gid
- gid_everyone: _GroupID | None = await connection.scalar(
- sa.select(groups.c.gid).where(groups.c.type == GroupType.EVERYONE)
- )
- assert gid_everyone
- gid_standard: _GroupID = (
- await create_fake_group(connection, type=GroupType.STANDARD)
- ).gid
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="SHARING_USER",
- gid=gid_original_owner,
- shared_with={
- gid_primary: FolderAccessRole.EDITOR,
- gid_everyone: FolderAccessRole.EDITOR,
- gid_standard: FolderAccessRole.EDITOR,
- },
- ),
- MkFolder(
- name="PRIMARY",
- gid=gid_original_owner,
- shared_with={gid_primary: FolderAccessRole.OWNER},
- ),
- MkFolder(
- name="EVERYONE",
- gid=gid_original_owner,
- shared_with={gid_everyone: FolderAccessRole.OWNER},
- ),
- MkFolder(
- name="STANDARD",
- gid=gid_original_owner,
- shared_with={gid_standard: FolderAccessRole.OWNER},
- ),
- }
- )
-
- folder_id_sharing_user = folder_ids["SHARING_USER"]
- folder_id_primary = folder_ids["PRIMARY"]
- folder_id_everyone = folder_ids["EVERYONE"]
- folder_id_standard = folder_ids["STANDARD"]
-
- #######
- # TESTS
- #######
-
- with pytest.raises(CannotMoveFolderSharedViaNonPrimaryGroupError) as exc:
- await folder_move(
- connection,
- default_product_name,
- folder_id_everyone,
- {gid_everyone},
- destination_folder_id=folder_id_sharing_user,
- )
- assert "EVERYONE" in f"{exc.value}"
-
- with pytest.raises(CannotMoveFolderSharedViaNonPrimaryGroupError) as exc:
- await folder_move(
- connection,
- default_product_name,
- folder_id_standard,
- {gid_standard},
- destination_folder_id=folder_id_sharing_user,
- )
- assert "STANDARD" in f"{exc.value}"
-
- # primary group does not raise an error
- await folder_move(
- connection,
- default_product_name,
- folder_id_primary,
- {gid_primary},
- destination_folder_id=folder_id_sharing_user,
- )
-
-
-async def test_add_remove_project_in_folder(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
- get_unique_project_uuids: Callable[[int], tuple[_ProjectID, ...]],
-):
- #######
- # SETUP
- #######
-
- (gid_owner, gid_editor, gid_viewer, gid_no_access) = get_unique_gids(4)
- (project_uuid,) = get_unique_project_uuids(1)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="f1",
- gid=gid_owner,
- shared_with={
- gid_editor: FolderAccessRole.EDITOR,
- gid_viewer: FolderAccessRole.VIEWER,
- gid_no_access: FolderAccessRole.NO_ACCESS,
- },
- )
- }
- )
- folder_id_f1 = folder_ids["f1"]
-
- async def _is_project_present(
- connection: SAConnection,
- folder_id: _FolderID,
- project_id: _ProjectID,
- ) -> bool:
- async with connection.execute(
- folders_to_projects.select()
- .where(folders_to_projects.c.folder_id == folder_id)
- .where(folders_to_projects.c.project_uuid == project_id)
- ) as result:
- rows = await result.fetchall()
- assert rows is not None
- return len(rows) == 1
-
- async def _add_folder_as(gid: _GroupID) -> None:
- await folder_add_project(
- connection,
- default_product_name,
- folder_id_f1,
- {gid},
- project_uuid=project_uuid,
- )
- assert await _is_project_present(connection, folder_id_f1, project_uuid) is True
-
- async def _remove_folder_as(gid: _GroupID) -> None:
- await folder_remove_project(
- connection,
- default_product_name,
- folder_id_f1,
- {gid},
- project_uuid=project_uuid,
- )
- assert (
- await _is_project_present(connection, folder_id_f1, project_uuid) is False
- )
-
- assert await _is_project_present(connection, folder_id_f1, project_uuid) is False
-
- #######
- # TESTS
- #######
-
- # 1. owner can add and remove
- await _add_folder_as(gid_owner)
- await _remove_folder_as(gid_owner)
-
- # 2. editor can add but can't remove
- await _add_folder_as(gid_editor)
- with pytest.raises(InsufficientPermissionsError):
- await _remove_folder_as(gid_editor)
- await _remove_folder_as(gid_owner) # cleanup
-
- # 3. viewer can't add and can't remove
- with pytest.raises(InsufficientPermissionsError):
- await _add_folder_as(gid_viewer)
- with pytest.raises(InsufficientPermissionsError):
- await _remove_folder_as(gid_viewer)
-
- # 4. no_access can't add and can't remove
- with pytest.raises(InsufficientPermissionsError):
- await _add_folder_as(gid_no_access)
- with pytest.raises(InsufficientPermissionsError):
- await _remove_folder_as(gid_no_access)
-
-
-class ExpectedValues(NamedTuple):
- id: _FolderID
- my_access_rights: _FolderPermissions
- access_rights: dict[_GroupID, _FolderPermissions]
-
- def __hash__(self):
- return hash(
- (
- self.id,
- self.my_access_rights,
- tuple(sorted(self.access_rights.items())),
- )
- )
-
- def __eq__(self, other):
- if not isinstance(other, ExpectedValues):
- return False
- return (
- self.id == other.id
- and self.my_access_rights == other.my_access_rights
- and self.access_rights == other.access_rights
- )
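A side note on the `ExpectedValues` helper removed above: `access_rights` is a `dict`, which is unhashable, so `__hash__` hashes a sorted tuple of its items instead — that is what lets `_assert_expected_entries` keep `ExpectedValues` instances in a `set`. A minimal, self-contained sketch of the same technique (class and field names here are illustrative, not from this codebase):

```python
from typing import NamedTuple


class HashableEntry(NamedTuple):  # illustrative name
    name: str
    rights: dict[str, int]  # unhashable field

    def __hash__(self) -> int:
        # sorting the items makes the hash independent of insertion order
        return hash((self.name, tuple(sorted(self.rights.items()))))

    def __eq__(self, other) -> bool:
        if not isinstance(other, HashableEntry):
            return False
        return self.name == other.name and self.rights == other.rights


# equal content hashes equally, so duplicates collapse inside a set
assert len({HashableEntry("a", {"x": 1}), HashableEntry("a", {"x": 1})}) == 1
```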
-
-
-def _assert_expected_entries(
- folders: list[FolderEntry], *, expected: set[ExpectedValues]
-) -> None:
- for folder_entry in folders:
- expected_values = ExpectedValues(
- folder_entry.id,
- folder_entry.my_access_rights,
- folder_entry.access_rights,
- )
- assert expected_values in expected
-
-
-ALL_IN_ONE_PAGE_OFFSET: NonNegativeInt = 0
-ALL_IN_ONE_PAGE_LIMIT: NonNegativeInt = 1000
-
-
-async def _list_folder_as(
- connection: SAConnection,
- default_product_name: _ProductName,
- folder_id: _FolderID | None,
- gids: set[_GroupID],
- offset: NonNegativeInt = ALL_IN_ONE_PAGE_OFFSET,
- limit: NonNegativeInt = ALL_IN_ONE_PAGE_LIMIT,
-) -> list[FolderEntry]:
-
- _, folders_db = await folder_list(
- connection, default_product_name, folder_id, gids, offset=offset, limit=limit
- )
- return folders_db
-
-
-async def test_folder_list(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
- (
- gid_owner,
- gid_editor,
- gid_viewer,
- gid_no_access,
- gid_not_shared,
- ) = get_unique_gids(5)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="owner_folder",
- gid=gid_owner,
- shared_with={
- gid_editor: FolderAccessRole.EDITOR,
- gid_viewer: FolderAccessRole.VIEWER,
- gid_no_access: FolderAccessRole.NO_ACCESS,
- },
- children={
- *{MkFolder(name=f"f{i}", gid=gid_owner) for i in range(1, 10)},
- MkFolder(
- name="f10",
- gid=gid_owner,
- children={
- MkFolder(name=f"sub_f{i}", gid=gid_owner)
- for i in range(1, 11)
- },
- ),
- },
- )
- }
- )
-
- folder_id_owner_folder = folder_ids["owner_folder"]
- folder_id_f1 = folder_ids["f1"]
- folder_id_f2 = folder_ids["f2"]
- folder_id_f3 = folder_ids["f3"]
- folder_id_f4 = folder_ids["f4"]
- folder_id_f5 = folder_ids["f5"]
- folder_id_f6 = folder_ids["f6"]
- folder_id_f7 = folder_ids["f7"]
- folder_id_f8 = folder_ids["f8"]
- folder_id_f9 = folder_ids["f9"]
- folder_id_f10 = folder_ids["f10"]
- folder_id_sub_f1 = folder_ids["sub_f1"]
- folder_id_sub_f2 = folder_ids["sub_f2"]
- folder_id_sub_f3 = folder_ids["sub_f3"]
- folder_id_sub_f4 = folder_ids["sub_f4"]
- folder_id_sub_f5 = folder_ids["sub_f5"]
- folder_id_sub_f6 = folder_ids["sub_f6"]
- folder_id_sub_f7 = folder_ids["sub_f7"]
- folder_id_sub_f8 = folder_ids["sub_f8"]
- folder_id_sub_f9 = folder_ids["sub_f9"]
- folder_id_sub_f10 = folder_ids["sub_f10"]
-
- ALL_FOLDERS_FX = (
- folder_id_f1,
- folder_id_f2,
- folder_id_f3,
- folder_id_f4,
- folder_id_f5,
- folder_id_f6,
- folder_id_f7,
- folder_id_f8,
- folder_id_f9,
- folder_id_f10,
- )
-
- ALL_FOLDERS_SUB_FX = (
- folder_id_sub_f1,
- folder_id_sub_f2,
- folder_id_sub_f3,
- folder_id_sub_f4,
- folder_id_sub_f5,
- folder_id_sub_f6,
- folder_id_sub_f7,
- folder_id_sub_f8,
- folder_id_sub_f9,
- folder_id_sub_f10,
- )
-
- ALL_FOLDERS_AND_SUBFOLDERS = (
- folder_id_owner_folder,
- *ALL_FOLDERS_FX,
- *ALL_FOLDERS_SUB_FX,
- )
-
- ACCESS_RIGHTS_BY_GID: dict[_GroupID, _FolderPermissions] = {
- gid_owner: OWNER_PERMISSIONS,
- gid_editor: EDITOR_PERMISSIONS,
- gid_viewer: VIEWER_PERMISSIONS,
- gid_no_access: NO_ACCESS_PERMISSIONS,
- }
-
- #######
- # TESTS
- #######
-
- # 1. list all levels per gid with access
- for listing_gid in (gid_owner, gid_editor, gid_viewer):
- # list `root` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, None, {listing_gid}
- ),
- expected={
- ExpectedValues(
- folder_id_owner_folder,
- ACCESS_RIGHTS_BY_GID[listing_gid],
- {
- gid_owner: OWNER_PERMISSIONS,
- gid_editor: EDITOR_PERMISSIONS,
- gid_viewer: VIEWER_PERMISSIONS,
- gid_no_access: NO_ACCESS_PERMISSIONS,
- },
- ),
- },
- )
- # list `owner_folder` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, folder_id_owner_folder, {listing_gid}
- ),
- expected={
- ExpectedValues(
- fx,
- ACCESS_RIGHTS_BY_GID[listing_gid],
- {gid_owner: OWNER_PERMISSIONS},
- )
- for fx in ALL_FOLDERS_FX
- },
- )
- # list `f10` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, folder_id_f10, {listing_gid}
- ),
- expected={
- ExpectedValues(
- sub_fx,
- ACCESS_RIGHTS_BY_GID[listing_gid],
- {gid_owner: OWNER_PERMISSIONS},
- )
- for sub_fx in ALL_FOLDERS_SUB_FX
- },
- )
-
- # 2. list all levels for `gid_no_access`
- # can always be run but should not list any entries
- _assert_expected_entries(
- await _list_folder_as(connection, default_product_name, None, {gid_no_access}),
- expected=set(),
- )
- # there are insufficient permissions
- for folder_id_to_check in ALL_FOLDERS_AND_SUBFOLDERS:
- with pytest.raises(InsufficientPermissionsError):
- await _list_folder_as(
- connection, default_product_name, folder_id_to_check, {gid_no_access}
- )
-
- # 3. list all levels for `gid_not_shared`
- # can always list the contents of the "root" folder for a gid
- _assert_expected_entries(
- await _list_folder_as(connection, default_product_name, None, {gid_not_shared}),
- expected=set(),
- )
- for folder_id_to_check in ALL_FOLDERS_AND_SUBFOLDERS:
- with pytest.raises(FolderNotSharedWithGidError):
- await _list_folder_as(
- connection, default_product_name, folder_id_to_check, {gid_not_shared}
- )
-
- # 4. list with pagination
- for initial_limit in (1, 2, 3, 4, 5):
- offset = 0
- limit = initial_limit
- found_folders: list[FolderEntry] = []
- while items := await _list_folder_as(
- connection,
- default_product_name,
- folder_id_owner_folder,
- {gid_owner},
- offset=offset,
- limit=limit,
- ):
- found_folders.extend(items)
- offset += limit
- if len(items) != limit:
- break
-
- one_shot_query = await _list_folder_as(
- connection, default_product_name, folder_id_owner_folder, {gid_owner}
- )
-
- assert len(found_folders) == len(one_shot_query)
- assert found_folders == one_shot_query
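The pagination check above is a useful generic pattern: page through with `offset`/`limit`, stop at the first short page, and compare the concatenation against a single unpaginated query. A sketch of the draining loop on its own, assuming any awaitable `list_page(offset, limit)` that returns at most `limit` items (the name and signature are illustrative):

```python
from collections.abc import Awaitable, Callable
from typing import Any


async def drain_pages(
    list_page: Callable[[int, int], Awaitable[list[Any]]], *, limit: int
) -> list[Any]:
    found: list[Any] = []
    offset = 0
    while items := await list_page(offset, limit):
        found.extend(items)
        offset += limit
        if len(items) < limit:  # short page: nothing left to fetch
            break
    return found
```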
-
-
-async def test_folder_list_shared_with_different_permissions(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
-
- (gid_owner_a, gid_owner_b, gid_owner_c, gid_owner_level_2) = get_unique_gids(4)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="f_owner_a",
- gid=gid_owner_a,
- shared_with={
- gid_owner_b: FolderAccessRole.OWNER,
- gid_owner_c: FolderAccessRole.OWNER,
- },
- children={
- MkFolder(
- name="f_owner_b",
- gid=gid_owner_b,
- children={
- MkFolder(
- name="f_owner_c",
- gid=gid_owner_c,
- shared_with={gid_owner_level_2: FolderAccessRole.OWNER},
- children={
- MkFolder(name="f_sub_owner_c", gid=gid_owner_c),
- MkFolder(
- name="f_owner_level_2", gid=gid_owner_level_2
- ),
- },
- )
- },
- )
- },
- )
- }
- )
-
- folder_id_f_owner_a = folder_ids["f_owner_a"]
- folder_id_f_owner_b = folder_ids["f_owner_b"]
- folder_id_f_owner_c = folder_ids["f_owner_c"]
- folder_id_f_sub_owner_c = folder_ids["f_sub_owner_c"]
- folder_id_f_owner_level_2 = folder_ids["f_owner_level_2"]
-
- #######
- # TESTS
- #######
-
- # 1. `gid_owner_a`, `gid_owner_b`, `gid_owner_c` have the exact same view
- for listing_gid in (gid_owner_a, gid_owner_b, gid_owner_c):
- # list `root` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, None, {listing_gid}
- ),
- expected={
- ExpectedValues(
- folder_id_f_owner_a,
- OWNER_PERMISSIONS,
- {
- gid_owner_a: OWNER_PERMISSIONS,
- gid_owner_b: OWNER_PERMISSIONS,
- gid_owner_c: OWNER_PERMISSIONS,
- },
- ),
- },
- )
- # list `f_owner_a` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, folder_id_f_owner_a, {listing_gid}
- ),
- expected={
- ExpectedValues(
- folder_id_f_owner_b,
- OWNER_PERMISSIONS,
- {gid_owner_b: OWNER_PERMISSIONS},
- ),
- },
- )
- # list `f_owner_b` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, folder_id_f_owner_b, {listing_gid}
- ),
- expected={
- ExpectedValues(
- folder_id_f_owner_c,
- OWNER_PERMISSIONS,
- {
- gid_owner_c: OWNER_PERMISSIONS,
- gid_owner_level_2: OWNER_PERMISSIONS,
- },
- ),
- },
- )
- # list `f_owner_c` for gid
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, folder_id_f_owner_c, {listing_gid}
- ),
- expected={
- ExpectedValues(
- folder_id_f_sub_owner_c,
- OWNER_PERMISSIONS,
- {
- gid_owner_c: OWNER_PERMISSIONS,
- },
- ),
- ExpectedValues(
- folder_id_f_owner_level_2,
- OWNER_PERMISSIONS,
- {
- gid_owner_level_2: OWNER_PERMISSIONS,
- },
- ),
- },
- )
-
- # 2. `gid_owner_level_2` can only access from `f_owner_c` downwards
- # list `root` for `gid_owner_level_2`
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, None, {gid_owner_level_2}
- ),
- expected={
- ExpectedValues(
- folder_id_f_owner_c,
- OWNER_PERMISSIONS,
- {
- gid_owner_c: OWNER_PERMISSIONS,
- gid_owner_level_2: OWNER_PERMISSIONS,
- },
- ),
- },
- )
- # list `f_owner_c` for `gid_owner_level_2`
- _assert_expected_entries(
- await _list_folder_as(
- connection, default_product_name, folder_id_f_owner_c, {gid_owner_level_2}
- ),
- expected={
- ExpectedValues(
- folder_id_f_sub_owner_c,
- OWNER_PERMISSIONS,
- {
- gid_owner_c: OWNER_PERMISSIONS,
- },
- ),
- ExpectedValues(
- folder_id_f_owner_level_2,
- OWNER_PERMISSIONS,
- {
- gid_owner_level_2: OWNER_PERMISSIONS,
- },
- ),
- },
- )
-
-
-async def test_folder_list_in_root_with_different_groups_avoids_duplicate_entries(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
-
- (gid_z43, gid_osparc, gid_user) = get_unique_gids(3)
-
- await make_folders(
- {
- MkFolder(
- name="f1",
- gid=gid_user,
- shared_with={
- gid_z43: FolderAccessRole.OWNER,
- gid_osparc: FolderAccessRole.OWNER,
- },
- ),
- MkFolder(
- name="f2",
- gid=gid_z43,
- shared_with={
- gid_osparc: FolderAccessRole.OWNER,
- },
- ),
- MkFolder(
- name="f3",
- gid=gid_osparc,
- shared_with={
- gid_z43: FolderAccessRole.OWNER,
- },
- ),
- }
- )
-
- #######
- # TESTS
- #######
-
- # 1. gid_z43 and gid_osparc see all folders
- for gid_all_folders in (gid_z43, gid_osparc):
- entries = await _list_folder_as(
- connection, default_product_name, None, {gid_all_folders}
- )
- assert len(entries) == 3
-
- # 2. gid_user only sees its own folder
- entries_user = await _list_folder_as(
- connection, default_product_name, None, {gid_user}
- )
- assert len(entries_user) == 1
-
- # 3. all gids see all folders
- entries_all_groups = await _list_folder_as(
- connection, default_product_name, None, {gid_z43, gid_osparc, gid_user}
- )
- assert len(entries_all_groups) == 3
-
-
-async def test_regression_list_folder_parent(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
-
- (gid_user,) = get_unique_gids(1)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="f1",
- gid=gid_user,
- children={
- MkFolder(
- name="f2",
- gid=gid_user,
- children={
- MkFolder(name="f3", gid=gid_user),
- },
- )
- },
- ),
- }
- )
-
- folder_id_f1 = folder_ids["f1"]
- folder_id_f2 = folder_ids["f2"]
- folder_id_f3 = folder_ids["f3"]
-
- #######
- # TESTS
- #######
-
- for folder_id in (None, folder_id_f1, folder_id_f2):
- folder_content = await _list_folder_as(
- connection, default_product_name, folder_id, {gid_user}
- )
- assert len(folder_content) == 1
- assert folder_content[0]
- assert folder_content[0].parent_folder == folder_id
-
- f3_content = await _list_folder_as(
- connection, default_product_name, folder_id_f3, {gid_user}
- )
- assert len(f3_content) == 0
-
-
-async def test_folder_get(
- connection: SAConnection,
- default_product_name: _ProductName,
- get_unique_gids: Callable[[int], tuple[_GroupID, ...]],
- make_folders: Callable[[set[MkFolder]], Awaitable[dict[str, _FolderID]]],
-):
- #######
- # SETUP
- #######
- (
- gid_owner,
- gid_other_owner,
- gid_not_shared,
- ) = get_unique_gids(3)
-
- folder_ids = await make_folders(
- {
- MkFolder(
- name="owner_folder",
- gid=gid_owner,
- shared_with={
- gid_other_owner: FolderAccessRole.OWNER,
- },
- children={
- *{MkFolder(name=f"f{i}", gid=gid_owner) for i in range(1, 3)},
- MkFolder(
- name="f10",
- gid=gid_owner,
- children={
- MkFolder(name=f"sub_f{i}", gid=gid_owner)
- for i in range(1, 3)
- },
- ),
- },
- )
- }
- )
-
- folder_id_owner_folder = folder_ids["owner_folder"]
- folder_id_f1 = folder_ids["f1"]
- folder_id_f2 = folder_ids["f2"]
- folder_id_sub_f1 = folder_ids["sub_f1"]
- folder_id_sub_f2 = folder_ids["sub_f2"]
-
- #######
- # TESTS
- #######
-
- # 1. query existing folders
- for folder_id_to_list in (
- None,
- folder_id_owner_folder,
- folder_id_f1,
- folder_id_f2,
- folder_id_sub_f1,
- folder_id_sub_f2,
- ):
- folder_entries = await _list_folder_as(
- connection, default_product_name, folder_id_to_list, {gid_owner}
- )
- for entry in folder_entries:
- queried_folder = await folder_get(
- connection, default_product_name, entry.id, {gid_owner}
- )
- assert entry == queried_folder
-
- # 2. query via gid_not_shared
- with pytest.raises(FolderNotSharedWithGidError):
- await folder_get(
- connection, default_product_name, folder_id_owner_folder, {gid_not_shared}
- )
-
- # 3. query with missing folder_id
- missing_folder_id = 12312313123
- for gid_to_test in (
- gid_owner,
- gid_other_owner,
- gid_not_shared,
- ):
- with pytest.raises(FolderNotFoundError):
- await folder_get(
- connection, default_product_name, missing_folder_id, {gid_to_test}
- )
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py b/packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py
new file mode 100644
index 000000000000..c1252ed8bb42
--- /dev/null
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/pydantic_extension.py
@@ -0,0 +1,34 @@
+from pydantic import SecretStr
+
+
+def _mask(value):
+ """
+ Mask the password, showing only the first and last characters,
+ or "***" for very short passwords
+ """
+ if len(value) > 2:
+ masked_value = value[0] + "*" * (len(value) - 2) + value[-1]
+ else:
+ # In case of very short passwords
+ masked_value = "*" * len(value)
+ return masked_value
+
+
+def _hash(value):
+ """Uses hash number to mask the password"""
+ return f"hash:{hash(value)}"
+
+
+class Secret4TestsStr(SecretStr):
+ """Prints a hint of the secret
+ TIP: Can be handy for testing
+ """
+
+ def _display(self) -> str | bytes:
+ # SEE overrides _SecretBase._display
+ value = self.get_secret_value()
+ return _mask(value) if value else ""
+
+
+assert str(Secret4TestsStr("123456890")) == "1*******0"
+assert "1*******0" in repr(Secret4TestsStr("123456890"))
diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in
index fee8aa856e24..6e288d49e0bf 100644
--- a/packages/service-integration/requirements/_base.in
+++ b/packages/service-integration/requirements/_base.in
@@ -13,3 +13,4 @@ jsonschema # pytest-plugin
pytest # pytest-plugin
pyyaml
typer[all]
+yarl
diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt
index 131e231b5370..b91836c51389 100644
--- a/packages/service-integration/requirements/_base.txt
+++ b/packages/service-integration/requirements/_base.txt
@@ -9,7 +9,7 @@ attrs==24.2.0
# referencing
binaryornot==0.4.4
# via cookiecutter
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
@@ -31,10 +31,11 @@ docker==7.1.0
# via -r requirements/_base.in
email-validator==2.2.0
# via pydantic
-idna==3.7
+idna==3.10
# via
# email-validator
# requests
+ # yarl
iniconfig==2.0.0
# via pytest
jinja2==3.1.4
@@ -57,6 +58,8 @@ markupsafe==2.1.5
# via jinja2
mdurl==0.1.2
# via markdown-it-py
+multidict==6.1.0
+ # via yarl
orjson==3.10.7
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -66,14 +69,14 @@ packaging==24.1
# via pytest
pluggy==1.5.0
# via pytest
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# -r requirements/../../../packages/models-library/requirements/_base.in
pygments==2.18.0
# via rich
-pytest==8.3.2
+pytest==8.3.3
# via -r requirements/_base.in
python-dateutil==2.9.0.post0
# via arrow
@@ -93,7 +96,7 @@ requests==2.32.3
# via
# cookiecutter
# docker
-rich==13.7.1
+rich==13.8.1
# via
# cookiecutter
# typer
@@ -107,17 +110,19 @@ six==1.16.0
# via python-dateutil
text-unidecode==1.3
# via python-slugify
-typer==0.12.4
+typer==0.12.5
# via -r requirements/_base.in
-types-python-dateutil==2.9.0.20240821
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
# pydantic
# typer
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# docker
# requests
+yarl==1.12.1
+ # via -r requirements/_base.in
diff --git a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt
index fa7046980915..dad76c6a0cd4 100644
--- a/packages/service-integration/requirements/_test.txt
+++ b/packages/service-integration/requirements/_test.txt
@@ -19,7 +19,7 @@ pluggy==1.5.0
# via
# -c requirements/_base.txt
# pytest
-pytest==8.3.2
+pytest==8.3.3
# via
# -c requirements/_base.txt
# -r requirements/_test.in
@@ -44,15 +44,15 @@ rpds-py==0.20.0
# referencing
termcolor==2.4.0
# via pytest-sugar
-types-docker==7.1.0.20240821
+types-docker==7.1.0.20240827
# via -r requirements/_test.in
types-jsonschema==4.23.0.20240813
# via -r requirements/_test.in
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
-types-requests==2.32.0.20240712
+types-requests==2.32.0.20240914
# via types-docker
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt
index 354746f70e43..6cfab1a3f280 100644
--- a/packages/service-integration/requirements/_tools.txt
+++ b/packages/service-integration/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -47,14 +47,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -65,9 +65,9 @@ pyyaml==6.0.2
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -75,7 +75,7 @@ typing-extensions==4.12.2
# via
# -c requirements/_base.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/service-integration/src/service_integration/cli/_compose_spec.py b/packages/service-integration/src/service_integration/cli/_compose_spec.py
index 117a4afa5efb..a42936c36959 100644
--- a/packages/service-integration/src/service_integration/cli/_compose_spec.py
+++ b/packages/service-integration/src/service_integration/cli/_compose_spec.py
@@ -8,6 +8,7 @@
import yaml
from models_library.utils.labels_annotations import to_labels
from rich.console import Console
+from yarl import URL
from ..compose_spec_model import ComposeSpecification
from ..errors import UndefinedOciImageSpecError
@@ -34,6 +35,13 @@ def _run_git(*args) -> str:
).stdout.strip()
+def _strip_credentials(url: str) -> str:
+ if (yarl_url := URL(url)) and yarl_url.is_absolute():
+ stripped_url = yarl_url.with_user(None).with_password(None)
+ return f"{stripped_url}"
+ return url
+
+
def _run_git_or_empty_string(*args) -> str:
try:
return _run_git(*args)
@@ -118,8 +126,8 @@ def create_docker_compose_image_spec(
extra_labels[f"{LS_LABEL_PREFIX}.vcs-ref"] = _run_git_or_empty_string(
"rev-parse", "HEAD"
)
- extra_labels[f"{LS_LABEL_PREFIX}.vcs-url"] = _run_git_or_empty_string(
- "config", "--get", "remote.origin.url"
+ extra_labels[f"{LS_LABEL_PREFIX}.vcs-url"] = _strip_credentials(
+ _run_git_or_empty_string("config", "--get", "remote.origin.url")
)
return create_image_spec(
diff --git a/packages/service-integration/tests/test_cli__compose_spec.py b/packages/service-integration/tests/test_cli__compose_spec.py
new file mode 100644
index 000000000000..5fe98689a14e
--- /dev/null
+++ b/packages/service-integration/tests/test_cli__compose_spec.py
@@ -0,0 +1,28 @@
+import pytest
+from service_integration.cli._compose_spec import _strip_credentials
+
+
+@pytest.mark.parametrize(
+ "url, expected_url",
+ [
+ (
+ "schema.veshttps://user:password@example.com/some/repo.git",
+ "schema.veshttps://example.com/some/repo.git",
+ ),
+ (
+ "https://user:password@example.com/some/repo.git",
+ "https://example.com/some/repo.git",
+ ),
+ (
+ "ssh://user:password@example.com/some/repo.git",
+ "ssh://example.com/some/repo.git",
+ ),
+ (
+ "git@git.speag.com:some/repo.git",
+ "git@git.speag.com:some/repo.git",
+ ),
+ ("any_str", "any_str"),
+ ],
+)
+def test__strip_credentials(url: str, expected_url: str):
+ assert _strip_credentials(url) == expected_url
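One detail worth spelling out for the last two cases: `_strip_credentials` only rewrites URLs that `yarl` considers absolute, i.e. that parse with a scheme and host. The scp-like `git@host:path` shorthand has no scheme, so `yarl` treats it as a relative path and the function returns it untouched, as it does any plain string. A quick sketch of that `yarl` behavior as I understand it (the URLs are taken from the parametrization above):

```python
from yarl import URL

assert URL("https://user:password@example.com/some/repo.git").is_absolute()
# no scheme: parsed as a relative path, so no host and not absolute
assert not URL("git@git.speag.com:some/repo.git").is_absolute()
assert not URL("any_str").is_absolute()

# on an absolute URL, with_user(None) clears both user and password
stripped = URL("ssh://user:password@example.com/some/repo.git").with_user(None)
assert str(stripped) == "ssh://example.com/some/repo.git"
```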
diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt
index 990f33561e3a..1536a562252a 100644
--- a/packages/service-library/requirements/_aiohttp.txt
+++ b/packages/service-library/requirements/_aiohttp.txt
@@ -18,7 +18,7 @@ attrs==24.2.0
# aiohttp
# jsonschema
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -34,13 +34,13 @@ frozenlist==1.4.1
# via
# aiohttp
# aiosignal
-greenlet==3.0.3
+greenlet==3.1.1
# via sqlalchemy
-idna==3.7
+idna==3.10
# via
# requests
# yarl
-importlib-metadata==8.0.0
+importlib-metadata==8.4.0
# via opentelemetry-api
isodate==0.6.1
# via openapi-core
@@ -62,13 +62,13 @@ lazy-object-proxy==1.10.0
# via openapi-spec-validator
markupsafe==2.1.5
# via werkzeug
-more-itertools==10.4.0
+more-itertools==10.5.0
# via openapi-core
-multidict==6.0.5
+multidict==6.1.0
# via
# aiohttp
# yarl
-openapi-core==0.19.3
+openapi-core==0.19.4
# via -r requirements/_aiohttp.in
openapi-schema-validator==0.6.2
# via
@@ -76,7 +76,7 @@ openapi-schema-validator==0.6.2
# openapi-spec-validator
openapi-spec-validator==0.7.1
# via openapi-core
-opentelemetry-api==1.26.0
+opentelemetry-api==1.27.0
# via
# opentelemetry-instrumentation
# opentelemetry-instrumentation-aiohttp-client
@@ -84,26 +84,26 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-aiopg
# opentelemetry-instrumentation-dbapi
# opentelemetry-semantic-conventions
-opentelemetry-instrumentation==0.47b0
+opentelemetry-instrumentation==0.48b0
# via
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-aiohttp-server
# opentelemetry-instrumentation-aiopg
# opentelemetry-instrumentation-dbapi
-opentelemetry-instrumentation-aiohttp-client==0.47b0
+opentelemetry-instrumentation-aiohttp-client==0.48b0
# via -r requirements/_aiohttp.in
-opentelemetry-instrumentation-aiohttp-server==0.47b0
+opentelemetry-instrumentation-aiohttp-server==0.48b0
# via -r requirements/_aiohttp.in
-opentelemetry-instrumentation-aiopg==0.47b0
+opentelemetry-instrumentation-aiopg==0.48b0
# via -r requirements/_aiohttp.in
-opentelemetry-instrumentation-dbapi==0.47b0
+opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
-opentelemetry-semantic-conventions==0.47b0
+opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-aiohttp-server
# opentelemetry-instrumentation-dbapi
-opentelemetry-util-http==0.47b0
+opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-aiohttp-client
# opentelemetry-instrumentation-aiohttp-server
@@ -111,7 +111,7 @@ parse==1.20.2
# via openapi-core
pathable==0.4.3
# via jsonschema-path
-prometheus-client==0.20.0
+prometheus-client==0.21.0
# via -r requirements/_aiohttp.in
psycopg2-binary==2.9.9
# via
@@ -137,25 +137,25 @@ rpds-py==0.20.0
# via
# jsonschema
# referencing
-setuptools==74.0.0
+setuptools==75.1.0
# via opentelemetry-instrumentation
six==1.16.0
# via
# isodate
# rfc3339-validator
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# aiopg
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# requests
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# -r requirements/_aiohttp.in
# openapi-core
@@ -167,7 +167,7 @@ wrapt==1.16.0
# opentelemetry-instrumentation-aiohttp-server
# opentelemetry-instrumentation-aiopg
# opentelemetry-instrumentation-dbapi
-yarl==1.9.4
+yarl==1.12.1
# via aiohttp
-zipp==3.20.1
+zipp==3.20.2
# via importlib-metadata
diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt
index 4216f1f186dc..64b334477253 100644
--- a/packages/service-library/requirements/_base.txt
+++ b/packages/service-library/requirements/_base.txt
@@ -4,7 +4,7 @@ aiocache==0.12.2
# via -r requirements/_base.in
aiodebug==2.3.0
# via -r requirements/_base.in
-aiodocker==0.22.2
+aiodocker==0.23.0
# via -r requirements/_base.in
aiofiles==24.1.0
# via -r requirements/_base.in
@@ -16,11 +16,11 @@ aiohttp==3.10.5
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# aiodocker
-aiormq==6.8.0
+aiormq==6.8.1
# via aio-pika
aiosignal==1.3.1
# via aiohttp
-anyio==4.4.0
+anyio==4.6.0
# via
# fast-depends
# faststream
@@ -33,7 +33,7 @@ attrs==24.2.0
# aiohttp
# jsonschema
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -53,9 +53,9 @@ dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
-fast-depends==2.4.8
+fast-depends==2.4.11
# via faststream
-faststream==0.5.18
+faststream==0.5.23
# via -r requirements/_base.in
frozenlist==1.4.1
# via
@@ -65,15 +65,15 @@ googleapis-common-protos==1.65.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-grpcio==1.66.0
+grpcio==1.66.1
# via opentelemetry-exporter-otlp-proto-grpc
-idna==3.7
+idna==3.10
# via
# anyio
# email-validator
# requests
# yarl
-importlib-metadata==8.0.0
+importlib-metadata==8.4.0
# via opentelemetry-api
jsonschema==4.23.0
# via -r requirements/../../../packages/models-library/requirements/_base.in
@@ -83,11 +83,11 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-multidict==6.0.5
+multidict==6.1.0
# via
# aiohttp
# yarl
-opentelemetry-api==1.26.0
+opentelemetry-api==1.27.0
# via
# -r requirements/_base.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -96,35 +96,35 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
# opentelemetry-semantic-conventions
-opentelemetry-exporter-otlp==1.26.0
+opentelemetry-exporter-otlp==1.27.0
# via -r requirements/_base.in
-opentelemetry-exporter-otlp-proto-common==1.26.0
+opentelemetry-exporter-otlp-proto-common==1.27.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-grpc==1.26.0
+opentelemetry-exporter-otlp-proto-grpc==1.27.0
# via opentelemetry-exporter-otlp
-opentelemetry-exporter-otlp-proto-http==1.26.0
+opentelemetry-exporter-otlp-proto-http==1.27.0
# via opentelemetry-exporter-otlp
-opentelemetry-instrumentation==0.47b0
+opentelemetry-instrumentation==0.48b0
# via opentelemetry-instrumentation-requests
-opentelemetry-instrumentation-requests==0.47b0
+opentelemetry-instrumentation-requests==0.48b0
# via -r requirements/_base.in
-opentelemetry-proto==1.26.0
+opentelemetry-proto==1.27.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.26.0
+opentelemetry-sdk==1.27.0
# via
# -r requirements/_base.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-semantic-conventions==0.47b0
+opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
-opentelemetry-util-http==0.47b0
+opentelemetry-util-http==0.48b0
# via opentelemetry-instrumentation-requests
orjson==3.10.7
# via
@@ -134,13 +134,13 @@ orjson==3.10.7
# -r requirements/../../../packages/models-library/requirements/_base.in
pamqp==3.3.0
# via aiormq
-protobuf==4.25.4
+protobuf==4.25.5
# via
# googleapis-common-protos
# opentelemetry-proto
psutil==6.0.0
# via -r requirements/_base.in
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -151,7 +151,7 @@ pydantic==1.10.17
# fast-depends
pygments==2.18.0
# via rich
-pyinstrument==4.7.2
+pyinstrument==4.7.3
# via -r requirements/_base.in
python-dateutil==2.9.0.post0
# via arrow
@@ -174,7 +174,7 @@ referencing==0.29.3
# jsonschema-specifications
requests==2.32.3
# via opentelemetry-exporter-otlp-proto-http
-rich==13.7.1
+rich==13.8.1
# via
# -r requirements/../../../packages/settings-library/requirements/_base.in
# typer
@@ -182,7 +182,7 @@ rpds-py==0.20.0
# via
# jsonschema
# referencing
-setuptools==74.0.0
+setuptools==75.1.0
# via opentelemetry-instrumentation
shellingham==1.5.4
# via typer
@@ -196,11 +196,9 @@ toolz==0.12.1
# via -r requirements/_base.in
tqdm==4.66.5
# via -r requirements/_base.in
-typer==0.12.4
- # via
- # -r requirements/../../../packages/settings-library/requirements/_base.in
- # faststream
-types-python-dateutil==2.9.0.20240821
+typer==0.12.5
+ # via -r requirements/../../../packages/settings-library/requirements/_base.in
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
@@ -209,7 +207,7 @@ typing-extensions==4.12.2
# opentelemetry-sdk
# pydantic
# typer
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -219,10 +217,10 @@ wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
-yarl==1.9.4
+yarl==1.12.1
# via
# aio-pika
# aiohttp
# aiormq
-zipp==3.20.1
+zipp==3.20.2
# via importlib-metadata
diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt
index 3fceb62b1f63..8a3aed376009 100644
--- a/packages/service-library/requirements/_fastapi.txt
+++ b/packages/service-library/requirements/_fastapi.txt
@@ -1,10 +1,10 @@
-anyio==4.4.0
+anyio==4.6.0
# via
# httpx
# starlette
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
@@ -30,54 +30,54 @@ h11==0.14.0
# uvicorn
httpcore==1.0.5
# via httpx
-httpx==0.27.0
+httpx==0.27.2
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_fastapi.in
-idna==3.7
+idna==3.10
# via
# anyio
# httpx
-importlib-metadata==8.0.0
+importlib-metadata==8.4.0
# via opentelemetry-api
-opentelemetry-api==1.26.0
+opentelemetry-api==1.27.0
# via
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
# opentelemetry-semantic-conventions
-opentelemetry-instrumentation==0.47b0
+opentelemetry-instrumentation==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
-opentelemetry-instrumentation-asgi==0.47b0
+opentelemetry-instrumentation-asgi==0.48b0
# via opentelemetry-instrumentation-fastapi
-opentelemetry-instrumentation-fastapi==0.47b0
+opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/_fastapi.in
-opentelemetry-semantic-conventions==0.47b0
+opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
-opentelemetry-util-http==0.47b0
+opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
-prometheus-client==0.20.0
+prometheus-client==0.21.0
# via
# -r requirements/_fastapi.in
# prometheus-fastapi-instrumentator
prometheus-fastapi-instrumentator==6.1.0
# via -r requirements/_fastapi.in
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/_base.in
# -c requirements/../../../requirements/constraints.txt
# fastapi
-setuptools==74.0.0
+setuptools==75.1.0
# via opentelemetry-instrumentation
sniffio==1.3.1
# via
@@ -99,5 +99,5 @@ wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
-zipp==3.20.1
+zipp==3.20.2
# via importlib-metadata
diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt
index 42d1479b5d2c..a6a72dcd5002 100644
--- a/packages/service-library/requirements/_test.txt
+++ b/packages/service-library/requirements/_test.txt
@@ -14,7 +14,7 @@ aiosignal==1.3.1
# -c requirements/_aiohttp.txt
# -c requirements/_base.txt
# aiohttp
-anyio==4.4.0
+anyio==4.6.0
# via
# -c requirements/_base.txt
# -c requirements/_fastapi.txt
@@ -29,7 +29,7 @@ attrs==24.2.0
# jsonschema
# pytest-docker
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_aiohttp.txt
@@ -51,7 +51,7 @@ docker==7.1.0
# via -r requirements/_test.in
execnet==2.1.1
# via pytest-xdist
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flaky==3.8.1
# via -r requirements/_test.in
@@ -61,7 +61,7 @@ frozenlist==1.4.1
# -c requirements/_base.txt
# aiohttp
# aiosignal
-greenlet==3.0.3
+greenlet==3.1.1
# via
# -c requirements/_aiohttp.txt
# sqlalchemy
@@ -73,14 +73,14 @@ httpcore==1.0.5
# via
# -c requirements/_fastapi.txt
# httpx
-httpx==0.27.0
+httpx==0.27.2
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_fastapi.txt
# respx
icdiff==2.0.7
# via pytest-icdiff
-idna==3.7
+idna==3.10
# via
# -c requirements/_aiohttp.txt
# -c requirements/_base.txt
@@ -111,13 +111,13 @@ lazy-object-proxy==1.10.0
# via
# -c requirements/_aiohttp.txt
# openapi-spec-validator
-multidict==6.0.5
+multidict==6.1.0
# via
# -c requirements/_aiohttp.txt
# -c requirements/_base.txt
# aiohttp
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -143,7 +143,7 @@ pprintpp==0.4.0
# via pytest-icdiff
py-cpuinfo==9.0.0
# via pytest-benchmark
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-aiohttp
@@ -231,7 +231,7 @@ sniffio==1.3.1
# anyio
# asgi-lifespan
# httpx
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_aiohttp.txt
@@ -254,14 +254,14 @@ typing-extensions==4.12.2
# -c requirements/_fastapi.txt
# mypy
# sqlalchemy2-stubs
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_aiohttp.txt
# -c requirements/_base.txt
# docker
# requests
-yarl==1.9.4
+yarl==1.12.1
# via
# -c requirements/_aiohttp.txt
# -c requirements/_base.txt
diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt
index 31b97e4b2d7b..dff0e09f6bbc 100644
--- a/packages/service-library/requirements/_tools.txt
+++ b/packages/service-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -49,14 +49,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,9 +68,9 @@ pyyaml==6.0.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==74.0.0
+setuptools==75.1.0
# via
# -c requirements/_base.txt
# pip-tools
@@ -81,7 +81,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/service-library/src/servicelib/aiohttp/tracing.py b/packages/service-library/src/servicelib/aiohttp/tracing.py
index 686112eaedd1..c33e92cc96f4 100644
--- a/packages/service-library/src/servicelib/aiohttp/tracing.py
+++ b/packages/service-library/src/servicelib/aiohttp/tracing.py
@@ -1,6 +1,7 @@
""" Adds aiohttp middleware for tracing using opentelemetry instrumentation.
"""
+
import logging
from aiohttp import web
@@ -8,20 +9,26 @@
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
OTLPSpanExporter as OTLPSpanExporterHTTP,
)
-from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor
-from opentelemetry.instrumentation.aiohttp_server import AioHttpServerInstrumentor
-from opentelemetry.instrumentation.aiopg import AiopgInstrumentor
+from opentelemetry.instrumentation.aiohttp_client import ( # pylint:disable=no-name-in-module
+ AioHttpClientInstrumentor,
+)
+from opentelemetry.instrumentation.aiohttp_server import ( # pylint:disable=no-name-in-module
+ AioHttpServerInstrumentor,
+)
+from opentelemetry.instrumentation.aiopg import ( # pylint:disable=no-name-in-module
+ AiopgInstrumentor,
+)
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from settings_library.tracing import TracingSettings
-log = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
def setup_tracing(
- app: web.Application, # pylint: disable=unused-argument
+ app: web.Application,
tracing_settings: TracingSettings,
service_name: str,
instrument_aiopg: bool = False, # noqa: FBT001, FBT002
@@ -29,17 +36,23 @@ def setup_tracing(
"""
Sets up this service for a distributed tracing system (opentelemetry)
"""
+ _ = app
opentelemetry_collector_endpoint = (
tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT
)
opentelemetry_collector_port = tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT
if not opentelemetry_collector_endpoint and not opentelemetry_collector_port:
- log.warning("Skipping opentelemetry tracing setup")
+ _logger.warning("Skipping opentelemetry tracing setup")
return
if not opentelemetry_collector_endpoint or not opentelemetry_collector_port:
- raise RuntimeError(
- f"Variable opentelemetry_collector_endpoint [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] or opentelemetry_collector_port [{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] unset. Tracing options incomplete."
+ msg = (
+ "Variable opentelemetry_collector_endpoint "
+ f"[{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}] "
+ "or opentelemetry_collector_port "
+ f"[{tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_PORT}] "
+ "unset. Provide both or remove both."
)
+ raise RuntimeError(msg)
resource = Resource(attributes={"service.name": service_name})
trace.set_tracer_provider(TracerProvider(resource=resource))
tracer_provider: trace.TracerProvider = trace.get_tracer_provider()
@@ -47,7 +60,7 @@ def setup_tracing(
f"{opentelemetry_collector_endpoint}:{opentelemetry_collector_port}/v1/traces"
)
- log.info(
+ _logger.info(
"Trying to connect service %s to tracing collector at %s.",
service_name,
tracing_destination,
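
For context on the stricter validation above: tracing is skipped only when *both* the collector endpoint and port are unset, while providing exactly one of them now raises immediately instead of silently producing a broken exporter URL. A minimal sketch of that decision table (helper name and signature are illustrative, not part of the codebase):

```python
def resolve_tracing_destination(endpoint: str | None, port: int | None) -> str | None:
    """Returns the OTLP traces URL, or None when tracing is intentionally disabled."""
    if not endpoint and not port:
        return None  # both unset: skip tracing setup
    if not endpoint or not port:
        msg = f"endpoint [{endpoint}] or port [{port}] unset. Provide both or remove both."
        raise RuntimeError(msg)  # exactly one set: misconfiguration
    return f"{endpoint}:{port}/v1/traces"
```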
diff --git a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py
index 69762108e716..bbe45ccc39ac 100644
--- a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py
+++ b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py
@@ -1,3 +1,4 @@
+import pickle
from typing import Final
from uuid import uuid4
@@ -33,13 +34,15 @@ async def get_new_unique_identifier(self) -> TaskUID:
async def _get_raw(self, redis_key: str) -> TaskScheduleModel | None:
found_data = await self.redis_client_sdk.redis.get(redis_key)
- return None if found_data is None else TaskScheduleModel.parse_raw(found_data)
+ return None if found_data is None else pickle.loads(found_data) # noqa: S301
async def get(self, task_uid: TaskUID) -> TaskScheduleModel | None:
return await self._get_raw(_get_key(task_uid))
async def save(self, task_uid: TaskUID, task_schedule: TaskScheduleModel) -> None:
- await self.redis_client_sdk.redis.set(_get_key(task_uid), task_schedule.json())
+ await self.redis_client_sdk.redis.set(
+ _get_key(task_uid), pickle.dumps(task_schedule)
+ )
async def remove(self, task_uid: TaskUID) -> None:
await self.redis_client_sdk.redis.delete(_get_key(task_uid))
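
The tracker above now persists `TaskScheduleModel` as pickled bytes instead of Pydantic JSON. A self-contained sketch of the round-trip, assuming a client created with `decode_responses=False` so that `get()` returns `bytes` (which is exactly what the companion fixture changes below arrange); key and model names are illustrative:

```python
import pickle

from redis.asyncio import Redis


async def save(redis: Redis, key: str, model: object) -> None:
    # pickle.dumps returns bytes; redis stores them verbatim
    await redis.set(key, pickle.dumps(model))


async def load(redis: Redis, key: str) -> object | None:
    raw = await redis.get(key)  # bytes | None with decode_responses=False
    return None if raw is None else pickle.loads(raw)  # noqa: S301 (trusted data only)
```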
diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py
index bc8ba72b4c01..2e6b9960eff5 100644
--- a/packages/service-library/src/servicelib/logging_utils.py
+++ b/packages/service-library/src/servicelib/logging_utils.py
@@ -392,3 +392,8 @@ def guess_message_log_level(message: str) -> LogLevelInt:
):
return logging.WARNING
return logging.INFO
+
+
+def set_parent_module_log_level(current_module: str, desired_log_level: int) -> None:
+ parent_module = ".".join(current_module.split(".")[:-1])
+ logging.getLogger(parent_module).setLevel(desired_log_level)
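
`set_parent_module_log_level` derives the parent logger name by dropping the last dotted component of `current_module` and sets the level there, so the effective level propagates to the caller and all of its sibling modules. A usage sketch, assuming it is called with a module's own `__name__`:

```python
import logging

from servicelib.logging_utils import set_parent_module_log_level

# called from a module named e.g. "simcore_service_x.modules.jobs":
# sets "simcore_service_x.modules" to INFO, so "jobs" and its siblings
# inherit INFO as their effective level (their own level stays NOTSET)
set_parent_module_log_level(__name__, logging.INFO)
```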
diff --git a/packages/service-library/src/servicelib/services_utils.py b/packages/service-library/src/servicelib/services_utils.py
index 60a9caf92a53..98aace49c6c6 100644
--- a/packages/service-library/src/servicelib/services_utils.py
+++ b/packages/service-library/src/servicelib/services_utils.py
@@ -1,5 +1,11 @@
import urllib.parse
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_webserver.projects_nodes import (
+ NodeGet,
+ NodeGetIdle,
+ NodeGetUnknown,
+)
from models_library.services import ServiceType
@@ -9,3 +15,14 @@ def get_service_from_key(service_key: str) -> ServiceType:
if encoded_service_type == "comp":
encoded_service_type = "computational"
return ServiceType(encoded_service_type)
+
+
+def get_status_as_dict(
+ status: NodeGetIdle | NodeGetUnknown | DynamicServiceGet | NodeGet,
+) -> dict:
+ """shared between different backend services to guarantee same result to frontend"""
+ return (
+ status.dict(by_alias=True)
+ if isinstance(status, DynamicServiceGet)
+ else status.dict()
+ )
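
`get_status_as_dict` dumps `DynamicServiceGet` by alias because that model defines wire aliases the frontend expects, while the `NodeGet*` models are dumped with plain field names. A self-contained pydantic-v1 illustration of why the distinction matters (the demo model is hypothetical):

```python
from pydantic import BaseModel, Field


class _Demo(BaseModel):
    service_uuid: str = Field(..., alias="serviceUuid")

    class Config:
        allow_population_by_field_name = True


m = _Demo(service_uuid="1234")
assert m.dict() == {"service_uuid": "1234"}           # field names
assert m.dict(by_alias=True) == {"serviceUuid": "1234"}  # wire aliases
```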
diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py
index f069aeedd768..712746ccce97 100644
--- a/packages/service-library/tests/conftest.py
+++ b/packages/service-library/tests/conftest.py
@@ -76,9 +76,11 @@ async def get_redis_client_sdk(
Callable[[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]]
]:
@asynccontextmanager
- async def _(database: RedisDatabase) -> AsyncIterator[RedisClientSDK]:
+ async def _(
+ database: RedisDatabase, decode_response: bool = True # noqa: FBT002
+ ) -> AsyncIterator[RedisClientSDK]:
redis_resources_dns = redis_service.build_redis_dsn(database)
- client = RedisClientSDK(redis_resources_dns)
+ client = RedisClientSDK(redis_resources_dns, decode_responses=decode_response)
assert client
assert client.redis_dsn == redis_resources_dns
await client.setup()
diff --git a/packages/service-library/tests/deferred_tasks/conftest.py b/packages/service-library/tests/deferred_tasks/conftest.py
index 642a67336b6b..00881e614715 100644
--- a/packages/service-library/tests/deferred_tasks/conftest.py
+++ b/packages/service-library/tests/deferred_tasks/conftest.py
@@ -9,8 +9,10 @@
@pytest.fixture
async def redis_client_sdk_deferred_tasks(
get_redis_client_sdk: Callable[
- [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
+ [RedisDatabase, bool], AbstractAsyncContextManager[RedisClientSDK]
]
) -> AsyncIterator[RedisClientSDK]:
- async with get_redis_client_sdk(RedisDatabase.DEFERRED_TASKS) as client:
+ async with get_redis_client_sdk(
+ RedisDatabase.DEFERRED_TASKS, decode_response=False
+ ) as client:
yield client
diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py
index 75850fddc2e3..0ba848178d8e 100644
--- a/packages/service-library/tests/deferred_tasks/example_app.py
+++ b/packages/service-library/tests/deferred_tasks/example_app.py
@@ -8,6 +8,7 @@
from uuid import uuid4
from pydantic import NonNegativeInt
+from redis.asyncio import Redis
from servicelib.deferred_tasks import (
BaseDeferredHandler,
DeferredContext,
@@ -54,21 +55,22 @@ async def on_result(cls, result: str, context: DeferredContext) -> None:
class InMemoryLists:
def __init__(self, redis_settings: RedisSettings, port: int) -> None:
- self.redis_client_sdk = RedisClientSDK(
- redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS)
- )
+        # NOTE: RedisClientSDK is not required here, but it is a convenient way
+        # to construct a Redis connection
+ self.redis: Redis = RedisClientSDK(
+ redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
+ decode_responses=True,
+ ).redis
self.port = port
def _get_queue_name(self, queue_name: str) -> str:
return f"in_memory_lists::{queue_name}.{self.port}"
async def append_to(self, queue_name: str, value: Any) -> None:
- await self.redis_client_sdk.redis.rpush(self._get_queue_name(queue_name), value) # type: ignore
+ await self.redis.rpush(self._get_queue_name(queue_name), value) # type: ignore
async def get_all_from(self, queue_name: str) -> list:
- return await self.redis_client_sdk.redis.lrange(
- self._get_queue_name(queue_name), 0, -1
- ) # type: ignore
+ return await self.redis.lrange(self._get_queue_name(queue_name), 0, -1) # type: ignore
class ExampleApp:
@@ -79,18 +81,19 @@ def __init__(
in_memory_lists: InMemoryLists,
max_workers: NonNegativeInt,
) -> None:
- self._redis_client_sdk = RedisClientSDK(
- redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS)
+ self._redis_client = RedisClientSDK(
+ redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
+ decode_responses=False,
)
self._manager = DeferredManager(
rabbit_settings,
- self._redis_client_sdk,
+ self._redis_client,
globals_context={"in_memory_lists": in_memory_lists},
max_workers=max_workers,
)
async def setup(self) -> None:
- await self._redis_client_sdk.setup()
+ await self._redis_client.setup()
await self._manager.setup()
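
`InMemoryLists` keeps its per-test queues in plain Redis lists: `RPUSH` appends, `LRANGE 0 -1` reads everything back, and `decode_responses=True` makes the values come back as `str` rather than `bytes`. The primitive, sketched in isolation (names illustrative):

```python
from redis.asyncio import Redis


async def append_and_read_all(redis: Redis, queue: str, value: str) -> list[str]:
    await redis.rpush(queue, value)  # append to the tail of the list
    return await redis.lrange(queue, 0, -1)  # read the whole list back
```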
diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
index 9f3451058bfb..a5b45ed80d95 100644
--- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
+++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
@@ -52,7 +52,10 @@ class MockKeys(StrAutoEnum):
async def redis_client_sdk(
redis_service: RedisSettings,
) -> AsyncIterable[RedisClientSDK]:
- sdk = RedisClientSDK(redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS))
+ sdk = RedisClientSDK(
+ redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
+ decode_responses=False,
+ )
await sdk.setup()
yield sdk
await sdk.shutdown()
diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py
index 024ce9966aac..abdfcd5411e4 100644
--- a/packages/service-library/tests/test_logging_utils.py
+++ b/packages/service-library/tests/test_logging_utils.py
@@ -14,6 +14,7 @@
log_context,
log_decorator,
log_exceptions,
+ set_parent_module_log_level,
)
_logger = logging.getLogger(__name__)
@@ -322,3 +323,57 @@ def test_log_exceptions_and_reraise(caplog: pytest.LogCaptureFixture, level: int
assert len(caplog.records) == (1 if level != logging.NOTSET else 0)
assert all(r.levelno == level for r in caplog.records)
+
+
+def test_set_parent_module_log_level_(caplog: pytest.LogCaptureFixture):
+ caplog.clear()
+ # emulates service logger
+ logging.root.setLevel(logging.WARNING)
+
+ parent = logging.getLogger("parent")
+ child = logging.getLogger("parent.child")
+
+ assert parent.level == logging.NOTSET
+ assert child.level == logging.NOTSET
+
+ parent.debug("parent debug")
+ child.debug("child debug")
+
+ parent.info("parent info")
+ child.info("child info")
+
+ parent.warning("parent warning")
+ child.warning("child warning")
+
+ assert "parent debug" not in caplog.text
+ assert "child debug" not in caplog.text
+
+ assert "parent info" not in caplog.text
+ assert "child info" not in caplog.text
+
+ assert "parent warning" in caplog.text
+ assert "child warning" in caplog.text
+
+ caplog.clear()
+ set_parent_module_log_level("parent.child", logging.INFO)
+
+ assert parent.level == logging.INFO
+ assert child.level == logging.NOTSET
+
+ parent.debug("parent debug")
+ child.debug("child debug")
+
+ parent.info("parent info")
+ child.info("child info")
+
+ parent.warning("parent warning")
+ child.warning("child warning")
+
+ assert "parent debug" not in caplog.text
+ assert "child debug" not in caplog.text
+
+ assert "parent info" in caplog.text
+ assert "child info" in caplog.text
+
+ assert "parent warning" in caplog.text
+ assert "child warning" in caplog.text
diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt
index 900c4fea2aae..a97700778c33 100644
--- a/packages/settings-library/requirements/_base.txt
+++ b/packages/settings-library/requirements/_base.txt
@@ -4,19 +4,19 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
pygments==2.18.0
# via rich
-rich==13.7.1
+rich==13.8.1
# via
# -r requirements/_base.in
# typer
shellingham==1.5.4
# via typer
-typer==0.12.4
+typer==0.12.5
# via -r requirements/_base.in
typing-extensions==4.12.2
# via
diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt
index 9a1422f4bc48..d4aa9b9224c6 100644
--- a/packages/settings-library/requirements/_test.txt
+++ b/packages/settings-library/requirements/_test.txt
@@ -2,7 +2,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
iniconfig==2.0.0
# via pytest
@@ -12,7 +12,7 @@ packaging==24.1
# pytest-sugar
pluggy==1.5.0
# via pytest
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-cov
diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt
index d14257822b06..643018f44287 100644
--- a/packages/settings-library/requirements/_tools.txt
+++ b/packages/settings-library/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -46,14 +46,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -63,9 +63,9 @@ pyyaml==6.0.2
# via
# -c requirements/../../../requirements/constraints.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -73,7 +73,7 @@ typing-extensions==4.12.2
# via
# -c requirements/_base.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py
index 656ffdd2e716..b4873665dd1e 100644
--- a/packages/settings-library/src/settings_library/redis.py
+++ b/packages/settings-library/src/settings_library/redis.py
@@ -17,6 +17,7 @@ class RedisDatabase(IntEnum):
ANNOUNCEMENTS = 5
DISTRIBUTED_IDENTIFIERS = 6
DEFERRED_TASKS = 7
+ DYNAMIC_SERVICES = 8
class RedisSettings(BaseCustomSettings):
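
The new `DYNAMIC_SERVICES = 8` entry is addressed like any other logical Redis database, via the settings' DSN builder. A hedged usage sketch, assuming the usual `create_from_envs` constructor from settings-library:

```python
from settings_library.redis import RedisDatabase, RedisSettings

settings = RedisSettings.create_from_envs()
# e.g. redis://:<password>@<host>:6379/8
dsn = settings.build_redis_dsn(RedisDatabase.DYNAMIC_SERVICES)
```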
diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt
index 65fcbaa084cf..a2d040670d0d 100644
--- a/packages/simcore-sdk/requirements/_base.txt
+++ b/packages/simcore-sdk/requirements/_base.txt
@@ -6,7 +6,7 @@ aiocache==0.12.2
# -r requirements/_base.in
aiodebug==2.3.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-aiodocker==0.22.2
+aiodocker==0.23.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
aiofiles==24.1.0
# via
@@ -27,13 +27,13 @@ aiohttp==3.10.5
# aiodocker
aiopg==1.4.0
# via -r requirements/_base.in
-aiormq==6.8.0
+aiormq==6.8.1
# via aio-pika
aiosignal==1.3.1
# via aiohttp
-alembic==1.13.2
+alembic==1.13.3
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
-anyio==4.4.0
+anyio==4.6.0
# via
# fast-depends
# faststream
@@ -55,7 +55,7 @@ attrs==24.2.0
# aiohttp
# jsonschema
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -79,9 +79,9 @@ dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via pydantic
-fast-depends==2.4.8
+fast-depends==2.4.11
# via faststream
-faststream==0.5.18
+faststream==0.5.23
# via -r requirements/../../../packages/service-library/requirements/_base.in
flexcache==0.3
# via pint
@@ -95,17 +95,17 @@ googleapis-common-protos==1.65.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-greenlet==3.0.3
+greenlet==3.1.1
# via sqlalchemy
-grpcio==1.66.0
+grpcio==1.66.1
# via opentelemetry-exporter-otlp-proto-grpc
-idna==3.7
+idna==3.10
# via
# anyio
# email-validator
# requests
# yarl
-importlib-metadata==8.0.0
+importlib-metadata==8.4.0
# via opentelemetry-api
jsonschema==4.23.0
# via
@@ -129,11 +129,11 @@ markupsafe==2.1.5
# via mako
mdurl==0.1.2
# via markdown-it-py
-multidict==6.0.5
+multidict==6.1.0
# via
# aiohttp
# yarl
-opentelemetry-api==1.26.0
+opentelemetry-api==1.27.0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
# opentelemetry-exporter-otlp-proto-grpc
@@ -142,35 +142,35 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
# opentelemetry-semantic-conventions
-opentelemetry-exporter-otlp==1.26.0
+opentelemetry-exporter-otlp==1.27.0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-opentelemetry-exporter-otlp-proto-common==1.26.0
+opentelemetry-exporter-otlp-proto-common==1.27.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-exporter-otlp-proto-grpc==1.26.0
+opentelemetry-exporter-otlp-proto-grpc==1.27.0
# via opentelemetry-exporter-otlp
-opentelemetry-exporter-otlp-proto-http==1.26.0
+opentelemetry-exporter-otlp-proto-http==1.27.0
# via opentelemetry-exporter-otlp
-opentelemetry-instrumentation==0.47b0
+opentelemetry-instrumentation==0.48b0
# via opentelemetry-instrumentation-requests
-opentelemetry-instrumentation-requests==0.47b0
+opentelemetry-instrumentation-requests==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
-opentelemetry-proto==1.26.0
+opentelemetry-proto==1.27.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-sdk==1.26.0
+opentelemetry-sdk==1.27.0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
-opentelemetry-semantic-conventions==0.47b0
+opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
-opentelemetry-util-http==0.47b0
+opentelemetry-util-http==0.48b0
# via opentelemetry-instrumentation-requests
orjson==3.10.7
# via
@@ -189,7 +189,7 @@ pamqp==3.3.0
# via aiormq
pint==0.24.3
# via -r requirements/_base.in
-protobuf==4.25.4
+protobuf==4.25.5
# via
# googleapis-common-protos
# opentelemetry-proto
@@ -199,7 +199,7 @@ psycopg2-binary==2.9.9
# via
# aiopg
# sqlalchemy
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -218,7 +218,7 @@ pydantic==1.10.17
# fast-depends
pygments==2.18.0
# via rich
-pyinstrument==4.7.2
+pyinstrument==4.7.3
# via -r requirements/../../../packages/service-library/requirements/_base.in
python-dateutil==2.9.0.post0
# via arrow
@@ -249,7 +249,7 @@ referencing==0.29.3
# jsonschema-specifications
requests==2.32.3
# via opentelemetry-exporter-otlp-proto-http
-rich==13.7.1
+rich==13.8.1
# via
# -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/_base.in
@@ -258,7 +258,7 @@ rpds-py==0.20.0
# via
# jsonschema
# referencing
-setuptools==74.0.0
+setuptools==75.1.0
# via opentelemetry-instrumentation
shellingham==1.5.4
# via typer
@@ -266,7 +266,7 @@ six==1.16.0
# via python-dateutil
sniffio==1.3.1
# via anyio
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -288,12 +288,11 @@ tqdm==4.66.5
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
# -r requirements/_base.in
-typer==0.12.4
+typer==0.12.5
# via
# -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/_base.in
- # faststream
-types-python-dateutil==2.9.0.20240821
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
@@ -306,7 +305,7 @@ typing-extensions==4.12.2
# pint
# pydantic
# typer
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
@@ -320,11 +319,11 @@ wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
-yarl==1.9.4
+yarl==1.12.1
# via
# -r requirements/../../../packages/postgres-database/requirements/_base.in
# aio-pika
# aiohttp
# aiormq
-zipp==3.20.1
+zipp==3.20.2
# via importlib-metadata
diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt
index 16e63f1e6be5..783f629f7788 100644
--- a/packages/simcore-sdk/requirements/_test.txt
+++ b/packages/simcore-sdk/requirements/_test.txt
@@ -16,7 +16,7 @@ aiohttp==3.10.5
# -c requirements/_base.txt
# aiobotocore
# aioresponses
-aioitertools==0.11.0
+aioitertools==0.12.0
# via aiobotocore
aioresponses==0.7.6
# via -r requirements/_test.in
@@ -24,7 +24,7 @@ aiosignal==1.3.1
# via
# -c requirements/_base.txt
# aiohttp
-alembic==1.13.2
+alembic==1.13.3
# via
# -c requirements/_base.txt
# -r requirements/_test.in
@@ -36,7 +36,7 @@ attrs==24.2.0
# aiohttp
# jsonschema
# referencing
-aws-sam-translator==1.89.0
+aws-sam-translator==1.91.0
# via cfn-lint
aws-xray-sdk==2.14.0
# via moto
@@ -54,16 +54,16 @@ botocore==1.34.131
# boto3
# moto
# s3transfer
-botocore-stubs==1.35.2
+botocore-stubs==1.35.25
# via types-aiobotocore
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
-cfn-lint==1.10.3
+cfn-lint==1.15.0
# via moto
charset-normalizer==3.3.2
# via
@@ -78,7 +78,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
@@ -89,7 +89,7 @@ docker==7.1.0
# moto
execnet==2.1.1
# via pytest-xdist
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flaky==3.8.1
# via -r requirements/_test.in
@@ -97,22 +97,22 @@ flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
frozenlist==1.4.1
# via
# -c requirements/_base.txt
# aiohttp
# aiosignal
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
-greenlet==3.0.3
+greenlet==3.1.1
# via
# -c requirements/_base.txt
# sqlalchemy
icdiff==2.0.7
# via pytest-icdiff
-idna==3.7
+idna==3.10
# via
# -c requirements/_base.txt
# requests
@@ -132,7 +132,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -166,16 +166,16 @@ markupsafe==2.1.5
# jinja2
# mako
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
-multidict==6.0.5
+multidict==6.1.0
# via
# -c requirements/_base.txt
# aiohttp
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -198,18 +198,18 @@ ply==3.11
# via jsonpath-ng
pprintpp==0.4.0
# via pytest-icdiff
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
-pydantic==1.10.17
+pydantic==1.10.18
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -260,7 +260,7 @@ referencing==0.29.3
# jsonschema
# jsonschema-path
# jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -281,7 +281,7 @@ rpds-py==0.20.0
# referencing
s3transfer==0.10.2
# via boto3
-setuptools==74.0.0
+setuptools==75.1.0
# via
# -c requirements/_base.txt
# moto
@@ -290,7 +290,7 @@ six==1.16.0
# -c requirements/_base.txt
# python-dateutil
# rfc3339-validator
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
@@ -298,17 +298,17 @@ sqlalchemy==1.4.53
# alembic
sqlalchemy2-stubs==0.0.2a38
# via sqlalchemy
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
termcolor==2.4.0
# via pytest-sugar
-types-aiobotocore==2.13.2
+types-aiobotocore==2.15.1
# via -r requirements/_test.in
-types-aiobotocore-s3==2.13.2
+types-aiobotocore-s3==2.15.1
# via types-aiobotocore
types-aiofiles==24.1.0.20240626
# via -r requirements/_test.in
-types-awscrt==0.21.2
+types-awscrt==0.21.5
# via botocore-stubs
types-tqdm==4.66.0.20240417
# via -r requirements/_test.in
@@ -323,7 +323,7 @@ typing-extensions==4.12.2
# sqlalchemy2-stubs
# types-aiobotocore
# types-aiobotocore-s3
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
@@ -331,7 +331,7 @@ urllib3==2.2.2
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
@@ -342,7 +342,7 @@ wrapt==1.16.0
# aws-xray-sdk
xmltodict==0.13.0
# via moto
-yarl==1.9.4
+yarl==1.12.1
# via
# -c requirements/_base.txt
# aiohttp
diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt
index f6ad9dfbd42f..d1b323ae5dde 100644
--- a/packages/simcore-sdk/requirements/_tools.txt
+++ b/packages/simcore-sdk/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -51,14 +51,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -70,9 +70,9 @@ pyyaml==6.0.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==74.0.0
+setuptools==75.1.0
# via
# -c requirements/_base.txt
# -c requirements/_test.txt
@@ -84,7 +84,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
index 8c78e28a0661..9da016b4cea9 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
@@ -1,4 +1,6 @@
import logging
+from abc import ABC, abstractmethod
+from asyncio import CancelledError
from collections.abc import Callable, Coroutine
from pathlib import Path
from typing import Any
@@ -27,6 +29,20 @@
log = logging.getLogger(__name__)
+class OutputsCallbacks(ABC):
+ @abstractmethod
+ async def aborted(self, key: ServicePortKey) -> None:
+ pass
+
+ @abstractmethod
+ async def finished_succesfully(self, key: ServicePortKey) -> None:
+ pass
+
+ @abstractmethod
+ async def finished_with_error(self, key: ServicePortKey) -> None:
+ pass
+
+
class Nodeports(BaseModel):
"""
Represents a node in a project and all its input/output ports
@@ -148,6 +164,7 @@ async def set_multiple(
],
*,
progress_bar: ProgressBarData,
+ outputs_callbacks: OutputsCallbacks | None,
) -> None:
"""
Sets the provided values to the respective input or output ports
@@ -156,26 +173,44 @@ async def set_multiple(
raises ValidationError
"""
+
+ async def _set_with_notifications(
+ port_key: ServicePortKey,
+ value: ItemConcreteValue | None,
+ set_kwargs: SetKWargs | None,
+ sub_progress: ProgressBarData,
+ ) -> None:
+ try:
+ # pylint: disable=protected-access
+ await self.internal_outputs[port_key]._set( # noqa: SLF001
+ value, set_kwargs=set_kwargs, progress_bar=sub_progress
+ )
+ if outputs_callbacks:
+ await outputs_callbacks.finished_succesfully(port_key)
+ except UnboundPortError:
+                # not available, try the inputs;
+                # if this fails, it will raise another exception
+ # pylint: disable=protected-access
+ await self.internal_inputs[port_key]._set( # noqa: SLF001
+ value, set_kwargs=set_kwargs, progress_bar=sub_progress
+ )
+ except CancelledError:
+ if outputs_callbacks:
+ await outputs_callbacks.aborted(port_key)
+ raise
+ except Exception:
+ if outputs_callbacks:
+ await outputs_callbacks.finished_with_error(port_key)
+ raise
+
tasks = []
async with progress_bar.sub_progress(
steps=len(port_values.items()), description=IDStr("set multiple")
) as sub_progress:
for port_key, (value, set_kwargs) in port_values.items():
- # pylint: disable=protected-access
- try:
- tasks.append(
- self.internal_outputs[port_key]._set(
- value, set_kwargs=set_kwargs, progress_bar=sub_progress
- )
- )
- except UnboundPortError:
- # not available try inputs
- # if this fails it will raise another exception
- tasks.append(
- self.internal_inputs[port_key]._set(
- value, set_kwargs=set_kwargs, progress_bar=sub_progress
- )
- )
+ tasks.append(
+ _set_with_notifications(port_key, value, set_kwargs, sub_progress)
+ )
results = await logged_gather(*tasks)
await self.save_to_db_cb(self)
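
With the refactoring above, `set_multiple` wraps every port write in `_set_with_notifications`, so a caller can observe the per-output-port outcome: success, cancellation, or failure. A minimal concrete implementation of the new ABC (purely illustrative; the method name spelling follows the ABC as defined above):

```python
import logging

from models_library.services_types import ServicePortKey
from simcore_sdk.node_ports_v2.nodeports_v2 import OutputsCallbacks

_logger = logging.getLogger(__name__)


class LoggingOutputsCallbacks(OutputsCallbacks):
    async def aborted(self, key: ServicePortKey) -> None:
        _logger.warning("upload of port '%s' was cancelled", key)

    async def finished_succesfully(self, key: ServicePortKey) -> None:
        _logger.info("upload of port '%s' finished", key)

    async def finished_with_error(self, key: ServicePortKey) -> None:
        _logger.error("upload of port '%s' failed", key)
```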
diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
index a9016609d130..73fb423d101a 100644
--- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
+++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
@@ -1,9 +1,10 @@
# pylint: disable=pointless-statement
+# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
-# pylint: disable=protected-access
import filecmp
@@ -13,6 +14,7 @@
from collections.abc import Awaitable, Callable, Iterable
from pathlib import Path
from typing import Any
+from unittest.mock import AsyncMock
from uuid import uuid4
import np_helpers
@@ -28,13 +30,14 @@
SimcoreS3FileID,
)
from models_library.services_types import ServicePortKey
+from pytest_mock import MockerFixture
from servicelib.progress_bar import ProgressBarData
from settings_library.r_clone import RCloneSettings
from simcore_sdk import node_ports_v2
from simcore_sdk.node_ports_common.exceptions import UnboundPortError
from simcore_sdk.node_ports_v2 import exceptions
from simcore_sdk.node_ports_v2.links import ItemConcreteValue, PortLink
-from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports
+from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports, OutputsCallbacks
from simcore_sdk.node_ports_v2.port import Port
from utils_port_v2 import CONSTANT_UUID
@@ -749,6 +752,34 @@ async def _upload_create_task(item_key: str) -> None:
)
+class _Callbacks(OutputsCallbacks):
+ async def aborted(self, key: ServicePortKey) -> None:
+ pass
+
+ async def finished_succesfully(self, key: ServicePortKey) -> None:
+ pass
+
+ async def finished_with_error(self, key: ServicePortKey) -> None:
+ pass
+
+
+@pytest.fixture
+async def output_callbacks() -> _Callbacks:
+ return _Callbacks()
+
+
+@pytest.fixture
+async def spy_outputs_callbaks(
+ mocker: MockerFixture, output_callbacks: _Callbacks
+) -> dict[str, AsyncMock]:
+ return {
+ "aborted": mocker.spy(output_callbacks, "aborted"),
+ "finished_succesfully": mocker.spy(output_callbacks, "finished_succesfully"),
+ "finished_with_error": mocker.spy(output_callbacks, "finished_with_error"),
+ }
+
+
+@pytest.mark.parametrize("use_output_callbacks", [True, False])
async def test_batch_update_inputs_outputs(
user_id: int,
project_id: str,
@@ -757,7 +788,12 @@ async def test_batch_update_inputs_outputs(
port_count: int,
option_r_clone_settings: RCloneSettings | None,
faker: Faker,
+ output_callbacks: _Callbacks,
+ spy_outputs_callbaks: dict[str, AsyncMock],
+ use_output_callbacks: bool,
) -> None:
+ callbacks = output_callbacks if use_output_callbacks else None
+
outputs = [(f"value_out_{i}", "integer", None) for i in range(port_count)]
inputs = [(f"value_in_{i}", "integer", None) for i in range(port_count)]
config_dict, _, _ = create_special_configuration(inputs=inputs, outputs=outputs)
@@ -771,12 +807,14 @@ async def test_batch_update_inputs_outputs(
await check_config_valid(PORTS, config_dict)
async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar:
+ port_values = (await PORTS.outputs).values()
await PORTS.set_multiple(
- {
- ServicePortKey(port.key): (k, None)
- for k, port in enumerate((await PORTS.outputs).values())
- },
+ {ServicePortKey(port.key): (k, None) for k, port in enumerate(port_values)},
progress_bar=progress_bar,
+ outputs_callbacks=callbacks,
+ )
+ assert len(spy_outputs_callbaks["finished_succesfully"].call_args_list) == (
+ len(port_values) if use_output_callbacks else 0
)
# pylint: disable=protected-access
assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001
@@ -786,6 +824,11 @@ async def test_batch_update_inputs_outputs(
for k, port in enumerate((await PORTS.inputs).values(), start=1000)
},
progress_bar=progress_bar,
+ outputs_callbacks=callbacks,
+ )
+ # inputs do not trigger callbacks
+ assert len(spy_outputs_callbaks["finished_succesfully"].call_args_list) == (
+ len(port_values) if use_output_callbacks else 0
)
assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001
@@ -807,4 +850,11 @@ async def test_batch_update_inputs_outputs(
await PORTS.set_multiple(
{ServicePortKey("missing_key_in_both"): (123132, None)},
progress_bar=progress_bar,
+ outputs_callbacks=callbacks,
)
+
+ assert len(spy_outputs_callbaks["finished_succesfully"].call_args_list) == (
+ len(port_values) if use_output_callbacks else 0
+ )
+ assert len(spy_outputs_callbaks["aborted"].call_args_list) == 0
+ assert len(spy_outputs_callbaks["finished_with_error"].call_args_list) == 0
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py
index 2d32d345ffa7..c32c055afe4f 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py
@@ -18,6 +18,7 @@
FileUploadSchema,
UploadedPart,
)
+from models_library.basic_types import IDStr
from moto.server import ThreadedMotoServer
from pydantic import AnyUrl, ByteSize, parse_obj_as
from pytest_mock import MockerFixture
@@ -271,7 +272,7 @@ async def test_upload_file_to_presigned_links(
assert effective_chunk_size <= used_chunk_size
upload_links = await create_upload_links(num_links, used_chunk_size)
assert len(upload_links.urls) == num_links
- async with ProgressBarData(num_steps=1) as progress_bar:
+ async with ProgressBarData(num_steps=1, description=IDStr("")) as progress_bar:
uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_links(
session=client_session,
file_upload_links=upload_links,
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
index 91609476b9cf..f8d09836213a 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
@@ -5,6 +5,7 @@
from pathlib import Path
from typing import Any, Callable
+from unittest.mock import AsyncMock
import pytest
from faker import Faker
@@ -138,6 +139,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
+ list(original_outputs.values())
},
progress_bar=progress_bar,
+ outputs_callbacks=AsyncMock(),
)
assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py
index 8485e19b74b6..49fa694742e1 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py
@@ -1,9 +1,10 @@
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
# pylint:disable=no-member
# pylint:disable=protected-access
+# pylint:disable=redefined-outer-name
# pylint:disable=too-many-arguments
+# pylint:disable=too-many-positional-arguments
+# pylint:disable=unused-argument
+# pylint:disable=unused-variable
import os
diff --git a/services/agent/requirements/_test.txt b/services/agent/requirements/_test.txt
index 9a8a524999d9..fbcf8d854094 100644
--- a/services/agent/requirements/_test.txt
+++ b/services/agent/requirements/_test.txt
@@ -7,7 +7,7 @@ aiohttp==3.8.5
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aiobotocore
-aioitertools==0.11.0
+aioitertools==0.12.0
# via aiobotocore
aiosignal==1.2.0
# via
@@ -51,9 +51,9 @@ certifi==2023.11.17
# httpcore
# httpx
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
-cfn-lint==1.10.3
+cfn-lint==1.15.0
# via moto
charset-normalizer==2.1.1
# via
@@ -68,7 +68,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# moto
@@ -81,20 +81,20 @@ ecdsa==0.19.0
# moto
# python-jose
# sshpubkeys
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
frozenlist==1.3.1
# via
# -c requirements/_base.txt
# aiohttp
# aiosignal
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
h11==0.14.0
# via
@@ -129,7 +129,7 @@ jmespath==1.0.1
# via
# boto3
# botocore
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -170,7 +170,7 @@ pluggy==1.5.0
# via pytest
py-partiql-parser==0.4.0
# via moto
-pyasn1==0.6.0
+pyasn1==0.6.1
# via
# python-jose
# rsa
@@ -181,13 +181,13 @@ pydantic==1.10.2
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
pyrsistent==0.19.2
# via
# -c requirements/_base.txt
# jsonschema
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -219,7 +219,7 @@ pyyaml==6.0.2
# moto
# openapi-spec-validator
# responses
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -253,7 +253,7 @@ sniffio==1.3.0
# httpx
sshpubkeys==3.3.1
# via moto
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
typing-extensions==4.4.0
# via
@@ -261,14 +261,14 @@ typing-extensions==4.4.0
# aws-sam-translator
# cfn-lint
# pydantic
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# botocore
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/services/agent/requirements/_tools.txt b/services/agent/requirements/_tools.txt
index 84228c21c49a..6937ce6b8b5c 100644
--- a/services/agent/requirements/_tools.txt
+++ b/services/agent/requirements/_tools.txt
@@ -1,10 +1,10 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via
# -r requirements/../../../requirements/devenv.txt
# -r requirements/_tools.in
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -20,9 +20,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -51,14 +51,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -70,7 +70,7 @@ pyyaml==6.0.2
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==69.2.0
# via
@@ -84,9 +84,9 @@ typing-extensions==4.4.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt
index d171a96e8f06..f4201ab9d4d6 100644
--- a/services/api-server/requirements/_test.txt
+++ b/services/api-server/requirements/_test.txt
@@ -33,19 +33,19 @@ aws-sam-translator==1.55.0
# cfn-lint
aws-xray-sdk==2.14.0
# via moto
-boto3==1.35.2
+boto3==1.35.25
# via
# aws-sam-translator
# moto
-boto3-stubs==1.35.2
+boto3-stubs==1.35.25
# via types-boto3
-botocore==1.35.2
+botocore==1.35.25
# via
# aws-xray-sdk
# boto3
# moto
# s3transfer
-botocore-stubs==1.35.2
+botocore-stubs==1.35.25
# via boto3-stubs
certifi==2024.2.2
# via
@@ -89,20 +89,20 @@ ecdsa==0.19.0
# moto
# python-jose
# sshpubkeys
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flask==2.1.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
frozenlist==1.4.1
# via
# -c requirements/_base.txt
# aiohttp
# aiosignal
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
greenlet==3.0.3
# via
@@ -147,11 +147,11 @@ jmespath==1.0.1
# botocore
jschema-to-python==1.2.3
# via cfn-lint
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
-jsonpickle==3.2.2
+jsonpickle==3.3.0
# via jschema-to-python
jsonpointer==3.0.0
# via jsonpatch
@@ -187,7 +187,7 @@ multidict==6.0.5
# -c requirements/_base.txt
# aiohttp
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -203,13 +203,13 @@ packaging==24.0
# via
# -c requirements/_base.txt
# pytest
-pbr==6.0.0
+pbr==6.1.0
# via
# jschema-to-python
# sarif-om
pluggy==1.5.0
# via pytest
-pyasn1==0.6.0
+pyasn1==0.6.1
# via
# python-jose
# rsa
@@ -221,13 +221,13 @@ pyinstrument==4.6.2
# via
# -c requirements/_base.txt
# -r requirements/_test.in
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
pyrsistent==0.20.0
# via
# -c requirements/_base.txt
# jsonschema
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -254,7 +254,7 @@ python-dateutil==2.9.0.post0
# moto
python-jose==3.3.0
# via moto
-pytz==2024.1
+pytz==2024.2
# via moto
pyyaml==6.0.1
# via
@@ -314,11 +314,11 @@ sshpubkeys==3.3.1
# via moto
types-aiofiles==24.1.0.20240626
# via -r requirements/_test.in
-types-awscrt==0.21.2
+types-awscrt==0.21.5
# via botocore-stubs
types-boto3==1.0.2
# via -r requirements/_test.in
-types-s3transfer==0.10.1
+types-s3transfer==0.10.2
# via boto3-stubs
typing-extensions==4.10.0
# via
diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt
index a741d4f592a7..f7033c1523f0 100644
--- a/services/api-server/requirements/_tools.txt
+++ b/services/api-server/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -20,9 +20,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -41,7 +41,7 @@ markupsafe==2.1.5
# jinja2
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -64,14 +64,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -84,7 +84,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==69.2.0
# via
@@ -98,9 +98,9 @@ typing-extensions==4.10.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py
index ffaa1fd5618f..0a23d0400f75 100644
--- a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py
+++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py
@@ -12,7 +12,9 @@ class BaseBackEndError(ApiServerBaseError):
@classmethod
def named_fields(cls) -> set[str]:
- return set(parse.compile(cls.msg_template).named_fields)
+ return set(
+ parse.compile(cls.msg_template).named_fields # pylint: disable=no-member
+ )
class ListSolversOrStudiesError(BaseBackEndError):
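
On the `no-member` suppression above: `parse.compile` returns a `Parser` whose `named_fields` attribute is populated at runtime, so pylint's static analysis cannot see it. A quick illustration of what it yields for a message template:

```python
import parse

compiled = parse.compile("job {job_id} failed for user {user_id}")
assert set(compiled.named_fields) == {"job_id", "user_id"}
```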
diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt
index 8f2bda059cdb..47379c4d69fa 100644
--- a/services/autoscaling/requirements/_test.txt
+++ b/services/autoscaling/requirements/_test.txt
@@ -40,7 +40,7 @@ certifi==2024.2.2
# httpcore
# httpx
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
cfn-lint==1.10.3
# via moto
@@ -56,28 +56,28 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
# moto
-deepdiff==7.0.1
+deepdiff==8.0.1
# via -r requirements/_test.in
docker==7.1.0
# via
# -r requirements/_test.in
# moto
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
-fakeredis==2.23.5
+fakeredis==2.24.1
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
h11==0.14.0
# via
@@ -118,7 +118,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -148,7 +148,7 @@ markupsafe==2.1.5
# -c requirements/_base.txt
# jinja2
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
@@ -158,7 +158,7 @@ openapi-schema-validator==0.6.2
# via openapi-spec-validator
openapi-spec-validator==0.7.1
# via moto
-ordered-set==4.1.0
+orderly-set==5.2.2
# via deepdiff
packaging==24.0
# via
@@ -177,7 +177,7 @@ psutil==6.0.0
# via
# -c requirements/_base.txt
# -r requirements/_test.in
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
@@ -186,9 +186,9 @@ pydantic==1.10.15
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -238,7 +238,7 @@ referencing==0.29.3
# jsonschema
# jsonschema-path
# jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -281,7 +281,7 @@ sortedcontainers==2.4.0
# via
# -c requirements/_base.txt
# fakeredis
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
termcolor==2.4.0
# via pytest-sugar
@@ -293,7 +293,7 @@ types-aiobotocore-ec2==2.13.0
# via
# -c requirements/_base.txt
# types-aiobotocore
-types-aiobotocore-iam==2.13.2
+types-aiobotocore-iam==2.13.3
# via types-aiobotocore
types-aiobotocore-s3==2.13.0
# via
@@ -307,7 +307,7 @@ types-awscrt==0.20.9
# via
# -c requirements/_base.txt
# botocore-stubs
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.11.0
# via
@@ -328,7 +328,7 @@ urllib3==2.2.1
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/services/autoscaling/requirements/_tools.txt b/services/autoscaling/requirements/_tools.txt
index 4ec61eba91aa..97a49efc2ebd 100644
--- a/services/autoscaling/requirements/_tools.txt
+++ b/services/autoscaling/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -48,14 +48,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -82,9 +82,9 @@ typing-extensions==4.11.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py
index 327b006802d4..5811b43b2f06 100644
--- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py
+++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py
@@ -1,9 +1,10 @@
# pylint: disable=no-value-for-parameter
# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-# pylint: disable=unused-variable
# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=too-many-statements
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
import asyncio
diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py
index 8f8c2ac3fe0b..3a79a11c853c 100644
--- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py
+++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py
@@ -1,9 +1,10 @@
# pylint: disable=no-value-for-parameter
# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-# pylint: disable=unused-variable
# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=too-many-statements
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
import asyncio
import datetime
diff --git a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py
index 28d26b7dfe09..26ac271db298 100644
--- a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py
+++ b/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py
@@ -1,8 +1,9 @@
# pylint: disable=no-value-for-parameter
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
import datetime
import json
diff --git a/services/autoscaling/tests/unit/test_utils_rabbitmq.py b/services/autoscaling/tests/unit/test_utils_rabbitmq.py
index 6b6308399d08..1c5920f9dc74 100644
--- a/services/autoscaling/tests/unit/test_utils_rabbitmq.py
+++ b/services/autoscaling/tests/unit/test_utils_rabbitmq.py
@@ -1,7 +1,8 @@
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
+# pylint: disable=too-many-positional-arguments
# pylint:disable=redefined-outer-name
# pylint:disable=too-many-arguments
+# pylint:disable=unused-argument
+# pylint:disable=unused-variable
from collections.abc import Awaitable, Callable
diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt
index 8b2787688915..6fdd398def33 100644
--- a/services/catalog/requirements/_test.txt
+++ b/services/catalog/requirements/_test.txt
@@ -43,7 +43,7 @@ coverage==7.6.1
# via pytest-cov
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
frozenlist==1.4.1
# via
@@ -98,7 +98,7 @@ multidict==6.0.5
# -c requirements/_base.txt
# aiohttp
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -112,7 +112,7 @@ ptvsd==4.3.2
# via -r requirements/_test.in
py-cpuinfo==9.0.0
# via pytest-benchmark
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-aiohttp
@@ -177,7 +177,7 @@ sqlalchemy2-stubs==0.0.2a38
# via sqlalchemy
types-psycopg2==2.9.21.20240819
# via -r requirements/_test.in
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.10.0
# via
diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt
index 3ee1fe551b59..c0a526c13100 100644
--- a/services/catalog/requirements/_tools.txt
+++ b/services/catalog/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -51,14 +51,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -69,7 +69,7 @@ pyyaml==6.0.1
# -c requirements/_base.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -82,9 +82,9 @@ typing-extensions==4.10.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py
index 1681622314f9..e31913ab9bbf 100644
--- a/services/catalog/tests/unit/with_dbs/conftest.py
+++ b/services/catalog/tests/unit/with_dbs/conftest.py
@@ -1,6 +1,7 @@
# pylint: disable=not-context-manager
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
@@ -109,7 +110,7 @@ async def product(
"""
# NOTE: this fixture ignores products' group-id but it is fine for this test context
assert product["group_id"] is None
- async with insert_and_get_row_lifespan(
+ async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
sqlalchemy_async_engine,
table=products,
values=product,
@@ -149,7 +150,7 @@ async def user(
injects a user in db
"""
assert user_id == user["id"]
- async with insert_and_get_row_lifespan(
+ async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
sqlalchemy_async_engine,
table=users,
values=user,
@@ -442,9 +443,9 @@ def _fake_factory(
@pytest.fixture
-def create_director_list_services_from() -> Callable[
- [list[dict[str, Any]], list], list[dict[str, Any]]
-]:
+def create_director_list_services_from() -> (
+ Callable[[list[dict[str, Any]], list], list[dict[str, Any]]]
+):
"""Convenience function to merge outputs of
- `create_fake_service_data` callable with those of
- `expected_director_list_services` fixture
diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py
index f8515b572983..394ea9123ad3 100644
--- a/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py
+++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py
@@ -1,7 +1,8 @@
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
import asyncio
diff --git a/services/catalog/tests/unit/with_dbs/test_api_rpc.py b/services/catalog/tests/unit/with_dbs/test_api_rpc.py
index dfbf9c4adc84..3aeaaf4ef73a 100644
--- a/services/catalog/tests/unit/with_dbs/test_api_rpc.py
+++ b/services/catalog/tests/unit/with_dbs/test_api_rpc.py
@@ -1,7 +1,8 @@
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
from collections.abc import AsyncIterator, Callable
@@ -245,7 +246,7 @@ async def other_user(
) -> AsyncIterator[dict[str, Any]]:
_user = random_user(fake=faker, id=user_id + 1)
- async with insert_and_get_row_lifespan(
+ async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
sqlalchemy_async_engine,
table=users,
values=_user,
diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt
index 4dea10f742dc..00a7437644c0 100644
--- a/services/clusters-keeper/requirements/_test.txt
+++ b/services/clusters-keeper/requirements/_test.txt
@@ -50,7 +50,7 @@ certifi==2024.2.2
# httpcore
# httpx
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
cfn-lint==1.10.3
# via moto
@@ -66,35 +66,35 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
# moto
debugpy==1.8.5
# via -r requirements/_test.in
-deepdiff==7.0.1
+deepdiff==8.0.1
# via -r requirements/_test.in
docker==7.1.0
# via
# -r requirements/_test.in
# moto
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
-fakeredis==2.23.5
+fakeredis==2.24.1
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
frozenlist==1.4.1
# via
# -c requirements/_base.txt
# aiohttp
# aiosignal
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
h11==0.14.0
# via
@@ -134,7 +134,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -164,7 +164,7 @@ markupsafe==2.1.5
# -c requirements/_base.txt
# jinja2
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
@@ -179,7 +179,7 @@ openapi-schema-validator==0.6.2
# via openapi-spec-validator
openapi-spec-validator==0.7.1
# via moto
-ordered-set==4.1.0
+orderly-set==5.2.2
# via deepdiff
packaging==24.0
# via
@@ -197,7 +197,7 @@ psutil==6.0.0
# via
# -c requirements/_base.txt
# -r requirements/_test.in
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
@@ -206,9 +206,9 @@ pydantic==1.10.15
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -252,7 +252,7 @@ referencing==0.29.3
# jsonschema
# jsonschema-path
# jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -295,9 +295,9 @@ sortedcontainers==2.4.0
# via
# -c requirements/_base.txt
# fakeredis
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.11.0
# via
@@ -314,7 +314,7 @@ urllib3==2.2.1
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/services/clusters-keeper/requirements/_tools.txt b/services/clusters-keeper/requirements/_tools.txt
index 4ec61eba91aa..97a49efc2ebd 100644
--- a/services/clusters-keeper/requirements/_tools.txt
+++ b/services/clusters-keeper/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -48,14 +48,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -82,9 +82,9 @@ typing-extensions==4.11.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt
index 1f379c87c051..7f13a97ad899 100644
--- a/services/dask-sidecar/requirements/_test.txt
+++ b/services/dask-sidecar/requirements/_test.txt
@@ -27,7 +27,7 @@ certifi==2024.7.4
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
cfn-lint==1.10.3
# via moto
@@ -43,7 +43,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
@@ -53,15 +53,15 @@ docker==7.1.0
# via
# -r requirements/_test.in
# moto
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
icdiff==2.0.7
# via pytest-icdiff
@@ -86,7 +86,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -114,7 +114,7 @@ markupsafe==2.1.5
# -c requirements/_base.txt
# jinja2
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
@@ -137,7 +137,7 @@ ply==3.11
# via jsonpath-ng
pprintpp==0.4.0
# via pytest-icdiff
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
@@ -146,13 +146,13 @@ pydantic==1.10.15
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyftpdlib==1.5.10
+pyftpdlib==2.0.0
# via pytest-localftpserver
pyopenssl==24.2.1
# via pytest-localftpserver
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -203,7 +203,7 @@ referencing==0.29.3
# jsonschema
# jsonschema-path
# jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -232,7 +232,7 @@ six==1.16.0
# -c requirements/_base.txt
# python-dateutil
# rfc3339-validator
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
termcolor==2.4.0
# via pytest-sugar
@@ -252,7 +252,7 @@ urllib3==2.2.1
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt
index 4ec61eba91aa..97a49efc2ebd 100644
--- a/services/dask-sidecar/requirements/_tools.txt
+++ b/services/dask-sidecar/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -48,14 +48,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -82,9 +82,9 @@ typing-extensions==4.11.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt
index e54eea880ec4..b09942fe970d 100644
--- a/services/datcore-adapter/requirements/_test.txt
+++ b/services/datcore-adapter/requirements/_test.txt
@@ -4,9 +4,9 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-boto3-stubs==1.35.2
+boto3-stubs==1.35.25
# via types-boto3
-botocore-stubs==1.35.2
+botocore-stubs==1.35.25
# via
# boto3-stubs
# types-botocore
@@ -27,7 +27,7 @@ coverage==7.6.1
# pytest-cov
execnet==2.1.1
# via pytest-xdist
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
h11==0.14.0
# via
@@ -60,7 +60,7 @@ pluggy==1.5.0
# via pytest
pprintpp==0.4.0
# via pytest-icdiff
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -110,13 +110,13 @@ sniffio==1.3.1
# httpx
termcolor==2.4.0
# via pytest-sugar
-types-awscrt==0.21.2
+types-awscrt==0.21.5
# via botocore-stubs
types-boto3==1.0.2
# via -r requirements/_test.in
types-botocore==1.0.2
# via -r requirements/_test.in
-types-s3transfer==0.10.1
+types-s3transfer==0.10.2
# via boto3-stubs
typing-extensions==4.10.0
# via
diff --git a/services/datcore-adapter/requirements/_tools.txt b/services/datcore-adapter/requirements/_tools.txt
index c155d41e2659..508da70431fe 100644
--- a/services/datcore-adapter/requirements/_tools.txt
+++ b/services/datcore-adapter/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -46,14 +46,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -65,7 +65,7 @@ pyyaml==6.0.1
# -c requirements/_base.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -78,9 +78,9 @@ typing-extensions==4.10.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt
index a344bc678853..22d12c69c200 100644
--- a/services/director-v2/requirements/_test.txt
+++ b/services/director-v2/requirements/_test.txt
@@ -16,7 +16,7 @@ aiohttp==3.9.5
# -c requirements/_base.txt
# aiobotocore
# dask-gateway-server
-aioitertools==0.11.0
+aioitertools==0.12.0
# via aiobotocore
aiormq==6.8.0
# via
@@ -43,7 +43,7 @@ attrs==23.2.0
# -c requirements/_base.txt
# aiohttp
# pytest-docker
-bokeh==3.5.1
+bokeh==3.5.2
# via dask
boto3==1.34.131
# via aiobotocore
@@ -59,7 +59,7 @@ certifi==2024.2.2
# httpcore
# httpx
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
charset-normalizer==3.3.2
# via
@@ -77,11 +77,11 @@ cloudpickle==3.0.0
# distributed
colorlog==6.8.2
# via dask-gateway-server
-contourpy==1.2.1
+contourpy==1.3.0
# via bokeh
coverage==7.6.1
# via pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# dask-gateway-server
@@ -100,7 +100,7 @@ docker==7.1.0
# via -r requirements/_test.in
execnet==2.1.1
# via pytest-xdist
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flaky==3.8.1
# via -r requirements/_test.in
@@ -181,7 +181,7 @@ multidict==6.0.5
# aiohttp
# async-asgi-testclient
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -203,7 +203,7 @@ pamqp==3.3.0
# via
# -c requirements/_base.txt
# aiormq
-pandas==2.2.2
+pandas==2.2.3
# via bokeh
partd==1.4.2
# via
@@ -221,7 +221,7 @@ psutil==6.0.0
# distributed
pycparser==2.22
# via cffi
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -252,7 +252,7 @@ python-dateutil==2.9.0.post0
# botocore
# faker
# pandas
-pytz==2024.1
+pytz==2024.2
# via pandas
pyyaml==6.0.1
# via
@@ -310,11 +310,11 @@ tornado==6.4
# distributed
traitlets==5.14.3
# via dask-gateway-server
-types-networkx==3.2.1.20240820
+types-networkx==3.2.1.20240918
# via -r requirements/_test.in
types-psycopg2==2.9.21.20240819
# via -r requirements/_test.in
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.11.0
# via
@@ -322,7 +322,7 @@ typing-extensions==4.11.0
# alembic
# mypy
# sqlalchemy2-stubs
-tzdata==2024.1
+tzdata==2024.2
# via pandas
urllib3==2.2.1
# via
@@ -336,7 +336,7 @@ wrapt==1.16.0
# via
# -c requirements/_base.txt
# aiobotocore
-xyzservices==2024.6.0
+xyzservices==2024.9.0
# via bokeh
yarl==1.9.4
# via
diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt
index 311f84dcf2f2..062a460207d6 100644
--- a/services/director-v2/requirements/_tools.txt
+++ b/services/director-v2/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -51,14 +51,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -71,7 +71,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -84,9 +84,9 @@ typing-extensions==4.11.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py
index cea6e18770db..49fd757e8867 100644
--- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py
+++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py
@@ -286,7 +286,7 @@ async def _try_start_pipeline(
)
# NOTE: in case of a burst of calls to that endpoint, we might end up in a weird state.
@run_sequentially_in_context(target_args=["computation.project_id"])
-async def create_computation( # noqa: PLR0913
+async def create_computation( # noqa: PLR0913 # pylint:disable=too-many-positional-arguments
computation: ComputationCreate,
request: Request,
project_repo: Annotated[
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py
index 3ba703a78b78..08396686e431 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/base_scheduler.py
@@ -27,6 +27,7 @@
from models_library.projects_state import RunningState
from models_library.services import ServiceKey, ServiceType, ServiceVersion
from models_library.users import UserID
+from networkx.classes.reportviews import InDegreeView
from pydantic import PositiveInt
from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient
@@ -734,8 +735,10 @@ async def _schedule_tasks_to_start( # noqa: C901
if t.state == RunningState.SUCCESS
}
)
+ dag_in_degree = dag.in_degree()
+ assert isinstance(dag_in_degree, InDegreeView) # nosec
next_task_node_ids = [
- node_id for node_id, degree in dag.in_degree() if degree == 0
+ node_id for node_id, degree in dag_in_degree if degree == 0
]
# get the tasks to start
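
The `isinstance` narrowing above is needed because `DiGraph.in_degree` has a union-like return: called without arguments it yields an `InDegreeView` (an iterable of `(node, degree)` pairs), while `in_degree(node)` returns a plain `int`. A standalone sketch of the pattern used to pick the next runnable tasks:

    import networkx as nx

    dag = nx.DiGraph([("a", "b"), ("b", "c")])
    dag_in_degree = dag.in_degree()  # InDegreeView: iterable of (node, degree)
    next_task_node_ids = [n for n, degree in dag_in_degree if degree == 0]
    print(next_task_node_ids)  # ['a'] -- nodes with no pending upstream tasks
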
diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py
index f107cfa54f5b..937ba4a3f307 100644
--- a/services/director-v2/tests/conftest.py
+++ b/services/director-v2/tests/conftest.py
@@ -42,12 +42,12 @@
"pytest_simcore.docker_registry",
"pytest_simcore.docker_swarm",
"pytest_simcore.environment_configs",
+ "pytest_simcore.faker_projects_data",
"pytest_simcore.faker_users_data",
"pytest_simcore.minio_service",
"pytest_simcore.postgres_service",
"pytest_simcore.pydantic_models",
"pytest_simcore.pytest_global_environs",
- "pytest_simcore.socketio",
"pytest_simcore.rabbit_service",
"pytest_simcore.redis_service",
"pytest_simcore.repository_paths",
@@ -55,6 +55,7 @@
"pytest_simcore.simcore_dask_service",
"pytest_simcore.simcore_services",
"pytest_simcore.simcore_storage_service",
+ "pytest_simcore.socketio",
]
logger = logging.getLogger(__name__)
diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
index 39b7fea4e5da..cd6f8bc22235 100644
--- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
+++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
@@ -1,7 +1,8 @@
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
# pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+# pylint:disable=too-many-positional-arguments
import asyncio
import hashlib
diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
index 646cb788ad7f..4780c2f7a6f4 100644
--- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
+++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py
@@ -1,6 +1,7 @@
-# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
# pylint:disable=too-many-arguments
+# pylint:disable=too-many-positional-arguments
+# pylint:disable=unused-argument
import asyncio
import logging
@@ -229,6 +230,7 @@ async def _mocked_context_manger(*args, **kwargs) -> AsyncIterator[None]:
)
+@pytest.mark.flaky(max_runs=3)
async def test_legacy_and_dynamic_sidecar_run(
initialized_app: FastAPI,
wait_for_catalog_service: Callable[[UserID, str], Awaitable[None]],
diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py
index a01980027c02..f63381c538bc 100644
--- a/services/director-v2/tests/unit/test_modules_dask_client.py
+++ b/services/director-v2/tests/unit/test_modules_dask_client.py
@@ -284,11 +284,6 @@ def project_id() -> ProjectID:
return uuid4()
-@pytest.fixture
-def node_id() -> NodeID:
- return uuid4()
-
-
@dataclass
class ImageParams:
image: Image
diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py
index 7fe676662673..81034fbaee5f 100644
--- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py
+++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py
@@ -4,6 +4,7 @@
# pylint: disable=too-many-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
+# pylint:disable=too-many-positional-arguments
import datetime
import json
diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml
index 358b22fb8aba..9beacf76c343 100644
--- a/services/docker-compose-ops.yml
+++ b/services/docker-compose-ops.yml
@@ -93,7 +93,8 @@ services:
user_notifications:${REDIS_HOST}:${REDIS_PORT}:4:${REDIS_PASSWORD},
announcements:${REDIS_HOST}:${REDIS_PORT}:5:${REDIS_PASSWORD},
distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD},
- deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD}
+ deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD},
+ dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD}
# If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml
ports:
- "18081:8081"
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index af73de611b41..8e8f02db8a23 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -1168,7 +1168,19 @@ services:
# also aof (append only) is also enabled such that we get full durability at the expense
# of backup size. The backup is written into /data.
# https://redis.io/topics/persistence
- [ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "8", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ]
+ [
+ "redis-server",
+ "--save",
+ "60 1",
+ "--loglevel",
+ "verbose",
+ "--databases",
+ "9",
+ "--appendonly",
+ "yes",
+ "--requirepass",
+ "${REDIS_PASSWORD}"
+ ]
networks:
- default
- autoscaling_subnet
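
Redis addresses databases by zero-based index, so the `dynamic_services` database added at index 8 in docker-compose-ops.yml is why `--databases` is bumped to 9 here. A hedged connectivity check (host, port and password below are placeholders, not values from this repository):

    import asyncio

    import redis.asyncio as aioredis

    async def main() -> None:
        # /8 selects the new dynamic_services database; connecting fails with
        # an "invalid DB index" error while the server still allows only 8
        client = aioredis.from_url("redis://:REDIS_PASSWORD@localhost:6379/8")
        print(await client.ping())  # True
        await client.aclose()

    asyncio.run(main())
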
diff --git a/services/dynamic-scheduler/requirements/_base.in b/services/dynamic-scheduler/requirements/_base.in
index 74bc0519c820..ab95aec0daa5 100644
--- a/services/dynamic-scheduler/requirements/_base.in
+++ b/services/dynamic-scheduler/requirements/_base.in
@@ -14,9 +14,10 @@
--requirement ../../../packages/service-library/requirements/_fastapi.in
-
+arrow
fastapi
httpx
packaging
+python-socketio
typer[all]
uvicorn[standard]
diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt
index bab6a9c099e0..f60e814f088c 100644
--- a/services/dynamic-scheduler/requirements/_base.txt
+++ b/services/dynamic-scheduler/requirements/_base.txt
@@ -47,6 +47,8 @@ attrs==23.2.0
# aiohttp
# jsonschema
# referencing
+bidict==0.23.1
+ # via python-socketio
certifi==2024.2.2
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -107,6 +109,7 @@ h11==0.14.0
# via
# httpcore
# uvicorn
+ # wsproto
httpcore==1.0.5
# via httpx
httptools==0.6.1
@@ -265,6 +268,10 @@ python-dateutil==2.9.0.post0
# via arrow
python-dotenv==1.0.1
# via uvicorn
+python-engineio==4.9.1
+ # via python-socketio
+python-socketio==5.11.2
+ # via -r requirements/_base.in
pyyaml==6.0.1
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -306,6 +313,8 @@ setuptools==74.0.0
# via opentelemetry-instrumentation
shellingham==1.5.4
# via typer
+simple-websocket==1.0.0
+ # via python-engineio
six==1.16.0
# via python-dateutil
sniffio==1.3.1
diff --git a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt
index 396d85a70e3d..b48cff66d524 100644
--- a/services/dynamic-scheduler/requirements/_test.txt
+++ b/services/dynamic-scheduler/requirements/_test.txt
@@ -21,7 +21,7 @@ coverage==7.6.1
# pytest-cov
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
h11==0.14.0
# via
@@ -55,7 +55,7 @@ pluggy==1.5.0
# via pytest
pprintpp==0.4.0
# via pytest-icdiff
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt
index e946c9129aad..3f27c470fe35 100644
--- a/services/dynamic-scheduler/requirements/_tools.txt
+++ b/services/dynamic-scheduler/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -47,14 +47,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -65,7 +65,7 @@ pyyaml==6.0.1
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -77,7 +77,7 @@ typing-extensions==4.10.0
# via
# -c requirements/_base.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py
index 088745a07c30..ce43766f5a33 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_dependencies.py
@@ -3,7 +3,8 @@
from servicelib.fastapi.dependencies import get_app, get_reverse_url_mapper
from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient
from servicelib.redis import RedisClientSDK
-from simcore_service_dynamic_scheduler.services.redis import get_redis_client
+from settings_library.redis import RedisDatabase
+from simcore_service_dynamic_scheduler.services.redis import get_all_redis_clients
from ...services.rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_server
@@ -19,8 +20,10 @@ def get_rabbitmq_rpc_server_from_request(request: Request) -> RabbitMQRPCClient:
return get_rabbitmq_rpc_server(request.app)
-def get_redis_client_from_request(request: Request) -> RedisClientSDK:
- return get_redis_client(request.app)
+def get_redis_clients_from_request(
+ request: Request,
+) -> dict[RedisDatabase, RedisClientSDK]:
+ return get_all_redis_clients(request.app)
__all__: tuple[str, ...] = (
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py
index 515602aef7c2..7e87c57fd06e 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rest/_health.py
@@ -9,11 +9,12 @@
)
from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient
from servicelib.redis import RedisClientSDK
+from settings_library.redis import RedisDatabase
from ._dependencies import (
get_rabbitmq_client_from_request,
get_rabbitmq_rpc_server_from_request,
- get_redis_client_from_request,
+ get_redis_clients_from_request,
)
router = APIRouter()
@@ -29,12 +30,17 @@ async def healthcheck(
rabbit_rpc_server: Annotated[
RabbitMQRPCClient, Depends(get_rabbitmq_rpc_server_from_request)
],
- redis_client_sdk: Annotated[RedisClientSDK, Depends(get_redis_client_from_request)],
+ redis_client_sdks: Annotated[
+ dict[RedisDatabase, RedisClientSDK],
+ Depends(get_redis_clients_from_request),
+ ],
):
if not rabbit_client.healthy or not rabbit_rpc_server.healthy:
raise HealthCheckError(RABBITMQ_CLIENT_UNHEALTHY_MSG)
- if not redis_client_sdk.is_healthy:
+ if not all(
+ redis_client_sdk.is_healthy for redis_client_sdk in redis_client_sdks.values()
+ ):
raise HealthCheckError(REDIS_CLIENT_UNHEALTHY_MSG)
return f"{__name__}@{arrow.utcnow().isoformat()}"
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py
index 991aa004703b..0687c58bac12 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/rpc/_services.py
@@ -14,6 +14,7 @@
from ...core.settings import ApplicationSettings
from ...services.director_v2 import DirectorV2Client
+from ...services.service_tracker import set_request_as_running, set_request_as_stopped
router = RPCRouter()
@@ -37,6 +38,7 @@ async def run_dynamic_service(
response: NodeGet | DynamicServiceGet = (
await director_v2_client.run_dynamic_service(dynamic_service_start)
)
+ await set_request_as_running(app, dynamic_service_start)
return response
@@ -59,4 +61,5 @@ async def stop_dynamic_service(
timeout=settings.DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT,
)
)
+ await set_request_as_stopped(app, dynamic_service_stop)
return response
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py
index f1c00211386f..e6ba2bbb53f7 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py
@@ -17,9 +17,13 @@
)
from ..api.rest.routes import setup_rest_api
from ..api.rpc.routes import setup_rpc_api_routes
+from ..services.deferred_manager import setup_deferred_manager
from ..services.director_v2 import setup_director_v2
+from ..services.notifier import setup_notifier
from ..services.rabbitmq import setup_rabbitmq
from ..services.redis import setup_redis
+from ..services.service_tracker import setup_service_tracker
+from ..services.status_monitor import setup_status_monitor
from .settings import ApplicationSettings
@@ -57,10 +61,18 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
# PLUGINS SETUP
setup_director_v2(app)
+
setup_rabbitmq(app)
setup_rpc_api_routes(app)
+
setup_redis(app)
+ setup_notifier(app)
+
+ setup_service_tracker(app)
+ setup_deferred_manager(app)
+ setup_status_monitor(app)
+
setup_rest_api(app)
# ERROR HANDLERS
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py
new file mode 100644
index 000000000000..8544c0f38e6f
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/deferred_manager.py
@@ -0,0 +1,24 @@
+from fastapi import FastAPI
+from servicelib.deferred_tasks import DeferredManager
+from settings_library.rabbit import RabbitSettings
+from settings_library.redis import RedisDatabase
+
+from .redis import get_redis_client
+
+
+def setup_deferred_manager(app: FastAPI) -> None:
+ async def on_startup() -> None:
+ rabbit_settings: RabbitSettings = app.state.settings.DYNAMIC_SCHEDULER_RABBITMQ
+
+ redis_client_sdk = get_redis_client(app, RedisDatabase.DEFERRED_TASKS)
+ app.state.deferred_manager = manager = DeferredManager(
+ rabbit_settings, redis_client_sdk, globals_context={"app": app}
+ )
+ await manager.setup()
+
+ async def on_shutdown() -> None:
+ manager: DeferredManager = app.state.deferred_manager
+ await manager.shutdown()
+
+ app.add_event_handler("startup", on_startup)
+ app.add_event_handler("shutdown", on_shutdown)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py
new file mode 100644
index 000000000000..8cd33e12808f
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/__init__.py
@@ -0,0 +1,7 @@
+from ._notifier import notify_service_status_change
+from ._setup import setup_notifier
+
+__all__: tuple[str, ...] = (
+ "setup_notifier",
+ "notify_service_status_change",
+)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_notifier.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py
similarity index 60%
rename from services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_notifier.py
rename to services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py
index 9f97a889baca..0b8690a96766 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_notifier.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py
@@ -1,20 +1,17 @@
import contextlib
-from pathlib import Path
import socketio # type: ignore[import-untyped]
from fastapi import FastAPI
from fastapi.encoders import jsonable_encoder
-from models_library.api_schemas_dynamic_sidecar.socketio import (
- SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
-)
-from models_library.api_schemas_dynamic_sidecar.telemetry import (
- DiskUsage,
- ServiceDiskUsage,
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_dynamic_scheduler.socketio import (
+ SOCKET_IO_SERVICE_STATUS_EVENT,
)
+from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
from models_library.api_schemas_webserver.socketio import SocketIORoomStr
-from models_library.projects_nodes_io import NodeID
from models_library.users import UserID
from servicelib.fastapi.app_state import SingletonInAppStateMixin
+from servicelib.services_utils import get_status_as_dict
class Notifier(SingletonInAppStateMixin):
@@ -23,26 +20,24 @@ class Notifier(SingletonInAppStateMixin):
def __init__(self, sio_manager: socketio.AsyncAioPikaManager):
self._sio_manager = sio_manager
- async def notify_service_disk_usage(
- self, user_id: UserID, node_id: NodeID, usage: dict[Path, DiskUsage]
+ async def notify_service_status(
+ self, user_id: UserID, status: NodeGet | DynamicServiceGet | NodeGetIdle
) -> None:
await self._sio_manager.emit(
- SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
- data=jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage)),
+ SOCKET_IO_SERVICE_STATUS_EVENT,
+ data=jsonable_encoder(get_status_as_dict(status)),
room=SocketIORoomStr.from_user_id(user_id),
)
-async def publish_disk_usage(
- app: FastAPI, *, user_id: UserID, node_id: NodeID, usage: dict[Path, DiskUsage]
+async def notify_service_status_change(
+ app: FastAPI, user_id: UserID, status: NodeGet | DynamicServiceGet | NodeGetIdle
) -> None:
notifier: Notifier = Notifier.get_from_app_state(app)
- await notifier.notify_service_disk_usage(
- user_id=user_id, node_id=node_id, usage=usage
- )
+ await notifier.notify_service_status(user_id=user_id, status=status)
-def setup_notifier(app: FastAPI):
+def setup(app: FastAPI):
async def _on_startup() -> None:
assert app.state.external_socketio # nosec
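
On the caller's side the renamed helper stays a one-liner; a sketch using the names introduced here (constructing a concrete `status` object is application-specific and elided):

    from fastapi import FastAPI
    from models_library.users import UserID

    from simcore_service_dynamic_scheduler.services.notifier import (
        notify_service_status_change,
    )

    async def _push_status(app: FastAPI, user_id: UserID, status) -> None:
        # status: NodeGet | DynamicServiceGet | NodeGetIdle; the event fans
        # out via RabbitMQ to every socket in the user's room
        await notify_service_status_change(app, user_id, status)
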
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py
new file mode 100644
index 000000000000..1542afa8a87d
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_setup.py
@@ -0,0 +1,8 @@
+from fastapi import FastAPI
+
+from . import _notifier, _socketio
+
+
+def setup_notifier(app: FastAPI):
+ _socketio.setup(app)
+ _notifier.setup(app)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py
new file mode 100644
index 000000000000..2f0abfbd3af1
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_socketio.py
@@ -0,0 +1,32 @@
+import logging
+
+import socketio # type: ignore[import-untyped]
+from fastapi import FastAPI
+from servicelib.socketio_utils import cleanup_socketio_async_pubsub_manager
+
+from ...core.settings import ApplicationSettings
+
+_logger = logging.getLogger(__name__)
+
+
+def setup(app: FastAPI):
+ settings: ApplicationSettings = app.state.settings
+
+ async def _on_startup() -> None:
+ assert app.state.rabbitmq_client # nosec
+
+        # Connect as an external process in write-only mode
+ # SEE https://python-socketio.readthedocs.io/en/stable/server.html#emitting-from-external-processes
+ assert settings.DYNAMIC_SCHEDULER_RABBITMQ # nosec
+ app.state.external_socketio = socketio.AsyncAioPikaManager(
+ url=settings.DYNAMIC_SCHEDULER_RABBITMQ.dsn, logger=_logger, write_only=True
+ )
+
+ async def _on_shutdown() -> None:
+ if external_socketio := getattr(app.state, "external_socketio"): # noqa: B009
+ await cleanup_socketio_async_pubsub_manager(
+ server_manager=external_socketio
+ )
+
+ app.add_event_handler("startup", _on_startup)
+ app.add_event_handler("shutdown", _on_shutdown)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
index 7904d5e1a5df..84131eaf54bf 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
@@ -1,25 +1,46 @@
+from typing import Final
+
from fastapi import FastAPI
-from servicelib.redis import RedisClientSDK
+from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig
from settings_library.redis import RedisDatabase, RedisSettings
+_DECODE_DBS: Final[set[RedisDatabase]] = {
+ RedisDatabase.LOCKS,
+}
+
+_BINARY_DBS: Final[set[RedisDatabase]] = {
+ RedisDatabase.DEFERRED_TASKS,
+ RedisDatabase.DYNAMIC_SERVICES,
+}
+
+_ALL_REDIS_DATABASES: Final[set[RedisDatabase]] = _DECODE_DBS | _BINARY_DBS
+
def setup_redis(app: FastAPI) -> None:
settings: RedisSettings = app.state.settings.DYNAMIC_SCHEDULER_REDIS
async def on_startup() -> None:
- redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
- await client.setup()
+ app.state.redis_clients_manager = manager = RedisClientsManager(
+ {RedisManagerDBConfig(x, decode_responses=False) for x in _BINARY_DBS}
+ | {RedisManagerDBConfig(x, decode_responses=True) for x in _DECODE_DBS},
+ settings,
+ )
+ await manager.setup()
async def on_shutdown() -> None:
- redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk
- if redis_client_sdk:
- await redis_client_sdk.shutdown()
+ manager: RedisClientsManager = app.state.redis_clients_manager
+ await manager.shutdown()
app.add_event_handler("startup", on_startup)
app.add_event_handler("shutdown", on_shutdown)
-def get_redis_client(app: FastAPI) -> RedisClientSDK:
- redis_client_sdk: RedisClientSDK = app.state.redis_client_sdk
- return redis_client_sdk
+def get_redis_client(app: FastAPI, database: RedisDatabase) -> RedisClientSDK:
+ manager: RedisClientsManager = app.state.redis_clients_manager
+ return manager.client(database)
+
+
+def get_all_redis_clients(
+ app: FastAPI,
+) -> dict[RedisDatabase, RedisClientSDK]:
+ return {d: get_redis_client(app, d) for d in _ALL_REDIS_DATABASES}
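
Callers now select a database explicitly instead of receiving the single LOCKS client; a short sketch, assuming the app already ran `setup_redis` on startup:

    from fastapi import FastAPI
    from settings_library.redis import RedisDatabase

    from simcore_service_dynamic_scheduler.services.redis import get_redis_client

    def _example(app: FastAPI) -> None:
        client = get_redis_client(app, RedisDatabase.DYNAMIC_SERVICES)
        # the same per-database check the reworked /health endpoint performs
        assert client.is_healthy
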
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py
new file mode 100644
index 000000000000..abf543d1befa
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py
@@ -0,0 +1,33 @@
+from ._api import (
+ NORMAL_RATE_POLL_INTERVAL,
+ get_all_tracked_services,
+ get_tracked_service,
+ get_user_id_for_service,
+ remove_tracked_service,
+ set_frontned_notified_for_service,
+ set_if_status_changed_for_service,
+ set_request_as_running,
+ set_request_as_stopped,
+ set_service_scheduled_to_run,
+ set_service_status_task_uid,
+ should_notify_frontend_for_service,
+)
+from ._models import TrackedServiceModel
+from ._setup import setup_service_tracker
+
+__all__: tuple[str, ...] = (
+ "get_all_tracked_services",
+ "get_tracked_service",
+ "get_user_id_for_service",
+ "NORMAL_RATE_POLL_INTERVAL",
+ "remove_tracked_service",
+ "set_frontned_notified_for_service",
+ "set_if_status_changed_for_service",
+ "set_request_as_running",
+ "set_request_as_stopped",
+ "set_service_scheduled_to_run",
+ "set_service_status_task_uid",
+ "setup_service_tracker",
+ "should_notify_frontend_for_service",
+ "TrackedServiceModel",
+)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
new file mode 100644
index 000000000000..1b1b4a0d9f8f
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py
@@ -0,0 +1,248 @@
+import inspect
+import logging
+from datetime import timedelta
+from typing import Final
+
+import arrow
+from fastapi import FastAPI
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
+ DynamicServiceStart,
+ DynamicServiceStop,
+)
+from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
+from models_library.projects_nodes_io import NodeID
+from models_library.services_enums import ServiceState
+from models_library.users import UserID
+from servicelib.deferred_tasks import TaskUID
+
+from ._models import SchedulerServiceState, TrackedServiceModel, UserRequestedState
+from ._setup import get_tracker
+
+_logger = logging.getLogger(__name__)
+
+
+_LOW_RATE_POLL_INTERVAL: Final[timedelta] = timedelta(seconds=1)
+NORMAL_RATE_POLL_INTERVAL: Final[timedelta] = timedelta(seconds=5)
+_MAX_PERIOD_WITHOUT_SERVICE_STATUS_UPDATES: Final[timedelta] = timedelta(seconds=60)
+
+
+async def set_request_as_running(
+ app: FastAPI,
+ dynamic_service_start: DynamicServiceStart,
+) -> None:
+ """Stores intention to `start` request"""
+ await get_tracker(app).save(
+ dynamic_service_start.node_uuid,
+ TrackedServiceModel(
+ dynamic_service_start=dynamic_service_start,
+ requested_state=UserRequestedState.RUNNING,
+ project_id=dynamic_service_start.project_id,
+ user_id=dynamic_service_start.user_id,
+ ),
+ )
+
+
+async def set_request_as_stopped(
+ app: FastAPI, dynamic_service_stop: DynamicServiceStop
+) -> None:
+ """Stores intention to `stop` request"""
+ tracker = get_tracker(app)
+ model: TrackedServiceModel | None = await tracker.load(dynamic_service_stop.node_id)
+
+ if model is None:
+ model = TrackedServiceModel(
+ dynamic_service_start=None,
+ user_id=dynamic_service_stop.user_id,
+ project_id=dynamic_service_stop.project_id,
+ requested_state=UserRequestedState.STOPPED,
+ )
+
+ model.requested_state = UserRequestedState.STOPPED
+ await tracker.save(dynamic_service_stop.node_id, model)
+
+
+def _get_service_state(
+ status: NodeGet | DynamicServiceGet | NodeGetIdle,
+) -> ServiceState:
+    # Attribute where each model stores its state:
+ # NodeGet -> service_state
+ # DynamicServiceGet -> state
+ # NodeGetIdle -> service_state
+ state_key = "state" if isinstance(status, DynamicServiceGet) else "service_state"
+
+ state: ServiceState | str = getattr(status, state_key)
+ result: str = state.value if isinstance(state, ServiceState) else state
+ return ServiceState(result)
+
+
+def _get_poll_interval(status: NodeGet | DynamicServiceGet | NodeGetIdle) -> timedelta:
+ if _get_service_state(status) != ServiceState.RUNNING:
+ return _LOW_RATE_POLL_INTERVAL
+
+ return NORMAL_RATE_POLL_INTERVAL
+
+
+def _get_current_scheduler_service_state(
+ requested_state: UserRequestedState,
+ status: NodeGet | DynamicServiceGet | NodeGetIdle,
+) -> SchedulerServiceState:
+ """
+ Computes the `SchedulerServiceState` used internally by the scheduler
+    to decide what to do next with a service.
+ """
+
+ if isinstance(status, NodeGetIdle):
+ return SchedulerServiceState.IDLE
+
+ service_state: ServiceState = _get_service_state(status)
+
+ if requested_state == UserRequestedState.RUNNING:
+ if service_state == ServiceState.RUNNING:
+ return SchedulerServiceState.RUNNING
+
+ if (
+ ServiceState.PENDING # type:ignore[operator]
+ <= service_state
+ <= ServiceState.STARTING
+ ):
+ return SchedulerServiceState.STARTING
+
+ if service_state < ServiceState.PENDING or service_state > ServiceState.RUNNING:
+ return SchedulerServiceState.UNEXPECTED_OUTCOME
+
+ if requested_state == UserRequestedState.STOPPED:
+ if service_state >= ServiceState.RUNNING: # type:ignore[operator]
+ return SchedulerServiceState.STOPPING
+
+ if service_state < ServiceState.RUNNING:
+ return SchedulerServiceState.UNEXPECTED_OUTCOME
+
+ msg = f"Could not determine current_state from: '{requested_state=}', '{status=}'"
+ raise TypeError(msg)
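+
+
+# Decision table implemented above (illustrative summary):
+#   NodeGetIdle, any requested_state               -> IDLE
+#   RUNNING requested, state PENDING..STARTING     -> STARTING
+#   RUNNING requested, state RUNNING               -> RUNNING
+#   RUNNING requested, any other state             -> UNEXPECTED_OUTCOME
+#   STOPPED requested, state >= RUNNING            -> STOPPING
+#   STOPPED requested, state < RUNNING             -> UNEXPECTED_OUTCOME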
+
+
+def _log_skipping_operation(node_id: NodeID) -> None:
+ # the caller is at index 1 (index 0 is the current function)
+ caller_name = inspect.stack()[1].function
+
+ _logger.info(
+ "Could not find a %s entry for node_id %s: skipping %s",
+ TrackedServiceModel.__name__,
+ node_id,
+ caller_name,
+ )
+
+
+async def set_if_status_changed_for_service(
+ app: FastAPI, node_id: NodeID, status: NodeGet | DynamicServiceGet | NodeGetIdle
+) -> bool:
+ """returns ``True`` if the tracker detected a status change"""
+ tracker = get_tracker(app)
+ model: TrackedServiceModel | None = await tracker.load(node_id)
+ if model is None:
+ _log_skipping_operation(node_id)
+ return False
+
+ # set new polling interval in the future
+ model.set_check_status_after_to(_get_poll_interval(status))
+ model.service_status_task_uid = None
+ model.scheduled_to_run = False
+
+ # check if model changed
+ json_status = status.json()
+ if model.service_status != json_status:
+ model.service_status = json_status
+ model.current_state = _get_current_scheduler_service_state(
+ model.requested_state, status
+ )
+ await tracker.save(node_id, model)
+ return True
+
+ return False
+
+
+async def should_notify_frontend_for_service(
+ app: FastAPI, node_id: NodeID, *, status_changed: bool
+) -> bool:
+ """
+ Checks if it's time to notify the frontend.
+    The frontend is notified on status changes and at regular intervals;
+    this avoids sending too many updates.
+ """
+ tracker = get_tracker(app)
+ model: TrackedServiceModel | None = await tracker.load(node_id)
+
+ if model is None:
+ return False
+
+ # check if too much time has passed since the last time an update was sent
+ return (
+ status_changed
+ or arrow.utcnow().timestamp() - model.last_status_notification
+ > _MAX_PERIOD_WITHOUT_SERVICE_STATUS_UPDATES.total_seconds()
+ )
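+
+
+# Example (illustrative): a changed status triggers a notification immediately;
+# an unchanged status is still re-sent once more than
+# _MAX_PERIOD_WITHOUT_SERVICE_STATUS_UPDATES (60 seconds) elapsed since the
+# last notification.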
+
+
+async def set_frontend_notified_for_service(app: FastAPI, node_id: NodeID) -> None:
+ tracker = get_tracker(app)
+ model: TrackedServiceModel | None = await tracker.load(node_id)
+ if model is None:
+ _log_skipping_operation(node_id)
+ return
+
+ model.set_last_status_notification_to_now()
+ await tracker.save(node_id, model)
+
+
+async def set_service_scheduled_to_run(
+ app: FastAPI, node_id: NodeID, delay_from_now: timedelta
+) -> None:
+ tracker = get_tracker(app)
+ model: TrackedServiceModel | None = await tracker.load(node_id)
+ if model is None:
+ _log_skipping_operation(node_id)
+ return
+
+ model.scheduled_to_run = True
+ model.set_check_status_after_to(delay_from_now)
+ await tracker.save(node_id, model)
+
+
+async def set_service_status_task_uid(
+ app: FastAPI, node_id: NodeID, task_uid: TaskUID
+) -> None:
+ tracker = get_tracker(app)
+ model: TrackedServiceModel | None = await tracker.load(node_id)
+ if model is None:
+ _log_skipping_operation(node_id)
+ return
+
+ model.service_status_task_uid = task_uid
+ await tracker.save(node_id, model)
+
+
+async def remove_tracked_service(app: FastAPI, node_id: NodeID) -> None:
+ """
+    Removes the service from tracking (usually after stop completes).
+    NOTE: does not raise if node_id is not found
+ """
+ await get_tracker(app).delete(node_id)
+
+
+async def get_tracked_service(
+ app: FastAPI, node_id: NodeID
+) -> TrackedServiceModel | None:
+ """Returns information about the tracked service"""
+ return await get_tracker(app).load(node_id)
+
+
+async def get_all_tracked_services(app: FastAPI) -> dict[NodeID, TrackedServiceModel]:
+ """Returns all tracked services"""
+ return await get_tracker(app).all()
+
+
+async def get_user_id_for_service(app: FastAPI, node_id: NodeID) -> UserID | None:
+ """returns user_id for the service"""
+ model: TrackedServiceModel | None = await get_tracker(app).load(node_id)
+ return model.user_id if model else None
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py
new file mode 100644
index 000000000000..985ca8feef5a
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py
@@ -0,0 +1,123 @@
+import pickle
+from dataclasses import dataclass, field
+from datetime import timedelta
+from enum import auto
+
+import arrow
+from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
+ DynamicServiceStart,
+)
+from models_library.projects import ProjectID
+from models_library.users import UserID
+from models_library.utils.enums import StrAutoEnum
+from servicelib.deferred_tasks import TaskUID
+
+
+class UserRequestedState(StrAutoEnum):
+ RUNNING = auto()
+ STOPPED = auto()
+
+
+class SchedulerServiceState(StrAutoEnum):
+ # service was started and is running as expected
+ RUNNING = auto()
+ # service is not present
+ IDLE = auto()
+ # something went wrong while starting/stopping service
+ UNEXPECTED_OUTCOME = auto()
+
+ # service is being started
+ STARTING = auto()
+ # service is being stopped
+ STOPPING = auto()
+
+ # service status has not been determined
+ UNKNOWN = auto()
+
+
+@dataclass
+class TrackedServiceModel: # pylint:disable=too-many-instance-attributes
+
+ dynamic_service_start: DynamicServiceStart | None = field(
+ metadata={
+ "description": (
+ "used to create the service in any given moment if the requested_state is RUNNING"
+ "can be set to None only when stopping the service"
+ )
+ }
+ )
+
+ user_id: UserID | None = field(
+ metadata={
+ "description": "required for propagating status changes to the frontend"
+ }
+ )
+ project_id: ProjectID | None = field(
+ metadata={
+ "description": "required for propagating status changes to the frontend"
+ }
+ )
+
+ requested_state: UserRequestedState = field(
+ metadata={
+ "description": (
+ "status of the service desidered by the user RUNNING or STOPPED"
+ )
+ }
+ )
+
+ current_state: SchedulerServiceState = field(
+ default=SchedulerServiceState.UNKNOWN,
+ metadata={
+ "description": "to set after parsing the incoming state via the API calls"
+ },
+ )
+
+ #############################
+ ### SERVICE STATUS UPDATE ###
+ #############################
+
+ scheduled_to_run: bool = field(
+ default=False,
+ metadata={"description": "set when a job will be immediately scheduled"},
+ )
+
+ service_status: str = field(
+ default="",
+ metadata={
+ "description": "stored for debug mainly this is used to compute ``current_state``"
+ },
+ )
+ service_status_task_uid: TaskUID | None = field(
+ default=None,
+ metadata={"description": "uid of the job currently fetching the status"},
+ )
+
+ check_status_after: float = field(
+ default_factory=lambda: arrow.utcnow().timestamp(),
+ metadata={"description": "used to determine when to poll the status again"},
+ )
+
+ last_status_notification: float = field(
+ default=0,
+ metadata={
+ "description": "used to determine when was the last time the status was notified"
+ },
+ )
+
+ def set_check_status_after_to(self, delay_from_now: timedelta) -> None:
+ self.check_status_after = (arrow.utcnow() + delay_from_now).timestamp()
+
+ def set_last_status_notification_to_now(self) -> None:
+ self.last_status_notification = arrow.utcnow().timestamp()
+
+ #####################
+ ### SERIALIZATION ###
+ #####################
+
+ def to_bytes(self) -> bytes:
+ return pickle.dumps(self)
+
+ @classmethod
+ def from_bytes(cls, data: bytes) -> "TrackedServiceModel":
+ return pickle.loads(data) # type: ignore # noqa: S301
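+
+
+# Round-trip sketch (illustrative; mirrors the serialization tests):
+#   model = TrackedServiceModel(
+#       dynamic_service_start=None,
+#       user_id=None,
+#       project_id=None,
+#       requested_state=UserRequestedState.RUNNING,
+#   )
+#   assert TrackedServiceModel.from_bytes(model.to_bytes()) == model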
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py
new file mode 100644
index 000000000000..40a47bb8becc
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_setup.py
@@ -0,0 +1,19 @@
+from fastapi import FastAPI
+from settings_library.redis import RedisDatabase
+
+from ..redis import get_redis_client
+from ._tracker import Tracker
+
+
+def setup_service_tracker(app: FastAPI) -> None:
+ async def on_startup() -> None:
+ app.state.service_tracker = Tracker(
+ get_redis_client(app, RedisDatabase.DYNAMIC_SERVICES)
+ )
+
+ app.add_event_handler("startup", on_startup)
+
+
+def get_tracker(app: FastAPI) -> Tracker:
+ tracker: Tracker = app.state.service_tracker
+ return tracker
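+
+
+# Wiring sketch (illustrative): the application factory calls
+# setup_service_tracker(app) at startup; afterwards any code holding the
+# FastAPI app can obtain the tracker via get_tracker(app).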
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py
new file mode 100644
index 000000000000..489cee153105
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py
@@ -0,0 +1,44 @@
+from dataclasses import dataclass
+from typing import Final
+
+from models_library.projects_nodes_io import NodeID
+from servicelib.redis import RedisClientSDK
+
+from ._models import TrackedServiceModel
+
+_KEY_PREFIX: Final[str] = "t::"
+
+
+def _get_key(node_id: NodeID) -> str:
+ return f"{_KEY_PREFIX}{node_id}"
+
+
+@dataclass
+class Tracker:
+ redis_client_sdk: RedisClientSDK
+
+ async def save(self, node_id: NodeID, model: TrackedServiceModel) -> None:
+ await self.redis_client_sdk.redis.set(_get_key(node_id), model.to_bytes())
+
+ async def load(self, node_id: NodeID) -> TrackedServiceModel | None:
+ model_as_bytes: bytes | None = await self.redis_client_sdk.redis.get(
+ _get_key(node_id)
+ )
+ return (
+ None
+ if model_as_bytes is None
+ else TrackedServiceModel.from_bytes(model_as_bytes)
+ )
+
+ async def delete(self, node_id: NodeID) -> None:
+ await self.redis_client_sdk.redis.delete(_get_key(node_id))
+
+ async def all(self) -> dict[NodeID, TrackedServiceModel]:
+ found_keys = await self.redis_client_sdk.redis.keys(f"{_KEY_PREFIX}*")
+ found_values = await self.redis_client_sdk.redis.mget(found_keys)
+
+ return {
+            # NOTE: removeprefix (not lstrip) strips the literal key prefix
+            NodeID(k.decode().removeprefix(_KEY_PREFIX)): TrackedServiceModel.from_bytes(v)
+ for k, v in zip(found_keys, found_values, strict=True)
+ if v is not None
+ }
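+
+
+# Storage layout sketch (illustrative): entries live under "t::<node_id>":
+#   await tracker.save(node_id, model)  # SET  t::<node_id> <pickled model>
+#   await tracker.load(node_id)         # GET  t::<node_id>
+#   await tracker.all()                 # KEYS t::* followed by MGET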
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py
new file mode 100644
index 000000000000..263451243252
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/__init__.py
@@ -0,0 +1,3 @@
+from ._setup import setup_status_monitor
+
+__all__: tuple[str, ...] = ("setup_status_monitor",)
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py
new file mode 100644
index 000000000000..f710204504c2
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py
@@ -0,0 +1,85 @@
+import logging
+from datetime import timedelta
+
+from fastapi import FastAPI
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_directorv2.dynamic_services_service import (
+ RunningDynamicServiceDetails,
+)
+from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
+from models_library.projects_nodes_io import NodeID
+from models_library.users import UserID
+from servicelib.deferred_tasks import BaseDeferredHandler, TaskUID
+from servicelib.deferred_tasks._base_deferred_handler import DeferredContext
+
+from .. import service_tracker
+from ..director_v2 import DirectorV2Client
+from ..notifier import notify_service_status_change
+
+_logger = logging.getLogger(__name__)
+
+
+class DeferredGetStatus(BaseDeferredHandler[NodeGet | DynamicServiceGet | NodeGetIdle]):
+ @classmethod
+ async def get_timeout(cls, context: DeferredContext) -> timedelta:
+ assert context # nosec
+ return timedelta(seconds=5)
+
+ @classmethod
+ async def start( # type:ignore[override] # pylint:disable=arguments-differ
+ cls, node_id: NodeID
+ ) -> DeferredContext:
+ _logger.debug("Getting service status for %s", node_id)
+ return {"node_id": node_id}
+
+ @classmethod
+ async def on_created(cls, task_uid: TaskUID, context: DeferredContext) -> None:
+ """called after deferred was scheduled to run"""
+ app: FastAPI = context["app"]
+ node_id: NodeID = context["node_id"]
+
+ await service_tracker.set_service_status_task_uid(app, node_id, task_uid)
+
+ @classmethod
+ async def run(
+ cls, context: DeferredContext
+ ) -> NodeGet | DynamicServiceGet | NodeGetIdle:
+ app: FastAPI = context["app"]
+ node_id: NodeID = context["node_id"]
+
+ director_v2_client: DirectorV2Client = DirectorV2Client.get_from_app_state(app)
+ service_status: NodeGet | RunningDynamicServiceDetails | NodeGetIdle = (
+ await director_v2_client.get_status(node_id)
+ )
+ _logger.debug(
+ "Service status type=%s, %s", type(service_status), service_status
+ )
+ return service_status
+
+ @classmethod
+ async def on_result(
+ cls, result: NodeGet | DynamicServiceGet | NodeGetIdle, context: DeferredContext
+ ) -> None:
+ app: FastAPI = context["app"]
+ node_id: NodeID = context["node_id"]
+
+ _logger.debug("Received status for service '%s': '%s'", node_id, result)
+
+ status_changed: bool = await service_tracker.set_if_status_changed_for_service(
+ app, node_id, result
+ )
+ if await service_tracker.should_notify_frontend_for_service(
+ app, node_id, status_changed=status_changed
+ ):
+ user_id: UserID | None = await service_tracker.get_user_id_for_service(
+ app, node_id
+ )
+ if user_id:
+ await notify_service_status_change(app, user_id, result)
+            await service_tracker.set_frontend_notified_for_service(app, node_id)
+ else:
+ _logger.info(
+ "Did not find a user for '%s', skipping status delivery of: %s",
+ node_id,
+ result,
+ )
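+
+
+# Lifecycle sketch (illustrative): start() schedules the deferred task,
+# on_created() stores its TaskUID in the tracker, run() queries director-v2
+# for the status, and on_result() updates the tracker and, when required,
+# notifies the frontend.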
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py
new file mode 100644
index 000000000000..0d8b5a2723f3
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py
@@ -0,0 +1,121 @@
+import logging
+from datetime import timedelta
+from functools import cached_property
+from typing import Final
+
+import arrow
+from fastapi import FastAPI
+from models_library.projects_nodes_io import NodeID
+from pydantic import NonNegativeFloat, NonNegativeInt
+from servicelib.background_task import stop_periodic_task
+from servicelib.redis_utils import start_exclusive_periodic_task
+from servicelib.utils import limited_gather
+from settings_library.redis import RedisDatabase
+
+from .. import service_tracker
+from ..redis import get_redis_client
+from ..service_tracker import NORMAL_RATE_POLL_INTERVAL, TrackedServiceModel
+from ..service_tracker._models import SchedulerServiceState, UserRequestedState
+from ._deferred_get_status import DeferredGetStatus
+
+_logger = logging.getLogger(__name__)
+
+_INTERVAL_BETWEEN_CHECKS: Final[timedelta] = timedelta(seconds=1)
+_MAX_CONCURRENCY: Final[NonNegativeInt] = 10
+
+
+async def _start_get_status_deferred(
+ app: FastAPI, node_id: NodeID, *, next_check_delay: timedelta
+) -> None:
+ await service_tracker.set_service_scheduled_to_run(app, node_id, next_check_delay)
+ await DeferredGetStatus.start(node_id=node_id)
+
+
+class Monitor:
+ def __init__(self, app: FastAPI, status_worker_interval: timedelta) -> None:
+ self.app = app
+ self.status_worker_interval = status_worker_interval
+
+ @cached_property
+ def status_worker_interval_seconds(self) -> NonNegativeFloat:
+ return self.status_worker_interval.total_seconds()
+
+ async def _worker_start_get_status_requests(self) -> None:
+ """
+        Checks if a service requires its status to be polled.
+        Note that the interval at which the status is polled can vary.
+        This is a relatively low-resource check.
+        """
+
+        # NOTE: this worker runs only once across all instances of the scheduler
+
+ models: dict[
+ NodeID, TrackedServiceModel
+ ] = await service_tracker.get_all_tracked_services(self.app)
+
+ to_remove: list[NodeID] = []
+ to_start: list[NodeID] = []
+
+ current_timestamp = arrow.utcnow().timestamp()
+
+ for node_id, model in models.items():
+ # check if service is idle and status polling should stop
+ if (
+ model.current_state == SchedulerServiceState.IDLE
+ and model.requested_state == UserRequestedState.STOPPED
+ ):
+ to_remove.append(node_id)
+ continue
+
+ job_not_running = not (
+ model.scheduled_to_run
+ and model.service_status_task_uid is not None
+ and await DeferredGetStatus.is_present(model.service_status_task_uid)
+ )
+ wait_period_finished = current_timestamp > model.check_status_after
+ if job_not_running and wait_period_finished:
+ to_start.append(node_id)
+ else:
+ _logger.info(
+ "Skipping status check for %s, because: %s or %s",
+ node_id,
+ f"{job_not_running=}",
+ (
+ f"{wait_period_finished=}"
+ if wait_period_finished
+ else f"can_start_in={model.check_status_after - current_timestamp}"
+ ),
+ )
+
+ _logger.debug("Removing tracked services: '%s'", to_remove)
+ await limited_gather(
+ *(
+ service_tracker.remove_tracked_service(self.app, node_id)
+ for node_id in to_remove
+ ),
+ limit=_MAX_CONCURRENCY,
+ )
+
+ _logger.debug("Poll status for tracked services: '%s'", to_start)
+ await limited_gather(
+ *(
+ _start_get_status_deferred(
+ self.app, node_id, next_check_delay=NORMAL_RATE_POLL_INTERVAL
+ )
+ for node_id in to_start
+ ),
+ limit=_MAX_CONCURRENCY,
+ )
+
+ async def setup(self) -> None:
+ self.app.state.status_monitor_background_task = start_exclusive_periodic_task(
+ get_redis_client(self.app, RedisDatabase.LOCKS),
+ self._worker_start_get_status_requests,
+ task_period=_INTERVAL_BETWEEN_CHECKS,
+ retry_after=_INTERVAL_BETWEEN_CHECKS,
+ task_name="periodic_service_status_update",
+ )
+
+ async def shutdown(self) -> None:
+ if getattr(self.app.state, "status_monitor_background_task", None):
+ await stop_periodic_task(self.app.state.status_monitor_background_task)
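+
+
+# Scheduling sketch (illustrative): _worker_start_get_status_requests runs
+# every _INTERVAL_BETWEEN_CHECKS on a single scheduler instance (guarded by an
+# exclusive Redis lock); it drops services that are IDLE with a STOPPED
+# request and re-queues a status poll for services whose check_status_after
+# deadline expired.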
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py
new file mode 100644
index 000000000000..8f9601464bcb
--- /dev/null
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_setup.py
@@ -0,0 +1,28 @@
+from datetime import timedelta
+from typing import Final
+
+from fastapi import FastAPI
+
+from ._monitor import Monitor
+
+_STATUS_WORKER_DEFAULT_INTERVAL: Final[timedelta] = timedelta(seconds=1)
+
+
+def setup_status_monitor(app: FastAPI) -> None:
+ async def on_startup() -> None:
+ app.state.status_monitor = monitor = Monitor(
+ app, status_worker_interval=_STATUS_WORKER_DEFAULT_INTERVAL
+ )
+ await monitor.setup()
+
+ async def on_shutdown() -> None:
+ monitor: Monitor = app.state.status_monitor
+ await monitor.shutdown()
+
+ app.add_event_handler("startup", on_startup)
+ app.add_event_handler("shutdown", on_shutdown)
+
+
+def get_monitor(app: FastAPI) -> Monitor:
+ monitor: Monitor = app.state.status_monitor
+ return monitor
diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py
index ff72140f5ee7..2cb14086b2a2 100644
--- a/services/dynamic-scheduler/tests/conftest.py
+++ b/services/dynamic-scheduler/tests/conftest.py
@@ -4,6 +4,7 @@
import string
from collections.abc import AsyncIterator
from pathlib import Path
+from typing import Final
import pytest
import simcore_service_dynamic_scheduler
@@ -13,6 +14,9 @@
from pytest_mock import MockerFixture
from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
from pytest_simcore.helpers.typing_env import EnvVarsDict
+from servicelib.redis import RedisClientsManager, RedisManagerDBConfig
+from servicelib.utils import logged_gather
+from settings_library.redis import RedisDatabase, RedisSettings
from simcore_service_dynamic_scheduler.core.application import create_app
pytest_plugins = [
@@ -20,6 +24,7 @@
"pytest_simcore.docker_compose",
"pytest_simcore.docker_swarm",
"pytest_simcore.environment_configs",
+ "pytest_simcore.faker_projects_data",
"pytest_simcore.rabbit_service",
"pytest_simcore.redis_service",
"pytest_simcore.repository_paths",
@@ -73,17 +78,38 @@ def app_environment(
)
+_PATH_APPLICATION: Final[str] = "simcore_service_dynamic_scheduler.core.application"
+
+
@pytest.fixture
def disable_rabbitmq_setup(mocker: MockerFixture) -> None:
- base_path = "simcore_service_dynamic_scheduler.core.application"
- mocker.patch(f"{base_path}.setup_rabbitmq")
- mocker.patch(f"{base_path}.setup_rpc_api_routes")
+ mocker.patch(f"{_PATH_APPLICATION}.setup_rabbitmq")
+ mocker.patch(f"{_PATH_APPLICATION}.setup_rpc_api_routes")
@pytest.fixture
def disable_redis_setup(mocker: MockerFixture) -> None:
- base_path = "simcore_service_dynamic_scheduler.core.application"
- mocker.patch(f"{base_path}.setup_redis")
+ mocker.patch(f"{_PATH_APPLICATION}.setup_redis")
+
+
+@pytest.fixture
+def disable_service_tracker_setup(mocker: MockerFixture) -> None:
+ mocker.patch(f"{_PATH_APPLICATION}.setup_service_tracker")
+
+
+@pytest.fixture
+def disable_deferred_manager_setup(mocker: MockerFixture) -> None:
+ mocker.patch(f"{_PATH_APPLICATION}.setup_deferred_manager")
+
+
+@pytest.fixture
+def disable_notifier_setup(mocker: MockerFixture) -> None:
+ mocker.patch(f"{_PATH_APPLICATION}.setup_notifier")
+
+
+@pytest.fixture
+def disable_status_monitor_setup(mocker: MockerFixture) -> None:
+ mocker.patch(f"{_PATH_APPLICATION}.setup_status_monitor")
MAX_TIME_FOR_APP_TO_STARTUP = 10
@@ -101,3 +127,13 @@ async def app(
shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN,
):
yield test_app
+
+
+@pytest.fixture
+async def remove_redis_data(redis_service: RedisSettings) -> None:
+ async with RedisClientsManager(
+ {RedisManagerDBConfig(x) for x in RedisDatabase}, redis_service
+ ) as manager:
+ await logged_gather(
+ *[manager.client(d).redis.flushall() for d in RedisDatabase]
+ )
diff --git a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py
index 987ed8c4d851..efef4241d981 100644
--- a/services/dynamic-scheduler/tests/unit/api_rest/conftest.py
+++ b/services/dynamic-scheduler/tests/unit/api_rest/conftest.py
@@ -1,13 +1,31 @@
+# pylint:disable=redefined-outer-name
+# pylint:disable=unused-argument
from collections.abc import AsyncIterator
import pytest
from fastapi import FastAPI
from httpx import AsyncClient
from httpx._transports.asgi import ASGITransport
+from pytest_simcore.helpers.typing_env import EnvVarsDict
@pytest.fixture
-async def client(app: FastAPI) -> AsyncIterator[AsyncClient]:
+def app_environment(
+ disable_rabbitmq_setup: None,
+ disable_redis_setup: None,
+ disable_service_tracker_setup: None,
+ disable_deferred_manager_setup: None,
+ disable_notifier_setup: None,
+ disable_status_monitor_setup: None,
+ app_environment: EnvVarsDict,
+) -> EnvVarsDict:
+ return app_environment
+
+
+@pytest.fixture
+async def client(
+ app_environment: EnvVarsDict, app: FastAPI
+) -> AsyncIterator[AsyncClient]:
# - Needed for app to trigger start/stop event handlers
# - Prefer this client instead of fastapi.testclient.TestClient
async with AsyncClient(
diff --git a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py
index 8cc1c3279efd..9b5648e12b4e 100644
--- a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py
+++ b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__health.py
@@ -21,7 +21,6 @@ def __init__(self, is_ok: bool) -> None:
@pytest.fixture
def mock_rabbitmq_clients(
- disable_rabbitmq_setup: None,
mocker: MockerFixture,
rabbit_client_ok: bool,
rabbit_rpc_server_ok: bool,
@@ -39,11 +38,13 @@ def mock_rabbitmq_clients(
@pytest.fixture
def mock_redis_client(
- disable_redis_setup: None, mocker: MockerFixture, redis_client_ok: bool
+ mocker: MockerFixture,
+ redis_client_ok: bool,
) -> None:
base_path = "simcore_service_dynamic_scheduler.api.rest._dependencies"
mocker.patch(
- f"{base_path}.get_redis_client", return_value=MockHealth(redis_client_ok)
+ f"{base_path}.get_all_redis_clients",
+ return_value={0: MockHealth(redis_client_ok)},
)
diff --git a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py
index 6e68190bcee9..8d986dfe60ed 100644
--- a/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py
+++ b/services/dynamic-scheduler/tests/unit/api_rest/test_api_rest__meta.py
@@ -1,24 +1,11 @@
# pylint:disable=redefined-outer-name
# pylint:disable=unused-argument
-
-
-import pytest
from fastapi import status
from httpx import AsyncClient
-from pytest_simcore.helpers.typing_env import EnvVarsDict
from simcore_service_dynamic_scheduler._meta import API_VTAG
from simcore_service_dynamic_scheduler.models.schemas.meta import Meta
-@pytest.fixture
-def app_environment(
- disable_rabbitmq_setup: None,
- disable_redis_setup: None,
- app_environment: EnvVarsDict,
-) -> EnvVarsDict:
- return app_environment
-
-
async def test_health(client: AsyncClient):
response = await client.get(f"/{API_VTAG}/meta")
assert response.status_code == status.HTTP_200_OK
diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py
index 7c8dada1e183..c484f722ff95 100644
--- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py
+++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py
@@ -59,7 +59,7 @@ def service_status_new_style() -> DynamicServiceGet:
@pytest.fixture
def service_status_legacy() -> NodeGet:
- return NodeGet.parse_obj(NodeGet.Config.schema_extra["example"])
+ return NodeGet.parse_obj(NodeGet.Config.schema_extra["examples"][1])
@pytest.fixture
diff --git a/services/dynamic-scheduler/tests/unit/conftest.py b/services/dynamic-scheduler/tests/unit/conftest.py
new file mode 100644
index 000000000000..642ed2170ce1
--- /dev/null
+++ b/services/dynamic-scheduler/tests/unit/conftest.py
@@ -0,0 +1,29 @@
+from collections.abc import Callable
+from copy import deepcopy
+
+import pytest
+from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
+ DynamicServiceStart,
+ DynamicServiceStop,
+)
+from models_library.projects_nodes_io import NodeID
+
+
+@pytest.fixture
+def get_dynamic_service_start() -> Callable[[NodeID], DynamicServiceStart]:
+ def _(node_id: NodeID) -> DynamicServiceStart:
+ dict_data = deepcopy(DynamicServiceStart.Config.schema_extra["example"])
+ dict_data["service_uuid"] = f"{node_id}"
+ return DynamicServiceStart.parse_obj(dict_data)
+
+ return _
+
+
+@pytest.fixture
+def get_dynamic_service_stop() -> Callable[[NodeID], DynamicServiceStop]:
+ def _(node_id: NodeID) -> DynamicServiceStop:
+ dict_data = deepcopy(DynamicServiceStop.Config.schema_extra["example"])
+ dict_data["node_id"] = f"{node_id}"
+ return DynamicServiceStop.parse_obj(dict_data)
+
+ return _
diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py
new file mode 100644
index 000000000000..0755f7e5d786
--- /dev/null
+++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py
@@ -0,0 +1,325 @@
+# pylint:disable=redefined-outer-name
+# pylint:disable=unused-argument
+
+from collections.abc import Callable
+from copy import deepcopy
+from datetime import timedelta
+from typing import Any, Final, NamedTuple
+from uuid import uuid4
+
+import pytest
+from faker import Faker
+from fastapi import FastAPI
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
+ DynamicServiceStart,
+ DynamicServiceStop,
+)
+from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
+from models_library.projects_nodes_io import NodeID
+from models_library.services_enums import ServiceState
+from pydantic import NonNegativeInt
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from servicelib.deferred_tasks import TaskUID
+from servicelib.utils import limited_gather
+from settings_library.redis import RedisSettings
+from simcore_service_dynamic_scheduler.services.service_tracker import (
+ get_all_tracked_services,
+ get_tracked_service,
+ remove_tracked_service,
+ set_if_status_changed_for_service,
+ set_request_as_running,
+ set_request_as_stopped,
+ set_service_status_task_uid,
+)
+from simcore_service_dynamic_scheduler.services.service_tracker._api import (
+ _LOW_RATE_POLL_INTERVAL,
+ NORMAL_RATE_POLL_INTERVAL,
+ _get_current_scheduler_service_state,
+ _get_poll_interval,
+)
+from simcore_service_dynamic_scheduler.services.service_tracker._models import (
+ SchedulerServiceState,
+ UserRequestedState,
+)
+
+pytest_simcore_core_services_selection = [
+ "redis",
+]
+
+
+@pytest.fixture
+def app_environment(
+ disable_rabbitmq_setup: None,
+ disable_deferred_manager_setup: None,
+ disable_notifier_setup: None,
+ app_environment: EnvVarsDict,
+ redis_service: RedisSettings,
+ remove_redis_data: None,
+) -> EnvVarsDict:
+ return app_environment
+
+
+async def test_services_tracker_set_as_running_set_as_stopped(
+ app: FastAPI,
+ node_id: NodeID,
+ get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart],
+ get_dynamic_service_stop: Callable[[NodeID], DynamicServiceStop],
+):
+ async def _remove_service() -> None:
+ await remove_tracked_service(app, node_id)
+ assert await get_tracked_service(app, node_id) is None
+ assert await get_all_tracked_services(app) == {}
+
+ async def _set_as_running() -> None:
+ await set_request_as_running(app, get_dynamic_service_start(node_id))
+ tracked_model = await get_tracked_service(app, node_id)
+ assert tracked_model
+ assert tracked_model.requested_state == UserRequestedState.RUNNING
+
+ async def _set_as_stopped() -> None:
+ await set_request_as_stopped(app, get_dynamic_service_stop(node_id))
+ tracked_model = await get_tracked_service(app, node_id)
+ assert tracked_model
+ assert tracked_model.requested_state == UserRequestedState.STOPPED
+
+ # request as running then as stopped
+ await _remove_service()
+ await _set_as_running()
+ await _set_as_stopped()
+
+ # request as stopped then as running
+ await _remove_service()
+ await _set_as_stopped()
+ await _set_as_running()
+
+
+@pytest.mark.parametrize("item_count", [100])
+async def test_services_tracker_workflow(
+ app: FastAPI,
+ node_id: NodeID,
+ item_count: NonNegativeInt,
+ get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart],
+ get_dynamic_service_stop: Callable[[NodeID], DynamicServiceStop],
+):
+ # ensure more than one service can be tracked
+ await limited_gather(
+ *[
+ set_request_as_stopped(app, get_dynamic_service_stop(uuid4()))
+ for _ in range(item_count)
+ ],
+ limit=100,
+ )
+ assert len(await get_all_tracked_services(app)) == item_count
+
+
+@pytest.mark.parametrize(
+ "status",
+ [
+ *[NodeGet.parse_obj(o) for o in NodeGet.Config.schema_extra["examples"]],
+ *[
+ DynamicServiceGet.parse_obj(o)
+ for o in DynamicServiceGet.Config.schema_extra["examples"]
+ ],
+ NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]),
+ ],
+)
+async def test_set_if_status_changed(
+ app: FastAPI,
+ node_id: NodeID,
+ status: NodeGet | DynamicServiceGet | NodeGetIdle,
+ get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart],
+):
+ await set_request_as_running(app, get_dynamic_service_start(node_id))
+
+ assert await set_if_status_changed_for_service(app, node_id, status) is True
+
+ assert await set_if_status_changed_for_service(app, node_id, status) is False
+
+ model = await get_tracked_service(app, node_id)
+ assert model
+
+ assert model.service_status == status.json()
+
+
+async def test_set_service_status_task_uid(
+ app: FastAPI,
+ node_id: NodeID,
+ faker: Faker,
+ get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart],
+):
+ await set_request_as_running(app, get_dynamic_service_start(node_id))
+
+ task_uid = TaskUID(faker.uuid4())
+ await set_service_status_task_uid(app, node_id, task_uid)
+
+ model = await get_tracked_service(app, node_id)
+ assert model
+
+ assert model.service_status_task_uid == task_uid
+
+
+@pytest.mark.parametrize(
+ "status, expected_poll_interval",
+ [
+ (
+ NodeGet.parse_obj(NodeGet.Config.schema_extra["examples"][1]),
+ _LOW_RATE_POLL_INTERVAL,
+ ),
+ *[
+ (DynamicServiceGet.parse_obj(o), NORMAL_RATE_POLL_INTERVAL)
+ for o in DynamicServiceGet.Config.schema_extra["examples"]
+ ],
+ (
+ NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]),
+ _LOW_RATE_POLL_INTERVAL,
+ ),
+ ],
+)
+def test__get_poll_interval(
+ status: NodeGet | DynamicServiceGet | NodeGetIdle, expected_poll_interval: timedelta
+):
+ assert _get_poll_interval(status) == expected_poll_interval
+
+
+def _get_node_get_from(service_state: ServiceState) -> NodeGet:
+    # deepcopy avoids mutating the class-level schema example
+    dict_data = deepcopy(NodeGet.Config.schema_extra["examples"][1])
+ assert "service_state" in dict_data
+ dict_data["service_state"] = service_state
+ return NodeGet.parse_obj(dict_data)
+
+
+def _get_dynamic_service_get_from(
+ service_state: ServiceState,
+) -> DynamicServiceGet:
+    # deepcopy avoids mutating the class-level schema example
+    dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][1])
+ assert "state" in dict_data
+ dict_data["state"] = service_state
+ return DynamicServiceGet.parse_obj(dict_data)
+
+
+def _get_node_get_idle() -> NodeGetIdle:
+ return NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"])
+
+
+def __get_flat_list(nested_list: list[list[Any]]) -> list[Any]:
+ return [item for sublist in nested_list for item in sublist]
+
+
+class ServiceStatusToSchedulerState(NamedTuple):
+ requested: UserRequestedState
+ service_status: NodeGet | DynamicServiceGet | NodeGetIdle
+ expected: SchedulerServiceState
+
+
+_EXPECTED_TEST_CASES: list[list[ServiceStatusToSchedulerState]] = [
+ [
+ # UserRequestedState.RUNNING
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.PENDING),
+ SchedulerServiceState.STARTING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.PULLING),
+ SchedulerServiceState.STARTING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.STARTING),
+ SchedulerServiceState.STARTING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.RUNNING),
+ SchedulerServiceState.RUNNING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.COMPLETE),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.FAILED),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ status_generator(ServiceState.STOPPING),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.RUNNING,
+ _get_node_get_idle(),
+ SchedulerServiceState.IDLE,
+ ),
+ # UserRequestedState.STOPPED
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.PENDING),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.PULLING),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.STARTING),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.RUNNING),
+ SchedulerServiceState.STOPPING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.COMPLETE),
+ SchedulerServiceState.STOPPING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.FAILED),
+ SchedulerServiceState.UNEXPECTED_OUTCOME,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ status_generator(ServiceState.STOPPING),
+ SchedulerServiceState.STOPPING,
+ ),
+ ServiceStatusToSchedulerState(
+ UserRequestedState.STOPPED,
+ _get_node_get_idle(),
+ SchedulerServiceState.IDLE,
+ ),
+ ]
+ for status_generator in (
+ _get_node_get_from,
+ _get_dynamic_service_get_from,
+ )
+]
+_FLAT_EXPECTED_TEST_CASES: list[ServiceStatusToSchedulerState] = __get_flat_list(
+ _EXPECTED_TEST_CASES
+)
+# ensure enum changes do not break above rules
+_NODE_STATUS_FORMATS_COUNT: Final[int] = 2
+assert (
+ len(_FLAT_EXPECTED_TEST_CASES)
+ == len(ServiceState) * len(UserRequestedState) * _NODE_STATUS_FORMATS_COUNT
+)
+
+
+@pytest.mark.parametrize("service_status_to_scheduler_state", _FLAT_EXPECTED_TEST_CASES)
+def test__get_current_scheduler_service_state(
+ service_status_to_scheduler_state: ServiceStatusToSchedulerState,
+):
+ assert (
+ _get_current_scheduler_service_state(
+ service_status_to_scheduler_state.requested,
+ service_status_to_scheduler_state.service_status,
+ )
+ == service_status_to_scheduler_state.expected
+ )
diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py
new file mode 100644
index 000000000000..6b8e31321b38
--- /dev/null
+++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__models.py
@@ -0,0 +1,57 @@
+from datetime import timedelta
+
+import arrow
+import pytest
+from faker import Faker
+from servicelib.deferred_tasks import TaskUID
+from simcore_service_dynamic_scheduler.services.service_tracker._models import (
+ SchedulerServiceState,
+ TrackedServiceModel,
+ UserRequestedState,
+)
+
+
+@pytest.mark.parametrize("requested_state", UserRequestedState)
+@pytest.mark.parametrize("current_state", SchedulerServiceState)
+@pytest.mark.parametrize("check_status_after", [1, arrow.utcnow().timestamp()])
+@pytest.mark.parametrize("service_status_task_uid", [None, TaskUID("ok")])
+def test_serialization(
+ faker: Faker,
+ requested_state: UserRequestedState,
+ current_state: SchedulerServiceState,
+ check_status_after: float,
+ service_status_task_uid: TaskUID | None,
+):
+ tracked_model = TrackedServiceModel(
+ dynamic_service_start=None,
+ user_id=None,
+ project_id=None,
+ requested_state=requested_state,
+ current_state=current_state,
+ service_status=faker.pystr(),
+ check_status_after=check_status_after,
+ service_status_task_uid=service_status_task_uid,
+ )
+
+ as_bytes = tracked_model.to_bytes()
+ assert as_bytes
+ assert TrackedServiceModel.from_bytes(as_bytes) == tracked_model
+
+
+async def test_set_check_status_after_to():
+ model = TrackedServiceModel(
+ dynamic_service_start=None,
+ user_id=None,
+ project_id=None,
+ requested_state=UserRequestedState.RUNNING,
+ )
+ assert model.check_status_after < arrow.utcnow().timestamp()
+
+ delay = timedelta(seconds=4)
+
+ before = (arrow.utcnow() + delay).timestamp()
+ model.set_check_status_after_to(delay)
+ after = (arrow.utcnow() + delay).timestamp()
+
+ assert model.check_status_after
+ assert before < model.check_status_after < after
diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py
new file mode 100644
index 000000000000..59739ddf8f60
--- /dev/null
+++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__tracker.py
@@ -0,0 +1,94 @@
+# pylint:disable=redefined-outer-name
+# pylint:disable=unused-argument
+
+from uuid import uuid4
+
+import pytest
+from fastapi import FastAPI
+from models_library.projects_nodes_io import NodeID
+from pydantic import NonNegativeInt
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from servicelib.utils import logged_gather
+from settings_library.redis import RedisSettings
+from simcore_service_dynamic_scheduler.services.service_tracker._models import (
+ TrackedServiceModel,
+ UserRequestedState,
+)
+from simcore_service_dynamic_scheduler.services.service_tracker._setup import (
+ get_tracker,
+)
+from simcore_service_dynamic_scheduler.services.service_tracker._tracker import Tracker
+
+pytest_simcore_core_services_selection = [
+ "redis",
+]
+
+
+@pytest.fixture
+def app_environment(
+ disable_rabbitmq_setup: None,
+ disable_deferred_manager_setup: None,
+ disable_notifier_setup: None,
+ app_environment: EnvVarsDict,
+ redis_service: RedisSettings,
+ remove_redis_data: None,
+) -> EnvVarsDict:
+ return app_environment
+
+
+@pytest.fixture
+def tracker(app: FastAPI) -> Tracker:
+ return get_tracker(app)
+
+
+async def test_tracker_workflow(tracker: Tracker):
+ node_id: NodeID = uuid4()
+
+ # ensure does not already exist
+ result = await tracker.load(node_id)
+ assert result is None
+
+ # node creation
+ model = TrackedServiceModel(
+ dynamic_service_start=None,
+ user_id=None,
+ project_id=None,
+ requested_state=UserRequestedState.RUNNING,
+ )
+ await tracker.save(node_id, model)
+
+ # check if exists
+ result = await tracker.load(node_id)
+ assert result == model
+
+ # remove and check is missing
+ await tracker.delete(node_id)
+ result = await tracker.load(node_id)
+ assert result is None
+
+
+@pytest.mark.parametrize("item_count", [100])
+async def test_tracker_listing(tracker: Tracker, item_count: NonNegativeInt) -> None:
+ assert await tracker.all() == {}
+
+ model_to_insert = TrackedServiceModel(
+ dynamic_service_start=None,
+ user_id=None,
+ project_id=None,
+ requested_state=UserRequestedState.RUNNING,
+ )
+
+ data_to_insert = {uuid4(): model_to_insert for _ in range(item_count)}
+
+ await logged_gather(
+ *[tracker.save(k, v) for k, v in data_to_insert.items()], max_concurrency=100
+ )
+
+ response = await tracker.all()
+ for key in response:
+ assert isinstance(key, NodeID)
+ assert response == data_to_insert
+
+
+async def test_remove_missing_key_does_not_raise_error(tracker: Tracker):
+ await tracker.delete(uuid4())
diff --git a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py
new file mode 100644
index 000000000000..2dd5270b627a
--- /dev/null
+++ b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py
@@ -0,0 +1,416 @@
+# pylint:disable=redefined-outer-name
+# pylint:disable=too-many-positional-arguments
+# pylint:disable=unused-argument
+
+import json
+import re
+from collections.abc import AsyncIterable, Callable
+from copy import deepcopy
+from typing import Any
+from unittest.mock import AsyncMock
+from uuid import uuid4
+
+import pytest
+import respx
+from fastapi import FastAPI, status
+from fastapi.encoders import jsonable_encoder
+from httpx import Request, Response
+from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
+from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
+ DynamicServiceStart,
+ DynamicServiceStop,
+)
+from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
+from models_library.projects_nodes_io import NodeID
+from pydantic import NonNegativeInt
+from pytest_mock import MockerFixture
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from settings_library.rabbit import RabbitSettings
+from settings_library.redis import RedisSettings
+from simcore_service_dynamic_scheduler.services.service_tracker import (
+ get_all_tracked_services,
+ set_request_as_running,
+ set_request_as_stopped,
+)
+from simcore_service_dynamic_scheduler.services.status_monitor import _monitor
+from simcore_service_dynamic_scheduler.services.status_monitor._deferred_get_status import (
+ DeferredGetStatus,
+)
+from simcore_service_dynamic_scheduler.services.status_monitor._monitor import Monitor
+from simcore_service_dynamic_scheduler.services.status_monitor._setup import get_monitor
+from tenacity import AsyncRetrying
+from tenacity.retry import retry_if_exception_type
+from tenacity.stop import stop_after_delay
+from tenacity.wait import wait_fixed
+
+pytest_simcore_core_services_selection = [
+ "rabbit",
+ "redis",
+]
+
+
+@pytest.fixture
+def app_environment(
+ app_environment: EnvVarsDict,
+ rabbit_service: RabbitSettings,
+ redis_service: RedisSettings,
+ remove_redis_data: None,
+) -> EnvVarsDict:
+ return app_environment
+
+
+_DEFAULT_NODE_ID: NodeID = uuid4()
+
+
+def _add_to_dict(dict_data: dict, entries: list[tuple[str, Any]]) -> None:
+ for key, data in entries:
+ assert key in dict_data
+ dict_data[key] = data
+
+
+def _get_node_get_with(state: str, node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGet:
+ dict_data = deepcopy(NodeGet.Config.schema_extra["examples"][1])
+ _add_to_dict(
+ dict_data,
+ [
+ ("service_state", state),
+ ("service_uuid", f"{node_id}"),
+ ],
+ )
+ return NodeGet.parse_obj(dict_data)
+
+
+def _get_dynamic_service_get_legacy_with(
+ state: str, node_id: NodeID = _DEFAULT_NODE_ID
+) -> DynamicServiceGet:
+ dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][0])
+ _add_to_dict(
+ dict_data,
+ [
+ ("state", state),
+ ("uuid", f"{node_id}"),
+ ("node_uuid", f"{node_id}"),
+ ],
+ )
+ return DynamicServiceGet.parse_obj(dict_data)
+
+
+def _get_dynamic_service_get_new_style_with(
+ state: str, node_id: NodeID = _DEFAULT_NODE_ID
+) -> DynamicServiceGet:
+ dict_data = deepcopy(DynamicServiceGet.Config.schema_extra["examples"][1])
+ _add_to_dict(
+ dict_data,
+ [
+ ("state", state),
+ ("uuid", f"{node_id}"),
+ ("node_uuid", f"{node_id}"),
+ ],
+ )
+ return DynamicServiceGet.parse_obj(dict_data)
+
+
+def _get_node_get_idle(node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGetIdle:
+    dict_data = deepcopy(NodeGetIdle.Config.schema_extra["example"])
+ _add_to_dict(
+ dict_data,
+ [
+ ("service_uuid", f"{node_id}"),
+ ],
+ )
+ return NodeGetIdle.parse_obj(dict_data)
+
+
+class _ResponseTimeline:
+ def __init__(
+ self, timeline: list[NodeGet | DynamicServiceGet | NodeGetIdle]
+ ) -> None:
+ self._timeline = timeline
+
+ self._client_access_history: dict[NodeID, NonNegativeInt] = {}
+
+ @property
+ def entries(self) -> list[NodeGet | DynamicServiceGet | NodeGetIdle]:
+ return self._timeline
+
+ def __len__(self) -> int:
+ return len(self._timeline)
+
+ def get_status(self, node_id: NodeID) -> NodeGet | DynamicServiceGet | NodeGetIdle:
+ if node_id not in self._client_access_history:
+ self._client_access_history[node_id] = 0
+
+ # always return node idle when timeline finished playing
+ if self._client_access_history[node_id] >= len(self._timeline):
+ return _get_node_get_idle()
+
+ status = self._timeline[self._client_access_history[node_id]]
+ self._client_access_history[node_id] += 1
+ return status
+
+
+async def _assert_call_to(
+ deferred_status_spies: dict[str, AsyncMock], *, method: str, count: NonNegativeInt
+) -> None:
+ async for attempt in AsyncRetrying(
+ reraise=True,
+ stop=stop_after_delay(1),
+ wait=wait_fixed(0.01),
+ retry=retry_if_exception_type(AssertionError),
+ ):
+ with attempt:
+ call_count = deferred_status_spies[method].call_count
+ assert (
+ call_count == count
+ ), f"Received calls {call_count} != {count} (expected) to '{method}'"
+
+
+async def _assert_result(
+ deferred_status_spies: dict[str, AsyncMock],
+ *,
+ timeline: list[NodeGet | DynamicServiceGet | NodeGetIdle],
+) -> None:
+ async for attempt in AsyncRetrying(
+ reraise=True,
+ stop=stop_after_delay(1),
+ wait=wait_fixed(0.01),
+ retry=retry_if_exception_type(AssertionError),
+ ):
+ with attempt:
+
+ assert deferred_status_spies["on_result"].call_count == len(timeline)
+ assert [
+ x.args[0] for x in deferred_status_spies["on_result"].call_args_list
+ ] == timeline
+
+
+async def _assert_notification_count(
+ mock: AsyncMock, expected_count: NonNegativeInt
+) -> None:
+ async for attempt in AsyncRetrying(
+ reraise=True,
+ stop=stop_after_delay(1),
+ wait=wait_fixed(0.01),
+ retry=retry_if_exception_type(AssertionError),
+ ):
+ with attempt:
+ assert mock.call_count == expected_count
+
+
+@pytest.fixture
+async def mock_director_v2_status(
+ app: FastAPI, response_timeline: _ResponseTimeline
+) -> AsyncIterable[None]:
+ def _side_effect_node_status_response(request: Request) -> Response:
+ node_id = NodeID(f"{request.url}".split("/")[-1])
+
+ service_status = response_timeline.get_status(node_id)
+
+ if isinstance(service_status, NodeGet):
+ return Response(
+ status.HTTP_200_OK,
+ text=json.dumps(jsonable_encoder({"data": service_status.dict()})),
+ )
+ if isinstance(service_status, DynamicServiceGet):
+ return Response(status.HTTP_200_OK, text=service_status.json())
+ if isinstance(service_status, NodeGetIdle):
+ return Response(status.HTTP_404_NOT_FOUND)
+
+ raise TypeError
+
+ with respx.mock(
+ base_url=app.state.settings.DYNAMIC_SCHEDULER_DIRECTOR_V2_SETTINGS.api_base_url,
+ assert_all_called=False,
+ assert_all_mocked=True,
+ ) as mock:
+ mock.get(re.compile(r"/dynamic_services/([\w-]+)")).mock(
+ side_effect=_side_effect_node_status_response
+ )
+ yield
+
+
+@pytest.fixture
+def monitor(mock_director_v2_status: None, app: FastAPI) -> Monitor:
+ return get_monitor(app)
+
+
+@pytest.fixture
+def deferred_status_spies(mocker: MockerFixture) -> dict[str, AsyncMock]:
+ results: dict[str, AsyncMock] = {}
+ for method_name in (
+ "start",
+ "on_result",
+ "on_created",
+ "run",
+ "on_finished_with_error",
+ ):
+ mock_method = mocker.AsyncMock(wraps=getattr(DeferredGetStatus, method_name))
+ mocker.patch.object(DeferredGetStatus, method_name, mock_method)
+ results[method_name] = mock_method
+
+ return results
+
+
+@pytest.fixture
+def remove_tracked_spy(mocker: MockerFixture) -> AsyncMock:
+ mock_method = mocker.AsyncMock(
+ wraps=_monitor.service_tracker.remove_tracked_service
+ )
+ return mocker.patch.object(
+ _monitor.service_tracker,
+ _monitor.service_tracker.remove_tracked_service.__name__,
+ mock_method,
+ )
+
+
+@pytest.fixture
+def node_id() -> NodeID:
+ return _DEFAULT_NODE_ID
+
+
+@pytest.fixture
+def mocked_notify_frontend(mocker: MockerFixture) -> AsyncMock:
+ return mocker.patch(
+ "simcore_service_dynamic_scheduler.services.status_monitor._deferred_get_status.notify_service_status_change"
+ )
+
+
+@pytest.fixture
+def disable_status_monitor_background_task(mocker: MockerFixture) -> None:
+ mocker.patch(
+ "simcore_service_dynamic_scheduler.services.status_monitor._monitor.Monitor.setup"
+ )
+
+
+@pytest.mark.parametrize(
+ "user_requests_running, response_timeline, expected_notification_count, remove_tracked_count",
+ [
+ pytest.param(
+ True,
+ _ResponseTimeline([_get_node_get_with("running")]),
+ 1,
+ 0,
+ id="requested_running_state_changes_1_no_task_removal",
+ ),
+ pytest.param(
+ True,
+ _ResponseTimeline(
+ [_get_dynamic_service_get_legacy_with("running") for _ in range(10)]
+ ),
+ 1,
+ 0,
+ id="requested_running_state_changes_1_for_multiple_same_state_no_task_removal",
+ ),
+ pytest.param(
+ True,
+ _ResponseTimeline([_get_node_get_idle()]),
+ 1,
+ 0,
+ id="requested_running_state_idle_no_removal",
+ ),
+ pytest.param(
+ False,
+ _ResponseTimeline([_get_node_get_idle()]),
+ 1,
+ 1,
+ id="requested_stopped_state_idle_is_removed",
+ ),
+ pytest.param(
+ True,
+ _ResponseTimeline(
+ [
+ *[_get_node_get_idle() for _ in range(10)],
+ _get_dynamic_service_get_new_style_with("pending"),
+ _get_dynamic_service_get_new_style_with("pulling"),
+ *[
+ _get_dynamic_service_get_new_style_with("starting")
+ for _ in range(10)
+ ],
+ _get_dynamic_service_get_new_style_with("running"),
+ _get_dynamic_service_get_new_style_with("stopping"),
+ _get_dynamic_service_get_new_style_with("complete"),
+ _get_node_get_idle(),
+ ]
+ ),
+ 8,
+ 0,
+ id="requested_running_state_changes_8_no_removal",
+ ),
+ pytest.param(
+ False,
+ _ResponseTimeline(
+ [
+ _get_dynamic_service_get_new_style_with("pending"),
+ _get_dynamic_service_get_new_style_with("pulling"),
+ *[
+ _get_dynamic_service_get_new_style_with("starting")
+ for _ in range(10)
+ ],
+ _get_dynamic_service_get_new_style_with("running"),
+ _get_dynamic_service_get_new_style_with("stopping"),
+ _get_dynamic_service_get_new_style_with("complete"),
+ _get_node_get_idle(),
+ ]
+ ),
+ 7,
+ 1,
+ id="requested_stopped_state_changes_7_is_removed",
+ ),
+ ],
+)
+async def test_expected_calls_to_notify_frontend( # pylint:disable=too-many-arguments
+ disable_status_monitor_background_task: None,
+ mocked_notify_frontend: AsyncMock,
+ deferred_status_spies: dict[str, AsyncMock],
+ remove_tracked_spy: AsyncMock,
+ app: FastAPI,
+ monitor: Monitor,
+ node_id: NodeID,
+ user_requests_running: bool,
+ response_timeline: _ResponseTimeline,
+ expected_notification_count: NonNegativeInt,
+ remove_tracked_count: NonNegativeInt,
+ get_dynamic_service_start: Callable[[NodeID], DynamicServiceStart],
+ get_dynamic_service_stop: Callable[[NodeID], DynamicServiceStop],
+):
+ assert await get_all_tracked_services(app) == {}
+
+ if user_requests_running:
+ await set_request_as_running(app, get_dynamic_service_start(node_id))
+ else:
+ await set_request_as_stopped(app, get_dynamic_service_stop(node_id))
+
+ entries_in_timeline = len(response_timeline)
+
+ for i in range(entries_in_timeline):
+ async for attempt in AsyncRetrying(
+ reraise=True, stop=stop_after_delay(10), wait=wait_fixed(0.1)
+ ):
+ with attempt:
+ # pylint:disable=protected-access
+ await monitor._worker_start_get_status_requests() # noqa: SLF001
+ for method in ("start", "on_created", "on_result"):
+ await _assert_call_to(
+ deferred_status_spies, method=method, count=i + 1
+ )
+
+ await _assert_call_to(
+ deferred_status_spies, method="run", count=entries_in_timeline
+ )
+ await _assert_call_to(
+ deferred_status_spies, method="on_finished_with_error", count=0
+ )
+
+ await _assert_result(deferred_status_spies, timeline=response_timeline.entries)
+
+ await _assert_notification_count(
+ mocked_notify_frontend, expected_notification_count
+ )
+
+ async for attempt in AsyncRetrying(
+ reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.1)
+ ):
+ with attempt:
+ # pylint:disable=protected-access
+ await monitor._worker_start_get_status_requests() # noqa: SLF001
+ assert remove_tracked_spy.call_count == remove_tracked_count
diff --git a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py
index feefc0c1aa4a..eadb7c9ee038 100644
--- a/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py
+++ b/services/dynamic-scheduler/tests/unit/test_services_rabbitmq.py
@@ -21,6 +21,10 @@
@pytest.fixture
def app_environment(
disable_redis_setup: None,
+ disable_service_tracker_setup: None,
+ disable_deferred_manager_setup: None,
+ disable_notifier_setup: None,
+ disable_status_monitor_setup: None,
app_environment: EnvVarsDict,
rabbit_service: RabbitSettings,
) -> EnvVarsDict:
diff --git a/services/dynamic-scheduler/tests/unit/test_services_redis.py b/services/dynamic-scheduler/tests/unit/test_services_redis.py
index 7a7d90063851..059a17aeb0fc 100644
--- a/services/dynamic-scheduler/tests/unit/test_services_redis.py
+++ b/services/dynamic-scheduler/tests/unit/test_services_redis.py
@@ -6,7 +6,7 @@
from fastapi import FastAPI
from pytest_simcore.helpers.typing_env import EnvVarsDict
from settings_library.redis import RedisSettings
-from simcore_service_dynamic_scheduler.services.redis import get_redis_client
+from simcore_service_dynamic_scheduler.services.redis import get_all_redis_clients
pytest_simcore_core_services_selection = [
"redis",
@@ -16,6 +16,9 @@
@pytest.fixture
def app_environment(
disable_rabbitmq_setup: None,
+ disable_deferred_manager_setup: None,
+ disable_notifier_setup: None,
+ disable_status_monitor_setup: None,
app_environment: EnvVarsDict,
redis_service: RedisSettings,
) -> EnvVarsDict:
@@ -23,5 +26,6 @@ def app_environment(
async def test_health(app: FastAPI):
- redis_client = get_redis_client(app)
- assert await redis_client.ping() is True
+ redis_clients = get_all_redis_clients(app)
+ for redis_client in redis_clients.values():
+ assert await redis_client.ping() is True
diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt
index ad204462b214..3b248a300cdc 100644
--- a/services/dynamic-sidecar/requirements/_test.txt
+++ b/services/dynamic-sidecar/requirements/_test.txt
@@ -11,7 +11,7 @@ aiohttp==3.9.3
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aiobotocore
-aioitertools==0.11.0
+aioitertools==0.12.0
# via aiobotocore
aiosignal==1.3.1
# via
@@ -45,7 +45,7 @@ coverage==7.6.1
# via pytest-cov
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
flaky==3.8.1
# via -r requirements/_test.in
@@ -75,7 +75,7 @@ multidict==6.0.5
# aiohttp
# async-asgi-testclient
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -85,7 +85,7 @@ packaging==24.0
# pytest
pluggy==1.5.0
# via pytest
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -128,13 +128,13 @@ sqlalchemy==1.4.52
# -r requirements/_test.in
sqlalchemy2-stubs==0.0.2a38
# via sqlalchemy
-types-aiobotocore-s3==2.13.2
+types-aiobotocore-s3==2.15.1
# via -r requirements/_test.in
types-aiofiles==24.1.0.20240626
# via -r requirements/_test.in
-types-psutil==6.0.0.20240621
+types-psutil==6.0.0.20240901
# via -r requirements/_test.in
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.11.0
# via
diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt
index 088c9a9396fe..4eed4827cf84 100644
--- a/services/dynamic-sidecar/requirements/_tools.txt
+++ b/services/dynamic-sidecar/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -50,14 +50,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -81,7 +81,7 @@ typing-extensions==4.11.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py
index ae04a620c8a2..52b0e2e7ad64 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_long_running_tasks.py
@@ -209,6 +209,7 @@ async def ports_inputs_pull_task(
request: Request,
tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)],
app: Annotated[FastAPI, Depends(get_application)],
+ settings: Annotated[ApplicationSettings, Depends(get_settings)],
mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)],
inputs_state: Annotated[InputsState, Depends(get_inputs_state)],
port_keys: list[str] | None = None,
@@ -223,6 +224,7 @@ async def ports_inputs_pull_task(
port_keys=port_keys,
mounted_volumes=mounted_volumes,
app=app,
+ settings=settings,
inputs_pulling_enabled=inputs_state.inputs_pulling_enabled,
)
except TaskAlreadyRunningError as e:
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
index f5910ffbffee..20029cac7fcb 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py
@@ -19,6 +19,7 @@
from ..modules.attribute_monitor import setup_attribute_monitor
from ..modules.inputs import setup_inputs
from ..modules.mounted_fs import MountedVolumes, setup_mounted_fs
+from ..modules.notifications import setup_notifications
from ..modules.outputs import setup_outputs
from ..modules.prometheus_metrics import setup_prometheus_metrics
from ..modules.resource_tracking import setup_resource_tracking
@@ -172,6 +173,7 @@ def create_app():
setup_rabbitmq(app)
setup_background_log_fetcher(app)
setup_resource_tracking(app)
+ setup_notifications(app)
setup_system_monitor(app)
setup_mounted_fs(app)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py
index 5925e7d7fe20..83389547c776 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/attribute_monitor/_watchdog_extensions.py
@@ -20,7 +20,9 @@ def __init__(self, path, recursive=False): # pylint:disable=super-init-not-call
# overwrite the `InotifyBuffer.__init__` method
BaseThread.__init__(self) # pylint:disable=non-parent-init-called
self._queue = DelayedQueue(self.delay)
- self._inotify = Inotify(path, recursive, InotifyConstants.IN_ATTRIB)
+ self._inotify = Inotify( # pylint:disable=too-many-function-args
+ path, recursive, InotifyConstants.IN_ATTRIB
+ )
self.start()
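For context on the wrapped call above: watchdog's stock `InotifyBuffer` hard-codes which inotify events it subscribes to, so the sidecar re-implements `__init__` (deliberately skipping the parent constructor) to hand a narrower event mask to the low-level `Inotify` reader. A sketch of the full subclass as the diff implies it; the watchdog-internal import paths are assumptions based on watchdog's current layout and may shift between releases:

```python
# assumed watchdog-internal paths; verify against the pinned watchdog version
from watchdog.observers.inotify_buffer import InotifyBuffer
from watchdog.observers.inotify_c import Inotify, InotifyConstants
from watchdog.utils import BaseThread
from watchdog.utils.delayed_queue import DelayedQueue


class AttribInotifyBuffer(InotifyBuffer):
    """An InotifyBuffer that only watches attribute changes (IN_ATTRIB)."""

    def __init__(self, path, recursive=False):  # pylint:disable=super-init-not-called
        # re-implement InotifyBuffer.__init__ rather than call it, so the
        # event mask can be narrowed to IN_ATTRIB only
        BaseThread.__init__(self)  # pylint:disable=non-parent-init-called
        self._queue = DelayedQueue(self.delay)
        self._inotify = Inotify(  # pylint:disable=too-many-function-args
            path, recursive, InotifyConstants.IN_ATTRIB
        )
        self.start()
```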
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py
index a8277415b06b..0134d481f78e 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py
@@ -52,6 +52,7 @@
from ..models.shared_store import SharedStore
from ..modules import nodeports, user_services_preferences
from ..modules.mounted_fs import MountedVolumes
+from ..modules.notifications._notifications_ports import PortNotifier
from ..modules.outputs import OutputsManager, event_propagation_disabled
from .long_running_tasksutils import run_before_shutdown_actions
from .resource_tracking import send_service_started, send_service_stopped
@@ -472,6 +473,7 @@ async def task_ports_inputs_pull(
port_keys: list[str] | None,
mounted_volumes: MountedVolumes,
app: FastAPI,
+ settings: ApplicationSettings,
*,
inputs_pulling_enabled: bool,
) -> int:
@@ -505,6 +507,12 @@ async def task_ports_inputs_pull(
post_sidecar_log_message, app, log_level=logging.INFO
),
progress_bar=root_progress,
+ port_notifier=PortNotifier(
+ app,
+ settings.DY_SIDECAR_USER_ID,
+ settings.DY_SIDECAR_PROJECT_ID,
+ settings.DY_SIDECAR_NODE_ID,
+ ),
)
await post_sidecar_log_message(
app, "Finished pulling inputs", log_level=logging.INFO
@@ -541,6 +549,7 @@ async def task_ports_outputs_pull(
post_sidecar_log_message, app, log_level=logging.INFO
),
progress_bar=root_progress,
+ port_notifier=None,
)
await post_sidecar_log_message(
app, "Finished pulling outputs", log_level=logging.INFO
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py
index 2213dd1d4ac9..0ad00f2c18da 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py
@@ -4,6 +4,7 @@
import shutil
import sys
import time
+from asyncio import CancelledError
from collections import deque
from collections.abc import Coroutine
from contextlib import AsyncExitStack
@@ -24,16 +25,17 @@
from servicelib.file_utils import remove_directory
from servicelib.logging_utils import log_context
from servicelib.progress_bar import ProgressBarData
-from servicelib.utils import logged_gather
+from servicelib.utils import limited_gather
from simcore_sdk import node_ports_v2
from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB
from simcore_sdk.node_ports_v2 import Port
from simcore_sdk.node_ports_v2.links import ItemConcreteValue
-from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports
+from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports, OutputsCallbacks
from simcore_sdk.node_ports_v2.port import SetKWargs
from simcore_sdk.node_ports_v2.port_utils import is_file_type
from ..core.settings import ApplicationSettings, get_settings
+from ..modules.notifications import PortNotifier
class PortTypeName(str, Enum):
@@ -70,13 +72,27 @@ def _get_size_of_value(value: tuple[ItemConcreteValue | None, SetKWargs | None])
)
-# NOTE: outputs_manager guarantees that no parallel calls
-# to this function occur
-async def upload_outputs(
+class OutputCallbacksWrapper(OutputsCallbacks):
+ def __init__(self, port_notifier: PortNotifier) -> None:
+ self.port_notifier = port_notifier
+
+ async def aborted(self, key: ServicePortKey) -> None:
+ await self.port_notifier.send_output_port_upload_was_aborted(key)
+
+    # NOTE: spelling matches the OutputsCallbacks interface this overrides
+    async def finished_succesfully(self, key: ServicePortKey) -> None:
+ await self.port_notifier.send_output_port_upload_finished_successfully(key)
+
+ async def finished_with_error(self, key: ServicePortKey) -> None:
+ await self.port_notifier.send_output_port_upload_finished_with_error(key)
+
+
+# NOTE: outputs_manager guarantees that no parallel calls to this function occur
+async def upload_outputs( # pylint:disable=too-many-statements # noqa: PLR0915, C901
outputs_path: Path,
port_keys: list[str],
io_log_redirect_cb: LogRedirectCB | None,
progress_bar: ProgressBarData,
+ port_notifier: PortNotifier,
) -> None:
# pylint: disable=too-many-branches
logger.debug("uploading data to simcore...")
@@ -97,12 +113,17 @@ async def upload_outputs(
ServicePortKey, tuple[ItemConcreteValue | None, SetKWargs | None]
] = {}
archiving_tasks: deque[Coroutine[None, None, None]] = deque()
- ports_to_set = [
+ ports_to_set: list[Port] = [
port_value
for port_value in (await PORTS.outputs).values()
if (not port_keys) or (port_value.key in port_keys)
]
+ await limited_gather(
+        *(port_notifier.send_output_port_upload_started(p.key) for p in ports_to_set),
+ limit=4,
+ )
+
async with AsyncExitStack() as stack:
sub_progress = await stack.enter_async_context(
progress_bar.sub_progress(
@@ -147,13 +168,34 @@ async def upload_outputs(
# when having multiple directories it is important to
# run the compression in parallel to guarantee better performance
+ async def _archive_dir_notified(
+ dir_to_compress: Path, destination: Path, port_key: ServicePortKey
+ ) -> None:
+            # Errors and cancellation can also be raised while archiving
+ try:
+ await archive_dir(
+ dir_to_compress=dir_to_compress,
+ destination=destination,
+ compress=False,
+ store_relative_path=True,
+ progress_bar=sub_progress,
+ )
+ except CancelledError:
+ await port_notifier.send_output_port_upload_was_aborted(
+ port_key
+ )
+ raise
+ except Exception:
+ await port_notifier.send_output_port_upload_finished_with_error(
+ port_key
+ )
+ raise
+
archiving_tasks.append(
- archive_dir(
+ _archive_dir_notified(
dir_to_compress=src_folder,
destination=tmp_file,
- compress=False,
- store_relative_path=True,
- progress_bar=sub_progress,
+ port_key=port.key,
)
)
ports_values[port.key] = (
@@ -176,9 +218,13 @@ async def upload_outputs(
logger.debug("No file %s to fetch port values from", data_file)
if archiving_tasks:
- await logged_gather(*archiving_tasks)
+ await limited_gather(*archiving_tasks, limit=4)
- await PORTS.set_multiple(ports_values, progress_bar=sub_progress)
+ await PORTS.set_multiple(
+ ports_values,
+ progress_bar=sub_progress,
+ outputs_callbacks=OutputCallbacksWrapper(port_notifier),
+ )
elapsed_time = time.perf_counter() - start_time
total_bytes = sum(_get_size_of_value(x) for x in ports_values.values())
@@ -264,6 +310,7 @@ async def download_target_ports(
port_keys: list[str],
io_log_redirect_cb: LogRedirectCB,
progress_bar: ProgressBarData,
+ port_notifier: PortNotifier | None,
) -> ByteSize:
logger.debug("retrieving data from simcore...")
start_time = time.perf_counter()
@@ -279,22 +326,46 @@ async def download_target_ports(
)
# let's gather all the data
- ports_to_get = [
+ ports_to_get: list[Port] = [
port_value
for port_value in (await getattr(PORTS, port_type_name.value)).values()
if (not port_keys) or (port_value.key in port_keys)
]
+
+    async def _get_data_from_port_notified(
+ port: Port, progress_bar: ProgressBarData
+ ) -> tuple[Port, ItemConcreteValue | None, ByteSize]:
+ assert port_notifier is not None
+ await port_notifier.send_input_port_download_started(port.key)
+ try:
+ result = await _get_data_from_port(
+ port, target_dir=target_dir, progress_bar=progress_bar
+ )
+            await port_notifier.send_input_port_download_finished_successfully(port.key)
+ return result
+
+ except CancelledError:
+ await port_notifier.send_input_port_download_was_aborted(port.key)
+ raise
+ except Exception:
+ await port_notifier.send_input_port_download_finished_with_error(port.key)
+ raise
+
async with progress_bar.sub_progress(
steps=len(ports_to_get), description=IDStr("downloading")
) as sub_progress:
- results = await logged_gather(
+ results = await limited_gather(
*[
- _get_data_from_port(
- port, target_dir=target_dir, progress_bar=sub_progress
+ (
+ _get_data_from_port(
+ port, target_dir=target_dir, progress_bar=sub_progress
+ )
+ if port_type_name == PortTypeName.OUTPUTS
+                    else _get_data_from_port_notified(port, progress_bar=sub_progress)
)
for port in ports_to_get
],
- max_concurrency=2,
+ limit=2,
)
# parse results
data = {
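Both transfer directions in this file now share one shape: report that a port transfer started, run the work, and report success, abort (`CancelledError`), or failure before re-raising. Distilled into a standalone helper (names here are illustrative; the PR inlines this logic in `_archive_dir_notified` and `_get_data_from_port_notified`):

```python
import asyncio
from collections.abc import Awaitable, Callable

Notify = Callable[[], Awaitable[None]]


async def run_notified(
    work: Callable[[], Awaitable[None]],
    *,
    on_started: Notify,
    on_success: Notify,
    on_aborted: Notify,
    on_error: Notify,
) -> None:
    # cancellation is reported separately from failure, and both are
    # re-raised so callers (e.g. limited_gather) still see the outcome
    await on_started()
    try:
        await work()
    except asyncio.CancelledError:
        await on_aborted()
        raise
    except Exception:
        await on_error()
        raise
    else:
        await on_success()
```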
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py
new file mode 100644
index 000000000000..18254b1d23c1
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/__init__.py
@@ -0,0 +1,9 @@
+from ._notifications_ports import PortNotifier
+from ._notifications_system_monitor import publish_disk_usage
+from ._setup import setup_notifications
+
+__all__: tuple[str, ...] = (
+ "PortNotifier",
+ "publish_disk_usage",
+ "setup_notifications",
+)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py
new file mode 100644
index 000000000000..ae48f19a973f
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py
@@ -0,0 +1,78 @@
+from dataclasses import dataclass
+
+from fastapi import FastAPI
+from models_library.api_schemas_dynamic_sidecar.ports import InputStatus, OutputStatus
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.users import UserID
+
+from ._notifier import Notifier
+
+
+@dataclass
+class PortNotifier:
+ app: FastAPI
+ user_id: UserID
+ project_id: ProjectID
+ node_id: NodeID
+
+ async def _send_output_port_status(
+ self, port_key: ServicePortKey, status: OutputStatus
+ ) -> None:
+ notifier: Notifier = Notifier.get_from_app_state(self.app)
+ await notifier.notify_output_port_status(
+ self.user_id, self.project_id, self.node_id, port_key, status
+ )
+
+ async def _send_input_port_status(
+ self, port_key: ServicePortKey, status: InputStatus
+ ) -> None:
+ notifier: Notifier = Notifier.get_from_app_state(self.app)
+ await notifier.notify_input_port_status(
+ self.user_id, self.project_id, self.node_id, port_key, status
+ )
+
+    async def send_output_port_upload_started(self, port_key: ServicePortKey) -> None:
+ await self._send_output_port_status(port_key, OutputStatus.UPLOAD_STARTED)
+
+ async def send_output_port_upload_was_aborted(
+ self, port_key: ServicePortKey
+ ) -> None:
+ await self._send_output_port_status(port_key, OutputStatus.UPLOAD_WAS_ABORTED)
+
+ async def send_output_port_upload_finished_successfully(
+ self, port_key: ServicePortKey
+ ) -> None:
+ await self._send_output_port_status(
+ port_key, OutputStatus.UPLOAD_FINISHED_SUCCESSFULLY
+ )
+
+ async def send_output_port_upload_finished_with_error(
+ self, port_key: ServicePortKey
+ ) -> None:
+ await self._send_output_port_status(
+ port_key, OutputStatus.UPLOAD_FINISHED_WITH_ERRROR
+ )
+
+ async def send_input_port_download_started(self, port_key: ServicePortKey) -> None:
+ await self._send_input_port_status(port_key, InputStatus.DOWNLOAD_STARTED)
+
+ async def send_input_port_download_was_aborted(
+ self, port_key: ServicePortKey
+ ) -> None:
+ await self._send_input_port_status(port_key, InputStatus.DOWNLOAD_WAS_ABORTED)
+
+    async def send_input_port_download_finished_successfully(
+ self, port_key: ServicePortKey
+ ) -> None:
+ await self._send_input_port_status(
+ port_key, InputStatus.DOWNLOAD_FINISHED_SUCCESSFULLY
+ )
+
+ async def send_input_port_download_finished_with_error(
+ self, port_key: ServicePortKey
+ ) -> None:
+ await self._send_input_port_status(
+ port_key, InputStatus.DOWNLOAD_FINISHED_WITH_ERRROR
+ )
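`PortNotifier` is a thin value object: it carries only the identifiers and resolves the `Notifier` singleton from app state at send time. How the sidecar wires one up, mirroring `long_running_tasks.py` and `outputs/_manager.py` elsewhere in this diff (`app` and `settings` are assumed to come from the running service):

```python
from fastapi import FastAPI
from models_library.services_types import ServicePortKey

from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings
from simcore_service_dynamic_sidecar.modules.notifications import PortNotifier


async def notify_upload_started(app: FastAPI, settings: ApplicationSettings) -> None:
    port_notifier = PortNotifier(
        app,
        settings.DY_SIDECAR_USER_ID,
        settings.DY_SIDECAR_PROJECT_ID,
        settings.DY_SIDECAR_NODE_ID,
    )
    # resolves the Notifier singleton from app.state and emits a socket.io
    # event scoped to the owning user's room
    await port_notifier.send_output_port_upload_started(ServicePortKey("output_1"))
```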
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py
new file mode 100644
index 000000000000..840c47d729ef
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_system_monitor.py
@@ -0,0 +1,17 @@
+from pathlib import Path
+
+from fastapi import FastAPI
+from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
+from models_library.projects_nodes_io import NodeID
+from models_library.users import UserID
+
+from ._notifier import Notifier
+
+
+async def publish_disk_usage(
+ app: FastAPI, *, user_id: UserID, node_id: NodeID, usage: dict[Path, DiskUsage]
+) -> None:
+ notifier: Notifier = Notifier.get_from_app_state(app)
+ await notifier.notify_service_disk_usage(
+ user_id=user_id, node_id=node_id, usage=usage
+ )
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifier.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifier.py
new file mode 100644
index 000000000000..0d61e1b388ba
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifier.py
@@ -0,0 +1,103 @@
+import contextlib
+from pathlib import Path
+
+import socketio # type: ignore[import-untyped]
+from fastapi import FastAPI
+from fastapi.encoders import jsonable_encoder
+from models_library.api_schemas_dynamic_sidecar.ports import (
+ InputPortSatus,
+ InputStatus,
+ OutputPortStatus,
+ OutputStatus,
+)
+from models_library.api_schemas_dynamic_sidecar.socketio import (
+ SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
+ SOCKET_IO_STATE_INPUT_PORTS_EVENT,
+ SOCKET_IO_STATE_OUTPUT_PORTS_EVENT,
+)
+from models_library.api_schemas_dynamic_sidecar.telemetry import (
+ DiskUsage,
+ ServiceDiskUsage,
+)
+from models_library.api_schemas_webserver.socketio import SocketIORoomStr
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.users import UserID
+from servicelib.fastapi.app_state import SingletonInAppStateMixin
+
+
+class Notifier(SingletonInAppStateMixin):
+ app_state_name: str = "notifier"
+
+ def __init__(self, sio_manager: socketio.AsyncAioPikaManager):
+ self._sio_manager = sio_manager
+
+ async def notify_service_disk_usage(
+ self, user_id: UserID, node_id: NodeID, usage: dict[Path, DiskUsage]
+ ) -> None:
+ await self._sio_manager.emit(
+ SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
+ data=jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage)),
+ room=SocketIORoomStr.from_user_id(user_id),
+ )
+
+ async def notify_output_port_status(
+ self,
+ user_id: UserID,
+ project_id: ProjectID,
+ node_id: NodeID,
+ port_key: ServicePortKey,
+ output_status: OutputStatus,
+ ) -> None:
+ await self._sio_manager.emit(
+ SOCKET_IO_STATE_OUTPUT_PORTS_EVENT,
+ data=jsonable_encoder(
+ OutputPortStatus(
+ project_id=project_id,
+ node_id=node_id,
+ port_key=port_key,
+ status=output_status,
+ )
+ ),
+ room=SocketIORoomStr.from_user_id(user_id),
+ )
+
+ async def notify_input_port_status(
+ self,
+ user_id: UserID,
+ project_id: ProjectID,
+ node_id: NodeID,
+ port_key: ServicePortKey,
+ input_status: InputStatus,
+ ) -> None:
+ await self._sio_manager.emit(
+ SOCKET_IO_STATE_INPUT_PORTS_EVENT,
+ data=jsonable_encoder(
+ InputPortSatus(
+ project_id=project_id,
+ node_id=node_id,
+ port_key=port_key,
+ status=input_status,
+ )
+ ),
+ room=SocketIORoomStr.from_user_id(user_id),
+ )
+
+
+def setup_notifier(app: FastAPI):
+ async def _on_startup() -> None:
+ assert app.state.external_socketio # nosec
+
+ notifier = Notifier(
+ sio_manager=app.state.external_socketio,
+ )
+ notifier.set_to_app_state(app)
+ assert Notifier.get_from_app_state(app) == notifier # nosec
+
+ async def _on_shutdown() -> None:
+ with contextlib.suppress(AttributeError):
+ Notifier.pop_from_app_state(app)
+
+ app.add_event_handler("startup", _on_startup)
+ app.add_event_handler("shutdown", _on_shutdown)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py
new file mode 100644
index 000000000000..6de0fae307f1
--- /dev/null
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_setup.py
@@ -0,0 +1,15 @@
+import logging
+
+from fastapi import FastAPI
+from servicelib.logging_utils import log_context
+
+from ._notifier import setup_notifier
+from ._socketio import setup_socketio
+
+_logger = logging.getLogger(__name__)
+
+
+def setup_notifications(app: FastAPI) -> None:
+ with log_context(_logger, logging.INFO, "setup notifications"):
+ setup_socketio(app)
+ setup_notifier(app)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_socketio.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_socketio.py
similarity index 100%
rename from services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_socketio.py
rename to services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_socketio.py
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py
index 307f8b3d9337..d4a8ac8d07ad 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py
@@ -18,6 +18,7 @@
from ...core.rabbitmq import post_log_message, post_progress_message
from ...core.settings import ApplicationSettings
+from ...modules.notifications._notifications_ports import PortNotifier
from ..nodeports import upload_outputs
from ._context import OutputsContext
@@ -100,6 +101,7 @@ class OutputsManager: # pylint: disable=too-many-instance-attributes
def __init__(
self,
outputs_context: OutputsContext,
+ port_notifier: PortNotifier,
io_log_redirect_cb: LogRedirectCB | None,
progress_cb: progress_bar.AsyncReportCB | None,
*,
@@ -108,6 +110,7 @@ def __init__(
task_monitor_interval_s: PositiveFloat = 1.0,
):
self.outputs_context = outputs_context
+ self.port_notifier = port_notifier
self.io_log_redirect_cb = io_log_redirect_cb
self.upload_upon_api_request = upload_upon_api_request
self.task_cancellation_timeout_s = task_cancellation_timeout_s
@@ -138,6 +141,7 @@ async def _upload_ports() -> None:
port_keys=port_keys,
io_log_redirect_cb=self.io_log_redirect_cb,
progress_bar=root_progress,
+ port_notifier=self.port_notifier,
)
task_name = f"outputs_manager_port_keys-{'_'.join(port_keys)}"
@@ -271,6 +275,12 @@ async def on_startup() -> None:
progress_cb=partial(
post_progress_message, app, ProgressType.SERVICE_OUTPUTS_PUSHING
),
+ port_notifier=PortNotifier(
+ app,
+ settings.DY_SIDECAR_USER_ID,
+ settings.DY_SIDECAR_PROJECT_ID,
+ settings.DY_SIDECAR_NODE_ID,
+ ),
)
await outputs_manager.start()
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py
index 6d6917d4e15f..c95813e939f2 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_watchdog_extensions.py
@@ -36,7 +36,9 @@ def __init__(self, path, recursive=False): # pylint:disable=super-init-not-call
# overwrite the `InotifyBuffer.__init__` method
BaseThread.__init__(self) # pylint:disable=non-parent-init-called
self._queue = DelayedQueue(self.delay)
- self._inotify = Inotify(path, recursive, _EVENTS_TO_WATCH)
+ self._inotify = Inotify( # pylint:disable=too-many-function-args
+ path, recursive, _EVENTS_TO_WATCH
+ )
self.start()
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py
index 1ecc04fdaea0..90b06450e6f0 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py
@@ -15,7 +15,7 @@
from ...core.settings import ApplicationSettings
from ..mounted_fs import MountedVolumes
-from ._notifier import publish_disk_usage
+from ..notifications import publish_disk_usage
_logger = logging.getLogger(__name__)
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py
index e460f7a9ee30..aa0d36a72b9b 100644
--- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py
+++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_setup.py
@@ -5,8 +5,6 @@
from ...core.settings import SystemMonitorSettings
from ._disk_usage import setup_disk_usage
-from ._notifier import setup_notifier
-from ._socketio import setup_socketio
_logger = logging.getLogger(__name__)
@@ -19,6 +17,4 @@ def setup_system_monitor(app: FastAPI) -> None:
_logger.warning("system monitor disabled")
return
- setup_socketio(app) # required by notifier
- setup_notifier(app)
setup_disk_usage(app)
diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py
index 397666815fb4..53b88ac13592 100644
--- a/services/dynamic-sidecar/tests/conftest.py
+++ b/services/dynamic-sidecar/tests/conftest.py
@@ -1,5 +1,6 @@
# pylint: disable=redefined-outer-name
# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
diff --git a/services/dynamic-sidecar/tests/unit/conftest.py b/services/dynamic-sidecar/tests/unit/conftest.py
index b6e590f71ebb..ee2c106bb695 100644
--- a/services/dynamic-sidecar/tests/unit/conftest.py
+++ b/services/dynamic-sidecar/tests/unit/conftest.py
@@ -17,6 +17,10 @@
docker_compose_down,
)
from simcore_service_dynamic_sidecar.core.docker_utils import docker_client
+from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings
+from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
+ PortNotifier,
+)
from tenacity import retry
from tenacity.after import after_log
from tenacity.stop import stop_after_delay
@@ -142,3 +146,14 @@ def mock_rabbitmq_envs(
},
)
return mock_environment
+
+
+@pytest.fixture
+def port_notifier(app: FastAPI) -> PortNotifier:
+ settings: ApplicationSettings = app.state.settings
+ return PortNotifier(
+ app,
+ settings.DY_SIDECAR_USER_ID,
+ settings.DY_SIDECAR_PROJECT_ID,
+ settings.DY_SIDECAR_NODE_ID,
+ )
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
new file mode 100644
index 000000000000..654d2bb16191
--- /dev/null
+++ b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py
@@ -0,0 +1,400 @@
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+from collections.abc import AsyncIterable, Callable
+from contextlib import AsyncExitStack, _AsyncGeneratorContextManager
+from pathlib import Path
+from typing import Final
+from unittest.mock import AsyncMock
+
+import pytest
+import socketio
+from asgi_lifespan import LifespanManager
+from fastapi import FastAPI
+from fastapi.encoders import jsonable_encoder
+from models_library.api_schemas_dynamic_sidecar.ports import (
+ InputPortSatus,
+ InputStatus,
+ OutputPortStatus,
+ OutputStatus,
+)
+from models_library.api_schemas_dynamic_sidecar.socketio import (
+ SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
+ SOCKET_IO_STATE_INPUT_PORTS_EVENT,
+ SOCKET_IO_STATE_OUTPUT_PORTS_EVENT,
+)
+from models_library.api_schemas_dynamic_sidecar.telemetry import (
+ DiskUsage,
+ ServiceDiskUsage,
+)
+from models_library.api_schemas_webserver.socketio import SocketIORoomStr
+from models_library.projects import ProjectID
+from models_library.projects_nodes_io import NodeID
+from models_library.services_types import ServicePortKey
+from models_library.users import UserID
+from pydantic import ByteSize, NonNegativeInt, parse_obj_as
+from pytest_mock import MockerFixture
+from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
+from servicelib.utils import logged_gather
+from settings_library.rabbit import RabbitSettings
+from simcore_service_dynamic_sidecar.core.application import create_app
+from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings
+from simcore_service_dynamic_sidecar.modules.notifications import (
+ PortNotifier,
+ publish_disk_usage,
+)
+from simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage import (
+ DiskUsageMonitor,
+)
+from socketio import AsyncServer
+from tenacity import AsyncRetrying
+from tenacity.stop import stop_after_delay
+from tenacity.wait import wait_fixed
+
+pytest_simcore_core_services_selection = [
+ "rabbit",
+]
+
+_NUMBER_OF_CLIENTS: Final[NonNegativeInt] = 10
+
+
+@pytest.fixture
+def mock_environment(
+ monkeypatch: pytest.MonkeyPatch,
+ rabbit_service: RabbitSettings,
+ mock_environment: EnvVarsDict,
+) -> EnvVarsDict:
+ return setenvs_from_dict(
+ monkeypatch,
+ {
+ "DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE": "true",
+ "RABBIT_HOST": rabbit_service.RABBIT_HOST,
+ "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(),
+ "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}",
+ "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}",
+ "RABBIT_USER": rabbit_service.RABBIT_USER,
+ },
+ )
+
+
+@pytest.fixture
+async def app(
+ mock_environment: EnvVarsDict,
+ mock_registry_service: AsyncMock,
+ mock_storage_check: None,
+ mock_postgres_check: None,
+ mocker: MockerFixture,
+) -> AsyncIterable[FastAPI]:
+ mocker.patch(
+ "simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage._get_monitored_paths",
+ return_value=[],
+ )
+
+ app: FastAPI = create_app()
+ async with LifespanManager(app):
+ yield app
+
+
+@pytest.fixture
+async def disk_usage_monitor(app: FastAPI) -> DiskUsageMonitor:
+ return app.state.disk_usage_monitor
+
+
+@pytest.fixture
+async def socketio_server(
+ app: FastAPI,
+ socketio_server_factory: Callable[
+ [RabbitSettings], _AsyncGeneratorContextManager[AsyncServer]
+ ],
+) -> AsyncIterable[AsyncServer]:
+ # Same configuration as simcore_service_webserver/socketio/server.py
+ settings: ApplicationSettings = app.state.settings
+ assert settings.RABBIT_SETTINGS
+
+ async with socketio_server_factory(settings.RABBIT_SETTINGS) as server:
+ yield server
+
+
+@pytest.fixture
+def room_name(user_id: UserID) -> SocketIORoomStr:
+ return SocketIORoomStr.from_user_id(user_id)
+
+
+async def _assert_call_count(mock: AsyncMock, *, call_count: int) -> None:
+ async for attempt in AsyncRetrying(
+ wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True
+ ):
+ with attempt:
+ assert mock.call_count == call_count
+
+
+def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage:
+ return DiskUsage(
+ total=ByteSize(0),
+ used=ByteSize(0),
+ free=ByteSize.validate(byte_size_str),
+ used_percent=0,
+ )
+
+
+def _get_on_service_disk_usage_spy(
+ socketio_client: socketio.AsyncClient,
+) -> AsyncMock:
+ # emulates front-end receiving message
+
+ async def on_service_status(data):
+ assert parse_obj_as(ServiceDiskUsage, data) is not None
+
+ on_event_spy = AsyncMock(wraps=on_service_status)
+ socketio_client.on(SOCKET_IO_SERVICE_DISK_USAGE_EVENT, on_event_spy)
+
+ return on_event_spy
+
+
+@pytest.mark.parametrize(
+ "usage",
+ [
+ pytest.param({}, id="empty"),
+ pytest.param({Path("/"): _get_mocked_disk_usage("1kb")}, id="one_entry"),
+ pytest.param(
+ {
+ Path("/"): _get_mocked_disk_usage("1kb"),
+ Path("/tmp"): _get_mocked_disk_usage("2kb"), # noqa: S108
+ },
+ id="two_entries",
+ ),
+ ],
+)
+async def test_notifier_publish_disk_usage(
+ disk_usage_monitor: DiskUsageMonitor,
+ socketio_server_events: dict[str, AsyncMock],
+ app: FastAPI,
+ user_id: UserID,
+ usage: dict[Path, DiskUsage],
+ node_id: NodeID,
+ socketio_client_factory: Callable[
+ [], _AsyncGeneratorContextManager[socketio.AsyncClient]
+ ],
+):
+ # web server spy events
+ server_connect = socketio_server_events["connect"]
+ server_disconnect = socketio_server_events["disconnect"]
+ server_on_check = socketio_server_events["on_check"]
+
+ async with AsyncExitStack() as socketio_frontend_clients:
+ frontend_clients: list[socketio.AsyncClient] = await logged_gather(
+ *[
+ socketio_frontend_clients.enter_async_context(socketio_client_factory())
+ for _ in range(_NUMBER_OF_CLIENTS)
+ ]
+ )
+ await _assert_call_count(server_connect, call_count=_NUMBER_OF_CLIENTS)
+
+        # each client emits an event and we check the server received it
+ await logged_gather(
+ *[
+ frontend_client.emit("check", data="an_event")
+ for frontend_client in frontend_clients
+ ]
+ )
+ await _assert_call_count(server_on_check, call_count=_NUMBER_OF_CLIENTS)
+
+ # attach spy to client
+ on_service_disk_usage_events: list[AsyncMock] = [
+ _get_on_service_disk_usage_spy(c) for c in frontend_clients
+ ]
+
+ # server publishes a message
+ await publish_disk_usage(app, user_id=user_id, node_id=node_id, usage=usage)
+
+ # check that all clients received it
+ for on_service_disk_usage_event in on_service_disk_usage_events:
+ await _assert_call_count(on_service_disk_usage_event, call_count=1)
+ on_service_disk_usage_event.assert_awaited_once_with(
+ jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage))
+ )
+
+ await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS)
+
+
+@pytest.fixture
+def port_key() -> ServicePortKey:
+ return ServicePortKey("test_port")
+
+
+def _get_on_input_port_spy(
+ socketio_client: socketio.AsyncClient,
+) -> AsyncMock:
+ # emulates front-end receiving message
+
+ async def on_service_status(data):
+        assert parse_obj_as(InputPortSatus, data) is not None
+
+ on_event_spy = AsyncMock(wraps=on_service_status)
+ socketio_client.on(SOCKET_IO_STATE_INPUT_PORTS_EVENT, on_event_spy)
+
+ return on_event_spy
+
+
+@pytest.mark.parametrize("input_status", InputStatus)
+async def test_notifier_send_input_port_status(
+ socketio_server_events: dict[str, AsyncMock],
+ app: FastAPI,
+ user_id: UserID,
+ project_id: ProjectID,
+ node_id: NodeID,
+ port_key: ServicePortKey,
+ socketio_client_factory: Callable[
+ [], _AsyncGeneratorContextManager[socketio.AsyncClient]
+ ],
+ input_status: InputStatus,
+):
+ # web server spy events
+ server_connect = socketio_server_events["connect"]
+ server_disconnect = socketio_server_events["disconnect"]
+ server_on_check = socketio_server_events["on_check"]
+
+ async with AsyncExitStack() as socketio_frontend_clients:
+ frontend_clients: list[socketio.AsyncClient] = await logged_gather(
+ *[
+ socketio_frontend_clients.enter_async_context(socketio_client_factory())
+ for _ in range(_NUMBER_OF_CLIENTS)
+ ]
+ )
+ await _assert_call_count(server_connect, call_count=_NUMBER_OF_CLIENTS)
+
+        # each client emits an event and we check the server received it
+ await logged_gather(
+ *[
+ frontend_client.emit("check", data="an_event")
+ for frontend_client in frontend_clients
+ ]
+ )
+ await _assert_call_count(server_on_check, call_count=_NUMBER_OF_CLIENTS)
+
+ # attach spy to client
+ on_input_port_events: list[AsyncMock] = [
+ _get_on_input_port_spy(c) for c in frontend_clients
+ ]
+
+ port_notifier = PortNotifier(app, user_id, project_id, node_id)
+
+ # server publishes a message
+ match input_status:
+ case InputStatus.DOWNLOAD_STARTED:
+ await port_notifier.send_input_port_download_started(port_key)
+ case InputStatus.DOWNLOAD_WAS_ABORTED:
+ await port_notifier.send_input_port_download_was_aborted(port_key)
+ case InputStatus.DOWNLOAD_FINISHED_SUCCESSFULLY:
+                await port_notifier.send_input_port_download_finished_successfully(
+ port_key
+ )
+ case InputStatus.DOWNLOAD_FINISHED_WITH_ERRROR:
+ await port_notifier.send_input_port_download_finished_with_error(
+ port_key
+ )
+
+ # check that all clients received it
+ for on_input_port_event in on_input_port_events:
+ await _assert_call_count(on_input_port_event, call_count=1)
+ on_input_port_event.assert_awaited_once_with(
+ jsonable_encoder(
+ InputPortSatus(
+ project_id=project_id,
+ node_id=node_id,
+ port_key=port_key,
+ status=input_status,
+ )
+ )
+ )
+
+ await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS)
+
+
+def _get_on_output_port_spy(
+ socketio_client: socketio.AsyncClient,
+) -> AsyncMock:
+ # emulates front-end receiving message
+
+ async def on_service_status(data):
+        assert parse_obj_as(OutputPortStatus, data) is not None
+
+ on_event_spy = AsyncMock(wraps=on_service_status)
+ socketio_client.on(SOCKET_IO_STATE_OUTPUT_PORTS_EVENT, on_event_spy)
+
+ return on_event_spy
+
+
+@pytest.mark.parametrize("output_status", OutputStatus)
+async def test_notifier_send_output_port_status(
+ socketio_server_events: dict[str, AsyncMock],
+ app: FastAPI,
+ user_id: UserID,
+ project_id: ProjectID,
+ node_id: NodeID,
+ port_key: ServicePortKey,
+ socketio_client_factory: Callable[
+ [], _AsyncGeneratorContextManager[socketio.AsyncClient]
+ ],
+ output_status: OutputStatus,
+):
+ # web server spy events
+ server_connect = socketio_server_events["connect"]
+ server_disconnect = socketio_server_events["disconnect"]
+ server_on_check = socketio_server_events["on_check"]
+
+ async with AsyncExitStack() as socketio_frontend_clients:
+ frontend_clients: list[socketio.AsyncClient] = await logged_gather(
+ *[
+ socketio_frontend_clients.enter_async_context(socketio_client_factory())
+ for _ in range(_NUMBER_OF_CLIENTS)
+ ]
+ )
+ await _assert_call_count(server_connect, call_count=_NUMBER_OF_CLIENTS)
+
+        # each client emits an event and we check the server received it
+ await logged_gather(
+ *[
+ frontend_client.emit("check", data="an_event")
+ for frontend_client in frontend_clients
+ ]
+ )
+ await _assert_call_count(server_on_check, call_count=_NUMBER_OF_CLIENTS)
+
+ # attach spy to client
+ on_output_port_events: list[AsyncMock] = [
+ _get_on_output_port_spy(c) for c in frontend_clients
+ ]
+
+ port_notifier = PortNotifier(app, user_id, project_id, node_id)
+
+ # server publishes a message
+ match output_status:
+ case OutputStatus.UPLOAD_STARTED:
+                await port_notifier.send_output_port_upload_started(port_key)
+ case OutputStatus.UPLOAD_WAS_ABORTED:
+ await port_notifier.send_output_port_upload_was_aborted(port_key)
+ case OutputStatus.UPLOAD_FINISHED_SUCCESSFULLY:
+ await port_notifier.send_output_port_upload_finished_successfully(
+ port_key
+ )
+ case OutputStatus.UPLOAD_FINISHED_WITH_ERRROR:
+ await port_notifier.send_output_port_upload_finished_with_error(
+ port_key
+ )
+
+ # check that all clients received it
+ for on_output_port_event in on_output_port_events:
+ await _assert_call_count(on_output_port_event, call_count=1)
+ on_output_port_event.assert_awaited_once_with(
+ jsonable_encoder(
+ OutputPortStatus(
+ project_id=project_id,
+ node_id=node_id,
+ port_key=port_key,
+ status=output_status,
+ )
+ )
+ )
+
+ await _assert_call_count(server_disconnect, call_count=_NUMBER_OF_CLIENTS)
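A detail worth noting from the two status tests above: `pytest.mark.parametrize` accepts an enum class directly and generates one case per member, and the `match` statement then dispatches on the member under test. In isolation the idiom looks like this (the enum here is a stand-in, not from the PR):

```python
from enum import Enum

import pytest


class TransferStatus(Enum):  # illustrative stand-in enum
    STARTED = "started"
    ABORTED = "aborted"


@pytest.mark.parametrize("status", TransferStatus)  # one case per member
def test_every_status_is_handled(status: TransferStatus):
    match status:
        case TransferStatus.STARTED:
            assert status.value == "started"
        case TransferStatus.ABORTED:
            assert status.value == "aborted"
```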
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py
index 024d966e424b..38b217bab8f5 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_filter.py
@@ -9,6 +9,9 @@
import pytest
from pydantic import ByteSize, NonNegativeFloat, NonNegativeInt, parse_obj_as
from pytest_mock.plugin import MockerFixture
+from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
+ PortNotifier,
+)
from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext
from simcore_service_dynamic_sidecar.modules.outputs._event_filter import (
BaseDelayPolicy,
@@ -56,10 +59,13 @@ async def outputs_context(outputs_path: Path, port_keys: list[str]) -> OutputsCo
@pytest.fixture
async def outputs_manager(
- outputs_context: OutputsContext,
+ outputs_context: OutputsContext, port_notifier: PortNotifier
) -> AsyncIterator[OutputsManager]:
outputs_manager = OutputsManager(
- outputs_context=outputs_context, io_log_redirect_cb=None, progress_cb=None
+ outputs_context=outputs_context,
+ port_notifier=port_notifier,
+ io_log_redirect_cb=None,
+ progress_cb=None,
)
await outputs_manager.start()
yield outputs_manager
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py
index 5f02a500a4da..35ccc7d72df7 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_event_handler.py
@@ -10,6 +10,9 @@
import pytest
from aioprocessing.queues import AioQueue
from pydantic import PositiveFloat
+from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
+ PortNotifier,
+)
from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext
from simcore_service_dynamic_sidecar.modules.outputs._event_handler import (
EventHandlerObserver,
@@ -39,10 +42,13 @@ async def outputs_context(
@pytest.fixture
async def outputs_manager(
- outputs_context: OutputsContext,
+ outputs_context: OutputsContext, port_notifier: PortNotifier
) -> AsyncIterable[OutputsManager]:
outputs_manager = OutputsManager(
- outputs_context, io_log_redirect_cb=None, progress_cb=None
+ outputs_context,
+ port_notifier=port_notifier,
+ io_log_redirect_cb=None,
+ progress_cb=None,
)
await outputs_manager.start()
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py
index 40a3db6d3f94..3bf17d09f925 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_manager.py
@@ -22,6 +22,9 @@
from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB
from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings
from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes
+from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
+ PortNotifier,
+)
from simcore_service_dynamic_sidecar.modules.outputs._context import (
OutputsContext,
setup_outputs_context,
@@ -165,10 +168,11 @@ async def outputs_context(
@pytest.fixture
async def outputs_manager(
- outputs_context: OutputsContext,
+ outputs_context: OutputsContext, port_notifier: PortNotifier
) -> AsyncIterator[OutputsManager]:
outputs_manager = OutputsManager(
outputs_context=outputs_context,
+ port_notifier=port_notifier,
io_log_redirect_cb=None,
task_monitor_interval_s=0.01,
progress_cb=None,
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py
index f209e4877a75..7f9b81587c25 100644
--- a/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py
+++ b/services/dynamic-sidecar/tests/unit/test_modules_outputs_watcher.py
@@ -26,6 +26,9 @@
)
from pytest_mock import MockerFixture
from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes
+from simcore_service_dynamic_sidecar.modules.notifications._notifications_ports import (
+ PortNotifier,
+)
from simcore_service_dynamic_sidecar.modules.outputs import (
_watcher as outputs_watcher_core,
)
@@ -90,10 +93,11 @@ async def outputs_context(
@pytest.fixture
async def outputs_manager(
- outputs_context: OutputsContext,
+ outputs_context: OutputsContext, port_notifier: PortNotifier
) -> AsyncIterable[OutputsManager]:
outputs_manager = OutputsManager(
outputs_context=outputs_context,
+ port_notifier=port_notifier,
io_log_redirect_cb=None,
task_monitor_interval_s=TICK_INTERVAL,
progress_cb=None,
diff --git a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py
deleted file mode 100644
index 73184a1b3cba..000000000000
--- a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
-
-from collections.abc import AsyncIterable, Callable
-from contextlib import AsyncExitStack, _AsyncGeneratorContextManager
-from pathlib import Path
-from unittest.mock import AsyncMock
-
-import pytest
-import socketio
-from asgi_lifespan import LifespanManager
-from fastapi import FastAPI
-from fastapi.encoders import jsonable_encoder
-from models_library.api_schemas_dynamic_sidecar.socketio import (
- SOCKET_IO_SERVICE_DISK_USAGE_EVENT,
-)
-from models_library.api_schemas_dynamic_sidecar.telemetry import (
- DiskUsage,
- ServiceDiskUsage,
-)
-from models_library.api_schemas_webserver.socketio import SocketIORoomStr
-from models_library.projects_nodes_io import NodeID
-from models_library.users import UserID
-from pydantic import ByteSize, NonNegativeInt, parse_obj_as
-from pytest_mock import MockerFixture
-from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
-from servicelib.utils import logged_gather
-from settings_library.rabbit import RabbitSettings
-from simcore_service_dynamic_sidecar.core.application import create_app
-from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings
-from simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage import (
- DiskUsageMonitor,
-)
-from simcore_service_dynamic_sidecar.modules.system_monitor._notifier import (
- publish_disk_usage,
-)
-from socketio import AsyncServer
-from tenacity import AsyncRetrying
-from tenacity.stop import stop_after_delay
-from tenacity.wait import wait_fixed
-
-pytest_simcore_core_services_selection = [
- "rabbit",
-]
-
-
-@pytest.fixture
-def mock_environment(
- monkeypatch: pytest.MonkeyPatch,
- rabbit_service: RabbitSettings,
- mock_environment: EnvVarsDict,
-) -> EnvVarsDict:
- return setenvs_from_dict(
- monkeypatch,
- {
- "DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE": "true",
- "RABBIT_HOST": rabbit_service.RABBIT_HOST,
- "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(),
- "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}",
- "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}",
- "RABBIT_USER": rabbit_service.RABBIT_USER,
- },
- )
-
-
-@pytest.fixture
-async def app(
- mock_environment: EnvVarsDict,
- mock_registry_service: AsyncMock,
- mock_storage_check: None,
- mock_postgres_check: None,
- mocker: MockerFixture,
-) -> AsyncIterable[FastAPI]:
- mocker.patch(
- "simcore_service_dynamic_sidecar.modules.system_monitor._disk_usage._get_monitored_paths",
- return_value=[],
- )
-
- app: FastAPI = create_app()
- async with LifespanManager(app):
- yield app
-
-
-@pytest.fixture
-async def disk_usage_monitor(app: FastAPI) -> DiskUsageMonitor:
- return app.state.disk_usage_monitor
-
-
-@pytest.fixture
-async def socketio_server(
- app: FastAPI,
- socketio_server_factory: Callable[
- [RabbitSettings], _AsyncGeneratorContextManager[AsyncServer]
- ],
-) -> AsyncIterable[AsyncServer]:
- # Same configuration as simcore_service_webserver/socketio/server.py
- settings: ApplicationSettings = app.state.settings
- assert settings.RABBIT_SETTINGS
-
- async with socketio_server_factory(settings.RABBIT_SETTINGS) as server:
- yield server
-
-
-@pytest.fixture
-def room_name(user_id: UserID) -> SocketIORoomStr:
- return SocketIORoomStr.from_user_id(user_id)
-
-
-def _get_on_service_disk_usage_event(
- socketio_client: socketio.AsyncClient,
-) -> AsyncMock:
- # emulates front-end receiving message
-
- async def on_service_status(data):
- assert parse_obj_as(ServiceDiskUsage, data) is not None
-
- on_event_spy = AsyncMock(wraps=on_service_status)
- socketio_client.on(SOCKET_IO_SERVICE_DISK_USAGE_EVENT, on_event_spy)
-
- return on_event_spy
-
-
-async def _assert_call_count(mock: AsyncMock, *, call_count: int) -> None:
- async for attempt in AsyncRetrying(
- wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True
- ):
- with attempt:
- assert mock.call_count == call_count
-
-
-def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage:
- return DiskUsage(
- total=ByteSize(0),
- used=ByteSize(0),
- free=ByteSize.validate(byte_size_str),
- used_percent=0,
- )
-
-
-@pytest.mark.parametrize(
- "usage",
- [
- pytest.param({}, id="empty"),
- pytest.param({Path("/"): _get_mocked_disk_usage("1kb")}, id="one_entry"),
- pytest.param(
- {
- Path("/"): _get_mocked_disk_usage("1kb"),
- Path("/tmp"): _get_mocked_disk_usage("2kb"), # noqa: S108
- },
- id="two_entries",
- ),
- ],
-)
-async def test_notifier_publish_message(
- disk_usage_monitor: DiskUsageMonitor,
- socketio_server_events: dict[str, AsyncMock],
- app: FastAPI,
- user_id: UserID,
- usage: dict[Path, DiskUsage],
- node_id: NodeID,
- socketio_client_factory: Callable[
- [], _AsyncGeneratorContextManager[socketio.AsyncClient]
- ],
-):
- # web server spy events
- server_connect = socketio_server_events["connect"]
- server_disconnect = socketio_server_events["disconnect"]
- server_on_check = socketio_server_events["on_check"]
-
- number_of_clients: NonNegativeInt = 10
- async with AsyncExitStack() as socketio_frontend_clients:
- frontend_clients: list[socketio.AsyncClient] = await logged_gather(
- *[
- socketio_frontend_clients.enter_async_context(socketio_client_factory())
- for _ in range(number_of_clients)
- ]
- )
- await _assert_call_count(server_connect, call_count=number_of_clients)
-
- # client emits and check it was received
- await logged_gather(
- *[
- frontend_client.emit("check", data="an_event")
- for frontend_client in frontend_clients
- ]
- )
- await _assert_call_count(server_on_check, call_count=number_of_clients)
-
- # attach spy to client
- on_service_disk_usage_events: list[AsyncMock] = [
- _get_on_service_disk_usage_event(c) for c in frontend_clients
- ]
-
- # server publishes a message
- await publish_disk_usage(app, user_id=user_id, node_id=node_id, usage=usage)
-
- # check that all clients received it
- for on_service_disk_usage_event in on_service_disk_usage_events:
- await _assert_call_count(on_service_disk_usage_event, call_count=1)
- on_service_disk_usage_event.assert_awaited_once_with(
- jsonable_encoder(ServiceDiskUsage(node_id=node_id, usage=usage))
- )
-
- await _assert_call_count(server_disconnect, call_count=number_of_clients)
diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt
index f35b36a175a4..efd05c557f60 100644
--- a/services/efs-guardian/requirements/_test.txt
+++ b/services/efs-guardian/requirements/_test.txt
@@ -50,7 +50,7 @@ certifi==2024.2.2
# httpcore
# httpx
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
cfn-lint==1.10.3
# via moto
@@ -66,35 +66,35 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
# moto
debugpy==1.8.5
# via -r requirements/_test.in
-deepdiff==7.0.1
+deepdiff==8.0.1
# via -r requirements/_test.in
docker==7.1.0
# via
# -r requirements/_test.in
# moto
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
-fakeredis==2.23.5
+fakeredis==2.24.1
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
frozenlist==1.4.1
# via
# -c requirements/_base.txt
# aiohttp
# aiosignal
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
h11==0.14.0
# via
@@ -133,7 +133,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -162,7 +162,7 @@ markupsafe==2.1.5
# via
# jinja2
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
@@ -177,7 +177,7 @@ openapi-schema-validator==0.6.2
# via openapi-spec-validator
openapi-spec-validator==0.7.1
# via moto
-ordered-set==4.1.0
+orderly-set==5.2.2
# via deepdiff
packaging==24.0
# via
@@ -195,7 +195,7 @@ psutil==6.0.0
# via
# -c requirements/_base.txt
# -r requirements/_test.in
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
@@ -204,9 +204,9 @@ pydantic==1.10.15
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# aws-sam-translator
-pyparsing==3.1.2
+pyparsing==3.1.4
# via moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -250,7 +250,7 @@ referencing==0.29.3
# jsonschema
# jsonschema-path
# jsonschema-specifications
-regex==2024.7.24
+regex==2024.9.11
# via cfn-lint
requests==2.32.3
# via
@@ -291,7 +291,7 @@ sniffio==1.3.1
# httpx
sortedcontainers==2.4.0
# via fakeredis
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
typing-extensions==4.11.0
# via
@@ -308,7 +308,7 @@ urllib3==2.2.1
# docker
# requests
# responses
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt
index 4ec61eba91aa..97a49efc2ebd 100644
--- a/services/efs-guardian/requirements/_tools.txt
+++ b/services/efs-guardian/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -48,14 +48,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -82,9 +82,9 @@ typing-extensions==4.11.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/invitations/requirements/_test.txt b/services/invitations/requirements/_test.txt
index 7b029ccb0de5..6a73c31809dd 100644
--- a/services/invitations/requirements/_test.txt
+++ b/services/invitations/requirements/_test.txt
@@ -16,7 +16,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
h11==0.14.0
# via
@@ -31,7 +31,7 @@ httpx==0.27.0
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# -r requirements/_test.in
-hypothesis==6.111.1
+hypothesis==6.112.1
# via -r requirements/_test.in
idna==3.6
# via
@@ -47,7 +47,7 @@ packaging==24.0
# pytest-sugar
pluggy==1.5.0
# via pytest
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
diff --git a/services/invitations/requirements/_tools.txt b/services/invitations/requirements/_tools.txt
index df9e8e642d52..d6bba29eee2d 100644
--- a/services/invitations/requirements/_tools.txt
+++ b/services/invitations/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -47,14 +47,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -66,7 +66,7 @@ pyyaml==6.0.1
# -c requirements/_base.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -78,9 +78,9 @@ typing-extensions==4.10.0
# via
# -c requirements/_base.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/migration/requirements/_test.txt b/services/migration/requirements/_test.txt
index 5b2e5bb55dd1..0c989c238a4e 100644
--- a/services/migration/requirements/_test.txt
+++ b/services/migration/requirements/_test.txt
@@ -3,7 +3,7 @@ attrs==24.2.0
# jsonschema
# pytest-docker
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# requests
@@ -13,9 +13,9 @@ coverage==7.6.1
# via pytest-cov
docker==7.1.0
# via -r requirements/_test.in
-greenlet==3.0.3
+greenlet==3.1.1
# via sqlalchemy
-idna==3.7
+idna==3.10
# via requests
iniconfig==2.0.0
# via pytest
@@ -23,7 +23,7 @@ jsonschema==4.23.0
# via -r requirements/_test.in
jsonschema-specifications==2023.12.1
# via jsonschema
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -31,7 +31,7 @@ packaging==24.1
# via pytest
pluggy==1.5.0
# via pytest
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -66,7 +66,7 @@ rpds-py==0.20.0
# via
# jsonschema
# referencing
-sqlalchemy==1.4.53
+sqlalchemy==1.4.54
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_test.in
@@ -78,7 +78,7 @@ typing-extensions==4.12.2
# via
# mypy
# sqlalchemy2-stubs
-urllib3==2.2.2
+urllib3==2.2.3
# via
# -c requirements/../../../requirements/constraints.txt
# docker
diff --git a/services/migration/requirements/_tools.txt b/services/migration/requirements/_tools.txt
index 70e35d70acc4..e775221e68b0 100644
--- a/services/migration/requirements/_tools.txt
+++ b/services/migration/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -16,9 +16,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -26,7 +26,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -48,14 +48,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -67,9 +67,9 @@ pyyaml==6.0.2
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -77,9 +77,9 @@ typing-extensions==4.12.2
# via
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/osparc-gateway-server/requirements/_test.txt b/services/osparc-gateway-server/requirements/_test.txt
index a092c888f38e..908dca5582ba 100644
--- a/services/osparc-gateway-server/requirements/_test.txt
+++ b/services/osparc-gateway-server/requirements/_test.txt
@@ -11,7 +11,7 @@ attrs==23.2.0
# via
# -c requirements/_base.txt
# aiohttp
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../requirements/constraints.txt
# requests
@@ -47,7 +47,7 @@ distributed==2024.5.1
# dask-gateway
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
frozenlist==1.4.1
# via
@@ -98,7 +98,7 @@ multidict==6.0.5
# -c requirements/_base.txt
# aiohttp
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -121,7 +121,7 @@ psutil==6.0.0
# via
# -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
# distributed
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
diff --git a/services/osparc-gateway-server/requirements/_tools.txt b/services/osparc-gateway-server/requirements/_tools.txt
index a3428b0ccc33..985945c7b0ef 100644
--- a/services/osparc-gateway-server/requirements/_tools.txt
+++ b/services/osparc-gateway-server/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -49,14 +49,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,9 +68,9 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
@@ -79,9 +79,9 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/osparc-gateway-server/tests/system/requirements/_test.txt b/services/osparc-gateway-server/tests/system/requirements/_test.txt
index f2a907154425..410339df3c69 100644
--- a/services/osparc-gateway-server/tests/system/requirements/_test.txt
+++ b/services/osparc-gateway-server/tests/system/requirements/_test.txt
@@ -1,4 +1,4 @@
-aiodocker==0.22.2
+aiodocker==0.23.0
# via -r requirements/_test.in
aiohappyeyeballs==2.4.0
# via aiohttp
@@ -11,7 +11,7 @@ aiosignal==1.3.1
# via aiohttp
attrs==24.2.0
# via aiohttp
-certifi==2024.7.4
+certifi==2024.8.30
# via
# -c requirements/../../../../../requirements/constraints.txt
# requests
@@ -43,7 +43,7 @@ distributed==2024.5.1
# dask-gateway
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
frozenlist==1.4.1
# via
@@ -55,7 +55,7 @@ fsspec==2024.5.0
# dask
icdiff==2.0.7
# via pytest-icdiff
-idna==3.7
+idna==3.10
# via
# requests
# yarl
@@ -87,7 +87,7 @@ msgpack==1.0.8
# via
# -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
# distributed
-multidict==6.0.5
+multidict==6.1.0
# via
# aiohttp
# yarl
@@ -114,7 +114,7 @@ psutil==6.0.0
# via
# -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
# distributed
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -182,7 +182,7 @@ urllib3==2.2.1
# distributed
# docker
# requests
-yarl==1.9.4
+yarl==1.12.1
# via aiohttp
zict==3.0.0
# via
diff --git a/services/osparc-gateway-server/tests/system/requirements/_tools.txt b/services/osparc-gateway-server/tests/system/requirements/_tools.txt
index ba908e304bb6..ce5d53160d5b 100644
--- a/services/osparc-gateway-server/tests/system/requirements/_tools.txt
+++ b/services/osparc-gateway-server/tests/system/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via -r requirements/../../../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
@@ -46,14 +46,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -64,15 +64,15 @@ pyyaml==6.0.1
# -c requirements/../../../../../requirements/constraints.txt
# -c requirements/_test.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../../../requirements/devenv.txt
-setuptools==73.0.1
+setuptools==75.1.0
# via pip-tools
tomlkit==0.13.2
# via pylint
typing-extensions==4.12.2
# via mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt
index 30290658b180..4a42c0d9ccf8 100644
--- a/services/payments/requirements/_test.txt
+++ b/services/payments/requirements/_test.txt
@@ -42,7 +42,7 @@ coverage==7.6.1
# pytest-cov
docker==7.1.0
# via -r requirements/_test.in
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
frozenlist==1.4.1
# via
@@ -85,7 +85,7 @@ multidict==6.0.5
# -c requirements/_base.txt
# aiohttp
# yarl
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -98,7 +98,7 @@ pluggy==1.5.0
# via pytest
pprintpp==0.4.0
# via pytest-icdiff
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -167,11 +167,11 @@ termcolor==2.4.0
# via pytest-sugar
types-aiofiles==24.1.0.20240626
# via -r requirements/_test.in
-types-pyasn1==0.6.0.20240402
+types-pyasn1==0.6.0.20240913
# via types-python-jose
types-python-jose==3.3.4.20240106
# via -r requirements/_test.in
-types-pyyaml==6.0.12.20240808
+types-pyyaml==6.0.12.20240917
# via -r requirements/_test.in
typing-extensions==4.12.2
# via
diff --git a/services/payments/requirements/_tools.txt b/services/payments/requirements/_tools.txt
index a3199931bff4..5ac982ce79cc 100644
--- a/services/payments/requirements/_tools.txt
+++ b/services/payments/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -17,9 +17,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -27,7 +27,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -50,14 +50,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -68,7 +68,7 @@ pyyaml==6.0.1
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
# pre-commit
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -81,7 +81,7 @@ typing-extensions==4.12.2
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
wheel==0.44.0
# via pip-tools
diff --git a/services/payments/tests/unit/api/test__one_time_payment_workflows.py b/services/payments/tests/unit/api/test__one_time_payment_workflows.py
index 2052dee31f60..753432ac6d69 100644
--- a/services/payments/tests/unit/api/test__one_time_payment_workflows.py
+++ b/services/payments/tests/unit/api/test__one_time_payment_workflows.py
@@ -1,7 +1,8 @@
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
import httpx
diff --git a/services/payments/tests/unit/api/test__payment_method_workflows.py b/services/payments/tests/unit/api/test__payment_method_workflows.py
index 5b92bee8b173..76640384f7b1 100644
--- a/services/payments/tests/unit/api/test__payment_method_workflows.py
+++ b/services/payments/tests/unit/api/test__payment_method_workflows.py
@@ -1,7 +1,8 @@
# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
-# pylint: disable=too-many-arguments
import httpx
diff --git a/services/payments/tests/unit/test_db_payments_users_repo.py b/services/payments/tests/unit/test_db_payments_users_repo.py
index a695af251c41..51d5f540c6ba 100644
--- a/services/payments/tests/unit/test_db_payments_users_repo.py
+++ b/services/payments/tests/unit/test_db_payments_users_repo.py
@@ -59,7 +59,7 @@ async def user(
injects a user in db
"""
assert user_id == user["id"]
- async with insert_and_get_row_lifespan(
+ async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
get_engine(app),
table=users,
values=user,
@@ -84,7 +84,7 @@ async def product(
"""
# NOTE: this fixture ignores products' group-id but it is fine for this test context
assert product["group_id"] is None
- async with insert_and_get_row_lifespan(
+ async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
get_engine(app),
table=products,
values=product,
@@ -101,7 +101,7 @@ async def successful_transaction(
"""
injects transaction in db
"""
- async with insert_and_get_row_lifespan(
+ async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup
get_engine(app),
table=payments_transactions,
values=successful_transaction,
diff --git a/services/payments/tests/unit/test_rpc_payments_methods.py b/services/payments/tests/unit/test_rpc_payments_methods.py
index 9ecb10d9976f..ef60bfa6c425 100644
--- a/services/payments/tests/unit/test_rpc_payments_methods.py
+++ b/services/payments/tests/unit/test_rpc_payments_methods.py
@@ -1,6 +1,7 @@
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
diff --git a/services/payments/tests/unit/test_services_payments.py b/services/payments/tests/unit/test_services_payments.py
index 94452d9c7726..4cb484aafbbf 100644
--- a/services/payments/tests/unit/test_services_payments.py
+++ b/services/payments/tests/unit/test_services_payments.py
@@ -1,6 +1,7 @@
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
# pylint: disable=too-many-arguments
+# pylint: disable=too-many-positional-arguments
# pylint: disable=unused-argument
# pylint: disable=unused-variable
diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt
index bfc96422668e..d1c36a7f469f 100644
--- a/services/resource-usage-tracker/requirements/_test.txt
+++ b/services/resource-usage-tracker/requirements/_test.txt
@@ -40,9 +40,9 @@ certifi==2024.2.2
# httpcore
# httpx
# requests
-cffi==1.17.0
+cffi==1.17.1
# via cryptography
-cfn-lint==1.10.3
+cfn-lint==1.15.0
# via moto
charset-normalizer==3.3.2
# via
@@ -56,7 +56,7 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-cryptography==43.0.0
+cryptography==43.0.1
# via
# -c requirements/../../../requirements/constraints.txt
# joserfc
@@ -65,17 +65,17 @@ docker==7.1.0
# via
# -r requirements/_test.in
# moto
-faker==27.0.0
+faker==29.0.0
# via -r requirements/_test.in
-fakeredis==2.23.5
+fakeredis==2.24.1
# via -r requirements/_test.in
flask==3.0.3
# via
# flask-cors
# moto
-flask-cors==4.0.1
+flask-cors==5.0.0
# via moto
-graphql-core==3.2.3
+graphql-core==3.2.4
# via moto
greenlet==3.0.3
# via
@@ -117,7 +117,7 @@ jmespath==1.0.1
# botocore
joserfc==1.0.0
# via moto
-jsondiff==2.2.0
+jsondiff==2.2.1
# via moto
jsonpatch==1.33
# via cfn-lint
@@ -153,11 +153,11 @@ markupsafe==2.1.5
# jinja2
# mako
# werkzeug
-moto==5.0.13
+moto==5.0.15
# via -r requirements/_test.in
mpmath==1.3.0
# via sympy
-mypy==1.11.1
+mypy==1.11.2
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
@@ -178,7 +178,7 @@ pluggy==1.5.0
# via pytest
ply==3.11
# via jsonpath-ng
-py-partiql-parser==0.5.5
+py-partiql-parser==0.5.6
# via moto
pycparser==2.22
# via cffi
@@ -191,7 +191,7 @@ pyparsing==3.1.2
# via
# -c requirements/_base.txt
# moto
-pytest==8.3.2
+pytest==8.3.3
# via
# -r requirements/_test.in
# pytest-asyncio
@@ -294,11 +294,11 @@ sqlalchemy==1.4.52
# alembic
sqlalchemy2-stubs==0.0.2a38
# via sqlalchemy
-sympy==1.13.2
+sympy==1.13.3
# via cfn-lint
termcolor==2.4.0
# via pytest-sugar
-types-requests==2.32.0.20240712
+types-requests==2.32.0.20240914
# via -r requirements/_test.in
typing-extensions==4.10.0
# via
@@ -318,7 +318,7 @@ urllib3==2.0.7
# requests
# responses
# types-requests
-werkzeug==3.0.3
+werkzeug==3.0.4
# via
# flask
# moto
diff --git a/services/resource-usage-tracker/requirements/_tools.txt b/services/resource-usage-tracker/requirements/_tools.txt
index 4be35ba32575..44759acdfd41 100644
--- a/services/resource-usage-tracker/requirements/_tools.txt
+++ b/services/resource-usage-tracker/requirements/_tools.txt
@@ -1,8 +1,8 @@
-astroid==3.2.4
+astroid==3.3.4
# via pylint
black==24.8.0
# via -r requirements/../../../requirements/devenv.txt
-build==1.2.1
+build==1.2.2
# via pip-tools
bump2version==1.0.1
# via -r requirements/../../../requirements/devenv.txt
@@ -18,9 +18,9 @@ dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
-filelock==3.15.4
+filelock==3.16.1
# via virtualenv
-identify==2.6.0
+identify==2.6.1
# via pre-commit
isort==5.13.2
# via
@@ -28,7 +28,7 @@ isort==5.13.2
# pylint
mccabe==0.7.0
# via pylint
-mypy==1.11.1
+mypy==1.11.2
# via
# -c requirements/_test.txt
# -r requirements/../../../requirements/devenv.txt
@@ -51,14 +51,14 @@ pip==24.2
# via pip-tools
pip-tools==7.4.1
# via -r requirements/../../../requirements/devenv.txt
-platformdirs==4.2.2
+platformdirs==4.3.6
# via
# black
# pylint
# virtualenv
pre-commit==3.8.0
# via -r requirements/../../../requirements/devenv.txt
-pylint==3.2.6
+pylint==3.3.0
# via -r requirements/../../../requirements/devenv.txt
pyproject-hooks==1.1.0
# via
@@ -71,7 +71,7 @@ pyyaml==6.0.1
# -c requirements/_test.txt
# pre-commit
# watchdog
-ruff==0.6.1
+ruff==0.6.7
# via -r requirements/../../../requirements/devenv.txt
setuptools==74.0.0
# via
@@ -85,9 +85,9 @@ typing-extensions==4.10.0
# -c requirements/_base.txt
# -c requirements/_test.txt
# mypy
-virtualenv==20.26.3
+virtualenv==20.26.5
# via pre-commit
-watchdog==4.0.2
+watchdog==5.0.2
# via -r requirements/_tools.in
wheel==0.44.0
# via pip-tools
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py
index 5e241e607674..609b0ebd54f0 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py
@@ -27,6 +27,7 @@
from simcore_postgres_database.models.resource_tracker_pricing_units import (
resource_tracker_pricing_units,
)
+from simcore_postgres_database.models.services import services_meta_data
from starlette import status
from yarl import URL
@@ -184,6 +185,15 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
modified=datetime.now(tz=timezone.utc),
)
)
+
+ con.execute(
+ services_meta_data.insert().values(
+ key=_SERVICE_KEY,
+ version=_SERVICE_VERSION,
+ name="name",
+ description="description",
+ )
+ )
con.execute(
resource_tracker_pricing_plan_to_service.insert().values(
pricing_plan_id=_PRICING_PLAN_ID,
@@ -192,6 +202,15 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
service_default_plan=True,
)
)
+
+ con.execute(
+ services_meta_data.insert().values(
+ key=_SERVICE_KEY_2,
+ version=_SERVICE_VERSION_2,
+ name="name",
+ description="description",
+ )
+ )
con.execute(
resource_tracker_pricing_plan_to_service.insert().values(
pricing_plan_id=_PRICING_PLAN_ID_2,
@@ -207,6 +226,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
con.execute(resource_tracker_pricing_units.delete())
con.execute(resource_tracker_pricing_plans.delete())
con.execute(resource_tracker_pricing_unit_costs.delete())
+ con.execute(services_meta_data.delete())
async def test_get_default_pricing_plan_for_service(
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py
index 5a12fd24dbe0..4ec8d45bb72d 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py
@@ -37,6 +37,7 @@
from simcore_postgres_database.models.resource_tracker_pricing_units import (
resource_tracker_pricing_units,
)
+from simcore_postgres_database.models.services import services_meta_data
pytest_simcore_core_services_selection = ["postgres", "rabbit"]
pytest_simcore_ops_services_selection = [
@@ -44,18 +45,52 @@
]
+_SERVICE_KEY = "simcore/services/comp/itis/sleeper"
+_SERVICE_VERSION_1 = "2.0.2"
+_SERVICE_VERSION_2 = "3.0.0"
+
+_SERVICE_KEY_3 = "simcore/services/comp/itis/different-service"
+_SERVICE_VERSION_3 = "1.0.1"
+
+
@pytest.fixture()
def resource_tracker_setup_db(
postgres_db: sa.engine.Engine,
) -> Iterator[None]:
with postgres_db.connect() as con:
+ con.execute(
+ services_meta_data.insert().values(
+ key=_SERVICE_KEY,
+ version=_SERVICE_VERSION_1,
+ name="name",
+ description="description",
+ )
+ )
+ con.execute(
+ services_meta_data.insert().values(
+ key=_SERVICE_KEY,
+ version=_SERVICE_VERSION_2,
+ name="name",
+ description="description",
+ )
+ )
+ con.execute(
+ services_meta_data.insert().values(
+ key=_SERVICE_KEY_3,
+ version=_SERVICE_VERSION_3,
+ name="name",
+ description="description",
+ )
+ )
+
yield
con.execute(resource_tracker_pricing_unit_costs.delete())
con.execute(resource_tracker_pricing_units.delete())
con.execute(resource_tracker_pricing_plan_to_service.delete())
con.execute(resource_tracker_pricing_plans.delete())
+ con.execute(services_meta_data.delete())
async def test_rpc_pricing_plans_workflow(
@@ -68,7 +103,7 @@ async def test_rpc_pricing_plans_workflow(
result = await pricing_plans.create_pricing_plan(
rpc_client,
data=PricingPlanCreate(
- product_name="s4l",
+ product_name="osparc",
display_name=_display_name,
description=faker.sentence(),
classification=PricingPlanClassification.TIER,
@@ -84,7 +119,7 @@ async def test_rpc_pricing_plans_workflow(
_update_description = "description name updated"
result = await pricing_plans.update_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
data=PricingPlanUpdate(
pricing_plan_id=_pricing_plan_id,
display_name=_update_display_name,
@@ -99,7 +134,7 @@ async def test_rpc_pricing_plans_workflow(
result = await pricing_plans.get_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
assert isinstance(result, PricingPlanGet)
@@ -110,7 +145,7 @@ async def test_rpc_pricing_plans_workflow(
result = await pricing_plans.list_pricing_plans(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
)
assert isinstance(result, list)
assert len(result) == 1
@@ -120,7 +155,7 @@ async def test_rpc_pricing_plans_workflow(
# Now I will deactivate the pricing plan
result = await pricing_plans.update_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
data=PricingPlanUpdate(
pricing_plan_id=_pricing_plan_id,
display_name=faker.word(),
@@ -142,7 +177,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
result = await pricing_plans.create_pricing_plan(
rpc_client,
data=PricingPlanCreate(
- product_name="s4l",
+ product_name="osparc",
display_name=_display_name,
description=faker.sentence(),
classification=PricingPlanClassification.TIER,
@@ -156,7 +191,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
result = await pricing_units.create_pricing_unit(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
data=PricingUnitWithCostCreate(
pricing_plan_id=_pricing_plan_id,
unit_name="SMALL",
@@ -175,7 +210,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
# Get pricing plan
result = await pricing_plans.get_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
assert isinstance(result, PricingPlanGet)
@@ -187,7 +222,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
_unit_name = "VERY SMALL"
result = await pricing_units.update_pricing_unit(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
data=PricingUnitWithCostUpdate(
pricing_plan_id=_pricing_plan_id,
pricing_unit_id=_first_pricing_unit_id,
@@ -206,7 +241,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
# Update pricing unit with COST update!
result = await pricing_units.update_pricing_unit(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
data=PricingUnitWithCostUpdate(
pricing_plan_id=_pricing_plan_id,
pricing_unit_id=_first_pricing_unit_id,
@@ -228,7 +263,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
# Test get pricing unit
result = await pricing_units.get_pricing_unit(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
pricing_unit_id=_first_pricing_unit_id,
)
@@ -238,7 +273,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
# Create one more unit
result = await pricing_units.create_pricing_unit(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
data=PricingUnitWithCostCreate(
pricing_plan_id=_pricing_plan_id,
unit_name="LARGE",
@@ -256,7 +291,7 @@ async def test_rpc_pricing_plans_with_units_workflow(
# Get pricing plan with units
result = await pricing_plans.get_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
assert isinstance(result, PricingPlanGet)
@@ -275,7 +310,7 @@ async def test_rpc_pricing_plans_to_service_workflow(
result = await pricing_plans.create_pricing_plan(
rpc_client,
data=PricingPlanCreate(
- product_name="s4l",
+ product_name="osparc",
display_name=faker.word(),
description=faker.sentence(),
classification=PricingPlanClassification.TIER,
@@ -288,19 +323,19 @@ async def test_rpc_pricing_plans_to_service_workflow(
result = (
await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
)
assert isinstance(result, list)
assert result == []
- _first_service_version = ServiceVersion("2.0.2")
+ _first_service_version = ServiceVersion(_SERVICE_VERSION_1)
result = await pricing_plans.connect_service_to_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
- service_key=ServiceKey("simcore/services/comp/itis/sleeper"),
+ service_key=ServiceKey(_SERVICE_KEY),
service_version=_first_service_version,
)
assert isinstance(result, PricingPlanToServiceGet)
@@ -310,7 +345,7 @@ async def test_rpc_pricing_plans_to_service_workflow(
result = (
await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
)
@@ -318,12 +353,12 @@ async def test_rpc_pricing_plans_to_service_workflow(
assert len(result) == 1
# Connect different version
- _second_service_version = ServiceVersion("3.0.0")
+ _second_service_version = ServiceVersion(_SERVICE_VERSION_2)
result = await pricing_plans.connect_service_to_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
- service_key=ServiceKey("simcore/services/comp/itis/sleeper"),
+ service_key=ServiceKey(_SERVICE_KEY),
service_version=_second_service_version,
)
assert isinstance(result, PricingPlanToServiceGet)
@@ -333,7 +368,7 @@ async def test_rpc_pricing_plans_to_service_workflow(
result = (
await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
)
@@ -341,13 +376,13 @@ async def test_rpc_pricing_plans_to_service_workflow(
assert len(result) == 2
# Connect different service
- _different_service_key = ServiceKey("simcore/services/comp/itis/different-service")
+ _different_service_key = ServiceKey(_SERVICE_KEY_3)
result = await pricing_plans.connect_service_to_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
service_key=_different_service_key,
- service_version=ServiceVersion("1.0.0"),
+ service_version=ServiceVersion(_SERVICE_VERSION_3),
)
assert isinstance(result, PricingPlanToServiceGet)
assert result.pricing_plan_id == _pricing_plan_id
@@ -356,7 +391,7 @@ async def test_rpc_pricing_plans_to_service_workflow(
result = (
await pricing_plans.list_connected_services_to_pricing_plan_by_pricing_plan(
rpc_client,
- product_name="s4l",
+ product_name="osparc",
pricing_plan_id=_pricing_plan_id,
)
)
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py
index 8d95ae78d75b..7a5e2114c1d8 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py
@@ -28,6 +28,7 @@
from simcore_postgres_database.models.resource_tracker_pricing_units import (
resource_tracker_pricing_units,
)
+from simcore_postgres_database.models.services import services_meta_data
from .conftest import assert_service_runs_db_row
@@ -128,6 +129,14 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
modified=datetime.now(tz=timezone.utc),
)
)
+ con.execute(
+ services_meta_data.insert().values(
+ key="simcore/services/comp/itis/sleeper",
+ version="1.0.16",
+ name="name",
+ description="description",
+ )
+ )
con.execute(
resource_tracker_pricing_plan_to_service.insert().values(
pricing_plan_id=1,
@@ -144,6 +153,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
con.execute(resource_tracker_pricing_plans.delete())
con.execute(resource_tracker_pricing_unit_costs.delete())
con.execute(resource_tracker_credit_transactions.delete())
+ con.execute(services_meta_data.delete())
@pytest.mark.flaky(max_runs=3)
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py
index 92946509e912..4b6c1a0dfac7 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py
@@ -30,6 +30,7 @@
from simcore_postgres_database.models.resource_tracker_pricing_units import (
resource_tracker_pricing_units,
)
+from simcore_postgres_database.models.services import services_meta_data
from simcore_service_resource_usage_tracker.modules.db.repositories.resource_tracker import (
ResourceTrackerRepository,
)
@@ -142,6 +143,14 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
modified=datetime.now(tz=timezone.utc),
)
)
+ con.execute(
+ services_meta_data.insert().values(
+ key="simcore/services/comp/itis/sleeper",
+ version="1.0.16",
+ name="name",
+ description="description",
+ )
+ )
con.execute(
resource_tracker_pricing_plan_to_service.insert().values(
pricing_plan_id=1,
@@ -158,6 +167,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
con.execute(resource_tracker_pricing_plans.delete())
con.execute(resource_tracker_pricing_unit_costs.delete())
con.execute(resource_tracker_credit_transactions.delete())
+ con.execute(services_meta_data.delete())
@pytest.fixture
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py
index d5bc497fb0f3..c1d62af5b23f 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py
@@ -30,6 +30,7 @@
from simcore_postgres_database.models.resource_tracker_pricing_units import (
resource_tracker_pricing_units,
)
+from simcore_postgres_database.models.services import services_meta_data
from simcore_service_resource_usage_tracker.modules.db.repositories.resource_tracker import (
ResourceTrackerRepository,
)
@@ -88,6 +89,14 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
modified=datetime.now(tz=timezone.utc),
)
)
+ con.execute(
+ services_meta_data.insert().values(
+ key="simcore/services/comp/itis/sleeper",
+ version="1.0.16",
+ name="name",
+ description="description",
+ )
+ )
con.execute(
resource_tracker_pricing_plan_to_service.insert().values(
pricing_plan_id=1,
@@ -104,6 +113,7 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato
con.execute(resource_tracker_pricing_plans.delete())
con.execute(resource_tracker_pricing_unit_costs.delete())
con.execute(resource_tracker_credit_transactions.delete())
+ con.execute(services_meta_data.delete())
@pytest.fixture
diff --git a/services/static-webserver/client/scripts/apps_metadata.json b/services/static-webserver/client/scripts/apps_metadata.json
index 6164b2a263ff..38959e460904 100644
--- a/services/static-webserver/client/scripts/apps_metadata.json
+++ b/services/static-webserver/client/scripts/apps_metadata.json
@@ -3,6 +3,9 @@
{
"application": "osparc",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/osparc/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/osparc/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/osparc/browserconfig.xml",
"replace_me_og_title": "oSPARC",
"replace_me_og_description": "open online simulations for Stimulating Peripheral Activity to Relieve Conditions",
"replace_me_og_image": "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/services/static-webserver/client/source/resource/osparc/favicon-osparc.png"
@@ -10,6 +13,9 @@
}, {
"application": "s4l",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/s4l/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/s4l/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/s4l/browserconfig.xml",
"replace_me_og_title": "Sim4Life",
"replace_me_og_description": "Computational life sciences platform that combines computable human phantoms, powerful physics solvers and advanced tissue models.",
"replace_me_og_image": "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images/S4L/Sim4Life-head-default.png"
@@ -17,6 +23,9 @@
}, {
"application": "s4lacad",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/s4l/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/s4l/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/s4l/browserconfig.xml",
"replace_me_og_title": "Sim4Life Science",
"replace_me_og_description": "Sim4Life for Science - Computational life sciences platform that combines computable human phantoms, powerful physics solvers and advanced tissue models.",
"replace_me_og_image": "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images/S4L/Sim4Life-head-academy.png"
@@ -24,6 +33,9 @@
}, {
"application": "s4lengine",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/s4l/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/s4l/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/s4l/browserconfig.xml",
"replace_me_og_title": "Sim4Life Engineering",
"replace_me_og_description": "Sim4Life for Engineers - Computational life sciences platform that combines computable human phantoms, powerful physics solvers and advanced tissue models.",
"replace_me_og_image": "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images/S4L/Sim4Life-head-default.png"
@@ -31,6 +43,9 @@
}, {
"application": "s4ldesktop",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/s4l/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/s4l/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/s4l/browserconfig.xml",
"replace_me_og_title": "Sim4Life (Desktop)",
"replace_me_og_description": "Computational life sciences platform that combines computable human phantoms, powerful physics solvers and advanced tissue models.",
"replace_me_og_image": "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images/S4L/Sim4Life-head-default.png"
@@ -38,6 +53,9 @@
}, {
"application": "s4ldesktopacad",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/s4l/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/s4l/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/s4l/browserconfig.xml",
"replace_me_og_title": "Sim4Life Science (Desktop)",
"replace_me_og_description": "Sim4Life for Science - Computational life sciences platform that combines computable human phantoms, powerful physics solvers and advanced tissue models.",
"replace_me_og_image": "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images/S4L/Sim4Life-head-academy.png"
@@ -45,6 +63,9 @@
}, {
"application": "s4llite",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/s4l/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/s4l/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/s4l/browserconfig.xml",
"replace_me_og_title": "S4L Lite",
"replace_me_og_description": "Sim4Life for Students - Computational life sciences platform that combines computable human phantoms, powerful physics solvers and advanced tissue models.",
"replace_me_og_image": "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/main/app/full/background-images/S4L/Sim4Life-head-lite.png"
@@ -52,6 +73,9 @@
}, {
"application": "tis",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/tis/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/tis/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/tis/browserconfig.xml",
"replace_me_og_title": "TI Plan - IT'IS",
"replace_me_og_description": "A tool powered by o²S²PARC technology that reduces optimization of targeted neurostimulation protocols.",
"replace_me_og_image": "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/services/static-webserver/client/source/resource/osparc/tip_splitimage.png"
@@ -59,6 +83,9 @@
}, {
"application": "tiplite",
"replacements": {
+ "replace_me_favicon_uri": "/resource/osparc/tis/icons/favicon-32x32.png",
+ "replace_me_manifest_uri": "/resource/osparc/tis/manifest.json",
+ "replace_me_browserconfig_uri": "/resource/osparc/tis/browserconfig.xml",
"replace_me_og_title": "TI Plan lite - IT'IS",
"replace_me_og_description": "A tool powered by o²S²PARC technology that reduces optimization of targeted neurostimulation protocols.",
"replace_me_og_image": "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/master/services/static-webserver/client/source/resource/osparc/tip_splitimage.png"
diff --git a/services/static-webserver/client/source/boot/index.html b/services/static-webserver/client/source/boot/index.html
index 48b35c48db9e..11a7ed2808c3 100644
--- a/services/static-webserver/client/source/boot/index.html
+++ b/services/static-webserver/client/source/boot/index.html
@@ -24,10 +24,10 @@
[hunk body lost in extraction — the original <head> markup did not survive; judging from the apps_metadata.json changes above, these lines replace hard-coded favicon references with the new replace_me_favicon_uri placeholder]
@@ -35,16 +35,16 @@
[hunk body lost in extraction — likewise, these lines appear to swap the hard-coded manifest/browserconfig tags for the replace_me_manifest_uri and replace_me_browserconfig_uri placeholders]