diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 66cd8dce4db1..130a6d898210 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -353,7 +353,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/webserver.bash test_with_db 01 - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -392,7 +392,7 @@ jobs: run: ./ci/github/unit-testing/webserver.bash install - name: test run: ./ci/github/unit-testing/webserver.bash test_with_db 02 - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -431,7 +431,7 @@ jobs: run: ./ci/github/unit-testing/webserver.bash install - name: test run: ./ci/github/unit-testing/webserver.bash test_with_db 03 - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -473,7 +473,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/storage.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -517,7 +517,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/agent.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -598,7 +598,7 @@ jobs: - name: OAS backwards compatibility check if: always() run: ./ci/github/unit-testing/api-server.bash openapi-diff - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -640,7 +640,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/autoscaling.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -688,7 +688,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/catalog/test_failures - - uses: 
codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -741,7 +741,7 @@ jobs: source .venv/bin/activate && \ pushd services/clusters-keeper && \ make test-ci-unit - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -789,7 +789,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/datcore-adapter/test_failures - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -824,7 +824,7 @@ jobs: run: ./ci/github/unit-testing/director.bash install - name: test run: ./ci/github/unit-testing/director.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -872,7 +872,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/director-v2/test_failures - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -914,7 +914,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/aws-library.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -956,7 +956,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dask-task-models-library.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -998,7 +998,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dask-sidecar.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1050,7 +1050,7 @@ jobs: source .venv/bin/activate && \ pushd services/osparc-gateway-server && \ make test-ci-unit - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1092,7 +1092,7 @@ jobs: - name: test if: always() run: 
./ci/github/unit-testing/payments.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1134,7 +1134,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dynamic-scheduler.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1186,7 +1186,7 @@ jobs: source .venv/bin/activate && \ pushd services/resource-usage-tracker && \ make test-ci-unit - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1228,7 +1228,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/dynamic-sidecar.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1281,7 +1281,7 @@ jobs: source .venv/bin/activate && \ pushd services/efs-guardian && \ make test-ci-unit - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1395,7 +1395,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/postgres-database.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1437,7 +1437,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/invitations.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1479,7 +1479,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/service-integration.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1521,7 +1521,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/service-library.bash test_all - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1563,7 +1563,7 @@ jobs: - name: test 
if: always() run: ./ci/github/unit-testing/settings-library.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1604,7 +1604,7 @@ jobs: run: ./ci/github/unit-testing/models-library.bash typecheck - name: test run: ./ci/github/unit-testing/models-library.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1691,7 +1691,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/notifications-library.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1735,7 +1735,7 @@ jobs: - name: test if: always() run: ./ci/github/unit-testing/simcore-sdk.bash test - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: unittests #optional @@ -1845,7 +1845,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/webserver.bash clean_up - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests #optional @@ -1907,7 +1907,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/webserver.bash clean_up - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests #optional @@ -1969,7 +1969,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/director-v2.bash clean_up - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests #optional @@ -2035,7 +2035,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/director-v2.bash clean_up - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests #optional @@ -2099,7 +2099,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/dynamic-sidecar.bash clean_up - - uses: 
codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests #optional @@ -2176,7 +2176,7 @@ jobs: run: | pushd services/osparc-gateway-server && \ make down - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests #optional @@ -2238,7 +2238,7 @@ jobs: - name: cleanup if: always() run: ./ci/github/integration-testing/simcore-sdk.bash clean_up - - uses: codecov/codecov-action@v4.5.0 + - uses: codecov/codecov-action@v4.6.0 with: flags: integrationtests diff --git a/Makefile b/Makefile index e8e0113d9013..0c7ee08f86ae 100644 --- a/Makefile +++ b/Makefile @@ -128,7 +128,8 @@ help: ## help on rule's targets test_python_version: ## Check Python version, throw error if compilation would fail with the installed version - python ./scripts/test_python_version.py + # Checking python version + @.venv/bin/python ./scripts/test_python_version.py ## DOCKER BUILD ------------------------------- @@ -332,6 +333,7 @@ printf "$$rows" "Rabbit Dashboard" "http://$(get_my_ip).nip.io:15672" admin admi printf "$$rows" "Redis" "http://$(get_my_ip).nip.io:18081";\ printf "$$rows" "Storage S3 Minio" "http://$(get_my_ip).nip.io:9001" 12345678 12345678;\ printf "$$rows" "Traefik Dashboard" "http://$(get_my_ip).nip.io:8080/dashboard/";\ +printf "$$rows" "Vendor Manual (Fake)" "http://manual.$(get_my_ip).nip.io:9081";\ printf "\n%s\n" "⚠️ if a DNS is not used (as displayed above), the interactive services started via dynamic-sidecar";\ echo "⚠️ will not be shown. 
The frontend accesses them via the uuid.services.YOUR_IP.nip.io:9081"; @@ -481,7 +483,7 @@ push-version: tag-version .venv: .check-uv-installed @uv venv $@ - ## upgrading tools to latest version in $(shell python3 --version) + @echo "# upgrading tools to latest version in" && $@/bin/python --version @uv pip --quiet install --upgrade \ pip~=24.0 \ wheel \ @@ -683,9 +685,9 @@ info-registry: ## info on local registry (if any) ## INFO ------------------------------- -.PHONY: info info-images info-swarm info-tools +.PHONY: info info-images info-swarm info: ## displays setup information - # setup info: + @echo setup info -------------------------------- @echo ' Detected OS : $(IS_LINUX)$(IS_OSX)$(IS_WSL)$(IS_WSL2)$(IS_WIN)' @echo ' SWARM_STACK_NAME : ${SWARM_STACK_NAME}' @echo ' DOCKER_REGISTRY : $(DOCKER_REGISTRY)' @@ -695,19 +697,23 @@ info: ## displays setup information @echo ' - ULR : ${VCS_URL}' @echo ' - REF : ${VCS_REF}' @echo ' - (STATUS)REF_CLIENT : (${VCS_STATUS_CLIENT}) ${VCS_REF_CLIENT}' - @echo ' DIRECTOR_API_VERSION : ${DIRECTOR_API_VERSION}' - @echo ' STORAGE_API_VERSION : ${STORAGE_API_VERSION}' - @echo ' DATCORE_ADAPTER_API_VERSION : ${DATCORE_ADAPTER_API_VERSION}' - @echo ' WEBSERVER_API_VERSION : ${WEBSERVER_API_VERSION}' - # dev tools version - @echo ' make : $(shell make --version 2>&1 | head -n 1)' - @echo ' jq : $(shell jq --version)' + @make --silent info-tools + + +.PHONY: show-tools +info-tools: ## displays tools versions + @echo dev-tools versions ------------------------- @echo ' awk : $(shell awk -W version 2>&1 | head -n 1)' - @echo ' python : $(shell python3 --version)' - @echo ' node : $(shell node --version 2> /dev/null || echo ERROR nodejs missing)' @echo ' docker : $(shell docker --version)' @echo ' docker buildx : $(shell docker buildx version)' @echo ' docker compose: $(shell docker compose version)' + @echo ' jq : $(shell jq --version)' + @echo ' make : $(shell make --version 2>&1 | head -n 1)' + @echo ' node : $(shell node --version 
2> /dev/null || echo ERROR nodejs missing)' + @echo ' python : $(shell python3 --version)' + @echo ' uv : $(shell uv --version 2> /dev/null || echo ERROR uv missing)' + @echo ' ubuntu : $(shell lsb_release --description --short 2> /dev/null | tail || echo ERROR Not an Ubuntu OS )' + define show-meta diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index d1dd688f484f..b2013d579f8b 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -228,7 +228,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in -redis==5.0.8 +redis==5.0.4 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/packages/aws-library/requirements/ci.txt b/packages/aws-library/requirements/ci.txt index f39ca4e36729..bac75da67f80 100644 --- a/packages/aws-library/requirements/ci.txt +++ b/packages/aws-library/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../common-library diff --git a/packages/service-library/src/servicelib/error_codes.py b/packages/common-library/src/common_library/error_codes.py similarity index 99% rename from packages/service-library/src/servicelib/error_codes.py rename to packages/common-library/src/common_library/error_codes.py index 06cd14ac8bc5..13b3b1566daa 100644 --- a/packages/service-library/src/servicelib/error_codes.py +++ b/packages/common-library/src/common_library/error_codes.py @@ -7,7 +7,6 @@ SEE 
test_error_codes for some use cases """ - import re from typing import TYPE_CHECKING, Annotated diff --git a/packages/common-library/src/common_library/errors_classes.py b/packages/common-library/src/common_library/errors_classes.py index f6f08837f7c2..83e40b2a2b0e 100644 --- a/packages/common-library/src/common_library/errors_classes.py +++ b/packages/common-library/src/common_library/errors_classes.py @@ -2,6 +2,8 @@ from pydantic.errors import PydanticErrorMixin +from .error_codes import create_error_code + class _DefaultDict(dict): def __missing__(self, key): @@ -46,3 +48,7 @@ def _get_full_class_name(cls) -> str: def error_context(self): """Returns context in which error occurred and stored within the exception""" return dict(**self.__dict__) + + def error_code(self) -> str: + assert isinstance(self, Exception), "subclass must be exception" # nosec + return create_error_code(self) diff --git a/packages/dask-task-models-library/requirements/ci.txt b/packages/dask-task-models-library/requirements/ci.txt index c8775e00d910..d7fc2c347fa2 100644 --- a/packages/dask-task-models-library/requirements/ci.txt +++ b/packages/dask-task-models-library/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages pytest-simcore @ ../pytest-simcore diff --git a/packages/models-library/requirements/ci.txt b/packages/models-library/requirements/ci.txt index caaa0bb1b315..fa3c1d99410a 100644 --- a/packages/models-library/requirements/ci.txt +++ b/packages/models-library/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../common-library diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py 
index 5863b53b2bc6..01214a39537f 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/ports.py @@ -11,14 +11,14 @@ class OutputStatus(StrAutoEnum): UPLOAD_STARTED = auto() UPLOAD_WAS_ABORTED = auto() UPLOAD_FINISHED_SUCCESSFULLY = auto() - UPLOAD_FINISHED_WITH_ERRROR = auto() + UPLOAD_FINISHED_WITH_ERROR = auto() class InputStatus(StrAutoEnum): DOWNLOAD_STARTED = auto() DOWNLOAD_WAS_ABORTED = auto() DOWNLOAD_FINISHED_SUCCESSFULLY = auto() - DOWNLOAD_FINISHED_WITH_ERRROR = auto() + DOWNLOAD_FINISHED_WITH_ERROR = auto() class _PortStatusCommon(BaseModel): diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py index 1b8c2df34e77..dd8917586035 100644 --- a/packages/models-library/src/models_library/rabbitmq_messages.py +++ b/packages/models-library/src/models_library/rabbitmq_messages.py @@ -190,6 +190,24 @@ def routing_key(self) -> str | None: return None +class DynamicServiceRunningMessage(RabbitMessageBase): + channel_name: Literal["io.simcore.service.dynamic-service-running"] = Field( + default="io.simcore.service.dynamic-service-running" + ) + + project_id: ProjectID + node_id: NodeID + user_id: UserID + product_name: ProductName | None + created_at: datetime.datetime = Field( + default_factory=lambda: arrow.utcnow().datetime, + description="message creation datetime", + ) + + def routing_key(self) -> str | None: + return None + + class RabbitResourceTrackingStartedMessage(RabbitResourceTrackingBaseMessage): message_type: Literal[ RabbitResourceTrackingMessageType.TRACKING_STARTED diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 851b1880cc31..c5c6cbd05012 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ 
b/packages/models-library/src/models_library/service_settings_labels.py @@ -471,7 +471,7 @@ def _not_allowed_in_both_specs(self): return self common_containers = set(self.containers_allowed_outgoing_internet) & set( - self.containers_allowed_outgoing_permit_list.keys() + self.containers_allowed_outgoing_permit_list.keys() # pylint:disable=no-member ) if len(common_containers) > 0: err_msg = ( diff --git a/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py b/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py index a168e2e22c42..4f09ae6a3794 100644 --- a/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py +++ b/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py @@ -1,4 +1,5 @@ # pylint: disable-all + # # wget https://raw.githubusercontent.com/tiangolo/fastapi/master/fastapi/encoders.py --output-document=_original_fastapi_encoders # diff --git a/packages/service-library/tests/test_error_codes.py b/packages/models-library/tests/test_error_codes.py similarity index 95% rename from packages/service-library/tests/test_error_codes.py rename to packages/models-library/tests/test_error_codes.py index f738ebe1e966..5d4d78a5d2b8 100644 --- a/packages/service-library/tests/test_error_codes.py +++ b/packages/models-library/tests/test_error_codes.py @@ -6,7 +6,7 @@ import logging import pytest -from servicelib.error_codes import create_error_code, parse_error_code +from common_library.error_codes import create_error_code, parse_error_code logger = logging.getLogger(__name__) diff --git a/packages/models-library/tests/test_project_nodes.py b/packages/models-library/tests/test_project_nodes.py index 96f427a19cb4..e32bc161995f 100644 --- a/packages/models-library/tests/test_project_nodes.py +++ b/packages/models-library/tests/test_project_nodes.py @@ -1,6 +1,7 @@ # pylint:disable=unused-variable # pylint:disable=unused-argument # pylint:disable=redefined-outer-name +# 
pylint:disable=no-member from typing import Any diff --git a/packages/notifications-library/requirements/ci.txt b/packages/notifications-library/requirements/ci.txt index 3432db2c649d..105d6a514b3e 100644 --- a/packages/notifications-library/requirements/ci.txt +++ b/packages/notifications-library/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../common-library/ diff --git a/packages/postgres-database/docker/Dockerfile b/packages/postgres-database/docker/Dockerfile index c310ad5c53f9..7c3e74870b2d 100644 --- a/packages/postgres-database/docker/Dockerfile +++ b/packages/postgres-database/docker/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -FROM python:3.6-slim as base +FROM python:3.6-slim AS base LABEL maintainer=sanderegg @@ -13,7 +13,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \ ENV PATH="${VIRTUAL_ENV}/bin:$PATH" -FROM base as build +FROM base AS build RUN apt-get update \ && apt-get install -y --no-install-recommends \ @@ -41,7 +41,7 @@ ARG GIT_REPOSITORY RUN git clone --single-branch --branch ${GIT_BRANCH} ${GIT_REPOSITORY} osparc-simcore\ && uv pip install osparc-simcore/packages/postgres-database[migration] -FROM base as production +FROM base AS production ENV PYTHONOPTIMIZE=TRUE diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 91705ca63c53..d6059bacd37a 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -19,7 +19,7 @@ greenlet==3.1.1 # sqlalchemy iniconfig==2.0.0 # via pytest -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt index 6d01f81c8e17..d48ce38c32f2 100644 --- 
a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -26,7 +26,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index 8df1aecf670c..25346140a412 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -10,6 +10,7 @@ --requirement _base.txt --requirement _migration.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../common-library/ diff --git a/packages/postgres-database/scripts/erd/Dockerfile b/packages/postgres-database/scripts/erd/Dockerfile index fae33a3af005..e18bec73e164 100644 --- a/packages/postgres-database/scripts/erd/Dockerfile +++ b/packages/postgres-database/scripts/erd/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base RUN apt-get update \ && apt-get -y install --no-install-recommends\ diff --git a/packages/service-integration/Dockerfile b/packages/service-integration/Dockerfile index 25069fdebbe2..15c1de6cc24e 100644 --- a/packages/service-integration/Dockerfile +++ b/packages/service-integration/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base LABEL maintainer=pcrespov @@ -46,7 +46,7 @@ ENV PATH="${VIRTUAL_ENV}/bin:$PATH" # -------------------------- Build stage ------------------- -FROM base as build +FROM base AS build RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \ --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \ @@ -82,9 +82,9 
@@ RUN \ # -------------------------- Build stage ------------------- -FROM base as development +FROM base AS development # NOTE: this is necessary to allow to build development images but is the same as production here -FROM base as production +FROM base AS production ENV PYTHONOPTIMIZE=TRUE diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index 6cfab1a3f280..e25b10abec12 100644 --- a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/packages/service-integration/requirements/ci.txt b/packages/service-integration/requirements/ci.txt index 46a2035bfc7e..daa95fb5ef9c 100644 --- a/packages/service-integration/requirements/ci.txt +++ b/packages/service-integration/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt simcore-common-library @ ../common-library simcore-models-library @ ../models-library diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 556fc10fa728..20866824d9a7 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -193,7 +193,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -redis==5.0.8 +redis==5.0.4 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff 
--git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 5b44ec09a3de..b4a044b367a5 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -117,7 +117,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index dff0e09f6bbc..c12d45d27034 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/packages/service-library/requirements/ci.txt b/packages/service-library/requirements/ci.txt index b26845819f41..2c748b3f8608 100644 --- a/packages/service-library/requirements/ci.txt +++ b/packages/service-library/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../common-library diff --git a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py index 035a41de5274..d12ea2147d88 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py @@ -12,11 +12,10 @@ from aiohttp import web from aiohttp.web_request import Request from aiohttp.web_response import StreamResponse -from common_library.errors_classes import OsparcErrorMixin +from common_library.error_codes import create_error_code from models_library.utils.json_serialization import json_dumps -from servicelib.error_codes import 
create_error_code -from ..logging_utils import create_troubleshotting_log_message, get_log_record_extra +from ..logging_errors import create_troubleshotting_log_kwargs from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from ..utils import is_production_environ from .rest_models import ErrorItemType, ErrorType, LogMessageType @@ -59,31 +58,23 @@ def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception "request.method": f"{request.method}", "request.path": f"{request.path}", } - if isinstance(err, OsparcErrorMixin): - error_context.update(err.error_context()) - frontend_msg = _FMSG_INTERNAL_ERROR_USER_FRIENDLY_WITH_OEC.format( + user_error_msg = _FMSG_INTERNAL_ERROR_USER_FRIENDLY_WITH_OEC.format( error_code=error_code ) - log_msg = create_troubleshotting_log_message( - message_to_user=frontend_msg, - error=err, - error_code=error_code, - error_context=error_context, - ) - http_error = create_http_error( err, - frontend_msg, + user_error_msg, web.HTTPInternalServerError, skip_internal_error_details=_is_prod, ) _logger.exception( - log_msg, - extra=get_log_record_extra( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_context=error_context, error_code=error_code, - user_id=error_context.get("user_id"), - ), + ) ) raise http_error diff --git a/packages/service-library/src/servicelib/aiohttp/tracing.py b/packages/service-library/src/servicelib/aiohttp/tracing.py index c33e92cc96f4..9947c12be0ba 100644 --- a/packages/service-library/src/servicelib/aiohttp/tracing.py +++ b/packages/service-library/src/servicelib/aiohttp/tracing.py @@ -12,8 +12,8 @@ from opentelemetry.instrumentation.aiohttp_client import ( # pylint:disable=no-name-in-module AioHttpClientInstrumentor, ) -from opentelemetry.instrumentation.aiohttp_server import ( # pylint:disable=no-name-in-module - AioHttpServerInstrumentor, +from opentelemetry.instrumentation.aiohttp_server import ( + middleware as aiohttp_server_opentelemetry_middleware, # 
pylint:disable=no-name-in-module ) from opentelemetry.instrumentation.aiopg import ( # pylint:disable=no-name-in-module AiopgInstrumentor, @@ -72,8 +72,24 @@ def setup_tracing( # Add the span processor to the tracer provider tracer_provider.add_span_processor(BatchSpanProcessor(otlp_exporter)) # type: ignore[attr-defined] # https://github.com/open-telemetry/opentelemetry-python/issues/3713 - # Instrument aiohttp server and client - AioHttpServerInstrumentor().instrument() + # Instrument aiohttp server + # Explanation for custom middleware call DK 10/2024: + # OpenTelemetry Aiohttp autoinstrumentation is meant to be used by only calling `AioHttpServerInstrumentor().instrument()` + # The call `AioHttpServerInstrumentor().instrument()` monkeypatches the __init__() of aiohttp's web.application() to inject the tracing middleware, in it's `__init__()`. + # In simcore, we want to switch tracing on or off using the simcore-settings-library. + # The simcore-settings library in turn depends on the instance of web.application(), i.e. the aiohttp webserver, to exist. So here we face a hen-and-egg problem. 
+ # At the time when the instrumentation should be configured, the instance of web.application already exists and the overwrite to the __init__() is never called + # + # Since the code that is provided (monkeypatched) in the __init__ that the opentelemetry-autoinstrumentation-library provides is only 4 lines, + # just adding a middleware, we are free to simply execute this "missed call" [since we can't call the monkeypatch'ed __init__()] in this following line: + app.middlewares.insert(0, aiohttp_server_opentelemetry_middleware) + # Code of the aiohttp server instrumentation: github.com/open-telemetry/opentelemetry-python-contrib/blob/eccb05c808a7d797ef5b6ecefed3590664426fbf/instrumentation/opentelemetry-instrumentation-aiohttp-server/src/opentelemetry/instrumentation/aiohttp_server/__init__.py#L246 + # For reference, the above statement was written for: + # - osparc-simcore 1.77.x + # - opentelemetry-api==1.27.0 + # - opentelemetry-instrumentation==0.48b0 + + # Instrument aiohttp client AioHttpClientInstrumentor().instrument() if instrument_aiopg: AiopgInstrumentor().instrument() diff --git a/packages/service-library/src/servicelib/logging_errors.py b/packages/service-library/src/servicelib/logging_errors.py new file mode 100644 index 000000000000..f3b19a5ea4f4 --- /dev/null +++ b/packages/service-library/src/servicelib/logging_errors.py @@ -0,0 +1,94 @@ +import logging +from pprint import pformat +from typing import Any, TypedDict + +from common_library.error_codes import ErrorCodeStr +from common_library.errors_classes import OsparcErrorMixin + +from .logging_utils import LogExtra, get_log_record_extra + +_logger = logging.getLogger(__name__) + + +def create_troubleshotting_log_message( + user_error_msg: str, + *, + error: BaseException, + error_code: ErrorCodeStr | None = None, + error_context: dict[str, Any] | None = None, + tip: str | None = None, +) -> str: + """Create a formatted message for _logger.exception(...) 
+ + Arguments: + user_error_msg -- A user-friendly message to be displayed on the front-end explaining the issue in simple terms. + error -- the instance of the handled exception + error_code -- A unique error code (e.g., OEC or osparc-specific) to identify the type or source of the error for easier tracking. + error_context -- Additional context surrounding the exception, such as environment variables or function-specific data. This can be derived from exc.error_context() (relevant when using the OsparcErrorMixin) + tip -- Helpful suggestions or possible solutions explaining why the error may have occurred and how it could potentially be resolved + """ + debug_data = pformat( + { + "exception_type": f"{type(error)}", + "exception_details": f"{error}", + "error_code": error_code, + "context": pformat(error_context, indent=1), + "tip": tip, + }, + indent=1, + ) + + return f"{user_error_msg}.\n{debug_data}" + + +class LogKwargs(TypedDict): + msg: str + extra: LogExtra | None + + +def create_troubleshotting_log_kwargs( + user_error_msg: str, + *, + error: BaseException, + error_code: ErrorCodeStr | None = None, + error_context: dict[str, Any] | None = None, + tip: str | None = None, +) -> LogKwargs: + """ + Creates a dictionary of logging arguments to be used with _log.exception for troubleshooting purposes. + + Usage: + + try: + ... + except MyException as exc: + _logger.exception( + **create_troubleshotting_log_kwargs( + user_error_msg=frontend_msg, + error=exc, + tip="Check row in `groups_extra_properties` for this product. 
It might be missing.", + ) + ) + + """ + # error-context + context = error_context or {} + if isinstance(error, OsparcErrorMixin): + context.update(error.error_context()) + + # compose as log message + log_msg = create_troubleshotting_log_message( + user_error_msg, + error=error, + error_code=error_code, + error_context=context, + tip=tip, + ) + + return { + "msg": log_msg, + "extra": get_log_record_extra( + error_code=error_code, + user_id=context.get("user_id", None), + ), + } diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 235418fd950e..08df2ae1037f 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -14,11 +14,11 @@ from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path -from typing import Any, TypeAlias, TypedDict, TypeVar +from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar +from common_library.error_codes import ErrorCodeStr from models_library.utils.json_serialization import json_dumps -from .error_codes import ErrorCodeStr from .utils_secrets import mask_sensitive_data _logger = logging.getLogger(__name__) @@ -321,9 +321,9 @@ def log_catch(logger: logging.Logger, *, reraise: bool = True) -> Iterator[None] raise exc from exc -class LogExtra(TypedDict, total=False): - log_uid: str - log_oec: str +class LogExtra(TypedDict): + log_uid: NotRequired[str] + log_oec: NotRequired[str] LogLevelInt: TypeAlias = int diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index b3561689f01e..e6de282068cc 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -197,7 +197,7 @@ async def limited_as_completed( *, limit: int = _DEFAULT_LIMITED_CONCURRENCY, tasks_group_prefix: str | None = None, -) -> 
AsyncGenerator[asyncio.Future[T], None]: +) -> AsyncGenerator[asyncio.Task[T], None]: """Runs awaitables using limited concurrent tasks and returns result futures unordered. @@ -214,7 +214,7 @@ async def limited_as_completed( nothing Yields: - Future[T]: the future of the awaitables as they appear. + task[T]: the future of the awaitables as they appear. """ @@ -227,7 +227,7 @@ async def limited_as_completed( is_async = False completed_all_awaitables = False - pending_futures: set[asyncio.Future] = set() + pending_futures: set[asyncio.Task] = set() try: while pending_futures or not completed_all_awaitables: @@ -240,10 +240,11 @@ async def limited_as_completed( if is_async else next(awaitable_iterator) # type: ignore[call-overload] ) - future = asyncio.ensure_future(aw) + future: asyncio.Task = asyncio.ensure_future(aw) if tasks_group_prefix: future.set_name(f"{tasks_group_prefix}-{future.get_name()}") pending_futures.add(future) + except (StopIteration, StopAsyncIteration): # noqa: PERF203 completed_all_awaitables = True if not pending_futures: @@ -254,6 +255,7 @@ async def limited_as_completed( for future in done: yield future + except asyncio.CancelledError: for future in pending_futures: future.cancel() diff --git a/packages/service-library/src/servicelib/utils_secrets.py b/packages/service-library/src/servicelib/utils_secrets.py index 67c440ce044a..389aab962725 100644 --- a/packages/service-library/src/servicelib/utils_secrets.py +++ b/packages/service-library/src/servicelib/utils_secrets.py @@ -1,6 +1,6 @@ import secrets import string -from typing import Final +from typing import Any, Final from pydantic import StrictInt, validate_call @@ -68,7 +68,7 @@ def _is_possibly_sensitive(name: str, sensitive_keywords: set[str]) -> bool: def mask_sensitive_data( - data: dict, *, extra_sensitive_keywords: set[str] | None = None + data: dict[str, Any], *, extra_sensitive_keywords: set[str] | None = None ) -> dict: """Replaces the sensitive values in the dict with a 
placeholder before logging @@ -79,7 +79,7 @@ def mask_sensitive_data( sensitive_keywords = _DEFAULT_SENSITIVE_KEYWORDS | ( extra_sensitive_keywords or set() ) - masked_data = {} + masked_data: dict[str, Any] = {} for key, value in data.items(): if isinstance(value, dict): masked_data[key] = mask_sensitive_data( diff --git a/packages/service-library/tests/aiohttp/test_rest_middlewares.py b/packages/service-library/tests/aiohttp/test_rest_middlewares.py index e70b6963b437..3f9bc537642d 100644 --- a/packages/service-library/tests/aiohttp/test_rest_middlewares.py +++ b/packages/service-library/tests/aiohttp/test_rest_middlewares.py @@ -13,6 +13,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient +from common_library.error_codes import parse_error_code from models_library.utils.json_serialization import json_dumps from servicelib.aiohttp import status from servicelib.aiohttp.rest_middlewares import ( @@ -21,7 +22,6 @@ error_middleware_factory, ) from servicelib.aiohttp.rest_responses import is_enveloped, unwrap_envelope -from servicelib.error_codes import parse_error_code @dataclass diff --git a/packages/service-library/tests/test_logging_errors.py b/packages/service-library/tests/test_logging_errors.py new file mode 100644 index 000000000000..b6b652a46d7c --- /dev/null +++ b/packages/service-library/tests/test_logging_errors.py @@ -0,0 +1,68 @@ +# pylint:disable=redefined-outer-name + +import logging + +import pytest +from common_library.error_codes import create_error_code +from common_library.errors_classes import OsparcErrorMixin +from servicelib.logging_errors import ( + create_troubleshotting_log_kwargs, + create_troubleshotting_log_message, +) + + +def test_create_troubleshotting_log_message(caplog: pytest.LogCaptureFixture): + class MyError(OsparcErrorMixin, RuntimeError): + msg_template = "My error {user_id}" + + with pytest.raises(MyError) as exc_info: + raise MyError(user_id=123, product_name="foo") + + exc = exc_info.value + 
error_code = create_error_code(exc) + + assert exc.error_code() == error_code + + msg = f"Nice message to user [{error_code}]" + + log_msg = create_troubleshotting_log_message( + msg, + error=exc, + error_code=error_code, + error_context=exc.error_context(), + tip="This is a test error", + ) + + log_kwargs = create_troubleshotting_log_kwargs( + msg, + error=exc, + error_code=error_code, + tip="This is a test error", + ) + + assert log_kwargs["msg"] == log_msg + assert log_kwargs["extra"] is not None + assert ( + # pylint: disable=unsubscriptable-object + log_kwargs["extra"]["log_uid"] + == "123" + ), "user_id is injected as extra from context" + + with caplog.at_level(logging.WARNING): + root_logger = logging.getLogger() + root_logger.exception(**log_kwargs) + + # ERROR root:test_logging_utils.py:417 Nice message to user [OEC:126055703573984]. + # { + # "exception_details": "My error 123", + # "error_code": "OEC:126055703573984", + # "context": { + # "user_id": 123, + # "product_name": "foo" + # }, + # "tip": "This is a test error" + # } + + assert error_code in caplog.text + assert "user_id" in caplog.text + assert "product_name" in caplog.text diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index ca92a2759b4b..e92685d2cafb 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -5,9 +5,9 @@ from typing import Any import pytest +from common_library.error_codes import create_error_code from common_library.errors_classes import OsparcErrorMixin from faker import Faker -from servicelib.error_codes import create_error_code from servicelib.logging_utils import ( LogExtra, LogLevelInt, diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt index 643018f44287..4e6ff626122d 100644 --- a/packages/settings-library/requirements/_tools.txt +++ 
b/packages/settings-library/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index aeacf04c2e56..f535a4dc0269 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../common-library/ diff --git a/packages/settings-library/src/settings_library/utils_session.py b/packages/settings-library/src/settings_library/utils_session.py index 48055810d456..3c78f7efa842 100644 --- a/packages/settings-library/src/settings_library/utils_session.py +++ b/packages/settings-library/src/settings_library/utils_session.py @@ -2,7 +2,7 @@ import binascii from typing import Final -DEFAULT_SESSION_COOKIE_NAME: Final[str] = "osparc-sc" +DEFAULT_SESSION_COOKIE_NAME: Final[str] = "osparc-sc2" _32_BYTES_LENGTH: Final[int] = 32 diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 82d952fc3ae8..ef60bd219700 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -54,7 +54,7 @@ class Settings(BaseSettings): @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, info: ValidationInfo): + def _parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! 
if info.field_name and is_nullable(cls.model_fields[info.field_name]): if isinstance(v, str) and v.lower() in ("null", "none"): diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 3df1342c216f..cf0f887c4d25 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -250,7 +250,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in -redis==5.0.8 +redis==5.0.4 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 3a80283eb5c5..3065f3672cbf 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -179,7 +179,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index d1b323ae5dde..e1b3ce3b56ff 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/packages/simcore-sdk/requirements/ci.txt b/packages/simcore-sdk/requirements/ci.txt index afeb04529b30..18aaf5e93a29 100644 --- a/packages/simcore-sdk/requirements/ci.txt +++ b/packages/simcore-sdk/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements
--requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-postgres-database @ ../postgres-database diff --git a/requirements/tools/Dockerfile b/requirements/tools/Dockerfile index 65d6faa3cad0..e6af4395ed65 100644 --- a/requirements/tools/Dockerfile +++ b/requirements/tools/Dockerfile @@ -9,7 +9,7 @@ # # ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base ENV VIRTUAL_ENV=/home/scu/.venv diff --git a/scripts/apt-packages-versions/Dockerfile b/scripts/apt-packages-versions/Dockerfile index 5992008917c5..02708317fe10 100644 --- a/scripts/apt-packages-versions/Dockerfile +++ b/scripts/apt-packages-versions/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base RUN \ apt-get update && \ diff --git a/scripts/common.Makefile b/scripts/common.Makefile index 539eb1bf22f1..abbe2a1d6cf6 100644 --- a/scripts/common.Makefile +++ b/scripts/common.Makefile @@ -98,6 +98,8 @@ clean: ## cleans all unversioned files in project and temp files create by this @git clean $(_GIT_CLEAN_ARGS) + + .PHONY: info inf%: ## displays basic info # system @@ -106,12 +108,6 @@ inf%: ## displays basic info @echo ' NOW_TIMESTAMP : ${NOW_TIMESTAMP}' @echo ' VCS_URL : ${VCS_URL}' @echo ' VCS_REF : ${VCS_REF}' - # dev tools version - @echo ' make : $(shell make --version 2>&1 | head -n 1)' - @echo ' jq : $(shell jq --version)' - @echo ' awk : $(shell awk -W version 2>&1 | head -n 1)' - @echo ' node : $(shell node --version 2> /dev/null || echo ERROR nodejs missing)' - @echo ' python : $(shell python3 --version)' # installed in .venv @uv pip list # package setup diff --git a/scripts/erd/Dockerfile b/scripts/erd/Dockerfile index ce98f367944d..9294fa23e1d7 100644 ---
a/scripts/erd/Dockerfile +++ b/scripts/erd/Dockerfile @@ -8,7 +8,7 @@ # ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base RUN apt-get update \ && apt-get -y install --no-install-recommends\ diff --git a/scripts/pydeps-docker/Dockerfile b/scripts/pydeps-docker/Dockerfile index ed6474279132..610fe9aa6f06 100644 --- a/scripts/pydeps-docker/Dockerfile +++ b/scripts/pydeps-docker/Dockerfile @@ -9,7 +9,7 @@ # # ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base RUN apt-get update \ diff --git a/services/agent/Dockerfile b/services/agent/Dockerfile index fdff90cc5f3b..fe6fef17c35f 100644 --- a/services/agent/Dockerfile +++ b/services/agent/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -62,7 +62,7 @@ RUN \ # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -104,7 +104,7 @@ RUN \ # + /build # + services/agent [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -126,7 +126,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/agent [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -166,7 +166,7 @@ CMD ["/bin/sh", "services/agent/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/agent diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index 10b3e81bd41e..d73f75405782 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -268,6 +268,8 @@ referencing==0.29.3 # -c 
requirements/../../../packages/service-library/requirements/./constraints.txt # jsonschema # jsonschema-specifications +repro-zipfile==0.3.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in requests==2.32.3 # via opentelemetry-exporter-otlp-proto-http rich==13.8.1 diff --git a/services/agent/requirements/_tools.txt b/services/agent/requirements/_tools.txt index 360f0628998a..23facad9ab33 100644 --- a/services/agent/requirements/_tools.txt +++ b/services/agent/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/agent/requirements/ci.txt b/services/agent/requirements/ci.txt index 81006b68c241..5660d901f3e9 100644 --- a/services/agent/requirements/ci.txt +++ b/services/agent/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/api-server/Dockerfile b/services/api-server/Dockerfile index 597f421ab4d8..ced7167fe5c7 100644 --- a/services/api-server/Dockerfile +++ b/services/api-server/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: # cd sercices/api-server @@ -58,7 +58,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -100,7 +100,7 @@ RUN \ # + /build # + services/api-server [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/api-server [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ 
SC_BOOT_MODE=production @@ -160,7 +160,7 @@ CMD ["/bin/sh", "services/api-server/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index 76bdd46ba363..5bc43dad29db 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -58,6 +58,8 @@ anyio==4.3.0 # httpx # starlette # watchfiles +appdirs==1.4.4 + # via pint arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -154,16 +156,17 @@ fastapi==0.99.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in - # fastapi-pagination # prometheus-fastapi-instrumentator -fastapi-pagination==0.12.17 - # via - # -c requirements/./constraints.txt - # -r requirements/_base.in +fastapi-pagination==0.12.31 + # via -r requirements/_base.in faststream==0.5.10 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +flexcache==0.3 + # via pint +flexparser==0.3.1 + # via pint frozenlist==1.4.1 # via # aiohttp @@ -356,7 +359,7 @@ pamqp==3.3.0 # via aiormq parse==1.20.2 # via -r requirements/_base.in -pint==0.23 +pint==0.24.3 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in prometheus-client==0.20.0 # via @@ -564,6 +567,8 @@ typing-extensions==4.10.0 # fastapi # fastapi-pagination # faststream + # flexcache + # flexparser # opentelemetry-sdk # pint # pydantic diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index f4201ab9d4d6..19d8d3501997 100644 --- a/services/api-server/requirements/_test.txt +++ 
b/services/api-server/requirements/_test.txt @@ -187,7 +187,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt index f7033c1523f0..8b1f4a36d831 100644 --- a/services/api-server/requirements/_tools.txt +++ b/services/api-server/requirements/_tools.txt @@ -41,7 +41,7 @@ markupsafe==2.1.5 # jinja2 mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/api-server/requirements/ci.txt b/services/api-server/requirements/ci.txt index 844706d5fb2a..cc1799cee075 100644 --- a/services/api-server/requirements/ci.txt +++ b/services/api-server/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/api-server/requirements/constraints.txt b/services/api-server/requirements/constraints.txt index cfbd5e8be2ad..6247b0001567 100644 --- a/services/api-server/requirements/constraints.txt +++ b/services/api-server/requirements/constraints.txt @@ -40,31 +40,3 @@ aws-sam-translator<1.56.0 # # aws-sam-translator<1.55.0 (from -c ./constraints.txt (line 32)) # # aws-sam-translator>=1.57.0 (from cfn-lint==0.72.10->-c ./constraints.txt (line 33)) cfn-lint<0.72.1 - - - -# -# .venv/lib/python3.10/site-packages/fastapi_pagination/api.py:352: in _update_route -# get_parameterless_sub_dependant( -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:136: in get_parameterless_sub_dependant -# return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path) -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:159: in get_sub_dependant -# sub_dependant = 
get_dependant( -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:310: in get_dependant -# sub_dependant = get_param_sub_dependant( -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:123: in get_param_sub_dependant -# return get_sub_dependant( -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:159: in get_sub_dependant -# sub_dependant = get_dependant( -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:331: in get_dependant -# add_param_to_fields(field=param_field, dependant=dependant) -# _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ - -# def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: -# field_info = cast(params.Param, field.field_info) -# > if field_info.in_ == params.ParamTypes.path: -# E AttributeError: 'FieldInfo' object has no attribute 'in_' - -# .venv/lib/python3.10/site-packages/fastapi/dependencies/utils.py:500: AttributeError - -fastapi-pagination<=0.12.17 diff --git a/services/api-server/src/simcore_service_api_server/exceptions/_base.py b/services/api-server/src/simcore_service_api_server/exceptions/_base.py index 2e0b2e13c4f8..5ea7664ec23e 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/_base.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/_base.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class ApiServerBaseError(OsparcErrorMixin, Exception): diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py index fb9ae8ddc106..fe2befdce63c 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py +++ 
b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py @@ -1,8 +1,9 @@ import logging +from common_library.error_codes import create_error_code from fastapi.requests import Request from fastapi.responses import JSONResponse -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from ._utils import ExceptionHandler, create_error_json_response @@ -30,19 +31,22 @@ async def _http_error_handler( assert request # nosec assert isinstance(exception, exception_cls) # nosec - msg = error_message + user_error_msg = error_message if add_exception_to_message: - msg += f" {exception}" + user_error_msg += f" {exception}" + error_code = create_error_code(exception) if add_oec_to_message: - error_code = create_error_code(exception) - msg += f" [{error_code}]" - _logger.exception( - "Unexpected %s: %s", - exception.__class__.__name__, - msg, - extra={"error_code": error_code}, + user_error_msg += f" [{error_code}]" + + _logger.exception( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exception, + error_code=error_code, + tip="Unexpected error", ) - return create_error_json_response(msg, status_code=status_code) + ) + return create_error_json_response(user_error_msg, status_code=status_code) return _http_error_handler diff --git a/services/api-server/src/simcore_service_api_server/models/pagination.py b/services/api-server/src/simcore_service_api_server/models/pagination.py index 44013b068e5c..6a3f5f96b176 100644 --- a/services/api-server/src/simcore_service_api_server/models/pagination.py +++ b/services/api-server/src/simcore_service_api_server/models/pagination.py @@ -9,30 +9,36 @@ from collections.abc import Sequence from typing import Any, ClassVar, Generic, TypeAlias, TypeVar -from fastapi_pagination.limit_offset import LimitOffsetParams -from fastapi_pagination.links.limit_offset import ( - LimitOffsetPage as _FastApiLimitOffsetPage, -) +from fastapi 
import Query +from fastapi_pagination.customization import CustomizedPage, UseName, UseParamsFields +from fastapi_pagination.limit_offset import LimitOffsetParams as _LimitOffsetParams +from fastapi_pagination.links import LimitOffsetPage as _LimitOffsetPage from models_library.rest_pagination import ( DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, ) from models_library.utils.pydantic_tools_extension import FieldNotRequired -from pydantic import Field, NonNegativeInt, validator +from pydantic import NonNegativeInt, validator from pydantic.generics import GenericModel T = TypeVar("T") -# NOTE: same pagination limits and defaults as web-server -Page = _FastApiLimitOffsetPage.with_custom_options( - limit=Field( - DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, ge=1, le=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE - ) -) -# NOTE: Renamed to make shorter clients name models -Page.__name__ = "Page" - -PaginationParams: TypeAlias = LimitOffsetParams +Page = CustomizedPage[ + _LimitOffsetPage[T], + # Customizes the default and maximum to fit those of the web-server. 
It simplifies interconnection + UseParamsFields( + limit=Query( + DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, + ge=1, + le=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, + description="Page size limit", + ) + ), + # Renames class for the openapi.json to make the python-client's name models shorter + UseName(name="Page"), +] + +PaginationParams: TypeAlias = _LimitOffsetParams class OnePage(GenericModel, Generic[T]): diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index a190d4e182b6..be0050d586d9 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -1,7 +1,7 @@ import datetime import hashlib import logging -from typing import Any, ClassVar, TypeAlias +from typing import Annotated, Any, ClassVar, TypeAlias from uuid import UUID, uuid4 from models_library.projects import ProjectID @@ -9,7 +9,6 @@ from models_library.projects_state import RunningState from pydantic import ( BaseModel, - ConstrainedInt, Extra, Field, HttpUrl, @@ -247,9 +246,7 @@ def resource_name(self) -> str: return self.name -class PercentageInt(ConstrainedInt): - ge = 0 - le = 100 +PercentageInt: TypeAlias = Annotated[int, Field(ge=0, le=100)] class JobStatus(BaseModel): diff --git a/services/api-server/src/simcore_service_api_server/services/log_streaming.py b/services/api-server/src/simcore_service_api_server/services/log_streaming.py index faefc5c905bd..8d810c6b3ed0 100644 --- a/services/api-server/src/simcore_service_api_server/services/log_streaming.py +++ b/services/api-server/src/simcore_service_api_server/services/log_streaming.py @@ -4,10 +4,11 @@ from collections.abc import AsyncIterable from typing import Final +from common_library.error_codes import create_error_code from models_library.rabbitmq_messages import LoggerRabbitMessage from models_library.users import UserID from pydantic 
import NonNegativeInt -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_catch from servicelib.rabbitmq import RabbitMQClient from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError @@ -124,21 +125,26 @@ async def log_generator(self) -> AsyncIterable[str]: yield log.json() + _NEW_LINE except asyncio.TimeoutError: done = await self._project_done() + except BaseBackEndError as exc: _logger.info("%s", f"{exc}") + yield ErrorGet(errors=[f"{exc}"]).json() + _NEW_LINE + except Exception as exc: # pylint: disable=W0718 error_code = create_error_code(exc) + user_error_msg = ( + MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE + f" [{error_code}]" + ) + _logger.exception( - "Unexpected %s: %s", - exc.__class__.__name__, - f"{exc}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exc, + error_code=error_code, + ) ) - yield ErrorGet( - errors=[ - MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE + f" (OEC: {error_code})" - ] - ).json() + _NEW_LINE + yield ErrorGet(errors=[user_error_msg]).json() + _NEW_LINE + finally: await self._log_distributor.deregister(self._job_id) diff --git a/services/autoscaling/Dockerfile b/services/autoscaling/Dockerfile index a4d50b497c96..1821d873057a 100644 --- a/services/autoscaling/Dockerfile +++ b/services/autoscaling/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -78,7 +78,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -120,7 +120,7 @@ RUN \ # + /build # + services/autoscaling [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -142,7 +142,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + 
services/autoscaling [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -179,7 +179,7 @@ CMD ["/bin/sh", "services/autoscaling/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/autoscaling diff --git a/services/autoscaling/requirements/_tools.txt b/services/autoscaling/requirements/_tools.txt index 97a49efc2ebd..dec3b9c204df 100644 --- a/services/autoscaling/requirements/_tools.txt +++ b/services/autoscaling/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/autoscaling/requirements/ci.txt b/services/autoscaling/requirements/ci.txt index 99c6675a7190..74758ddb53ef 100644 --- a/services/autoscaling/requirements/ci.txt +++ b/services/autoscaling/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-aws-library @ ../../packages/aws-library diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile index b87d90b12243..c4f48d2ec08f 100644 --- a/services/catalog/Dockerfile +++ b/services/catalog/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # # USAGE: @@ -59,7 +59,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 +101,7 @@ RUN \ # + /build # + services/catalog [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + 
services/catalog [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -159,7 +159,7 @@ CMD ["/bin/sh", "services/catalog/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 6fdd398def33..c824bb0f6c3e 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -98,7 +98,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index c0a526c13100..97a60860ebb4 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/catalog/requirements/ci.txt b/services/catalog/requirements/ci.txt index 4ebc858a454a..68ad56caa9a7 100644 --- a/services/catalog/requirements/ci.txt +++ b/services/catalog/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/catalog/src/simcore_service_catalog/exceptions/errors.py b/services/catalog/src/simcore_service_catalog/exceptions/errors.py index 8729cb437f58..0384088d37cd 100644 --- a/services/catalog/src/simcore_service_catalog/exceptions/errors.py +++ b/services/catalog/src/simcore_service_catalog/exceptions/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import 
OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class CatalogBaseError(OsparcErrorMixin, Exception): diff --git a/services/clusters-keeper/Dockerfile b/services/clusters-keeper/Dockerfile index cd29d763adfa..2dd08b4b44b4 100644 --- a/services/clusters-keeper/Dockerfile +++ b/services/clusters-keeper/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -78,7 +78,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -120,7 +120,7 @@ RUN \ # + /build # + services/clusters-keeper [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -142,7 +142,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/clusters-keeper [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -179,7 +179,7 @@ CMD ["/bin/sh", "services/clusters-keeper/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/clusters-keeper diff --git a/services/clusters-keeper/requirements/_tools.txt b/services/clusters-keeper/requirements/_tools.txt index 97a49efc2ebd..dec3b9c204df 100644 --- a/services/clusters-keeper/requirements/_tools.txt +++ b/services/clusters-keeper/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/clusters-keeper/requirements/ci.txt b/services/clusters-keeper/requirements/ci.txt index 12125244536d..7bb3a4afb29f 100644 --- a/services/clusters-keeper/requirements/ci.txt +++ b/services/clusters-keeper/requirements/ci.txt @@ -9,6 
+9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-aws-library @ ../../packages/aws-library diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index 01e51037c357..8b8ff0aa2480 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -1,6 +1,6 @@ import datetime from functools import cached_property -from typing import Annotated, Final, Literal, cast +from typing import Final, Literal, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI @@ -13,14 +13,12 @@ from models_library.clusters import InternalClusterAuthentication from pydantic import ( AliasChoices, - BeforeValidator, Field, NonNegativeFloat, NonNegativeInt, PositiveInt, SecretStr, TypeAdapter, - WrapValidator, field_validator, ) from pydantic_settings import SettingsConfigDict @@ -70,6 +68,21 @@ class Config(SSMSettings.Config): } +class ClustersKeeperSSMSettings(SSMSettings): + class Config(SSMSettings.Config): + env_prefix = CLUSTERS_KEEPER_ENV_PREFIX + + schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + "examples": [ + { + f"{CLUSTERS_KEEPER_ENV_PREFIX}{key}": var + for key, var in example.items() + } + for example in SSMSettings.Config.schema_extra["examples"] + ], + } + + class WorkersEC2InstancesSettings(BaseCustomSettings): WORKERS_EC2_INSTANCES_ALLOWED_TYPES: dict[str, EC2InstanceBootSpecific] = Field( ..., @@ -209,7 +222,6 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings): "(see https://docs.docker.com/reference/cli/docker/swarm/init/)", ) - @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") @classmethod def check_valid_instance_names( @@ -286,6 +298,10 @@ class 
ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): auto_default_from_env=True ) + CLUSTERS_KEEPER_SSM_ACCESS: ClustersKeeperSSMSettings | None = Field( + auto_default_from_env=True + ) + CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: PrimaryEC2InstancesSettings | None = Field( json_schema_extra={"auto_default_from_env": True} ) @@ -325,11 +341,9 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", ) - CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: NonNegativeInt = ( - Field( - default=5, - description="Max number of missed heartbeats before a cluster is terminated", - ) + CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: NonNegativeInt = Field( + default=5, + description="Max number of missed heartbeats before a cluster is terminated", ) CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: str = Field( @@ -370,11 +384,14 @@ def LOG_LEVEL(self) -> LogLevel: # noqa: N802 @classmethod def _valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) - - - @field_validator("CLUSTERS_KEEPER_TASK_INTERVAL", "SERVICE_TRACKING_HEARTBEAT", mode="before") + + @field_validator( + "CLUSTERS_KEEPER_TASK_INTERVAL", "SERVICE_TRACKING_HEARTBEAT", mode="before" + ) @classmethod - def _validate_interval(cls, value: str | datetime.timedelta) -> int | datetime.timedelta: + def _validate_interval( + cls, value: str | datetime.timedelta + ) -> int | datetime.timedelta: if isinstance(value, str): return int(value) return value diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index 14c5e0d93c1e..5c661898cf0b 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -103,6 +103,21 @@ def mocked_ssm_server_envs( return setenvs_from_dict(monkeypatch, changed_envs) +@pytest.fixture 
+def mocked_ssm_server_envs( + mocked_ssm_server_settings: SSMSettings, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + # NOTE: overrides the SSMSettings with what clusters-keeper expects + changed_envs: EnvVarsDict = { + f"{CLUSTERS_KEEPER_ENV_PREFIX}{k}": ( + v.get_secret_value() if isinstance(v, SecretStr) else v + ) + for k, v in mocked_ssm_server_settings.dict().items() + } + return setenvs_from_dict(monkeypatch, changed_envs) + + @pytest.fixture def ec2_settings(mocked_ec2_server_settings: EC2Settings) -> EC2Settings: return mocked_ec2_server_settings diff --git a/services/dask-sidecar/Dockerfile b/services/dask-sidecar/Dockerfile index b91cefa9e88d..a85f653f5ccd 100644 --- a/services/dask-sidecar/Dockerfile +++ b/services/dask-sidecar/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM --platform=${TARGETPLATFORM} python:${PYTHON_VERSION}-slim-bookworm as base +FROM --platform=${TARGETPLATFORM} python:${PYTHON_VERSION}-slim-bookworm AS base ARG TARGETPLATFORM ARG BUILDPLATFORM RUN echo "I am running on $BUILDPLATFORM, building for $TARGETPLATFORM" > /log @@ -69,7 +69,7 @@ RUN mkdir --parents /home/scu/.config/dask \ # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -108,7 +108,7 @@ RUN \ # + /build # + services/dask-sidecar [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps @@ -129,7 +129,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/dask-sidecar [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -169,7 +169,7 @@ CMD ["/bin/sh", "services/dask-sidecar/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt index 
97a49efc2ebd..dec3b9c204df 100644 --- a/services/dask-sidecar/requirements/_tools.txt +++ b/services/dask-sidecar/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/dask-sidecar/requirements/ci.txt b/services/dask-sidecar/requirements/ci.txt index 192b4ffcbf07..343964753b09 100644 --- a/services/dask-sidecar/requirements/ci.txt +++ b/services/dask-sidecar/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library/ diff --git a/services/datcore-adapter/Dockerfile b/services/datcore-adapter/Dockerfile index fc4c2fcd4030..bdd52d81841f 100644 --- a/services/datcore-adapter/Dockerfile +++ b/services/datcore-adapter/Dockerfile @@ -1,10 +1,10 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: -# cd sercices/datcore-adapter +# cd services/datcore-adapter # docker build -f Dockerfile -t datcore-adapter:prod --target production ../../ # docker run datcore-adapter:prod # @@ -59,7 +59,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 +101,7 @@ RUN \ # + /build # + services/datcore-adapter [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/datcore-adapter [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -160,7 +160,7 @@ CMD ["/bin/sh", "services/datcore-adapter/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as 
development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/datcore-adapter diff --git a/services/datcore-adapter/requirements/_tools.txt b/services/datcore-adapter/requirements/_tools.txt index 508da70431fe..190dca86c7e8 100644 --- a/services/datcore-adapter/requirements/_tools.txt +++ b/services/datcore-adapter/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/datcore-adapter/requirements/ci.txt b/services/datcore-adapter/requirements/ci.txt index f68013921e7c..95484d40524b 100644 --- a/services/datcore-adapter/requirements/ci.txt +++ b/services/datcore-adapter/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/director-v2/Dockerfile b/services/director-v2/Dockerfile index 4e29e4a3d2ff..4780eb62e333 100644 --- a/services/director-v2/Dockerfile +++ b/services/director-v2/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -59,7 +59,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 +101,7 @@ RUN \ # + /build # + services/director-v2 [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/director-v2 [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -160,7 +160,7 @@ CMD ["/bin/sh", "services/director-v2/docker/boot.sh"] # + /devel WORKDIR 
# + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/director-v2 diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index a151f9fb0ab2..7a524e686196 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -64,6 +64,8 @@ anyio==4.3.0 # httpx # starlette # watchfiles +appdirs==1.4.4 + # via pint arrow==1.3.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in @@ -180,6 +182,10 @@ faststream==0.5.10 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +flexcache==0.3 + # via pint +flexparser==0.3.1 + # via pint frozenlist==1.4.1 # via # aiohttp @@ -420,7 +426,7 @@ partd==1.4.2 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask -pint==0.23 +pint==0.24.3 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in prometheus-client==0.20.0 # via @@ -678,6 +684,8 @@ typing-extensions==4.11.0 # alembic # fastapi # faststream + # flexcache + # flexparser # opentelemetry-sdk # pint # pydantic diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index 22d12c69c200..0b0bcda26306 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -181,7 +181,7 @@ multidict==6.0.5 # aiohttp # async-asgi-testclient # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt index 062a460207d6..8f148883b9c4 100644 --- a/services/director-v2/requirements/_tools.txt +++ 
b/services/director-v2/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/director-v2/requirements/ci.txt b/services/director-v2/requirements/ci.txt index 67de20ae3394..17eacb4cfdaa 100644 --- a/services/director-v2/requirements/ci.txt +++ b/services/director-v2/requirements/ci.txt @@ -10,6 +10,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library/ diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 49fd757e8867..72bdf37e6c7c 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -37,6 +37,7 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import AnyHttpUrl, parse_obj_as from servicelib.async_utils import run_sequentially_in_context +from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RabbitMQRPCClient from simcore_postgres_database.utils_projects_metadata import DBProjectNotFoundError from starlette import status @@ -150,6 +151,7 @@ async def _check_pipeline_startable( _UNKNOWN_NODE: Final[str] = "unknown node" +@log_decorator(_logger) async def _get_project_metadata( project_id: ProjectID, project_repo: ProjectsRepository, @@ -160,7 +162,7 @@ async def _get_project_metadata( project_id ) if project_ancestors.parent_project_uuid is None: - # no parents here + _logger.debug("no parent found for project %s", project_id) return {} assert project_ancestors.parent_node_id is not None # nosec diff --git 
a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 1e575486d42f..141ed981a609 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -10,6 +10,7 @@ from uuid import UUID import arrow +from common_library.error_codes import ErrorCodeStr from common_library.pydantic_basic_types import ConstrainedStr from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate from models_library.api_schemas_directorv2.dynamic_services_service import ( @@ -28,8 +29,15 @@ from models_library.services import RunID from models_library.services_resources import ServiceResourcesDict from models_library.wallets import WalletInfo -from pydantic import AnyHttpUrl, BaseModel, Extra, Field, parse_obj_as, validator -from servicelib.error_codes import ErrorCodeStr +from pydantic import ( + AnyHttpUrl, + BaseModel, + ConstrainedStr, + Extra, + Field, + parse_obj_as, + validator, +) from servicelib.exception_utils import DelayedExceptionHandler from ..constants import ( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index f52607e4bc0a..4f9a8e42b53c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -50,7 +50,7 @@ async def get( ) row: RowProxy | None = await result.first() if not row: - raise ComputationalRunNotFoundError() + raise ComputationalRunNotFoundError return CompRunsAtDB.from_orm(row) async def list( @@ -80,7 +80,7 @@ async def create( project_id: ProjectID, cluster_id: ClusterID, 
iteration: PositiveInt | None = None, - metadata: RunMetadataDict | None, + metadata: RunMetadataDict, use_on_demand_clusters: bool, ) -> CompRunsAtDB: try: @@ -102,13 +102,13 @@ async def create( .values( user_id=user_id, project_uuid=f"{project_id}", - cluster_id=cluster_id - if cluster_id != DEFAULT_CLUSTER_ID - else None, + cluster_id=( + cluster_id if cluster_id != DEFAULT_CLUSTER_ID else None + ), iteration=iteration, result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED], - started=datetime.datetime.now(tz=datetime.timezone.utc), - metadata=jsonable_encoder(metadata) if metadata else None, + started=datetime.datetime.now(tz=datetime.UTC), + metadata=jsonable_encoder(metadata), use_on_demand_clusters=use_on_demand_clusters, ) .returning(literal_column("*")) @@ -146,7 +146,7 @@ async def set_run_result( ) -> CompRunsAtDB | None: values: dict[str, Any] = {"result": RUNNING_STATE_TO_DB[result_state]} if final_state: - values.update({"ended": datetime.datetime.now(tz=datetime.timezone.utc)}) + values.update({"ended": datetime.datetime.now(tz=datetime.UTC)}) return await self.update( user_id, project_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py index 7cd402784d3f..ce3decc2ca65 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py @@ -5,8 +5,9 @@ from copy import deepcopy from math import floor +from common_library.error_codes import create_error_code from fastapi import FastAPI -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from .....core.dynamic_services_settings.scheduler import ( DynamicServicesSchedulerSettings, @@ -139,27 
+140,38 @@ async def observing_single_service( logger.debug("completed observation cycle of %s", f"{service_name=}") except asyncio.CancelledError: # pylint: disable=try-except-raise raise # pragma: no cover - except Exception as e: # pylint: disable=broad-except + except Exception as exc: # pylint: disable=broad-except service_name = scheduler_data.service_name # With unhandled errors, let's generate and ID and send it to the end-user # so that we can trace the logs and debug the issue. + user_error_msg = ( + f"This service ({service_name}) unexpectedly failed." + " Our team has recorded the issue and is working to resolve it as quickly as possible." + " Thank you for your patience." + ) + error_code = create_error_code(exc) - error_code = create_error_code(e) logger.exception( - "Observation of %s unexpectedly failed [%s]", - f"{service_name=} ", - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exc, + error_context={ + "service_name": service_name, + "user_id": scheduler_data.user_id, + }, + error_code=error_code, + tip=f"Observation of {service_name=} unexpectedly failed", + ) ) scheduler_data.dynamic_sidecar.status.update_failing_status( # This message must be human-friendly - f"Upss! 
This service ({service_name}) unexpectedly failed", + user_error_msg, error_code, ) finally: if scheduler_data_copy != scheduler_data: try: await update_scheduler_data_label(scheduler_data) - except GenericDockerError as e: - logger.warning("Skipped labels update, please check:\n %s", f"{e}") + except GenericDockerError as exc: + logger.warning("Skipped labels update, please check:\n %s", f"{exc}") diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 09b727449f28..8dd5527f00a3 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -8,15 +8,16 @@ import datetime import json from collections.abc import Awaitable, Callable, Iterator -from typing import Any +from typing import Any, cast from uuid import uuid4 import pytest import sqlalchemy as sa from _helpers import PublishedProject, RunningProject from faker import Faker +from fastapi.encoders import jsonable_encoder from models_library.clusters import Cluster -from models_library.projects import ProjectAtDB +from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from pydantic.main import BaseModel from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups @@ -25,7 +26,11 @@ from simcore_postgres_database.models.comp_runs import comp_runs from simcore_postgres_database.models.comp_tasks import comp_tasks from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB -from simcore_service_director_v2.models.comp_runs import CompRunsAtDB, RunMetadataDict +from simcore_service_director_v2.models.comp_runs import ( + CompRunsAtDB, + ProjectMetadataDict, + RunMetadataDict, +) from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB, Image from simcore_service_director_v2.utils.computations import to_node_class from simcore_service_director_v2.utils.dask import 
generate_dask_job_id @@ -84,28 +89,36 @@ def creator( "project_id": f"{project.uuid}", "node_id": f"{node_id}", "schema": {"inputs": {}, "outputs": {}}, - "inputs": { - key: json.loads(value.json(by_alias=True, exclude_unset=True)) - if isinstance(value, BaseModel) - else value - for key, value in node_data.inputs.items() - } - if node_data.inputs - else {}, - "outputs": { - key: json.loads(value.json(by_alias=True, exclude_unset=True)) - if isinstance(value, BaseModel) - else value - for key, value in node_data.outputs.items() - } - if node_data.outputs - else {}, + "inputs": ( + { + key: ( + json.loads(value.json(by_alias=True, exclude_unset=True)) + if isinstance(value, BaseModel) + else value + ) + for key, value in node_data.inputs.items() + } + if node_data.inputs + else {} + ), + "outputs": ( + { + key: ( + json.loads(value.json(by_alias=True, exclude_unset=True)) + if isinstance(value, BaseModel) + else value + ) + for key, value in node_data.outputs.items() + } + if node_data.outputs + else {} + ), "image": Image(name=node_data.key, tag=node_data.version).dict( # type: ignore by_alias=True, exclude_unset=True ), # type: ignore "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, - "submit": datetime.datetime.now(tz=datetime.timezone.utc), + "submit": datetime.datetime.now(tz=datetime.UTC), "job_id": generate_dask_job_id( service_key=node_data.key, service_version=node_data.version, @@ -135,9 +148,26 @@ def creator( ) +@pytest.fixture +def project_metadata(faker: Faker) -> ProjectMetadataDict: + return ProjectMetadataDict( + parent_node_id=cast(NodeID, faker.uuid4(cast_to=None)), + parent_node_name=faker.pystr(), + parent_project_id=cast(ProjectID, faker.uuid4(cast_to=None)), + parent_project_name=faker.pystr(), + root_parent_project_id=cast(ProjectID, faker.uuid4(cast_to=None)), + root_parent_project_name=faker.pystr(), + root_parent_node_id=cast(NodeID, faker.uuid4(cast_to=None)), + root_parent_node_name=faker.pystr(), + ) + + 
@pytest.fixture def run_metadata( - osparc_product_name: str, simcore_user_agent: str, faker: Faker + osparc_product_name: str, + simcore_user_agent: str, + project_metadata: ProjectMetadataDict, + faker: Faker, ) -> RunMetadataDict: return RunMetadataDict( node_id_names_map={}, @@ -147,6 +177,7 @@ def run_metadata( user_email=faker.email(), wallet_id=faker.pyint(min_value=1), wallet_name=faker.name(), + project_metadata=project_metadata, ) @@ -171,7 +202,7 @@ def creator( with postgres_db.connect() as conn: result = conn.execute( comp_runs.insert() - .values(**run_config) + .values(**jsonable_encoder(run_config)) .returning(sa.literal_column("*")) ) new_run = CompRunsAtDB.from_orm(result.first()) @@ -298,7 +329,7 @@ async def running_project( project=created_project, state=StateType.RUNNING, progress=0.0, - start=datetime.datetime.now(tz=datetime.timezone.utc), + start=datetime.datetime.now(tz=datetime.UTC), ), runs=runs(user=user, project=created_project, result=StateType.RUNNING), ) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index 2968e96e5db6..d15ab46a4986 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -381,22 +381,17 @@ async def test_misconfigured_pipeline_is_not_scheduled( ) run_entry = CompRunsAtDB.parse_obj(await result.first()) assert run_entry.result == RunningState.ABORTED + assert run_entry.metadata == run_metadata async def _assert_start_pipeline( - aiopg_engine, published_project: PublishedProject, scheduler: BaseCompScheduler + aiopg_engine, + published_project: PublishedProject, + scheduler: BaseCompScheduler, + run_metadata: RunMetadataDict, ) -> list[CompTaskAtDB]: exp_published_tasks = deepcopy(published_project.tasks) assert 
published_project.project.prj_owner - run_metadata = RunMetadataDict( - node_id_names_map={}, - project_name="", - product_name="", - simcore_user_agent="", - user_email="", - wallet_id=231, - wallet_name="", - ) await scheduler.run_new_pipeline( user_id=published_project.project.prj_owner, project_id=published_project.project.uuid, @@ -618,11 +613,12 @@ async def test_proper_pipeline_is_scheduled( # noqa: PLR0915 mocked_clean_task_output_and_log_files_if_invalid: None, instrumentation_rabbit_client_parser: mock.AsyncMock, resource_tracking_rabbit_client_parser: mock.AsyncMock, + run_metadata: RunMetadataDict, ): _mock_send_computation_tasks(published_project.tasks, mocked_dask_client) expected_published_tasks = await _assert_start_pipeline( - aiopg_engine, published_project, scheduler + aiopg_engine, published_project, scheduler, run_metadata ) # ------------------------------------------------------------------------------- @@ -990,10 +986,11 @@ async def test_task_progress_triggers( published_project: PublishedProject, mocked_parse_output_data_fct: None, mocked_clean_task_output_and_log_files_if_invalid: None, + run_metadata: RunMetadataDict, ): _mock_send_computation_tasks(published_project.tasks, mocked_dask_client) expected_published_tasks = await _assert_start_pipeline( - aiopg_engine, published_project, scheduler + aiopg_engine, published_project, scheduler, run_metadata ) # ------------------------------------------------------------------------------- # 1. 
first run will move comp_tasks to PENDING so the worker can take them @@ -1286,10 +1283,11 @@ async def test_running_pipeline_triggers_heartbeat( aiopg_engine: aiopg.sa.engine.Engine, published_project: PublishedProject, resource_tracking_rabbit_client_parser: mock.AsyncMock, + run_metadata: RunMetadataDict, ): _mock_send_computation_tasks(published_project.tasks, mocked_dask_client) expected_published_tasks = await _assert_start_pipeline( - aiopg_engine, published_project, scheduler + aiopg_engine, published_project, scheduler, run_metadata ) # ------------------------------------------------------------------------------- # 1. first run will move comp_tasks to PENDING so the worker can take them diff --git a/services/director/Dockerfile b/services/director/Dockerfile index fbd6bf4b0c69..3449ba3db80a 100644 --- a/services/director/Dockerfile +++ b/services/director/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim-buster as base +FROM python:${PYTHON_VERSION}-slim-buster AS base # # USAGE: # cd sercices/director @@ -67,7 +67,7 @@ EXPOSE 8080 # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -108,7 +108,7 @@ COPY --chown=scu:scu api/specs/director/schemas/node-meta-v0.0.1.json \ # + /build # + services/director [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps WORKDIR /build/services/director ENV SC_BUILD_TARGET=prod-only-deps @@ -124,7 +124,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/director [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -157,7 +157,7 @@ CMD ["services/director/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development ENV NODE_SCHEMA_LOCATION=../../../api/specs/director/schemas/node-meta-v0.0.1.json diff --git 
a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index d8280bd46738..2c6e016526fd 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -1,9 +1,3 @@ -# -# This file is autogenerated by pip-compile with python 3.6 -# To update, run: -# -# pip-compile --output-file=requirements/_base.txt --strip-extras requirements/_base.in -# aiodebug==1.1.2 # via # -r requirements/_base.in @@ -54,9 +48,7 @@ chardet==3.0.4 charset-normalizer==2.0.12 # via requests dataclasses==0.7 - # via - # -r requirements/_base.in - # pydantic + # via -r requirements/_base.in idna==2.8 # via # -r requirements/_base.in @@ -116,7 +108,7 @@ pyyaml==5.4 # simcore-service-library requests==2.27.1 # via -r requirements/_base.in -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#subdirectory=packages/service-library +simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library # via -r requirements/_base.in six==1.12.0 # via diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt index 38ed7220aed8..4c0c50f666dd 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -1,9 +1,3 @@ -# -# This file is autogenerated by pip-compile with python 3.6 -# To update, run: -# -# pip-compile --output-file=requirements/_tools.txt --strip-extras requirements/_tools.in -# appdirs==1.4.4 # via black black==20.8b1 @@ -14,22 +8,14 @@ click==8.0.3 # via # black # pip-tools -dataclasses==0.7 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black -importlib-metadata==2.0.0 - # via - # -c requirements/_test.txt - # click - # pep517 mypy-extensions==0.4.3 # via black pathspec==0.9.0 # via black pep517==0.12.0 # via pip-tools 
+pip==24.2 + # via pip-tools pip-tools==6.4.0 # via -r requirements/_tools.in pyyaml==5.4 @@ -39,6 +25,8 @@ pyyaml==5.4 # watchdog regex==2022.1.18 # via black +setuptools==75.1.0 + # via pip-tools toml==0.10.2 # via # -c requirements/_test.txt @@ -55,12 +43,3 @@ watchdog==2.1.6 # via -r requirements/_tools.in wheel==0.37.1 # via pip-tools -zipp==3.4.0 - # via - # -c requirements/_test.txt - # importlib-metadata - # pep517 - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools diff --git a/services/docker-compose-dev-vendors.yml b/services/docker-compose-dev-vendors.yml index cb2e45910eb0..02b614289bc6 100644 --- a/services/docker-compose-dev-vendors.yml +++ b/services/docker-compose-dev-vendors.yml @@ -14,10 +14,10 @@ services: - io.simcore.zone=${TRAEFIK_SIMCORE_ZONE} - traefik.enable=true - traefik.docker.network=${SWARM_STACK_NAME}_default - # auth + # auth: https://doc.traefik.io/traefik/middlewares/http/forwardauth - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.address=http://${WEBSERVER_HOST}:${WEBSERVER_PORT}/v0/auth:check - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.trustForwardHeader=true - - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.authResponseHeaders=Set-Cookie,osparc-sc + - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.authResponseHeaders=Set-Cookie,osparc-sc2 # routing - traefik.http.services.${SWARM_STACK_NAME}_manual.loadbalancer.server.port=80 - traefik.http.services.${SWARM_STACK_NAME}_manual.loadbalancer.healthcheck.path=/ diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 542d9afa0466..94bcf0c1a8e4 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -108,6 +108,8 @@ services: volumes: - ./efs-guardian:/devel/services/efs-guardian - ../packages:/devel/packages + deploy: + replicas: 0 static-webserver: volumes: diff --git 
a/services/docker-compose.yml b/services/docker-compose.yml index ba8137e0e5ae..c5f8e762ee77 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -408,6 +408,11 @@ services: RABBIT_PORT: ${RABBIT_PORT} RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} + REDIS_HOST: ${REDIS_HOST} + REDIS_PASSWORD: ${REDIS_PASSWORD} + REDIS_PORT: ${REDIS_PORT} + REDIS_SECURE: ${REDIS_SECURE} + REDIS_USER: ${REDIS_USER} SC_USER_ID: ${SC_USER_ID} SC_USER_NAME: ${SC_USER_NAME} EFS_USER_ID: ${EFS_USER_ID} diff --git a/services/dynamic-scheduler/Dockerfile b/services/dynamic-scheduler/Dockerfile index fb2db946f757..bc94b83125a1 100644 --- a/services/dynamic-scheduler/Dockerfile +++ b/services/dynamic-scheduler/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -58,7 +58,7 @@ EXPOSE 8000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 +101,7 @@ RUN \ # + /build # + services/dynamic-scheduler [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/dynamic-scheduler [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -160,7 +160,7 @@ CMD ["/bin/sh", "services/dynamic-scheduler/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/dynamic-scheduler diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt index 3f27c470fe35..df53578298f9 100644 --- a/services/dynamic-scheduler/requirements/_tools.txt +++ b/services/dynamic-scheduler/requirements/_tools.txt @@ -27,7 +27,7 @@ 
isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/dynamic-scheduler/requirements/ci.txt b/services/dynamic-scheduler/requirements/ci.txt index 53b69dc323f4..6b762254f44f 100644 --- a/services/dynamic-scheduler/requirements/ci.txt +++ b/services/dynamic-scheduler/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/dynamic-sidecar/Dockerfile b/services/dynamic-sidecar/Dockerfile index c02ba67c9de1..3be8bae74d09 100644 --- a/services/dynamic-sidecar/Dockerfile +++ b/services/dynamic-sidecar/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: # cd sercices/dynamic-sidecar @@ -99,7 +99,7 @@ RUN mkdir -p "${DYNAMIC_SIDECAR_SHARED_STORE_DIR}" && \ # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -146,7 +146,7 @@ COPY --chown=root:root services/dynamic-sidecar/scripts/Makefile /root # + /build # + services/dynamic-sidecar [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -167,7 +167,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/dynamic-sidecar [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -208,7 +208,7 @@ CMD ["/bin/sh", "services/dynamic-sidecar/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_BOOT_MODE=development diff --git a/services/dynamic-sidecar/requirements/_base.txt 
b/services/dynamic-sidecar/requirements/_base.txt index d3357cc3a98f..3d1836e8ae77 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -75,6 +75,8 @@ anyio==4.3.0 # faststream # httpx # starlette +appdirs==1.4.4 + # via pint arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -159,6 +161,10 @@ faststream==0.5.10 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +flexcache==0.3 + # via pint +flexparser==0.3.1 + # via pint frozenlist==1.4.1 # via # aiohttp @@ -365,7 +371,7 @@ packaging==24.0 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in pamqp==3.3.0 # via aiormq -pint==0.23 +pint==0.24.3 # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in prometheus-client==0.20.0 # via @@ -677,6 +683,8 @@ typing-extensions==4.11.0 # alembic # fastapi # faststream + # flexcache + # flexparser # opentelemetry-sdk # pint # pydantic diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index 740f847d4a74..3a87310dcdb3 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -75,7 +75,7 @@ multidict==6.0.5 # aiohttp # async-asgi-testclient # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index 4eed4827cf84..07f9e730fad8 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git 
a/services/dynamic-sidecar/requirements/ci.txt b/services/dynamic-sidecar/requirements/ci.txt index 3c2e2adeb92c..827161faf6cf 100644 --- a/services/dynamic-sidecar/requirements/ci.txt +++ b/services/dynamic-sidecar/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library/ diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py index b0daf7f881ce..0ce567648d91 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py @@ -5,6 +5,7 @@ from fastapi import FastAPI from models_library.progress_bar import ProgressReport from models_library.rabbitmq_messages import ( + DynamicServiceRunningMessage, EventRabbitMessage, LoggerRabbitMessage, ProgressRabbitMessageNode, @@ -34,6 +35,12 @@ async def post_resource_tracking_message( await _post_rabbit_message(app, message) +async def post_dynamic_service_running_message( + app: FastAPI, message: DynamicServiceRunningMessage +): + await _post_rabbit_message(app, message) + + async def post_log_message( app: FastAPI, log: LogMessageStr, *, log_level: LogLevelInt ) -> None: diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py index eee821145c1a..3c6dca35b510 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py @@ -7,7 +7,8 @@ from typing import NamedTuple import psutil -from servicelib.error_codes import create_error_code +from common_library.error_codes import create_error_code +from 
servicelib.logging_errors import create_troubleshotting_log_kwargs from ..modules.mounted_fs import MountedVolumes @@ -105,17 +106,22 @@ async def async_command( ) except Exception as err: # pylint: disable=broad-except + error_code = create_error_code(err) + user_error_msg = f"Unexpected error [{error_code}]" _logger.exception( - "Process with %s failed unexpectedly [%s]", - f"{command=!r}", - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_context={"command": command, "proc.returncode": proc.returncode}, + error_code=error_code, + tip="Process with command failed unexpectily", + ) ) return CommandResult( success=False, - message=f"Unexpected error [{error_code}]", + message=user_error_msg, command=f"{command}", elapsed=time.time() - start, ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py index ae48f19a973f..6a8c45e35daf 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/notifications/_notifications_ports.py @@ -52,7 +52,7 @@ async def send_output_port_upload_finished_with_error( self, port_key: ServicePortKey ) -> None: await self._send_output_port_status( - port_key, OutputStatus.UPLOAD_FINISHED_WITH_ERRROR + port_key, OutputStatus.UPLOAD_FINISHED_WITH_ERROR ) async def send_input_port_download_started(self, port_key: ServicePortKey) -> None: @@ -74,5 +74,5 @@ async def send_input_port_download_finished_with_error( self, port_key: ServicePortKey ) -> None: await self._send_input_port_status( - port_key, InputStatus.DOWNLOAD_FINISHED_WITH_ERRROR + port_key, InputStatus.DOWNLOAD_FINISHED_WITH_ERROR ) diff --git 
a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py index 031b42ff324a..6da3aa3f00ce 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py @@ -1,9 +1,11 @@ +import asyncio import logging from typing import Final from fastapi import FastAPI from models_library.generated_models.docker_rest_api import ContainerState from models_library.rabbitmq_messages import ( + DynamicServiceRunningMessage, RabbitResourceTrackingHeartbeatMessage, RabbitResourceTrackingStartedMessage, RabbitResourceTrackingStoppedMessage, @@ -19,7 +21,10 @@ are_all_containers_in_expected_states, get_container_states, ) -from ...core.rabbitmq import post_resource_tracking_message +from ...core.rabbitmq import ( + post_dynamic_service_running_message, + post_resource_tracking_message, +) from ...core.settings import ApplicationSettings, ResourceTrackingSettings from ...models.shared_store import SharedStore from ._models import ResourceTrackingState @@ -70,10 +75,21 @@ async def _heart_beat_task(app: FastAPI): ) if are_all_containers_in_expected_states(container_states.values()): - message = RabbitResourceTrackingHeartbeatMessage( + rut_message = RabbitResourceTrackingHeartbeatMessage( service_run_id=settings.DY_SIDECAR_RUN_ID ) - await post_resource_tracking_message(app, message) + dyn_message = DynamicServiceRunningMessage( + project_id=settings.DY_SIDECAR_PROJECT_ID, + node_id=settings.DY_SIDECAR_NODE_ID, + user_id=settings.DY_SIDECAR_USER_ID, + product_name=settings.DY_SIDECAR_PRODUCT_NAME, + ) + await asyncio.gather( + *[ + post_resource_tracking_message(app, rut_message), + post_dynamic_service_running_message(app, dyn_message), + ] + ) else: _logger.info( "heart beat message skipped: 
container_states=%s", container_states diff --git a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py index d9f82f40d585..13533637e30e 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py @@ -289,7 +289,7 @@ async def test_notifier_send_input_port_status( await port_notifier.send_input_port_download_finished_succesfully( port_key ) - case InputStatus.DOWNLOAD_FINISHED_WITH_ERRROR: + case InputStatus.DOWNLOAD_FINISHED_WITH_ERROR: await port_notifier.send_input_port_download_finished_with_error( port_key ) @@ -378,7 +378,7 @@ async def test_notifier_send_output_port_status( await port_notifier.send_output_port_upload_finished_successfully( port_key ) - case OutputStatus.UPLOAD_FINISHED_WITH_ERRROR: + case OutputStatus.UPLOAD_FINISHED_WITH_ERROR: await port_notifier.send_output_port_upload_finished_with_error( port_key ) diff --git a/services/efs-guardian/Dockerfile b/services/efs-guardian/Dockerfile index 5d470ee4af40..229e43563c44 100644 --- a/services/efs-guardian/Dockerfile +++ b/services/efs-guardian/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -96,7 +96,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -138,7 +138,7 @@ RUN \ # + /build # + services/efs-guardian [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -160,7 +160,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/efs-guardian [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -196,7 +196,7 @@ CMD ["/bin/sh", "services/efs-guardian/docker/boot.sh"] # + /devel WORKDIR # + services (mounted 
volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/efs-guardian diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt index 97a49efc2ebd..dec3b9c204df 100644 --- a/services/efs-guardian/requirements/_tools.txt +++ b/services/efs-guardian/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/efs-guardian/requirements/ci.txt b/services/efs-guardian/requirements/ci.txt index 850a17f5f004..cbc9fd29af20 100644 --- a/services/efs-guardian/requirements/ci.txt +++ b/services/efs-guardian/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-aws-library @ ../../packages/aws-library diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index ae439b01898d..b229fc1f376a 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -13,8 +13,11 @@ ) from ..api.rest.routes import setup_api_routes from ..api.rpc.routes import setup_rpc_routes +from ..services.background_tasks_setup import setup as setup_background_tasks from ..services.efs_manager_setup import setup as setup_efs_manager from ..services.modules.rabbitmq import setup as setup_rabbitmq +from ..services.modules.redis import setup as setup_redis +from ..services.process_messages_setup import setup as setup_process_messages from .settings import ApplicationSettings logger = logging.getLogger(__name__) @@ -40,11 +43,14 @@ def create_app(settings: ApplicationSettings) -> 
FastAPI: # PLUGINS SETUP setup_rabbitmq(app) + setup_redis(app) setup_api_routes(app) setup_rpc_routes(app) setup_efs_manager(app) + setup_background_tasks(app) + setup_process_messages(app) # EVENTS async def _on_startup() -> None: diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index 0e9b766bbe8c..5bbee34598e8 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -8,10 +8,18 @@ LogLevel, VersionTag, ) -from pydantic import AliasChoices, Field, PositiveInt, field_validator +from pydantic import ( + AliasChoices, + ByteSize, + Field, + PositiveInt, + field_validator, + parse_obj_as, +) from settings_library.base import BaseCustomSettings from settings_library.efs import AwsEfsSettings from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings @@ -57,6 +65,9 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): EFS_GROUP_NAME: str = Field( description="Linux group name that the EFS and Simcore linux users are part of" ) + EFS_DEFAULT_USER_SERVICE_SIZE_BYTES: ByteSize = Field( + default=parse_obj_as(ByteSize, "500GiB") + ) # RUNTIME ----------------------------------------------------------- EFS_GUARDIAN_DEBUG: bool = Field( @@ -77,13 +88,18 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) - - EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field(json_schema_extra={"auto_default_from_env": True}) - EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field(json_schema_extra={"auto_default_from_env": True}) + EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) + EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) EFS_GUARDIAN_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, description="settings for opentelemetry tracing" + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", + ) - EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field( + EFS_GUARDIAN_REDIS: RedisSettings = Field( json_schema_extra={"auto_default_from_env": True} ) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py new file mode 100644 index 000000000000..8edce477cc78 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py @@ -0,0 +1,18 @@ +import logging + +from fastapi import FastAPI + +from ..core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + + +async def removal_policy_task(app: FastAPI) -> None: + _logger.info("FAKE Removal policy task started (not yet implemented)") + + # After X days of inactivity remove data from EFS + # Probably use `last_modified_data` in the project DB table + # Maybe lock project during this time lock_project() + + app_settings: ApplicationSettings = app.state.settings + assert app_settings # nosec diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py 
b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py new file mode 100644 index 000000000000..0946b82177b9 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py @@ -0,0 +1,73 @@ +import asyncio +import logging +from collections.abc import Awaitable, Callable +from datetime import timedelta +from typing import TypedDict + +from fastapi import FastAPI +from servicelib.background_task import stop_periodic_task +from servicelib.logging_utils import log_catch, log_context +from servicelib.redis_utils import start_exclusive_periodic_task + +from .background_tasks import removal_policy_task +from .modules.redis import get_redis_lock_client + +_logger = logging.getLogger(__name__) + + +class EfsGuardianBackgroundTask(TypedDict): + name: str + task_func: Callable + + +_EFS_GUARDIAN_BACKGROUND_TASKS = [ + EfsGuardianBackgroundTask( + name="efs_removal_policy_task", task_func=removal_policy_task + ) +] + + +def _on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]: + async def _startup() -> None: + with log_context( + _logger, logging.INFO, msg="Efs Guardian startup.." + ), log_catch(_logger, reraise=False): + app.state.efs_guardian_background_tasks = [] + + # Setup periodic tasks + for task in _EFS_GUARDIAN_BACKGROUND_TASKS: + exclusive_task = start_exclusive_periodic_task( + get_redis_lock_client(app), + task["task_func"], + task_period=timedelta(seconds=60), # 1 minute + retry_after=timedelta(seconds=300), # 5 minutes + task_name=task["name"], + app=app, + ) + app.state.efs_guardian_background_tasks.append(exclusive_task) + + return _startup + + +def _on_app_shutdown( + _app: FastAPI, +) -> Callable[[], Awaitable[None]]: + async def _stop() -> None: + with log_context( + _logger, logging.INFO, msg="Efs Guardian shutdown.." 
+ ), log_catch(_logger, reraise=False): + assert _app # nosec + if _app.state.efs_guardian_background_tasks: + await asyncio.gather( + *[ + stop_periodic_task(task) + for task in _app.state.efs_guardian_background_tasks + ] + ) + + return _stop + + +def setup(app: FastAPI) -> None: + app.add_event_handler("startup", _on_app_startup(app)) + app.add_event_handler("shutdown", _on_app_shutdown(app)) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py index 401f38ed1b54..be0460b7e644 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager.py @@ -5,8 +5,10 @@ from fastapi import FastAPI from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID +from pydantic import ByteSize from ..core.settings import ApplicationSettings, get_application_settings +from . 
import efs_manager_utils @dataclass(frozen=True) @@ -52,3 +54,39 @@ async def create_project_specific_data_dir( _dir_path, 0o770 ) # This gives rwx permissions to user and group, and nothing to others return _dir_path + + async def check_project_node_data_directory_exits( + self, project_id: ProjectID, node_id: NodeID + ) -> bool: + _dir_path = ( + self._efs_mounted_path + / self._project_specific_data_base_directory + / f"{project_id}" + / f"{node_id}" + ) + + return _dir_path.exists() + + async def get_project_node_data_size( + self, project_id: ProjectID, node_id: NodeID + ) -> ByteSize: + _dir_path = ( + self._efs_mounted_path + / self._project_specific_data_base_directory + / f"{project_id}" + / f"{node_id}" + ) + + return await efs_manager_utils.get_size_bash_async(_dir_path) + + async def remove_project_node_data_write_permissions( + self, project_id: ProjectID, node_id: NodeID + ) -> None: + _dir_path = ( + self._efs_mounted_path + / self._project_specific_data_base_directory + / f"{project_id}" + / f"{node_id}" + ) + + await efs_manager_utils.remove_write_permissions_bash_async(_dir_path) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_utils.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_utils.py new file mode 100644 index 000000000000..9418fa733db8 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/efs_manager_utils.py @@ -0,0 +1,46 @@ +import asyncio +import logging + +from pydantic import ByteSize + +_logger = logging.getLogger(__name__) + + +async def get_size_bash_async(path) -> ByteSize: + # Create the subprocess + command = ["du", "-sb", path] + process = await asyncio.create_subprocess_exec( + *command, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + # Wait for the subprocess to complete + stdout, stderr = await process.communicate() + + if process.returncode == 0: + # Parse the output + size = 
ByteSize(stdout.decode().split()[0]) + return size + msg = f"Command {' '.join(command)} failed with error code {process.returncode}: {stderr.decode()}" + _logger.error(msg) + raise RuntimeError(msg) + + +async def remove_write_permissions_bash_async(path) -> None: + # Create the subprocess + command = ["chmod", "-R", "a-w", path] + process = await asyncio.create_subprocess_exec( + *command, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + # Wait for the subprocess to complete + _, stderr = await process.communicate() + + if process.returncode == 0: + return + msg = f"Command {' '.join(command)} failed with error code {process.returncode}: {stderr.decode()}" + _logger.error(msg) + raise RuntimeError(msg) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py new file mode 100644 index 000000000000..20cbcc0a4dbf --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py @@ -0,0 +1,29 @@ +import logging +from typing import cast + +from fastapi import FastAPI +from servicelib.redis import RedisClientSDK +from settings_library.redis import RedisDatabase, RedisSettings + +logger = logging.getLogger(__name__) + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.redis_lock_client_sdk = None + settings: RedisSettings = app.state.settings.EFS_GUARDIAN_REDIS + redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS) + app.state.redis_lock_client_sdk = lock_client = RedisClientSDK(redis_locks_dsn) + await lock_client.setup() + + async def on_shutdown() -> None: + redis_lock_client_sdk: None | RedisClientSDK = app.state.redis_lock_client_sdk + if redis_lock_client_sdk: + await redis_lock_client_sdk.shutdown() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_redis_lock_client(app: FastAPI) -> 
RedisClientSDK: + return cast(RedisClientSDK, app.state.redis_lock_client_sdk) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py new file mode 100644 index 000000000000..11c7781bbaee --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py @@ -0,0 +1,66 @@ +import logging + +from fastapi import FastAPI +from models_library.rabbitmq_messages import DynamicServiceRunningMessage +from pydantic import parse_raw_as +from servicelib.logging_utils import log_context +from simcore_service_efs_guardian.services.modules.redis import get_redis_lock_client + +from ..core.settings import get_application_settings +from ..services.efs_manager import EfsManager + +_logger = logging.getLogger(__name__) + + +async def process_dynamic_service_running_message(app: FastAPI, data: bytes) -> bool: + assert app # nosec + rabbit_message: DynamicServiceRunningMessage = parse_raw_as( + DynamicServiceRunningMessage, data + ) + _logger.debug( + "Process dynamic service running msg, project ID: %s node ID: %s, current user: %s", + rabbit_message.project_id, + rabbit_message.node_id, + rabbit_message.user_id, + ) + + settings = get_application_settings(app) + efs_manager: EfsManager = app.state.efs_manager + + dir_exists = await efs_manager.check_project_node_data_directory_exits( + rabbit_message.project_id, node_id=rabbit_message.node_id + ) + if dir_exists is False: + _logger.debug( + "Directory doesn't exists in EFS, project ID: %s node ID: %s, current user: %s", + rabbit_message.project_id, + rabbit_message.node_id, + rabbit_message.user_id, + ) + return True + + size = await efs_manager.get_project_node_data_size( + rabbit_message.project_id, node_id=rabbit_message.node_id + ) + _logger.debug( + "Current directory size: %s, project ID: %s node ID: %s, current user: %s", + size, + rabbit_message.project_id, + 
rabbit_message.node_id, + rabbit_message.user_id, + ) + + if size > settings.EFS_DEFAULT_USER_SERVICE_SIZE_BYTES: + msg = f"Removing write permissions inside of EFS starts for project ID: {rabbit_message.project_id}, node ID: {rabbit_message.node_id}, current user: {rabbit_message.user_id}, size: {size}, upper limit: {settings.EFS_DEFAULT_USER_SERVICE_SIZE_BYTES}" + with log_context(_logger, logging.WARNING, msg=msg): + redis = get_redis_lock_client(app) + async with redis.lock_context( + f"efs_remove_write_permissions-{rabbit_message.project_id=}-{rabbit_message.node_id=}", + blocking=True, + blocking_timeout_s=10, + ): + await efs_manager.remove_project_node_data_write_permissions( + project_id=rabbit_message.project_id, node_id=rabbit_message.node_id + ) + + return True diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages_setup.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages_setup.py new file mode 100644 index 000000000000..d879d1891577 --- /dev/null +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages_setup.py @@ -0,0 +1,66 @@ +import functools +import logging +from collections.abc import Awaitable, Callable + +from fastapi import FastAPI +from models_library.rabbitmq_messages import DynamicServiceRunningMessage +from servicelib.logging_utils import log_catch, log_context +from servicelib.rabbitmq import RabbitMQClient +from settings_library.rabbit import RabbitSettings + +from ..core.settings import ApplicationSettings +from .modules.rabbitmq import get_rabbitmq_client +from .process_messages import process_dynamic_service_running_message + +_logger = logging.getLogger(__name__) + + +_SEC = 1000 # in ms +_MIN = 60 * _SEC # in ms +_HOUR = 60 * _MIN # in ms + +_EFS_MESSAGE_TTL_IN_MS = 2 * _HOUR + + +async def _subscribe_to_rabbitmq(app) -> str: + with log_context(_logger, logging.INFO, msg="Subscribing to rabbitmq channel"): + rabbit_client: 
RabbitMQClient = get_rabbitmq_client(app) + subscribed_queue: str = await rabbit_client.subscribe( + DynamicServiceRunningMessage.get_channel_name(), + message_handler=functools.partial( + process_dynamic_service_running_message, app + ), + exclusive_queue=False, + message_ttl=_EFS_MESSAGE_TTL_IN_MS, + ) + return subscribed_queue + + +def _on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]: + async def _startup() -> None: + with log_context( + _logger, logging.INFO, msg="setup resource tracker" + ), log_catch(_logger, reraise=False): + app_settings: ApplicationSettings = app.state.settings + app.state.efs_guardian_rabbitmq_consumer = None + settings: RabbitSettings | None = app_settings.EFS_GUARDIAN_RABBITMQ + if not settings: + _logger.warning("RabbitMQ client is de-activated in the settings") + return + app.state.efs_guardian_rabbitmq_consumer = await _subscribe_to_rabbitmq(app) + + return _startup + + +def _on_app_shutdown( + _app: FastAPI, +) -> Callable[[], Awaitable[None]]: + async def _stop() -> None: + assert _app # nosec + + return _stop + + +def setup(app: FastAPI) -> None: + app.add_event_handler("startup", _on_app_startup(app)) + app.add_event_handler("shutdown", _on_app_shutdown(app)) diff --git a/services/efs-guardian/tests/unit/conftest.py b/services/efs-guardian/tests/unit/conftest.py index 62e4352e1ccb..da4196ea859e 100644 --- a/services/efs-guardian/tests/unit/conftest.py +++ b/services/efs-guardian/tests/unit/conftest.py @@ -2,7 +2,10 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name +import os import re +import shutil +import stat from collections.abc import AsyncIterator, Callable from pathlib import Path from typing import Awaitable @@ -12,10 +15,13 @@ import simcore_service_efs_guardian import yaml from asgi_lifespan import LifespanManager +from fakeredis.aioredis import FakeRedis from fastapi import FastAPI from httpx import ASGITransport +from pytest_mock import MockerFixture from 
pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.rabbitmq import RabbitMQRPCClient +from settings_library.efs import AwsEfsSettings from settings_library.rabbit import RabbitSettings from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings @@ -26,9 +32,11 @@ "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.faker_projects_data", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", + "pytest_simcore.redis_service", "pytest_simcore.repository_paths", "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", @@ -139,3 +147,28 @@ async def rpc_client( rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], ) -> RabbitMQRPCClient: return await rabbitmq_rpc_client("client") + + +@pytest.fixture +async def mocked_redis_server(mocker: MockerFixture) -> None: + mock_redis = FakeRedis() + mocker.patch("redis.asyncio.from_url", return_value=mock_redis) + + +@pytest.fixture +async def cleanup(app: FastAPI): + + yield + + aws_efs_settings: AwsEfsSettings = app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS + _dir_path = Path(aws_efs_settings.EFS_MOUNTED_PATH) + if _dir_path.exists(): + for root, dirs, files in os.walk(_dir_path): + for name in dirs + files: + file_path = Path(root, name) + # Get the current permissions of the file or directory + current_permissions = Path.stat(file_path).st_mode + # Add write permission for the owner (user) + Path.chmod(file_path, current_permissions | stat.S_IWUSR) + + shutil.rmtree(_dir_path) diff --git a/services/efs-guardian/tests/unit/test_api_health.py b/services/efs-guardian/tests/unit/test_api_health.py index 621543e2d80f..8b42d559e7f7 100644 --- a/services/efs-guardian/tests/unit/test_api_health.py +++ b/services/efs-guardian/tests/unit/test_api_health.py @@ -28,7 
+28,11 @@ def app_environment( ) -async def test_healthcheck(rabbit_service: RabbitSettings, client: httpx.AsyncClient): +async def test_healthcheck( + rabbit_service: RabbitSettings, + mocked_redis_server: None, + client: httpx.AsyncClient, +): response = await client.get("/") response.raise_for_status() assert response.status_code == status.HTTP_200_OK diff --git a/services/efs-guardian/tests/unit/test_efs_guardian_rpc.py b/services/efs-guardian/tests/unit/test_efs_guardian_rpc.py new file mode 100644 index 000000000000..48474a69d454 --- /dev/null +++ b/services/efs-guardian/tests/unit/test_efs_guardian_rpc.py @@ -0,0 +1,73 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from pathlib import Path +from unittest.mock import patch + +import pytest +from faker import Faker +from fastapi import FastAPI +from models_library.projects import ProjectID +from models_library.projects_nodes import NodeID +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager +from simcore_service_efs_guardian.core.settings import AwsEfsSettings + +pytest_simcore_core_services_selection = ["rabbit"] +pytest_simcore_ops_services_selection = [] + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + rabbit_env_vars_dict: EnvVarsDict, # rabbitMQ settings from 'rabbit' service +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **app_environment, + **rabbit_env_vars_dict, + }, + ) + + +async def test_rpc_create_project_specific_data_dir( + mocked_redis_server: None, + rpc_client: RabbitMQRPCClient, + faker: Faker, + app: FastAPI, + project_id: ProjectID, + node_id: NodeID, + cleanup: None, 
+): + aws_efs_settings: AwsEfsSettings = app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS + + _storage_directory_name = faker.word() + + with patch( + "simcore_service_efs_guardian.services.efs_manager.os.chown" + ) as mocked_chown: + result = await efs_manager.create_project_specific_data_dir( + rpc_client, + project_id=project_id, + node_id=node_id, + storage_directory_name=_storage_directory_name, + ) + mocked_chown.assert_called_once() + + assert isinstance(result, Path) + _expected_path = ( + aws_efs_settings.EFS_MOUNTED_PATH + / aws_efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY + / f"{project_id}" + / f"{node_id}" + / _storage_directory_name + ) + assert _expected_path == result + assert _expected_path.exists diff --git a/services/efs-guardian/tests/unit/test_efs_manager.py b/services/efs-guardian/tests/unit/test_efs_manager.py index 42e22c9386d8..5c5c57cf3aba 100644 --- a/services/efs-guardian/tests/unit/test_efs_manager.py +++ b/services/efs-guardian/tests/unit/test_efs_manager.py @@ -4,6 +4,7 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable +import stat from pathlib import Path from unittest.mock import patch @@ -12,9 +13,12 @@ from fastapi import FastAPI from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.rabbitmq import RabbitMQRPCClient -from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager from simcore_service_efs_guardian.core.settings import AwsEfsSettings +from simcore_service_efs_guardian.services.efs_manager import ( + EfsManager, + NodeID, + ProjectID, +) pytest_simcore_core_services_selection = ["rabbit"] pytest_simcore_ops_services_selection = [] @@ -24,7 +28,7 @@ def app_environment( monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict, - rabbit_env_vars_dict: EnvVarsDict, # rabbitMQ settings from 'rabbit' service + rabbit_env_vars_dict: EnvVarsDict, ) -> EnvVarsDict: return setenvs_from_dict( 
monkeypatch, @@ -35,35 +39,106 @@ def app_environment( ) -async def test_rpc_create_project_specific_data_dir( - rpc_client: RabbitMQRPCClient, +def assert_permissions( + file_path: Path, + expected_readable: bool, + expected_writable: bool, + expected_executable: bool, +): + file_stat = Path.stat(file_path) + file_permissions = file_stat.st_mode + is_readable = bool(file_permissions & stat.S_IRUSR) + is_writable = bool(file_permissions & stat.S_IWUSR) + is_executable = bool(file_permissions & stat.S_IXUSR) + + assert ( + is_readable == expected_readable + ), f"Expected readable={expected_readable}, but got readable={is_readable} for {file_path}" + assert ( + is_writable == expected_writable + ), f"Expected writable={expected_writable}, but got writable={is_writable} for {file_path}" + assert ( + is_executable == expected_executable + ), f"Expected executable={expected_executable}, but got executable={is_executable} for {file_path}" + + +async def test_remove_write_access_rights( faker: Faker, + mocked_redis_server: None, app: FastAPI, + cleanup: None, + project_id: ProjectID, + node_id: NodeID, ): aws_efs_settings: AwsEfsSettings = app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS - _project_id = faker.uuid4() - _node_id = faker.uuid4() - _storage_directory_name = faker.name() + _storage_directory_name = faker.word() + _dir_path = ( + aws_efs_settings.EFS_MOUNTED_PATH + / aws_efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY + / f"{project_id}" + / f"{node_id}" + / f"{_storage_directory_name}" + ) + + efs_manager: EfsManager = app.state.efs_manager + + assert ( + await efs_manager.check_project_node_data_directory_exits( + project_id=project_id, node_id=node_id + ) + is False + ) with patch( "simcore_service_efs_guardian.services.efs_manager.os.chown" ) as mocked_chown: - result = await efs_manager.create_project_specific_data_dir( - rpc_client, - project_id=_project_id, - node_id=_node_id, + await efs_manager.create_project_specific_data_dir( + 
project_id=project_id, + node_id=node_id, storage_directory_name=_storage_directory_name, ) - mocked_chown.assert_called_once() + assert mocked_chown.called - assert isinstance(result, Path) - _expected_path = ( - aws_efs_settings.EFS_MOUNTED_PATH - / aws_efs_settings.EFS_PROJECT_SPECIFIC_DATA_DIRECTORY - / _project_id - / _node_id - / _storage_directory_name + assert ( + await efs_manager.check_project_node_data_directory_exits( + project_id=project_id, node_id=node_id + ) + is True + ) + + size_before = await efs_manager.get_project_node_data_size( + project_id=project_id, node_id=node_id + ) + + file_paths = [] + for i in range(3): # Let's create 3 small files for testing + file_path = Path(_dir_path, f"test_file_{i}.txt") + file_path.write_text(f"This is file {i}") + file_paths.append(file_path) + + size_after = await efs_manager.get_project_node_data_size( + project_id=project_id, node_id=node_id ) - assert _expected_path == result - assert _expected_path.exists + assert size_after > size_before + + # Now we will check removal of write permissions + for file_path in file_paths: + assert_permissions( + file_path, + expected_readable=True, + expected_writable=True, + expected_executable=False, + ) + + await efs_manager.remove_project_node_data_write_permissions( + project_id=project_id, node_id=node_id + ) + + for file_path in file_paths: + assert_permissions( + file_path, + expected_readable=True, + expected_writable=False, + expected_executable=False, + ) diff --git a/services/invitations/Dockerfile b/services/invitations/Dockerfile index b7635329edc8..5d935426c484 100644 --- a/services/invitations/Dockerfile +++ b/services/invitations/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -58,7 +58,7 @@ EXPOSE 8000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 
+101,7 @@ RUN \ # + /build # + services/invitations [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/invitations [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -160,7 +160,7 @@ CMD ["/bin/sh", "services/invitations/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/invitations diff --git a/services/invitations/requirements/_tools.txt b/services/invitations/requirements/_tools.txt index d6bba29eee2d..1e4c33279eb2 100644 --- a/services/invitations/requirements/_tools.txt +++ b/services/invitations/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/invitations/requirements/ci.txt b/services/invitations/requirements/ci.txt index 4051e96b178b..bae114603766 100644 --- a/services/invitations/requirements/ci.txt +++ b/services/invitations/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/migration/Dockerfile b/services/migration/Dockerfile index 9cd1988399bb..d70eec97f93a 100644 --- a/services/migration/Dockerfile +++ b/services/migration/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base LABEL maintainer=sanderegg @@ -33,7 +33,7 @@ ENV PATH="${VIRTUAL_ENV}/bin:$PATH" # -------------------------------------------- -FROM 
base as build +FROM base AS build RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \ --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \ @@ -68,7 +68,7 @@ RUN \ # -------------------------------------------- -FROM base as production +FROM base AS production ENV PYTHONOPTIMIZE=TRUE diff --git a/services/migration/requirements/_test.txt b/services/migration/requirements/_test.txt index 0c989c238a4e..2c859c65104c 100644 --- a/services/migration/requirements/_test.txt +++ b/services/migration/requirements/_test.txt @@ -23,7 +23,7 @@ jsonschema==4.23.0 # via -r requirements/_test.in jsonschema-specifications==2023.12.1 # via jsonschema -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/migration/requirements/_tools.txt b/services/migration/requirements/_tools.txt index e775221e68b0..b74fa74ec508 100644 --- a/services/migration/requirements/_tools.txt +++ b/services/migration/requirements/_tools.txt @@ -26,7 +26,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/migration/requirements/ci.txt b/services/migration/requirements/ci.txt index 13e87ee7ed2d..f27407610d3f 100644 --- a/services/migration/requirements/ci.txt +++ b/services/migration/requirements/ci.txt @@ -10,6 +10,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/osparc-gateway-server/Dockerfile b/services/osparc-gateway-server/Dockerfile index 150063ce0fc1..59f55c8b3b6f 100644 --- a/services/osparc-gateway-server/Dockerfile +++ b/services/osparc-gateway-server/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bullseye as base +FROM 
python:${PYTHON_VERSION}-slim-bullseye AS base ARG TARGETPLATFORM ARG BUILDPLATFORM RUN echo "I am running on $BUILDPLATFORM, building for $TARGETPLATFORM" @@ -58,7 +58,7 @@ EXPOSE 8000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -108,7 +108,7 @@ RUN dpkgArch="$(dpkg --print-architecture)";\ # + /build # + services/osparc-gateway-server [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps @@ -128,7 +128,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/osparc-gateway-server [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -164,7 +164,7 @@ CMD ["/bin/sh", "services/osparc-gateway-server/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development diff --git a/services/osparc-gateway-server/requirements/_test.txt b/services/osparc-gateway-server/requirements/_test.txt index 908dca5582ba..797b272793e4 100644 --- a/services/osparc-gateway-server/requirements/_test.txt +++ b/services/osparc-gateway-server/requirements/_test.txt @@ -98,7 +98,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/osparc-gateway-server/requirements/_tools.txt b/services/osparc-gateway-server/requirements/_tools.txt index 985945c7b0ef..4366080afe11 100644 --- a/services/osparc-gateway-server/requirements/_tools.txt +++ b/services/osparc-gateway-server/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/osparc-gateway-server/requirements/ci.txt b/services/osparc-gateway-server/requirements/ci.txt index 52012f537ff8..e30762175d14 
100644 --- a/services/osparc-gateway-server/requirements/ci.txt +++ b/services/osparc-gateway-server/requirements/ci.txt @@ -10,6 +10,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/osparc-gateway-server/tests/system/requirements/_tools.txt b/services/osparc-gateway-server/tests/system/requirements/_tools.txt index ce5d53160d5b..56217c590ee5 100644 --- a/services/osparc-gateway-server/tests/system/requirements/_tools.txt +++ b/services/osparc-gateway-server/tests/system/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/services/osparc-gateway-server/tests/system/requirements/ci.txt b/services/osparc-gateway-server/tests/system/requirements/ci.txt index e7dffa997f3d..684ed6c7887f 100644 --- a/services/osparc-gateway-server/tests/system/requirements/ci.txt +++ b/services/osparc-gateway-server/tests/system/requirements/ci.txt @@ -8,6 +8,7 @@ # installs base + tests requirements --requirement _test.txt +--requirement _tools.txt # installs this repo's packages pytest-simcore @ ../../../../packages/pytest-simcore/ diff --git a/services/payments/Dockerfile b/services/payments/Dockerfile index 692541c29d03..90cbb1c908e6 100644 --- a/services/payments/Dockerfile +++ b/services/payments/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -58,7 +58,7 @@ EXPOSE 8000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 +101,7 @@ RUN \ # + /build # + services/payments [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps 
ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/payments [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -160,7 +160,7 @@ CMD ["/bin/sh", "services/payments/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/payments diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt index 4a42c0d9ccf8..8a2b7c3d174d 100644 --- a/services/payments/requirements/_test.txt +++ b/services/payments/requirements/_test.txt @@ -85,7 +85,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/payments/requirements/_tools.txt b/services/payments/requirements/_tools.txt index 5ac982ce79cc..dffd5038b5ca 100644 --- a/services/payments/requirements/_tools.txt +++ b/services/payments/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/payments/requirements/ci.txt b/services/payments/requirements/ci.txt index 47e283fa955f..562c7eb6d849 100644 --- a/services/payments/requirements/ci.txt +++ b/services/payments/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-common-library @ ../../packages/common-library diff --git a/services/payments/src/simcore_service_payments/services/notifier_email.py b/services/payments/src/simcore_service_payments/services/notifier_email.py index 8eeb09dc7269..29a423837df8 100644 --- 
a/services/payments/src/simcore_service_payments/services/notifier_email.py +++ b/services/payments/src/simcore_service_payments/services/notifier_email.py @@ -5,8 +5,6 @@ from contextlib import asynccontextmanager from email.headerregistry import Address from email.message import EmailMessage -from email.mime.application import MIMEApplication -from pathlib import Path from typing import Final import httpx @@ -17,6 +15,7 @@ from models_library.products import ProductName from models_library.users import UserID from pydantic import EmailStr +from servicelib.logging_errors import create_troubleshotting_log_kwargs from settings_library.email import EmailProtocol, SMTPSettings from tenacity import ( retry, @@ -134,9 +133,6 @@ class _PaymentData: invoice_pdf_url: str -invoice_file_name_pattern = re.compile(r'filename="(?P[^"]+)"') - - def retry_if_status_code(response): return response.status_code in ( 429, @@ -157,15 +153,37 @@ def retry_if_status_code(response): retry=exception_retry_condition | result_retry_condition, wait=wait_exponential(multiplier=1, min=4, max=10), stop=stop_after_attempt(5), - retry_error_callback=lambda _: None, # Return None if all retries fail + reraise=True, ) -async def _get_invoice_pdf(invoice_pdf: str) -> httpx.Response | None: +async def _get_invoice_pdf(invoice_pdf: str) -> httpx.Response: async with httpx.AsyncClient(follow_redirects=True) as client: _response = await client.get(invoice_pdf) _response.raise_for_status() return _response +_INVOICE_FILE_NAME_PATTERN: Final = re.compile(r'filename="(?P[^"]+)"') + + +def _extract_file_name(response: httpx.Response) -> str: + match = _INVOICE_FILE_NAME_PATTERN.search(response.headers["content-disposition"]) + if not match: + error_msg = f"Cannot file pdf invoice {response.request.url}" + raise RuntimeError(error_msg) + + file_name: str = match.group("filename") + return file_name + + +def _guess_file_type(filename: str) -> tuple[str, str]: + mimetype, _encoding = 
mimetypes.guess_type(filename) + if mimetype: + maintype, subtype = mimetype.split("/", maxsplit=1) + else: + maintype, subtype = "application", "octet-stream" + return maintype, subtype + + async def _create_user_email( env: Environment, user: _UserData, @@ -179,64 +197,61 @@ async def _create_user_email( "payment": payment, } - msg = EmailMessage() + email_msg = EmailMessage() - msg["From"] = Address( + email_msg["From"] = Address( display_name=f"{product.display_name} support", addr_spec=product.support_email, ) - msg["To"] = Address( + email_msg["To"] = Address( display_name=f"{user.first_name} {user.last_name}", addr_spec=user.email, ) - msg["Subject"] = env.get_template("notify_payments-subject.txt").render(data) + email_msg["Subject"] = env.get_template("notify_payments-subject.txt").render(data) if product.bcc_email: - msg["Bcc"] = product.bcc_email + email_msg["Bcc"] = product.bcc_email # Body text_template = env.get_template("notify_payments.txt") - msg.set_content(text_template.render(data)) + email_msg.set_content(text_template.render(data)) html_template = env.get_template("notify_payments.html") - msg.add_alternative(html_template.render(data), subtype="html") - - # Invoice attachment (It is important that attachment is added after body) - if pdf_response := await _get_invoice_pdf(payment.invoice_pdf_url): - match = invoice_file_name_pattern.search( - pdf_response.headers["content-disposition"] - ) - if match: - _file_name = match.group("filename") + email_msg.add_alternative(html_template.render(data), subtype="html") - attachment = MIMEApplication(pdf_response.content, Name=_file_name) - attachment["Content-Disposition"] = f"attachment; filename={_file_name}" - msg.attach(attachment) - else: - _logger.error("No match find for email attachment. 
This should not happen.") + if payment.invoice_pdf_url: + try: + # Invoice attachment (It is important that attachment is added after body) + pdf_response = await _get_invoice_pdf(payment.invoice_pdf_url) - return msg + # file + file_name = _extract_file_name(pdf_response) + main_type, sub_type = _guess_file_type(file_name) + pdf_data = pdf_response.content -def _guess_file_type(file_path: Path) -> tuple[str, str]: - assert file_path.is_file() - mimetype, _encoding = mimetypes.guess_type(file_path) - if mimetype: - maintype, subtype = mimetype.split("/", maxsplit=1) - else: - maintype, subtype = "application", "octet-stream" - return maintype, subtype + email_msg.add_attachment( + pdf_data, + filename=file_name, + maintype=main_type, + subtype=sub_type, + ) + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.exception( + **create_troubleshotting_log_kwargs( + "Cannot attach invoice to payment. Email sent w/o attached pdf invoice", + error=exc, + error_context={ + "user": user, + "payment": payment, + "product": product, + }, + tip=f"Check downloading: `wget -v {payment.invoice_pdf_url}`", + ) + ) -def _add_attachments(msg: EmailMessage, file_paths: list[Path]): - for attachment_path in file_paths: - maintype, subtype = _guess_file_type(attachment_path) - msg.add_attachment( - attachment_path.read_bytes(), - filename=attachment_path.name, - maintype=maintype, - subtype=subtype, - ) + return email_msg @asynccontextmanager diff --git a/services/payments/tests/unit/test_services_notifier_email.py b/services/payments/tests/unit/test_services_notifier_email.py index d078d902b1ba..c554c7a2c281 100644 --- a/services/payments/tests/unit/test_services_notifier_email.py +++ b/services/payments/tests/unit/test_services_notifier_email.py @@ -3,13 +3,15 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -from pathlib import Path from types import SimpleNamespace from typing import Any from unittest.mock import AsyncMock, MagicMock 
+import httpx import pytest +import respx from faker import Faker +from fastapi import status from jinja2 import DictLoader, Environment, select_autoescape from models_library.products import ProductName from models_library.users import UserID @@ -24,7 +26,6 @@ from simcore_service_payments.services.notifier_email import ( _PRODUCT_NOTIFICATIONS_TEMPLATES, EmailProvider, - _add_attachments, _create_email_session, _create_user_email, _PaymentData, @@ -60,13 +61,34 @@ def smtp_mock_or_none( return None -@pytest.fixture -def mock_get_invoice(mocker: MockerFixture) -> MagicMock: - _mock_get_invoice = mocker.patch( - "simcore_service_payments.services.notifier_email._get_invoice_pdf" - ) - _mock_get_invoice.return_value = None - return _mock_get_invoice +@pytest.fixture(params=["ok", "ko"]) +def mocked_get_invoice_pdf_response( + request: pytest.FixtureRequest, + respx_mock: respx.MockRouter, + transaction: PaymentsTransactionsDB, +) -> respx.MockRouter: + if request.param == "ok": + file_name = "test-attachment.pdf" + file_content = b"%PDF-1.4 ... (file content here) ..." 
+ + response = httpx.Response( + status.HTTP_200_OK, + content=file_content, + headers={ + "Content-Type": "application/pdf", + "Content-Disposition": f'attachment; filename="{file_name}"', + }, + ) + else: + assert request.param == "ko" + response = httpx.Response( + status.HTTP_404_NOT_FOUND, + text=f"{request.fixturename} is set to '{request.param}'", + ) + + respx_mock.get(transaction.invoice_pdf_url).mock(return_value=response) + + return respx_mock @pytest.fixture @@ -83,14 +105,13 @@ def transaction( async def test_send_email_workflow( app_environment: EnvVarsDict, - tmp_path: Path, faker: Faker, transaction: PaymentsTransactionsDB, user_email: EmailStr, product_name: ProductName, product: dict[str, Any], smtp_mock_or_none: MagicMock | None, - mock_get_invoice: MagicMock, + mocked_get_invoice_pdf_response: respx.MockRouter, ): """ Example of usage with external email and envfile @@ -128,10 +149,6 @@ async def test_send_email_workflow( msg = await _create_user_email(env, user_data, payment_data, product_data) - attachment = tmp_path / "test-attachment.txt" - attachment.write_text(faker.text()) - _add_attachments(msg, [attachment]) - async with _create_email_session(settings) as smtp: await smtp.send_message(msg) @@ -153,7 +170,7 @@ async def test_email_provider( product: dict[str, Any], transaction: PaymentsTransactionsDB, smtp_mock_or_none: MagicMock | None, - mock_get_invoice: MagicMock, + mocked_get_invoice_pdf_response: respx.MockRouter, ): settings = SMTPSettings.create_from_envs() @@ -178,7 +195,6 @@ async def test_email_provider( await provider.notify_payment_completed(user_id=user_id, payment=transaction) assert get_notification_data_mock.called - assert mock_get_invoice.called if smtp_mock_or_none: assert smtp_mock_or_none.called diff --git a/services/resource-usage-tracker/Dockerfile b/services/resource-usage-tracker/Dockerfile index 31001e4f7355..55976c00c538 100644 --- a/services/resource-usage-tracker/Dockerfile +++ 
b/services/resource-usage-tracker/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: @@ -59,7 +59,7 @@ EXPOSE 3000 # # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET=build @@ -101,7 +101,7 @@ RUN \ # + /build # + services/resource-usage-tracker [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -123,7 +123,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + services/resource-usage-tracker [scu:scu] # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production @@ -159,7 +159,7 @@ CMD ["/bin/sh", "services/resource-usage-tracker/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/resource-usage-tracker diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index d1c36a7f469f..4db08363ded8 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -157,7 +157,7 @@ moto==5.0.15 # via -r requirements/_test.in mpmath==1.3.0 # via sympy -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/resource-usage-tracker/requirements/_tools.txt b/services/resource-usage-tracker/requirements/_tools.txt index 44759acdfd41..6565ecfab1fd 100644 --- a/services/resource-usage-tracker/requirements/_tools.txt +++ b/services/resource-usage-tracker/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git 
a/services/resource-usage-tracker/requirements/ci.txt b/services/resource-usage-tracker/requirements/ci.txt index 2f2df03f1d53..697ade6fa5e9 100644 --- a/services/resource-usage-tracker/requirements/ci.txt +++ b/services/resource-usage-tracker/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-aws-library @ ../../packages/aws-library diff --git a/services/static-webserver/client/compile.json b/services/static-webserver/client/compile.json index 780532e3cf01..7ede67de7479 100644 --- a/services/static-webserver/client/compile.json +++ b/services/static-webserver/client/compile.json @@ -56,7 +56,7 @@ "class": "osparc.Application", "theme": "osparc.theme.products.s4l.ThemeDark", "name": "s4llite", - "title": "Sim4Life Lite", + "title": "Sim4Life.lite", "include": [ "iconfont.material.Load", "iconfont.fontawesome5.Load", diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index c2aefdde8771..d52f03e47841 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -93,15 +93,15 @@ qx.Class.define("osparc.auth.ui.LoginView", { createAccountBtn.setLabel(this.tr("Request Account")); } createAccountBtn.addListener("execute", () => { - createAccountBtn.setEnabled(false); - if (createAccountAction === "REGISTER") { + if (window.location.hostname === "tip.itis.swiss") { + this.__openTIPITISSWISSPhaseOutDialog(); + } else if (createAccountAction === "REGISTER") { this.fireEvent("toRegister"); } else if (createAccountAction === "REQUEST_ACCOUNT_FORM") { this.fireEvent("toRequestAccount"); } else if (createAccountAction === "REQUEST_ACCOUNT_INSTRUCTIONS") { osparc.store.Support.openInvitationRequiredDialog(); } - 
createAccountBtn.setEnabled(true); }, this); osparc.utils.Utils.setIdToWidget(createAccountBtn, "loginCreateAccountBtn"); @@ -162,6 +162,21 @@ qx.Class.define("osparc.auth.ui.LoginView", { } }, + __openTIPITISSWISSPhaseOutDialog: function() { + const createAccountWindow = new osparc.ui.window.Dialog("Request Account").set({ + maxWidth: 380 + }); + let message = "This version of the planning tool will be phased out soon and no longer accepts new users."; + message += "
"; + const tipLiteLabel = osparc.utils.Utils.createHTMLLink("TIP.lite", "https://tip-lite.science/"); + const tipLabel = osparc.utils.Utils.createHTMLLink("TIP", "https://tip.science/"); + const hereLabel = osparc.utils.Utils.createHTMLLink("here", "https://itis.swiss/tools-and-systems/ti-planning/overview/"); + message += `Please visit ${tipLiteLabel} or ${tipLabel} instead. See ${hereLabel} for more information.`; + createAccountWindow.setMessage(message); + createAccountWindow.center(); + createAccountWindow.open(); + }, + getEmail: function() { const email = this._form.getItems().email; return email.getValue(); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js b/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js index 49b1f812990c..f31ad07941cf 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js @@ -52,6 +52,10 @@ qx.Class.define("osparc.dashboard.ContextBreadcrumbs", { __rebuild: function() { this._removeAll(); + if (this.getCurrentWorkspaceId() === -2) { + return; + } + if (this.getCurrentFolderId()) { const currentFolder = osparc.store.Folders.getInstance().getFolder(this.getCurrentFolderId()); this.__createUpstreamButtons(currentFolder); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js index effe47c25c4a..cc714440242c 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js @@ -97,7 +97,8 @@ qx.Class.define("osparc.dashboard.Dashboard", { const permissions = osparc.data.Permissions.getInstance(); const tabIconSize = 20; const tabs = [{ - id: "studiesTabBtn", + id: "studiesTab", + buttonId: "studiesTabBtn", label: 
osparc.product.Utils.getStudyAlias({ plural: true, allUpperCase: true @@ -107,7 +108,8 @@ qx.Class.define("osparc.dashboard.Dashboard", { }]; if (permissions.canDo("dashboard.templates.read")) { tabs.push({ - id: "templatesTabBtn", + id: "templatesTab", + buttonId: "templatesTabBtn", label: osparc.product.Utils.getTemplateAlias({ plural: true, allUpperCase: true @@ -118,7 +120,8 @@ qx.Class.define("osparc.dashboard.Dashboard", { } if (permissions.canDo("dashboard.services.read")) { tabs.push({ - id: "servicesTabBtn", + id: "servicesTab", + buttonId: "servicesTabBtn", label: this.tr("SERVICES"), icon: "@FontAwesome5Solid/cogs/"+tabIconSize, buildLayout: this.__createServiceBrowser @@ -126,19 +129,22 @@ qx.Class.define("osparc.dashboard.Dashboard", { } if (permissions.canDo("dashboard.data.read") && osparc.product.Utils.isProduct("osparc")) { tabs.push({ - id: "dataTabBtn", + id: "dataTab", + buttonId: "dataTabBtn", label: this.tr("DATA"), icon: "@FontAwesome5Solid/folder/"+tabIconSize, buildLayout: this.__createDataBrowser }); } - tabs.forEach(({id, label, icon, buildLayout}) => { + tabs.forEach(({id, buttonId, label, icon, buildLayout}) => { const tabPage = new qx.ui.tabview.Page(label, icon).set({ appearance: "dashboard-page" }); + tabPage.id = id; const tabButton = tabPage.getChildControl("button"); tabButton.set({ - minWidth: 50 + minWidth: 50, + maxHeight: 36, }); tabButton.ttt = label; tabButton.getChildControl("label").set({ @@ -148,7 +154,7 @@ qx.Class.define("osparc.dashboard.Dashboard", { visibility: "excluded" }); osparc.utils.Utils.centerTabIcon(tabPage); - osparc.utils.Utils.setIdToWidget(tabButton, id); + osparc.utils.Utils.setIdToWidget(tabButton, buttonId); tabPage.setLayout(new qx.ui.layout.Grow()); const viewLayout = buildLayout.call(this); @@ -157,6 +163,13 @@ qx.Class.define("osparc.dashboard.Dashboard", { viewLayout.resetSelection(); } }, this); + viewLayout.addListener("changeTab", e => { + const activeTab = e.getData(); + const tabFound = 
this.getSelectables().find(s => s.id === activeTab); + if (tabFound) { + this.setSelection([tabFound]); + } + }, this); const scrollerMainView = new qx.ui.container.Scroll(); scrollerMainView.add(viewLayout); tabPage.add(scrollerMainView); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js index 821c9d3455c7..ff567a659cb8 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonBase.js @@ -112,12 +112,10 @@ qx.Class.define("osparc.dashboard.FolderButtonBase", { }, _shouldApplyFilter: function(data) { - console.log("_shouldApplyFilter", data); return false; }, _shouldReactToFilter: function(data) { - console.log("_shouldReactToFilter", data); return false; } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js index 2ae24d22dd8a..4a2a3577e316 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonNew.js @@ -37,6 +37,12 @@ qx.Class.define("osparc.dashboard.GridButtonNew", { }); if (title) { + title = osparc.utils.Utils.replaceTokens( + title, + "replace_me_product_name", + osparc.store.StaticInfo.getInstance().getDisplayName() + ); + const titleLabel = this.getChildControl("title"); titleLabel.set({ value: title, @@ -45,6 +51,12 @@ qx.Class.define("osparc.dashboard.GridButtonNew", { } if (description) { + description = osparc.utils.Utils.replaceTokens( + description, + "replace_me_product_name", + osparc.store.StaticInfo.getInstance().getDisplayName() + ); + const descLabel = this.getChildControl("subtitle-text"); descLabel.setValue(description.toString()); } diff --git 
a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js index b979981a5a29..d9bb0679f46e 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js @@ -34,6 +34,12 @@ qx.Class.define("osparc.dashboard.ListButtonNew", { }); if (title) { + title = osparc.utils.Utils.replaceTokens( + title, + "replace_me_product_name", + osparc.store.StaticInfo.getInstance().getDisplayName() + ); + const titleLabel = this.getChildControl("title"); titleLabel.set({ value: title, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 4d1effc3e377..9082c3a5c2b2 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -42,15 +42,15 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const mainLayoutWithSideSpacers = new qx.ui.container.Composite(new qx.ui.layout.HBox(spacing)) this._addToMainLayout(mainLayoutWithSideSpacers); - this.__leftFilters = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + this.__leftFilters = new qx.ui.container.Composite(new qx.ui.layout.VBox(15)).set({ width: leftColumnWidth }); mainLayoutWithSideSpacers.add(this.__leftFilters); - this.__centerLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + this.__centerLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(15)); mainLayoutWithSideSpacers.add(this.__centerLayout); - const rightColum = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + const rightColum = new qx.ui.container.Composite(new qx.ui.layout.VBox()); mainLayoutWithSideSpacers.add(rightColum, { flex: 1 
}); @@ -77,6 +77,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }, events: { + "changeTab": "qx.event.type.Data", "publishTemplate": "qx.event.type.Data" }, @@ -222,7 +223,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { throw new Error("Abstract method called!"); }, - reloadResources: function() { + reloadMoreResources: function() { throw new Error("Abstract method called!"); }, @@ -236,6 +237,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }); const textField = searchBarFilter.getChildControl("text-field"); osparc.utils.Utils.setIdToWidget(textField, "searchBarFilter-textField-"+this._resourceType); + this._addToLayout(searchBarFilter); }, @@ -356,6 +358,15 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { radioGroup.add(btn); }); + if (this._resourceType === "study") { + const viewMode = osparc.utils.Utils.localCache.getLocalStorageItem("studiesViewMode"); + if (viewMode) { + if (viewMode === "list") { + radioGroup.setSelection([listBtn]); + } + } + } + this._toolbar.add(viewModeLayout); }, @@ -421,7 +432,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { _moreResourcesRequired: function() { if (this._resourcesContainer && this._resourcesContainer.areMoreResourcesRequired(this._loadingResourcesBtn)) { - this.reloadResources(); + this.reloadMoreResources(); } }, @@ -446,6 +457,9 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }, _startStudyById: function(studyId, openCB, cancelCB, isStudyCreation = false) { + if (isStudyCreation) { + this.fireDataEvent("changeTab", "studiesTab"); + } this.self().startStudyById(studyId, openCB, cancelCB, isStudyCreation); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index 2a0abefe5df1..f0d854b2222c 100644 --- 
a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -32,10 +32,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this.__resourcesList = []; this.__groupedContainersList = []; - const containerHeader = this.__containerHeader = new osparc.dashboard.ContextBreadcrumbs(); - this._add(containerHeader); - containerHeader.setVisibility(osparc.utils.DisabledPlugins.isFoldersEnabled() ? "visible" : "excluded"); - const workspacesContainer = this.__workspacesContainer = new osparc.dashboard.ToggleButtonContainer(); workspacesContainer.setVisibility(osparc.utils.DisabledPlugins.isFoldersEnabled() ? "visible" : "excluded"); @@ -64,7 +60,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { init: "grid", nullable: false, event: "changeMode", - apply: "reloadCards" + apply: "__reloadCards" }, groupBy: { @@ -117,8 +113,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { __workspacesList: null, __resourcesList: null, __groupedContainersList: null, - __foldersLayout: null, - __containerHeader: null, __foldersContainer: null, __workspacesContainer: null, __nonGroupedContainer: null, @@ -163,10 +157,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { } }, - getContainerHeader: function() { - return this.__containerHeader; - }, - getFlatList: function() { return this.__nonGroupedContainer; }, @@ -233,7 +223,10 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { position: "bottom-right" }); card.setMenu(menu); - card.subscribeToFilterGroup("searchBarFilter"); + if (resourceData.type !== "study") { + // the backend will do the projects:search + card.subscribeToFilterGroup("searchBarFilter"); + } [ "updateStudy", @@ -284,6 +277,10 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this._removeAll(); }, + __reloadCards: function(mode) { + this.reloadCards(); 
+ }, + __addFoldersContainer: function() { // add foldersContainer dynamically [ @@ -356,7 +353,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { reloadWorkspaces: function() { this.__cleanAll(); - this._add(this.__containerHeader); this._add(this.__workspacesContainer); let workspacesCards = []; this.__workspacesList.forEach(workspaceData => workspacesCards.push(this.__workspaceToCard(workspaceData))); @@ -375,7 +371,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { __createWorkspaceCard: function(workspace) { const card = new osparc.dashboard.WorkspaceButtonItem(workspace); - card.subscribeToFilterGroup("searchBarFilter"); [ "workspaceSelected", "workspaceUpdated", @@ -411,7 +406,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { __createFolderCard: function(folder) { const card = new osparc.dashboard.FolderButtonItem(folder); - card.subscribeToFilterGroup("searchBarFilter"); [ "folderSelected", "folderUpdated", diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js index 88675e851661..a1ae4d742fab 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js @@ -159,7 +159,11 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this.__openResource(); } }) - .catch(() => this.__openButton.setFetching(false)); + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + this.__openButton.setFetching(false); + }); }, __confirmUpdate: function() { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js index 6f224e46d806..b836a93ef44b 100644 --- 
a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js @@ -285,10 +285,6 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { }); }, - resetSharedWithActiveFilter: function() { - this.__removeChips("shared-with"); - this.__filter(); - }, setSharedWithActiveFilter: function(optionId, optionLabel) { this.__removeChips("shared-with"); @@ -353,9 +349,13 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { } }, - __resetFilters: function() { + resetFilters: function() { this.__removeChips(); this.getChildControl("text-field").resetValue(); + }, + + __resetFilters: function() { + this.resetFilters(); this.__filter(); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index cab599b6bc9f..bae554cf0527 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -125,7 +125,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // "Starting..." 
page this._hideLoadingPage(); }) - .catch(console.error); + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + }); }, __getActiveStudy: function() { @@ -148,21 +151,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.data.Permissions.getInstance().canDo("studies.user.read") && osparc.auth.Manager.getInstance().isLoggedIn() ) { + this.__reloadFolders(); this.__reloadStudies(); } else { this.__resetStudiesList(); } }, - __reloadFoldersAndStudies: function() { - this.__reloadFolders(); + reloadMoreResources: function() { this.__reloadStudies(); }, - __reloadFilteredResources: function() { - this.__reloadFilteredStudies(); - }, - __reloadWorkspaces: function() { this.__setWorkspacesToList([]); osparc.store.Workspaces.getInstance().fetchWorkspaces() @@ -175,7 +174,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (osparc.utils.DisabledPlugins.isFoldersEnabled()) { const folderId = this.getCurrentFolderId(); const workspaceId = this.getCurrentWorkspaceId(); - if (workspaceId === -1) { + if (workspaceId === -1 || workspaceId === -2) { return; } this.__setFoldersToList([]); @@ -192,7 +191,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return; } const workspaceId = this.getCurrentWorkspaceId(); - if (workspaceId === -1) { + if (workspaceId === -1) { // shared workspace listing return; } @@ -229,12 +228,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._loadingResourcesBtn.setVisibility("visible"); this.__getNextStudiesRequest() .then(resp => { - if ( - resp["params"]["url"].workspaceId !== this.getCurrentWorkspaceId() || - resp["params"]["url"].folderId !== this.getCurrentFolderId() - ) { - // Context might have been changed while waiting for the response. - // The new call is on the ways and this can be ignored. + // Context might have been changed while waiting for the response. + // The new call is on the way, therefore this response can be ignored. 
+ const contextChanged = this.__didContextChange(resp["params"]["url"]); + if (contextChanged) { return; } @@ -264,54 +261,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } } }) - .catch(err => console.error(err)) - .finally(() => { - this._loadingResourcesBtn.setFetching(false); - this._loadingResourcesBtn.setVisibility(this._resourcesContainer.getFlatList().nextRequest === null ? "excluded" : "visible"); - this._moreResourcesRequired(); - }); - }, - - __reloadFilteredStudies: function(text) { - if (this._loadingResourcesBtn.isFetching()) { - return; - } - this.__resetStudiesList(); - this._loadingResourcesBtn.setFetching(true); - this._loadingResourcesBtn.setVisibility("visible"); - const request = this.__getTextFilteredNextRequest(text); - request - .then(resp => { - const filteredStudies = resp["data"]; - this._resourcesContainer.getFlatList().nextRequest = resp["_links"]["next"]; - this.__addStudiesToList(filteredStudies); - }) - .catch(err => console.error(err)) - .finally(() => { - this._loadingResourcesBtn.setFetching(false); - this._loadingResourcesBtn.setVisibility(this._resourcesContainer.getFlatList().nextRequest === null ? 
"excluded" : "visible"); - this._moreResourcesRequired(); - }); - }, - - __reloadSortedByStudies: function() { - if (this._loadingResourcesBtn.isFetching()) { - return; - } - this.__resetStudiesList(); - this._loadingResourcesBtn.setFetching(true); - this._loadingResourcesBtn.setVisibility("visible"); - const request = this.__getSortedByNextRequest(); - request - .then(resp => { - const sortedStudies = resp["data"]; - this._resourcesContainer.getFlatList().nextRequest = resp["_links"]["next"]; - this.__addStudiesToList(sortedStudies); + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + // stop fetching + if (this._resourcesContainer.getFlatList()) { + this._resourcesContainer.getFlatList().nextRequest = null; + } }) - .catch(err => console.error(err)) .finally(() => { this._loadingResourcesBtn.setFetching(false); - this._loadingResourcesBtn.setVisibility(this._resourcesContainer.getFlatList().nextRequest === null ? "excluded" : "visible"); + if (this._resourcesContainer.getFlatList()) { + this._loadingResourcesBtn.setVisibility(this._resourcesContainer.getFlatList().nextRequest === null ? 
"excluded" : "visible"); + } this._moreResourcesRequired(); }); }, @@ -460,7 +422,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __addNewFolderButton: function() { - if (this.getCurrentWorkspaceId()) { + const currentWorkspaceId = this.getCurrentWorkspaceId(); + if (currentWorkspaceId) { + if (currentWorkspaceId === -1 || currentWorkspaceId === -2) { + return; + } const currentWorkspace = osparc.store.Workspaces.getInstance().getWorkspace(this.getCurrentWorkspaceId()); if (currentWorkspace && !currentWorkspace.getMyAccessRights()["write"]) { // If user can't write in workspace, do not show plus button @@ -644,89 +610,87 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, this); }, - __getNextRequestParams: function() { - if ("nextRequest" in this._resourcesContainer.getFlatList() && - this._resourcesContainer.getFlatList().nextRequest !== null && - osparc.utils.Utils.hasParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "offset") && - osparc.utils.Utils.hasParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "limit") - ) { - return { - offset: osparc.utils.Utils.getParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "offset"), - limit: osparc.utils.Utils.getParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "limit") - }; - } - return null; - }, + __didContextChange: function(reqParams) { + // not needed for the comparison + delete reqParams["type"]; + delete reqParams["limit"]; + delete reqParams["offset"]; - __getNextStudiesRequest: function() { - const params = { - url: { - offset: 0, - limit: osparc.dashboard.ResourceBrowserBase.PAGINATED_STUDIES, - orderBy: JSON.stringify(this.getOrderBy()), + // check the entries in currentParams are the same as the reqParams + const currentParams = this.__getRequestParams(); + let sameContext = true; + Object.entries(currentParams).forEach(([key, value]) => { + sameContext &= key in reqParams && reqParams[key] === value; + }); + return !sameContext; + }, + 
+ __getNextPageParams: function() { + if (this._resourcesContainer.getFlatList() && this._resourcesContainer.getFlatList().nextRequest) { + // Context might have been changed while waiting for the response. + // The new call is on the way, therefore this response can be ignored. + const url = new URL(this._resourcesContainer.getFlatList().nextRequest); + const urlSearchParams = new URLSearchParams(url.search); + const urlParams = {}; + for (const [snakeKey, value] of urlSearchParams.entries()) { + const key = osparc.utils.Utils.snakeToCamel(snakeKey); + urlParams[key] = value === "null" ? null : value; + } + const contextChanged = this.__didContextChange(urlParams); + if ( + !contextChanged && + osparc.utils.Utils.hasParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "offset") && + osparc.utils.Utils.hasParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "limit") + ) { + return { + offset: osparc.utils.Utils.getParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "offset"), + limit: osparc.utils.Utils.getParamFromURL(this._resourcesContainer.getFlatList().nextRequest, "limit") + }; } - }; - const nextRequestParams = this.__getNextRequestParams(); - if (nextRequestParams) { - params.url.offset = nextRequestParams.offset; - params.url.limit = nextRequestParams.limit; - } - const options = { - resolveWResponse: true - }; - - params.url.workspaceId = this.getCurrentWorkspaceId(); - params.url.folderId = this.getCurrentFolderId(); - if (params.url.orderBy) { - return osparc.data.Resources.fetch("studies", "getPageSortBy", params, undefined, options); - } else if (params.url.search) { - return osparc.data.Resources.fetch("studies", "getPageSearch", params, undefined, options); } - return osparc.data.Resources.fetch("studies", "getPage", params, undefined, options); + return null; }, - __getTextFilteredNextRequest: function(text) { - const params = { - url: { - offset: 0, - limit: 
osparc.dashboard.ResourceBrowserBase.PAGINATED_STUDIES, - text - } - }; - const nextRequestParams = this.__getNextRequestParams(); - if (nextRequestParams) { - params.url.offset = nextRequestParams.offset; - params.url.limit = nextRequestParams.limit; + __getRequestParams: function() { + const requestParams = {}; + requestParams.orderBy = JSON.stringify(this.getOrderBy()); + + const filterData = this._searchBarFilter.getFilterData(); + if (filterData.text) { + requestParams.text = encodeURIComponent(filterData.text); // name, description and uuid + return requestParams; } - const options = { - resolveWResponse: true - }; - params.url.workspaceId = this.getCurrentWorkspaceId(); - params.url.folderId = this.getCurrentFolderId(); - return osparc.data.Resources.fetch("studies", "getPageSearch", params, undefined, options); + requestParams.workspaceId = this.getCurrentWorkspaceId(); + requestParams.folderId = this.getCurrentFolderId(); + return requestParams; }, - __getSortedByNextRequest: function() { + __getNextStudiesRequest: function() { const params = { url: { offset: 0, limit: osparc.dashboard.ResourceBrowserBase.PAGINATED_STUDIES, - orderBy: JSON.stringify(this.getOrderBy()) } }; - const nextRequestParams = this.__getNextRequestParams(); - if (nextRequestParams) { - params.url.offset = nextRequestParams.offset; - params.url.limit = nextRequestParams.limit; + + const nextPageParams = this.__getNextPageParams(); + if (nextPageParams) { + params.url.offset = nextPageParams.offset; + params.url.limit = nextPageParams.limit; } const options = { resolveWResponse: true }; - params.url.workspaceId = this.getCurrentWorkspaceId(); - params.url.folderId = this.getCurrentFolderId(); - return osparc.data.Resources.fetch("studies", "getPageSortBy", params, undefined, options); + const requestParams = this.__getRequestParams(); + Object.entries(requestParams).forEach(([key, value]) => { + params.url[key] = value; + }); + if ("text" in requestParams) { + return 
osparc.data.Resources.fetch("studies", "getPageSearch", params, undefined, options); + } + return osparc.data.Resources.fetch("studies", "getPage", params, undefined, options); }, invalidateStudies: function() { @@ -736,8 +700,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __addNewStudyButtons: function() { - if (this.getCurrentWorkspaceId()) { - const currentWorkspace = osparc.store.Workspaces.getInstance().getWorkspace(this.getCurrentWorkspaceId()); + const currentWorkspaceId = this.getCurrentWorkspaceId(); + if (currentWorkspaceId) { + if (currentWorkspaceId === -2) { + return; + } + const currentWorkspace = osparc.store.Workspaces.getInstance().getWorkspace(currentWorkspaceId); if (currentWorkspace && !currentWorkspace.getMyAccessRights()["write"]) { // If user can't write in workspace, do not show plus buttons return; @@ -949,14 +917,25 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const folderId = context["folderId"]; this.__changeContext(workspaceId, folderId); }, this); + + this._searchBarFilter.addListener("filterChanged", e => { + const filterData = e.getData(); + if (filterData.text) { + this.__changeContext(-2, null); + } else { + // Back to My Workspace + this.__changeContext(null, null); + } + }); } }, __changeContext: function(workspaceId, folderId) { if (osparc.utils.DisabledPlugins.isFoldersEnabled()) { if ( - this.getCurrentWorkspaceId() === workspaceId && - this.getCurrentFolderId() === folderId + workspaceId !== -2 && // reload studies for a new search + workspaceId === this.getCurrentWorkspaceId() && + folderId === this.getCurrentFolderId() ) { // didn't really change return; @@ -971,10 +950,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.invalidateStudies(); this._resourcesContainer.setResourcesToList([]); - if (workspaceId === -1) { + if (workspaceId === -2) { + // Search result: no folders, just studies + this.__setFoldersToList([]); + this.__reloadStudies(); + } else if (workspaceId === -1) { + // 
Workspaces + this._searchBarFilter.resetFilters(); this.__reloadWorkspaces(); } else { - this.__reloadFoldersAndStudies(); + // Actual workspace + this._searchBarFilter.resetFilters(); + this.__reloadFolders(); + this.__reloadStudies(); } // notify workspaceHeader @@ -1002,35 +990,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.utils.Utils.setIdToWidget(sortByButton, "sortByButton"); sortByButton.addListener("sortByChanged", e => { this.setOrderBy(e.getData()) - this.__reloadSortedByStudies(); + this.__resetStudiesList(); + this.__reloadStudies(); }, this); this._toolbar.add(sortByButton); }, - __addShowSharedWithButton: function() { - const sharedWithButton = new osparc.dashboard.SharedWithMenuButton("study"); - sharedWithButton.set({ - appearance: "form-button-outlined" - }); - osparc.utils.Utils.setIdToWidget(sharedWithButton, "sharedWithButton"); - - sharedWithButton.addListener("sharedWith", e => { - const option = e.getData(); - this._searchBarFilter.setSharedWithActiveFilter(option.id, option.label); - }, this); - this._searchBarFilter.addListener("filterChanged", e => { - const filterData = e.getData(); - if (filterData.text) { - this.__reloadFilteredResources(filterData.text); - } else { - this.__reloadFoldersAndStudies(); - } - sharedWithButton.filterChanged(filterData); - }, this); - - this._toolbar.add(sharedWithButton); - }, - __createLoadMoreButton: function() { const mode = this._resourcesContainer.getMode(); const loadMoreBtn = this._loadingResourcesBtn = (mode === "grid") ? 
new osparc.dashboard.GridButtonLoadMore() : new osparc.dashboard.ListButtonLoadMore(); @@ -1097,7 +1062,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__moveStudyToFolder(studyData, destFolderId), ]) .then(() => this.__removeFromStudyList(studyData["uuid"])) - .catch(err => console.error(err)); + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + }); }); this.resetSelection(); this.setMultiSelection(false); @@ -1156,6 +1124,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { })) }); this.bind("multiSelection", selectButton, "value"); + this.bind("currentWorkspaceId", selectButton, "visibility", { + converter: currentWorkspaceId => [-2, -1].includes(currentWorkspaceId) ? "excluded" : "visible" + }); return selectButton; }, @@ -1393,7 +1364,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => this._updateStudyData(studyData)) .catch(err => { console.error(err); - const msg = this.tr("Something went wrong Renaming"); + const msg = err.message || this.tr("Something went wrong Renaming"); osparc.FlashMessenger.logAs(msg, "ERROR"); }); }, @@ -1403,7 +1374,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { .then(() => this._updateStudyData(studyData)) .catch(err => { console.error(err); - const msg = this.tr("Something went wrong updating the Thumbnail"); + const msg = err.message || this.tr("Something went wrong updating the Thumbnail"); osparc.FlashMessenger.logAs(msg, "ERROR"); }); }, @@ -1444,7 +1415,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__moveStudyToFolder(studyData, destFolderId), ]) .then(() => this.__removeFromStudyList(studyData["uuid"])) - .catch(err => console.error(err)); + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + }); }; if (destWorkspaceId === currentWorkspaceId) { moveStudy(); @@ -1577,8 +1551,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const pollTasks = osparc.data.PollTasks.getInstance(); 
pollTasks.createPollingTask(fetchPromise, interval) .then(task => this.__taskDuplicateReceived(task, studyData["name"])) - .catch(errMsg => { - const msg = this.tr("Something went wrong Duplicating the study
") + errMsg; + .catch(err => { + console.error(err); + const msg = err.message || this.tr("Something went wrong Duplicating"); osparc.FlashMessenger.logAs(msg, "ERROR"); }); }, @@ -1596,8 +1571,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { exportTask.setSubtitle(textSuccess); }; osparc.utils.Utils.downloadLink(url, "POST", null, progressCB) - .catch(e => { - const msg = osparc.data.Resources.getErrorMsg(JSON.parse(e.response)) || this.tr("Something went wrong Exporting the study"); + .catch(err => { + console.error(err); + const msg = osparc.data.Resources.getErrorMsg(JSON.parse(err.response)) || this.tr("Something went wrong Exporting the study"); osparc.FlashMessenger.logAs(msg, "ERROR"); }) .finally(() => { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js index 844004de6986..22f238b0fd11 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonBase.js @@ -242,12 +242,10 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonBase", { }, _shouldApplyFilter: function(data) { - console.log("_shouldApplyFilter", data); return false; }, _shouldReactToFilter: function(data) { - console.log("_shouldReactToFilter", data); return false; } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js index a8a96135663b..819a8bf07bb3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceHeader.js @@ -100,7 +100,7 @@ qx.Class.define("osparc.dashboard.WorkspaceHeader", { }); this._add(control); break; - case "title": + case "workspace-title": control = new 
qx.ui.basic.Label().set({ font: "text-16", alignY: "middle", @@ -178,37 +178,47 @@ qx.Class.define("osparc.dashboard.WorkspaceHeader", { __buildLayout: function(workspaceId) { this.getChildControl("icon"); - const title = this.getChildControl("title").set({ - cursor: "pointer" - }); - title.addListener("tap", () => { - const folderId = null; - this.setCurrentFolderId(folderId); - this.fireDataEvent("contextChanged", { - workspaceId, - folderId, - }); - }); - + const title = this.getChildControl("workspace-title"); this.getChildControl("breadcrumbs"); - this.getChildControl("edit-button").exclude(); this.resetAccessRights(); this.resetMyAccessRights(); - const workspace = osparc.store.Workspaces.getInstance().getWorkspace(workspaceId); - if (workspaceId === -1) { + if (workspaceId === -2) { + this.__setIcon("@FontAwesome5Solid/search/24"); + title.set({ + value: this.tr("Search results"), + cursor: "auto", + }); + } else if (workspaceId === -1) { this.__setIcon(osparc.store.Workspaces.iconPath(32)); - title.setValue(this.tr("Shared Workspaces")); - } else if (workspace) { - const thumbnail = workspace.getThumbnail(); - this.__setIcon(thumbnail ? thumbnail : osparc.store.Workspaces.iconPath(32)); - workspace.bind("name", title, "value"); - workspace.bind("accessRights", this, "accessRights"); - workspace.bind("myAccessRights", this, "myAccessRights"); + title.set({ + value: this.tr("Shared Workspaces"), + cursor: "auto", + }) } else { - this.__setIcon("@FontAwesome5Solid/home/30"); - title.setValue(this.tr("My Workspace")); + title.set({ + cursor: "pointer" + }); + title.addListener("tap", () => { + const folderId = null; + this.setCurrentFolderId(folderId); + this.fireDataEvent("contextChanged", { + workspaceId, + folderId, + }); + }); + const workspace = osparc.store.Workspaces.getInstance().getWorkspace(workspaceId); + if (workspace) { + const thumbnail = workspace.getThumbnail(); + this.__setIcon(thumbnail ? 
thumbnail : osparc.store.Workspaces.iconPath(32)); + workspace.bind("name", title, "value"); + workspace.bind("accessRights", this, "accessRights"); + workspace.bind("myAccessRights", this, "myAccessRights"); + } else { + this.__setIcon("@FontAwesome5Solid/home/30"); + title.setValue(this.tr("My Workspace")); + } } }, @@ -221,15 +231,21 @@ qx.Class.define("osparc.dashboard.WorkspaceHeader", { this._add(spacer); }, - __setIcon: function(source) { + __resetIcon: function() { const icon = this.getChildControl("icon"); const image = icon.getChildControl("image"); image.resetSource(); icon.getContentElement().setStyles({ "background-image": "none" }); + }, + __setIcon: function(source) { + this.__resetIcon(); + + const icon = this.getChildControl("icon"); if (source.includes("@")) { + const image = icon.getChildControl("image"); image.set({ source }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js index 010deb4445bf..46382726935f 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js @@ -292,10 +292,12 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { const workspaceId = this.getCurrentWorkspaceId(); const folderId = this.getCurrentFolderId(); + const selection = this.getSelection(); + if (selection) { + selection.removeAll(); + } const contextModel = this.__getModel(workspaceId, folderId); if (contextModel) { - const selection = this.getSelection(); - selection.removeAll(); selection.push(contextModel); } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js index 5039a743a07c..f643218243b8 100644 --- 
a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTreeItem.js @@ -27,7 +27,7 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTreeItem", { }); this.set({ - maxWidth: 200 - 10 + maxWidth: osparc.dashboard.ResourceBrowserBase.SIDE_SPACER_WIDTH - 12 }); this.setNotHoveredStyle(); diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js index d4b5c86e6ac1..ac1cddbdda59 100644 --- a/services/static-webserver/client/source/class/osparc/data/Resources.js +++ b/services/static-webserver/client/source/class/osparc/data/Resources.js @@ -119,18 +119,15 @@ qx.Class.define("osparc.data.Resources", { url: statics.API + "/projects?type=user" }, getPage: { - method: "GET", - url: statics.API + "/projects?type=user&offset={offset}&limit={limit}&workspace_id={workspaceId}&folder_id={folderId}" - }, - getPageSearch: { useCache: false, method: "GET", - url: statics.API + "/projects?type=user&offset={offset}&limit={limit}&workspace_id={workspaceId}&folder_id={folderId}&search={text}" + url: statics.API + "/projects?type=user&offset={offset}&limit={limit}&workspace_id={workspaceId}&folder_id={folderId}&order_by={orderBy}" }, - getPageSortBy: { + getPageSearch: { useCache: false, method: "GET", - url: statics.API + "/projects?type=user&offset={offset}&limit={limit}&workspace_id={workspaceId}&folder_id={folderId}&order_by={orderBy}" + url: statics.API + "/projects:search?offset={offset}&limit={limit}&text={text}&order_by={orderBy}" + // url: statics.API + "/projects:search?offset={offset}&limit={limit}&text={text}&tags={tags}&order_by={orderBy}" }, getOne: { useCache: false, @@ -1305,8 +1302,11 @@ qx.Class.define("osparc.data.Resources", { let status = null; if (e.getData().error) { const errorData = e.getData().error; + if (errorData.message) { + 
message = errorData.message; + } const logs = errorData.logs || null; - if (logs && logs.length) { + if (message === null && logs && logs.length) { message = logs[0].message; } const errors = errorData.errors || []; diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js index 39dbde53c8fe..1dd46c13b134 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -23,6 +23,12 @@ qx.Class.define("osparc.data.model.IframeHandler", { this.setStudy(study); this.setNode(node); + node.getStatus().addListener("changeInteractive", e => { + const newStatus = e.getData(); + const oldStatus = e.getOldData(); + this.__statusInteractiveChanged(newStatus, oldStatus); + }); + this.__initLoadingPage(); this.__initIFrame(); }, @@ -51,12 +57,6 @@ qx.Class.define("osparc.data.model.IframeHandler", { check: "osparc.widget.PersistentIframe", init: null, nullable: true - }, - - polling: { - check: "Boolean", - init: null, - nullable: true } }, @@ -69,12 +69,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { __stopRequestingStatus: null, __retriesLeft: null, - startPolling: function() { - if (this.isPolling()) { - return; - } - this.setPolling(true); - + checkState: function() { this.getNode().getStatus().getProgressSequence() .resetSequence(); @@ -87,7 +82,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { .resetSequence(); this.__unresponsiveRetries = 5; - this.__nodeState(false); + this.__nodeState(); this.getIFrame().resetSource(); }, @@ -124,47 +119,27 @@ qx.Class.define("osparc.data.model.IframeHandler", { }); loadingPage.addExtraWidget(sequenceWidget); - nodeStatus.addListener("changeInteractive", () => { - loadingPage.setHeader(this.__getLoadingPageHeader()); - const status = nodeStatus.getInteractive(); - if (["idle", 
"failed"].includes(status)) { - const startButton = new qx.ui.form.Button().set({ - label: this.tr("Start"), - icon: "@FontAwesome5Solid/play/18", - font: "text-18", - allowGrowX: false, - height: 32 - }); - startButton.addListener("execute", () => node.requestStartNode()); - loadingPage.addWidgetToMessages(startButton); - } else { - loadingPage.setMessages([]); - } - }, this); this.setLoadingPage(loadingPage); }, - __getLoadingPageHeader: function() { + __getLoadingPageHeader: function(status) { const node = this.getNode(); - let statusText = this.tr("Starting"); - const status = node.getStatus().getInteractive(); - if (status) { - statusText = status.charAt(0).toUpperCase() + status.slice(1); + if (status === undefined) { + status = node.getStatus().getInteractive(); } + const statusText = status ? (status.charAt(0).toUpperCase() + status.slice(1)) : this.tr("Starting"); const metadata = node.getMetaData(); const versionDisplay = osparc.service.Utils.extractVersionDisplay(metadata); return statusText + " " + node.getLabel() + " v" + versionDisplay + ""; }, - __nodeState: function(starting=true) { + __nodeState: function() { // Check if study is still there if (this.getStudy() === null || this.__stopRequestingStatus === true) { - this.setPolling(false); return; } // Check if node is still there if (this.getStudy().getWorkbench().getNode(this.getNode().getNodeId()) === null) { - this.setPolling(false); return; } @@ -176,7 +151,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { } }; osparc.data.Resources.fetch("studies", "getNode", params) - .then(data => this.__onNodeState(data, starting)) + .then(data => this.onNodeState(data)) .catch(err => { let errorMsg = `Error retrieving ${node.getLabel()} status: ${err}`; if ("status" in err && err.status === 406) { @@ -191,7 +166,6 @@ qx.Class.define("osparc.data.model.IframeHandler", { }; node.fireDataEvent("showInLogger", errorMsgData); if ("status" in err && err.status === 406) { - this.setPolling(false); return; 
} if (this.__unresponsiveRetries > 0) { @@ -203,32 +177,24 @@ qx.Class.define("osparc.data.model.IframeHandler", { }; node.fireDataEvent("showInLogger", retryMsgData); this.__unresponsiveRetries--; - const interval = Math.floor(Math.random() * 5000) + 3000; - setTimeout(() => this.__nodeState(), interval); } else { - this.setPolling(false); node.getStatus().setInteractive("failed"); osparc.FlashMessenger.getInstance().logAs(this.tr("There was an error starting") + " " + node.getLabel(), "ERROR"); } }); }, - __onNodeState: function(data, starting=true) { + onNodeState: function(data) { const serviceState = data["service_state"]; const nodeId = data["service_uuid"]; const node = this.getNode(); const status = node.getStatus(); - let nextPollIn = null; - let pollingInNextStage = null; switch (serviceState) { case "idle": { status.setInteractive(serviceState); - if (starting && this.__unresponsiveRetries>0) { + if (this.__unresponsiveRetries>0) { // a bit of a hack. We will get rid of it when the backend pushes the states this.__unresponsiveRetries--; - nextPollIn = 2000; - } else { - this.setPolling(false); } break; } @@ -248,7 +214,6 @@ qx.Class.define("osparc.data.model.IframeHandler", { node.fireDataEvent("showInLogger", msgData); } status.setInteractive(serviceState); - nextPollIn = 10000; break; } case "stopping": @@ -256,28 +221,25 @@ qx.Class.define("osparc.data.model.IframeHandler", { case "starting": case "pulling": { status.setInteractive(serviceState); - nextPollIn = 5000; break; } case "running": { if (nodeId !== node.getNodeId()) { break; } - if (!starting) { - status.setInteractive("stopping"); - nextPollIn = 5000; - break; - } const { srvUrl, isDynamicV2 } = osparc.utils.Utils.computeServiceUrl(data); node.setDynamicV2(isDynamicV2); - if (srvUrl) { + if ( + srvUrl && + srvUrl !== node.getServiceUrl() // if it's already connected, do not restart the connection process + ) { + this.__statusInteractiveChanged("connecting", 
node.getStatus().getInteractive()); this.__retriesLeft = 40; this.__waitForServiceReady(srvUrl); } - pollingInNextStage = true; break; } case "complete": @@ -297,18 +259,10 @@ qx.Class.define("osparc.data.model.IframeHandler", { console.error(serviceState, "service state not supported"); break; } - if (nextPollIn) { - qx.event.Timer.once(() => this.__nodeState(starting), this, nextPollIn); - } else if (pollingInNextStage !== true) { - this.setPolling(false); - } }, __waitForServiceReady: function(srvUrl) { - this.getNode().getStatus().setInteractive("connecting"); - if (this.__retriesLeft === 0) { - this.setPolling(false); return; } @@ -317,7 +271,6 @@ qx.Class.define("osparc.data.model.IframeHandler", { // Check if node is still there if (this.getStudy().getWorkbench().getNode(this.getNode().getNodeId()) === null) { - this.setPolling(false); return; } const interval = 5000; @@ -335,7 +288,6 @@ qx.Class.define("osparc.data.model.IframeHandler", { console.log("Connecting: fetch's response status", response.status); } if (response.status < 400) { - this.setPolling(false); this.__serviceReadyIn(srvUrl); } else { console.log(`Connecting: ${srvUrl} is not reachable. 
Status: ${response.status}`); @@ -356,16 +308,57 @@ qx.Class.define("osparc.data.model.IframeHandler", { const node = this.getNode(); node.setServiceUrl(srvUrl); node.getStatus().setInteractive("ready"); - const msg = "Service ready on " + srvUrl; - const msgData = { - nodeId: node.getNodeId(), - msg, - level: "INFO" - }; - node.fireDataEvent("showInLogger", msgData); - this.__restartIFrame(); - if (!node.isDynamicV2()) { - node.callRetrieveInputs(); + }, + + __statusInteractiveChanged: function(status, oldStatus) { + if (status === oldStatus) { + return; + } + + const node = this.getNode(); + + const loadingPage = node.getLoadingPage(); + loadingPage.setHeader(this.__getLoadingPageHeader(status)); + loadingPage.clearMessages(); + if (["idle", "failed"].includes(status)) { + const startButton = new qx.ui.form.Button().set({ + label: this.tr("Start"), + icon: "@FontAwesome5Solid/play/18", + font: "text-18", + allowGrowX: false, + height: 32 + }); + startButton.addListener("execute", () => node.requestStartNode()); + loadingPage.addWidgetToMessages(startButton); + } + + if (status === "ready") { + const msg = `Service ${node.getLabel()} ${status}`; + const msgData = { + nodeId: node.getNodeId(), + msg, + level: "INFO" + }; + node.fireDataEvent("showInLogger", msgData); + + // will switch to iframe's content + this.__restartIFrame(); + if (!node.isDynamicV2()) { + node.callRetrieveInputs(); + } + } else if (["idle", "failed", "stopping"].includes(status) && oldStatus) { + const msg = `Service ${node.getLabel()} ${status}`; + const msgData = { + nodeId: node.getNodeId(), + msg, + level: "INFO" + }; + node.fireDataEvent("showInLogger", msgData); + + // will switch to the loading page + node.resetServiceUrl(); + this.getIFrame().resetSource(); + this.fireEvent("iframeChanged"); } }, @@ -394,7 +387,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { const node = this.getNode(); const status = node.getStatus().getInteractive(); // it might have been stopped - if 
(status === "ready") { + if (["running", "ready"].includes(status)) { this.getIFrame().resetSource(); this.getIFrame().setSource(node.getServiceUrl()); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 6373d8835cad..c04e32ab64ef 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -136,7 +136,8 @@ qx.Class.define("osparc.data.model.Node", { outputs: { check: "Object", nullable: false, - event: "changeOutputs" + event: "changeOutputs", + apply: "__applyOutputs", }, status: { @@ -166,6 +167,12 @@ qx.Class.define("osparc.data.model.Node", { apply: "__applyPropsForm" }, + outputsForm: { + check: "osparc.widget.NodeOutputs", + init: null, + nullable: true + }, + marker: { check: "qx.core.Object", init: null, @@ -612,6 +619,13 @@ qx.Class.define("osparc.data.model.Node", { }, this); }, + __applyOutputs: function() { + if (!this.isPropertyInitialized("outputsForm") || !this.getOutputsForm()) { + const nodeOutputs = new osparc.widget.NodeOutputs(this); + this.setOutputsForm(nodeOutputs); + } + }, + removeNodePortConnections: function(inputNodeId) { let inputs = this.__getInputData(); for (const portId in inputs) { @@ -905,7 +919,7 @@ qx.Class.define("osparc.data.model.Node", { } }; osparc.data.Resources.fetch("studies", "startNode", params) - .then(() => this.startPollingState()) + .then(() => this.checkState()) .catch(err => { if ("status" in err && (err.status === 409 || err.status === 402)) { osparc.FlashMessenger.getInstance().logAs(err.message, "WARNING"); @@ -1055,7 +1069,7 @@ qx.Class.define("osparc.data.model.Node", { } }, - startPollingState: function() { + checkState: function() { if (this.isDynamic()) { const metadata = this.getMetaData(); const msg = "Starting " + metadata.key + ":" + metadata.version + "..."; @@ -1067,7 +1081,7 @@ 
qx.Class.define("osparc.data.model.Node", { this.fireDataEvent("showInLogger", msgData); if (this.getIframeHandler()) { - this.getIframeHandler().startPolling(); + this.getIframeHandler().checkState(); } else { console.error(this.getLabel() + " iframe handler not ready"); } diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index c62549bcc63d..2d4633dd584e 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -45,6 +45,7 @@ qx.Class.define("osparc.data.model.Study", { this.set({ uuid: studyData.uuid || this.getUuid(), workspaceId: studyData.workspaceId || null, + folderId: studyData.folderId || null, name: studyData.name || this.getName(), description: studyData.description || this.getDescription(), thumbnail: studyData.thumbnail || this.getThumbnail(), @@ -86,6 +87,13 @@ qx.Class.define("osparc.data.model.Study", { event: "changeWorkspaceId" }, + folderId: { + check: "Number", + init: true, + nullable: true, + event: "changeFolderId" + }, + name: { check: "String", nullable: false, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js index 3120a22f7f15..63cd8212fe2b 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js @@ -104,7 +104,7 @@ qx.Class.define("osparc.data.model.Workbench", { initWorkbench: function() { const allModels = this.getNodes(); const nodes = Object.values(allModels); - nodes.forEach(node => node.startPollingState()); + nodes.forEach(node => node.checkState()); }, getUpstreamCompNodes: function(node, recursive = true, upstreamNodes = new Set()) { @@ -306,7 +306,7 @@ 
qx.Class.define("osparc.data.model.Workbench", { node.populateNodeData(); this.giveUniqueNameToNode(node, node.getLabel()); - node.startPollingState(); + node.checkState(); return node; } catch (err) { diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js index 1218c22fc558..78ece737e9c5 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js @@ -287,6 +287,8 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__listenToNodeProgress(); this.__listenToNoMoreCreditsEvents(); this.__listenToEvent(); + this.__listenToServiceStatus(); + this.__listenToStateInputPorts(); }, __listenToLogger: function() { @@ -414,6 +416,105 @@ qx.Class.define("osparc.desktop.StudyEditor", { } }, + __listenToServiceStatus: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + + // callback for events + if (!socket.slotExists("serviceStatus")) { + socket.on("serviceStatus", data => { + const nodeId = data["service_uuid"]; + const workbench = this.getStudy().getWorkbench(); + const node = workbench.getNode(nodeId); + if (node) { + if (node.getIframeHandler()) { + node.getIframeHandler().onNodeState(data); + } + } else if (osparc.data.Permissions.getInstance().isTester()) { + console.log("Ignored ws 'progress' msg", data); + } + }, this); + } + }, + + __listenToStateInputPorts: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + if (!socket.slotExists("stateInputPorts")) { + socket.on("stateInputPorts", data => { + this.__statePortReceived(data, "stateInputPorts"); + }, this); + } + if (!socket.slotExists("stateOutputPorts")) { + socket.on("stateOutputPorts", data => { + this.__statePortReceived(data, "stateOutputPorts"); + }, this); + } + }, + + __statePortReceived: function(socketData, msgName) { + const studyId = 
socketData["project_id"]; + if (this.getStudy().getUuid() !== studyId) { + return; + } + + const nodeId = socketData["node_id"]; + const workbench = this.getStudy().getWorkbench(); + const node = workbench.getNode(nodeId); + if (!node) { + if (osparc.data.Permissions.getInstance().isTester()) { + console.log("Ignored ws 'stateInputPorts' msg", socketData); + } + return; + } + + const propsForm = node.getPropsForm(); + if (msgName === "stateInputPorts" && propsForm) { + const portId = socketData["port_key"]; + const status = socketData["status"]; + switch (status) { + case "DOWNLOAD_STARTED": + propsForm.retrievingPortData( + portId, + osparc.form.renderer.PropForm.RETRIEVE_STATUS.downloading + ); + break; + case "DOWNLOAD_FINISHED_SUCCESSFULLY": + propsForm.retrievedPortData(portId, true); + break; + case "DOWNLOAD_WAS_ABORTED": + case "DOWNLOAD_FINISHED_WITH_ERROR": + propsForm.retrievedPortData(portId, false); + break; + } + } + + const outputsForm = node.getOutputsForm(); + if (msgName === "stateOutputPorts" && outputsForm) { + const portId = socketData["port_key"]; + const status = socketData["status"]; + switch (status) { + case "UPLOAD_STARTED": + outputsForm.setRetrievingStatus( + portId, + osparc.form.renderer.PropForm.RETRIEVE_STATUS.uploading + ); + break; + case "UPLOAD_FINISHED_SUCCESSFULLY": + outputsForm.setRetrievingStatus( + portId, + osparc.form.renderer.PropForm.RETRIEVE_STATUS.succeed + ); + break; + case "UPLOAD_WAS_ABORTED": + case "UPLOAD_FINISHED_WITH_ERROR": + outputsForm.setRetrievingStatus( + portId, + osparc.form.renderer.PropForm.RETRIEVE_STATUS.failed + ); + break; + } + } + }, + __reloadSnapshotsAndIterations: function() { const isVCDisabled = osparc.utils.DisabledPlugins.isVersionControlDisabled(); if (!isVCDisabled) { diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index ad1c351f0725..ca51e61093b2 100644 --- 
a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -1032,11 +1032,12 @@ qx.Class.define("osparc.desktop.WorkbenchView", { // OUTPUTS const outputsBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(spacing)); - if (node.hasOutputs()) { - const nodeOutputs = new osparc.widget.NodeOutputs(node, node.getMetaData().outputs).set({ + const outputsForm = node.getOutputsForm(); + if (node.hasOutputs() && outputsForm) { + outputsForm.set({ offerProbes: true }); - outputsBox.add(nodeOutputs); + outputsBox.add(outputsForm); } const nodeFilesBtn = new qx.ui.form.Button(this.tr("Service data"), "@FontAwesome5Solid/folder-open/14").set({ diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js index 6a2123753123..9d2d7a6fa1ec 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js @@ -88,7 +88,7 @@ qx.Class.define("osparc.desktop.organizations.ServicesList", { item.addListener("openMoreInfo", e => { const serviceKey = e.getData()["key"]; const serviceVersion = e.getData()["version"]; - osparc.store.Store.getService(serviceKey, serviceVersion) + osparc.store.Services.getService(serviceKey, serviceVersion) .then(serviceData => { if (serviceData) { serviceData["resourceType"] = "service"; diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js index 9150cfe04998..ac609258130d 100644 --- a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js +++ b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js @@ -56,14 +56,24 
@@ qx.Class.define("osparc.form.renderer.PropForm", { return new qx.ui.basic.Atom("", "osparc/loading.gif"); }, - getRetrievedAtom: function(success) { - const icon = success ? "@FontAwesome5Solid/check/12" : "@FontAwesome5Solid/times/12"; - return new qx.ui.basic.Atom("", icon); + getDownloadingAtom: function() { + return new qx.ui.basic.Atom("", "@FontAwesome5Solid/cloud-download-alt/12"); + }, + + getUploadingAtom: function() { + return new qx.ui.basic.Atom("", "@FontAwesome5Solid/cloud-upload-alt/12"); + }, + + getFailedAtom: function() { + return new qx.ui.basic.Atom("", "@FontAwesome5Solid/times/12"); + }, + + getSucceededAtom: function() { + return new qx.ui.basic.Atom("", "@FontAwesome5Solid/check/12"); }, getRetrievedEmpty: function() { - const icon = "@FontAwesome5Solid/dot-circle/10"; - return new qx.ui.basic.Atom("", icon); + return new qx.ui.basic.Atom("", "@FontAwesome5Solid/dot-circle/10"); }, GRID_POS: { @@ -78,18 +88,44 @@ qx.Class.define("osparc.form.renderer.PropForm", { supportedTypes.push(osparc.node.ParameterEditor.getParameterOutputTypeFromMD(paramMD)); }); return supportedTypes.includes(field.type); - } - }, + }, - // eslint-disable-next-line qx-rules/no-refs-in-members - members: { - _retrieveStatus: { + RETRIEVE_STATUS: { failed: -1, empty: 0, retrieving: 1, - succeed: 2 + downloading: 2, + uploading: 3, + succeed: 4 }, + getIconForStatus: function(status) { + let icon; + switch (status) { + case this.RETRIEVE_STATUS.failed: + icon = this.getFailedAtom(); + break; + case this.RETRIEVE_STATUS.empty: + icon = this.getRetrievedEmpty(); + break; + case this.RETRIEVE_STATUS.retrieving: + icon = this.getRetrievingAtom(); + break; + case this.RETRIEVE_STATUS.downloading: + icon = this.getDownloadingAtom(); + break; + case this.RETRIEVE_STATUS.uploading: + icon = this.getUploadingAtom(); + break; + case this.RETRIEVE_STATUS.succeed: + icon = this.getSucceededAtom(); + break; + } + return icon; + } + }, + + members: { __ctrlLinkMap: null, 
__linkUnlinkStackMap: null, __fieldOptsBtnMap: null, @@ -528,8 +564,10 @@ qx.Class.define("osparc.form.renderer.PropForm", { } }, - retrievingPortData: function(portId) { - const status = this._retrieveStatus.retrieving; + retrievingPortData: function(portId, status) { + if (status === undefined) { + status = this.self().RETRIEVE_STATUS.retrieving; + } if (portId) { let data = this._getCtrlFieldChild(portId); if (data) { @@ -553,9 +591,9 @@ qx.Class.define("osparc.form.renderer.PropForm", { }, retrievedPortData: function(portId, succeed, dataSize = -1) { - let status = succeed ? this._retrieveStatus.succeed : this._retrieveStatus.failed; + let status = succeed ? this.self().RETRIEVE_STATUS.succeed : this.self().RETRIEVE_STATUS.failed; if (parseInt(dataSize) === 0) { - status = this._retrieveStatus.empty; + status = this.self().RETRIEVE_STATUS.empty; } if (portId) { let data = this._getCtrlFieldChild(portId); @@ -578,23 +616,6 @@ qx.Class.define("osparc.form.renderer.PropForm", { }, __setRetrievingStatus: function(status, portId, idx, row) { - let icon; - switch (status) { - case this._retrieveStatus.failed: - icon = this.self().getRetrievedAtom(false); - break; - case this._retrieveStatus.empty: - icon = this.self().getRetrievedEmpty(); - break; - case this._retrieveStatus.retrieving: - icon = this.self().getRetrievingAtom(); - break; - case this._retrieveStatus.succeed: - icon = this.self().getRetrievedAtom(true); - break; - } - icon.key = portId; - // remove first if any let children = this._getChildren(); for (let i=0; iS4Llite, visit ${osparc.utils.Utils.createHTMLLink("our website", moreInfoUrl)}.`; + const moreInfoText = `For more information about Sim4Life.lite, visit ${osparc.utils.Utils.createHTMLLink("our website", moreInfoUrl)}.`; [ introText, diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Dashboard.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Dashboard.js index 
6075367284e7..8b6e54d7bb2e 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Dashboard.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Dashboard.js @@ -40,7 +40,7 @@ qx.Class.define("osparc.product.quickStart.s4llite.Dashboard", { this._add(dashboardProjects); const newProjectText = this.tr("\ - 1) Start S4Llite: Click the + Start S4Llite button to create a new project. This will start the user interface of S4Llite.\ + 1) Start Sim4Life.lite: Click the + Start Sim4Life.lite button to create a new project. This will start the user interface of Sim4Life.lite.\ "); const newProject = osparc.product.quickStart.Utils.createLabel(newProjectText); this._add(newProject); @@ -53,7 +53,7 @@ qx.Class.define("osparc.product.quickStart.s4llite.Dashboard", { this._add(otherProjects); const otherProjects2Text = this.tr("\ - 3) TUTORIALS: A set of pre-built read-only tutorial projects with results is available to all S4Llite users. When a tutorial is selected, a \ + 3) TUTORIALS: A set of pre-built read-only tutorial projects with results is available to all Sim4Life.lite users. When a tutorial is selected, a \ copy is automatically created and added to the user’s Projects tab. This new copy is editable and can be shared.\ "); const otherProjects2 = osparc.product.quickStart.Utils.createLabel(otherProjects2Text); @@ -68,8 +68,8 @@ qx.Class.define("osparc.product.quickStart.s4llite.Dashboard", { this._add(dashboardTutorials); const importProjectsText = this.tr("\ - 4) To open an existing desktop project in S4Llite: \ - - Click the + Start S4Llite button to create a new project.
\ + 4) To open an existing desktop project in Sim4Life.lite: \ + - Click the + Start Sim4Life.lite button to create a new project.
\ - Click the menu and select “File Browser…”.
\ - Click “Upload File” for the .smash project and select the file from your desktop. Repeat the same step, but this \ time select “Upload Folder” and then select the result folder from your desktop. Close the window
\ diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteSpecs.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteSpecs.js index 4b605b8c694b..fcdd411c10b5 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteSpecs.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteSpecs.js @@ -19,23 +19,23 @@ qx.Class.define("osparc.product.quickStart.s4llite.S4LLiteSpecs", { extend: osparc.product.quickStart.SlideBase, construct: function() { - const title = this.tr("S4Llite: Features and Limitations"); + const title = this.tr("Sim4Life.lite: Features and Limitations"); this.base(arguments, title); }, members: { _populateCard: function() { const introText = this.tr("\ - S4Llite is a powerful web-based simulation platform that allows you to model and analyze real-world phenomena and to \ - design complex technical devices in a validated environment. S4Llite has been created specifically for students to \ + Sim4Life.lite is a powerful web-based simulation platform that allows you to model and analyze real-world phenomena and to \ + design complex technical devices in a validated environment. Sim4Life.lite has been created specifically for students to \ facilitate their understanding of computational modeling and simulations for various topics, ranging from wireless communication \ - to medical applications. The access to S4Llite is available free of charge to students enrolled at registered universities.\ + to medical applications. The access to Sim4Life.lite is available free of charge to students enrolled at registered universities.\ "); const intro = osparc.product.quickStart.Utils.createLabel(introText); this._add(intro); const featuresText = this.tr("\ - S4Llite offers
\ + Sim4Life.lite offers
\ - Framework (GUI, Modeling, Postprocessing)
\ - 3D modeling environment (based on the ACIS toolkit) and CAD translators
\ - Postprocessing and visualization of the simulation results (2D and 3D viewers, 2D planar slice, volume rendering, streamlines, surface fields on arbitrary 3D structures, radiation and far-field data)
\ diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteUI.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteUI.js index 59c2d0c81e76..09e29fc108ae 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteUI.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/S4LLiteUI.js @@ -19,14 +19,14 @@ qx.Class.define("osparc.product.quickStart.s4llite.S4LLiteUI", { extend: osparc.product.quickStart.SlideBase, construct: function() { - const title = this.tr("S4Llite"); + const title = this.tr("Sim4Life.lite"); this.base(arguments, title); }, members: { _populateCard: function() { const introText = this.tr("\ - To check the S4Llite manual, please open a project and access the documentation via Help in the menu as shown below. Enjoy!\ + To check the Sim4Life.lite manual, please open a project and access the documentation via Help in the menu as shown below. Enjoy!\ "); const intro = osparc.product.quickStart.Utils.createLabel(introText); this._add(intro); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js index b6816398a2b7..77e187a1d8c2 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js @@ -30,13 +30,13 @@ qx.Class.define("osparc.product.quickStart.s4llite.Welcome", { this._add(welcome); const introText = this.tr("\ - This quick user’s guide gives a short introduction to S4Llite. We will show:
\ + This quick user’s guide gives a short introduction to Sim4Life.lite. We will show:
\ - how to get started with a new project,
\ - how to get started from an existing tutorial project
\ - - how to open Sim4Life lite desktop simulation projects in S4Llite,
\ - - S4Llite features, limitations and user interface
\ + - how to open Sim4Life desktop simulation projects in Sim4Life.lite,
\ + - Sim4Life.lite features, limitations and user interface
\
\ - For more specific technical information, please refer to the Dashboard Manual and the S4Llite Manual.\ + For more specific technical information, please refer to the Dashboard Manual and the Sim4Life.lite Manual.\ "); const intro = osparc.product.quickStart.Utils.createLabel(introText); this._add(intro); diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index a502a5144d00..fdf69de394b3 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -92,7 +92,10 @@ qx.Class.define("osparc.utils.Utils", { FLOATING_Z_INDEX: 110000, replaceTokens: function(str, key, value) { - return str.replaceAll("${"+key+"}", value); + // `str` might be a a localized string, get the string first + str = str.toString ? str.toString() : str; + const regex = new RegExp("\\${"+key+"\\}", "g"); + return str.replace(regex, value); }, /** @@ -994,17 +997,27 @@ qx.Class.define("osparc.utils.Utils", { getParamFromURL: (urlStr, param) => { const url = new URL(urlStr); - const args = new URLSearchParams(url.search); - return args.get(param); + const urlParams = new URLSearchParams(url.search); + return urlParams.get(param); }, - hasParamFromURL: (url, param) => { - const urlParams = new URLSearchParams(url); + hasParamFromURL: (urlStr, param) => { + const url = new URL(urlStr); + const urlParams = new URLSearchParams(url.search); return urlParams.has(param); }, isUrl: url => /^(http:\/\/www\.|https:\/\/www\.|http:\/\/|https:\/\/)?[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(:[0-9]{1,5})?(\/.*)?$/gm.test(url), + snakeToCamel: str => { + return str.toLowerCase().replace(/([-_][a-z])/g, group => + group + .toUpperCase() + .replace("-", "") + .replace("_", "") + ); + }, + setIdToWidget: (qWidget, id) => { if (qWidget.getContentElement) { qWidget.getContentElement().setAttribute("osparc-test-id", id); 
diff --git a/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js b/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js index 94aeb50ef5f2..57d97839dc79 100644 --- a/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js +++ b/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js @@ -79,7 +79,7 @@ qx.Class.define("osparc.viewer.NodeViewer", { const iframeHandler = node.getIframeHandler(); if (iframeHandler) { - iframeHandler.startPolling(); + iframeHandler.checkState(); iframeHandler.addListener("iframeChanged", () => this.__iFrameChanged(), this); iframeHandler.getIFrame().addListener("load", () => this.__iFrameChanged(), this); this.__iFrameChanged(); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js b/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js index df8c4ff6a3da..071db993e304 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js @@ -29,31 +29,27 @@ qx.Class.define("osparc.widget.NodeOutputs", { /** * @param node {osparc.data.model.Node} Node owning the widget - * @param ports {Object} Port owning the widget */ - construct: function(node, ports) { + construct: function(node) { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(15)); const grid = new qx.ui.layout.Grid(5, 5); grid.setColumnFlex(this.self().POS.LABEL, 1); - grid.setColumnFlex(this.self().POS.INFO, 0); - grid.setColumnFlex(this.self().POS.ICON, 0); grid.setColumnFlex(this.self().POS.VALUE, 1); - grid.setColumnFlex(this.self().POS.UNIT, 0); - grid.setColumnFlex(this.self().POS.PROBE, 0); grid.setColumnMinWidth(this.self().POS.VALUE, 50); + grid.setColumnMaxWidth(this.self().POS.RETRIEVE_STATUS, 25); Object.keys(this.self().POS).forEach((_, idx) => grid.setColumnAlign(idx, "left", "middle")); const gridLayout = this.__gridLayout = 
new qx.ui.container.Composite(grid); this._add(gridLayout); this.set({ node, - ports + ports: node.getMetaData().outputs }); - node.addListener("changeOutputs", () => this.__populateGrid(), this); + node.addListener("changeOutputs", () => this.__outputsChanged(), this); this.addListener("appear", () => this.__makeLabelsResponsive(), this); this.addListener("resize", () => this.__makeLabelsResponsive(), this); @@ -88,7 +84,8 @@ qx.Class.define("osparc.widget.NodeOutputs", { ICON: 2, VALUE: 3, UNIT: 4, - PROBE: 5 + PROBE: 5, + RETRIEVE_STATUS: 6, } }, @@ -98,7 +95,6 @@ qx.Class.define("osparc.widget.NodeOutputs", { __populateGrid: function() { this.__gridLayout.removeAll(); - const outputs = this.getNode().getOutputs(); const ports = this.getPorts(); const portKeys = Object.keys(ports); for (let i=0; i { if ("resp" in presignedLinkData && presignedLinkData.resp) { - valueLink.setUrl(presignedLinkData.resp.link); + valueWidget.setUrl(presignedLinkData.resp.link); } }); } else if ("downloadLink" in value) { // it's a link const filename = (value.filename && value.filename.length > 0) ? value.filename : osparc.file.FileDownloadLink.extractLabelFromLink(value["downloadLink"]); - valueLink.set({ + valueWidget.set({ value: filename, url: value.downloadLink }); } } else { - const valueEntry = new qx.ui.basic.Label("-"); + valueWidget = new qx.ui.basic.Label("-"); if (value) { - valueEntry.setValue(String(value)); + valueWidget.setValue(String(value)); } - this.__gridLayout.add(valueEntry, { - row: i, - column: this.self().POS.VALUE - }); } + + // remove first if any + this.__removeEntry(row, this.self().POS.VALUE); + + this.__gridLayout.add(valueWidget, { + row: row, + column: this.self().POS.VALUE + }); }, __makeLabelsResponsive: function() { @@ -227,6 +233,39 @@ qx.Class.define("osparc.widget.NodeOutputs", { infoButton.setVisibility(extendedVersion ? "hidden" : "visible"); grid.setColumnMinWidth(this.self().POS.VALUE, extendedVersion ? 
150 : 50); } + }, + + __removeEntry: function(row, column) { + let children = this.__gridLayout.getChildren(); + for (let i=0; iS4Llite", + "title": "Start ${replace_me_product_name}", "description": "New project", "newStudyLabel": "New project", "idToWidget": "startS4LButton" diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json index 7b883f8a78c0..cacb9ffb83d1 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json @@ -21,7 +21,7 @@ "selector": "osparc-test-id=templatesTabBtn" }, "anchorEl": "osparc-test-id=templatesTabBtn", - "text": "Clicking on a Tutorial will create a copy of that Study, which will appear in your own Project ts tab with thee same name as the Tutorial. Any changes you make to this copy will not affect the original Tutorial.", + "text": "Clicking on a Tutorial will create a copy of that Project, which will appear in your own Projects tab with the same name as the Tutorial. Any changes you make to this copy will not affect the original Tutorial.", "placement": "bottom" }, { "beforeClick": { @@ -30,13 +30,6 @@ "anchorEl": "osparc-test-id=servicesTabBtn", "text": "Every Project in Sim4Life is composed of at lease one so-called Service.
Services are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", "placement": "bottom" - }, { - "beforeClick": { - "selector": "osparc-test-id=dataTabBtn" - }, - "anchorEl": "osparc-test-id=dataTabBtn", - "text": "The Data tab allow you to browse the output files of all of your active pipelines in one place. This is most useful for downloading the data created on the online platform.", - "placement": "bottom" }] }, "navbar": { @@ -50,7 +43,7 @@ "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about what Studies, Credits and Organizations.", + "text": "By clicking on the Bell, you will you see notifications about which Projects, Credits and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json index 7cfe8df14e49..e1e509a6f838 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json @@ -2,22 +2,88 @@ "dashboard": { "id": "dashboard", "name": "Dashboard", - "description": "Introduction to Dashboard tabs", + "description": "Introduction to the Dashboard tabs", + "context": "osparc-test-id=dashboardTabs", "steps": [{ - "anchorEl": "osparc-test-id=studiesTabBtn", "beforeClick": { "selector": "osparc-test-id=studiesTabBtn" }, + "anchorEl": "osparc-test-id=studiesTabBtn", "title": "Projects", - "text": "Existing projects can be accessed and managed, and new projects can be created. Each project is represented by a card.", + "text": "Any Project is accessible via the Dashboard. 
The Projects, which belong to or are shared with you, can be found here.", "placement": "bottom" }, { - "anchorEl": "osparc-test-id=templatesTabBtn", "beforeClick": { "selector": "osparc-test-id=templatesTabBtn" }, + "anchorEl": "osparc-test-id=templatesTabBtn", "title": "Tutorials", - "text": "A set of pre-built tutorial projects with results is available to all users. When a tutorial is selected, a copy is automatically created and added to the user’s Projects tab. This new copy is editable.", + "text": "Clicking on a Tutorial will create a copy of that Project, which will appear in your own Projects tab with the same name as the Tutorial. Any changes you make to this copy will not affect the original Tutorial.", + "placement": "bottom" + }] + }, + "navbar": { + "id": "navbar", + "name": "Navigation Bar", + "description": "Introduction to the Navigation Bar", + "context": "osparc-test-id=navigationBar", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=notificationsButton", + "event": "tap" + }, + "anchorEl": "osparc-test-id=notificationsContainer", + "text": "By clicking on the Bell, you will you see notifications about which Projects and Organizations have been shared with you.", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=helpNavigationBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=helpNavigationMenu", + "text": "Under the question mark, you find Manuals, Support and ways to give us Feedback. 
It also provides quick access to other Guided Tours.", + "placement": "left" + }, { + "beforeClick": { + "selector": "osparc-test-id=userMenuBtn", + "action": "open" + }, + "anchorEl": "osparc-test-id=userMenuMenu", + "text": "The User Menu gives you access to Your Account, Preferences, Organizations and more.", + "placement": "left" + }] + }, + "projects": { + "id": "projects", + "name": "Projects", + "description": "All you need to know about Project handling", + "context": "osparc-test-id=studiesTabBtn", + "steps": [{ + "beforeClick": { + "selector": "osparc-test-id=studiesTabBtn" + }, + "anchorEl": "osparc-test-id=startS4LButton", + "title": "Start Sim4Life.lite", + "text": "Clicking on this (+) Start Sim4Life.lite button, allows you to create and open a new Sim4Life.lite project", + "placement": "right" + }, { + "anchorEl": "osparc-test-id=searchBarFilter-textField-study", + "title": "Filter and Search", + "text": "This tool allows you to filter Projects and Tutorials.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=studyItemMenuButton", + "action": "open" + }, + "anchorEl": "osparc-test-id=studyItemMenuMenu", + "title": "More options button", + "text": "On the Project card, you can use the three dots button to access more information and operation on the Project.", + "placement": "left" + }, { + "anchorEl": "osparc-test-id=updateStudyBtn", + "title": "Update Services", + "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", "placement": "bottom" }] } diff --git a/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json b/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json index 71eedf418dc6..bdc28947d878 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json @@ -10,7 +10,7 @@ "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about what Studies, Credits and Organizations.", + "text": "By clicking on the Bell, you will you see notifications about which Studies and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { @@ -26,7 +26,7 @@ "action": "open" }, "anchorEl": "osparc-test-id=userMenuMenu", - "text": "The User Menu gives you access to Your Account, Billing Center, Preferences, Organizations and more.", + "text": "The User Menu gives you access to Your Account, Preferences, Organizations and more.", "placement": "left" }] }, diff --git a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json index c44afd38aadc..579de9a68883 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json +++ 
b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json @@ -10,7 +10,7 @@ "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about what Studies, Credits and Organizations.", + "text": "By clicking on the Bell, you will see notifications about which Studies, Credits and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { diff --git a/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile b/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile index 5890069e77b5..d5ec65a25922 100644 --- a/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile +++ b/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile @@ -5,14 +5,14 @@ # Note: context at osparc-simcore/services/static-webserver/client expected # ARG tag -FROM itisfoundation/qooxdoo-kit:${tag} as touch +FROM itisfoundation/qooxdoo-kit:${tag} AS touch WORKDIR /project ENV PATH=/home/node/node_modules/.bin:${PATH} RUN mkdir /project/build-output -FROM touch as build-client +FROM touch AS build-client # Installs contributions # Install packages (warning: cache might keep these library out-of-date!) 
@@ -59,7 +59,7 @@ RUN \ python3 ./scripts/post-compile.py -FROM joseluisq/static-web-server:2.32.1-alpine as server-base +FROM joseluisq/static-web-server:2.32.1-alpine AS server-base LABEL org.opencontainers.image.authors="GitHK, odeimaiz" @@ -79,12 +79,12 @@ RUN chown -R "${SC_USER_NAME}:${SC_USER_NAME}" /entrypoint.sh && \ USER ${SC_USER_NAME} -FROM server-base as production +FROM server-base AS production # front-end client last we copy the client application directly inside COPY --from=build-client \ --chown=${SC_USER_NAME}:${SC_USER_NAME} \ /project/build-output "/static-content" ENV SC_BUILD_TARGET production -FROM server-base as development +FROM server-base AS development ENV SC_BUILD_TARGET development diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index 688f5dfa9f9d..12e60feb78dc 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: # cd sercices/storage @@ -66,7 +66,7 @@ EXPOSE 8080 # + /build WORKDIR # -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET build @@ -112,7 +112,7 @@ RUN \ # + /build # + services/storage [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -133,7 +133,7 @@ RUN \ # --------------------------Production stage ------------------- # Final cleanup up to reduce image size and startup setup # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production \ @@ -175,7 +175,7 @@ CMD ["/bin/sh", "services/storage/docker/boot.sh"] # + packages (mounted volume) # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET=development \ SC_DEVEL_MOUNT=/devel/services/storage/ diff --git a/services/storage/requirements/_test.txt 
b/services/storage/requirements/_test.txt index c1abbffd5450..f0132fe4c7c1 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -156,7 +156,7 @@ multidict==6.0.5 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/storage/requirements/_tools.txt b/services/storage/requirements/_tools.txt index 44759acdfd41..6565ecfab1fd 100644 --- a/services/storage/requirements/_tools.txt +++ b/services/storage/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt index 25d417f91c78..26d5d78bff96 100644 --- a/services/storage/requirements/ci.txt +++ b/services/storage/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-aws-library @ ../../packages/aws-library/ diff --git a/services/web/Dockerfile b/services/web/Dockerfile index 1e134f8c106f..1a1a443333a4 100644 --- a/services/web/Dockerfile +++ b/services/web/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -FROM python:${PYTHON_VERSION}-slim-bookworm as base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base # # USAGE: # cd sercices/web @@ -69,7 +69,7 @@ EXPOSE 8080 # + src # + tests -FROM base as build +FROM base AS build ENV SC_BUILD_TARGET build @@ -111,7 +111,7 @@ RUN \ # + /build # + services/web/server [scu:scu] WORKDIR # -FROM build as prod-only-deps +FROM build AS prod-only-deps ENV SC_BUILD_TARGET prod-only-deps @@ -133,7 +133,7 @@ RUN \ # + /home/scu $HOME = WORKDIR # + docker # -FROM base as production +FROM base AS production ENV SC_BUILD_TARGET=production \ 
SC_BOOT_MODE=production @@ -185,7 +185,7 @@ CMD ["services/web/server/docker/boot.sh"] # + /devel WORKDIR # + services (mounted volume) # -FROM build as development +FROM build AS development ENV SC_BUILD_TARGET development diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index b7d14fef70a9..bf5eb3e7c293 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -75,6 +75,8 @@ anyio==4.3.0 # via # fast-depends # faststream +appdirs==1.4.4 + # via pint arrow==1.2.3 # via # -r requirements/../../../../packages/models-library/requirements/_base.in @@ -164,6 +166,10 @@ faststream==0.5.10 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +flexcache==0.3 + # via pint +flexparser==0.3.1 + # via pint frozenlist==1.4.1 # via # -c requirements/./constraints.txt @@ -365,7 +371,7 @@ passlib==1.7.4 # via -r requirements/_base.in pillow==10.3.0 # via captcha -pint==0.19.2 +pint==0.24.3 # via # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in @@ -564,7 +570,10 @@ typing-extensions==4.12.0 # aiodebug # aiodocker # faststream + # flexcache + # flexparser # opentelemetry-sdk + # pint # pydantic # typer ujson==5.5.0 diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 1b8cff637239..8b5473360380 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -102,7 +102,7 @@ multidict==6.0.2 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.11.2 +mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index 6aee7017f62b..2ce86b628302 100644 --- 
a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -30,7 +30,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via # -c requirements/_test.txt # -r requirements/../../../../requirements/devenv.txt diff --git a/services/web/server/requirements/ci.txt b/services/web/server/requirements/ci.txt index 9a171226abfa..f9917eb37488 100644 --- a/services/web/server/requirements/ci.txt +++ b/services/web/server/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-models-library @ ../../../packages/models-library diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index fcdec0f9eb3b..aa31ea71d11f 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -108,6 +108,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): env=["WEBSERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], # NOTE: suffix '_LOGLEVEL' is used overall ) + WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, env=["WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], diff --git a/services/web/server/src/simcore_service_webserver/errors.py b/services/web/server/src/simcore_service_webserver/errors.py index 173699f58882..1bc48eda031b 100644 --- a/services/web/server/src/simcore_service_webserver/errors.py +++ b/services/web/server/src/simcore_service_webserver/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class WebServerBaseError(OsparcErrorMixin, Exception): 
diff --git a/services/web/server/src/simcore_service_webserver/invitations/_core.py b/services/web/server/src/simcore_service_webserver/invitations/_core.py index b834ac55b26c..2bf18487638d 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/_core.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_core.py @@ -2,7 +2,7 @@ from contextlib import contextmanager from typing import Final -from aiohttp import ClientError, ClientResponseError, web +from aiohttp import ClientResponseError, web from models_library.api_schemas_invitations.invitations import ( ApiInvitationContent, ApiInvitationContentAndLink, @@ -11,7 +11,6 @@ from models_library.emails import LowerCaseEmailStr from pydantic import AnyHttpUrl, ValidationError, parse_obj_as from servicelib.aiohttp import status -from servicelib.error_codes import create_error_code from ..groups.api import is_user_by_email_in_group from ..products.api import Product @@ -35,31 +34,30 @@ def _handle_exceptions_as_invitations_errors(): except ClientResponseError as err: # check possible errors if err.status == status.HTTP_422_UNPROCESSABLE_ENTITY: - error_code = create_error_code(err) - _logger.exception( - "Invitation request unexpectedly failed [%s]", - f"{error_code}", - extra={"error_code": error_code}, - ) raise InvalidInvitationError( - reason=f"Unexpected error [{error_code}]" + invitations_api_response={ + "err": err, + "status": err.status, + "message": err.message, + "url": err.request_info.real_url, + }, ) from err assert err.status >= status.HTTP_400_BAD_REQUEST # nosec - # any other error status code - raise InvitationsServiceUnavailableError from err - except (ValidationError, ClientError) as err: - _logger.debug("Invitations error %s", f"{err}") - raise InvitationsServiceUnavailableError from err + # any other error status code + raise InvitationsServiceUnavailableError( + client_response_error=err, + ) from err except InvitationsError: # bypass: prevents that the 
Exceptions handler catches this exception raise except Exception as err: - _logger.exception("Unexpected error in invitations plugin") - raise InvitationsServiceUnavailableError from err + raise InvitationsServiceUnavailableError( + unexpected_error=err, + ) from err # diff --git a/services/web/server/src/simcore_service_webserver/invitations/errors.py b/services/web/server/src/simcore_service_webserver/invitations/errors.py index cde3e3ab5c79..881b62c6df9a 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/errors.py +++ b/services/web/server/src/simcore_service_webserver/invitations/errors.py @@ -14,11 +14,8 @@ class InvitationsError(WebServerBaseError, ValueError): class InvalidInvitationError(InvitationsError): - msg_template = "Invalid invitation. {reason}" + msg_template = "Invalid invitation" class InvitationsServiceUnavailableError(InvitationsError): - msg_template = ( - "Unable to process your invitation since the invitations service is currently unavailable. " - "Please try again later." 
- ) + msg_template = "Cannot process invitations" diff --git a/services/web/server/src/simcore_service_webserver/login/_2fa_api.py b/services/web/server/src/simcore_service_webserver/login/_2fa_api.py index 06a96f00e6df..fc844dd79f6f 100644 --- a/services/web/server/src/simcore_service_webserver/login/_2fa_api.py +++ b/services/web/server/src/simcore_service_webserver/login/_2fa_api.py @@ -13,8 +13,7 @@ from aiohttp import web from models_library.users import UserID from pydantic import BaseModel, Field -from servicelib.error_codes import create_error_code -from servicelib.logging_utils import LogExtra, get_log_record_extra, log_decorator +from servicelib.logging_utils import log_decorator from servicelib.utils_secrets import generate_passcode from settings_library.twilio import TwilioSettings from twilio.base.exceptions import TwilioException # type: ignore[import-untyped] @@ -132,15 +131,11 @@ def _sender(): await asyncio.get_event_loop().run_in_executor(executor=None, func=_sender) except TwilioException as exc: - error_code = create_error_code(exc) - log_extra: LogExtra = get_log_record_extra(user_id=user_id) or {} - log.exception( - "Failed while setting up 2FA code and sending SMS to %s [%s]", - mask_phone_number(phone_number), - f"{error_code}", - extra={"error_code": error_code, **log_extra}, - ) - raise SendingVerificationSmsError(reason=exc) from exc + raise SendingVerificationSmsError( + reason=f"Could not send SMS to {mask_phone_number(phone_number)}", + user_id=user_id, + twilio_error=exc, + ) from exc # @@ -177,16 +172,13 @@ async def send_email_code( "product": product, }, ) - except TwilioException as exc: - error_code = create_error_code(exc) - log_extra: LogExtra = get_log_record_extra(user_id=user_id) or {} - log.exception( - "Failed while setting up 2FA code and sending Email to %s [%s]", - user_email, - f"{error_code}", - extra={"error_code": error_code, **log_extra}, - ) - raise SendingVerificationEmailError(reason=exc) from exc + except 
Exception as exc: + raise SendingVerificationEmailError( + reason=f"Could not send email to {user_email}", + user_id=user_id, + user_email=user_email, + email_error=exc, + ) from exc # diff --git a/services/web/server/src/simcore_service_webserver/login/_registration.py b/services/web/server/src/simcore_service_webserver/login/_registration.py index 322dbb026c49..1cfc53396d29 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration.py @@ -10,6 +10,7 @@ from datetime import datetime from aiohttp import web +from common_library.error_codes import create_error_code from models_library.basic_types import IdInt from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName @@ -22,6 +23,7 @@ parse_obj_as, validator, ) +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.confirmations import ConfirmationAction from simcore_postgres_database.models.users import UserStatus @@ -211,17 +213,38 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: yield url except (ValidationError, InvalidInvitationError) as err: - msg = f"{err}" - if isinstance(err, ValidationError): - msg = f"{InvalidInvitationError(reason='')}" + error_code = create_error_code(err) + user_error_msg = ( + f"Invalid invitation. {MSG_INVITATIONS_CONTACT_SUFFIX} [{error_code}]" + ) + + _logger.exception( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + tip="Something went wrong with the invitation", + ) + ) raise web.HTTPForbidden( - reason=f"{msg}. 
{MSG_INVITATIONS_CONTACT_SUFFIX}", + reason=user_error_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from err except InvitationsServiceUnavailableError as err: + error_code = create_error_code(err) + user_error_msg = f"Unable to process your invitation since the invitations service is currently unavailable [{error_code}]" + + _logger.exception( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + tip="Something went wrong communicating the `invitations` service", + ) + ) raise web.HTTPServiceUnavailable( - reason=f"{err}", + reason=user_error_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from err diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py index 2fb27431bf4a..7e9e681710d5 100644 --- a/services/web/server/src/simcore_service_webserver/login/decorators.py +++ b/services/web/server/src/simcore_service_webserver/login/decorators.py @@ -6,7 +6,12 @@ from servicelib.request_keys import RQT_USERID_KEY from ..products.api import get_product_name -from ..security.api import AuthContextDict, check_user_authorized, check_user_permission +from ..security.api import ( + PERMISSION_PRODUCT_LOGIN_KEY, + AuthContextDict, + check_user_authorized, + check_user_permission, +) def login_required(handler: HandlerAnyReturn) -> HandlerAnyReturn: @@ -53,7 +58,7 @@ async def _wrapper(request: web.Request): await check_user_permission( request, - "product", + PERMISSION_PRODUCT_LOGIN_KEY, context=AuthContextDict( product_name=get_product_name(request), authorized_uid=user_id, diff --git a/services/web/server/src/simcore_service_webserver/login/errors.py b/services/web/server/src/simcore_service_webserver/login/errors.py index e0b34c7787a5..56588b87df68 100644 --- a/services/web/server/src/simcore_service_webserver/login/errors.py +++ b/services/web/server/src/simcore_service_webserver/login/errors.py @@ -1,12 +1,16 @@ import functools 
+import logging from aiohttp import web from servicelib.aiohttp.typing_extension import Handler +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from ..errors import WebServerBaseError from ._constants import MSG_2FA_UNAVAILABLE_OEC +_logger = logging.getLogger(__name__) + class LoginError(WebServerBaseError, ValueError): ... @@ -27,8 +31,18 @@ async def wrapper(request: web.Request) -> web.StreamResponse: return await handler(request) except (SendingVerificationSmsError, SendingVerificationEmailError) as exc: + error_code = exc.error_code() + front_end_msg = MSG_2FA_UNAVAILABLE_OEC.format(error_code=error_code) + # in these cases I want to log the cause + _logger.exception( + **create_troubleshotting_log_kwargs( + front_end_msg, + error=exc, + error_code=error_code, + ) + ) raise web.HTTPServiceUnavailable( - reason=MSG_2FA_UNAVAILABLE_OEC.format(error_code=exc.code), + reason=front_end_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from exc diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py b/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py index ecb99ce84e7a..63c4505c6475 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py @@ -4,6 +4,7 @@ from aiohttp import web from aiohttp.web import RouteTableDef +from common_library.error_codes import create_error_code from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName from pydantic import ( @@ -20,7 +21,7 @@ parse_request_body_as, parse_request_path_parameters_as, ) -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from 
simcore_postgres_database.errors import UniqueViolation from yarl import URL @@ -179,17 +180,25 @@ async def validate_confirmation_and_redirect(request: web.Request): except Exception as err: # pylint: disable=broad-except error_code = create_error_code(err) + user_error_msg = ( + f"Sorry, we cannot confirm your {action}." + "Please try again in a few moments. " + f"If the problem persist please contact support attaching this code ({error_code})" + ) + _logger.exception( - "Failed during email_confirmation [%s]", - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + tip="Failed during email_confirmation", + ) ) + raise create_redirect_to_page_response( request.app, page="error", - message=f"Sorry, we cannot confirm your {action}." - "Please try again in a few moments. " - "If the problem persist please contact support attaching this code ({error_code})", + message=user_error_msg, status_code=status.HTTP_503_SERVICE_UNAVAILABLE, ) from err diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py index d3f553db71c0..7c1401847678 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py @@ -4,11 +4,12 @@ from aiohttp import web from aiohttp.web import RouteTableDef +from common_library.error_codes import create_error_code from models_library.emails import LowerCaseEmailStr from pydantic import BaseModel, Field, PositiveInt, SecretStr, validator from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import 
MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.users import UserStatus @@ -58,7 +59,7 @@ ) from .utils_email import get_template_path, send_email_from_template -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) routes = RouteTableDef() @@ -266,19 +267,27 @@ async def register(request: web.Request): ) except Exception as err: # pylint: disable=broad-except error_code = create_error_code(err) - log.exception( - "Failed while sending confirmation email to %s, %s [%s]", - f"{user=}", - f"{_confirmation=}", - f"{error_code}", - extra={"error_code": error_code}, + user_error_msg = f"{MSG_CANT_SEND_MAIL} [{error_code}]" + + _logger.exception( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + error_context={ + "request": request, + "registration": registration, + "user_id": user.get("id"), + "user": user, + "confirmation": _confirmation, + }, + tip="Failed while sending confirmation email", + ) ) await db.delete_confirmation_and_user(user, _confirmation) - raise web.HTTPServiceUnavailable( - reason=f"{MSG_CANT_SEND_MAIL} [{error_code}]" - ) from err + raise web.HTTPServiceUnavailable(reason=user_error_msg) from err return flash_response( "You are registered successfully! 
To activate your account, please, " @@ -400,13 +409,19 @@ async def register_phone(request: web.Request): except Exception as err: # pylint: disable=broad-except # Unhandled errors -> 503 error_code = create_error_code(err) - log.exception( - "Phone registration failed [%s]", - f"{error_code}", - extra={"error_code": error_code}, + user_error_msg = f"Currently we cannot register phone numbers [{error_code}]" + + _logger.exception( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + error_context={"request": request, "registration": registration}, + tip="Phone registration failed", + ) ) raise web.HTTPServiceUnavailable( - reason=f"Currently we cannot register phone numbers ({error_code})", + reason=user_error_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from err diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py index 68829e3489a8..bbc4e6203a9e 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py @@ -5,20 +5,17 @@ """ - import logging -from typing import Any +from ast import TypeAlias +from typing import Annotated, Any from models_library.projects_nodes import OutputsDict from models_library.projects_nodes_io import NodeIDStr -from pydantic import BaseModel, ConstrainedInt, Field +from pydantic import BaseModel, Field _logger = logging.getLogger(__name__) - -class ProgressInt(ConstrainedInt): - ge = 0 - le = 100 +ProgressInt: TypeAlias = Annotated[int, Field(ge=0, le=100)] class ExtractedResults(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/products/_api.py b/services/web/server/src/simcore_service_webserver/products/_api.py index 3d8c38a14f3e..ed5b08b5ee17 100644 --- a/services/web/server/src/simcore_service_webserver/products/_api.py +++ 
b/services/web/server/src/simcore_service_webserver/products/_api.py @@ -16,6 +16,7 @@ def get_product_name(request: web.Request) -> str: + """Returns product name in request but might be undefined""" product_name: str = request[RQ_PRODUCT_KEY] return product_name diff --git a/services/web/server/src/simcore_service_webserver/products/_middlewares.py b/services/web/server/src/simcore_service_webserver/products/_middlewares.py index fbc7885b6d7b..5a962e25ef7b 100644 --- a/services/web/server/src/simcore_service_webserver/products/_middlewares.py +++ b/services/web/server/src/simcore_service_webserver/products/_middlewares.py @@ -1,4 +1,5 @@ import logging +import textwrap from collections import OrderedDict from aiohttp import web @@ -12,12 +13,25 @@ _logger = logging.getLogger(__name__) +def _get_default_product_name(app: web.Application) -> str: + product_name: str = app[f"{APP_PRODUCTS_KEY}_default"] + return product_name + + def _discover_product_by_hostname(request: web.Request) -> str | None: products: OrderedDict[str, Product] = request.app[APP_PRODUCTS_KEY] + # + # SEE https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-Host + # SEE https://doc.traefik.io/traefik/getting-started/faq/#what-are-the-forwarded-headers-when-proxying-http-requests + originating_hosts = [ + request.headers.get("X-Forwarded-Host"), + request.host, + ] for product in products.values(): - if product.host_regex.search(request.host): - product_name: str = product.name - return product_name + for host in originating_hosts: + if host and product.host_regex.search(host): + product_name: str = product.name + return product_name return None @@ -30,9 +44,17 @@ def _discover_product_by_request_header(request: web.Request) -> str | None: return None -def _get_app_default_product_name(request: web.Request) -> str: - product_name: str = request.app[f"{APP_PRODUCTS_KEY}_default"] - return product_name +def _get_debug_msg(request: web.Request): + return "\n".join( + [ + 
f"{request.url=}", + f"{request.host=}", + f"{request.remote=}", + *[f"{k}:{request.headers[k][:20]}" for k in request.headers], + f"{request.headers.get('X-Forwarded-Host')=}", + f"{request.get(RQ_PRODUCT_KEY)=}", + ] + ) @web.middleware @@ -43,35 +65,37 @@ async def discover_product_middleware(request: web.Request, handler: Handler): - request[RQ_PRODUCT_KEY] is set to discovered product in 3 types of entrypoints - if no product discovered, then it is set to default """ - # - API entrypoints - # - /static info for front-end + if ( + # - API entrypoints + # - /static info for front-end + # - socket-io request.path.startswith(f"/{API_VTAG}") - or request.path == "/static-frontend-data.json" - or request.path == "/socket.io/" + or request.path in {"/static-frontend-data.json", "/socket.io/"} ): - product_name = ( + request[RQ_PRODUCT_KEY] = ( _discover_product_by_request_header(request) or _discover_product_by_hostname(request) - or _get_app_default_product_name(request) + or _get_default_product_name(request.app) ) - request[RQ_PRODUCT_KEY] = product_name - - # - Publications entrypoint: redirections from other websites. SEE studies_access.py::access_study - # - Root entrypoint: to serve front-end apps - elif ( - request.path.startswith("/study/") - or request.path.startswith("/view") - or request.path == "/" - ): - product_name = _discover_product_by_hostname( - request - ) or _get_app_default_product_name(request) - request[RQ_PRODUCT_KEY] = product_name + else: + # - Publications entrypoint: redirections from other websites. 
SEE studies_access.py::access_study + # - Root entrypoint: to serve front-end apps + assert ( # nosec + request.path.startswith("/dev/") + or request.path.startswith("/study/") + or request.path.startswith("/view") + or request.path == "/" + ) + request[RQ_PRODUCT_KEY] = _discover_product_by_hostname( + request + ) or _get_default_product_name(request.app) - assert request.get(RQ_PRODUCT_KEY) is not None or request.path.startswith( # nosec - "/dev/doc" + _logger.debug( + "Product middleware result: \n%s\n", + textwrap.indent(_get_debug_msg(request), " "), ) + assert request[RQ_PRODUCT_KEY] # nosec return await handler(request) diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py index 44b6083f566b..612c1e649756 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py @@ -1,4 +1,5 @@ -""" AUTHoriZation (auth) policy: +""" AUTHoriZation (auth) policy + """ import contextlib @@ -23,7 +24,7 @@ has_access_by_role, ) from ._authz_db import AuthInfoDict, get_active_user_or_none, is_user_in_product_name -from ._constants import MSG_AUTH_NOT_AVAILABLE +from ._constants import MSG_AUTH_NOT_AVAILABLE, PERMISSION_PRODUCT_LOGIN_KEY from ._identity_api import IdentityStr _logger = logging.getLogger(__name__) @@ -132,7 +133,7 @@ async def permits( context = context or AuthContextDict() # product access - if permission == "product": + if permission == PERMISSION_PRODUCT_LOGIN_KEY: product_name = context.get("product_name") ok: bool = product_name is not None and await self._has_access_to_product( user_id=auth_info["id"], product_name=product_name diff --git a/services/web/server/src/simcore_service_webserver/security/_constants.py b/services/web/server/src/simcore_service_webserver/security/_constants.py index a00420e1db3b..a7b03fb3db7e 100644 --- 
a/services/web/server/src/simcore_service_webserver/security/_constants.py +++ b/services/web/server/src/simcore_service_webserver/security/_constants.py @@ -1,3 +1,5 @@ from typing import Final MSG_AUTH_NOT_AVAILABLE: Final[str] = "Authentication service is temporary unavailable" + +PERMISSION_PRODUCT_LOGIN_KEY: Final[str] = "product.login" diff --git a/services/web/server/src/simcore_service_webserver/security/api.py b/services/web/server/src/simcore_service_webserver/security/api.py index 268420560cb2..7ebb722a7519 100644 --- a/services/web/server/src/simcore_service_webserver/security/api.py +++ b/services/web/server/src/simcore_service_webserver/security/api.py @@ -6,7 +6,6 @@ NOTE: DO NOT USE aiohttp_security.api directly but use this interface instead """ - import aiohttp_security.api # type: ignore[import-untyped] import passlib.hash from aiohttp import web @@ -14,8 +13,11 @@ from ._authz_access_model import AuthContextDict, OptionalContext, RoleBasedAccessModel from ._authz_policy import AuthorizationPolicy +from ._constants import PERMISSION_PRODUCT_LOGIN_KEY from ._identity_api import forget_identity, remember_identity +assert PERMISSION_PRODUCT_LOGIN_KEY # nosec + def get_access_model(app: web.Application) -> RoleBasedAccessModel: autz_policy: AuthorizationPolicy = app[aiohttp_security.api.AUTZ_KEY] @@ -64,7 +66,9 @@ async def check_user_permission( allowed = await aiohttp_security.api.permits(request, permission, context) if not allowed: - raise web.HTTPForbidden(reason=f"Not sufficient access rights for {permission}") + raise web.HTTPForbidden( + reason=f"You do not have sufficient access rights for {permission}" + ) # @@ -93,5 +97,6 @@ def check_password(password: str, password_hash: str) -> bool: "forget_identity", "get_access_model", "is_anonymous", + "PERMISSION_PRODUCT_LOGIN_KEY", "remember_identity", ) diff --git a/services/web/server/src/simcore_service_webserver/session/_cookie_storage.py 
b/services/web/server/src/simcore_service_webserver/session/_cookie_storage.py new file mode 100644 index 000000000000..2f42750e136b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/session/_cookie_storage.py @@ -0,0 +1,87 @@ +""" +Extends aiohttp_session.cookie_storage + +""" + +import logging +import time + +import aiohttp_session +from aiohttp import web +from aiohttp_session.cookie_storage import EncryptedCookieStorage + +from .errors import SessionValueError + +_logger = logging.getLogger(__name__) + + +def _share_cookie_across_all_subdomains( + request: web.BaseRequest, params: aiohttp_session._CookieParams +) -> aiohttp_session._CookieParams: + """ + Shares cookie across all subdomains, by appending a dot (`.`) in front of the domain name + overwrite domain from `None` (browser sets `example.com`) to `.example.com` + """ + host = request.url.host + if host is None: + raise SessionValueError( + invalid="host", host=host, request_url=request.url, params=params + ) + + params["domain"] = f".{host.lstrip('.')}" + + return params + + +class SharedCookieEncryptedCookieStorage(EncryptedCookieStorage): + async def save_session( + self, + request: web.Request, + response: web.StreamResponse, + session: aiohttp_session.Session, + ) -> None: + # link response to originating request (allows to detect the orginal request url) + response._req = request # pylint:disable=protected-access # noqa: SLF001 + + await super().save_session(request, response, session) + + def save_cookie( + self, + response: web.StreamResponse, + cookie_data: str, + *, + max_age: int | None = None, + ) -> None: + + params = self._cookie_params.copy() + request = response._req # pylint:disable=protected-access # noqa: SLF001 + if not request: + raise SessionValueError( + invalid="request", + invalid_request=request, + response=response, + params=params, + ) + + params = _share_cookie_across_all_subdomains(request, params) + + # 
-------------------------------------------------------- + # WARNING: the code below is taken and adapted from the superclass + # implementation `EncryptedCookieStorage.save_cookie` + # Adjust in case the base library changes. + assert aiohttp_session.__version__ == "2.11.0" # nosec + # -------------------------------------------------------- + + if max_age is not None: + params["max_age"] = max_age + t = time.gmtime(time.time() + max_age) + params["expires"] = time.strftime("%a, %d-%b-%Y %T GMT", t) + + if not cookie_data: + response.del_cookie( + self._cookie_name, + domain=params.get("domain"), + path=params.get("path", "/"), + ) + else: + response.set_cookie(self._cookie_name, cookie_data, **params) diff --git a/services/web/server/src/simcore_service_webserver/session/errors.py b/services/web/server/src/simcore_service_webserver/session/errors.py new file mode 100644 index 000000000000..12d54cc3fb8b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/session/errors.py @@ -0,0 +1,5 @@ +from ..errors import WebServerBaseError + + +class SessionValueError(WebServerBaseError, ValueError): + msg_template = "Invalid {invalid} in session" diff --git a/services/web/server/src/simcore_service_webserver/session/plugin.py b/services/web/server/src/simcore_service_webserver/session/plugin.py index 173c1af65fbd..68f8981fe405 100644 --- a/services/web/server/src/simcore_service_webserver/session/plugin.py +++ b/services/web/server/src/simcore_service_webserver/session/plugin.py @@ -7,10 +7,10 @@ import aiohttp_session from aiohttp import web -from aiohttp_session.cookie_storage import EncryptedCookieStorage from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME +from ._cookie_storage import SharedCookieEncryptedCookieStorage from .settings import SessionSettings, get_plugin_settings _logger = logging.getLogger(__name__) diff --git 
a/services/web/server/src/simcore_service_webserver/statics/_handlers.py b/services/web/server/src/simcore_service_webserver/statics/_handlers.py index e3ac6c1b5b4d..ecda8a0a83e1 100644 --- a/services/web/server/src/simcore_service_webserver/statics/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/statics/_handlers.py @@ -20,8 +20,7 @@ async def get_cached_frontend_index(request: web.Request): product_name in FRONTEND_APPS_AVAILABLE ), "Every product is mapped with a front-end app with IDENTICAL name" - # NOTE: CANNOT redirect , i.e. - # raise web.HTTPFound(f"/{target_frontend}/index.html") + # NOTE: CANNOT redirect , i.e. `web.HTTPFound(f"/{target_frontend}/index.html")` # because it losses fragments and therefore it fails in study links. # # SEE services/web/server/tests/unit/isolated/test_redirections.py diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py index 1b60fd5f7e06..237aed0f7fd1 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py @@ -8,6 +8,7 @@ from typing import TypeAlias from aiohttp import web +from common_library.error_codes import create_error_code from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services import ServiceKey, ServiceVersion @@ -15,7 +16,7 @@ from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as from servicelib.aiohttp.typing_extension import Handler -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from ..director_v2.api import update_dynamic_service_networks_in_project from ..products.api import 
get_product_name @@ -124,16 +125,22 @@ async def wrapper(request: web.Request) -> web.StreamResponse: except (ValidationError, web.HTTPServerError, Exception) as err: error_code = create_error_code(err) + + user_error_msg = compose_support_error_msg( + msg=MSG_UNEXPECTED_ERROR.format(hint=""), error_code=error_code + ) _logger.exception( - "Unexpected failure while dispatching study [%s]", - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + error_context={"request": request}, + tip="Unexpected failure while dispatching study", + ) ) raise _create_redirect_response_to_error_page( request.app, - message=compose_support_error_msg( - msg=MSG_UNEXPECTED_ERROR.format(hint=""), error_code=error_code - ), + message=user_error_msg, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, ) from err diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py index 9545675d7ec4..85d47f0dba87 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py @@ -20,10 +20,11 @@ from aiohttp import web from aiohttp_session import get_session +from common_library.error_codes import create_error_code from models_library.projects import ProjectID from servicelib.aiohttp import status from servicelib.aiohttp.typing_extension import Handler -from servicelib.error_codes import create_error_code +from servicelib.logging_errors import create_troubleshotting_log_kwargs from .._constants import INDEX_RESOURCE_NAME from ..director_v2._core_computations import create_or_update_pipeline @@ -258,17 +259,22 @@ async def wrapper(request: web.Request) -> web.StreamResponse: except Exception as err: error_code = create_error_code(err) + 
user_error_msg = compose_support_error_msg( + msg=MSG_UNEXPECTED_ERROR.format(hint=""), error_code=error_code + ) _logger.exception( - "Unexpected failure while dispatching study [%s]", - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + tip="Unexpected failure while dispatching study", + ) ) + raise create_redirect_to_page_response( request.app, page="error", - message=compose_support_error_msg( - msg=MSG_UNEXPECTED_ERROR.format(hint=""), error_code=error_code - ), + message=user_error_msg, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, ) from err @@ -327,13 +333,19 @@ async def get_redirection_to_study_page(request: web.Request) -> web.Response: # we cannot accept any more users. # error_code = create_error_code(exc) + + user_error_msg = MSG_TOO_MANY_GUESTS _logger.exception( - "Failed to create guest user. Responded with 429 Too Many Requests [%s]", - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exc, + error_code=error_code, + tip="Failed to create guest user. 
Responded with 429 Too Many Requests", + ) ) + raise RedirectToFrontEndPageError( - MSG_TOO_MANY_GUESTS, + user_error_msg, error_code=error_code, status_code=status.HTTP_429_TOO_MANY_REQUESTS, ) from exc @@ -353,15 +365,26 @@ async def get_redirection_to_study_page(request: web.Request) -> web.Response: except Exception as exc: # pylint: disable=broad-except error_code = create_error_code(exc) + + user_error_msg = MSG_UNEXPECTED_ERROR.format(hint="while copying your study") _logger.exception( - "Failed while copying project '%s' to '%s' [%s]", - template_project.get("name"), - user.get("email"), - f"{error_code}", - extra={"error_code": error_code}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exc, + error_code=error_code, + error_context={ + "user_id": user.get("id"), + "user": dict(user), + "template_project": { + k: template_project.get(k) for k in ["name", "uuid"] + }, + }, + tip=f"Failed while copying project '{template_project.get('name')}' to '{user.get('email')}'", + ) ) + raise RedirectToFrontEndPageError( - MSG_UNEXPECTED_ERROR.format(hint="while copying your study"), + user_error_msg, error_code=error_code, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, ) from exc diff --git a/services/web/server/src/simcore_service_webserver/users/_handlers.py b/services/web/server/src/simcore_service_webserver/users/_handlers.py index a8516095e573..3e2018c7d9b2 100644 --- a/services/web/server/src/simcore_service_webserver/users/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_handlers.py @@ -9,11 +9,7 @@ parse_request_query_parameters_as, ) from servicelib.aiohttp.typing_extension import Handler -from servicelib.error_codes import create_error_code -from servicelib.logging_utils import ( - create_troubleshotting_log_message, - get_log_record_extra, -) +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.request_keys 
import RQT_USERID_KEY from servicelib.rest_constants import RESPONSE_MODEL_POLICY @@ -53,24 +49,17 @@ async def wrapper(request: web.Request) -> web.StreamResponse: except UserNotFoundError as exc: raise web.HTTPNotFound(reason=f"{exc}") from exc except MissingGroupExtraPropertiesForProductError as exc: - error_code = create_error_code(exc) - frontend_msg = FMSG_MISSING_CONFIG_WITH_OEC.format(error_code=error_code) - log_msg = create_troubleshotting_log_message( - message_to_user=frontend_msg, - error=exc, - error_code=error_code, - error_context=exc.error_context(), - tip="Row in `groups_extra_properties` for this product is missing.", - ) - + error_code = exc.error_code() + user_error_msg = FMSG_MISSING_CONFIG_WITH_OEC.format(error_code=error_code) _logger.exception( - log_msg, - extra=get_log_record_extra( + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exc, error_code=error_code, - user_id=exc.error_context().get("user_id", None), - ), + tip="Row in `groups_extra_properties` for this product is missing.", + ) ) - raise web.HTTPServiceUnavailable(reason=frontend_msg) from exc + raise web.HTTPServiceUnavailable(reason=user_error_msg) from exc return wrapper diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index d9ff8b9fcdaa..0942266dce9d 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -13,8 +13,8 @@ from typing import Any, TypedDict, cast import orjson +from common_library.error_codes import ErrorCodeStr from models_library.basic_types import SHA1Str -from servicelib.error_codes import ErrorCodeStr _CURRENT_DIR = ( Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent diff --git a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py index 
e7c67919f108..3aa26158acbd 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py @@ -2,6 +2,7 @@ import logging from aiohttp import web +from common_library.error_codes import create_error_code from models_library.api_schemas_webserver.wallets import ( CreateWalletBodyParams, PutWalletBodyParams, @@ -18,8 +19,7 @@ parse_request_path_parameters_as, ) from servicelib.aiohttp.typing_extension import Handler -from servicelib.error_codes import create_error_code -from servicelib.logging_utils import LogExtra, get_log_record_extra +from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.request_keys import RQT_USERID_KEY from .._constants import RQ_PRODUCT_KEY @@ -89,18 +89,19 @@ async def wrapper(request: web.Request) -> web.StreamResponse: raise web.HTTPConflict(reason=MSG_PRICE_NOT_DEFINED_ERROR) from exc except BillingDetailsNotFoundError as exc: + error_code = create_error_code(exc) - log_extra: LogExtra = {} - if user_id := getattr(exc, "user_id", None): - log_extra = get_log_record_extra(user_id=user_id) or {} + user_error_msg = f"{MSG_BILLING_DETAILS_NOT_DEFINED_ERROR} [{error_code}]" - log_msg = f"{exc} [{error_code}]" _logger.exception( - log_msg, - extra={"error_code": error_code, **log_extra}, + **create_troubleshotting_log_kwargs( + user_error_msg, + error=exc, + error_code=error_code, + ) ) - user_msg = f"{MSG_BILLING_DETAILS_NOT_DEFINED_ERROR} ({error_code})" - raise web.HTTPServiceUnavailable(reason=user_msg) from exc + + raise web.HTTPServiceUnavailable(reason=user_error_msg) from exc return wrapper diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index 2dcdb190ea9d..c52977d7115a 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ 
b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -8,7 +8,7 @@ from collections.abc import AsyncIterable, Awaitable, Callable from copy import deepcopy from pathlib import Path -from typing import Any +from typing import Any, NamedTuple from unittest import mock from uuid import UUID, uuid4 @@ -275,7 +275,7 @@ async def get_template_project( ) -async def get_group(client, user): +async def get_group(client: TestClient, user): """Creates a group for a given user""" return await create_user_group( app=client.app, @@ -284,7 +284,7 @@ async def get_group(client, user): ) -async def invite_user_to_group(client, owner, invitee, group): +async def invite_user_to_group(client: TestClient, owner, invitee, group): """Invite a user to a group on which the owner has writes over""" await add_user_in_group( client.app, @@ -303,13 +303,19 @@ async def change_user_role( ) +class SioConnectionData(NamedTuple): + sio: socketio.AsyncClient + resource_key: UserSessionDict + + async def connect_to_socketio( - client, + client: TestClient, user, socketio_client_factory: Callable[..., Awaitable[socketio.AsyncClient]], -): +) -> SioConnectionData: """Connect a user to a socket.io""" - socket_registry = get_registry(client.server.app) + assert client.app + socket_registry = get_registry(client.app) cur_client_session_id = f"{uuid4()}" sio = await socketio_client_factory(cur_client_session_id, client) resource_key: UserSessionDict = { @@ -323,14 +329,18 @@ async def connect_to_socketio( resource_key, "socket_id" ) assert len(await socket_registry.find_resources(resource_key, "socket_id")) == 1 - return sio, resource_key + return SioConnectionData(sio, resource_key) -async def disconnect_user_from_socketio(client, sio_connection_data) -> None: +async def disconnect_user_from_socketio( + client: TestClient, sio_connection_data: SioConnectionData +) -> None: """disconnect a previously connected socket.io connection""" sio, resource_key = sio_connection_data sid = 
sio.get_sid() - socket_registry = get_registry(client.server.app) + + assert client.app + socket_registry = get_registry(client.app) await sio.disconnect() assert not sio.sid await asyncio.sleep(0) # just to ensure there is a context switch diff --git a/services/web/server/tests/unit/isolated/conftest.py b/services/web/server/tests/unit/isolated/conftest.py index f4436d35fa14..9cc0948ff88f 100644 --- a/services/web/server/tests/unit/isolated/conftest.py +++ b/services/web/server/tests/unit/isolated/conftest.py @@ -7,7 +7,10 @@ from faker import Faker from pytest_mock import MockerFixture from pytest_simcore.helpers.dict_tools import ConfigDict -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import ( + setenvs_from_dict, + setenvs_from_envfile, +) from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -89,6 +92,145 @@ def mock_env_deployer_pipeline(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: ) +@pytest.fixture +def mock_env_devel_environment( + mock_env_devel_environment: EnvVarsDict, # pylint: disable=redefined-outer-name + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + # Overrides to ensure dev-features are enabled testings + return mock_env_devel_environment | setenvs_from_dict( + monkeypatch, + envs={ + "WEBSERVER_DEV_FEATURES_ENABLED": "1", + }, + ) + + +@pytest.fixture +def mock_env_makefile(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: + """envvars produced @Makefile (export)""" + # TODO: add Makefile recipe 'make dump-envs' to produce the file we load here + return setenvs_from_dict( + monkeypatch, + { + "API_SERVER_API_VERSION": "0.3.0", + "BUILD_DATE": "2022-01-14T21:28:15Z", + "CATALOG_API_VERSION": "0.3.2", + "CLIENT_WEB_OUTPUT": "/home/crespo/devp/osparc-simcore/services/static-webserver/client/source-output", + "DATCORE_ADAPTER_API_VERSION": "0.1.0-alpha", + "DIRECTOR_API_VERSION": "0.1.0", + "DIRECTOR_V2_API_VERSION": "2.0.0", + "DOCKER_IMAGE_TAG": "production", 
+ "DOCKER_REGISTRY": "local", + "S3_ENDPOINT": "http://127.0.0.1:9001", + "STORAGE_API_VERSION": "0.2.1", + "SWARM_HOSTS": "", + "SWARM_STACK_NAME": "master-simcore", + "SWARM_STACK_NAME_NO_HYPHEN": "master_simcore", + "VCS_REF_CLIENT": "99b8022d2", + "VCS_STATUS_CLIENT": "'modified/untracked'", + "VCS_URL": "git@github.com:pcrespov/osparc-simcore.git", + "WEBSERVER_API_VERSION": "0.7.0", + }, + ) + + +@pytest.fixture +def mock_env_dockerfile_build(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: + # + # docker run -it --hostname "{{.Node.Hostname}}-{{.Service.Name}}-{{.Task.Slot}}" local/webserver:production printenv + # + return setenvs_from_envfile( + monkeypatch, + """\ + GPG_KEY=123456789123456789 + HOME=/home/scu + HOSTNAME=osparc-master-55-master-simcore_master_webserver-1 + IS_CONTAINER_CONTEXT=Yes + LANG=C.UTF-8 + PATH=/home/scu/.venv/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + PWD=/home/scu + PYTHON_GET_PIP_SHA256=6123659241292b2147b58922b9ffe11dda66b39d52d8a6f3aa310bc1d60ea6f7 + PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/a1675ab6c2bd898ed82b1f58c486097f763c74a9/public/get-pip.py + PYTHON_PIP_VERSION=21.1.3 + PYTHON_VERSION=3.11.9 + PYTHONDONTWRITEBYTECODE=1 + PYTHONOPTIMIZE=TRUE + SC_BOOT_MODE=production + SC_BUILD_DATE=2022-01-09T12:26:29Z + SC_BUILD_TARGET=production + SC_HEALTHCHECK_INTERVAL=30 + SC_HEALTHCHECK_RETRY=3 + SC_USER_ID=8004 + SC_USER_NAME=scu + SC_VCS_REF=dd536f998 + SC_VCS_URL=git@github.com:ITISFoundation/osparc-simcore.git + TERM=xterm + VIRTUAL_ENV=/home/scu/.venv + """, + ) + + +@pytest.fixture +def mock_webserver_service_environment( + monkeypatch: pytest.MonkeyPatch, + mock_env_makefile: EnvVarsDict, # pylint: disable=redefined-outer-name + mock_env_devel_environment: EnvVarsDict, # pylint: disable=redefined-outer-name + mock_env_dockerfile_build: EnvVarsDict, # pylint: disable=redefined-outer-name + mock_env_deployer_pipeline: EnvVarsDict, # pylint: disable=redefined-outer-name +) 
-> EnvVarsDict: + """ + Mocks environment produce in the docker compose config with a .env (.env-devel) + and launched with a makefile + """ + # @docker compose config (overrides) + # TODO: get from docker compose config + # r'- ([A-Z2_]+)=\$\{\1:-([\w-]+)\}' + + # - .env-devel + docker-compose service environs + # hostname: "{{.Node.Hostname}}-{{.Service.Name}}-{{.Task.Slot}}" + + # environment: + # - CATALOG_HOST=${CATALOG_HOST:-catalog} + # - CATALOG_PORT=${CATALOG_PORT:-8000} + # - DIAGNOSTICS_MAX_AVG_LATENCY=10 + # - DIAGNOSTICS_MAX_TASK_DELAY=30 + # - DIRECTOR_PORT=${DIRECTOR_PORT:-8080} + # - DIRECTOR_V2_HOST=${DIRECTOR_V2_HOST:-director-v2} + # - DIRECTOR_V2_PORT=${DIRECTOR_V2_PORT:-8000} + # - STORAGE_HOST=${STORAGE_HOST:-storage} + # - STORAGE_PORT=${STORAGE_PORT:-8080} + # - SWARM_STACK_NAME=${SWARM_STACK_NAME:-simcore} + # - WEBSERVER_LOGLEVEL=${LOG_LEVEL:-WARNING} + # env_file: + # - ../.env + mock_envs_docker_compose_environment = setenvs_from_dict( + monkeypatch, + { + # Emulates MYVAR=${MYVAR:-default} + "CATALOG_HOST": os.environ.get("CATALOG_HOST", "catalog"), + "CATALOG_PORT": os.environ.get("CATALOG_PORT", "8000"), + "DIAGNOSTICS_MAX_AVG_LATENCY": "30", + "DIRECTOR_PORT": os.environ.get("DIRECTOR_PORT", "8080"), + "DIRECTOR_V2_HOST": os.environ.get("DIRECTOR_V2_HOST", "director-v2"), + "DIRECTOR_V2_PORT": os.environ.get("DIRECTOR_V2_PORT", "8000"), + "STORAGE_HOST": os.environ.get("STORAGE_HOST", "storage"), + "STORAGE_PORT": os.environ.get("STORAGE_PORT", "8080"), + "SWARM_STACK_NAME": os.environ.get("SWARM_STACK_NAME", "simcore"), + "WEBSERVER_LOGLEVEL": os.environ.get("LOG_LEVEL", "WARNING"), + "SESSION_COOKIE_MAX_AGE": str(7 * 24 * 60 * 60), + }, + ) + + return ( + mock_env_makefile + | mock_env_devel_environment + | mock_env_dockerfile_build + | mock_env_deployer_pipeline + | mock_envs_docker_compose_environment + ) + + @pytest.fixture def mocked_login_required(mocker: MockerFixture): diff --git 
a/services/web/server/tests/unit/isolated/test_application_settings.py b/services/web/server/tests/unit/isolated/test_application_settings.py index 9b03e1092024..65fe54ff483f 100644 --- a/services/web/server/tests/unit/isolated/test_application_settings.py +++ b/services/web/server/tests/unit/isolated/test_application_settings.py @@ -3,16 +3,11 @@ # pylint:disable=no-name-in-module import json -import os import pytest from aiohttp import web from models_library.utils.json_serialization import json_dumps from pydantic import HttpUrl, parse_obj_as -from pytest_simcore.helpers.monkeypatch_envs import ( - setenvs_from_dict, - setenvs_from_envfile, -) from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_webserver.application_settings import ( APP_SETTINGS_KEY, @@ -21,144 +16,6 @@ ) -@pytest.fixture -def mock_env_devel_environment( - mock_env_devel_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - # Overrides to ensure dev-features are enabled testings - return mock_env_devel_environment | setenvs_from_dict( - monkeypatch, - envs={ - "WEBSERVER_DEV_FEATURES_ENABLED": "1", - }, - ) - - -@pytest.fixture -def mock_env_makefile(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: - """envvars produced @Makefile (export)""" - # TODO: add Makefile recipe 'make dump-envs' to produce the file we load here - return setenvs_from_dict( - monkeypatch, - { - "API_SERVER_API_VERSION": "0.3.0", - "BUILD_DATE": "2022-01-14T21:28:15Z", - "CATALOG_API_VERSION": "0.3.2", - "CLIENT_WEB_OUTPUT": "/home/crespo/devp/osparc-simcore/services/static-webserver/client/source-output", - "DATCORE_ADAPTER_API_VERSION": "0.1.0-alpha", - "DIRECTOR_API_VERSION": "0.1.0", - "DIRECTOR_V2_API_VERSION": "2.0.0", - "DOCKER_IMAGE_TAG": "production", - "DOCKER_REGISTRY": "local", - "S3_ENDPOINT": "http://127.0.0.1:9001", - "STORAGE_API_VERSION": "0.2.1", - "SWARM_HOSTS": "", - "SWARM_STACK_NAME": "master-simcore", - "SWARM_STACK_NAME_NO_HYPHEN": 
"master_simcore", - "VCS_REF_CLIENT": "99b8022d2", - "VCS_STATUS_CLIENT": "'modified/untracked'", - "VCS_URL": "git@github.com:pcrespov/osparc-simcore.git", - "WEBSERVER_API_VERSION": "0.7.0", - }, - ) - - -@pytest.fixture -def mock_env_dockerfile_build(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: - # - # docker run -it --hostname "{{.Node.Hostname}}-{{.Service.Name}}-{{.Task.Slot}}" local/webserver:production printenv - # - return setenvs_from_envfile( - monkeypatch, - """\ - GPG_KEY=123456789123456789 - HOME=/home/scu - HOSTNAME=osparc-master-55-master-simcore_master_webserver-1 - IS_CONTAINER_CONTEXT=Yes - LANG=C.UTF-8 - PATH=/home/scu/.venv/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - PWD=/home/scu - PYTHON_GET_PIP_SHA256=6123659241292b2147b58922b9ffe11dda66b39d52d8a6f3aa310bc1d60ea6f7 - PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/a1675ab6c2bd898ed82b1f58c486097f763c74a9/public/get-pip.py - PYTHON_PIP_VERSION=21.1.3 - PYTHON_VERSION=3.11.9 - PYTHONDONTWRITEBYTECODE=1 - PYTHONOPTIMIZE=TRUE - SC_BOOT_MODE=production - SC_BUILD_DATE=2022-01-09T12:26:29Z - SC_BUILD_TARGET=production - SC_HEALTHCHECK_INTERVAL=30 - SC_HEALTHCHECK_RETRY=3 - SC_USER_ID=8004 - SC_USER_NAME=scu - SC_VCS_REF=dd536f998 - SC_VCS_URL=git@github.com:ITISFoundation/osparc-simcore.git - TERM=xterm - VIRTUAL_ENV=/home/scu/.venv - """, - ) - - -@pytest.fixture -def mock_webserver_service_environment( - monkeypatch: pytest.MonkeyPatch, - mock_env_makefile: EnvVarsDict, - mock_env_devel_environment: EnvVarsDict, - mock_env_dockerfile_build: EnvVarsDict, - mock_env_deployer_pipeline: EnvVarsDict, -) -> EnvVarsDict: - """ - Mocks environment produce in the docker compose config with a .env (.env-devel) - and launched with a makefile - """ - # @docker compose config (overrides) - # TODO: get from docker compose config - # r'- ([A-Z2_]+)=\$\{\1:-([\w-]+)\}' - - # - .env-devel + docker-compose service environs - # hostname: 
"{{.Node.Hostname}}-{{.Service.Name}}-{{.Task.Slot}}" - - # environment: - # - CATALOG_HOST=${CATALOG_HOST:-catalog} - # - CATALOG_PORT=${CATALOG_PORT:-8000} - # - DIAGNOSTICS_MAX_AVG_LATENCY=10 - # - DIAGNOSTICS_MAX_TASK_DELAY=30 - # - DIRECTOR_PORT=${DIRECTOR_PORT:-8080} - # - DIRECTOR_V2_HOST=${DIRECTOR_V2_HOST:-director-v2} - # - DIRECTOR_V2_PORT=${DIRECTOR_V2_PORT:-8000} - # - STORAGE_HOST=${STORAGE_HOST:-storage} - # - STORAGE_PORT=${STORAGE_PORT:-8080} - # - SWARM_STACK_NAME=${SWARM_STACK_NAME:-simcore} - # - WEBSERVER_LOGLEVEL=${LOG_LEVEL:-WARNING} - # env_file: - # - ../.env - mock_envs_docker_compose_environment = setenvs_from_dict( - monkeypatch, - { - # Emulates MYVAR=${MYVAR:-default} - "CATALOG_HOST": os.environ.get("CATALOG_HOST", "catalog"), - "CATALOG_PORT": os.environ.get("CATALOG_PORT", "8000"), - "DIAGNOSTICS_MAX_AVG_LATENCY": "30", - "DIRECTOR_PORT": os.environ.get("DIRECTOR_PORT", "8080"), - "DIRECTOR_V2_HOST": os.environ.get("DIRECTOR_V2_HOST", "director-v2"), - "DIRECTOR_V2_PORT": os.environ.get("DIRECTOR_V2_PORT", "8000"), - "STORAGE_HOST": os.environ.get("STORAGE_HOST", "storage"), - "STORAGE_PORT": os.environ.get("STORAGE_PORT", "8080"), - "SWARM_STACK_NAME": os.environ.get("SWARM_STACK_NAME", "simcore"), - "WEBSERVER_LOGLEVEL": os.environ.get("LOG_LEVEL", "WARNING"), - "SESSION_COOKIE_MAX_AGE": str(7 * 24 * 60 * 60), - }, - ) - - return ( - mock_env_makefile - | mock_env_devel_environment - | mock_env_dockerfile_build - | mock_env_deployer_pipeline - | mock_envs_docker_compose_environment - ) - - @pytest.fixture def app_settings( mock_webserver_service_environment: EnvVarsDict, diff --git a/services/web/server/tests/unit/isolated/test_products_middlewares.py b/services/web/server/tests/unit/isolated/test_products_middlewares.py index 49b82468a1d0..8dbf517492d3 100644 --- a/services/web/server/tests/unit/isolated/test_products_middlewares.py +++ b/services/web/server/tests/unit/isolated/test_products_middlewares.py @@ -6,6 +6,9 @@ from 
typing import Any import pytest +from aiohttp import web +from aiohttp.test_utils import make_mocked_request +from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER from simcore_postgres_database.models.products import LOGIN_SETTINGS_DEFAULT from simcore_postgres_database.webserver_models import products @@ -26,16 +29,22 @@ def mock_postgres_product_table(): column_defaults["login_settings"] = LOGIN_SETTINGS_DEFAULT + _SUBDOMAIN_PREFIX = r"[\w-]+\." + return [ - dict(name="osparc", host_regex=r"([\.-]{0,1}osparc[\.-])", **column_defaults), + dict( + name="osparc", + host_regex=rf"^({_SUBDOMAIN_PREFIX})*osparc[\.-]", + **column_defaults, + ), dict( name="s4l", - host_regex=r"(^s4l[\.-])|(^sim4life\.)|(^api.s4l[\.-])|(^api.sim4life\.)", + host_regex=rf"^({_SUBDOMAIN_PREFIX})*(s4l|sim4life)[\.-]", **column_defaults, ), dict( name="tis", - host_regex=r"(^tis[\.-])|(^ti-solutions\.)", + host_regex=rf"^({_SUBDOMAIN_PREFIX})*(tis|^ti-solutions)[\.-]", vendor={ "name": "ACME", "address": "sesame street", @@ -49,28 +58,22 @@ def mock_postgres_product_table(): @pytest.fixture -def mock_app(mock_postgres_product_table: dict[str, Any]): - class MockApp(dict): - def __init__(self): - super().__init__() - self.middlewares = [] - - mock_app = MockApp() +def mock_app(mock_postgres_product_table: dict[str, Any]) -> web.Application: + app = web.Application() app_products: dict[str, Product] = { entry["name"]: Product(**entry) for entry in mock_postgres_product_table } default_product_name = next(iter(app_products.keys())) - _set_app_state(mock_app, app_products, default_product_name) + _set_app_state(app, app_products, default_product_name) - return mock_app + return app @pytest.mark.parametrize( - "request_url,product_from_client,expected_product", + "request_url,x_product_name_header,expected_product", [ ("https://tis-master.domain.io/", "tis", "tis"), - ("https://s4l-staging.domain.com/v0/", "s4l", "s4l"), 
("https://osparc-master.domain.com/v0/projects", None, "osparc"), ("https://s4l.domain.com/", "s4l", "s4l"), ("https://some-valid-but-undefined-product.io/", None, FRONTEND_APP_DEFAULT), @@ -79,47 +82,47 @@ def __init__(self): ("https://ti-solutions.io/", "tis", "tis"), ("https://osparc.io/", None, "osparc"), # e.g. an old front-end ("https://staging.osparc.io/", "osparc", "osparc"), + ("https://s4l-staging.domain.com/v0/", "s4l", "s4l"), + # new auth of subdomains. SEE https://github.com/ITISFoundation/osparc-simcore/pull/6484 + ( + "https://34c878cd-f801-433f-9ddb-7dccba9251af.services.s4l-staging.domain.com", + None, + "s4l", + ), ], ) async def test_middleware_product_discovery( - request_url, product_from_client, expected_product: str, mock_app + request_url: str, + x_product_name_header: str | None, + expected_product: str, + mock_app: web.Application, ): """ A client's request reaches the middleware with - an url (request_url), - a product name in the header from client (product_from_client) """ - requested_url = URL(request_url) - - # mocks - class MockRequest(dict): - @property - def headers(self): - return ( - {X_PRODUCT_NAME_HEADER: product_from_client} - if product_from_client - else {} - ) - - @property - def app(self): - return mock_app - - @property - def path(self): - return requested_url.path - - @property - def host(self): - return requested_url.host + url = URL(request_url) + headers = { + "Host": url.host, + "X-Forwarded-Host": url.host, + } + if x_product_name_header: + headers.update({X_PRODUCT_NAME_HEADER: x_product_name_header}) - mock_request = MockRequest() + mock_request = make_mocked_request( + "GET", + url.path, + headers=headers, + app=mock_app, + ) - async def mock_handler(request): - return "OK" + async def _mock_handler(_request: web.Request): + return web.Response(text="OK") - # under test --------- - response = await discover_product_middleware(mock_request, mock_handler) + # run middleware + response = await 
discover_product_middleware(mock_request, _mock_handler) # checks assert get_product_name(mock_request) == expected_product + assert response.status == status.HTTP_200_OK diff --git a/services/web/server/tests/unit/isolated/test_tracing.py b/services/web/server/tests/unit/isolated/test_tracing.py new file mode 100644 index 000000000000..ddec0d10422c --- /dev/null +++ b/services/web/server/tests/unit/isolated/test_tracing.py @@ -0,0 +1,42 @@ +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=no-name-in-module + + +import pytest +from opentelemetry.instrumentation.aiohttp_server import ( + middleware as aiohttp_opentelemetry_middleware, +) +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_webserver.application import create_application +from simcore_service_webserver.application_settings import ApplicationSettings + + +@pytest.fixture +def mock_webserver_service_environment( + monkeypatch: pytest.MonkeyPatch, mock_webserver_service_environment: EnvVarsDict +) -> EnvVarsDict: + + return mock_webserver_service_environment | setenvs_from_dict( + monkeypatch, + { + "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT": "http://opentelemetry-collector", + "TRACING_OPENTELEMETRY_COLLECTOR_PORT": "4318", + }, + ) + + +def test_middleware_restrictions_opentelemetry_is_second_middleware( + mock_webserver_service_environment: EnvVarsDict, +): + settings = ApplicationSettings.create_from_envs() + assert settings.WEBSERVER_TRACING + + app = create_application() + assert app.middlewares + assert ( + app.middlewares[0].__middleware_name__ + == "servicelib.aiohttp.monitoring.monitor_simcore_service_webserver" + ) + assert app.middlewares[1] is aiohttp_opentelemetry_middleware diff --git a/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py b/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py index 
06022c2f9fa0..4cd80c74a160 100644 --- a/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py +++ b/services/web/server/tests/unit/with_dbs/03/products/test_products_db.py @@ -24,9 +24,7 @@ ) from simcore_service_webserver.db.plugin import APP_AIOPG_ENGINE_KEY from simcore_service_webserver.products._db import ProductRepository -from simcore_service_webserver.products._middlewares import ( - _get_app_default_product_name, -) +from simcore_service_webserver.products._middlewares import _get_default_product_name from simcore_service_webserver.products._model import Product @@ -150,8 +148,6 @@ async def test_product_repository_get_product( assert await product_repository.get_product(product.name) == product # tests definitions of default from utle_products and web-server.products are in sync - mock_request = mocker.MagicMock() - mock_request.app = app async with product_repository.engine.acquire() as conn: default_product = await utils_products.get_default_product_name(conn) - assert default_product == _get_app_default_product_name(mock_request) + assert default_product == _get_default_product_name(app) diff --git a/services/web/server/tests/unit/with_dbs/03/test_session.py b/services/web/server/tests/unit/with_dbs/03/test_session.py index 3d92c32acd21..127089dc802a 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_session.py +++ b/services/web/server/tests/unit/with_dbs/03/test_session.py @@ -11,12 +11,14 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from aiohttp_session.cookie_storage import EncryptedCookieStorage from cryptography.fernet import Fernet from pytest_simcore.helpers.dict_tools import ConfigDict from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_login import NewUser from simcore_service_webserver.application import create_application +from simcore_service_webserver.session._cookie_storage import ( + SharedCookieEncryptedCookieStorage, +) from 
simcore_service_webserver.session.api import get_session from simcore_service_webserver.session.settings import SessionSettings @@ -128,7 +130,7 @@ def test_session_settings( == WEBSERVER_SESSION_SECRET_KEY ) - _should_not_raise = EncryptedCookieStorage( + _should_not_raise = SharedCookieEncryptedCookieStorage( # NOTE: we pass here a string! secret_key=settings.SESSION_SECRET_KEY.get_secret_value() ) diff --git a/tests/e2e-playwright/requirements/_tools.txt b/tests/e2e-playwright/requirements/_tools.txt index cd07b666b723..bce03abbd9d7 100644 --- a/tests/e2e-playwright/requirements/_tools.txt +++ b/tests/e2e-playwright/requirements/_tools.txt @@ -26,7 +26,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/tests/e2e-playwright/requirements/ci.txt b/tests/e2e-playwright/requirements/ci.txt index d6a417115eb5..5b3c69223a39 100644 --- a/tests/e2e-playwright/requirements/ci.txt +++ b/tests/e2e-playwright/requirements/ci.txt @@ -1,3 +1,4 @@ --requirement _test.txt +--requirement _tools.txt pytest-simcore @ ../../packages/pytest-simcore diff --git a/tests/e2e/docker/Dockerfile b/tests/e2e/docker/Dockerfile index b4e2d4012c53..0f9413777c3f 100644 --- a/tests/e2e/docker/Dockerfile +++ b/tests/e2e/docker/Dockerfile @@ -1,5 +1,5 @@ ARG NODE_VERSION="14" -FROM node:${NODE_VERSION}-slim as base +FROM node:${NODE_VERSION}-slim AS base RUN apt-get update && \ apt-get install -y --no-install-recommends \ diff --git a/tests/performance/Makefile b/tests/performance/Makefile index 3c06c138c643..ead1e417d39a 100644 --- a/tests/performance/Makefile +++ b/tests/performance/Makefile @@ -80,4 +80,4 @@ install-ci: .PHONY: config config: @$(call check_defined, input, please define inputs when calling $@ - e.g. 
```make $@ input="--help"```) - @python locust_settings.py $(input) | tee .env + @uv run locust_settings.py $(input) | tee .env diff --git a/tests/public-api/requirements/_tools.txt b/tests/public-api/requirements/_tools.txt index 346aa34ba599..31fd8a2f445e 100644 --- a/tests/public-api/requirements/_tools.txt +++ b/tests/public-api/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/tests/public-api/requirements/ci.txt b/tests/public-api/requirements/ci.txt index 5b171eceb78c..b1b52acbf2a4 100644 --- a/tests/public-api/requirements/ci.txt +++ b/tests/public-api/requirements/ci.txt @@ -9,6 +9,7 @@ # installs base + tests requirements --requirement _base.txt --requirement _test.txt +--requirement _tools.txt simcore-settings-library @ ../../packages/settings-library/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index e87b035be29b..59ede0d53e76 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -64,8 +64,6 @@ alembic==1.13.3 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/_test.in -annotated-types==0.7.0 - # via pydantic anyio==4.6.0 # via # fast-depends @@ -484,7 +482,7 @@ pyyaml==6.0.2 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_test.in -redis==5.0.8 +redis==5.0.4 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index 9de76ca77902..ef771b58939a 100644 --- a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==5.13.2 # pylint mccabe==0.7.0 # via pylint -mypy==1.11.2 +mypy==1.12.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.0.0 # via diff --git a/tests/swarm-deploy/requirements/ci.txt b/tests/swarm-deploy/requirements/ci.txt index 0b682dd7a385..1e1ecd0bf109 100644 --- a/tests/swarm-deploy/requirements/ci.txt +++ b/tests/swarm-deploy/requirements/ci.txt @@ -8,6 +8,7 @@ # installs base + tests requirements --requirement _test.txt +--requirement _tools.txt # installs this repo's packages simcore-models-library @ ../../packages/models-library/ diff --git a/tests/swarm-deploy/test_service_images.py b/tests/swarm-deploy/test_service_images.py index 6fa388b7519f..fa18ea6f8374 100644 --- a/tests/swarm-deploy/test_service_images.py +++ b/tests/swarm-deploy/test_service_images.py @@ -31,7 +31,7 @@ def _extract_from_dockerfile(service_name: str) -> str: ) dockerfile = dockerfile_path.read_text() - m = re.search(r"FROM (.+) as base", dockerfile) + m = re.search(r"FROM (.+) AS base", dockerfile) assert m, f"{dockerfile_path} has no 'base' alias!?" return m.group(0)