From 0545b055cfa8c67989a01439cc227a8466e1ae80 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 10:49:18 +0200 Subject: [PATCH 01/91] Initial commit --- packages/celery-library/README.md | 17 +++++++++ packages/celery-library/VERSION | 1 + packages/celery-library/setup.cfg | 21 +++++++++++ packages/celery-library/setup.py | 59 +++++++++++++++++++++++++++++++ 4 files changed, 98 insertions(+) create mode 100644 packages/celery-library/README.md create mode 100644 packages/celery-library/VERSION create mode 100644 packages/celery-library/setup.cfg create mode 100644 packages/celery-library/setup.py diff --git a/packages/celery-library/README.md b/packages/celery-library/README.md new file mode 100644 index 00000000000..bab2c37cc61 --- /dev/null +++ b/packages/celery-library/README.md @@ -0,0 +1,17 @@ +# simcore Celery library + +Provides a wrapper around Celery library. + +## Installation + +```console +make help +make install-dev +``` + +## Test + +```console +make help +make test-dev +``` diff --git a/packages/celery-library/VERSION b/packages/celery-library/VERSION new file mode 100644 index 00000000000..6e8bf73aa55 --- /dev/null +++ b/packages/celery-library/VERSION @@ -0,0 +1 @@ +0.1.0 diff --git a/packages/celery-library/setup.cfg b/packages/celery-library/setup.cfg new file mode 100644 index 00000000000..5b10d2dc187 --- /dev/null +++ b/packages/celery-library/setup.cfg @@ -0,0 +1,21 @@ +[bumpversion] +current_version = 0.1.0 +commit = True +message = packages/celery-library version: {current_version} → {new_version} +tag = False +commit_args = --no-verify + +[bumpversion:file:VERSION] + +[bdist_wheel] +universal = 1 + +[aliases] +test = pytest + +[tool:pytest] +asyncio_mode = auto + +[mypy] +plugins = + pydantic.mypy diff --git a/packages/celery-library/setup.py b/packages/celery-library/setup.py new file mode 100644 index 00000000000..226e43f0bf8 --- /dev/null +++ b/packages/celery-library/setup.py @@ -0,0 +1,59 @@ +import re +import sys +from pathlib import Path + +from setuptools import find_packages, setup + + +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } + + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + +INSTALL_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.in") +) # WEAK requirements + +TEST_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_test.txt") +) # STRICT requirements + + +SETUP = { + "name": "simcore-celery-library", + "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), + "author": "Giancarlo Romeo (giancarloromeo)", + "description": "Core service library for Celery", + "python_requires": "~=3.11", + "classifiers": [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3.10", + ], + "long_description": Path(CURRENT_DIR / "README.md").read_text(), + "license": "MIT license", + "install_requires": INSTALL_REQUIREMENTS, + "packages": find_packages(where="src"), + "package_data": {"": ["py.typed"]}, + "package_dir": {"": "src"}, + "include_package_data": True, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "zip_safe": False, +} + + +if __name__ == "__main__": + setup(**SETUP) From 
3d833dd5f402092ed0fa1de5890c8b6fcbae6bbf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 11:04:48 +0200 Subject: [PATCH 02/91] add files --- packages/celery-library/requirements/Makefile | 6 + packages/celery-library/requirements/_base.in | 10 + packages/celery-library/requirements/_test.in | 27 +++ .../celery-library/requirements/_tools.in | 5 + packages/celery-library/requirements/ci.txt | 22 ++ packages/celery-library/requirements/dev.txt | 22 ++ .../src/celery-library/__init__.py | 56 +++++ .../src/celery-library/_celery_types.py | 61 +++++ .../src/celery-library/_common.py | 37 ++++ .../src/celery-library/_task.py | 208 ++++++++++++++++++ .../src/celery-library/backends/__init__.py | 0 .../src/celery-library/backends/_redis.py | 133 +++++++++++ .../src/celery-library/client.py | 157 +++++++++++++ .../src/celery-library/errors.py | 24 ++ .../src/celery-library/models.py | 92 ++++++++ .../src/celery-library/signals.py | 99 +++++++++ .../src/celery-library/utils.py | 27 +++ .../src/celery-library/worker.py | 19 ++ 18 files changed, 1005 insertions(+) create mode 100644 packages/celery-library/requirements/Makefile create mode 100644 packages/celery-library/requirements/_base.in create mode 100644 packages/celery-library/requirements/_test.in create mode 100644 packages/celery-library/requirements/_tools.in create mode 100644 packages/celery-library/requirements/ci.txt create mode 100644 packages/celery-library/requirements/dev.txt create mode 100644 packages/celery-library/src/celery-library/__init__.py create mode 100644 packages/celery-library/src/celery-library/_celery_types.py create mode 100644 packages/celery-library/src/celery-library/_common.py create mode 100644 packages/celery-library/src/celery-library/_task.py create mode 100644 packages/celery-library/src/celery-library/backends/__init__.py create mode 100644 packages/celery-library/src/celery-library/backends/_redis.py create mode 100644 packages/celery-library/src/celery-library/client.py create mode 100644 packages/celery-library/src/celery-library/errors.py create mode 100644 packages/celery-library/src/celery-library/models.py create mode 100644 packages/celery-library/src/celery-library/signals.py create mode 100644 packages/celery-library/src/celery-library/utils.py create mode 100644 packages/celery-library/src/celery-library/worker.py diff --git a/packages/celery-library/requirements/Makefile b/packages/celery-library/requirements/Makefile new file mode 100644 index 00000000000..3f25442b790 --- /dev/null +++ b/packages/celery-library/requirements/Makefile @@ -0,0 +1,6 @@ +# +# Targets to pip-compile requirements +# +include ../../../requirements/base.Makefile + +# Add here any extra explicit dependency: e.g. 
_migration.txt: _base.txt diff --git a/packages/celery-library/requirements/_base.in b/packages/celery-library/requirements/_base.in new file mode 100644 index 00000000000..4fa518fa56a --- /dev/null +++ b/packages/celery-library/requirements/_base.in @@ -0,0 +1,10 @@ +# +# Specifies third-party dependencies for 'aws-library' +# +--constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in + +celery diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in new file mode 100644 index 00000000000..92c66219160 --- /dev/null +++ b/packages/celery-library/requirements/_test.in @@ -0,0 +1,27 @@ +# +# Specifies dependencies required to run 'models-library' +# +--constraint ../../../requirements/constraints.txt + +# Adds base AS CONSTRAINT specs, not requirement. +# - Resulting _text.txt is a frozen list of EXTRA packages for testing, besides _base.txt +# +--constraint _base.txt + +# testing +coverage +faker +fastapi +httpx +pint +pytest +pytest-asyncio +pytest-benchmark +pytest-cov +pytest-icdiff +pytest-instafail +pytest-mock +pytest-runner +pytest-sugar +python-dotenv +pyyaml diff --git a/packages/celery-library/requirements/_tools.in b/packages/celery-library/requirements/_tools.in new file mode 100644 index 00000000000..1def82c12a3 --- /dev/null +++ b/packages/celery-library/requirements/_tools.in @@ -0,0 +1,5 @@ +--constraint ../../../requirements/constraints.txt +--constraint _base.txt +--constraint _test.txt + +--requirement ../../../requirements/devenv.txt diff --git a/packages/celery-library/requirements/ci.txt b/packages/celery-library/requirements/ci.txt new file mode 100644 index 00000000000..c0623831422 --- /dev/null +++ b/packages/celery-library/requirements/ci.txt @@ -0,0 +1,22 @@ +# Shortcut to install all packages for the contigous integration (CI) of 'models-library' +# +# - As ci.txt but w/ tests +# +# Usage: +# pip install -r requirements/ci.txt +# + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +simcore-common-library @ ../common-library +simcore-models-library @ ../models-library/ +pytest-simcore @ ../pytest-simcore +simcore-service-library @ ../service-library/ +simcore-settings-library @ ../settings-library/ + +# current module +simcore-celery-library @ . diff --git a/packages/celery-library/requirements/dev.txt b/packages/celery-library/requirements/dev.txt new file mode 100644 index 00000000000..34cc644b370 --- /dev/null +++ b/packages/celery-library/requirements/dev.txt @@ -0,0 +1,22 @@ +# Shortcut to install all packages needed to develop 'models-library' +# +# - As ci.txt but with current and repo packages in develop (edit) mode +# +# Usage: +# pip install -r requirements/dev.txt +# + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +--editable ../common-library/ +--editable ../models-library/ +--editable ../pytest-simcore/ +--editable ../service-library/ +--editable ../settings-library/ + +# current module +--editable . 
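For orientation before the source files, a minimal sketch of how a consuming service could wire up the client that the modules below provide. It mirrors the wiring done in `setup_celery_client()` in `__init__.py` further down, minus the storage-specific application settings; import paths are assumed to be `celery_library.*` once the package is installed, and all names in the sketch are illustrative.

```python
from servicelib.redis._client import RedisClientSDK
from settings_library.celery import CelerySettings
from settings_library.redis import RedisDatabase

# modules added below in this patch (import path assumed)
from celery_library._common import create_app as create_celery_app
from celery_library.backends._redis import RedisTaskInfoStore
from celery_library.client import CeleryTaskClient


def build_celery_client(celery_settings: CelerySettings) -> CeleryTaskClient:
    # same steps as setup_celery_client() below: a Celery app configured from
    # settings, a Redis-backed task-info store, and the client tying them together
    celery_app = create_celery_app(celery_settings)
    redis_client_sdk = RedisClientSDK(
        celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn(
            RedisDatabase.CELERY_TASKS
        ),
        client_name="example-service.celery_tasks",  # illustrative client name
    )
    return CeleryTaskClient(
        celery_app,
        celery_settings,
        RedisTaskInfoStore(redis_client_sdk),
    )
```

A FastAPI service would typically build this in a startup handler and stash it on `app.state`, exactly as `setup_celery_client()` does.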
diff --git a/packages/celery-library/src/celery-library/__init__.py b/packages/celery-library/src/celery-library/__init__.py new file mode 100644 index 00000000000..cbf60fda44e --- /dev/null +++ b/packages/celery-library/src/celery-library/__init__.py @@ -0,0 +1,56 @@ +import logging +from asyncio import AbstractEventLoop + +from fastapi import FastAPI +from servicelib.redis._client import RedisClientSDK +from settings_library.redis import RedisDatabase + +from ..._meta import APP_NAME +from ...core.settings import get_application_settings +from ._celery_types import register_celery_types +from ._common import create_app +from .backends._redis import RedisTaskInfoStore +from .client import CeleryTaskClient + +_logger = logging.getLogger(__name__) + + +def setup_celery_client(app: FastAPI) -> None: + async def on_startup() -> None: + application_settings = get_application_settings(app) + celery_settings = application_settings.STORAGE_CELERY + assert celery_settings # nosec + celery_app = create_app(celery_settings) + redis_client_sdk = RedisClientSDK( + celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS + ), + client_name=f"{APP_NAME}.celery_tasks", + ) + + app.state.celery_client = CeleryTaskClient( + celery_app, + celery_settings, + RedisTaskInfoStore(redis_client_sdk), + ) + + register_celery_types() + + app.add_event_handler("startup", on_startup) + + +def get_celery_client(app: FastAPI) -> CeleryTaskClient: + assert hasattr(app.state, "celery_client") # nosec + celery_client = app.state.celery_client + assert isinstance(celery_client, CeleryTaskClient) + return celery_client + + +def get_event_loop(app: FastAPI) -> AbstractEventLoop: + event_loop = app.state.event_loop + assert isinstance(event_loop, AbstractEventLoop) + return event_loop + + +def set_event_loop(app: FastAPI, event_loop: AbstractEventLoop) -> None: + app.state.event_loop = event_loop diff --git a/packages/celery-library/src/celery-library/_celery_types.py b/packages/celery-library/src/celery-library/_celery_types.py new file mode 100644 index 00000000000..4ed62e72775 --- /dev/null +++ b/packages/celery-library/src/celery-library/_celery_types.py @@ -0,0 +1,61 @@ +from functools import partial +from pathlib import Path +from typing import Any + +from kombu.utils.json import register_type # type: ignore[import-untyped] +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) +from pydantic import BaseModel + +from ...models import FileMetaData + + +def _path_encoder(obj): + if isinstance(obj, Path): + return {"__path__": True, "path": str(obj)} + return obj + + +# Define how Path objects are deserialized +def _path_decoder(obj): + if "__path__" in obj: + return Path(obj["path"]) + return obj + + +def _class_full_name(clz: type) -> str: + return ".".join([clz.__module__, clz.__qualname__]) + + +def _pydantic_model_encoder(obj: BaseModel, *args, **kwargs) -> dict[str, Any]: + return obj.model_dump(*args, **kwargs, mode="json") + + +def _pydantic_model_decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseModel: + return clz(**data) + + +def _register_pydantic_types(*models: type[BaseModel]) -> None: + for model in models: + register_type( + model, + _class_full_name(model), + encoder=_pydantic_model_encoder, + decoder=partial(_pydantic_model_decoder, model), + ) + + +def register_celery_types() -> None: + register_type( + Path, + _class_full_name(Path), + _path_encoder, + _path_decoder, + ) + register_type(set, 
_class_full_name(set), encoder=list, decoder=set) + + _register_pydantic_types(FileUploadCompletionBody) + _register_pydantic_types(FileMetaData) + _register_pydantic_types(FoldersBody) diff --git a/packages/celery-library/src/celery-library/_common.py b/packages/celery-library/src/celery-library/_common.py new file mode 100644 index 00000000000..545bb98f682 --- /dev/null +++ b/packages/celery-library/src/celery-library/_common.py @@ -0,0 +1,37 @@ +import logging +import ssl +from typing import Any + +from celery import Celery # type: ignore[import-untyped] +from settings_library.celery import CelerySettings +from settings_library.redis import RedisDatabase + +_logger = logging.getLogger(__name__) + + +def _celery_configure(celery_settings: CelerySettings) -> dict[str, Any]: + base_config = { + "broker_connection_retry_on_startup": True, + "result_expires": celery_settings.CELERY_RESULT_EXPIRES, + "result_extended": True, + "result_serializer": "json", + "task_default_queue": "default", + "task_send_sent_event": True, + "task_track_started": True, + "worker_send_task_events": True, + } + if celery_settings.CELERY_REDIS_RESULT_BACKEND.REDIS_SECURE: + base_config["redis_backend_use_ssl"] = {"ssl_cert_reqs": ssl.CERT_NONE} + return base_config + + +def create_app(celery_settings: CelerySettings) -> Celery: + assert celery_settings + + return Celery( + broker=celery_settings.CELERY_RABBIT_BROKER.dsn, + backend=celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS, + ), + **_celery_configure(celery_settings), + ) diff --git a/packages/celery-library/src/celery-library/_task.py b/packages/celery-library/src/celery-library/_task.py new file mode 100644 index 00000000000..e367a3a73da --- /dev/null +++ b/packages/celery-library/src/celery-library/_task.py @@ -0,0 +1,208 @@ +import asyncio +import inspect +import logging +from collections.abc import Callable, Coroutine +from datetime import timedelta +from functools import wraps +from typing import Any, Concatenate, Final, ParamSpec, TypeVar, overload + +from celery import Celery # type: ignore[import-untyped] +from celery.contrib.abortable import ( # type: ignore[import-untyped] + AbortableAsyncResult, + AbortableTask, +) +from celery.exceptions import Ignore # type: ignore[import-untyped] +from pydantic import NonNegativeInt +from servicelib.async_utils import cancel_wait_task + +from . import get_event_loop +from .errors import encore_celery_transferrable_error +from .models import TaskID, TaskId +from .utils import get_fastapi_app + +_logger = logging.getLogger(__name__) + +_DEFAULT_TASK_TIMEOUT: Final[timedelta | None] = None +_DEFAULT_MAX_RETRIES: Final[NonNegativeInt] = 3 +_DEFAULT_WAIT_BEFORE_RETRY: Final[timedelta] = timedelta(seconds=5) +_DEFAULT_DONT_AUTORETRY_FOR: Final[tuple[type[Exception], ...]] = () +_DEFAULT_ABORT_TASK_TIMEOUT: Final[timedelta] = timedelta(seconds=1) +_DEFAULT_CANCEL_TASK_TIMEOUT: Final[timedelta] = timedelta(seconds=5) + +T = TypeVar("T") +P = ParamSpec("P") +R = TypeVar("R") + + +class TaskAbortedError(Exception): ... 
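As a point of reference for the `_celery_types.py` helpers above, a self-contained sketch of the kombu `register_type` round-trip they build on. The `JobParams` model and marker string are made up for illustration, and it assumes kombu's JSON helpers (`dumps`/`loads`) apply registered encoders and decoders symmetrically, which is what `register_celery_types()` relies on for Celery message and result serialization.

```python
from pathlib import Path

from kombu.utils.json import dumps, loads, register_type
from pydantic import BaseModel


class JobParams(BaseModel):  # illustrative model, not one of the types registered above
    input_dir: Path
    retries: int = 0


# same pattern as _register_pydantic_types(): encode via model_dump(mode="json"),
# rebuild by validating the decoded dict
register_type(
    JobParams,
    "example.JobParams",
    encoder=lambda model: model.model_dump(mode="json"),
    decoder=lambda data: JobParams(**data),
)

restored = loads(dumps(JobParams(input_dir=Path("/tmp/in"), retries=2)))
assert isinstance(restored, JobParams)
assert restored.input_dir == Path("/tmp/in")
```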
+ + +def _async_task_wrapper( + app: Celery, +) -> Callable[ + [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], + Callable[Concatenate[AbortableTask, P], R], +]: + def decorator( + coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + ) -> Callable[Concatenate[AbortableTask, P], R]: + @wraps(coro) + def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: + fastapi_app = get_fastapi_app(app) + # NOTE: task.request is a thread local object, so we need to pass the id explicitly + assert task.request.id is not None # nosec + + async def run_task(task_id: TaskID) -> R: + try: + async with asyncio.TaskGroup() as tg: + main_task = tg.create_task( + coro(task, task_id, *args, **kwargs), + ) + + async def abort_monitor(): + abortable_result = AbortableAsyncResult(task_id, app=app) + while not main_task.done(): + if abortable_result.is_aborted(): + await cancel_wait_task( + main_task, + max_delay=_DEFAULT_CANCEL_TASK_TIMEOUT.total_seconds(), + ) + AbortableAsyncResult(task_id, app=app).forget() + raise TaskAbortedError + await asyncio.sleep( + _DEFAULT_ABORT_TASK_TIMEOUT.total_seconds() + ) + + tg.create_task(abort_monitor()) + + return main_task.result() + except BaseExceptionGroup as eg: + task_aborted_errors, other_errors = eg.split(TaskAbortedError) + + if task_aborted_errors: + assert task_aborted_errors is not None # nosec + assert len(task_aborted_errors.exceptions) == 1 # nosec + raise task_aborted_errors.exceptions[0] from eg + + assert other_errors is not None # nosec + assert len(other_errors.exceptions) == 1 # nosec + raise other_errors.exceptions[0] from eg + + return asyncio.run_coroutine_threadsafe( + run_task(task.request.id), + get_event_loop(fastapi_app), + ).result() + + return wrapper + + return decorator + + +def _error_handling( + max_retries: NonNegativeInt, + delay_between_retries: timedelta, + dont_autoretry_for: tuple[type[Exception], ...], +) -> Callable[ + [Callable[Concatenate[AbortableTask, P], R]], + Callable[Concatenate[AbortableTask, P], R], +]: + def decorator( + func: Callable[Concatenate[AbortableTask, P], R], + ) -> Callable[Concatenate[AbortableTask, P], R]: + @wraps(func) + def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: + try: + return func(task, *args, **kwargs) + except TaskAbortedError as exc: + _logger.warning("Task %s was cancelled", task.request.id) + raise Ignore from exc + except Exception as exc: + if isinstance(exc, dont_autoretry_for): + _logger.debug("Not retrying for exception %s", type(exc).__name__) + # propagate without retry + raise encore_celery_transferrable_error(exc) from exc + + exc_type = type(exc).__name__ + exc_message = f"{exc}" + _logger.exception( + "Task %s failed with exception: %s:%s", + task.request.id, + exc_type, + exc_message, + ) + + raise task.retry( + max_retries=max_retries, + countdown=delay_between_retries.total_seconds(), + exc=encore_celery_transferrable_error(exc), + ) from exc + + return wrapper + + return decorator + + +@overload +def register_task( + app: Celery, + fn: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + task_name: str | None = None, + timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, + max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, + delay_between_retries: timedelta = _DEFAULT_WAIT_BEFORE_RETRY, + dont_autoretry_for: tuple[type[Exception], ...] = _DEFAULT_DONT_AUTORETRY_FOR, +) -> None: ... 
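To make the expected task signature concrete, a usage sketch for `register_task` (whose overloads and implementation follow): an async task receives the bound `AbortableTask` plus the explicit task id, extra arguments arrive as keyword arguments, and cancellation is handled by the abort monitor above. Task name, body, and import paths are illustrative only.

```python
import asyncio

from celery import Celery
from celery.contrib.abortable import AbortableTask

from celery_library._task import register_task  # import path assumed
from celery_library.models import TaskId


async def archive_folder(task: AbortableTask, task_id: TaskId, *, folder: str) -> str:
    # stand-in for real work; a long-running body is cancellable via the abort monitor
    await asyncio.sleep(1)
    return f"{folder}.zip"


def register_worker_tasks(celery_app: Celery) -> None:
    # registered under the function name "archive_folder" with the default retry policy
    register_task(celery_app, archive_folder)
```

On the caller side, `CeleryTaskClient.submit_task` (added later in this patch) sends the task by name, passing `folder=...` through `send_task(kwargs=...)`.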
+ + +@overload +def register_task( + app: Celery, + fn: Callable[Concatenate[AbortableTask, P], R], + task_name: str | None = None, + timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, + max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, + delay_between_retries: timedelta = _DEFAULT_WAIT_BEFORE_RETRY, + dont_autoretry_for: tuple[type[Exception], ...] = _DEFAULT_DONT_AUTORETRY_FOR, +) -> None: ... + + +def register_task( # type: ignore[misc] + app: Celery, + fn: ( + Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]] + | Callable[Concatenate[AbortableTask, P], R] + ), + task_name: str | None = None, + timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, + max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, + delay_between_retries: timedelta = _DEFAULT_WAIT_BEFORE_RETRY, + dont_autoretry_for: tuple[type[Exception], ...] = _DEFAULT_DONT_AUTORETRY_FOR, +) -> None: + """Decorator to define a celery task with error handling and abortable support + + Keyword Arguments: + task_name -- name of the function used in Celery (default: {None} will be generated automatically) + timeout -- when None no timeout is enforced, task is allowed to run forever (default: {_DEFAULT_TASK_TIMEOUT}) + max_retries -- number of attempts in case of failuire before giving up (default: {_DEFAULT_MAX_RETRIES}) + delay_between_retries -- dealy between each attempt in case of error (default: {_DEFAULT_WAIT_BEFORE_RETRY}) + dont_autoretry_for -- exceptions that should not be retried when raised by the task + """ + wrapped_fn: Callable[Concatenate[AbortableTask, P], R] + if asyncio.iscoroutinefunction(fn): + wrapped_fn = _async_task_wrapper(app)(fn) + else: + assert inspect.isfunction(fn) # nosec + wrapped_fn = fn + + wrapped_fn = _error_handling( + max_retries=max_retries, + delay_between_retries=delay_between_retries, + dont_autoretry_for=dont_autoretry_for, + )(wrapped_fn) + + app.task( + name=task_name or fn.__name__, + bind=True, + base=AbortableTask, + time_limit=None if timeout is None else timeout.total_seconds(), + )(wrapped_fn) diff --git a/packages/celery-library/src/celery-library/backends/__init__.py b/packages/celery-library/src/celery-library/backends/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/celery-library/src/celery-library/backends/_redis.py b/packages/celery-library/src/celery-library/backends/_redis.py new file mode 100644 index 00000000000..3fd9984fb2a --- /dev/null +++ b/packages/celery-library/src/celery-library/backends/_redis.py @@ -0,0 +1,133 @@ +import contextlib +import logging +from datetime import timedelta +from typing import Final + +from models_library.progress_bar import ProgressReport +from pydantic import ValidationError +from servicelib.redis._client import RedisClientSDK + +from ..models import ( + Task, + TaskContext, + TaskID, + TaskMetadata, + TaskUUID, + build_task_id_prefix, +) + +_CELERY_TASK_INFO_PREFIX: Final[str] = "celery-task-info-" +_CELERY_TASK_ID_KEY_ENCODING = "utf-8" +_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" +_CELERY_TASK_SCAN_COUNT_PER_BATCH: Final[int] = 10000 +_CELERY_TASK_METADATA_KEY: Final[str] = "metadata" +_CELERY_TASK_PROGRESS_KEY: Final[str] = "progress" + +_logger = logging.getLogger(__name__) + + +def _build_key(task_id: TaskID) -> str: + return _CELERY_TASK_INFO_PREFIX + task_id + + +class RedisTaskInfoStore: + def __init__(self, redis_client_sdk: RedisClientSDK) -> None: + self._redis_client_sdk = redis_client_sdk + + async def create_task( + self, + task_id: TaskID, + task_metadata: 
TaskMetadata, + expiry: timedelta, + ) -> None: + task_key = _build_key(task_id) + await self._redis_client_sdk.redis.hset( + name=task_key, + key=_CELERY_TASK_METADATA_KEY, + value=task_metadata.model_dump_json(), + ) # type: ignore + await self._redis_client_sdk.redis.expire( + task_key, + expiry, + ) + + async def exists_task(self, task_id: TaskID) -> bool: + n = await self._redis_client_sdk.redis.exists(_build_key(task_id)) + assert isinstance(n, int) # nosec + return n > 0 + + async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: + raw_result = await self._redis_client_sdk.redis.hget(_build_key(task_id), _CELERY_TASK_METADATA_KEY) # type: ignore + if not raw_result: + return None + + try: + return TaskMetadata.model_validate_json(raw_result) + except ValidationError as exc: + _logger.debug( + "Failed to deserialize task metadata for task %s: %s", task_id, f"{exc}" + ) + return None + + async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: + raw_result = await self._redis_client_sdk.redis.hget(_build_key(task_id), _CELERY_TASK_PROGRESS_KEY) # type: ignore + if not raw_result: + return None + + try: + return ProgressReport.model_validate_json(raw_result) + except ValidationError as exc: + _logger.debug( + "Failed to deserialize task progress for task %s: %s", task_id, f"{exc}" + ) + return None + + async def list_tasks(self, task_context: TaskContext) -> list[Task]: + search_key = ( + _CELERY_TASK_INFO_PREFIX + + build_task_id_prefix(task_context) + + _CELERY_TASK_ID_KEY_SEPARATOR + ) + search_key_len = len(search_key) + + keys: list[str] = [] + pipeline = self._redis_client_sdk.redis.pipeline() + async for key in self._redis_client_sdk.redis.scan_iter( + match=search_key + "*", count=_CELERY_TASK_SCAN_COUNT_PER_BATCH + ): + # fake redis (tests) returns bytes, real redis returns str + _key = ( + key.decode(_CELERY_TASK_ID_KEY_ENCODING) + if isinstance(key, bytes) + else key + ) + keys.append(_key) + pipeline.hget(_key, _CELERY_TASK_METADATA_KEY) + + results = await pipeline.execute() + + tasks = [] + for key, raw_metadata in zip(keys, results, strict=True): + if raw_metadata is None: + continue + + with contextlib.suppress(ValidationError): + task_metadata = TaskMetadata.model_validate_json(raw_metadata) + tasks.append( + Task( + uuid=TaskUUID(key[search_key_len:]), + metadata=task_metadata, + ) + ) + + return tasks + + async def remove_task(self, task_id: TaskID) -> None: + await self._redis_client_sdk.redis.delete(_build_key(task_id)) + + async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: + await self._redis_client_sdk.redis.hset( + name=_build_key(task_id), + key=_CELERY_TASK_PROGRESS_KEY, + value=report.model_dump_json(), + ) # type: ignore diff --git a/packages/celery-library/src/celery-library/client.py b/packages/celery-library/src/celery-library/client.py new file mode 100644 index 00000000000..f68baf558fe --- /dev/null +++ b/packages/celery-library/src/celery-library/client.py @@ -0,0 +1,157 @@ +import logging +from dataclasses import dataclass +from typing import Any +from uuid import uuid4 + +from celery import Celery # type: ignore[import-untyped] +from celery.contrib.abortable import ( # type: ignore[import-untyped] + AbortableAsyncResult, +) +from common_library.async_tools import make_async +from models_library.progress_bar import ProgressReport +from servicelib.logging_utils import log_context +from settings_library.celery import CelerySettings + +from .models import ( + Task, + TaskContext, + 
TaskID, + TaskInfoStore, + TaskMetadata, + TaskState, + TaskStatus, + TaskUUID, + build_task_id, +) + +_logger = logging.getLogger(__name__) + + +_MIN_PROGRESS_VALUE = 0.0 +_MAX_PROGRESS_VALUE = 1.0 + + +@dataclass +class CeleryTaskClient: + _celery_app: Celery + _celery_settings: CelerySettings + _task_info_store: TaskInfoStore + + async def submit_task( + self, + task_metadata: TaskMetadata, + *, + task_context: TaskContext, + **task_params, + ) -> TaskUUID: + with log_context( + _logger, + logging.DEBUG, + msg=f"Submit {task_metadata.name=}: {task_context=} {task_params=}", + ): + task_uuid = uuid4() + task_id = build_task_id(task_context, task_uuid) + self._celery_app.send_task( + task_metadata.name, + task_id=task_id, + kwargs=task_params, + queue=task_metadata.queue.value, + ) + + expiry = ( + self._celery_settings.CELERY_EPHEMERAL_RESULT_EXPIRES + if task_metadata.ephemeral + else self._celery_settings.CELERY_RESULT_EXPIRES + ) + await self._task_info_store.create_task( + task_id, task_metadata, expiry=expiry + ) + return task_uuid + + @make_async() + def _abort_task(self, task_id: TaskID) -> None: + AbortableAsyncResult(task_id, app=self._celery_app).abort() + + async def cancel_task(self, task_context: TaskContext, task_uuid: TaskUUID) -> None: + with log_context( + _logger, + logging.DEBUG, + msg=f"task cancellation: {task_context=} {task_uuid=}", + ): + task_id = build_task_id(task_context, task_uuid) + if not (await self.get_task_status(task_context, task_uuid)).is_done: + await self._abort_task(task_id) + await self._task_info_store.remove_task(task_id) + + @make_async() + def _forget_task(self, task_id: TaskID) -> None: + AbortableAsyncResult(task_id, app=self._celery_app).forget() + + async def get_task_result( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> Any: + with log_context( + _logger, + logging.DEBUG, + msg=f"Get task result: {task_context=} {task_uuid=}", + ): + task_id = build_task_id(task_context, task_uuid) + async_result = self._celery_app.AsyncResult(task_id) + result = async_result.result + if async_result.ready(): + task_metadata = await self._task_info_store.get_task_metadata(task_id) + if task_metadata is not None and task_metadata.ephemeral: + await self._forget_task(task_id) + await self._task_info_store.remove_task(task_id) + return result + + async def _get_task_progress_report( + self, task_context: TaskContext, task_uuid: TaskUUID, task_state: TaskState + ) -> ProgressReport: + if task_state in (TaskState.STARTED, TaskState.RETRY, TaskState.ABORTED): + task_id = build_task_id(task_context, task_uuid) + progress = await self._task_info_store.get_task_progress(task_id) + if progress is not None: + return progress + if task_state in ( + TaskState.SUCCESS, + TaskState.FAILURE, + ): + return ProgressReport( + actual_value=_MAX_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE + ) + + # task is pending + return ProgressReport( + actual_value=_MIN_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE + ) + + @make_async() + def _get_task_celery_state(self, task_id: TaskID) -> TaskState: + return TaskState(self._celery_app.AsyncResult(task_id).state) + + async def get_task_status( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> TaskStatus: + with log_context( + _logger, + logging.DEBUG, + msg=f"Getting task status: {task_context=} {task_uuid=}", + ): + task_id = build_task_id(task_context, task_uuid) + task_state = await self._get_task_celery_state(task_id) + return TaskStatus( + task_uuid=task_uuid, + task_state=task_state, + progress_report=await 
self._get_task_progress_report( + task_context, task_uuid, task_state + ), + ) + + async def list_tasks(self, task_context: TaskContext) -> list[Task]: + with log_context( + _logger, + logging.DEBUG, + msg=f"Listing tasks: {task_context=}", + ): + return await self._task_info_store.list_tasks(task_context) diff --git a/packages/celery-library/src/celery-library/errors.py b/packages/celery-library/src/celery-library/errors.py new file mode 100644 index 00000000000..0e340f35e71 --- /dev/null +++ b/packages/celery-library/src/celery-library/errors.py @@ -0,0 +1,24 @@ +import base64 +import pickle + + +class TransferrableCeleryError(Exception): + def __repr__(self) -> str: + exception = decode_celery_transferrable_error(self) + return f"{self.__class__.__name__}({exception.__class__.__name__}({exception}))" + + def __str__(self) -> str: + return f"{decode_celery_transferrable_error(self)}" + + +def encore_celery_transferrable_error(error: Exception) -> TransferrableCeleryError: + # NOTE: Celery modifies exceptions during serialization, which can cause + # the original error context to be lost. This mechanism ensures the same + # error can be recreated on the caller side exactly as it was raised here. + return TransferrableCeleryError(base64.b64encode(pickle.dumps(error))) + + +def decode_celery_transferrable_error(error: TransferrableCeleryError) -> Exception: + assert isinstance(error, TransferrableCeleryError) # nosec + result: Exception = pickle.loads(base64.b64decode(error.args[0])) # noqa: S301 + return result diff --git a/packages/celery-library/src/celery-library/models.py b/packages/celery-library/src/celery-library/models.py new file mode 100644 index 00000000000..8b19d124ff1 --- /dev/null +++ b/packages/celery-library/src/celery-library/models.py @@ -0,0 +1,92 @@ +from datetime import timedelta +from enum import StrEnum +from typing import Annotated, Any, Final, Protocol, TypeAlias +from uuid import UUID + +from models_library.progress_bar import ProgressReport +from pydantic import BaseModel, StringConstraints + +TaskContext: TypeAlias = dict[str, Any] +TaskID: TypeAlias = str +TaskName: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1) +] +TaskUUID: TypeAlias = UUID + +_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" + + +def build_task_id_prefix(task_context: TaskContext) -> str: + return _CELERY_TASK_ID_KEY_SEPARATOR.join( + [f"{task_context[key]}" for key in sorted(task_context)] + ) + + +def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: + return _CELERY_TASK_ID_KEY_SEPARATOR.join( + [build_task_id_prefix(task_context), f"{task_uuid}"] + ) + + +class TaskState(StrEnum): + PENDING = "PENDING" + STARTED = "STARTED" + RETRY = "RETRY" + SUCCESS = "SUCCESS" + FAILURE = "FAILURE" + ABORTED = "ABORTED" + + +class TasksQueue(StrEnum): + CPU_BOUND = "cpu_bound" + DEFAULT = "default" + + +class TaskMetadata(BaseModel): + name: TaskName + ephemeral: bool = True + queue: TasksQueue = TasksQueue.DEFAULT + + +class Task(BaseModel): + uuid: TaskUUID + metadata: TaskMetadata + + +_TASK_DONE = {TaskState.SUCCESS, TaskState.FAILURE, TaskState.ABORTED} + + +class TaskInfoStore(Protocol): + async def create_task( + self, + task_id: TaskID, + task_metadata: TaskMetadata, + expiry: timedelta, + ) -> None: ... + + async def exists_task(self, task_id: TaskID) -> bool: ... + + async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: ... + + async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: ... 
+ + async def list_tasks(self, task_context: TaskContext) -> list[Task]: ... + + async def remove_task(self, task_id: TaskID) -> None: ... + + async def set_task_progress( + self, task_id: TaskID, report: ProgressReport + ) -> None: ... + + +class TaskStatus(BaseModel): + task_uuid: TaskUUID + task_state: TaskState + progress_report: ProgressReport + + @property + def is_done(self) -> bool: + return self.task_state in _TASK_DONE + + +TaskId: TypeAlias = str diff --git a/packages/celery-library/src/celery-library/signals.py b/packages/celery-library/src/celery-library/signals.py new file mode 100644 index 00000000000..113d26c3566 --- /dev/null +++ b/packages/celery-library/src/celery-library/signals.py @@ -0,0 +1,99 @@ +import asyncio +import datetime +import logging +import threading +from typing import Final + +from asgi_lifespan import LifespanManager +from celery import Celery # type: ignore[import-untyped] +from fastapi import FastAPI +from servicelib.logging_utils import log_context +from servicelib.redis._client import RedisClientSDK +from settings_library.redis import RedisDatabase +from simcore_service_storage._meta import APP_NAME + +from ...core.application import create_app +from ...core.settings import ApplicationSettings +from . import set_event_loop +from .backends._redis import RedisTaskInfoStore +from .utils import ( + get_fastapi_app, + set_celery_worker, + set_fastapi_app, +) +from .worker import CeleryTaskWorker + +_logger = logging.getLogger(__name__) + +_SHUTDOWN_TIMEOUT: Final[float] = datetime.timedelta(seconds=10).total_seconds() +_STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() + + +def on_worker_init(sender, **_kwargs) -> None: + startup_complete_event = threading.Event() + + def _init(startup_complete_event: threading.Event) -> None: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + shutdown_event = asyncio.Event() + + app_settings = ApplicationSettings.create_from_envs() + fastapi_app = create_app(app_settings) + + assert app_settings.STORAGE_CELERY + celery_settings = app_settings.STORAGE_CELERY + + async def setup_task_worker(): + redis_client_sdk = RedisClientSDK( + celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS + ), + client_name=f"{APP_NAME}.celery_tasks", + ) + + set_celery_worker( + sender.app, CeleryTaskWorker(RedisTaskInfoStore(redis_client_sdk)) + ) + + async def fastapi_lifespan( + startup_complete_event: threading.Event, shutdown_event: asyncio.Event + ) -> None: + async with LifespanManager( + fastapi_app, + startup_timeout=_STARTUP_TIMEOUT, + shutdown_timeout=_SHUTDOWN_TIMEOUT, + ): + try: + _logger.info("fastapi APP started!") + startup_complete_event.set() + await shutdown_event.wait() + except asyncio.CancelledError: + _logger.warning("Lifespan task cancelled") + + fastapi_app.state.shutdown_event = shutdown_event + set_event_loop(fastapi_app, loop) + + set_fastapi_app(sender.app, fastapi_app) + loop.run_until_complete(setup_task_worker()) + loop.run_until_complete( + fastapi_lifespan(startup_complete_event, shutdown_event) + ) + + thread = threading.Thread( + group=None, + target=_init, + name="fastapi_app", + args=(startup_complete_event,), + daemon=True, + ) + thread.start() + # ensure the fastapi app is ready before going on + startup_complete_event.wait(_STARTUP_TIMEOUT * 1.1) + + +def on_worker_shutdown(sender, **_kwargs) -> None: + with log_context(_logger, logging.INFO, "Worker Shuts-down"): + assert isinstance(sender.app, Celery) + fastapi_app = 
get_fastapi_app(sender.app) + assert isinstance(fastapi_app, FastAPI) + fastapi_app.state.shutdown_event.set() diff --git a/packages/celery-library/src/celery-library/utils.py b/packages/celery-library/src/celery-library/utils.py new file mode 100644 index 00000000000..d09c1a1ce41 --- /dev/null +++ b/packages/celery-library/src/celery-library/utils.py @@ -0,0 +1,27 @@ +from celery import Celery # type: ignore[import-untyped] +from fastapi import FastAPI + +from .worker import CeleryTaskWorker + +_WORKER_KEY = "celery_worker" +_FASTAPI_APP_KEY = "fastapi_app" + + +def set_celery_worker(celery_app: Celery, worker: CeleryTaskWorker) -> None: + celery_app.conf[_WORKER_KEY] = worker + + +def get_celery_worker(celery_app: Celery) -> CeleryTaskWorker: + worker = celery_app.conf[_WORKER_KEY] + assert isinstance(worker, CeleryTaskWorker) + return worker + + +def set_fastapi_app(celery_app: Celery, fastapi_app: FastAPI) -> None: + celery_app.conf[_FASTAPI_APP_KEY] = fastapi_app + + +def get_fastapi_app(celery_app: Celery) -> FastAPI: + fastapi_app = celery_app.conf[_FASTAPI_APP_KEY] + assert isinstance(fastapi_app, FastAPI) + return fastapi_app diff --git a/packages/celery-library/src/celery-library/worker.py b/packages/celery-library/src/celery-library/worker.py new file mode 100644 index 00000000000..a5e98ac09df --- /dev/null +++ b/packages/celery-library/src/celery-library/worker.py @@ -0,0 +1,19 @@ +import logging +from dataclasses import dataclass + +from models_library.progress_bar import ProgressReport + +from ..celery.models import TaskID, TaskInfoStore + +_logger = logging.getLogger(__name__) + + +@dataclass +class CeleryTaskWorker: + _task_info_store: TaskInfoStore + + async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: + await self._task_info_store.set_task_progress( + task_id=task_id, + report=report, + ) From 5eff59dee9d085716447d3de78302a3577e18cc8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 11:17:03 +0200 Subject: [PATCH 03/91] rename --- packages/celery-library/src/celery-library/worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/celery-library/src/celery-library/worker.py b/packages/celery-library/src/celery-library/worker.py index a5e98ac09df..1d5b603b7a5 100644 --- a/packages/celery-library/src/celery-library/worker.py +++ b/packages/celery-library/src/celery-library/worker.py @@ -3,7 +3,7 @@ from models_library.progress_bar import ProgressReport -from ..celery.models import TaskID, TaskInfoStore +from .models import TaskID, TaskInfoStore _logger = logging.getLogger(__name__) From 8eb12a01deb520febb3fa370f480838d67a2832b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 11:19:27 +0200 Subject: [PATCH 04/91] add codeowner --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d8684350361..b0fde245e8a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -13,6 +13,7 @@ Makefile @pcrespov @sanderegg /api/ @sanderegg @pcrespov @matusdrobuliak66 /ci/ @sanderegg @pcrespov /docs/ @pcrespov +/packages/celery-library/ @giancarloromeo /packages/common-library/ @giancarloromeo /packages/models-library/ @sanderegg @pcrespov @matusdrobuliak66 @giancarloromeo /packages/postgres-database/ @matusdrobuliak66 From 48b4532f6717ae02ae93d1255fcb6830f960f01f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 11:24:58 +0200 Subject: [PATCH 05/91] add dependency --- services/storage/requirements/_base.in | 
1 + services/storage/requirements/ci.txt | 1 + services/storage/requirements/dev.txt | 1 + 3 files changed, 3 insertions(+) diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index cf0ccfdba89..22e8fac0e03 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -6,6 +6,7 @@ --requirement ../../../packages/aws-library/requirements/_base.in +--requirement ../../../packages/celery-library/requirements/_base.in --requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt index 31b66afbe90..bb397a78f5e 100644 --- a/services/storage/requirements/ci.txt +++ b/services/storage/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library/ +simcore-celery-library @ ../../packages/celery-library/ simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt index 253cec8dbcb..b428181c8c5 100644 --- a/services/storage/requirements/dev.txt +++ b/services/storage/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library/ +--editable ../../packages/celery-library/ --editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database/ From f4ca78acce326c8796d75de08767fadbacb52a2c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 11:42:36 +0200 Subject: [PATCH 06/91] add reqs --- .../celery-library/requirements/_base.txt | 419 ++++++++++++++++++ .../celery-library/requirements/_test.txt | 134 ++++++ .../celery-library/requirements/_tools.txt | 84 ++++ 3 files changed, 637 insertions(+) create mode 100644 packages/celery-library/requirements/_base.txt create mode 100644 packages/celery-library/requirements/_test.txt create mode 100644 packages/celery-library/requirements/_tools.txt diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt new file mode 100644 index 00000000000..be5b739b431 --- /dev/null +++ b/packages/celery-library/requirements/_base.txt @@ -0,0 +1,419 @@ +aio-pika==9.5.5 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiocache==0.12.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodocker==0.24.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.11.18 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiormq==6.8.1 + # via aio-pika +aiosignal==1.3.2 + # via aiohttp +amqp==5.3.1 + # via kombu +annotated-types==0.7.0 + # via pydantic +anyio==4.9.0 + # via + # fast-depends + # faststream +arrow==1.3.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +attrs==25.3.0 + # via + # aiohttp + # jsonschema + # referencing +billiard==4.2.1 + # via celery +celery==5.5.2 + # via -r requirements/_base.in +certifi==2025.4.26 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../requirements/constraints.txt + # requests +charset-normalizer==3.4.2 + # via requests +click==8.1.8 + # via + # celery + # click-didyoumean + # click-plugins + # click-repl + # typer +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1 + # via celery +click-repl==0.3.0 + # via celery +deprecated==1.2.18 + # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-semantic-conventions +dnspython==2.7.0 + # via email-validator +email-validator==2.2.0 + # via pydantic +exceptiongroup==1.3.0 + # via aio-pika +fast-depends==2.4.12 + # via faststream +faststream==0.5.41 + # via -r requirements/../../../packages/service-library/requirements/_base.in +frozenlist==1.6.0 + # via + # aiohttp + # aiosignal +googleapis-common-protos==1.70.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.71.0 + # via opentelemetry-exporter-otlp-proto-grpc +idna==3.10 + # via + # anyio + # email-validator + # requests + # yarl +importlib-metadata==8.6.1 + # via opentelemetry-api +jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2025.4.1 + # via jsonschema +kombu==5.5.3 + # via celery +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.4.3 + # via + # aiohttp + # yarl +opentelemetry-api==1.33.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-logging + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-otlp==1.33.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-exporter-otlp-proto-common==1.33.1 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.33.1 + # via opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http==1.33.1 + # via opentelemetry-exporter-otlp +opentelemetry-instrumentation==0.54b1 + # via + # opentelemetry-instrumentation-logging + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-logging==0.54b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-redis==0.54b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-requests==0.54b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.33.1 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.33.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-semantic-conventions==0.54b1 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.54b1 + # via 
opentelemetry-instrumentation-requests +orjson==3.10.18 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +packaging==25.0 + # via opentelemetry-instrumentation +pamqp==3.3.0 + # via aiormq +prompt-toolkit==3.0.51 + # via click-repl +propcache==0.3.1 + # via + # aiohttp + # yarl +protobuf==5.29.4 + # via + # googleapis-common-protos + # opentelemetry-proto +psutil==7.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pycryptodome==3.23.0 + # via stream-zip +pydantic==2.11.4 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.33.2 + # via pydantic +pydantic-extra-types==2.10.4 + # via + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +pygments==2.19.1 + # via rich +pyinstrument==5.0.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +python-dateutil==2.9.0.post0 + # via + # arrow + # celery +python-dotenv==1.1.0 + # via pydantic-settings +pyyaml==6.0.2 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +redis==6.1.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +referencing==0.35.1 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # jsonschema + # jsonschema-specifications +requests==2.32.3 + # via opentelemetry-exporter-otlp-proto-http +rich==14.0.0 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer +rpds-py==0.25.0 + # via + # jsonschema + # referencing +shellingham==1.5.4 + # via typer +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via anyio +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tenacity==9.1.2 + # via -r requirements/../../../packages/service-library/requirements/_base.in +toolz==1.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tqdm==4.67.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +typer==0.15.4 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20250516 + # via arrow +typing-extensions==4.13.2 + # via + # aiodebug + # anyio + # exceptiongroup + # faststream + # opentelemetry-sdk + # pydantic + # pydantic-core + # pydantic-extra-types + # typer + # typing-inspection +typing-inspection==0.4.0 + # via pydantic +tzdata==2025.2 + # via kombu +urllib3==2.4.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # requests +vine==5.1.0 + # via + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via prompt-toolkit +wrapt==1.17.2 + # via + # deprecated + # opentelemetry-instrumentation + # opentelemetry-instrumentation-redis +yarl==1.20.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # aio-pika + # aiohttp + # aiormq +zipp==3.21.0 + # via importlib-metadata diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt new file mode 100644 index 00000000000..1bf99eeff9f --- /dev/null +++ b/packages/celery-library/requirements/_test.txt @@ -0,0 +1,134 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic +anyio==4.9.0 + # via + # -c requirements/_base.txt + # httpx + # starlette +certifi==2025.4.26 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # httpcore + # httpx +coverage==7.8.0 + # via + # -r requirements/_test.in + # pytest-cov +faker==37.3.0 + # via -r requirements/_test.in +fastapi==0.115.12 + # via -r requirements/_test.in +flexcache==0.3 + # via pint +flexparser==0.4 + # via pint +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +icdiff==2.0.7 + # via pytest-icdiff +idna==3.10 + # via + # -c requirements/_base.txt + # anyio + # httpx +iniconfig==2.1.0 + # via pytest +packaging==25.0 + # via + # -c requirements/_base.txt + # pytest + # pytest-sugar +pint==0.24.4 + # via -r requirements/_test.in +platformdirs==4.3.8 + # via pint +pluggy==1.6.0 + # via pytest +pprintpp==0.4.0 + # via pytest-icdiff +py-cpuinfo==9.0.0 + # via pytest-benchmark +pydantic==2.11.4 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fastapi +pydantic-core==2.33.2 + # via + # -c requirements/_base.txt + # pydantic +pytest==8.3.5 + # via + # -r requirements/_test.in + # pytest-asyncio + # pytest-benchmark + # pytest-cov + # pytest-icdiff + # pytest-instafail + # pytest-mock + # pytest-sugar +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +pytest-benchmark==5.1.0 + # via -r requirements/_test.in +pytest-cov==6.1.1 + # via -r requirements/_test.in +pytest-icdiff==0.9 + # via -r requirements/_test.in +pytest-instafail==0.5.0 + # via -r requirements/_test.in +pytest-mock==3.14.0 + # via -r requirements/_test.in +pytest-runner==6.0.1 + # via -r requirements/_test.in +pytest-sugar==1.0.0 + # via -r requirements/_test.in +python-dotenv==1.1.0 + # via + # -c requirements/_base.txt + # -r requirements/_test.in +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # -r requirements/_test.in +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio +starlette==0.46.2 + # via + # -c requirements/../../../requirements/constraints.txt + # fastapi +termcolor==3.1.0 + # via pytest-sugar +typing-extensions==4.13.2 + # via + # -c requirements/_base.txt + # anyio + # fastapi + # flexcache + # flexparser + # pint + # pydantic + # pydantic-core + # typing-inspection +typing-inspection==0.4.0 + # via + # -c requirements/_base.txt + # pydantic +tzdata==2025.2 + # via + # -c requirements/_base.txt + # faker diff --git 
a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt new file mode 100644 index 00000000000..e0213f1353c --- /dev/null +++ b/packages/celery-library/requirements/_tools.txt @@ -0,0 +1,84 @@ +astroid==3.3.10 + # via pylint +black==25.1.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.1.8 + # via + # -c requirements/_base.txt + # black + # pip-tools +dill==0.4.0 + # via pylint +distlib==0.3.9 + # via virtualenv +filelock==3.18.0 + # via virtualenv +identify==2.6.10 + # via pre-commit +isort==6.0.1 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.15.0 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.1.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via pre-commit +packaging==25.0 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via black +pip==25.1.1 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt +platformdirs==4.3.8 + # via + # -c requirements/_test.txt + # black + # pylint + # virtualenv +pre-commit==4.2.0 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.7 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # -c requirements/_test.txt + # pre-commit +ruff==0.11.10 + # via -r requirements/../../../requirements/devenv.txt +setuptools==80.7.1 + # via pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.13.2 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # mypy +virtualenv==20.31.2 + # via pre-commit +wheel==0.45.1 + # via pip-tools From 04690caa6d625dca400aedd2e8e0779dea17ee0c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 12:54:02 +0200 Subject: [PATCH 07/91] refactor --- .../src/celery-library/signals.py | 99 --------- .../__init__.py | 12 +- .../_common.py | 0 .../backends/__init__.py | 0 .../backends/_redis.py | 0 .../client.py | 0 .../errors.py | 0 .../models.py | 0 .../src/celery_library}/signals.py | 3 +- .../_task.py => celery_library/task.py} | 0 .../types.py} | 28 +-- .../utils.py | 0 .../worker.py | 0 .../api/_worker_tasks/_files.py | 4 +- .../api/_worker_tasks/_paths.py | 4 +- .../api/_worker_tasks/_simcore_s3.py | 4 +- .../api/_worker_tasks/tasks.py | 13 +- .../api/rest/_files.py | 4 +- .../api/rest/dependencies/celery.py | 5 +- .../api/rpc/_async_jobs.py | 13 +- .../simcore_service_storage/api/rpc/_paths.py | 4 +- .../api/rpc/_simcore_s3.py | 4 +- .../core/application.py | 4 +- .../modules/celery/__init__.py | 56 ----- .../modules/celery/_celery_types.py | 61 ----- .../modules/celery/_common.py | 37 ---- .../modules/celery/_task.py | 208 ------------------ .../modules/celery/backends/__init__.py | 0 .../modules/celery/backends/_redis.py | 133 ----------- .../modules/celery/client.py | 157 ------------- .../modules/celery/errors.py | 24 -- .../modules/celery/models.py | 92 -------- .../modules/celery/utils.py | 27 --- .../modules/celery/worker.py | 19 -- .../modules/celery/worker_main.py | 8 +- services/storage/tests/conftest.py | 9 +- 36 files changed, 54 insertions(+), 978 deletions(-) delete mode 100644 
packages/celery-library/src/celery-library/signals.py rename packages/celery-library/src/{celery-library => celery_library}/__init__.py (77%) rename packages/celery-library/src/{celery-library => celery_library}/_common.py (100%) rename packages/celery-library/src/{celery-library => celery_library}/backends/__init__.py (100%) rename packages/celery-library/src/{celery-library => celery_library}/backends/_redis.py (100%) rename packages/celery-library/src/{celery-library => celery_library}/client.py (100%) rename packages/celery-library/src/{celery-library => celery_library}/errors.py (100%) rename packages/celery-library/src/{celery-library => celery_library}/models.py (100%) rename {services/storage/src/simcore_service_storage/modules/celery => packages/celery-library/src/celery_library}/signals.py (96%) rename packages/celery-library/src/{celery-library/_task.py => celery_library/task.py} (100%) rename packages/celery-library/src/{celery-library/_celery_types.py => celery_library/types.py} (77%) rename packages/celery-library/src/{celery-library => celery_library}/utils.py (100%) rename packages/celery-library/src/{celery-library => celery_library}/worker.py (100%) delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/__init__.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/_celery_types.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/_common.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/_task.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/backends/__init__.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/client.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/errors.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/models.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/utils.py delete mode 100644 services/storage/src/simcore_service_storage/modules/celery/worker.py diff --git a/packages/celery-library/src/celery-library/signals.py b/packages/celery-library/src/celery-library/signals.py deleted file mode 100644 index 113d26c3566..00000000000 --- a/packages/celery-library/src/celery-library/signals.py +++ /dev/null @@ -1,99 +0,0 @@ -import asyncio -import datetime -import logging -import threading -from typing import Final - -from asgi_lifespan import LifespanManager -from celery import Celery # type: ignore[import-untyped] -from fastapi import FastAPI -from servicelib.logging_utils import log_context -from servicelib.redis._client import RedisClientSDK -from settings_library.redis import RedisDatabase -from simcore_service_storage._meta import APP_NAME - -from ...core.application import create_app -from ...core.settings import ApplicationSettings -from . 
import set_event_loop -from .backends._redis import RedisTaskInfoStore -from .utils import ( - get_fastapi_app, - set_celery_worker, - set_fastapi_app, -) -from .worker import CeleryTaskWorker - -_logger = logging.getLogger(__name__) - -_SHUTDOWN_TIMEOUT: Final[float] = datetime.timedelta(seconds=10).total_seconds() -_STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() - - -def on_worker_init(sender, **_kwargs) -> None: - startup_complete_event = threading.Event() - - def _init(startup_complete_event: threading.Event) -> None: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - shutdown_event = asyncio.Event() - - app_settings = ApplicationSettings.create_from_envs() - fastapi_app = create_app(app_settings) - - assert app_settings.STORAGE_CELERY - celery_settings = app_settings.STORAGE_CELERY - - async def setup_task_worker(): - redis_client_sdk = RedisClientSDK( - celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS - ), - client_name=f"{APP_NAME}.celery_tasks", - ) - - set_celery_worker( - sender.app, CeleryTaskWorker(RedisTaskInfoStore(redis_client_sdk)) - ) - - async def fastapi_lifespan( - startup_complete_event: threading.Event, shutdown_event: asyncio.Event - ) -> None: - async with LifespanManager( - fastapi_app, - startup_timeout=_STARTUP_TIMEOUT, - shutdown_timeout=_SHUTDOWN_TIMEOUT, - ): - try: - _logger.info("fastapi APP started!") - startup_complete_event.set() - await shutdown_event.wait() - except asyncio.CancelledError: - _logger.warning("Lifespan task cancelled") - - fastapi_app.state.shutdown_event = shutdown_event - set_event_loop(fastapi_app, loop) - - set_fastapi_app(sender.app, fastapi_app) - loop.run_until_complete(setup_task_worker()) - loop.run_until_complete( - fastapi_lifespan(startup_complete_event, shutdown_event) - ) - - thread = threading.Thread( - group=None, - target=_init, - name="fastapi_app", - args=(startup_complete_event,), - daemon=True, - ) - thread.start() - # ensure the fastapi app is ready before going on - startup_complete_event.wait(_STARTUP_TIMEOUT * 1.1) - - -def on_worker_shutdown(sender, **_kwargs) -> None: - with log_context(_logger, logging.INFO, "Worker Shuts-down"): - assert isinstance(sender.app, Celery) - fastapi_app = get_fastapi_app(sender.app) - assert isinstance(fastapi_app, FastAPI) - fastapi_app.state.shutdown_event.set() diff --git a/packages/celery-library/src/celery-library/__init__.py b/packages/celery-library/src/celery_library/__init__.py similarity index 77% rename from packages/celery-library/src/celery-library/__init__.py rename to packages/celery-library/src/celery_library/__init__.py index cbf60fda44e..86a514e1b81 100644 --- a/packages/celery-library/src/celery-library/__init__.py +++ b/packages/celery-library/src/celery_library/__init__.py @@ -3,29 +3,25 @@ from fastapi import FastAPI from servicelib.redis._client import RedisClientSDK +from settings_library.celery import CelerySettings from settings_library.redis import RedisDatabase -from ..._meta import APP_NAME -from ...core.settings import get_application_settings -from ._celery_types import register_celery_types from ._common import create_app from .backends._redis import RedisTaskInfoStore from .client import CeleryTaskClient +from .types import register_celery_types _logger = logging.getLogger(__name__) -def setup_celery_client(app: FastAPI) -> None: +def setup_celery_client(app: FastAPI, celery_settings: CelerySettings) -> None: async def on_startup() -> None: - application_settings 
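
The worker-side bootstrap shown above (the storage-local `signals.py` being removed, whose moved copy becomes `celery_library/signals.py`) spawns a thread with its own event loop, runs the FastAPI lifespan there, and attaches a `CeleryTaskWorker` backed by a `RedisTaskInfoStore` to the Celery app. A minimal sketch of how a worker entrypoint might wire these handlers in, assuming the post-refactor `celery_library` layout from this patch; reading `CelerySettings` via `create_from_envs()` is an assumption, not something shown in the diff:

```python
# Hypothetical worker entrypoint; assumes the `celery_library` package layout
# introduced by this patch. CelerySettings.create_from_envs() is an assumption.
from celery.signals import worker_init, worker_shutdown  # type: ignore[import-untyped]

from celery_library._common import create_app  # Celery app factory shown in _common.py
from celery_library.signals import on_worker_init, on_worker_shutdown
from settings_library.celery import CelerySettings

celery_settings = CelerySettings.create_from_envs()
app = create_app(celery_settings)

# The handlers spawn a thread with a dedicated event loop, run the FastAPI
# lifespan in it, and register a CeleryTaskWorker (Redis-backed task-info
# store) on the Celery app, exactly as in signals.py above.
worker_init.connect(on_worker_init)
worker_shutdown.connect(on_worker_shutdown)
```
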
= get_application_settings(app) - celery_settings = application_settings.STORAGE_CELERY - assert celery_settings # nosec celery_app = create_app(celery_settings) redis_client_sdk = RedisClientSDK( celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( RedisDatabase.CELERY_TASKS ), - client_name=f"{APP_NAME}.celery_tasks", + client_name=f"{app.title}.celery_tasks", ) app.state.celery_client = CeleryTaskClient( diff --git a/packages/celery-library/src/celery-library/_common.py b/packages/celery-library/src/celery_library/_common.py similarity index 100% rename from packages/celery-library/src/celery-library/_common.py rename to packages/celery-library/src/celery_library/_common.py diff --git a/packages/celery-library/src/celery-library/backends/__init__.py b/packages/celery-library/src/celery_library/backends/__init__.py similarity index 100% rename from packages/celery-library/src/celery-library/backends/__init__.py rename to packages/celery-library/src/celery_library/backends/__init__.py diff --git a/packages/celery-library/src/celery-library/backends/_redis.py b/packages/celery-library/src/celery_library/backends/_redis.py similarity index 100% rename from packages/celery-library/src/celery-library/backends/_redis.py rename to packages/celery-library/src/celery_library/backends/_redis.py diff --git a/packages/celery-library/src/celery-library/client.py b/packages/celery-library/src/celery_library/client.py similarity index 100% rename from packages/celery-library/src/celery-library/client.py rename to packages/celery-library/src/celery_library/client.py diff --git a/packages/celery-library/src/celery-library/errors.py b/packages/celery-library/src/celery_library/errors.py similarity index 100% rename from packages/celery-library/src/celery-library/errors.py rename to packages/celery-library/src/celery_library/errors.py diff --git a/packages/celery-library/src/celery-library/models.py b/packages/celery-library/src/celery_library/models.py similarity index 100% rename from packages/celery-library/src/celery-library/models.py rename to packages/celery-library/src/celery_library/models.py diff --git a/services/storage/src/simcore_service_storage/modules/celery/signals.py b/packages/celery-library/src/celery_library/signals.py similarity index 96% rename from services/storage/src/simcore_service_storage/modules/celery/signals.py rename to packages/celery-library/src/celery_library/signals.py index 113d26c3566..cfb3c0a0353 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -10,7 +10,6 @@ from servicelib.logging_utils import log_context from servicelib.redis._client import RedisClientSDK from settings_library.redis import RedisDatabase -from simcore_service_storage._meta import APP_NAME from ...core.application import create_app from ...core.settings import ApplicationSettings @@ -48,7 +47,7 @@ async def setup_task_worker(): celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( RedisDatabase.CELERY_TASKS ), - client_name=f"{APP_NAME}.celery_tasks", + client_name=f"{fastapi_app.title}.celery_tasks", ) set_celery_worker( diff --git a/packages/celery-library/src/celery-library/_task.py b/packages/celery-library/src/celery_library/task.py similarity index 100% rename from packages/celery-library/src/celery-library/_task.py rename to packages/celery-library/src/celery_library/task.py diff --git a/packages/celery-library/src/celery-library/_celery_types.py 
b/packages/celery-library/src/celery_library/types.py similarity index 77% rename from packages/celery-library/src/celery-library/_celery_types.py rename to packages/celery-library/src/celery_library/types.py index 4ed62e72775..bbd04eabc56 100644 --- a/packages/celery-library/src/celery-library/_celery_types.py +++ b/packages/celery-library/src/celery_library/types.py @@ -3,14 +3,8 @@ from typing import Any from kombu.utils.json import register_type # type: ignore[import-untyped] -from models_library.api_schemas_storage.storage_schemas import ( - FileUploadCompletionBody, - FoldersBody, -) from pydantic import BaseModel -from ...models import FileMetaData - def _path_encoder(obj): if isinstance(obj, Path): @@ -37,16 +31,6 @@ def _pydantic_model_decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseM return clz(**data) -def _register_pydantic_types(*models: type[BaseModel]) -> None: - for model in models: - register_type( - model, - _class_full_name(model), - encoder=_pydantic_model_encoder, - decoder=partial(_pydantic_model_decoder, model), - ) - - def register_celery_types() -> None: register_type( Path, @@ -56,6 +40,12 @@ def register_celery_types() -> None: ) register_type(set, _class_full_name(set), encoder=list, decoder=set) - _register_pydantic_types(FileUploadCompletionBody) - _register_pydantic_types(FileMetaData) - _register_pydantic_types(FoldersBody) + +def register_pydantic_types(*models: type[BaseModel]) -> None: + for model in models: + register_type( + model, + _class_full_name(model), + encoder=_pydantic_model_encoder, + decoder=partial(_pydantic_model_decoder, model), + ) diff --git a/packages/celery-library/src/celery-library/utils.py b/packages/celery-library/src/celery_library/utils.py similarity index 100% rename from packages/celery-library/src/celery-library/utils.py rename to packages/celery-library/src/celery_library/utils.py diff --git a/packages/celery-library/src/celery-library/worker.py b/packages/celery-library/src/celery_library/worker.py similarity index 100% rename from packages/celery-library/src/celery-library/worker.py rename to packages/celery-library/src/celery_library/worker.py diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index 871e8a7bcbc..2930656d825 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -1,6 +1,8 @@ import logging from celery import Task # type: ignore[import-untyped] +from celery_library.models import TaskId +from celery_library.utils import get_fastapi_app from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, ) @@ -10,8 +12,6 @@ from ...dsm import get_dsm_provider from ...models import FileMetaData -from ...modules.celery.models import TaskId -from ...modules.celery.utils import get_fastapi_app _logger = logging.getLogger(__name__) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 2f3d05da547..b61448d52ea 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -2,6 +2,8 @@ from pathlib import Path from celery import Task # type: ignore[import-untyped] +from celery_library.models import TaskId +from celery_library.utils import 
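
The `types.py` refactor above turns the storage-specific `_register_pydantic_types` helper into a public `register_pydantic_types`, and `setup_worker_tasks` now registers its own models (`FileMetaData`, `FileUploadCompletionBody`, `FoldersBody`) instead of the library hard-coding them. A short sketch of how another service could register an arbitrary pydantic model so kombu can serialize it across the broker; `JobResult` is a made-up example model, not part of this patch:

```python
# Illustrative only: `JobResult` is an invented model.
from pydantic import BaseModel

from celery_library.types import register_celery_types, register_pydantic_types


class JobResult(BaseModel):
    job_id: str
    size_in_bytes: int


def setup_serialization() -> None:
    # Path and set encoders plus the generic pydantic encoder/decoder pair
    register_celery_types()
    # After this call, tasks may accept/return JobResult instances directly:
    # kombu serializes them via model_dump(mode="json") and rebuilds them
    # with the registered decoder on the consumer side.
    register_pydantic_types(JobResult)
```
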
get_fastapi_app from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter @@ -10,8 +12,6 @@ from ...constants import MAX_CONCURRENT_S3_TASKS from ...dsm import get_dsm_provider -from ...modules.celery.models import TaskId -from ...modules.celery.utils import get_fastapi_app _logger = logging.getLogger(__name__) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 8a9f0a941cc..9a1c7d6502d 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -4,6 +4,8 @@ from aws_library.s3._models import S3ObjectKey from celery import Task # type: ignore[import-untyped] +from celery_library.models import TaskID, TaskId +from celery_library.utils import get_celery_worker, get_fastapi_app from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport from models_library.progress_bar import ProgressReport @@ -14,8 +16,6 @@ from servicelib.progress_bar import ProgressBarData from ...dsm import get_dsm_provider -from ...modules.celery.models import TaskID, TaskId -from ...modules.celery.utils import get_celery_worker, get_fastapi_app from ...simcore_s3_dsm import SimcoreS3DataManager _logger = logging.getLogger(__name__) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py index 6c74cd9792d..55e4be6ac5b 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -1,11 +1,16 @@ import logging from celery import Celery # type: ignore[import-untyped] +from celery_library.task import register_task +from celery_library.types import register_celery_types, register_pydantic_types from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) from servicelib.logging_utils import log_context -from ...modules.celery._celery_types import register_celery_types -from ...modules.celery._task import register_task +from ...models import FileMetaData from ._files import complete_upload_file from ._paths import compute_path_size, delete_paths from ._simcore_s3 import deep_copy_files_from_project, export_data @@ -15,6 +20,10 @@ def setup_worker_tasks(app: Celery) -> None: register_celery_types() + register_pydantic_types(FileMetaData) + register_pydantic_types(FileUploadCompletionBody) + register_pydantic_types(FoldersBody) + with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, export_data, dont_autoretry_for=(AccessRightError,)) register_task(app, compute_path_size) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index f4781841570..91ffc3fd864 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -2,6 +2,8 @@ from typing import Annotated, Final, cast from urllib.parse import quote +from celery_library.client import CeleryTaskClient +from 
celery_library.models import TaskMetadata, TaskUUID from fastapi import APIRouter, Depends, Header, Request from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData from models_library.api_schemas_storage.storage_schemas import ( @@ -34,8 +36,6 @@ StorageQueryParamsBase, UploadLinks, ) -from ...modules.celery.client import CeleryTaskClient -from ...modules.celery.models import TaskMetadata, TaskUUID from ...simcore_s3_dsm import SimcoreS3DataManager from .._worker_tasks._files import complete_upload_file as remote_complete_upload_file from .dependencies.celery import get_celery_client diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py index 58413bba852..277ef83d589 100644 --- a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py +++ b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py @@ -1,11 +1,10 @@ from typing import Annotated +from celery_library import get_celery_client as _get_celery_client_from_app +from celery_library.client import CeleryTaskClient from fastapi import Depends, FastAPI from servicelib.fastapi.dependencies import get_app -from ....modules.celery import get_celery_client as _get_celery_client_from_app -from ....modules.celery.client import CeleryTaskClient - def get_celery_client( app: Annotated[FastAPI, Depends(get_app)], diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py index 3186237eb7e..10c485f3644 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py @@ -3,6 +3,12 @@ import logging from celery.exceptions import CeleryError # type: ignore[import-untyped] +from celery_library import get_celery_client +from celery_library.errors import ( + TransferrableCeleryError, + decode_celery_transferrable_error, +) +from celery_library.models import TaskState from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -20,13 +26,6 @@ from servicelib.logging_utils import log_catch from servicelib.rabbitmq import RPCRouter -from ...modules.celery import get_celery_client -from ...modules.celery.errors import ( - TransferrableCeleryError, - decode_celery_transferrable_error, -) -from ...modules.celery.models import TaskState - _logger = logging.getLogger(__name__) router = RPCRouter() diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py index db0e69af38d..ea8491877aa 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -1,6 +1,8 @@ import logging from pathlib import Path +from celery_library import get_celery_client +from celery_library.models import TaskMetadata from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -9,8 +11,6 @@ from models_library.projects_nodes_io import LocationID from servicelib.rabbitmq import RPCRouter -from ...modules.celery import get_celery_client -from ...modules.celery.models import TaskMetadata from .._worker_tasks._paths import compute_path_size as remote_compute_path_size from .._worker_tasks._paths import delete_paths as remote_delete_paths diff --git 
a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index ba3830c0329..52724a1da23 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -1,3 +1,5 @@ +from celery_library import get_celery_client +from celery_library.models import TaskMetadata, TasksQueue from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -7,8 +9,6 @@ from models_library.api_schemas_webserver.storage import PathToExport from servicelib.rabbitmq import RPCRouter -from ...modules.celery import get_celery_client -from ...modules.celery.models import TaskMetadata, TasksQueue from .._worker_tasks._simcore_s3 import deep_copy_files_from_project, export_data router = RPCRouter() diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py index b70c2cef302..6b57eeb7f09 100644 --- a/services/storage/src/simcore_service_storage/core/application.py +++ b/services/storage/src/simcore_service_storage/core/application.py @@ -5,6 +5,7 @@ import logging +from celery_library import setup_celery_client from common_library.basic_types import BootModeEnum from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware @@ -33,7 +34,6 @@ from ..dsm import setup_dsm from ..dsm_cleaner import setup_dsm_cleaner from ..exceptions.handlers import set_exception_handlers -from ..modules.celery import setup_celery_client from ..modules.db import setup_db from ..modules.long_running_tasks import setup_rest_api_long_running_tasks_for_uploads from ..modules.rabbitmq import setup as setup_rabbitmq @@ -87,7 +87,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 if not settings.STORAGE_WORKER_MODE: setup_rabbitmq(app) setup_rpc_api_routes(app) - setup_celery_client(app) + setup_celery_client(app, celery_settings=settings.STORAGE_CELERY) setup_rest_api_long_running_tasks_for_uploads(app) setup_rest_api_routes(app, API_VTAG) set_exception_handlers(app) diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py deleted file mode 100644 index cbf60fda44e..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -import logging -from asyncio import AbstractEventLoop - -from fastapi import FastAPI -from servicelib.redis._client import RedisClientSDK -from settings_library.redis import RedisDatabase - -from ..._meta import APP_NAME -from ...core.settings import get_application_settings -from ._celery_types import register_celery_types -from ._common import create_app -from .backends._redis import RedisTaskInfoStore -from .client import CeleryTaskClient - -_logger = logging.getLogger(__name__) - - -def setup_celery_client(app: FastAPI) -> None: - async def on_startup() -> None: - application_settings = get_application_settings(app) - celery_settings = application_settings.STORAGE_CELERY - assert celery_settings # nosec - celery_app = create_app(celery_settings) - redis_client_sdk = RedisClientSDK( - celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS - ), - client_name=f"{APP_NAME}.celery_tasks", - ) - - app.state.celery_client = CeleryTaskClient( - celery_app, - celery_settings, - 
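
With the `application.py` change above, the caller now passes its settings explicitly (`setup_celery_client(app, celery_settings=settings.STORAGE_CELERY)`) instead of the library reaching into storage-specific application settings. A hedged sketch of the minimal FastAPI wiring for a service other than storage; the service name and where it keeps its `CelerySettings` are assumptions:

```python
# Sketch of the client-side wiring, assuming a service that owns a
# CelerySettings instance (field name and service title are illustrative).
from fastapi import FastAPI

from celery_library import get_celery_client, setup_celery_client
from settings_library.celery import CelerySettings


def init_app(celery_settings: CelerySettings) -> FastAPI:
    app = FastAPI(title="my-service")
    # Adds a startup handler that builds the Celery app, a Redis-backed
    # task-info store and stores a CeleryTaskClient on app.state.celery_client;
    # the Redis client name is derived from app.title, per the diff above.
    setup_celery_client(app, celery_settings=celery_settings)
    return app


# Anywhere after startup (e.g. in a request handler or RPC endpoint):
# client = get_celery_client(app)
```
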
RedisTaskInfoStore(redis_client_sdk), - ) - - register_celery_types() - - app.add_event_handler("startup", on_startup) - - -def get_celery_client(app: FastAPI) -> CeleryTaskClient: - assert hasattr(app.state, "celery_client") # nosec - celery_client = app.state.celery_client - assert isinstance(celery_client, CeleryTaskClient) - return celery_client - - -def get_event_loop(app: FastAPI) -> AbstractEventLoop: - event_loop = app.state.event_loop - assert isinstance(event_loop, AbstractEventLoop) - return event_loop - - -def set_event_loop(app: FastAPI, event_loop: AbstractEventLoop) -> None: - app.state.event_loop = event_loop diff --git a/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py b/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py deleted file mode 100644 index 4ed62e72775..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py +++ /dev/null @@ -1,61 +0,0 @@ -from functools import partial -from pathlib import Path -from typing import Any - -from kombu.utils.json import register_type # type: ignore[import-untyped] -from models_library.api_schemas_storage.storage_schemas import ( - FileUploadCompletionBody, - FoldersBody, -) -from pydantic import BaseModel - -from ...models import FileMetaData - - -def _path_encoder(obj): - if isinstance(obj, Path): - return {"__path__": True, "path": str(obj)} - return obj - - -# Define how Path objects are deserialized -def _path_decoder(obj): - if "__path__" in obj: - return Path(obj["path"]) - return obj - - -def _class_full_name(clz: type) -> str: - return ".".join([clz.__module__, clz.__qualname__]) - - -def _pydantic_model_encoder(obj: BaseModel, *args, **kwargs) -> dict[str, Any]: - return obj.model_dump(*args, **kwargs, mode="json") - - -def _pydantic_model_decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseModel: - return clz(**data) - - -def _register_pydantic_types(*models: type[BaseModel]) -> None: - for model in models: - register_type( - model, - _class_full_name(model), - encoder=_pydantic_model_encoder, - decoder=partial(_pydantic_model_decoder, model), - ) - - -def register_celery_types() -> None: - register_type( - Path, - _class_full_name(Path), - _path_encoder, - _path_decoder, - ) - register_type(set, _class_full_name(set), encoder=list, decoder=set) - - _register_pydantic_types(FileUploadCompletionBody) - _register_pydantic_types(FileMetaData) - _register_pydantic_types(FoldersBody) diff --git a/services/storage/src/simcore_service_storage/modules/celery/_common.py b/services/storage/src/simcore_service_storage/modules/celery/_common.py deleted file mode 100644 index 545bb98f682..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/_common.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging -import ssl -from typing import Any - -from celery import Celery # type: ignore[import-untyped] -from settings_library.celery import CelerySettings -from settings_library.redis import RedisDatabase - -_logger = logging.getLogger(__name__) - - -def _celery_configure(celery_settings: CelerySettings) -> dict[str, Any]: - base_config = { - "broker_connection_retry_on_startup": True, - "result_expires": celery_settings.CELERY_RESULT_EXPIRES, - "result_extended": True, - "result_serializer": "json", - "task_default_queue": "default", - "task_send_sent_event": True, - "task_track_started": True, - "worker_send_task_events": True, - } - if celery_settings.CELERY_REDIS_RESULT_BACKEND.REDIS_SECURE: - 
base_config["redis_backend_use_ssl"] = {"ssl_cert_reqs": ssl.CERT_NONE} - return base_config - - -def create_app(celery_settings: CelerySettings) -> Celery: - assert celery_settings - - return Celery( - broker=celery_settings.CELERY_RABBIT_BROKER.dsn, - backend=celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS, - ), - **_celery_configure(celery_settings), - ) diff --git a/services/storage/src/simcore_service_storage/modules/celery/_task.py b/services/storage/src/simcore_service_storage/modules/celery/_task.py deleted file mode 100644 index e367a3a73da..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/_task.py +++ /dev/null @@ -1,208 +0,0 @@ -import asyncio -import inspect -import logging -from collections.abc import Callable, Coroutine -from datetime import timedelta -from functools import wraps -from typing import Any, Concatenate, Final, ParamSpec, TypeVar, overload - -from celery import Celery # type: ignore[import-untyped] -from celery.contrib.abortable import ( # type: ignore[import-untyped] - AbortableAsyncResult, - AbortableTask, -) -from celery.exceptions import Ignore # type: ignore[import-untyped] -from pydantic import NonNegativeInt -from servicelib.async_utils import cancel_wait_task - -from . import get_event_loop -from .errors import encore_celery_transferrable_error -from .models import TaskID, TaskId -from .utils import get_fastapi_app - -_logger = logging.getLogger(__name__) - -_DEFAULT_TASK_TIMEOUT: Final[timedelta | None] = None -_DEFAULT_MAX_RETRIES: Final[NonNegativeInt] = 3 -_DEFAULT_WAIT_BEFORE_RETRY: Final[timedelta] = timedelta(seconds=5) -_DEFAULT_DONT_AUTORETRY_FOR: Final[tuple[type[Exception], ...]] = () -_DEFAULT_ABORT_TASK_TIMEOUT: Final[timedelta] = timedelta(seconds=1) -_DEFAULT_CANCEL_TASK_TIMEOUT: Final[timedelta] = timedelta(seconds=5) - -T = TypeVar("T") -P = ParamSpec("P") -R = TypeVar("R") - - -class TaskAbortedError(Exception): ... 
- - -def _async_task_wrapper( - app: Celery, -) -> Callable[ - [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], - Callable[Concatenate[AbortableTask, P], R], -]: - def decorator( - coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], - ) -> Callable[Concatenate[AbortableTask, P], R]: - @wraps(coro) - def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: - fastapi_app = get_fastapi_app(app) - # NOTE: task.request is a thread local object, so we need to pass the id explicitly - assert task.request.id is not None # nosec - - async def run_task(task_id: TaskID) -> R: - try: - async with asyncio.TaskGroup() as tg: - main_task = tg.create_task( - coro(task, task_id, *args, **kwargs), - ) - - async def abort_monitor(): - abortable_result = AbortableAsyncResult(task_id, app=app) - while not main_task.done(): - if abortable_result.is_aborted(): - await cancel_wait_task( - main_task, - max_delay=_DEFAULT_CANCEL_TASK_TIMEOUT.total_seconds(), - ) - AbortableAsyncResult(task_id, app=app).forget() - raise TaskAbortedError - await asyncio.sleep( - _DEFAULT_ABORT_TASK_TIMEOUT.total_seconds() - ) - - tg.create_task(abort_monitor()) - - return main_task.result() - except BaseExceptionGroup as eg: - task_aborted_errors, other_errors = eg.split(TaskAbortedError) - - if task_aborted_errors: - assert task_aborted_errors is not None # nosec - assert len(task_aborted_errors.exceptions) == 1 # nosec - raise task_aborted_errors.exceptions[0] from eg - - assert other_errors is not None # nosec - assert len(other_errors.exceptions) == 1 # nosec - raise other_errors.exceptions[0] from eg - - return asyncio.run_coroutine_threadsafe( - run_task(task.request.id), - get_event_loop(fastapi_app), - ).result() - - return wrapper - - return decorator - - -def _error_handling( - max_retries: NonNegativeInt, - delay_between_retries: timedelta, - dont_autoretry_for: tuple[type[Exception], ...], -) -> Callable[ - [Callable[Concatenate[AbortableTask, P], R]], - Callable[Concatenate[AbortableTask, P], R], -]: - def decorator( - func: Callable[Concatenate[AbortableTask, P], R], - ) -> Callable[Concatenate[AbortableTask, P], R]: - @wraps(func) - def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: - try: - return func(task, *args, **kwargs) - except TaskAbortedError as exc: - _logger.warning("Task %s was cancelled", task.request.id) - raise Ignore from exc - except Exception as exc: - if isinstance(exc, dont_autoretry_for): - _logger.debug("Not retrying for exception %s", type(exc).__name__) - # propagate without retry - raise encore_celery_transferrable_error(exc) from exc - - exc_type = type(exc).__name__ - exc_message = f"{exc}" - _logger.exception( - "Task %s failed with exception: %s:%s", - task.request.id, - exc_type, - exc_message, - ) - - raise task.retry( - max_retries=max_retries, - countdown=delay_between_retries.total_seconds(), - exc=encore_celery_transferrable_error(exc), - ) from exc - - return wrapper - - return decorator - - -@overload -def register_task( - app: Celery, - fn: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], - task_name: str | None = None, - timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, - max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, - delay_between_retries: timedelta = _DEFAULT_WAIT_BEFORE_RETRY, - dont_autoretry_for: tuple[type[Exception], ...] = _DEFAULT_DONT_AUTORETRY_FOR, -) -> None: ... 
- - -@overload -def register_task( - app: Celery, - fn: Callable[Concatenate[AbortableTask, P], R], - task_name: str | None = None, - timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, - max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, - delay_between_retries: timedelta = _DEFAULT_WAIT_BEFORE_RETRY, - dont_autoretry_for: tuple[type[Exception], ...] = _DEFAULT_DONT_AUTORETRY_FOR, -) -> None: ... - - -def register_task( # type: ignore[misc] - app: Celery, - fn: ( - Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]] - | Callable[Concatenate[AbortableTask, P], R] - ), - task_name: str | None = None, - timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, - max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, - delay_between_retries: timedelta = _DEFAULT_WAIT_BEFORE_RETRY, - dont_autoretry_for: tuple[type[Exception], ...] = _DEFAULT_DONT_AUTORETRY_FOR, -) -> None: - """Decorator to define a celery task with error handling and abortable support - - Keyword Arguments: - task_name -- name of the function used in Celery (default: {None} will be generated automatically) - timeout -- when None no timeout is enforced, task is allowed to run forever (default: {_DEFAULT_TASK_TIMEOUT}) - max_retries -- number of attempts in case of failuire before giving up (default: {_DEFAULT_MAX_RETRIES}) - delay_between_retries -- dealy between each attempt in case of error (default: {_DEFAULT_WAIT_BEFORE_RETRY}) - dont_autoretry_for -- exceptions that should not be retried when raised by the task - """ - wrapped_fn: Callable[Concatenate[AbortableTask, P], R] - if asyncio.iscoroutinefunction(fn): - wrapped_fn = _async_task_wrapper(app)(fn) - else: - assert inspect.isfunction(fn) # nosec - wrapped_fn = fn - - wrapped_fn = _error_handling( - max_retries=max_retries, - delay_between_retries=delay_between_retries, - dont_autoretry_for=dont_autoretry_for, - )(wrapped_fn) - - app.task( - name=task_name or fn.__name__, - bind=True, - base=AbortableTask, - time_limit=None if timeout is None else timeout.total_seconds(), - )(wrapped_fn) diff --git a/services/storage/src/simcore_service_storage/modules/celery/backends/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/backends/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py b/services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py deleted file mode 100644 index 3fd9984fb2a..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py +++ /dev/null @@ -1,133 +0,0 @@ -import contextlib -import logging -from datetime import timedelta -from typing import Final - -from models_library.progress_bar import ProgressReport -from pydantic import ValidationError -from servicelib.redis._client import RedisClientSDK - -from ..models import ( - Task, - TaskContext, - TaskID, - TaskMetadata, - TaskUUID, - build_task_id_prefix, -) - -_CELERY_TASK_INFO_PREFIX: Final[str] = "celery-task-info-" -_CELERY_TASK_ID_KEY_ENCODING = "utf-8" -_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" -_CELERY_TASK_SCAN_COUNT_PER_BATCH: Final[int] = 10000 -_CELERY_TASK_METADATA_KEY: Final[str] = "metadata" -_CELERY_TASK_PROGRESS_KEY: Final[str] = "progress" - -_logger = logging.getLogger(__name__) - - -def _build_key(task_id: TaskID) -> str: - return _CELERY_TASK_INFO_PREFIX + task_id - - -class RedisTaskInfoStore: - def __init__(self, redis_client_sdk: RedisClientSDK) -> None: - self._redis_client_sdk = 
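
The `register_task` docstring above explains the knobs (task name, timeout, retries, delay between retries, exceptions excluded from autoretry); the function wraps a plain or async callable with abort monitoring, retry handling and transferrable-error encoding. A hedged sketch of defining and registering an async task with the moved `celery_library.task.register_task`; `resize_image` and `ImageTooLargeError` are invented for illustration:

```python
# Illustrative task module; the task and its arguments are made up.
from celery import Celery  # type: ignore[import-untyped]
from celery.contrib.abortable import AbortableTask  # type: ignore[import-untyped]

from celery_library.models import TaskId
from celery_library.task import register_task


class ImageTooLargeError(Exception): ...


async def resize_image(task: AbortableTask, task_id: TaskId, *, width: int) -> int:
    # The coroutine runs on the FastAPI event loop attached to the worker;
    # task_id is passed explicitly because task.request is thread-local.
    return width // 2


def setup_worker_tasks(app: Celery) -> None:
    # Retries with the default backoff, but propagates ImageTooLargeError
    # immediately instead of retrying it.
    register_task(app, resize_image, dont_autoretry_for=(ImageTooLargeError,))
```
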
redis_client_sdk - - async def create_task( - self, - task_id: TaskID, - task_metadata: TaskMetadata, - expiry: timedelta, - ) -> None: - task_key = _build_key(task_id) - await self._redis_client_sdk.redis.hset( - name=task_key, - key=_CELERY_TASK_METADATA_KEY, - value=task_metadata.model_dump_json(), - ) # type: ignore - await self._redis_client_sdk.redis.expire( - task_key, - expiry, - ) - - async def exists_task(self, task_id: TaskID) -> bool: - n = await self._redis_client_sdk.redis.exists(_build_key(task_id)) - assert isinstance(n, int) # nosec - return n > 0 - - async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: - raw_result = await self._redis_client_sdk.redis.hget(_build_key(task_id), _CELERY_TASK_METADATA_KEY) # type: ignore - if not raw_result: - return None - - try: - return TaskMetadata.model_validate_json(raw_result) - except ValidationError as exc: - _logger.debug( - "Failed to deserialize task metadata for task %s: %s", task_id, f"{exc}" - ) - return None - - async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: - raw_result = await self._redis_client_sdk.redis.hget(_build_key(task_id), _CELERY_TASK_PROGRESS_KEY) # type: ignore - if not raw_result: - return None - - try: - return ProgressReport.model_validate_json(raw_result) - except ValidationError as exc: - _logger.debug( - "Failed to deserialize task progress for task %s: %s", task_id, f"{exc}" - ) - return None - - async def list_tasks(self, task_context: TaskContext) -> list[Task]: - search_key = ( - _CELERY_TASK_INFO_PREFIX - + build_task_id_prefix(task_context) - + _CELERY_TASK_ID_KEY_SEPARATOR - ) - search_key_len = len(search_key) - - keys: list[str] = [] - pipeline = self._redis_client_sdk.redis.pipeline() - async for key in self._redis_client_sdk.redis.scan_iter( - match=search_key + "*", count=_CELERY_TASK_SCAN_COUNT_PER_BATCH - ): - # fake redis (tests) returns bytes, real redis returns str - _key = ( - key.decode(_CELERY_TASK_ID_KEY_ENCODING) - if isinstance(key, bytes) - else key - ) - keys.append(_key) - pipeline.hget(_key, _CELERY_TASK_METADATA_KEY) - - results = await pipeline.execute() - - tasks = [] - for key, raw_metadata in zip(keys, results, strict=True): - if raw_metadata is None: - continue - - with contextlib.suppress(ValidationError): - task_metadata = TaskMetadata.model_validate_json(raw_metadata) - tasks.append( - Task( - uuid=TaskUUID(key[search_key_len:]), - metadata=task_metadata, - ) - ) - - return tasks - - async def remove_task(self, task_id: TaskID) -> None: - await self._redis_client_sdk.redis.delete(_build_key(task_id)) - - async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: - await self._redis_client_sdk.redis.hset( - name=_build_key(task_id), - key=_CELERY_TASK_PROGRESS_KEY, - value=report.model_dump_json(), - ) # type: ignore diff --git a/services/storage/src/simcore_service_storage/modules/celery/client.py b/services/storage/src/simcore_service_storage/modules/celery/client.py deleted file mode 100644 index f68baf558fe..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/client.py +++ /dev/null @@ -1,157 +0,0 @@ -import logging -from dataclasses import dataclass -from typing import Any -from uuid import uuid4 - -from celery import Celery # type: ignore[import-untyped] -from celery.contrib.abortable import ( # type: ignore[import-untyped] - AbortableAsyncResult, -) -from common_library.async_tools import make_async -from models_library.progress_bar import ProgressReport -from 
servicelib.logging_utils import log_context -from settings_library.celery import CelerySettings - -from .models import ( - Task, - TaskContext, - TaskID, - TaskInfoStore, - TaskMetadata, - TaskState, - TaskStatus, - TaskUUID, - build_task_id, -) - -_logger = logging.getLogger(__name__) - - -_MIN_PROGRESS_VALUE = 0.0 -_MAX_PROGRESS_VALUE = 1.0 - - -@dataclass -class CeleryTaskClient: - _celery_app: Celery - _celery_settings: CelerySettings - _task_info_store: TaskInfoStore - - async def submit_task( - self, - task_metadata: TaskMetadata, - *, - task_context: TaskContext, - **task_params, - ) -> TaskUUID: - with log_context( - _logger, - logging.DEBUG, - msg=f"Submit {task_metadata.name=}: {task_context=} {task_params=}", - ): - task_uuid = uuid4() - task_id = build_task_id(task_context, task_uuid) - self._celery_app.send_task( - task_metadata.name, - task_id=task_id, - kwargs=task_params, - queue=task_metadata.queue.value, - ) - - expiry = ( - self._celery_settings.CELERY_EPHEMERAL_RESULT_EXPIRES - if task_metadata.ephemeral - else self._celery_settings.CELERY_RESULT_EXPIRES - ) - await self._task_info_store.create_task( - task_id, task_metadata, expiry=expiry - ) - return task_uuid - - @make_async() - def _abort_task(self, task_id: TaskID) -> None: - AbortableAsyncResult(task_id, app=self._celery_app).abort() - - async def cancel_task(self, task_context: TaskContext, task_uuid: TaskUUID) -> None: - with log_context( - _logger, - logging.DEBUG, - msg=f"task cancellation: {task_context=} {task_uuid=}", - ): - task_id = build_task_id(task_context, task_uuid) - if not (await self.get_task_status(task_context, task_uuid)).is_done: - await self._abort_task(task_id) - await self._task_info_store.remove_task(task_id) - - @make_async() - def _forget_task(self, task_id: TaskID) -> None: - AbortableAsyncResult(task_id, app=self._celery_app).forget() - - async def get_task_result( - self, task_context: TaskContext, task_uuid: TaskUUID - ) -> Any: - with log_context( - _logger, - logging.DEBUG, - msg=f"Get task result: {task_context=} {task_uuid=}", - ): - task_id = build_task_id(task_context, task_uuid) - async_result = self._celery_app.AsyncResult(task_id) - result = async_result.result - if async_result.ready(): - task_metadata = await self._task_info_store.get_task_metadata(task_id) - if task_metadata is not None and task_metadata.ephemeral: - await self._forget_task(task_id) - await self._task_info_store.remove_task(task_id) - return result - - async def _get_task_progress_report( - self, task_context: TaskContext, task_uuid: TaskUUID, task_state: TaskState - ) -> ProgressReport: - if task_state in (TaskState.STARTED, TaskState.RETRY, TaskState.ABORTED): - task_id = build_task_id(task_context, task_uuid) - progress = await self._task_info_store.get_task_progress(task_id) - if progress is not None: - return progress - if task_state in ( - TaskState.SUCCESS, - TaskState.FAILURE, - ): - return ProgressReport( - actual_value=_MAX_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE - ) - - # task is pending - return ProgressReport( - actual_value=_MIN_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE - ) - - @make_async() - def _get_task_celery_state(self, task_id: TaskID) -> TaskState: - return TaskState(self._celery_app.AsyncResult(task_id).state) - - async def get_task_status( - self, task_context: TaskContext, task_uuid: TaskUUID - ) -> TaskStatus: - with log_context( - _logger, - logging.DEBUG, - msg=f"Getting task status: {task_context=} {task_uuid=}", - ): - task_id = build_task_id(task_context, task_uuid) - 
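A hedged usage sketch of the client API above, wrapped in a function so it stays self-contained; the context keys are made up, and the task name refers to the `add_numbers` sketch earlier:

```python
from typing import Any

from celery_library.client import CeleryTaskClient, TaskMetadata
from celery_library.models import TaskContext


async def submit_and_poll(client: CeleryTaskClient) -> Any:
    ctx: TaskContext = {"user_id": 42, "product_name": "osparc"}  # made-up context keys
    task_uuid = await client.submit_task(
        TaskMetadata(name="add_numbers"),  # the sketch task registered earlier
        task_context=ctx,
        x=1,
        y=2,  # extra keyword arguments are forwarded to the task itself
    )
    status = await client.get_task_status(ctx, task_uuid)
    return await client.get_task_result(ctx, task_uuid) if status.is_done else None
```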
task_state = await self._get_task_celery_state(task_id) - return TaskStatus( - task_uuid=task_uuid, - task_state=task_state, - progress_report=await self._get_task_progress_report( - task_context, task_uuid, task_state - ), - ) - - async def list_tasks(self, task_context: TaskContext) -> list[Task]: - with log_context( - _logger, - logging.DEBUG, - msg=f"Listing tasks: {task_context=}", - ): - return await self._task_info_store.list_tasks(task_context) diff --git a/services/storage/src/simcore_service_storage/modules/celery/errors.py b/services/storage/src/simcore_service_storage/modules/celery/errors.py deleted file mode 100644 index 0e340f35e71..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/errors.py +++ /dev/null @@ -1,24 +0,0 @@ -import base64 -import pickle - - -class TransferrableCeleryError(Exception): - def __repr__(self) -> str: - exception = decode_celery_transferrable_error(self) - return f"{self.__class__.__name__}({exception.__class__.__name__}({exception}))" - - def __str__(self) -> str: - return f"{decode_celery_transferrable_error(self)}" - - -def encore_celery_transferrable_error(error: Exception) -> TransferrableCeleryError: - # NOTE: Celery modifies exceptions during serialization, which can cause - # the original error context to be lost. This mechanism ensures the same - # error can be recreated on the caller side exactly as it was raised here. - return TransferrableCeleryError(base64.b64encode(pickle.dumps(error))) - - -def decode_celery_transferrable_error(error: TransferrableCeleryError) -> Exception: - assert isinstance(error, TransferrableCeleryError) # nosec - result: Exception = pickle.loads(base64.b64decode(error.args[0])) # noqa: S301 - return result diff --git a/services/storage/src/simcore_service_storage/modules/celery/models.py b/services/storage/src/simcore_service_storage/modules/celery/models.py deleted file mode 100644 index 8b19d124ff1..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/models.py +++ /dev/null @@ -1,92 +0,0 @@ -from datetime import timedelta -from enum import StrEnum -from typing import Annotated, Any, Final, Protocol, TypeAlias -from uuid import UUID - -from models_library.progress_bar import ProgressReport -from pydantic import BaseModel, StringConstraints - -TaskContext: TypeAlias = dict[str, Any] -TaskID: TypeAlias = str -TaskName: TypeAlias = Annotated[ - str, StringConstraints(strip_whitespace=True, min_length=1) -] -TaskUUID: TypeAlias = UUID - -_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" - - -def build_task_id_prefix(task_context: TaskContext) -> str: - return _CELERY_TASK_ID_KEY_SEPARATOR.join( - [f"{task_context[key]}" for key in sorted(task_context)] - ) - - -def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: - return _CELERY_TASK_ID_KEY_SEPARATOR.join( - [build_task_id_prefix(task_context), f"{task_uuid}"] - ) - - -class TaskState(StrEnum): - PENDING = "PENDING" - STARTED = "STARTED" - RETRY = "RETRY" - SUCCESS = "SUCCESS" - FAILURE = "FAILURE" - ABORTED = "ABORTED" - - -class TasksQueue(StrEnum): - CPU_BOUND = "cpu_bound" - DEFAULT = "default" - - -class TaskMetadata(BaseModel): - name: TaskName - ephemeral: bool = True - queue: TasksQueue = TasksQueue.DEFAULT - - -class Task(BaseModel): - uuid: TaskUUID - metadata: TaskMetadata - - -_TASK_DONE = {TaskState.SUCCESS, TaskState.FAILURE, TaskState.ABORTED} - - -class TaskInfoStore(Protocol): - async def create_task( - self, - task_id: TaskID, - task_metadata: TaskMetadata, - expiry: 
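A round-trip sketch of the error-transfer helpers above (keeping the `encore_` spelling as it appears in the code): the original exception is pickled on the worker side and reconstructed unchanged for the caller.

```python
from celery_library.errors import (
    decode_celery_transferrable_error,
    encore_celery_transferrable_error,
)

original = ValueError("quota exceeded for user 42")  # arbitrary example error
transferrable = encore_celery_transferrable_error(original)

recovered = decode_celery_transferrable_error(transferrable)
assert isinstance(recovered, ValueError)
assert f"{recovered}" == f"{original}"
```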
timedelta, - ) -> None: ... - - async def exists_task(self, task_id: TaskID) -> bool: ... - - async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: ... - - async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: ... - - async def list_tasks(self, task_context: TaskContext) -> list[Task]: ... - - async def remove_task(self, task_id: TaskID) -> None: ... - - async def set_task_progress( - self, task_id: TaskID, report: ProgressReport - ) -> None: ... - - -class TaskStatus(BaseModel): - task_uuid: TaskUUID - task_state: TaskState - progress_report: ProgressReport - - @property - def is_done(self) -> bool: - return self.task_state in _TASK_DONE - - -TaskId: TypeAlias = str diff --git a/services/storage/src/simcore_service_storage/modules/celery/utils.py b/services/storage/src/simcore_service_storage/modules/celery/utils.py deleted file mode 100644 index d09c1a1ce41..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/utils.py +++ /dev/null @@ -1,27 +0,0 @@ -from celery import Celery # type: ignore[import-untyped] -from fastapi import FastAPI - -from .worker import CeleryTaskWorker - -_WORKER_KEY = "celery_worker" -_FASTAPI_APP_KEY = "fastapi_app" - - -def set_celery_worker(celery_app: Celery, worker: CeleryTaskWorker) -> None: - celery_app.conf[_WORKER_KEY] = worker - - -def get_celery_worker(celery_app: Celery) -> CeleryTaskWorker: - worker = celery_app.conf[_WORKER_KEY] - assert isinstance(worker, CeleryTaskWorker) - return worker - - -def set_fastapi_app(celery_app: Celery, fastapi_app: FastAPI) -> None: - celery_app.conf[_FASTAPI_APP_KEY] = fastapi_app - - -def get_fastapi_app(celery_app: Celery) -> FastAPI: - fastapi_app = celery_app.conf[_FASTAPI_APP_KEY] - assert isinstance(fastapi_app, FastAPI) - return fastapi_app diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker.py b/services/storage/src/simcore_service_storage/modules/celery/worker.py deleted file mode 100644 index a5e98ac09df..00000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/worker.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging -from dataclasses import dataclass - -from models_library.progress_bar import ProgressReport - -from ..celery.models import TaskID, TaskInfoStore - -_logger = logging.getLogger(__name__) - - -@dataclass -class CeleryTaskWorker: - _task_info_store: TaskInfoStore - - async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: - await self._task_info_store.set_task_progress( - task_id=task_id, - report=report, - ) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index 58febcb61f6..db400ef7a73 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -4,14 +4,14 @@ from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] from servicelib.logging_utils import config_all_loggers +from simcore_celery_library.signals import ( + on_worker_init, + on_worker_shutdown, +) from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks from ...core.settings import ApplicationSettings from ._common import create_app as create_celery_app -from .signals import ( - on_worker_init, - on_worker_shutdown, -) _settings = ApplicationSettings.create_from_envs() diff --git a/services/storage/tests/conftest.py 
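A worked example for the task-id helpers in `models.py` above, assuming they keep the same module path inside `celery_library` (the context keys and UUID are arbitrary): the prefix joins the context values sorted by key, and the task UUID is appended last, which is also the pattern the Redis store scans for.

```python
from uuid import UUID

from celery_library.models import build_task_id, build_task_id_prefix

task_context = {"user_id": 42, "product_name": "osparc"}
task_uuid = UUID("12345678-1234-5678-1234-567812345678")

assert build_task_id_prefix(task_context) == "osparc:42"
assert build_task_id(task_context, task_uuid) == (
    "osparc:42:12345678-1234-5678-1234-567812345678"
)
```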
b/services/storage/tests/conftest.py index 6bbffe9cd19..fd6385519d1 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -24,6 +24,9 @@ from celery import Celery from celery.contrib.testing.worker import TestWorkController, start_worker from celery.signals import worker_init, worker_shutdown +from celery_library.signals import on_worker_init, on_worker_shutdown +from celery_library.utils import get_celery_worker +from celery_library.worker import CeleryTaskWorker from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI @@ -71,12 +74,6 @@ from simcore_service_storage.datcore_dsm import DatCoreDataManager from simcore_service_storage.dsm import get_dsm_provider from simcore_service_storage.models import FileMetaData, FileMetaDataAtDB, S3BucketName -from simcore_service_storage.modules.celery.signals import ( - on_worker_init, - on_worker_shutdown, -) -from simcore_service_storage.modules.celery.utils import get_celery_worker -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.modules.s3 import get_s3_client from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy import literal_column From 5cfdb51de5a28e13dbf138229dea369d9f3f0870 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 13:15:44 +0200 Subject: [PATCH 08/91] refactor --- .../src/celery_library/__init__.py | 2 +- .../celery_library/{_common.py => common.py} | 0 .../src/celery_library/signals.py | 8 ++--- .../modules/celery/worker_main.py | 6 ++-- .../storage/tests/unit/test_async_jobs.py | 10 +++---- .../storage/tests/unit/test_handlers_files.py | 2 +- .../storage/tests/unit/test_modules_celery.py | 29 +++++++++---------- .../tests/unit/test_modules_celery_errors.py | 4 +-- .../tests/unit/test_rpc_handlers_paths.py | 2 +- .../unit/test_rpc_handlers_simcore_s3.py | 2 +- 10 files changed, 31 insertions(+), 34 deletions(-) rename packages/celery-library/src/celery_library/{_common.py => common.py} (100%) diff --git a/packages/celery-library/src/celery_library/__init__.py b/packages/celery-library/src/celery_library/__init__.py index 86a514e1b81..28704063292 100644 --- a/packages/celery-library/src/celery_library/__init__.py +++ b/packages/celery-library/src/celery_library/__init__.py @@ -6,9 +6,9 @@ from settings_library.celery import CelerySettings from settings_library.redis import RedisDatabase -from ._common import create_app from .backends._redis import RedisTaskInfoStore from .client import CeleryTaskClient +from .common import create_app from .types import register_celery_types _logger = logging.getLogger(__name__) diff --git a/packages/celery-library/src/celery_library/_common.py b/packages/celery-library/src/celery_library/common.py similarity index 100% rename from packages/celery-library/src/celery_library/_common.py rename to packages/celery-library/src/celery_library/common.py diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index cfb3c0a0353..7385d1dcaf6 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -11,8 +11,6 @@ from servicelib.redis._client import RedisClientSDK from settings_library.redis import RedisDatabase -from ...core.application import create_app -from ...core.settings import ApplicationSettings from . 
import set_event_loop from .backends._redis import RedisTaskInfoStore from .utils import ( @@ -36,7 +34,6 @@ def _init(startup_complete_event: threading.Event) -> None: asyncio.set_event_loop(loop) shutdown_event = asyncio.Event() - app_settings = ApplicationSettings.create_from_envs() fastapi_app = create_app(app_settings) assert app_settings.STORAGE_CELERY @@ -51,7 +48,10 @@ async def setup_task_worker(): ) set_celery_worker( - sender.app, CeleryTaskWorker(RedisTaskInfoStore(redis_client_sdk)) + sender.app, + CeleryTaskWorker( + RedisTaskInfoStore(redis_client_sdk), + ), ) async def fastapi_lifespan( diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index db400ef7a73..e0dc2a50e46 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -3,15 +3,15 @@ import logging from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] -from servicelib.logging_utils import config_all_loggers -from simcore_celery_library.signals import ( +from celery_library.common import create_app as create_celery_app +from celery_library.signals import ( on_worker_init, on_worker_shutdown, ) +from servicelib.logging_utils import config_all_loggers from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks from ...core.settings import ApplicationSettings -from ._common import create_app as create_celery_app _settings = ApplicationSettings.create_from_envs() diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 36f29a15bd8..6870080ed21 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -10,6 +10,11 @@ import pytest from celery import Celery, Task +from celery_library import get_celery_client +from celery_library.client import TaskMetadata +from celery_library.models import TaskID +from celery_library.task import register_task +from celery_library.worker import CeleryTaskWorker from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -27,11 +32,6 @@ from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server -from simcore_service_storage.modules.celery import get_celery_client -from simcore_service_storage.modules.celery._task import register_task -from simcore_service_storage.modules.celery.client import TaskMetadata -from simcore_service_storage.modules.celery.models import TaskID -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from tenacity import ( AsyncRetrying, retry_if_exception_type, diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index db66eab9878..f4149b39672 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -23,6 +23,7 @@ from aiohttp import ClientSession from aws_library.s3 import S3KeyNotFoundError, S3ObjectKey, SimcoreS3API from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE +from celery_library.worker import CeleryTaskWorker from faker import Faker from fastapi import FastAPI from 
models_library.api_schemas_storage.storage_schemas import ( @@ -53,7 +54,6 @@ from servicelib.aiohttp import status from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID from simcore_service_storage.models import FileDownloadResponse, S3BucketName, UploadID -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy.ext.asyncio import AsyncEngine from tenacity.asyncio import AsyncRetrying diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index b1819aabb44..943f97a3051 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ b/services/storage/tests/unit/test_modules_celery.py @@ -13,28 +13,25 @@ import pytest from celery import Celery, Task from celery.contrib.abortable import AbortableTask -from common_library.errors_classes import OsparcErrorMixin -from fastapi import FastAPI -from models_library.progress_bar import ProgressReport -from servicelib.logging_utils import log_context -from simcore_service_storage.modules.celery import get_celery_client, get_event_loop -from simcore_service_storage.modules.celery._task import ( - AbortableAsyncResult, - register_task, -) -from simcore_service_storage.modules.celery.client import CeleryTaskClient -from simcore_service_storage.modules.celery.errors import TransferrableCeleryError -from simcore_service_storage.modules.celery.models import ( +from celery_library import get_celery_client, get_event_loop +from celery_library.client import CeleryTaskClient +from celery_library.errors import TransferrableCeleryError +from celery_library.models import ( TaskContext, TaskID, TaskMetadata, TaskState, ) -from simcore_service_storage.modules.celery.utils import ( - get_celery_worker, - get_fastapi_app, +from celery_library.task import ( + AbortableAsyncResult, + register_task, ) -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker +from celery_library.utils import get_celery_worker, get_fastapi_app +from celery_library.worker import CeleryTaskWorker +from common_library.errors_classes import OsparcErrorMixin +from fastapi import FastAPI +from models_library.progress_bar import ProgressReport +from servicelib.logging_utils import log_context from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed _logger = logging.getLogger(__name__) diff --git a/services/storage/tests/unit/test_modules_celery_errors.py b/services/storage/tests/unit/test_modules_celery_errors.py index 74000f7649e..2808025c099 100644 --- a/services/storage/tests/unit/test_modules_celery_errors.py +++ b/services/storage/tests/unit/test_modules_celery_errors.py @@ -1,9 +1,9 @@ import pytest -from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError -from simcore_service_storage.modules.celery.errors import ( +from celery_library.errors import ( decode_celery_transferrable_error, encore_celery_transferrable_error, ) +from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError @pytest.mark.parametrize( diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index 98cb75e8cba..c0f45284de2 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -13,6 +13,7 @@ from typing import Any, TypeAlias import pytest +from celery_library.worker import 
CeleryTaskWorker from faker import Faker from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( @@ -34,7 +35,6 @@ compute_path_size, delete_paths, ) -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager pytest_simcore_core_services_selection = ["postgres", "rabbit"] diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 751cdae4f2f..135b3b49cfd 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -20,6 +20,7 @@ import pytest import sqlalchemy as sa from celery.contrib.testing.worker import TestWorkController +from celery_library.worker import CeleryTaskWorker from faker import Faker from fastapi import FastAPI from fastapi.encoders import jsonable_encoder @@ -57,7 +58,6 @@ start_export_data, ) from simcore_postgres_database.storage_models import file_meta_data -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy.ext.asyncio import AsyncEngine from yarl import URL From d5cdf8ce0a88c659fcc88d22fbbbe73164dadc91 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 14:25:23 +0200 Subject: [PATCH 09/91] add celery dep --- services/storage/requirements/prod.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/services/storage/requirements/prod.txt b/services/storage/requirements/prod.txt index dc4a2da4805..40e94e3f436 100644 --- a/services/storage/requirements/prod.txt +++ b/services/storage/requirements/prod.txt @@ -11,6 +11,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library/ +simcore-celery-library @ ../../packages/celery-library/ simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ From a1de0bee80551527e3b4f73fc1aefacfa9eb69cd Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 14:33:42 +0200 Subject: [PATCH 10/91] continue --- .../celery-library/src/celery_library/signals.py | 12 +++++++----- .../modules/celery/__init__.py | 0 .../modules/celery/worker_main.py | 5 ++++- services/storage/tests/conftest.py | 14 ++++++++------ 4 files changed, 19 insertions(+), 12 deletions(-) create mode 100644 services/storage/src/simcore_service_storage/modules/celery/__init__.py diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 7385d1dcaf6..03296bdd314 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -26,7 +26,12 @@ _STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() -def on_worker_init(sender, **_kwargs) -> None: +def on_worker_init( + app_factory, + celery_settings, + sender, + **_kwargs, +) -> None: startup_complete_event = threading.Event() def _init(startup_complete_event: threading.Event) -> None: @@ -34,10 +39,7 @@ def _init(startup_complete_event: threading.Event) -> None: asyncio.set_event_loop(loop) shutdown_event = asyncio.Event() - fastapi_app = create_app(app_settings) - - assert app_settings.STORAGE_CELERY - celery_settings = app_settings.STORAGE_CELERY + fastapi_app = app_factory() async def 
setup_task_worker(): redis_client_sdk = RedisClientSDK( diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index e0dc2a50e46..d1ae4c46b01 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -1,6 +1,7 @@ """Main application to be deployed in for example uvicorn.""" import logging +from functools import partial from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] from celery_library.common import create_app as create_celery_app @@ -11,6 +12,7 @@ from servicelib.logging_utils import config_all_loggers from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks +from ...core.application import create_app from ...core.settings import ApplicationSettings _settings = ApplicationSettings.create_from_envs() @@ -26,7 +28,8 @@ assert _settings.STORAGE_CELERY app = create_celery_app(_settings.STORAGE_CELERY) -worker_init.connect(on_worker_init) +app_factory = partial(create_app(_settings)) +worker_init.connect(partial(on_worker_init, app_factory, _settings.STORAGE_CELERY)) worker_shutdown.connect(on_worker_shutdown) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index fd6385519d1..9388a33c2fc 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -12,6 +12,7 @@ import random import sys from collections.abc import AsyncIterator, Awaitable, Callable +from functools import partial from pathlib import Path from typing import Any, Final, cast @@ -974,10 +975,7 @@ def celery_config() -> dict[str, Any]: def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: celery_app = Celery(**celery_config) - for module in ( - "simcore_service_storage.modules.celery._common.create_app", - "simcore_service_storage.modules.celery.create_app", - ): + for module in ("celery_library.create_app",): mocker.patch(module, return_value=celery_app) return celery_app @@ -1000,13 +998,17 @@ async def with_storage_celery_worker_controller( register_celery_tasks: Callable[[Celery], None], ) -> AsyncIterator[TestWorkController]: # Signals must be explicitily connected - worker_init.connect(on_worker_init) + monkeypatch.setenv("STORAGE_WORKER_MODE", "true") + app_settings = ApplicationSettings.create_from_envs() + app_factory = partial(create_app, app_settings) + worker_init.connect( + partial(on_worker_init, app_factory, app_settings.STORAGE_CELERY) + ) worker_shutdown.connect(on_worker_shutdown) setup_worker_tasks(celery_app) register_celery_tasks(celery_app) - monkeypatch.setenv("STORAGE_WORKER_MODE", "true") with start_worker( celery_app, pool="threads", From 305f9253bae9748717d90ff1657f4b6e8ece342a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 15:46:51 +0200 Subject: [PATCH 11/91] continue --- .../src/celery_library/signals.py | 1 + .../celery-library/src/celery_library/task.py | 1 + .../src/celery_library/types.py | 21 ------------------- .../api/_worker_tasks/tasks.py | 10 +-------- .../modules/celery/worker_main.py | 14 ++++++++++--- services/storage/tests/conftest.py | 10 ++++++--- 6 files changed, 21 
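The `on_worker_init` changes above keep what appears to be the existing pattern: a private asyncio event loop gated by a `threading.Event` until start-up completes (within the one-minute `_STARTUP_TIMEOUT`). Purely as a simplified, generic sketch of that pattern, not the library's exact code:

```python
import asyncio
import threading


def start_worker_loop(async_setup, startup_timeout: float = 60.0) -> asyncio.AbstractEventLoop:
    """Run a private event loop in a side thread and wait until setup has finished."""
    started = threading.Event()
    loop = asyncio.new_event_loop()

    def _run() -> None:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(async_setup())  # e.g. create the Redis-backed CeleryTaskWorker
        started.set()
        loop.run_forever()  # keep the loop alive for work scheduled onto it later

    threading.Thread(target=_run, daemon=True).start()
    started.wait(timeout=startup_timeout)
    return loop
```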
insertions(+), 36 deletions(-) diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 03296bdd314..0c427180da3 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -40,6 +40,7 @@ def _init(startup_complete_event: threading.Event) -> None: shutdown_event = asyncio.Event() fastapi_app = app_factory() + assert isinstance(fastapi_app, FastAPI) # nosec async def setup_task_worker(): redis_client_sdk = RedisClientSDK( diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index e367a3a73da..39b9d0e87ee 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -205,4 +205,5 @@ def register_task( # type: ignore[misc] bind=True, base=AbortableTask, time_limit=None if timeout is None else timeout.total_seconds(), + pydantic=True, )(wrapped_fn) diff --git a/packages/celery-library/src/celery_library/types.py b/packages/celery-library/src/celery_library/types.py index bbd04eabc56..c7c2f0be1b8 100644 --- a/packages/celery-library/src/celery_library/types.py +++ b/packages/celery-library/src/celery_library/types.py @@ -1,9 +1,6 @@ -from functools import partial from pathlib import Path -from typing import Any from kombu.utils.json import register_type # type: ignore[import-untyped] -from pydantic import BaseModel def _path_encoder(obj): @@ -23,14 +20,6 @@ def _class_full_name(clz: type) -> str: return ".".join([clz.__module__, clz.__qualname__]) -def _pydantic_model_encoder(obj: BaseModel, *args, **kwargs) -> dict[str, Any]: - return obj.model_dump(*args, **kwargs, mode="json") - - -def _pydantic_model_decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseModel: - return clz(**data) - - def register_celery_types() -> None: register_type( Path, @@ -39,13 +28,3 @@ def register_celery_types() -> None: _path_decoder, ) register_type(set, _class_full_name(set), encoder=list, decoder=set) - - -def register_pydantic_types(*models: type[BaseModel]) -> None: - for model in models: - register_type( - model, - _class_full_name(model), - encoder=_pydantic_model_encoder, - decoder=partial(_pydantic_model_decoder, model), - ) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py index 55e4be6ac5b..b8dc131c98f 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -2,15 +2,10 @@ from celery import Celery # type: ignore[import-untyped] from celery_library.task import register_task -from celery_library.types import register_celery_types, register_pydantic_types +from celery_library.types import register_celery_types from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError -from models_library.api_schemas_storage.storage_schemas import ( - FileUploadCompletionBody, - FoldersBody, -) from servicelib.logging_utils import log_context -from ...models import FileMetaData from ._files import complete_upload_file from ._paths import compute_path_size, delete_paths from ._simcore_s3 import deep_copy_files_from_project, export_data @@ -20,9 +15,6 @@ def setup_worker_tasks(app: Celery) -> None: register_celery_types() - register_pydantic_types(FileMetaData) - register_pydantic_types(FileUploadCompletionBody) - 
register_pydantic_types(FoldersBody) with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, export_data, dont_autoretry_for=(AccessRightError,)) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index d1ae4c46b01..2ac9aed23c0 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -26,10 +26,18 @@ ) -assert _settings.STORAGE_CELERY +assert _settings.STORAGE_CELERY # nosec app = create_celery_app(_settings.STORAGE_CELERY) -app_factory = partial(create_app(_settings)) -worker_init.connect(partial(on_worker_init, app_factory, _settings.STORAGE_CELERY)) +app_factory = partial(create_app, _settings) + + +def worker_init_wrapper(sender, **_kwargs): + return partial(on_worker_init, app_factory, _settings.STORAGE_CELERY)( + sender, **_kwargs + ) + + +worker_init.connect(worker_init_wrapper) worker_shutdown.connect(on_worker_shutdown) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 9388a33c2fc..7a97c3f484f 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -1001,9 +1001,13 @@ async def with_storage_celery_worker_controller( monkeypatch.setenv("STORAGE_WORKER_MODE", "true") app_settings = ApplicationSettings.create_from_envs() app_factory = partial(create_app, app_settings) - worker_init.connect( - partial(on_worker_init, app_factory, app_settings.STORAGE_CELERY) - ) + + def _on_worker_init_wrapper(sender: Celery, **_kwargs) -> None: + return partial(on_worker_init, app_factory, app_settings.STORAGE_CELERY)( + sender, **_kwargs + ) + + worker_init.connect(_on_worker_init_wrapper) worker_shutdown.connect(on_worker_shutdown) setup_worker_tasks(celery_app) From 836f751450330c2687641357467e6199c8256180 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 19 May 2025 15:54:15 +0200 Subject: [PATCH 12/91] update deps --- packages/celery-library/requirements/_base.in | 2 +- services/storage/requirements/_base.in | 1 - services/storage/requirements/_base.txt | 253 +++++++++++++++++- services/storage/requirements/_test.txt | 7 +- 4 files changed, 250 insertions(+), 13 deletions(-) diff --git a/packages/celery-library/requirements/_base.in b/packages/celery-library/requirements/_base.in index 4fa518fa56a..b29fac51d2c 100644 --- a/packages/celery-library/requirements/_base.in +++ b/packages/celery-library/requirements/_base.in @@ -7,4 +7,4 @@ --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in -celery +celery[redis] diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index 22e8fac0e03..5d33b6a6cfe 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -19,7 +19,6 @@ aioboto3 # s3 storage aiofiles # i/o asgi_lifespan asyncpg # database -celery[redis] httpx opentelemetry-instrumentation-botocore packaging diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index fd9e7abdca0..946dba7e16c 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -1,6 +1,7 @@ aio-pika==9.5.4 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r 
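The two changes above fit together: `register_task` now passes `pydantic=True` to `app.task(...)`, so the hand-rolled `register_pydantic_types` encoders can be dropped — Celery itself (bumped to 5.5.x in this dependency update) validates Pydantic model arguments and serializes model return values. A minimal sketch of that built-in behaviour, with a made-up model and task:

```python
from celery import Celery
from pydantic import BaseModel


class ExportRequest(BaseModel):
    project_id: str
    paths: list[str]


app = Celery()


@app.task(pydantic=True)
def export(request: ExportRequest) -> ExportRequest:
    # the JSON payload coming off the broker is validated into ExportRequest,
    # and the returned model is dumped back to JSON for the result backend
    return request
```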
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aioboto3==14.3.0 # via @@ -12,18 +13,22 @@ aiocache==0.12.3 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aiodebug==2.3.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.24.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in # aioboto3 @@ -43,6 +48,18 @@ aiohttp==3.11.18 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -84,6 +101,9 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -111,8 +131,8 @@ botocore==1.37.3 # s3transfer botocore-stubs==1.36.17 # via types-aiobotocore -celery==5.4.0 - # via -r requirements/_base.in +celery==5.5.2 + # via -r requirements/../../../packages/celery-library/requirements/_base.in certifi==2025.1.31 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -127,6 +147,18 @@ certifi==2025.1.31 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -191,6 +223,7 @@ fastapi-pagination==0.12.34 faststream==0.5.34 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via @@ -226,6 +259,18 @@ httpx==0.28.1 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -268,6 +313,18 @@ jinja2==3.1.5 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -292,11 +349,13 @@ jsonschema==4.23.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -kombu==5.4.2 +kombu==5.5.3 # via celery mako==1.3.9 # via @@ -312,6 +371,18 @@ mako==1.3.9 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -343,6 +414,7 @@ multidict==6.1.0 opentelemetry-api==1.30.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -361,6 +433,7 @@ opentelemetry-api==1.30.0 opentelemetry-exporter-otlp==1.30.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-exporter-otlp-proto-common==1.30.0 # via @@ -395,14 +468,17 @@ opentelemetry-instrumentation-httpx==0.51b0 opentelemetry-instrumentation-logging==0.51b0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.51b0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-requests==0.51b0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore @@ -414,6 +490,7 @@ opentelemetry-proto==1.30.0 opentelemetry-sdk==1.30.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -448,6 +525,18 @@ orjson==3.10.15 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -470,6 +559,14 @@ orjson==3.10.15 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -506,6 +603,7 @@ protobuf==5.29.3 psutil==6.1.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via sqlalchemy @@ -525,6 
+623,18 @@ pydantic==2.10.6 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -551,6 +661,17 @@ pydantic==2.10.6 # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -582,6 +703,14 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -606,6 +735,18 @@ pydantic-settings==2.7.0 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -624,6 +765,10 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -634,6 +779,7 @@ pygments==2.19.1 pyinstrument==5.0.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via @@ -661,6 +807,18 @@ pyyaml==6.0.2 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -676,6 +834,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # fastapi # uvicorn @@ -693,6 +852,18 @@ redis==5.2.1 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -708,6 +879,7 @@ redis==5.2.1 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # celery referencing==0.35.1 @@ -724,6 +896,18 @@ referencing==0.35.1 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -746,6 +930,8 @@ rich==13.9.4 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit @@ -782,6 +968,18 @@ sqlalchemy==1.4.54 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -812,6 +1010,18 @@ starlette==0.45.3 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -831,24 +1041,30 @@ starlette==0.45.3 stream-zip==0.0.83 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in toolz==1.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in typer==0.15.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in @@ -885,10 +1101,8 @@ typing-extensions==4.12.2 # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm -tzdata==2025.1 - # via - # celery - # kombu +tzdata==2025.2 + # via kombu ujson==5.10.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -903,6 +1117,18 @@ ujson==5.10.0 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -932,6 +1158,18 @@ urllib3==2.3.0 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -978,6 +1216,7 @@ wrapt==1.17.2 yarl==1.18.3 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # aio-pika diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index a04ae5b2759..2896f858ff6 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -59,7 +59,7 @@ botocore==1.37.3 # boto3 # moto # s3transfer -celery==5.4.0 +celery==5.5.2 # via # -c requirements/_base.txt # pytest-celery @@ -201,7 +201,7 @@ jsonschema-specifications==2024.10.1 # -c requirements/_base.txt # jsonschema # openapi-schema-validator -kombu==5.4.2 +kombu==5.5.3 # via # -c requirements/_base.txt # celery @@ -412,10 +412,9 @@ typing-extensions==4.12.2 # pydantic # pydantic-core # sqlalchemy2-stubs -tzdata==2025.1 +tzdata==2025.2 # via # -c requirements/_base.txt - # celery # faker # kombu # pandas From 61eb0704e2b0b94fb80468b81f6280bace9698d3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 09:38:27 +0200 Subject: [PATCH 13/91] tests --- packages/celery-library/src/celery_library/client.py | 2 +- packages/celery-library/src/celery_library/task.py | 4 ++-- services/storage/tests/unit/test_async_jobs.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/celery-library/src/celery_library/client.py b/packages/celery-library/src/celery_library/client.py index f68baf558fe..7378b7d8516 100644 --- a/packages/celery-library/src/celery_library/client.py +++ b/packages/celery-library/src/celery_library/client.py @@ -54,7 +54,7 @@ async def submit_task( self._celery_app.send_task( task_metadata.name, task_id=task_id, - kwargs=task_params, + kwargs={"task_id": task_id} | task_params, queue=task_metadata.queue.value, ) diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index 39b9d0e87ee..4bb9f2318c0 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -44,7 +44,7 @@ def _async_task_wrapper( Callable[Concatenate[AbortableTask, P], R], ]: def decorator( - coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + coro: Callable[Concatenate[AbortableTask, P], Coroutine[Any, Any, R]], ) -> Callable[Concatenate[AbortableTask, P], R]: @wraps(coro) def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: @@ -56,7 +56,7 @@ async def run_task(task_id: TaskID) -> R: try: async with asyncio.TaskGroup() as tg: main_task = tg.create_task( - coro(task, task_id, *args, **kwargs), + coro(task, *args, **kwargs), ) async def abort_monitor(): diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 6870080ed21..837f8d154cc 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -94,8 +94,9 @@ async def _process_action(action: str, payload: Any) -> Any: return None -def sync_job(task: Task, action: Action, payload: Any) -> Any: +def sync_job(task: Task, task_id: TaskID, action: Action, payload: Any) -> Any: _ = task + _ = task_id return asyncio.run(_process_action(action, payload)) From 115748e1bafa14db738e561cf840c209b48b4816 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 09:43:21 +0200 Subject: [PATCH 14/91] add py.typed --- packages/celery-library/src/celery_library/py.typed | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 
packages/celery-library/src/celery_library/py.typed diff --git a/packages/celery-library/src/celery_library/py.typed b/packages/celery-library/src/celery_library/py.typed new file mode 100644 index 00000000000..e69de29bb2d From 50983d3006eea7420a8e62e706f83dd2a7970a34 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 09:47:25 +0200 Subject: [PATCH 15/91] add assert --- .../storage/src/simcore_service_storage/core/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py index 6b57eeb7f09..558bf6cbe3e 100644 --- a/services/storage/src/simcore_service_storage/core/application.py +++ b/services/storage/src/simcore_service_storage/core/application.py @@ -87,6 +87,8 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 if not settings.STORAGE_WORKER_MODE: setup_rabbitmq(app) setup_rpc_api_routes(app) + + assert settings.STORAGE_CELERY # nosec setup_celery_client(app, celery_settings=settings.STORAGE_CELERY) setup_rest_api_long_running_tasks_for_uploads(app) setup_rest_api_routes(app, API_VTAG) From 6ea72b163d6c38b833c5b2a6dce6233266bdfc78 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 09:57:10 +0200 Subject: [PATCH 16/91] move tests --- .../celery-library/tests/unit/test_errors.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/storage/tests/unit/test_modules_celery_errors.py => packages/celery-library/tests/unit/test_errors.py (100%) diff --git a/services/storage/tests/unit/test_modules_celery_errors.py b/packages/celery-library/tests/unit/test_errors.py similarity index 100% rename from services/storage/tests/unit/test_modules_celery_errors.py rename to packages/celery-library/tests/unit/test_errors.py From daa3a7a5b9ede5b96934fc749b706df5f472745d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 10:01:06 +0200 Subject: [PATCH 17/91] add celery-library --- .github/codeql/codeql-config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml index adac3b13795..739b7c8a5a6 100644 --- a/.github/codeql/codeql-config.yml +++ b/.github/codeql/codeql-config.yml @@ -4,6 +4,7 @@ disable-default-queries: false paths: - packages/aws-library/src + - packages/celery-library/src - packages/dask-task-models-library/src - packages/models-library/src/models_library - packages/postgres-database/src/simcore_postgres_database From 39856a0b8f7d9a2a27dad986dd0b6df0a2eafdc0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 10:07:55 +0200 Subject: [PATCH 18/91] ci --- .github/workflows/ci-testing-deploy.yml | 51 +++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 1e9a97c49cb..cabd6e15465 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -47,6 +47,7 @@ jobs: # Set job outputs to values from filter step outputs: aws-library: ${{ steps.filter.outputs.aws-library }} + celery-library: ${{ steps.filter.outputs.celery-library }} dask-task-models-library: ${{ steps.filter.outputs.dask-task-models-library }} models-library: ${{ steps.filter.outputs.models-library }} common-library: ${{ steps.filter.outputs.common-library }} @@ -94,6 +95,12 @@ jobs: - 'services/docker-compose*' - 'scripts/mypy/*' - 'mypy.ini' + celery-library: + - 
'packages/celery-library/**' + - 'packages/pytest-simcore/**' + - 'services/docker-compose*' + - 'scripts/mypy/*' + - 'mypy.ini' dask-task-models-library: - 'packages/dask-task-models-library/**' - 'packages/pytest-simcore/**' @@ -1031,6 +1038,49 @@ jobs: with: flags: unittests #optional + unit-test-celery-library: + needs: changes + if: ${{ needs.changes.outputs.celery-library == 'true' || github.event_name == 'push' || github.event.inputs.force_all_builds == 'true' }} + timeout-minutes: 18 # if this timeout gets too small, then split the tests + name: "[unit] celery-library" + runs-on: ${{ matrix.os }} + strategy: + matrix: + python: ["3.11"] + os: [ubuntu-24.04] + fail-fast: false + steps: + - uses: actions/checkout@v4 + - name: setup docker buildx + id: buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + - name: setup python environment + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: install uv + uses: astral-sh/setup-uv@v6 + with: + version: "0.6.x" + enable-cache: false + cache-dependency-glob: "**/celery-library/requirements/ci.txt" + - name: show system version + run: ./ci/helpers/show_system_versions.bash + - name: install + run: ./ci/github/unit-testing/celery-library.bash install + - name: typecheck + run: ./ci/github/unit-testing/celery-library.bash typecheck + - name: test + if: ${{ !cancelled() }} + run: ./ci/github/unit-testing/celery-library.bash test + - uses: codecov/codecov-action@v5 + if: ${{ !cancelled() }} + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + flags: unittests #optional unit-test-dask-task-models-library: needs: changes @@ -1813,6 +1863,7 @@ jobs: unit-test-clusters-keeper, unit-test-dask-sidecar, unit-test-aws-library, + unit-test-celery-library, unit-test-dask-task-models-library, unit-test-datcore-adapter, unit-test-director-v2, From 1630ee9ef72a83ee54ad441d143aa99be938639d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 10:11:10 +0200 Subject: [PATCH 19/91] add celery-library.bash --- ci/github/unit-testing/celery-library.bash | 43 ++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100755 ci/github/unit-testing/celery-library.bash diff --git a/ci/github/unit-testing/celery-library.bash b/ci/github/unit-testing/celery-library.bash new file mode 100755 index 00000000000..a05004c677f --- /dev/null +++ b/ci/github/unit-testing/celery-library.bash @@ -0,0 +1,43 @@ +#!/bin/bash +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +set -o errexit # abort on nonzero exitstatus +set -o nounset # abort on unbound variable +set -o pipefail # don't hide errors within pipes +IFS=$'\n\t' + +install() { + make devenv + # shellcheck source=/dev/null + source .venv/bin/activate + pushd packages/celery-library + make install-ci + popd + uv pip list +} + +test() { + # shellcheck source=/dev/null + source .venv/bin/activate + pushd packages/celery-library + make tests-ci + popd +} + +typecheck() { + # shellcheck source=/dev/null + source .venv/bin/activate + uv pip install mypy + pushd packages/celery-library + make mypy + popd +} + +# Check if the function exists (bash specific) +if declare -f "$1" >/dev/null; then + # call arguments verbatim + "$@" +else + # Show a helpful error + echo "'$1' is not a known function name" >&2 + exit 1 +fi From 757d96accbb430ad76df74917c228fd3091ba911 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 10:16:07 +0200 Subject: [PATCH 20/91] add celery-lib --- .codecov.yml | 3 +++ 
.vscode/settings.template.json | 1 + 2 files changed, 4 insertions(+) diff --git a/.codecov.yml b/.codecov.yml index a3f9e9e6dd0..72735bedb30 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -31,6 +31,9 @@ component_management: - component_id: pkg_aws_library paths: - packages/aws-library/** + - component_id: pkg_celery_library + paths: + - packages/celery-library/** - component_id: pkg_dask_task_models_library paths: - packages/dask-task-models-library/** diff --git a/.vscode/settings.template.json b/.vscode/settings.template.json index 4ebda848845..6d9454625ea 100644 --- a/.vscode/settings.template.json +++ b/.vscode/settings.template.json @@ -34,6 +34,7 @@ "python.analysis.typeCheckingMode": "basic", "python.analysis.extraPaths": [ "./packages/aws-library/src", + "./packages/celery-library/src", "./packages/models-library/src", "./packages/postgres-database/src", "./packages/postgres-database/tests", From 08e887ffe62e7896f0163df4e73621b496b784fa Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 10:22:36 +0200 Subject: [PATCH 21/91] add Makefile --- packages/celery-library/Makefile | 50 ++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 packages/celery-library/Makefile diff --git a/packages/celery-library/Makefile b/packages/celery-library/Makefile new file mode 100644 index 00000000000..04596d3d124 --- /dev/null +++ b/packages/celery-library/Makefile @@ -0,0 +1,50 @@ +# +# Targets for DEVELOPMENT of Celery Library +# +include ../../scripts/common.Makefile +include ../../scripts/common-package.Makefile + +.PHONY: requirements +requirements: ## compiles pip requirements (.in -> .txt) + @$(MAKE_C) requirements reqs + + +.PHONY: install-dev install-prod install-ci +install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode + # installing in $(subst install-,,$@) mode + @uv pip sync requirements/$(subst install-,,$@).txt + + +.PHONY: tests tests-ci +tests: ## runs unit tests + # running unit tests + @pytest \ + --asyncio-mode=auto \ + --color=yes \ + --cov-config=../../.coveragerc \ + --cov-report=term-missing \ + --cov=celery_library \ + --durations=10 \ + --exitfirst \ + --failed-first \ + --pdb \ + -vv \ + $(CURDIR)/tests + +tests-ci: ## runs unit tests + # running unit tests + @pytest \ + --asyncio-mode=auto \ + --color=yes \ + --cov-append \ + --cov-config=../../.coveragerc \ + --cov-report=term-missing \ + --cov-report=xml \ + --junitxml=junit.xml -o junit_family=legacy \ + --cov=celery_library \ + --durations=10 \ + --log-date-format="%Y-%m-%d %H:%M:%S" \ + --log-format="%(asctime)s %(levelname)s %(message)s" \ + --verbose \ + -m "not heavy_load" \ + $(CURDIR)/tests From 5ccbdedcbde1f23f352427b6ea68524f645af67c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 10:28:10 +0200 Subject: [PATCH 22/91] typecheck --- packages/celery-library/requirements/_base.in | 3 ++- packages/celery-library/requirements/_base.txt | 11 +++++++++-- packages/celery-library/requirements/_test.in | 2 +- packages/celery-library/requirements/ci.txt | 2 +- packages/celery-library/requirements/dev.txt | 2 +- packages/celery-library/src/celery_library/task.py | 2 +- 6 files changed, 15 insertions(+), 7 deletions(-) diff --git a/packages/celery-library/requirements/_base.in b/packages/celery-library/requirements/_base.in index b29fac51d2c..27d9cc883b0 100644 --- a/packages/celery-library/requirements/_base.in +++ b/packages/celery-library/requirements/_base.in @@ -1,5 +1,5 @@ # -# Specifies 
third-party dependencies for 'aws-library' +# Specifies third-party dependencies for 'celery-library' # --constraint ../../../requirements/constraints.txt --requirement ../../../packages/common-library/requirements/_base.in @@ -7,4 +7,5 @@ --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in +asgi_lifespan celery[redis] diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index be5b739b431..af8a4cd518a 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -42,6 +42,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/_base.in attrs==25.3.0 # via # aiohttp @@ -283,6 +285,8 @@ pygments==2.19.1 # via rich pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in +pyjwt==2.9.0 + # via redis python-dateutil==2.9.0.post0 # via # arrow @@ -304,7 +308,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in -redis==6.1.0 +redis==5.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -319,6 +323,7 @@ redis==6.1.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in + # celery referencing==0.35.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -351,7 +356,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan stream-zip==0.0.83 # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.1.2 diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index 92c66219160..fdf47680fb2 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -1,5 +1,5 @@ # -# Specifies dependencies required to run 'models-library' +# Specifies dependencies required to run 'celery-library' # --constraint ../../../requirements/constraints.txt diff --git a/packages/celery-library/requirements/ci.txt b/packages/celery-library/requirements/ci.txt index c0623831422..ccbecf3196f 100644 --- a/packages/celery-library/requirements/ci.txt +++ b/packages/celery-library/requirements/ci.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages for the contigous integration (CI) of 'models-library' +# Shortcut to install all packages for the contigous integration (CI) of 'celery-library' # # - As ci.txt but w/ tests # diff --git a/packages/celery-library/requirements/dev.txt b/packages/celery-library/requirements/dev.txt index 34cc644b370..115e5cfb20d 100644 --- 
a/packages/celery-library/requirements/dev.txt +++ b/packages/celery-library/requirements/dev.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages needed to develop 'models-library' +# Shortcut to install all packages needed to develop 'celery-library' # # - As ci.txt but with current and repo packages in develop (edit) mode # diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index 4bb9f2318c0..aa3358d7c4a 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -40,7 +40,7 @@ class TaskAbortedError(Exception): ... def _async_task_wrapper( app: Celery, ) -> Callable[ - [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], + [Callable[Concatenate[AbortableTask, P], Coroutine[Any, Any, R]]], Callable[Concatenate[AbortableTask, P], R], ]: def decorator( From 69d24cafd0a1bf9a436379396ba7bc8030e7a6e1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 11:21:51 +0200 Subject: [PATCH 23/91] fix tests --- .../src/celery_library/client.py | 2 +- .../celery-library/src/celery_library/task.py | 7 +++--- .../src/celery_library/types.py | 23 ++++++++++++++++++- .../api/_worker_tasks/tasks.py | 9 +++++++- 4 files changed, 34 insertions(+), 7 deletions(-) diff --git a/packages/celery-library/src/celery_library/client.py b/packages/celery-library/src/celery_library/client.py index 7378b7d8516..f68baf558fe 100644 --- a/packages/celery-library/src/celery_library/client.py +++ b/packages/celery-library/src/celery_library/client.py @@ -54,7 +54,7 @@ async def submit_task( self._celery_app.send_task( task_metadata.name, task_id=task_id, - kwargs={"task_id": task_id} | task_params, + kwargs=task_params, queue=task_metadata.queue.value, ) diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index aa3358d7c4a..e367a3a73da 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -40,11 +40,11 @@ class TaskAbortedError(Exception): ... 
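This hunk changes which arguments the async task wrapper forwards to the wrapped coroutine; for orientation, a minimal sketch of a task coroutine matching the resulting shape, with the bound Celery task first, then the task id, then the task's own parameters (the function name and body are illustrative, not taken from the patches):

```python
from celery.contrib.abortable import AbortableTask


async def resize_images(task: AbortableTask, task_id: str, paths: list[str]) -> int:
    # `task_id` is assumed to be the library's TaskID alias (a str-like identifier);
    # it is available for progress reporting or log correlation.
    return len(paths)
```

Once registered through `register_task(app, resize_images)`, the surrounding wrapper runs the coroutine and aborts it if the Celery task is cancelled, as the body of this hunk shows.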
def _async_task_wrapper( app: Celery, ) -> Callable[ - [Callable[Concatenate[AbortableTask, P], Coroutine[Any, Any, R]]], + [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], Callable[Concatenate[AbortableTask, P], R], ]: def decorator( - coro: Callable[Concatenate[AbortableTask, P], Coroutine[Any, Any, R]], + coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], ) -> Callable[Concatenate[AbortableTask, P], R]: @wraps(coro) def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: @@ -56,7 +56,7 @@ async def run_task(task_id: TaskID) -> R: try: async with asyncio.TaskGroup() as tg: main_task = tg.create_task( - coro(task, *args, **kwargs), + coro(task, task_id, *args, **kwargs), ) async def abort_monitor(): @@ -205,5 +205,4 @@ def register_task( # type: ignore[misc] bind=True, base=AbortableTask, time_limit=None if timeout is None else timeout.total_seconds(), - pydantic=True, )(wrapped_fn) diff --git a/packages/celery-library/src/celery_library/types.py b/packages/celery-library/src/celery_library/types.py index c7c2f0be1b8..78e92783612 100644 --- a/packages/celery-library/src/celery_library/types.py +++ b/packages/celery-library/src/celery_library/types.py @@ -1,6 +1,9 @@ +from functools import partial from pathlib import Path +from typing import Any -from kombu.utils.json import register_type # type: ignore[import-untyped] +from kombu.utils.json import register_type +from pydantic import BaseModel # type: ignore[import-untyped] def _path_encoder(obj): @@ -20,6 +23,14 @@ def _class_full_name(clz: type) -> str: return ".".join([clz.__module__, clz.__qualname__]) +def _pydantic_model_encoder(obj: BaseModel, *args, **kwargs) -> dict[str, Any]: + return obj.model_dump(*args, **kwargs, mode="json") + + +def _pydantic_model_decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseModel: + return clz(**data) + + def register_celery_types() -> None: register_type( Path, @@ -28,3 +39,13 @@ def register_celery_types() -> None: _path_decoder, ) register_type(set, _class_full_name(set), encoder=list, decoder=set) + + +def register_pydantic_types(*models: type[BaseModel]) -> None: + for model in models: + register_type( + model, + _class_full_name(model), + encoder=_pydantic_model_encoder, + decoder=partial(_pydantic_model_decoder, model), + ) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py index b8dc131c98f..d6582bc0824 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -2,10 +2,15 @@ from celery import Celery # type: ignore[import-untyped] from celery_library.task import register_task -from celery_library.types import register_celery_types +from celery_library.types import register_celery_types, register_pydantic_types from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) from servicelib.logging_utils import log_context +from ...models import FileMetaData from ._files import complete_upload_file from ._paths import compute_path_size, delete_paths from ._simcore_s3 import deep_copy_files_from_project, export_data @@ -16,6 +21,8 @@ def setup_worker_tasks(app: Celery) -> None: register_celery_types() + register_pydantic_types(FileUploadCompletionBody, FileMetaData, 
FoldersBody) + with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, export_data, dont_autoretry_for=(AccessRightError,)) register_task(app, compute_path_size) From da2bf5346d73e119dec8dedb94cc819fb74b0d63 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 11:27:01 +0200 Subject: [PATCH 24/91] typecheck --- packages/celery-library/src/celery_library/types.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/celery-library/src/celery_library/types.py b/packages/celery-library/src/celery_library/types.py index 78e92783612..bbd04eabc56 100644 --- a/packages/celery-library/src/celery_library/types.py +++ b/packages/celery-library/src/celery_library/types.py @@ -2,8 +2,8 @@ from pathlib import Path from typing import Any -from kombu.utils.json import register_type -from pydantic import BaseModel # type: ignore[import-untyped] +from kombu.utils.json import register_type # type: ignore[import-untyped] +from pydantic import BaseModel def _path_encoder(obj): From ab48cea174614a3412bf7332a8ae83bfc100727f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 11:33:08 +0200 Subject: [PATCH 25/91] revert --- services/storage/tests/unit/test_async_jobs.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 837f8d154cc..6870080ed21 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -94,9 +94,8 @@ async def _process_action(action: str, payload: Any) -> Any: return None -def sync_job(task: Task, task_id: TaskID, action: Action, payload: Any) -> Any: +def sync_job(task: Task, action: Action, payload: Any) -> Any: _ = task - _ = task_id return asyncio.run(_process_action(action, payload)) From fa25e67cb9d67891ef34d225204a703e4c80b9d9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 13:48:37 +0200 Subject: [PATCH 26/91] add ref --- packages/celery-library/README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/celery-library/README.md b/packages/celery-library/README.md index bab2c37cc61..b64223cfcc6 100644 --- a/packages/celery-library/README.md +++ b/packages/celery-library/README.md @@ -1,6 +1,6 @@ # simcore Celery library -Provides a wrapper around Celery library. +Provides a wrapper around Celery library [1]. 
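A minimal usage sketch, assuming only the helpers introduced earlier in this series (`register_task`, `register_celery_types`, `register_pydantic_types`); the model and task below are invented for illustration:

```python
from celery import Celery
from celery_library.task import register_task
from celery_library.types import register_celery_types, register_pydantic_types
from pydantic import BaseModel


class ExportResult(BaseModel):
    # hypothetical result model, registered so it survives the JSON broker round-trip
    download_link: str


async def export_data(task, task_id, *, project_id: str) -> ExportResult:
    # hypothetical task body
    return ExportResult(download_link=f"s3://exports/{project_id}.zip")


def setup_worker_tasks(app: Celery) -> None:
    register_celery_types()
    register_pydantic_types(ExportResult)
    register_task(app, export_data)
```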
## Installation @@ -15,3 +15,6 @@ make install-dev make help make test-dev ``` + + +[1] https://github.com/celery/celery From 74d77b4209d1a4c3864779365653bd0be10abce1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 14:04:20 +0200 Subject: [PATCH 27/91] add type hint --- packages/celery-library/src/celery_library/signals.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 0c427180da3..e093c7c255d 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -2,13 +2,16 @@ import datetime import logging import threading +from collections.abc import Callable from typing import Final from asgi_lifespan import LifespanManager from celery import Celery # type: ignore[import-untyped] +from celery.worker.worker import WorkController from fastapi import FastAPI from servicelib.logging_utils import log_context from servicelib.redis._client import RedisClientSDK +from settings_library.celery import CelerySettings from settings_library.redis import RedisDatabase from . import set_event_loop @@ -27,9 +30,9 @@ def on_worker_init( - app_factory, - celery_settings, - sender, + app_factory: Callable[[], FastAPI], + celery_settings: CelerySettings, + sender: WorkController, **_kwargs, ) -> None: startup_complete_event = threading.Event() @@ -50,6 +53,8 @@ async def setup_task_worker(): client_name=f"{fastapi_app.title}.celery_tasks", ) + assert sender.app # nosec + assert isinstance(sender.app, Celery) # nosec set_celery_worker( sender.app, CeleryTaskWorker( From 43fd068115fcf98d6de85da61d1de112c9114350 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 14:08:23 +0200 Subject: [PATCH 28/91] typecheck --- services/storage/tests/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 7a97c3f484f..94b644f22fb 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -25,6 +25,7 @@ from celery import Celery from celery.contrib.testing.worker import TestWorkController, start_worker from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController from celery_library.signals import on_worker_init, on_worker_shutdown from celery_library.utils import get_celery_worker from celery_library.worker import CeleryTaskWorker @@ -1002,7 +1003,8 @@ async def with_storage_celery_worker_controller( app_settings = ApplicationSettings.create_from_envs() app_factory = partial(create_app, app_settings) - def _on_worker_init_wrapper(sender: Celery, **_kwargs) -> None: + def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: + assert app_settings.STORAGE_CELERY # nosec return partial(on_worker_init, app_factory, app_settings.STORAGE_CELERY)( sender, **_kwargs ) From 6de9a838c12c48322e094e3511bc23754e6ecf9b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 14:12:23 +0200 Subject: [PATCH 29/91] typecheck --- packages/celery-library/src/celery_library/signals.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index e093c7c255d..2fe822749a5 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ 
b/packages/celery-library/src/celery_library/signals.py @@ -7,7 +7,7 @@ from asgi_lifespan import LifespanManager from celery import Celery # type: ignore[import-untyped] -from celery.worker.worker import WorkController +from celery.worker.worker import WorkController # type: ignore[import-untyped] from fastapi import FastAPI from servicelib.logging_utils import log_context from servicelib.redis._client import RedisClientSDK From 2405161486f3d48b9c651f3fcabfbfc402cfad0b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 16:18:34 +0200 Subject: [PATCH 30/91] remove pydantic --- packages/celery-library/src/celery_library/client.py | 2 +- packages/celery-library/src/celery_library/task.py | 7 ++++--- .../simcore_service_storage/api/_worker_tasks/tasks.py | 9 +-------- .../src/simcore_service_storage/api/rest/_files.py | 5 +++-- 4 files changed, 9 insertions(+), 14 deletions(-) diff --git a/packages/celery-library/src/celery_library/client.py b/packages/celery-library/src/celery_library/client.py index f68baf558fe..7378b7d8516 100644 --- a/packages/celery-library/src/celery_library/client.py +++ b/packages/celery-library/src/celery_library/client.py @@ -54,7 +54,7 @@ async def submit_task( self._celery_app.send_task( task_metadata.name, task_id=task_id, - kwargs=task_params, + kwargs={"task_id": task_id} | task_params, queue=task_metadata.queue.value, ) diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index e367a3a73da..aa3358d7c4a 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -40,11 +40,11 @@ class TaskAbortedError(Exception): ... def _async_task_wrapper( app: Celery, ) -> Callable[ - [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], + [Callable[Concatenate[AbortableTask, P], Coroutine[Any, Any, R]]], Callable[Concatenate[AbortableTask, P], R], ]: def decorator( - coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + coro: Callable[Concatenate[AbortableTask, P], Coroutine[Any, Any, R]], ) -> Callable[Concatenate[AbortableTask, P], R]: @wraps(coro) def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: @@ -56,7 +56,7 @@ async def run_task(task_id: TaskID) -> R: try: async with asyncio.TaskGroup() as tg: main_task = tg.create_task( - coro(task, task_id, *args, **kwargs), + coro(task, *args, **kwargs), ) async def abort_monitor(): @@ -205,4 +205,5 @@ def register_task( # type: ignore[misc] bind=True, base=AbortableTask, time_limit=None if timeout is None else timeout.total_seconds(), + pydantic=True, )(wrapped_fn) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py index d6582bc0824..b8dc131c98f 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -2,15 +2,10 @@ from celery import Celery # type: ignore[import-untyped] from celery_library.task import register_task -from celery_library.types import register_celery_types, register_pydantic_types +from celery_library.types import register_celery_types from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError -from models_library.api_schemas_storage.storage_schemas import ( - FileUploadCompletionBody, - FoldersBody, -) from servicelib.logging_utils import log_context -from 
...models import FileMetaData from ._files import complete_upload_file from ._paths import compute_path_size, delete_paths from ._simcore_s3 import deep_copy_files_from_project, export_data @@ -21,8 +16,6 @@ def setup_worker_tasks(app: Celery) -> None: register_celery_types() - register_pydantic_types(FileUploadCompletionBody, FileMetaData, FoldersBody) - with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, export_data, dont_autoretry_for=(AccessRightError,)) register_task(app, compute_path_size) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 91ffc3fd864..91b4c9c268a 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -292,7 +292,7 @@ async def complete_upload_file( user_id=async_job_name_data.user_id, location_id=location_id, file_id=file_id, - body=body, + body=body.model_dump(), ) route = ( @@ -353,7 +353,8 @@ async def is_completed_upload_file( assert new_fmd.location_id == location_id # nosec assert new_fmd.file_id == file_id # nosec response = FileUploadCompleteFutureResponse( - state=FileUploadCompleteState.OK, e_tag=new_fmd.entity_tag + state=FileUploadCompleteState.OK, + e_tag=FileMetaData.model_validate(new_fmd).entity_tag, ) else: # the task is still running From 9d234de7f1c7f80194b42f4258735b61cb9e6ce4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 16:25:08 +0200 Subject: [PATCH 31/91] add task_id --- services/storage/tests/unit/test_async_jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 6870080ed21..837f8d154cc 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -94,8 +94,9 @@ async def _process_action(action: str, payload: Any) -> Any: return None -def sync_job(task: Task, action: Action, payload: Any) -> Any: +def sync_job(task: Task, task_id: TaskID, action: Action, payload: Any) -> Any: _ = task + _ = task_id return asyncio.run(_process_action(action, payload)) From 4589ae1c918954d5e04b35fd3f44bb2f9361ef6f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 19:42:05 +0200 Subject: [PATCH 32/91] fix body --- .../storage/src/simcore_service_storage/api/rpc/_simcore_s3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index 52724a1da23..1e1f9a2c900 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -27,7 +27,7 @@ async def copy_folders_from_project( ), task_context=job_id_data.model_dump(), user_id=job_id_data.user_id, - body=body, + body=body.model_dump(), ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) From 490ba71a9450fa7fb999078c9d675c84350068e8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 20:36:55 +0200 Subject: [PATCH 33/91] assert --- .../storage/src/simcore_service_storage/api/rest/_files.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 
91b4c9c268a..1020d6a510f 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -348,7 +348,9 @@ async def is_completed_upload_file( task_result = await celery_client.get_task_result( task_context=async_job_name_data.model_dump(), task_uuid=TaskUUID(future_id) ) - assert isinstance(task_result, FileMetaData), f"{task_result=}" # nosec + assert TypeAdapter(FileMetaData).validate_python( + task_result + ), f"{task_result=}" # nosec new_fmd = task_result assert new_fmd.location_id == location_id # nosec assert new_fmd.file_id == file_id # nosec From 147753636e7492a3278da0575158147a2b7f4a62 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 20:47:46 +0200 Subject: [PATCH 34/91] fix return --- .../src/simcore_service_storage/api/rest/_files.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 1020d6a510f..83f41bf76dc 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -345,12 +345,12 @@ async def is_completed_upload_file( ) # first check if the task is in the app if task_status.is_done: - task_result = await celery_client.get_task_result( - task_context=async_job_name_data.model_dump(), task_uuid=TaskUUID(future_id) + task_result = TypeAdapter(FileMetaData).validate_python( + await celery_client.get_task_result( + task_context=async_job_name_data.model_dump(), + task_uuid=TaskUUID(future_id), + ) ) - assert TypeAdapter(FileMetaData).validate_python( - task_result - ), f"{task_result=}" # nosec new_fmd = task_result assert new_fmd.location_id == location_id # nosec assert new_fmd.file_id == file_id # nosec From c0803445c849c1a74adde05cb3ecf47b57e42e0a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 21:50:58 +0200 Subject: [PATCH 35/91] fix modeldump --- .../storage/src/simcore_service_storage/api/rpc/_simcore_s3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index 1e1f9a2c900..18d24cec0f1 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -27,7 +27,7 @@ async def copy_folders_from_project( ), task_context=job_id_data.model_dump(), user_id=job_id_data.user_id, - body=body.model_dump(), + body=body.model_dump(mode="json"), ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) From 0fc9137c97f08df8c4238c1d30f94868d44ef4ce Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 20 May 2025 22:17:22 +0200 Subject: [PATCH 36/91] add params --- services/storage/tests/unit/test_modules_celery.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index 943f97a3051..5d57838bb9d 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ b/services/storage/tests/unit/test_modules_celery.py @@ -67,7 +67,8 @@ def sleep_for(seconds: float) -> None: return "archive.zip" -def fake_file_processor(task: Task, files: list[str]) -> str: +def fake_file_processor(task: Task, task_id: TaskID, files: list[str]) -> str: + _ = 
task_id assert task.name _logger.info("Calling _fake_file_processor") return asyncio.run_coroutine_threadsafe( @@ -80,7 +81,8 @@ class MyError(OsparcErrorMixin, Exception): msg_template = "Something strange happened: {msg}" -def failure_task(task: Task): +def failure_task(task: Task, task_id: TaskID) -> None: + _ = task_id assert task msg = "BOOM!" raise MyError(msg=msg) From 58e7fa1e985c79e4c7535dc104354f38f17c0a42 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 27 May 2025 08:51:51 +0200 Subject: [PATCH 37/91] start adding tests to celery module --- .../celery-library/requirements/_base.txt | 27 +++++++++------ packages/celery-library/requirements/_test.in | 1 + .../celery-library/requirements/_test.txt | 32 ++++++++++++----- .../celery-library/requirements/_tools.txt | 8 ++--- .../celery-library/tests/unit/conftest.py | 0 .../tests/unit/test_async_jobs.py | 34 +++++++++++-------- .../tests/unit/test_modules_celery.py | 0 7 files changed, 65 insertions(+), 37 deletions(-) create mode 100644 packages/celery-library/tests/unit/conftest.py rename {services/storage => packages/celery-library}/tests/unit/test_async_jobs.py (93%) rename {services/storage => packages/celery-library}/tests/unit/test_modules_celery.py (100%) diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index af8a4cd518a..d52f0e54eb9 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -70,7 +70,7 @@ certifi==2025.4.26 # requests charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # celery # click-didyoumean @@ -97,7 +97,7 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.41 +faststream==0.5.42 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.6.0 # via @@ -117,7 +117,7 @@ idna==3.10 # yarl importlib-metadata==8.6.1 # via opentelemetry-api -jsonschema==4.23.0 +jsonschema==4.24.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in @@ -129,7 +129,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.3 +multidict==6.4.4 # via # aiohttp # yarl @@ -139,6 +139,7 @@ opentelemetry-api==1.33.1 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests @@ -156,9 +157,12 @@ opentelemetry-exporter-otlp-proto-http==1.33.1 # via opentelemetry-exporter-otlp opentelemetry-instrumentation==0.54b1 # via + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-aio-pika==0.54b1 + # via -r 
requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-logging==0.54b1 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.54b1 @@ -223,7 +227,7 @@ psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.4 +pydantic==2.11.5 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -283,7 +287,7 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 # via rich -pyinstrument==5.0.1 +pyinstrument==5.0.2 # via -r requirements/../../../packages/service-library/requirements/_base.in pyjwt==2.9.0 # via redis @@ -347,7 +351,7 @@ rich==14.0.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.25.0 +rpds-py==0.25.1 # via # jsonschema # referencing @@ -367,7 +371,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.4 +typer==0.16.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -385,7 +389,7 @@ typing-extensions==4.13.2 # pydantic-extra-types # typer # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via pydantic tzdata==2025.2 # via kombu @@ -415,6 +419,7 @@ wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis yarl==1.20.0 # via @@ -422,5 +427,5 @@ yarl==1.20.0 # aio-pika # aiohttp # aiormq -zipp==3.21.0 +zipp==3.22.0 # via importlib-metadata diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index fdf47680fb2..4b93bbbdd78 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -12,6 +12,7 @@ coverage faker fastapi +docker httpx pint pytest diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 1bf99eeff9f..bf475d637ea 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -13,10 +13,17 @@ certifi==2025.4.26 # -c requirements/_base.txt # httpcore # httpx -coverage==7.8.0 + # requests +charset-normalizer==3.4.2 + # via + # -c requirements/_base.txt + # requests +coverage==7.8.2 # via # -r requirements/_test.in # pytest-cov +docker==7.1.0 + # via -r requirements/_test.in faker==37.3.0 # via -r requirements/_test.in fastapi==0.115.12 @@ -40,6 +47,7 @@ idna==3.10 # -c requirements/_base.txt # anyio # httpx + # requests iniconfig==2.1.0 # via pytest packaging==25.0 @@ -57,7 +65,7 @@ pprintpp==0.4.0 # via pytest-icdiff py-cpuinfo==9.0.0 # via pytest-benchmark -pydantic==2.11.4 +pydantic==2.11.5 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -76,10 +84,8 @@ pytest==8.3.5 # pytest-instafail # 
pytest-mock # pytest-sugar -pytest-asyncio==0.23.8 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +pytest-asyncio==1.0.0 + # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.1.1 @@ -88,7 +94,7 @@ pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -103,6 +109,10 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in +requests==2.32.3 + # via + # -c requirements/_base.txt + # docker sniffio==1.3.1 # via # -c requirements/_base.txt @@ -124,7 +134,7 @@ typing-extensions==4.13.2 # pydantic # pydantic-core # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via # -c requirements/_base.txt # pydantic @@ -132,3 +142,9 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker +urllib3==2.4.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # docker + # requests diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index e0213f1353c..d90c0074c58 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -19,7 +19,7 @@ distlib==0.3.9 # via virtualenv filelock==3.18.0 # via virtualenv -identify==2.6.10 +identify==2.6.12 # via pre-commit isort==6.0.1 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.11.10 +ruff==0.11.11 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/storage/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py similarity index 93% rename from services/storage/tests/unit/test_async_jobs.py rename to packages/celery-library/tests/unit/test_async_jobs.py index 837f8d154cc..63562140064 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Callable +from collections.abc import Awaitable, Callable from datetime import timedelta from enum import Enum from typing import Any @@ -31,7 +31,6 @@ from models_library.users import UserID from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs -from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server from tenacity import ( AsyncRetrying, retry_if_exception_type, @@ -44,6 +43,12 @@ "postgres", ] +pytest_plugins = [ + "pytest_simcore.rabbit_service", + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", + "pytest_simcore.repository_paths", +] ###### RPC Interface ###### router = RPCRouter() @@ -110,9 +115,12 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture -async def 
register_rpc_routes(initialized_app: FastAPI) -> None: - rpc_server = get_rabbitmq_rpc_server(initialized_app) - await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, initialized_app) +async def rpc_client( + rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + client = await rpc_client("celery_test_client") + await client.register_router(router, STORAGE_RPC_NAMESPACE) + return client async def _start_task_via_rpc( @@ -200,9 +208,7 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( - initialized_app: FastAPI, - register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, @@ -210,7 +216,7 @@ async def test_async_jobs_workflow( payload: Any, ): async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -219,7 +225,7 @@ async def test_async_jobs_workflow( ) jobs = await async_jobs.list_jobs( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, filter_="", # currently not used job_id_data=job_id_data, @@ -227,13 +233,13 @@ async def test_async_jobs_workflow( assert len(jobs) > 0 await _wait_for_job( - storage_rabbitmq_rpc_client, + rpc_client, async_job_get=async_job_get, job_id_data=job_id_data, ) async_job_result = await async_jobs.result( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, @@ -248,7 +254,7 @@ async def test_async_jobs_workflow( ], ) async def test_async_jobs_cancel( - initialized_app: FastAPI, + # initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, @@ -313,7 +319,7 @@ async def test_async_jobs_cancel( ], ) async def test_async_jobs_raises( - initialized_app: FastAPI, + # initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, diff --git a/services/storage/tests/unit/test_modules_celery.py b/packages/celery-library/tests/unit/test_modules_celery.py similarity index 100% rename from services/storage/tests/unit/test_modules_celery.py rename to packages/celery-library/tests/unit/test_modules_celery.py From c8d9b01cc095dd10f3f1383c2116a3da07655610 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 27 May 2025 08:53:43 +0200 Subject: [PATCH 38/91] create fixture for creating rabbitmq rpc server --- packages/celery-library/tests/unit/test_async_jobs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index 63562140064..cbeb20854ef 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -116,9 +116,9 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture async def rpc_client( - rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], ) -> RabbitMQRPCClient: - client = await rpc_client("celery_test_client") + client = await rabbitmq_rpc_client("celery_test_client") await client.register_router(router, STORAGE_RPC_NAMESPACE) return client From 
07ee77ad6b6690d92541da6e6b80e3aa576e9bf2 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 27 May 2025 11:25:34 +0200 Subject: [PATCH 39/91] make sure celery tasks actually run --- packages/celery-library/requirements/_test.in | 2 + .../celery-library/requirements/_test.txt | 94 +++++++++++- .../celery-library/requirements/_tools.txt | 5 +- .../src/celery_library/routes/rpc.py | 0 .../celery-library/tests/unit/conftest.py | 144 ++++++++++++++++++ .../tests/unit/test_async_jobs.py | 47 +++--- 6 files changed, 269 insertions(+), 23 deletions(-) rename services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py => packages/celery-library/src/celery_library/routes/rpc.py (100%) diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index 4b93bbbdd78..31bb61caf01 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -11,6 +11,7 @@ # testing coverage faker +fakeredis fastapi docker httpx @@ -18,6 +19,7 @@ pint pytest pytest-asyncio pytest-benchmark +pytest-celery pytest-cov pytest-icdiff pytest-instafail diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index bf475d637ea..843d2fc02a8 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -1,3 +1,7 @@ +amqp==5.3.1 + # via + # -c requirements/_base.txt + # kombu annotated-types==0.7.0 # via # -c requirements/_base.txt @@ -7,6 +11,14 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx # starlette +billiard==4.2.1 + # via + # -c requirements/_base.txt + # celery +celery==5.5.2 + # via + # -c requirements/_base.txt + # pytest-celery certifi==2025.4.26 # via # -c requirements/../../../requirements/constraints.txt @@ -18,14 +30,40 @@ charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests +click==8.2.1 + # via + # -c requirements/_base.txt + # celery + # click-didyoumean + # click-plugins + # click-repl +click-didyoumean==0.3.1 + # via + # -c requirements/_base.txt + # celery +click-plugins==1.1.1 + # via + # -c requirements/_base.txt + # celery +click-repl==0.3.0 + # via + # -c requirements/_base.txt + # celery coverage==7.8.2 # via # -r requirements/_test.in # pytest-cov +debugpy==1.8.14 + # via pytest-celery docker==7.1.0 - # via -r requirements/_test.in + # via + # -r requirements/_test.in + # pytest-celery + # pytest-docker-tools faker==37.3.0 # via -r requirements/_test.in +fakeredis==2.29.0 + # via -r requirements/_test.in fastapi==0.115.12 # via -r requirements/_test.in flexcache==0.3 @@ -50,6 +88,11 @@ idna==3.10 # requests iniconfig==2.1.0 # via pytest +kombu==5.5.3 + # via + # -c requirements/_base.txt + # celery + # pytest-celery packaging==25.0 # via # -c requirements/_base.txt @@ -63,6 +106,14 @@ pluggy==1.6.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff +prompt-toolkit==3.0.51 + # via + # -c requirements/_base.txt + # click-repl +psutil==7.0.0 + # via + # -c requirements/_base.txt + # pytest-celery py-cpuinfo==9.0.0 # via pytest-benchmark pydantic==2.11.5 @@ -74,12 +125,17 @@ pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pyjwt==2.9.0 + # via + # -c requirements/_base.txt + # redis pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio # pytest-benchmark # pytest-cov + # pytest-docker-tools # pytest-icdiff # pytest-instafail # pytest-mock @@ -88,8 +144,12 @@ pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r 
requirements/_test.in +pytest-celery==1.2.0 + # via -r requirements/_test.in pytest-cov==6.1.1 # via -r requirements/_test.in +pytest-docker-tools==3.1.9 + # via pytest-celery pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 @@ -100,6 +160,10 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via + # -c requirements/_base.txt + # celery python-dotenv==1.1.0 # via # -c requirements/_base.txt @@ -109,18 +173,35 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in +redis==5.3.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis requests==2.32.3 # via # -c requirements/_base.txt # docker +setuptools==80.9.0 + # via pytest-celery +six==1.17.0 + # via + # -c requirements/_base.txt + # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt # anyio +sortedcontainers==2.4.0 + # via fakeredis starlette==0.46.2 # via # -c requirements/../../../requirements/constraints.txt # fastapi +tenacity==9.1.2 + # via + # -c requirements/_base.txt + # pytest-celery termcolor==3.1.0 # via pytest-sugar typing-extensions==4.13.2 @@ -142,9 +223,20 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker + # kombu urllib3==2.4.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # docker # requests +vine==5.1.0 + # via + # -c requirements/_base.txt + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via + # -c requirements/_base.txt + # prompt-toolkit diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index d90c0074c58..33274e8afd6 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -11,6 +11,7 @@ cfgv==3.4.0 click==8.2.1 # via # -c requirements/_base.txt + # -c requirements/_test.txt # black # pip-tools dill==0.4.0 @@ -70,7 +71,9 @@ pyyaml==6.0.2 ruff==0.11.11 # via -r requirements/../../../requirements/devenv.txt setuptools==80.9.0 - # via pip-tools + # via + # -c requirements/_test.txt + # pip-tools tomlkit==0.13.2 # via pylint typing-extensions==4.13.2 diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/packages/celery-library/src/celery_library/routes/rpc.py similarity index 100% rename from services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py rename to packages/celery-library/src/celery_library/routes/rpc.py diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py index e69de29bb2d..ba6b5bdc5e8 100644 --- a/packages/celery-library/tests/unit/conftest.py +++ b/packages/celery-library/tests/unit/conftest.py @@ -0,0 +1,144 @@ +from collections.abc import AsyncIterator, Awaitable, Callable +from functools import partial +from typing import Final + +import pytest +from asgi_lifespan import LifespanManager +from celery import Celery +from celery.contrib.testing.worker import TestWorkController, start_worker +from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController +from celery_library import setup_celery_client +from celery_library.routes.rpc import router as async_jobs_router +from celery_library.signals import on_worker_init, on_worker_shutdown +from celery_library.utils import get_celery_worker +from celery_library.worker import CeleryTaskWorker +from 
faker import Faker +from fastapi import FastAPI +from models_library.products import ProductName +from models_library.rabbitmq_basic_types import RPCNamespace +from models_library.users import UserID +from pydantic import TypeAdapter +from servicelib.rabbitmq import RabbitMQRPCClient +from settings_library.celery import CelerySettings +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings + +pytest_simcore_core_services_selection = [ + "rabbit", + "redis", + "postgres", +] + +pytest_plugins = [ + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", + "pytest_simcore.rabbit_service", + "pytest_simcore.redis_service", + "pytest_simcore.repository_paths", +] + +_LIFESPAN_TIMEOUT: Final[int] = 10 + + +@pytest.fixture +def rpc_namespace() -> RPCNamespace: + return TypeAdapter(RPCNamespace).validate_python("test") + + +@pytest.fixture +def celery_settings( + rabbit_service: RabbitSettings, + redis_service: RedisSettings, +) -> CelerySettings: + return CelerySettings.create_from_envs() + + +@pytest.fixture +async def initialized_fast_api( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], + celery_settings: CelerySettings, + rpc_namespace: RPCNamespace, +) -> AsyncIterator[FastAPI]: + app = FastAPI( + title="master_fastapi_app", + description="Service that manages osparc storage backend", + version="0.0.0", + ) + + setup_celery_client(app, celery_settings=celery_settings) + rpc_client = await rabbitmq_rpc_client("celery_test_client") + app.state.rabbitmq_rpc_client = rpc_client + + async def startup() -> None: + rpc_server = app.state.rabbitmq_rpc_client + assert isinstance(rpc_server, RabbitMQRPCClient) + await rpc_server.register_router(async_jobs_router, rpc_namespace, app) + + app.add_event_handler("startup", startup) + + async with LifespanManager( + app, startup_timeout=_LIFESPAN_TIMEOUT, shutdown_timeout=_LIFESPAN_TIMEOUT + ): + yield app + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + """override if tasks are needed""" + + def _(celery_app: Celery) -> None: ... 
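The `register_celery_tasks` hook defined here is a no-op by default; a test module is expected to override it to register the tasks it exercises, along these lines (the task name and body are illustrative, not taken from the patches):

```python
import pytest
from celery import Celery
from celery.contrib.abortable import AbortableTask
from celery_library.task import register_task


async def echo(task: AbortableTask, task_id: str, payload: str) -> str:
    # trivial illustrative task: returns its payload unchanged
    return payload


@pytest.fixture
def register_celery_tasks():
    def _(celery_app: Celery) -> None:
        register_task(celery_app, echo)

    return _
```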
+ + return _ + + +@pytest.fixture +async def celery_worker_controller( + celery_settings: CelerySettings, + celery_app: Celery, + register_celery_tasks: Callable[[Celery], None], +) -> AsyncIterator[TestWorkController]: + + def _create_app() -> FastAPI: + + return FastAPI( + title="worker_fastapi_app", + description="Test application for celery_library", + version="0.0.0", + ) + + def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: + return partial(on_worker_init, _create_app, celery_settings)(sender, **_kwargs) + + worker_init.connect(_on_worker_init_wrapper) + worker_shutdown.connect(on_worker_shutdown) + + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + pool="threads", + concurrency=1, + loglevel="info", + perform_ping_check=False, + queues="default,cpu_bound", + ) as worker: + yield worker + + +@pytest.fixture +def with_storage_celery_worker( + celery_worker_controller: TestWorkController, +) -> CeleryTaskWorker: + assert isinstance(celery_worker_controller.app, Celery) + return get_celery_worker(celery_worker_controller.app) + + +@pytest.fixture +def user_id(faker: Faker) -> UserID: + return TypeAdapter(UserID).validate_python(faker.pyint(min_value=1, max_value=1000)) + + +@pytest.fixture +def product_name() -> ProductName: + return TypeAdapter(ProductName).validate_python("pytest-product") diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index cbeb20854ef..99990ca4234 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Awaitable, Callable +from collections.abc import Callable from datetime import timedelta from enum import Enum from typing import Any @@ -24,10 +24,9 @@ JobAbortedError, JobError, ) -from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from models_library.users import UserID from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs @@ -40,16 +39,10 @@ pytest_simcore_core_services_selection = [ "rabbit", + "redis", "postgres", ] -pytest_plugins = [ - "pytest_simcore.rabbit_service", - "pytest_simcore.docker_compose", - "pytest_simcore.docker_swarm", - "pytest_simcore.repository_paths", -] - ###### RPC Interface ###### router = RPCRouter() @@ -116,16 +109,18 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture async def rpc_client( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], + initialized_fast_api: FastAPI, rpc_namespace: RPCNamespace ) -> RabbitMQRPCClient: - client = await rabbitmq_rpc_client("celery_test_client") - await client.register_router(router, STORAGE_RPC_NAMESPACE) + client = initialized_fast_api.state.rabbitmq_rpc_client + assert isinstance(client, RabbitMQRPCClient) + await client.register_router(router, rpc_namespace, initialized_fast_api) return client async def _start_task_via_rpc( client: RabbitMQRPCClient, *, + rpc_namespace: RPCNamespace, rpc_task_name: str, user_id: UserID, product_name: ProductName, @@ -134,7 +129,7 @@ async def _start_task_via_rpc( 
job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) async_job_get = await async_jobs.submit( rabbitmq_rpc_client=client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, method_name=RPCMethodName(rpc_task_name), job_id_data=job_id_data, **kwargs, @@ -166,6 +161,7 @@ def _(celery_app: Celery) -> None: async def _wait_for_job( storage_rabbitmq_rpc_client: RabbitMQRPCClient, *, + rpc_namespace: RPCNamespace, async_job_get: AsyncJobGet, job_id_data: AsyncJobNameData, stop_after: timedelta = timedelta(seconds=5), @@ -180,7 +176,7 @@ async def _wait_for_job( with attempt: result = await async_jobs.status( storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -209,6 +205,7 @@ async def _wait_for_job( ) async def test_async_jobs_workflow( rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, @@ -217,6 +214,7 @@ async def test_async_jobs_workflow( ): async_job_get, job_id_data = await _start_task_via_rpc( rpc_client, + rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -226,7 +224,7 @@ async def test_async_jobs_workflow( jobs = await async_jobs.list_jobs( rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, filter_="", # currently not used job_id_data=job_id_data, ) @@ -234,13 +232,14 @@ async def test_async_jobs_workflow( await _wait_for_job( rpc_client, + rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, ) async_job_result = await async_jobs.result( rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -256,6 +255,7 @@ async def test_async_jobs_workflow( async def test_async_jobs_cancel( # initialized_app: FastAPI, register_rpc_routes: None, + rpc_namespace: RPCNamespace, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, @@ -264,6 +264,7 @@ async def test_async_jobs_cancel( ): async_job_get, job_id_data = await _start_task_via_rpc( storage_rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -273,20 +274,21 @@ async def test_async_jobs_cancel( await async_jobs.cancel( storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) await _wait_for_job( storage_rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, ) jobs = await async_jobs.list_jobs( storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, filter_="", # currently not used job_id_data=job_id_data, ) @@ -295,7 +297,7 @@ async def test_async_jobs_cancel( with pytest.raises(JobAbortedError): await async_jobs.result( storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -321,6 +323,7 @@ async def test_async_jobs_cancel( async def test_async_jobs_raises( # initialized_app: FastAPI, register_rpc_routes: None, + rpc_namespace: RPCNamespace, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, @@ -330,6 +333,7 @@ async 
def test_async_jobs_raises( ): async_job_get, job_id_data = await _start_task_via_rpc( storage_rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -339,6 +343,7 @@ async def test_async_jobs_raises( await _wait_for_job( storage_rabbitmq_rpc_client, + rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, stop_after=timedelta(minutes=1), @@ -347,7 +352,7 @@ async def test_async_jobs_raises( with pytest.raises(JobError) as exc: await async_jobs.result( storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) From dcef2f110b3cb8d86c5a4750b350508c62017a4a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 28 May 2025 08:20:46 +0200 Subject: [PATCH 40/91] minor changes --- .../celery-library/tests/unit/conftest.py | 2 +- .../tests/unit/test_async_jobs.py | 20 +++++++++++++------ .../tests/unit/test_modules_celery.py | 2 +- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py index ba6b5bdc5e8..7b4464e658d 100644 --- a/packages/celery-library/tests/unit/conftest.py +++ b/packages/celery-library/tests/unit/conftest.py @@ -127,7 +127,7 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: @pytest.fixture -def with_storage_celery_worker( +def with_celery_worker( celery_worker_controller: TestWorkController, ) -> CeleryTaskWorker: assert isinstance(celery_worker_controller.app, Celery) diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index 99990ca4234..294a517068f 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Callable +from collections.abc import Awaitable, Callable from datetime import timedelta from enum import Enum from typing import Any @@ -108,12 +108,19 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture -async def rpc_client( +async def register_routes( initialized_fast_api: FastAPI, rpc_namespace: RPCNamespace -) -> RabbitMQRPCClient: +) -> None: client = initialized_fast_api.state.rabbitmq_rpc_client assert isinstance(client, RabbitMQRPCClient) await client.register_router(router, rpc_namespace, initialized_fast_api) + + +@pytest.fixture +async def rpc_client( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + client = await rabbitmq_rpc_client("celery_test_client") return client @@ -204,9 +211,10 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( + register_routes, rpc_client: RabbitMQRPCClient, rpc_namespace: RPCNamespace, - with_storage_celery_worker: CeleryTaskWorker, + with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -257,7 +265,7 @@ async def test_async_jobs_cancel( register_rpc_routes: None, rpc_namespace: RPCNamespace, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -325,7 +333,7 @@ async def test_async_jobs_raises( register_rpc_routes: None, rpc_namespace: RPCNamespace, storage_rabbitmq_rpc_client: 
RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, diff --git a/packages/celery-library/tests/unit/test_modules_celery.py b/packages/celery-library/tests/unit/test_modules_celery.py index 5d57838bb9d..7f84d4f1851 100644 --- a/packages/celery-library/tests/unit/test_modules_celery.py +++ b/packages/celery-library/tests/unit/test_modules_celery.py @@ -43,7 +43,7 @@ @pytest.fixture def celery_client( initialized_app: FastAPI, - with_storage_celery_worker: CeleryTaskWorker, + with_celery_worker: CeleryTaskWorker, ) -> CeleryTaskClient: return get_celery_client(initialized_app) From fc9875260caec99a56f5e9494850a0d1edd11a09 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 4 Jun 2025 15:47:29 +0200 Subject: [PATCH 41/91] upgraded celery-library requirements --- .../celery-library/requirements/_base.txt | 26 +++++++++---------- .../celery-library/requirements/_test.txt | 15 ++++++----- .../celery-library/requirements/_tools.txt | 10 ++++--- 3 files changed, 27 insertions(+), 24 deletions(-) diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index d52f0e54eb9..30116912503 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.12.2 +aiohttp==3.12.8 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -51,7 +51,7 @@ attrs==25.3.0 # referencing billiard==4.2.1 # via celery -celery==5.5.2 +celery==5.5.3 # via -r requirements/_base.in certifi==2025.4.26 # via @@ -99,7 +99,7 @@ fast-depends==2.4.12 # via faststream faststream==0.5.42 # via -r requirements/../../../packages/service-library/requirements/_base.in -frozenlist==1.6.0 +frozenlist==1.6.2 # via # aiohttp # aiosignal @@ -107,7 +107,7 @@ googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -grpcio==1.71.0 +grpcio==1.72.1 # via opentelemetry-exporter-otlp-proto-grpc idna==3.10 # via @@ -123,7 +123,7 @@ jsonschema==4.24.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2025.4.1 # via jsonschema -kombu==5.5.3 +kombu==5.5.4 # via celery markdown-it-py==3.0.0 # via rich @@ -210,7 +210,9 @@ orjson==3.10.18 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in packaging==25.0 - # via opentelemetry-instrumentation + # via + # kombu + # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq prompt-toolkit==3.0.51 @@ -219,7 +221,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==5.29.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -257,7 +259,7 @@ pydantic==2.11.5 # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.4 +pydantic-extra-types==2.10.5 # via # -r 
requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -289,8 +291,6 @@ pygments==2.19.1 # via rich pyinstrument==5.0.2 # via -r requirements/../../../packages/service-library/requirements/_base.in -pyjwt==2.9.0 - # via redis python-dateutil==2.9.0.post0 # via # arrow @@ -312,7 +312,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in -redis==5.3.0 +redis==5.2.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -327,7 +327,7 @@ redis==5.3.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in - # celery + # kombu referencing==0.35.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -377,7 +377,7 @@ typer==0.16.0 # -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.13.2 +typing-extensions==4.14.0 # via # aiodebug # anyio diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 843d2fc02a8..6b8a225c9b3 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -15,7 +15,7 @@ billiard==4.2.1 # via # -c requirements/_base.txt # celery -celery==5.5.2 +celery==5.5.3 # via # -c requirements/_base.txt # pytest-celery @@ -88,7 +88,7 @@ idna==3.10 # requests iniconfig==2.1.0 # via pytest -kombu==5.5.3 +kombu==5.5.4 # via # -c requirements/_base.txt # celery @@ -96,6 +96,7 @@ kombu==5.5.3 packaging==25.0 # via # -c requirements/_base.txt + # kombu # pytest # pytest-sugar pint==0.24.4 @@ -125,11 +126,11 @@ pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic -pyjwt==2.9.0 +pygments==2.19.1 # via # -c requirements/_base.txt - # redis -pytest==8.3.5 + # pytest +pytest==8.4.0 # via # -r requirements/_test.in # pytest-asyncio @@ -173,7 +174,7 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -redis==5.3.0 +redis==5.2.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -204,7 +205,7 @@ tenacity==9.1.2 # pytest-celery termcolor==3.1.0 # via pytest-sugar -typing-extensions==4.13.2 +typing-extensions==4.14.0 # via # -c requirements/_base.txt # anyio diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index 33274e8afd6..a00dfe6cb8c 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -43,7 +43,9 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.1.1 # via 
pip-tools pip-tools==7.4.1 @@ -68,7 +70,7 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.11.11 +ruff==0.11.12 # via -r requirements/../../../requirements/devenv.txt setuptools==80.9.0 # via @@ -76,7 +78,7 @@ setuptools==80.9.0 # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.0 # via # -c requirements/_base.txt # -c requirements/_test.txt From e212495527ebf4e197a68ff60b4822e390007d0a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 4 Jun 2025 16:54:48 +0200 Subject: [PATCH 42/91] minor fix --- .../tests/unit/test_async_jobs.py | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index 294a517068f..a84b7f9f2f8 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -166,7 +166,7 @@ def _(celery_app: Celery) -> None: async def _wait_for_job( - storage_rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, rpc_namespace: RPCNamespace, async_job_get: AsyncJobGet, @@ -182,7 +182,7 @@ async def _wait_for_job( ): with attempt: result = await async_jobs.status( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, @@ -262,16 +262,16 @@ async def test_async_jobs_workflow( ) async def test_async_jobs_cancel( # initialized_app: FastAPI, - register_rpc_routes: None, + register_routes: None, rpc_namespace: RPCNamespace, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, ): async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, @@ -281,21 +281,21 @@ async def test_async_jobs_cancel( ) await async_jobs.cancel( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) await _wait_for_job( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, ) jobs = await async_jobs.list_jobs( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, filter_="", # currently not used job_id_data=job_id_data, @@ -304,7 +304,7 @@ async def test_async_jobs_cancel( with pytest.raises(JobAbortedError): await async_jobs.result( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, @@ -330,9 +330,9 @@ async def test_async_jobs_cancel( ) async def test_async_jobs_raises( # initialized_app: FastAPI, - register_rpc_routes: None, + register_routes: None, rpc_namespace: RPCNamespace, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, @@ -340,7 +340,7 @@ async def test_async_jobs_raises( error: Exception, ): async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, @@ -350,7 +350,7 @@ async def test_async_jobs_raises( ) await _wait_for_job( - storage_rabbitmq_rpc_client, + rpc_client, 
rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, @@ -359,7 +359,7 @@ async def test_async_jobs_raises( with pytest.raises(JobError) as exc: await async_jobs.result( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, From f27e9add4eccf78738bbea612c808c03da1d0b95 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:10:34 +0200 Subject: [PATCH 43/91] =?UTF-8?q?Revert=20"=F0=9F=90=9B=20fix=20wrong=20pr?= =?UTF-8?q?oject=20name=20in=20billing=20center=20usage=20view=20(#7798)"?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 7d720d197e3e6519ac303f32e9bf9bb7107ad4fa. --- .../celery-library/requirements/_base.txt | 25 ++- packages/celery-library/requirements/_test.in | 1 - .../celery-library/requirements/_test.txt | 13 +- .../celery-library/requirements/_tools.txt | 4 +- .../celery-library/tests/unit/conftest.py | 144 ------------------ .../storage}/tests/unit/test_async_jobs.py | 60 +++----- .../tests/unit/test_modules_celery.py | 0 7 files changed, 42 insertions(+), 205 deletions(-) delete mode 100644 packages/celery-library/tests/unit/conftest.py rename {packages/celery-library => services/storage}/tests/unit/test_async_jobs.py (87%) rename {packages/celery-library => services/storage}/tests/unit/test_modules_celery.py (100%) diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index 30116912503..c3842b5eb3b 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -70,7 +70,7 @@ certifi==2025.4.26 # requests charset-normalizer==3.4.2 # via requests -click==8.2.1 +click==8.1.8 # via # celery # click-didyoumean @@ -97,7 +97,7 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.42 +faststream==0.5.41 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.6.2 # via @@ -117,7 +117,7 @@ idna==3.10 # yarl importlib-metadata==8.6.1 # via opentelemetry-api -jsonschema==4.24.0 +jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in @@ -129,7 +129,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.4 +multidict==6.4.3 # via # aiohttp # yarl @@ -139,7 +139,6 @@ opentelemetry-api==1.33.1 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation - # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests @@ -157,12 +156,9 @@ opentelemetry-exporter-otlp-proto-http==1.33.1 # via opentelemetry-exporter-otlp opentelemetry-instrumentation==0.54b1 # via - # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.54b1 - # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-logging==0.54b1 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.54b1 @@ -229,7 +225,7 @@ psutil==7.0.0 # via -r 
requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.5 +pydantic==2.11.4 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -289,7 +285,7 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 # via rich -pyinstrument==5.0.2 +pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via @@ -351,7 +347,7 @@ rich==14.0.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.25.1 +rpds-py==0.25.0 # via # jsonschema # referencing @@ -371,7 +367,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.16.0 +typer==0.15.4 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -389,7 +385,7 @@ typing-extensions==4.14.0 # pydantic-extra-types # typer # typing-inspection -typing-inspection==0.4.1 +typing-inspection==0.4.0 # via pydantic tzdata==2025.2 # via kombu @@ -419,7 +415,6 @@ wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation - # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis yarl==1.20.0 # via @@ -427,5 +422,5 @@ yarl==1.20.0 # aio-pika # aiohttp # aiormq -zipp==3.22.0 +zipp==3.21.0 # via importlib-metadata diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index 31bb61caf01..debdbb724fe 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -13,7 +13,6 @@ coverage faker fakeredis fastapi -docker httpx pint pytest diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 6b8a225c9b3..73a58a1673e 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -85,7 +85,6 @@ idna==3.10 # -c requirements/_base.txt # anyio # httpx - # requests iniconfig==2.1.0 # via pytest kombu==5.5.4 @@ -117,7 +116,7 @@ psutil==7.0.0 # pytest-celery py-cpuinfo==9.0.0 # via pytest-benchmark -pydantic==2.11.5 +pydantic==2.11.4 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -141,8 +140,10 @@ pytest==8.4.0 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==1.0.0 - # via -r requirements/_test.in +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-celery==1.2.0 @@ -155,7 +156,7 @@ pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.1 +pytest-mock==3.14.0 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -216,7 +217,7 @@ typing-extensions==4.14.0 # pydantic # pydantic-core # typing-inspection 
-typing-inspection==0.4.1 +typing-inspection==0.4.0 # via # -c requirements/_base.txt # pydantic diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index a00dfe6cb8c..fc3b332f4f6 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.2.1 +click==8.1.8 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -20,7 +20,7 @@ distlib==0.3.9 # via virtualenv filelock==3.18.0 # via virtualenv -identify==2.6.12 +identify==2.6.10 # via pre-commit isort==6.0.1 # via diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py deleted file mode 100644 index 7b4464e658d..00000000000 --- a/packages/celery-library/tests/unit/conftest.py +++ /dev/null @@ -1,144 +0,0 @@ -from collections.abc import AsyncIterator, Awaitable, Callable -from functools import partial -from typing import Final - -import pytest -from asgi_lifespan import LifespanManager -from celery import Celery -from celery.contrib.testing.worker import TestWorkController, start_worker -from celery.signals import worker_init, worker_shutdown -from celery.worker.worker import WorkController -from celery_library import setup_celery_client -from celery_library.routes.rpc import router as async_jobs_router -from celery_library.signals import on_worker_init, on_worker_shutdown -from celery_library.utils import get_celery_worker -from celery_library.worker import CeleryTaskWorker -from faker import Faker -from fastapi import FastAPI -from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCNamespace -from models_library.users import UserID -from pydantic import TypeAdapter -from servicelib.rabbitmq import RabbitMQRPCClient -from settings_library.celery import CelerySettings -from settings_library.rabbit import RabbitSettings -from settings_library.redis import RedisSettings - -pytest_simcore_core_services_selection = [ - "rabbit", - "redis", - "postgres", -] - -pytest_plugins = [ - "pytest_simcore.docker_compose", - "pytest_simcore.docker_swarm", - "pytest_simcore.rabbit_service", - "pytest_simcore.redis_service", - "pytest_simcore.repository_paths", -] - -_LIFESPAN_TIMEOUT: Final[int] = 10 - - -@pytest.fixture -def rpc_namespace() -> RPCNamespace: - return TypeAdapter(RPCNamespace).validate_python("test") - - -@pytest.fixture -def celery_settings( - rabbit_service: RabbitSettings, - redis_service: RedisSettings, -) -> CelerySettings: - return CelerySettings.create_from_envs() - - -@pytest.fixture -async def initialized_fast_api( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], - celery_settings: CelerySettings, - rpc_namespace: RPCNamespace, -) -> AsyncIterator[FastAPI]: - app = FastAPI( - title="master_fastapi_app", - description="Service that manages osparc storage backend", - version="0.0.0", - ) - - setup_celery_client(app, celery_settings=celery_settings) - rpc_client = await rabbitmq_rpc_client("celery_test_client") - app.state.rabbitmq_rpc_client = rpc_client - - async def startup() -> None: - rpc_server = app.state.rabbitmq_rpc_client - assert isinstance(rpc_server, RabbitMQRPCClient) - await rpc_server.register_router(async_jobs_router, rpc_namespace, app) - - app.add_event_handler("startup", startup) - - async with LifespanManager( - app, startup_timeout=_LIFESPAN_TIMEOUT, 
shutdown_timeout=_LIFESPAN_TIMEOUT - ): - yield app - - -@pytest.fixture -def register_celery_tasks() -> Callable[[Celery], None]: - """override if tasks are needed""" - - def _(celery_app: Celery) -> None: ... - - return _ - - -@pytest.fixture -async def celery_worker_controller( - celery_settings: CelerySettings, - celery_app: Celery, - register_celery_tasks: Callable[[Celery], None], -) -> AsyncIterator[TestWorkController]: - - def _create_app() -> FastAPI: - - return FastAPI( - title="worker_fastapi_app", - description="Test application for celery_library", - version="0.0.0", - ) - - def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: - return partial(on_worker_init, _create_app, celery_settings)(sender, **_kwargs) - - worker_init.connect(_on_worker_init_wrapper) - worker_shutdown.connect(on_worker_shutdown) - - register_celery_tasks(celery_app) - - with start_worker( - celery_app, - pool="threads", - concurrency=1, - loglevel="info", - perform_ping_check=False, - queues="default,cpu_bound", - ) as worker: - yield worker - - -@pytest.fixture -def with_celery_worker( - celery_worker_controller: TestWorkController, -) -> CeleryTaskWorker: - assert isinstance(celery_worker_controller.app, Celery) - return get_celery_worker(celery_worker_controller.app) - - -@pytest.fixture -def user_id(faker: Faker) -> UserID: - return TypeAdapter(UserID).validate_python(faker.pyint(min_value=1, max_value=1000)) - - -@pytest.fixture -def product_name() -> ProductName: - return TypeAdapter(ProductName).validate_python("pytest-product") diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py similarity index 87% rename from packages/celery-library/tests/unit/test_async_jobs.py rename to services/storage/tests/unit/test_async_jobs.py index a84b7f9f2f8..9a96df5b81d 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Awaitable, Callable +from collections.abc import Callable from datetime import timedelta from enum import Enum from typing import Any @@ -30,6 +30,7 @@ from models_library.users import UserID from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs +from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server from tenacity import ( AsyncRetrying, retry_if_exception_type, @@ -108,20 +109,9 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture -async def register_routes( - initialized_fast_api: FastAPI, rpc_namespace: RPCNamespace -) -> None: - client = initialized_fast_api.state.rabbitmq_rpc_client - assert isinstance(client, RabbitMQRPCClient) - await client.register_router(router, rpc_namespace, initialized_fast_api) - - -@pytest.fixture -async def rpc_client( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], -) -> RabbitMQRPCClient: - client = await rabbitmq_rpc_client("celery_test_client") - return client +async def register_rpc_routes(initialized_app: FastAPI) -> None: + rpc_server = get_rabbitmq_rpc_server(initialized_app) + await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, initialized_app) async def _start_task_via_rpc( @@ -211,18 +201,17 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( - register_routes, - rpc_client: RabbitMQRPCClient, - rpc_namespace: RPCNamespace, - with_celery_worker: 
CeleryTaskWorker, + initialized_app: FastAPI, + register_rpc_routes: None, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, payload: Any, ): async_job_get, job_id_data = await _start_task_via_rpc( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -231,23 +220,22 @@ async def test_async_jobs_workflow( ) jobs = await async_jobs.list_jobs( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, filter_="", # currently not used job_id_data=job_id_data, ) assert len(jobs) > 0 await _wait_for_job( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, async_job_get=async_job_get, job_id_data=job_id_data, ) async_job_result = await async_jobs.result( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -261,11 +249,10 @@ async def test_async_jobs_workflow( ], ) async def test_async_jobs_cancel( - # initialized_app: FastAPI, - register_routes: None, - rpc_namespace: RPCNamespace, - rpc_client: RabbitMQRPCClient, - with_celery_worker: CeleryTaskWorker, + initialized_app: FastAPI, + register_rpc_routes: None, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -329,11 +316,10 @@ async def test_async_jobs_cancel( ], ) async def test_async_jobs_raises( - # initialized_app: FastAPI, - register_routes: None, - rpc_namespace: RPCNamespace, - rpc_client: RabbitMQRPCClient, - with_celery_worker: CeleryTaskWorker, + initialized_app: FastAPI, + register_rpc_routes: None, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, diff --git a/packages/celery-library/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py similarity index 100% rename from packages/celery-library/tests/unit/test_modules_celery.py rename to services/storage/tests/unit/test_modules_celery.py From 384a2a1fdda8805ea2de1d41715ae7fd4b8e332a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:12:31 +0200 Subject: [PATCH 44/91] Revert "create fixture for creating rabbitmq rpc server" This reverts commit c8d9b01cc095dd10f3f1383c2116a3da07655610. 
--- services/storage/tests/unit/test_async_jobs.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 9a96df5b81d..28365d26f62 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -30,7 +30,6 @@ from models_library.users import UserID from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs -from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server from tenacity import ( AsyncRetrying, retry_if_exception_type, @@ -109,9 +108,12 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture -async def register_rpc_routes(initialized_app: FastAPI) -> None: - rpc_server = get_rabbitmq_rpc_server(initialized_app) - await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, initialized_app) +async def rpc_client( + rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + client = await rpc_client("celery_test_client") + await client.register_router(router, STORAGE_RPC_NAMESPACE) + return client async def _start_task_via_rpc( From 6021ddb676b49c80e52aefd74393a329a215b139 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:14:46 +0200 Subject: [PATCH 45/91] =?UTF-8?q?Revert=20"Revert=20"=F0=9F=90=9B=20fix=20?= =?UTF-8?q?wrong=20project=20name=20in=20billing=20center=20usage=20view?= =?UTF-8?q?=20(#7798)""?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit f27e9add4eccf78738bbea612c808c03da1d0b95. --- .../celery-library/requirements/_base.txt | 25 +-- packages/celery-library/requirements/_test.in | 1 + .../celery-library/requirements/_test.txt | 13 +- .../celery-library/requirements/_tools.txt | 4 +- .../celery-library/tests/unit/conftest.py | 144 ++++++++++++++++++ .../tests/unit/test_async_jobs.py | 56 ++++--- .../tests/unit/test_modules_celery.py | 0 7 files changed, 202 insertions(+), 41 deletions(-) create mode 100644 packages/celery-library/tests/unit/conftest.py rename {services/storage => packages/celery-library}/tests/unit/test_async_jobs.py (88%) rename {services/storage => packages/celery-library}/tests/unit/test_modules_celery.py (100%) diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index c3842b5eb3b..30116912503 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -70,7 +70,7 @@ certifi==2025.4.26 # requests charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # celery # click-didyoumean @@ -97,7 +97,7 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.41 +faststream==0.5.42 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.6.2 # via @@ -117,7 +117,7 @@ idna==3.10 # yarl importlib-metadata==8.6.1 # via opentelemetry-api -jsonschema==4.23.0 +jsonschema==4.24.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in @@ -129,7 +129,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.3 +multidict==6.4.4 # via # aiohttp # yarl @@ -139,6 +139,7 @@ 
opentelemetry-api==1.33.1 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests @@ -156,9 +157,12 @@ opentelemetry-exporter-otlp-proto-http==1.33.1 # via opentelemetry-exporter-otlp opentelemetry-instrumentation==0.54b1 # via + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-aio-pika==0.54b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-logging==0.54b1 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.54b1 @@ -225,7 +229,7 @@ psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.4 +pydantic==2.11.5 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -285,7 +289,7 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 # via rich -pyinstrument==5.0.1 +pyinstrument==5.0.2 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via @@ -347,7 +351,7 @@ rich==14.0.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.25.0 +rpds-py==0.25.1 # via # jsonschema # referencing @@ -367,7 +371,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.4 +typer==0.16.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -385,7 +389,7 @@ typing-extensions==4.14.0 # pydantic-extra-types # typer # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via pydantic tzdata==2025.2 # via kombu @@ -415,6 +419,7 @@ wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis yarl==1.20.0 # via @@ -422,5 +427,5 @@ yarl==1.20.0 # aio-pika # aiohttp # aiormq -zipp==3.21.0 +zipp==3.22.0 # via importlib-metadata diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index debdbb724fe..31bb61caf01 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -13,6 +13,7 @@ coverage faker fakeredis fastapi +docker httpx pint pytest diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 73a58a1673e..6b8a225c9b3 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -85,6 +85,7 @@ idna==3.10 # -c requirements/_base.txt # anyio # httpx + # requests 
iniconfig==2.1.0 # via pytest kombu==5.5.4 @@ -116,7 +117,7 @@ psutil==7.0.0 # pytest-celery py-cpuinfo==9.0.0 # via pytest-benchmark -pydantic==2.11.4 +pydantic==2.11.5 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -140,10 +141,8 @@ pytest==8.4.0 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.23.8 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in +pytest-asyncio==1.0.0 + # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-celery==1.2.0 @@ -156,7 +155,7 @@ pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -217,7 +216,7 @@ typing-extensions==4.14.0 # pydantic # pydantic-core # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via # -c requirements/_base.txt # pydantic diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index fc3b332f4f6..a00dfe6cb8c 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -20,7 +20,7 @@ distlib==0.3.9 # via virtualenv filelock==3.18.0 # via virtualenv -identify==2.6.10 +identify==2.6.12 # via pre-commit isort==6.0.1 # via diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py new file mode 100644 index 00000000000..7b4464e658d --- /dev/null +++ b/packages/celery-library/tests/unit/conftest.py @@ -0,0 +1,144 @@ +from collections.abc import AsyncIterator, Awaitable, Callable +from functools import partial +from typing import Final + +import pytest +from asgi_lifespan import LifespanManager +from celery import Celery +from celery.contrib.testing.worker import TestWorkController, start_worker +from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController +from celery_library import setup_celery_client +from celery_library.routes.rpc import router as async_jobs_router +from celery_library.signals import on_worker_init, on_worker_shutdown +from celery_library.utils import get_celery_worker +from celery_library.worker import CeleryTaskWorker +from faker import Faker +from fastapi import FastAPI +from models_library.products import ProductName +from models_library.rabbitmq_basic_types import RPCNamespace +from models_library.users import UserID +from pydantic import TypeAdapter +from servicelib.rabbitmq import RabbitMQRPCClient +from settings_library.celery import CelerySettings +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings + +pytest_simcore_core_services_selection = [ + "rabbit", + "redis", + "postgres", +] + +pytest_plugins = [ + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", + "pytest_simcore.rabbit_service", + "pytest_simcore.redis_service", + "pytest_simcore.repository_paths", +] + +_LIFESPAN_TIMEOUT: Final[int] = 10 + + +@pytest.fixture +def rpc_namespace() -> RPCNamespace: + return TypeAdapter(RPCNamespace).validate_python("test") + + +@pytest.fixture +def celery_settings( + rabbit_service: RabbitSettings, + 
redis_service: RedisSettings, +) -> CelerySettings: + return CelerySettings.create_from_envs() + + +@pytest.fixture +async def initialized_fast_api( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], + celery_settings: CelerySettings, + rpc_namespace: RPCNamespace, +) -> AsyncIterator[FastAPI]: + app = FastAPI( + title="master_fastapi_app", + description="Service that manages osparc storage backend", + version="0.0.0", + ) + + setup_celery_client(app, celery_settings=celery_settings) + rpc_client = await rabbitmq_rpc_client("celery_test_client") + app.state.rabbitmq_rpc_client = rpc_client + + async def startup() -> None: + rpc_server = app.state.rabbitmq_rpc_client + assert isinstance(rpc_server, RabbitMQRPCClient) + await rpc_server.register_router(async_jobs_router, rpc_namespace, app) + + app.add_event_handler("startup", startup) + + async with LifespanManager( + app, startup_timeout=_LIFESPAN_TIMEOUT, shutdown_timeout=_LIFESPAN_TIMEOUT + ): + yield app + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + """override if tasks are needed""" + + def _(celery_app: Celery) -> None: ... + + return _ + + +@pytest.fixture +async def celery_worker_controller( + celery_settings: CelerySettings, + celery_app: Celery, + register_celery_tasks: Callable[[Celery], None], +) -> AsyncIterator[TestWorkController]: + + def _create_app() -> FastAPI: + + return FastAPI( + title="worker_fastapi_app", + description="Test application for celery_library", + version="0.0.0", + ) + + def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: + return partial(on_worker_init, _create_app, celery_settings)(sender, **_kwargs) + + worker_init.connect(_on_worker_init_wrapper) + worker_shutdown.connect(on_worker_shutdown) + + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + pool="threads", + concurrency=1, + loglevel="info", + perform_ping_check=False, + queues="default,cpu_bound", + ) as worker: + yield worker + + +@pytest.fixture +def with_celery_worker( + celery_worker_controller: TestWorkController, +) -> CeleryTaskWorker: + assert isinstance(celery_worker_controller.app, Celery) + return get_celery_worker(celery_worker_controller.app) + + +@pytest.fixture +def user_id(faker: Faker) -> UserID: + return TypeAdapter(UserID).validate_python(faker.pyint(min_value=1, max_value=1000)) + + +@pytest.fixture +def product_name() -> ProductName: + return TypeAdapter(ProductName).validate_python("pytest-product") diff --git a/services/storage/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py similarity index 88% rename from services/storage/tests/unit/test_async_jobs.py rename to packages/celery-library/tests/unit/test_async_jobs.py index 28365d26f62..a84b7f9f2f8 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Callable +from collections.abc import Awaitable, Callable from datetime import timedelta from enum import Enum from typing import Any @@ -107,12 +107,20 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - ################################# +@pytest.fixture +async def register_routes( + initialized_fast_api: FastAPI, rpc_namespace: RPCNamespace +) -> None: + client = initialized_fast_api.state.rabbitmq_rpc_client + assert isinstance(client, RabbitMQRPCClient) + await client.register_router(router, rpc_namespace, 
initialized_fast_api) + + @pytest.fixture async def rpc_client( - rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], ) -> RabbitMQRPCClient: - client = await rpc_client("celery_test_client") - await client.register_router(router, STORAGE_RPC_NAMESPACE) + client = await rabbitmq_rpc_client("celery_test_client") return client @@ -203,17 +211,18 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( - initialized_app: FastAPI, - register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + register_routes, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, + with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, payload: Any, ): async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + rpc_client, + rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -222,22 +231,23 @@ async def test_async_jobs_workflow( ) jobs = await async_jobs.list_jobs( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_client, + rpc_namespace=rpc_namespace, filter_="", # currently not used job_id_data=job_id_data, ) assert len(jobs) > 0 await _wait_for_job( - storage_rabbitmq_rpc_client, + rpc_client, + rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, ) async_job_result = await async_jobs.result( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_client, + rpc_namespace=rpc_namespace, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -251,10 +261,11 @@ async def test_async_jobs_workflow( ], ) async def test_async_jobs_cancel( - initialized_app: FastAPI, - register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + # initialized_app: FastAPI, + register_routes: None, + rpc_namespace: RPCNamespace, + rpc_client: RabbitMQRPCClient, + with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -318,10 +329,11 @@ async def test_async_jobs_cancel( ], ) async def test_async_jobs_raises( - initialized_app: FastAPI, - register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + # initialized_app: FastAPI, + register_routes: None, + rpc_namespace: RPCNamespace, + rpc_client: RabbitMQRPCClient, + with_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, diff --git a/services/storage/tests/unit/test_modules_celery.py b/packages/celery-library/tests/unit/test_modules_celery.py similarity index 100% rename from services/storage/tests/unit/test_modules_celery.py rename to packages/celery-library/tests/unit/test_modules_celery.py From 97a1e8bf4d0cc6b6218b006c516ba6035849004e Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:15:09 +0200 Subject: [PATCH 46/91] Revert "upgraded celery-library requirements" This reverts commit fc9875260caec99a56f5e9494850a0d1edd11a09. 
--- .../celery-library/requirements/_base.txt | 26 +++++++++---------- .../celery-library/requirements/_test.txt | 15 +++++------ .../celery-library/requirements/_tools.txt | 10 +++---- 3 files changed, 24 insertions(+), 27 deletions(-) diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index 30116912503..d52f0e54eb9 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.12.8 +aiohttp==3.12.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -51,7 +51,7 @@ attrs==25.3.0 # referencing billiard==4.2.1 # via celery -celery==5.5.3 +celery==5.5.2 # via -r requirements/_base.in certifi==2025.4.26 # via @@ -99,7 +99,7 @@ fast-depends==2.4.12 # via faststream faststream==0.5.42 # via -r requirements/../../../packages/service-library/requirements/_base.in -frozenlist==1.6.2 +frozenlist==1.6.0 # via # aiohttp # aiosignal @@ -107,7 +107,7 @@ googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -grpcio==1.72.1 +grpcio==1.71.0 # via opentelemetry-exporter-otlp-proto-grpc idna==3.10 # via @@ -123,7 +123,7 @@ jsonschema==4.24.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2025.4.1 # via jsonschema -kombu==5.5.4 +kombu==5.5.3 # via celery markdown-it-py==3.0.0 # via rich @@ -210,9 +210,7 @@ orjson==3.10.18 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in packaging==25.0 - # via - # kombu - # opentelemetry-instrumentation + # via opentelemetry-instrumentation pamqp==3.3.0 # via aiormq prompt-toolkit==3.0.51 @@ -221,7 +219,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==5.29.5 +protobuf==5.29.4 # via # googleapis-common-protos # opentelemetry-proto @@ -259,7 +257,7 @@ pydantic==2.11.5 # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.5 +pydantic-extra-types==2.10.4 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -291,6 +289,8 @@ pygments==2.19.1 # via rich pyinstrument==5.0.2 # via -r requirements/../../../packages/service-library/requirements/_base.in +pyjwt==2.9.0 + # via redis python-dateutil==2.9.0.post0 # via # arrow @@ -312,7 +312,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in -redis==5.2.1 +redis==5.3.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -327,7 +327,7 @@ redis==5.2.1 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in - # kombu + # celery referencing==0.35.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -377,7 +377,7 @@ typer==0.16.0 # -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.14.0 +typing-extensions==4.13.2 # via # aiodebug # anyio diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 6b8a225c9b3..843d2fc02a8 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -15,7 +15,7 @@ billiard==4.2.1 # via # -c requirements/_base.txt # celery -celery==5.5.3 +celery==5.5.2 # via # -c requirements/_base.txt # pytest-celery @@ -88,7 +88,7 @@ idna==3.10 # requests iniconfig==2.1.0 # via pytest -kombu==5.5.4 +kombu==5.5.3 # via # -c requirements/_base.txt # celery @@ -96,7 +96,6 @@ kombu==5.5.4 packaging==25.0 # via # -c requirements/_base.txt - # kombu # pytest # pytest-sugar pint==0.24.4 @@ -126,11 +125,11 @@ pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic -pygments==2.19.1 +pyjwt==2.9.0 # via # -c requirements/_base.txt - # pytest -pytest==8.4.0 + # redis +pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio @@ -174,7 +173,7 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -redis==5.2.1 +redis==5.3.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -205,7 +204,7 @@ tenacity==9.1.2 # pytest-celery termcolor==3.1.0 # via pytest-sugar -typing-extensions==4.14.0 +typing-extensions==4.13.2 # via # -c requirements/_base.txt # anyio diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index a00dfe6cb8c..33274e8afd6 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -28,7 +28,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.16.0 +mypy==1.15.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -43,9 +43,7 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via - # black - # mypy + # via black pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -70,7 +68,7 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.11.12 +ruff==0.11.11 # via -r requirements/../../../requirements/devenv.txt setuptools==80.9.0 # via @@ -78,7 +76,7 @@ setuptools==80.9.0 # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.14.0 +typing-extensions==4.13.2 # via # -c requirements/_base.txt # -c requirements/_test.txt From b71cbcaa20a22ae319182a93bb5bbc524d2571bf Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:15:59 +0200 Subject: [PATCH 47/91] Revert "minor changes" This reverts commit dcef2f110b3cb8d86c5a4750b350508c62017a4a. 
--- .../celery-library/tests/unit/conftest.py | 2 +- .../tests/unit/test_async_jobs.py | 24 +++++++------------ .../tests/unit/test_modules_celery.py | 2 +- 3 files changed, 10 insertions(+), 18 deletions(-) diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py index 7b4464e658d..ba6b5bdc5e8 100644 --- a/packages/celery-library/tests/unit/conftest.py +++ b/packages/celery-library/tests/unit/conftest.py @@ -127,7 +127,7 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: @pytest.fixture -def with_celery_worker( +def with_storage_celery_worker( celery_worker_controller: TestWorkController, ) -> CeleryTaskWorker: assert isinstance(celery_worker_controller.app, Celery) diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index a84b7f9f2f8..dd0884c16be 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Awaitable, Callable +from collections.abc import Callable from datetime import timedelta from enum import Enum from typing import Any @@ -108,19 +108,12 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture -async def register_routes( +async def rpc_client( initialized_fast_api: FastAPI, rpc_namespace: RPCNamespace -) -> None: +) -> RabbitMQRPCClient: client = initialized_fast_api.state.rabbitmq_rpc_client assert isinstance(client, RabbitMQRPCClient) await client.register_router(router, rpc_namespace, initialized_fast_api) - - -@pytest.fixture -async def rpc_client( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], -) -> RabbitMQRPCClient: - client = await rabbitmq_rpc_client("celery_test_client") return client @@ -211,10 +204,9 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( - register_routes, rpc_client: RabbitMQRPCClient, rpc_namespace: RPCNamespace, - with_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -264,8 +256,8 @@ async def test_async_jobs_cancel( # initialized_app: FastAPI, register_routes: None, rpc_namespace: RPCNamespace, - rpc_client: RabbitMQRPCClient, - with_celery_worker: CeleryTaskWorker, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -332,8 +324,8 @@ async def test_async_jobs_raises( # initialized_app: FastAPI, register_routes: None, rpc_namespace: RPCNamespace, - rpc_client: RabbitMQRPCClient, - with_celery_worker: CeleryTaskWorker, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, diff --git a/packages/celery-library/tests/unit/test_modules_celery.py b/packages/celery-library/tests/unit/test_modules_celery.py index 7f84d4f1851..5d57838bb9d 100644 --- a/packages/celery-library/tests/unit/test_modules_celery.py +++ b/packages/celery-library/tests/unit/test_modules_celery.py @@ -43,7 +43,7 @@ @pytest.fixture def celery_client( initialized_app: FastAPI, - with_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskWorker, ) -> CeleryTaskClient: return get_celery_client(initialized_app) From 1202074bd290f2daf50bf7069487984337e6087d Mon Sep 17 
00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:16:57 +0200 Subject: [PATCH 48/91] Revert "make sure celery tasks actually run" This reverts commit 07ee77ad6b6690d92541da6e6b80e3aa576e9bf2. --- packages/celery-library/requirements/_test.in | 2 - .../celery-library/requirements/_test.txt | 94 +----------- .../celery-library/requirements/_tools.txt | 5 +- .../celery-library/tests/unit/conftest.py | 144 ------------------ .../tests/unit/test_async_jobs.py | 69 ++++----- .../api/rpc/_async_jobs.py | 0 6 files changed, 34 insertions(+), 280 deletions(-) rename packages/celery-library/src/celery_library/routes/rpc.py => services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py (100%) diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index 31bb61caf01..4b93bbbdd78 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -11,7 +11,6 @@ # testing coverage faker -fakeredis fastapi docker httpx @@ -19,7 +18,6 @@ pint pytest pytest-asyncio pytest-benchmark -pytest-celery pytest-cov pytest-icdiff pytest-instafail diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 843d2fc02a8..bf475d637ea 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -1,7 +1,3 @@ -amqp==5.3.1 - # via - # -c requirements/_base.txt - # kombu annotated-types==0.7.0 # via # -c requirements/_base.txt @@ -11,14 +7,6 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx # starlette -billiard==4.2.1 - # via - # -c requirements/_base.txt - # celery -celery==5.5.2 - # via - # -c requirements/_base.txt - # pytest-celery certifi==2025.4.26 # via # -c requirements/../../../requirements/constraints.txt @@ -30,39 +18,13 @@ charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests -click==8.2.1 - # via - # -c requirements/_base.txt - # celery - # click-didyoumean - # click-plugins - # click-repl -click-didyoumean==0.3.1 - # via - # -c requirements/_base.txt - # celery -click-plugins==1.1.1 - # via - # -c requirements/_base.txt - # celery -click-repl==0.3.0 - # via - # -c requirements/_base.txt - # celery coverage==7.8.2 # via # -r requirements/_test.in # pytest-cov -debugpy==1.8.14 - # via pytest-celery docker==7.1.0 - # via - # -r requirements/_test.in - # pytest-celery - # pytest-docker-tools -faker==37.3.0 # via -r requirements/_test.in -fakeredis==2.29.0 +faker==37.3.0 # via -r requirements/_test.in fastapi==0.115.12 # via -r requirements/_test.in @@ -88,11 +50,6 @@ idna==3.10 # requests iniconfig==2.1.0 # via pytest -kombu==5.5.3 - # via - # -c requirements/_base.txt - # celery - # pytest-celery packaging==25.0 # via # -c requirements/_base.txt @@ -106,14 +63,6 @@ pluggy==1.6.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff -prompt-toolkit==3.0.51 - # via - # -c requirements/_base.txt - # click-repl -psutil==7.0.0 - # via - # -c requirements/_base.txt - # pytest-celery py-cpuinfo==9.0.0 # via pytest-benchmark pydantic==2.11.5 @@ -125,17 +74,12 @@ pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic -pyjwt==2.9.0 - # via - # -c requirements/_base.txt - # redis pytest==8.3.5 # via # -r requirements/_test.in # pytest-asyncio # pytest-benchmark # pytest-cov - # pytest-docker-tools # pytest-icdiff # pytest-instafail # pytest-mock @@ -144,12 +88,8 @@ pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in 
-pytest-celery==1.2.0 - # via -r requirements/_test.in pytest-cov==6.1.1 # via -r requirements/_test.in -pytest-docker-tools==3.1.9 - # via pytest-celery pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 @@ -160,10 +100,6 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -python-dateutil==2.9.0.post0 - # via - # -c requirements/_base.txt - # celery python-dotenv==1.1.0 # via # -c requirements/_base.txt @@ -173,35 +109,18 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -redis==5.3.0 - # via - # -c requirements/../../../requirements/constraints.txt - # -c requirements/_base.txt - # fakeredis requests==2.32.3 # via # -c requirements/_base.txt # docker -setuptools==80.9.0 - # via pytest-celery -six==1.17.0 - # via - # -c requirements/_base.txt - # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt # anyio -sortedcontainers==2.4.0 - # via fakeredis starlette==0.46.2 # via # -c requirements/../../../requirements/constraints.txt # fastapi -tenacity==9.1.2 - # via - # -c requirements/_base.txt - # pytest-celery termcolor==3.1.0 # via pytest-sugar typing-extensions==4.13.2 @@ -223,20 +142,9 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker - # kombu urllib3==2.4.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # docker # requests -vine==5.1.0 - # via - # -c requirements/_base.txt - # amqp - # celery - # kombu -wcwidth==0.2.13 - # via - # -c requirements/_base.txt - # prompt-toolkit diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index 33274e8afd6..d90c0074c58 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -11,7 +11,6 @@ cfgv==3.4.0 click==8.2.1 # via # -c requirements/_base.txt - # -c requirements/_test.txt # black # pip-tools dill==0.4.0 @@ -71,9 +70,7 @@ pyyaml==6.0.2 ruff==0.11.11 # via -r requirements/../../../requirements/devenv.txt setuptools==80.9.0 - # via - # -c requirements/_test.txt - # pip-tools + # via pip-tools tomlkit==0.13.2 # via pylint typing-extensions==4.13.2 diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py index ba6b5bdc5e8..e69de29bb2d 100644 --- a/packages/celery-library/tests/unit/conftest.py +++ b/packages/celery-library/tests/unit/conftest.py @@ -1,144 +0,0 @@ -from collections.abc import AsyncIterator, Awaitable, Callable -from functools import partial -from typing import Final - -import pytest -from asgi_lifespan import LifespanManager -from celery import Celery -from celery.contrib.testing.worker import TestWorkController, start_worker -from celery.signals import worker_init, worker_shutdown -from celery.worker.worker import WorkController -from celery_library import setup_celery_client -from celery_library.routes.rpc import router as async_jobs_router -from celery_library.signals import on_worker_init, on_worker_shutdown -from celery_library.utils import get_celery_worker -from celery_library.worker import CeleryTaskWorker -from faker import Faker -from fastapi import FastAPI -from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCNamespace -from models_library.users import UserID -from pydantic import TypeAdapter -from servicelib.rabbitmq import RabbitMQRPCClient -from settings_library.celery import 
CelerySettings -from settings_library.rabbit import RabbitSettings -from settings_library.redis import RedisSettings - -pytest_simcore_core_services_selection = [ - "rabbit", - "redis", - "postgres", -] - -pytest_plugins = [ - "pytest_simcore.docker_compose", - "pytest_simcore.docker_swarm", - "pytest_simcore.rabbit_service", - "pytest_simcore.redis_service", - "pytest_simcore.repository_paths", -] - -_LIFESPAN_TIMEOUT: Final[int] = 10 - - -@pytest.fixture -def rpc_namespace() -> RPCNamespace: - return TypeAdapter(RPCNamespace).validate_python("test") - - -@pytest.fixture -def celery_settings( - rabbit_service: RabbitSettings, - redis_service: RedisSettings, -) -> CelerySettings: - return CelerySettings.create_from_envs() - - -@pytest.fixture -async def initialized_fast_api( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], - celery_settings: CelerySettings, - rpc_namespace: RPCNamespace, -) -> AsyncIterator[FastAPI]: - app = FastAPI( - title="master_fastapi_app", - description="Service that manages osparc storage backend", - version="0.0.0", - ) - - setup_celery_client(app, celery_settings=celery_settings) - rpc_client = await rabbitmq_rpc_client("celery_test_client") - app.state.rabbitmq_rpc_client = rpc_client - - async def startup() -> None: - rpc_server = app.state.rabbitmq_rpc_client - assert isinstance(rpc_server, RabbitMQRPCClient) - await rpc_server.register_router(async_jobs_router, rpc_namespace, app) - - app.add_event_handler("startup", startup) - - async with LifespanManager( - app, startup_timeout=_LIFESPAN_TIMEOUT, shutdown_timeout=_LIFESPAN_TIMEOUT - ): - yield app - - -@pytest.fixture -def register_celery_tasks() -> Callable[[Celery], None]: - """override if tasks are needed""" - - def _(celery_app: Celery) -> None: ... 
- - return _ - - -@pytest.fixture -async def celery_worker_controller( - celery_settings: CelerySettings, - celery_app: Celery, - register_celery_tasks: Callable[[Celery], None], -) -> AsyncIterator[TestWorkController]: - - def _create_app() -> FastAPI: - - return FastAPI( - title="worker_fastapi_app", - description="Test application for celery_library", - version="0.0.0", - ) - - def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: - return partial(on_worker_init, _create_app, celery_settings)(sender, **_kwargs) - - worker_init.connect(_on_worker_init_wrapper) - worker_shutdown.connect(on_worker_shutdown) - - register_celery_tasks(celery_app) - - with start_worker( - celery_app, - pool="threads", - concurrency=1, - loglevel="info", - perform_ping_check=False, - queues="default,cpu_bound", - ) as worker: - yield worker - - -@pytest.fixture -def with_storage_celery_worker( - celery_worker_controller: TestWorkController, -) -> CeleryTaskWorker: - assert isinstance(celery_worker_controller.app, Celery) - return get_celery_worker(celery_worker_controller.app) - - -@pytest.fixture -def user_id(faker: Faker) -> UserID: - return TypeAdapter(UserID).validate_python(faker.pyint(min_value=1, max_value=1000)) - - -@pytest.fixture -def product_name() -> ProductName: - return TypeAdapter(ProductName).validate_python("pytest-product") diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index dd0884c16be..eeb40edae5d 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Callable +from collections.abc import Awaitable, Callable from datetime import timedelta from enum import Enum from typing import Any @@ -24,9 +24,10 @@ JobAbortedError, JobError, ) +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs @@ -39,10 +40,16 @@ pytest_simcore_core_services_selection = [ "rabbit", - "redis", "postgres", ] +pytest_plugins = [ + "pytest_simcore.rabbit_service", + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", + "pytest_simcore.repository_paths", +] + ###### RPC Interface ###### router = RPCRouter() @@ -109,18 +116,16 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture async def rpc_client( - initialized_fast_api: FastAPI, rpc_namespace: RPCNamespace + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], ) -> RabbitMQRPCClient: - client = initialized_fast_api.state.rabbitmq_rpc_client - assert isinstance(client, RabbitMQRPCClient) - await client.register_router(router, rpc_namespace, initialized_fast_api) + client = await rabbitmq_rpc_client("celery_test_client") + await client.register_router(router, STORAGE_RPC_NAMESPACE) return client async def _start_task_via_rpc( client: RabbitMQRPCClient, *, - rpc_namespace: RPCNamespace, rpc_task_name: str, user_id: UserID, product_name: ProductName, @@ -129,7 +134,7 @@ async def _start_task_via_rpc( 
job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) async_job_get = await async_jobs.submit( rabbitmq_rpc_client=client, - rpc_namespace=rpc_namespace, + rpc_namespace=STORAGE_RPC_NAMESPACE, method_name=RPCMethodName(rpc_task_name), job_id_data=job_id_data, **kwargs, @@ -161,7 +166,6 @@ def _(celery_app: Celery) -> None: async def _wait_for_job( rpc_client: RabbitMQRPCClient, *, - rpc_namespace: RPCNamespace, async_job_get: AsyncJobGet, job_id_data: AsyncJobNameData, stop_after: timedelta = timedelta(seconds=5), @@ -175,8 +179,8 @@ async def _wait_for_job( ): with attempt: result = await async_jobs.status( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -205,7 +209,6 @@ async def _wait_for_job( ) async def test_async_jobs_workflow( rpc_client: RabbitMQRPCClient, - rpc_namespace: RPCNamespace, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, @@ -214,7 +217,6 @@ async def test_async_jobs_workflow( ): async_job_get, job_id_data = await _start_task_via_rpc( rpc_client, - rpc_namespace=rpc_namespace, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -224,7 +226,7 @@ async def test_async_jobs_workflow( jobs = await async_jobs.list_jobs( rpc_client, - rpc_namespace=rpc_namespace, + rpc_namespace=STORAGE_RPC_NAMESPACE, filter_="", # currently not used job_id_data=job_id_data, ) @@ -232,14 +234,13 @@ async def test_async_jobs_workflow( await _wait_for_job( rpc_client, - rpc_namespace=rpc_namespace, async_job_get=async_job_get, job_id_data=job_id_data, ) async_job_result = await async_jobs.result( rpc_client, - rpc_namespace=rpc_namespace, + rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -254,8 +255,7 @@ async def test_async_jobs_workflow( ) async def test_async_jobs_cancel( # initialized_app: FastAPI, - register_routes: None, - rpc_namespace: RPCNamespace, + register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, @@ -263,8 +263,7 @@ async def test_async_jobs_cancel( exposed_rpc_start: str, ): async_job_get, job_id_data = await _start_task_via_rpc( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -273,22 +272,21 @@ async def test_async_jobs_cancel( ) await async_jobs.cancel( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, ) await _wait_for_job( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, async_job_get=async_job_get, job_id_data=job_id_data, ) jobs = await async_jobs.list_jobs( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, filter_="", # currently not used job_id_data=job_id_data, ) @@ -296,8 +294,8 @@ async def test_async_jobs_cancel( with pytest.raises(JobAbortedError): await async_jobs.result( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, ) @@ -322,8 +320,7 @@ async def test_async_jobs_cancel( ) async def test_async_jobs_raises( # initialized_app: FastAPI, - register_routes: None, - rpc_namespace: RPCNamespace, 
+ register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, @@ -332,8 +329,7 @@ async def test_async_jobs_raises( error: Exception, ): async_job_get, job_id_data = await _start_task_via_rpc( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -342,8 +338,7 @@ async def test_async_jobs_raises( ) await _wait_for_job( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, async_job_get=async_job_get, job_id_data=job_id_data, stop_after=timedelta(minutes=1), @@ -351,8 +346,8 @@ async def test_async_jobs_raises( with pytest.raises(JobError) as exc: await async_jobs.result( - rpc_client, - rpc_namespace=rpc_namespace, + storage_rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, ) diff --git a/packages/celery-library/src/celery_library/routes/rpc.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py similarity index 100% rename from packages/celery-library/src/celery_library/routes/rpc.py rename to services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py From 738ad556a81aa6f20b8017e582898a004d6c39ed Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:17:12 +0200 Subject: [PATCH 49/91] Revert "create fixture for creating rabbitmq rpc server" This reverts commit c8d9b01cc095dd10f3f1383c2116a3da07655610. --- packages/celery-library/tests/unit/test_async_jobs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py index eeb40edae5d..4122a0fd090 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -116,9 +116,9 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture async def rpc_client( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], + rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], ) -> RabbitMQRPCClient: - client = await rabbitmq_rpc_client("celery_test_client") + client = await rpc_client("celery_test_client") await client.register_router(router, STORAGE_RPC_NAMESPACE) return client From 2e05671704df022b3b8a8f6344b58aed4282c458 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:17:29 +0200 Subject: [PATCH 50/91] Revert "start adding tests to celery module" This reverts commit 58e7fa1e985c79e4c7535dc104354f38f17c0a42. 
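Net effect of this revert: the async-jobs tests move back under
services/storage and talk to the Celery worker through the storage RPC
server and STORAGE_RPC_NAMESPACE, instead of a namespace owned by the
celery-library test suite. The snippet below is only a rough sketch of
that wiring, assembled from names visible in the diff further down
(get_rabbitmq_rpc_server, async_jobs.submit/status/result); it is an
illustration, not a copy of the actual test module, and the
exposed_rpc_start argument stands for whichever RPC method the test
registers.

    from fastapi import FastAPI
    from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData
    from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE
    from models_library.rabbitmq_basic_types import RPCMethodName
    from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter
    from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs
    from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server

    router = RPCRouter()  # the test module decorates its RPC handlers with this router


    async def _async_jobs_roundtrip_sketch(
        initialized_app: FastAPI,
        storage_rabbitmq_rpc_client: RabbitMQRPCClient,
        user_id: int,
        product_name: str,
        exposed_rpc_start: str,
    ) -> None:
        # expose the test router on the storage app's RPC server
        rpc_server = get_rabbitmq_rpc_server(initialized_app)
        await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, initialized_app)

        # submit a job over RPC, then query its status and fetch its result
        job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name)
        async_job_get = await async_jobs.submit(
            rabbitmq_rpc_client=storage_rabbitmq_rpc_client,
            rpc_namespace=STORAGE_RPC_NAMESPACE,
            method_name=RPCMethodName(exposed_rpc_start),
            job_id_data=job_id_data,
        )
        await async_jobs.status(
            storage_rabbitmq_rpc_client,
            rpc_namespace=STORAGE_RPC_NAMESPACE,
            job_id=async_job_get.job_id,
            job_id_data=job_id_data,
        )
        # in the real test a tenacity retry loop polls the status until the
        # job completes before asking for the result
        await async_jobs.result(
            storage_rabbitmq_rpc_client,
            rpc_namespace=STORAGE_RPC_NAMESPACE,
            job_id=async_job_get.job_id,
            job_id_data=job_id_data,
        )
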
--- .../celery-library/requirements/_base.txt | 27 ++++++--------- packages/celery-library/requirements/_test.in | 1 - .../celery-library/requirements/_test.txt | 32 +++++------------ .../celery-library/requirements/_tools.txt | 8 ++--- .../celery-library/tests/unit/conftest.py | 0 .../storage}/tests/unit/test_async_jobs.py | 34 ++++++++----------- .../tests/unit/test_modules_celery.py | 0 7 files changed, 37 insertions(+), 65 deletions(-) delete mode 100644 packages/celery-library/tests/unit/conftest.py rename {packages/celery-library => services/storage}/tests/unit/test_async_jobs.py (93%) rename {packages/celery-library => services/storage}/tests/unit/test_modules_celery.py (100%) diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index d52f0e54eb9..af8a4cd518a 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.12.2 +aiohttp==3.11.18 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -70,7 +70,7 @@ certifi==2025.4.26 # requests charset-normalizer==3.4.2 # via requests -click==8.2.1 +click==8.1.8 # via # celery # click-didyoumean @@ -97,7 +97,7 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -faststream==0.5.42 +faststream==0.5.41 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.6.0 # via @@ -117,7 +117,7 @@ idna==3.10 # yarl importlib-metadata==8.6.1 # via opentelemetry-api -jsonschema==4.24.0 +jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in @@ -129,7 +129,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.4 +multidict==6.4.3 # via # aiohttp # yarl @@ -139,7 +139,6 @@ opentelemetry-api==1.33.1 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation - # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests @@ -157,12 +156,9 @@ opentelemetry-exporter-otlp-proto-http==1.33.1 # via opentelemetry-exporter-otlp opentelemetry-instrumentation==0.54b1 # via - # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.54b1 - # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-logging==0.54b1 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.54b1 @@ -227,7 +223,7 @@ psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.5 +pydantic==2.11.4 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -287,7 +283,7 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 # via rich -pyinstrument==5.0.2 +pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in pyjwt==2.9.0 # via redis @@ -351,7 +347,7 @@ rich==14.0.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.25.1 +rpds-py==0.25.0 # via # jsonschema # referencing @@ -371,7 +367,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.16.0 +typer==0.15.4 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -389,7 +385,7 @@ typing-extensions==4.13.2 # pydantic-extra-types # typer # typing-inspection -typing-inspection==0.4.1 +typing-inspection==0.4.0 # via pydantic tzdata==2025.2 # via kombu @@ -419,7 +415,6 @@ wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation - # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis yarl==1.20.0 # via @@ -427,5 +422,5 @@ yarl==1.20.0 # aio-pika # aiohttp # aiormq -zipp==3.22.0 +zipp==3.21.0 # via importlib-metadata diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index 4b93bbbdd78..fdf47680fb2 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -12,7 +12,6 @@ coverage faker fastapi -docker httpx pint pytest diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index bf475d637ea..1bf99eeff9f 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -13,17 +13,10 @@ certifi==2025.4.26 # -c requirements/_base.txt # httpcore # httpx - # requests -charset-normalizer==3.4.2 - # via - # -c requirements/_base.txt - # requests -coverage==7.8.2 +coverage==7.8.0 # via # -r requirements/_test.in # pytest-cov -docker==7.1.0 - # via -r requirements/_test.in faker==37.3.0 # via -r requirements/_test.in fastapi==0.115.12 @@ -47,7 +40,6 @@ idna==3.10 # -c requirements/_base.txt # anyio # httpx - # requests iniconfig==2.1.0 # via pytest packaging==25.0 @@ -65,7 +57,7 @@ pprintpp==0.4.0 # via pytest-icdiff py-cpuinfo==9.0.0 # via pytest-benchmark -pydantic==2.11.5 +pydantic==2.11.4 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -84,8 +76,10 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==1.0.0 - # via -r requirements/_test.in +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.1.1 @@ -94,7 +88,7 @@ pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.1 +pytest-mock==3.14.0 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -109,10 +103,6 
@@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in -requests==2.32.3 - # via - # -c requirements/_base.txt - # docker sniffio==1.3.1 # via # -c requirements/_base.txt @@ -134,7 +124,7 @@ typing-extensions==4.13.2 # pydantic # pydantic-core # typing-inspection -typing-inspection==0.4.1 +typing-inspection==0.4.0 # via # -c requirements/_base.txt # pydantic @@ -142,9 +132,3 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker -urllib3==2.4.0 - # via - # -c requirements/../../../requirements/constraints.txt - # -c requirements/_base.txt - # docker - # requests diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index d90c0074c58..e0213f1353c 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.2.1 +click==8.1.8 # via # -c requirements/_base.txt # black @@ -19,7 +19,7 @@ distlib==0.3.9 # via virtualenv filelock==3.18.0 # via virtualenv -identify==2.6.12 +identify==2.6.10 # via pre-commit isort==6.0.1 # via @@ -67,9 +67,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_test.txt # pre-commit -ruff==0.11.11 +ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.9.0 +setuptools==80.7.1 # via pip-tools tomlkit==0.13.2 # via pylint diff --git a/packages/celery-library/tests/unit/conftest.py b/packages/celery-library/tests/unit/conftest.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/packages/celery-library/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py similarity index 93% rename from packages/celery-library/tests/unit/test_async_jobs.py rename to services/storage/tests/unit/test_async_jobs.py index 4122a0fd090..004c5160d54 100644 --- a/packages/celery-library/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -3,7 +3,7 @@ import asyncio import pickle -from collections.abc import Awaitable, Callable +from collections.abc import Callable from datetime import timedelta from enum import Enum from typing import Any @@ -31,6 +31,7 @@ from models_library.users import UserID from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs +from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server from tenacity import ( AsyncRetrying, retry_if_exception_type, @@ -43,12 +44,6 @@ "postgres", ] -pytest_plugins = [ - "pytest_simcore.rabbit_service", - "pytest_simcore.docker_compose", - "pytest_simcore.docker_swarm", - "pytest_simcore.repository_paths", -] ###### RPC Interface ###### router = RPCRouter() @@ -115,12 +110,9 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture -async def rpc_client( - rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], -) -> RabbitMQRPCClient: - client = await rpc_client("celery_test_client") - await client.register_router(router, STORAGE_RPC_NAMESPACE) - return client +async def register_rpc_routes(initialized_app: FastAPI) -> None: + rpc_server = get_rabbitmq_rpc_server(initialized_app) + await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, initialized_app) async def _start_task_via_rpc( @@ -208,7 +200,9 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( 
- rpc_client: RabbitMQRPCClient, + initialized_app: FastAPI, + register_rpc_routes: None, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, user_id: UserID, product_name: ProductName, @@ -216,7 +210,7 @@ async def test_async_jobs_workflow( payload: Any, ): async_job_get, job_id_data = await _start_task_via_rpc( - rpc_client, + storage_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -225,7 +219,7 @@ async def test_async_jobs_workflow( ) jobs = await async_jobs.list_jobs( - rpc_client, + storage_rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, filter_="", # currently not used job_id_data=job_id_data, @@ -233,13 +227,13 @@ async def test_async_jobs_workflow( assert len(jobs) > 0 await _wait_for_job( - rpc_client, + storage_rabbitmq_rpc_client, async_job_get=async_job_get, job_id_data=job_id_data, ) async_job_result = await async_jobs.result( - rpc_client, + storage_rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, @@ -254,7 +248,7 @@ async def test_async_jobs_workflow( ], ) async def test_async_jobs_cancel( - # initialized_app: FastAPI, + initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, @@ -319,7 +313,7 @@ async def test_async_jobs_cancel( ], ) async def test_async_jobs_raises( - # initialized_app: FastAPI, + initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, with_storage_celery_worker: CeleryTaskWorker, diff --git a/packages/celery-library/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py similarity index 100% rename from packages/celery-library/tests/unit/test_modules_celery.py rename to services/storage/tests/unit/test_modules_celery.py From f85f878f32dcf3c618f51f4988e2af974ac0c923 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 5 Jun 2025 10:26:04 +0200 Subject: [PATCH 51/91] fix after reverts --- services/storage/tests/unit/test_async_jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 004c5160d54..1f900ff2c6d 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -171,7 +171,7 @@ async def _wait_for_job( ): with attempt: result = await async_jobs.status( - storage_rabbitmq_rpc_client, + rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.job_id, job_id_data=job_id_data, From f7a6279225b116463eacc16a1e567238d3daf52e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 6 Jun 2025 10:31:40 +0200 Subject: [PATCH 52/91] unify interfaces --- .../src/celery_library/__init__.py | 8 ++++---- .../src/celery_library/signals.py | 6 ++++-- .../{client.py => task_manager.py} | 10 ++++++++-- .../src/celery_library/utils.py | 8 ++++---- .../src/celery_library/worker.py | 19 ------------------- .../api/rest/_files.py | 6 +++--- .../api/rest/dependencies/celery.py | 4 ++-- services/storage/tests/conftest.py | 8 ++++---- .../storage/tests/unit/test_async_jobs.py | 11 +++++------ .../storage/tests/unit/test_handlers_files.py | 4 ++-- .../storage/tests/unit/test_modules_celery.py | 15 +++++++-------- .../tests/unit/test_rpc_handlers_paths.py | 8 ++++---- .../unit/test_rpc_handlers_simcore_s3.py | 8 ++++---- 13 files changed, 51 insertions(+), 64 
deletions(-) rename packages/celery-library/src/celery_library/{client.py => task_manager.py} (95%) delete mode 100644 packages/celery-library/src/celery_library/worker.py diff --git a/packages/celery-library/src/celery_library/__init__.py b/packages/celery-library/src/celery_library/__init__.py index 28704063292..818f272a73e 100644 --- a/packages/celery-library/src/celery_library/__init__.py +++ b/packages/celery-library/src/celery_library/__init__.py @@ -7,8 +7,8 @@ from settings_library.redis import RedisDatabase from .backends._redis import RedisTaskInfoStore -from .client import CeleryTaskClient from .common import create_app +from .task_manager import CeleryTaskManager from .types import register_celery_types _logger = logging.getLogger(__name__) @@ -24,7 +24,7 @@ async def on_startup() -> None: client_name=f"{app.title}.celery_tasks", ) - app.state.celery_client = CeleryTaskClient( + app.state.celery_client = CeleryTaskManager( celery_app, celery_settings, RedisTaskInfoStore(redis_client_sdk), @@ -35,10 +35,10 @@ async def on_startup() -> None: app.add_event_handler("startup", on_startup) -def get_celery_client(app: FastAPI) -> CeleryTaskClient: +def get_celery_client(app: FastAPI) -> CeleryTaskManager: assert hasattr(app.state, "celery_client") # nosec celery_client = app.state.celery_client - assert isinstance(celery_client, CeleryTaskClient) + assert isinstance(celery_client, CeleryTaskManager) return celery_client diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 2fe822749a5..59ffdb1ec58 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -16,12 +16,12 @@ from . import set_event_loop from .backends._redis import RedisTaskInfoStore +from .task_manager import CeleryTaskManager from .utils import ( get_fastapi_app, set_celery_worker, set_fastapi_app, ) -from .worker import CeleryTaskWorker _logger = logging.getLogger(__name__) @@ -57,7 +57,9 @@ async def setup_task_worker(): assert isinstance(sender.app, Celery) # nosec set_celery_worker( sender.app, - CeleryTaskWorker( + CeleryTaskManager( + sender.app, + celery_settings, RedisTaskInfoStore(redis_client_sdk), ), ) diff --git a/packages/celery-library/src/celery_library/client.py b/packages/celery-library/src/celery_library/task_manager.py similarity index 95% rename from packages/celery-library/src/celery_library/client.py rename to packages/celery-library/src/celery_library/task_manager.py index 7378b7d8516..977c66bcdf3 100644 --- a/packages/celery-library/src/celery_library/client.py +++ b/packages/celery-library/src/celery_library/task_manager.py @@ -31,8 +31,8 @@ _MAX_PROGRESS_VALUE = 1.0 -@dataclass -class CeleryTaskClient: +@dataclass(frozen=True) +class CeleryTaskManager: _celery_app: Celery _celery_settings: CelerySettings _task_info_store: TaskInfoStore @@ -155,3 +155,9 @@ async def list_tasks(self, task_context: TaskContext) -> list[Task]: msg=f"Listing tasks: {task_context=}", ): return await self._task_info_store.list_tasks(task_context) + + async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: + await self._task_info_store.set_task_progress( + task_id=task_id, + report=report, + ) diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py index d09c1a1ce41..fbf30030627 100644 --- a/packages/celery-library/src/celery_library/utils.py +++ 
b/packages/celery-library/src/celery_library/utils.py @@ -1,19 +1,19 @@ from celery import Celery # type: ignore[import-untyped] from fastapi import FastAPI -from .worker import CeleryTaskWorker +from .task_manager import CeleryTaskManager _WORKER_KEY = "celery_worker" _FASTAPI_APP_KEY = "fastapi_app" -def set_celery_worker(celery_app: Celery, worker: CeleryTaskWorker) -> None: +def set_celery_worker(celery_app: Celery, worker: CeleryTaskManager) -> None: celery_app.conf[_WORKER_KEY] = worker -def get_celery_worker(celery_app: Celery) -> CeleryTaskWorker: +def get_celery_worker(celery_app: Celery) -> CeleryTaskManager: worker = celery_app.conf[_WORKER_KEY] - assert isinstance(worker, CeleryTaskWorker) + assert isinstance(worker, CeleryTaskManager) return worker diff --git a/packages/celery-library/src/celery_library/worker.py b/packages/celery-library/src/celery_library/worker.py deleted file mode 100644 index 1d5b603b7a5..00000000000 --- a/packages/celery-library/src/celery_library/worker.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging -from dataclasses import dataclass - -from models_library.progress_bar import ProgressReport - -from .models import TaskID, TaskInfoStore - -_logger = logging.getLogger(__name__) - - -@dataclass -class CeleryTaskWorker: - _task_info_store: TaskInfoStore - - async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: - await self._task_info_store.set_task_progress( - task_id=task_id, - report=report, - ) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 83f41bf76dc..59415bf70df 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -2,8 +2,8 @@ from typing import Annotated, Final, cast from urllib.parse import quote -from celery_library.client import CeleryTaskClient from celery_library.models import TaskMetadata, TaskUUID +from celery_library.task_manager import CeleryTaskManager from fastapi import APIRouter, Depends, Header, Request from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData from models_library.api_schemas_storage.storage_schemas import ( @@ -270,7 +270,7 @@ async def abort_upload_file( status_code=status.HTTP_202_ACCEPTED, ) async def complete_upload_file( - celery_client: Annotated[CeleryTaskClient, Depends(get_celery_client)], + celery_client: Annotated[CeleryTaskManager, Depends(get_celery_client)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, @@ -326,7 +326,7 @@ async def complete_upload_file( response_model=Envelope[FileUploadCompleteFutureResponse], ) async def is_completed_upload_file( - celery_client: Annotated[CeleryTaskClient, Depends(get_celery_client)], + celery_client: Annotated[CeleryTaskManager, Depends(get_celery_client)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py index 277ef83d589..7f93f519b07 100644 --- a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py +++ b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py @@ -1,12 +1,12 @@ from typing import Annotated from celery_library import get_celery_client as _get_celery_client_from_app -from 
celery_library.client import CeleryTaskClient +from celery_library.task_manager import CeleryTaskManager from fastapi import Depends, FastAPI from servicelib.fastapi.dependencies import get_app def get_celery_client( app: Annotated[FastAPI, Depends(get_app)], -) -> CeleryTaskClient: +) -> CeleryTaskManager: return _get_celery_client_from_app(app) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 94b644f22fb..c238417648c 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -27,8 +27,8 @@ from celery.signals import worker_init, worker_shutdown from celery.worker.worker import WorkController from celery_library.signals import on_worker_init, on_worker_shutdown +from celery_library.task_manager import CeleryTaskManager from celery_library.utils import get_celery_worker -from celery_library.worker import CeleryTaskWorker from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI @@ -364,7 +364,7 @@ def upload_file( create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], create_file_of_size: Callable[[ByteSize, str | None], Path], create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ) -> Callable[ [ByteSize, str, SimcoreS3FileID | None], Awaitable[tuple[Path, SimcoreS3FileID]] ]: @@ -479,7 +479,7 @@ async def create_empty_directory( create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], client: httpx.AsyncClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ) -> Callable[[str, ProjectID, NodeID], Awaitable[SimcoreS3FileID]]: async def _directory_creator( dir_name: str, project_id: ProjectID, node_id: NodeID @@ -1029,7 +1029,7 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: @pytest.fixture def with_storage_celery_worker( with_storage_celery_worker_controller: TestWorkController, -) -> CeleryTaskWorker: +) -> CeleryTaskManager: assert isinstance(with_storage_celery_worker_controller.app, Celery) return get_celery_worker(with_storage_celery_worker_controller.app) diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 1f900ff2c6d..9a2c60cd596 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -11,10 +11,9 @@ import pytest from celery import Celery, Task from celery_library import get_celery_client -from celery_library.client import TaskMetadata -from celery_library.models import TaskID +from celery_library.models import TaskID, TaskMetadata from celery_library.task import register_task -from celery_library.worker import CeleryTaskWorker +from celery_library.task_manager import CeleryTaskManager from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -203,7 +202,7 @@ async def test_async_jobs_workflow( initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -251,7 +250,7 @@ async def test_async_jobs_cancel( initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: 
RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -316,7 +315,7 @@ async def test_async_jobs_raises( initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index f4149b39672..10073b92770 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -23,7 +23,7 @@ from aiohttp import ClientSession from aws_library.s3 import S3KeyNotFoundError, S3ObjectKey, SimcoreS3API from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE -from celery_library.worker import CeleryTaskWorker +from celery_library.task_manager import CeleryTaskManager from faker import Faker from fastapi import FastAPI from models_library.api_schemas_storage.storage_schemas import ( @@ -683,7 +683,7 @@ async def test_upload_real_file_with_s3_client( node_id: NodeID, faker: Faker, s3_client: S3Client, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ): file_size = TypeAdapter(ByteSize).validate_python("500Mib") file_name = faker.file_name() diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index 5d57838bb9d..866f8f74481 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ b/services/storage/tests/unit/test_modules_celery.py @@ -14,7 +14,6 @@ from celery import Celery, Task from celery.contrib.abortable import AbortableTask from celery_library import get_celery_client, get_event_loop -from celery_library.client import CeleryTaskClient from celery_library.errors import TransferrableCeleryError from celery_library.models import ( TaskContext, @@ -26,8 +25,8 @@ AbortableAsyncResult, register_task, ) +from celery_library.task_manager import CeleryTaskManager from celery_library.utils import get_celery_worker, get_fastapi_app -from celery_library.worker import CeleryTaskWorker from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI from models_library.progress_bar import ProgressReport @@ -43,8 +42,8 @@ @pytest.fixture def celery_client( initialized_app: FastAPI, - with_storage_celery_worker: CeleryTaskWorker, -) -> CeleryTaskClient: + with_storage_celery_worker: CeleryTaskManager, +) -> CeleryTaskManager: return get_celery_client(initialized_app) @@ -110,7 +109,7 @@ def _(celery_app: Celery) -> None: async def test_submitting_task_calling_async_function_results_with_success_state( - celery_client: CeleryTaskClient, + celery_client: CeleryTaskManager, ): task_context = TaskContext(user_id=42) @@ -140,7 +139,7 @@ async def test_submitting_task_calling_async_function_results_with_success_state async def test_submitting_task_with_failure_results_with_error( - celery_client: CeleryTaskClient, + celery_client: CeleryTaskManager, ): task_context = TaskContext(user_id=42) @@ -166,7 +165,7 @@ async def test_submitting_task_with_failure_results_with_error( async def test_cancelling_a_running_task_aborts_and_deletes( - celery_client: CeleryTaskClient, + celery_client: CeleryTaskManager, ): task_context = TaskContext(user_id=42) @@ -196,7 +195,7 @@ async 
def test_cancelling_a_running_task_aborts_and_deletes( async def test_listing_task_uuids_contains_submitted_task( - celery_client: CeleryTaskClient, + celery_client: CeleryTaskManager, ): task_context = TaskContext(user_id=42) diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index c0f45284de2..afe5b62ddd8 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -13,7 +13,7 @@ from typing import Any, TypeAlias import pytest -from celery_library.worker import CeleryTaskWorker +from celery_library.task_manager import CeleryTaskManager from faker import Faker from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( @@ -265,7 +265,7 @@ async def test_path_compute_size_inexistent_path( mock_celery_app: None, initialized_app: FastAPI, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, location_id: LocationID, user_id: UserID, faker: Faker, @@ -294,7 +294,7 @@ async def test_delete_paths_empty_set( user_id: UserID, location_id: LocationID, product_name: ProductName, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ): await _assert_delete_paths( storage_rabbitmq_rpc_client, @@ -333,7 +333,7 @@ async def test_delete_paths( ], project_params: ProjectWithFilesParams, product_name: ProductName, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ): assert ( len(project_params.allowed_file_sizes) == 1 diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 135b3b49cfd..4f52884179b 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -20,7 +20,7 @@ import pytest import sqlalchemy as sa from celery.contrib.testing.worker import TestWorkController -from celery_library.worker import CeleryTaskWorker +from celery_library.task_manager import CeleryTaskManager from faker import Faker from fastapi import FastAPI from fastapi.encoders import jsonable_encoder @@ -113,7 +113,7 @@ async def test_copy_folders_from_non_existing_project( product_name: ProductName, create_project: Callable[..., Awaitable[dict[str, Any]]], faker: Faker, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ): src_project = await create_project() incorrect_src_project = deepcopy(src_project) @@ -154,7 +154,7 @@ async def test_copy_folders_from_empty_project( product_name: ProductName, create_project: Callable[[], Awaitable[dict[str, Any]]], sqlalchemy_async_engine: AsyncEngine, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: CeleryTaskManager, ): # we will copy from src to dst src_project = await create_project() @@ -547,7 +547,7 @@ async def _request_start_export_data( @pytest.fixture def task_progress_spy(mocker: MockerFixture) -> Mock: - return mocker.spy(CeleryTaskWorker, "set_task_progress") + return mocker.spy(CeleryTaskManager, "set_task_progress") @pytest.mark.parametrize( From 64a0c4e77b7368bf078456ceef5af0e41213fb54 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 6 Jun 2025 10:42:05 +0200 Subject: [PATCH 53/91] remove duplicate --- packages/celery-library/src/celery_library/models.py | 3 --- 
packages/celery-library/src/celery_library/task.py | 6 +++--- .../src/simcore_service_storage/api/_worker_tasks/_files.py | 4 ++-- .../src/simcore_service_storage/api/_worker_tasks/_paths.py | 6 +++--- .../api/_worker_tasks/_simcore_s3.py | 6 +++--- 5 files changed, 11 insertions(+), 14 deletions(-) diff --git a/packages/celery-library/src/celery_library/models.py b/packages/celery-library/src/celery_library/models.py index 8b19d124ff1..56901b33f7d 100644 --- a/packages/celery-library/src/celery_library/models.py +++ b/packages/celery-library/src/celery_library/models.py @@ -87,6 +87,3 @@ class TaskStatus(BaseModel): @property def is_done(self) -> bool: return self.task_state in _TASK_DONE - - -TaskId: TypeAlias = str diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index aa3358d7c4a..dc0e8454bea 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -17,7 +17,7 @@ from . import get_event_loop from .errors import encore_celery_transferrable_error -from .models import TaskID, TaskId +from .models import TaskID from .utils import get_fastapi_app _logger = logging.getLogger(__name__) @@ -145,7 +145,7 @@ def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: @overload def register_task( app: Celery, - fn: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + fn: Callable[Concatenate[AbortableTask, TaskID, P], Coroutine[Any, Any, R]], task_name: str | None = None, timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, @@ -169,7 +169,7 @@ def register_task( def register_task( # type: ignore[misc] app: Celery, fn: ( - Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]] + Callable[Concatenate[AbortableTask, TaskID, P], Coroutine[Any, Any, R]] | Callable[Concatenate[AbortableTask, P], R] ), task_name: str | None = None, diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index 2930656d825..15837f54e4a 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -1,7 +1,7 @@ import logging from celery import Task # type: ignore[import-untyped] -from celery_library.models import TaskId +from celery_library.models import TaskID from celery_library.utils import get_fastapi_app from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, @@ -18,7 +18,7 @@ async def complete_upload_file( task: Task, - task_id: TaskId, + task_id: TaskID, user_id: UserID, location_id: LocationID, file_id: StorageFileID, diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index b61448d52ea..6028112d9a4 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -2,7 +2,7 @@ from pathlib import Path from celery import Task # type: ignore[import-untyped] -from celery_library.models import TaskId +from celery_library.models import TaskID from celery_library.utils import get_fastapi_app from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID @@ -17,7 +17,7 @@ async def 
compute_path_size( - task: Task, task_id: TaskId, user_id: UserID, location_id: LocationID, path: Path + task: Task, task_id: TaskID, user_id: UserID, location_id: LocationID, path: Path ) -> ByteSize: assert task_id # nosec with log_context( @@ -31,7 +31,7 @@ async def compute_path_size( async def delete_paths( task: Task, - task_id: TaskId, + task_id: TaskID, user_id: UserID, location_id: LocationID, paths: set[Path], diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 9a1c7d6502d..af7b8ac259f 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -4,7 +4,7 @@ from aws_library.s3._models import S3ObjectKey from celery import Task # type: ignore[import-untyped] -from celery_library.models import TaskID, TaskId +from celery_library.models import TaskID from celery_library.utils import get_celery_worker, get_fastapi_app from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport @@ -22,7 +22,7 @@ async def _task_progress_cb( - task: Task, task_id: TaskId, report: ProgressReport + task: Task, task_id: TaskID, report: ProgressReport ) -> None: worker = get_celery_worker(task.app) assert task.name # nosec @@ -33,7 +33,7 @@ async def _task_progress_cb( async def deep_copy_files_from_project( - task: Task, task_id: TaskId, user_id: UserID, body: FoldersBody + task: Task, task_id: TaskID, user_id: UserID, body: FoldersBody ) -> dict[str, Any]: with log_context( _logger, From 5d3425e6d083e7f89150a94d3406db3d037cd6d9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 6 Jun 2025 11:04:55 +0200 Subject: [PATCH 54/91] rename --- packages/celery-library/src/celery_library/signals.py | 4 ++-- packages/celery-library/src/celery_library/utils.py | 4 ++-- .../api/_worker_tasks/_simcore_s3.py | 6 +++--- services/storage/tests/conftest.py | 4 ++-- services/storage/tests/unit/test_modules_celery.py | 4 ++-- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 59ffdb1ec58..fe19a473209 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -19,8 +19,8 @@ from .task_manager import CeleryTaskManager from .utils import ( get_fastapi_app, - set_celery_worker, set_fastapi_app, + set_task_manager, ) _logger = logging.getLogger(__name__) @@ -55,7 +55,7 @@ async def setup_task_worker(): assert sender.app # nosec assert isinstance(sender.app, Celery) # nosec - set_celery_worker( + set_task_manager( sender.app, CeleryTaskManager( sender.app, diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py index fbf30030627..f840004e533 100644 --- a/packages/celery-library/src/celery_library/utils.py +++ b/packages/celery-library/src/celery_library/utils.py @@ -7,11 +7,11 @@ _FASTAPI_APP_KEY = "fastapi_app" -def set_celery_worker(celery_app: Celery, worker: CeleryTaskManager) -> None: +def set_task_manager(celery_app: Celery, worker: CeleryTaskManager) -> None: celery_app.conf[_WORKER_KEY] = worker -def get_celery_worker(celery_app: Celery) -> CeleryTaskManager: +def get_task_manager(celery_app: Celery) -> CeleryTaskManager: worker = 
celery_app.conf[_WORKER_KEY] assert isinstance(worker, CeleryTaskManager) return worker diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index af7b8ac259f..e06dfd884c6 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -5,7 +5,7 @@ from aws_library.s3._models import S3ObjectKey from celery import Task # type: ignore[import-untyped] from celery_library.models import TaskID -from celery_library.utils import get_celery_worker, get_fastapi_app +from celery_library.utils import get_fastapi_app, get_task_manager from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport from models_library.progress_bar import ProgressReport @@ -24,7 +24,7 @@ async def _task_progress_cb( task: Task, task_id: TaskID, report: ProgressReport ) -> None: - worker = get_celery_worker(task.app) + worker = get_task_manager(task.app) assert task.name # nosec await worker.set_task_progress( task_id=task_id, @@ -87,7 +87,7 @@ async def export_data( async def _progress_cb(report: ProgressReport) -> None: assert task.name # nosec - await get_celery_worker(task.app).set_task_progress(task_id, report) + await get_task_manager(task.app).set_task_progress(task_id, report) _logger.debug("'%s' progress %s", task_id, report.percent_value) async with ProgressBarData( diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index c238417648c..9cbede09968 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -28,7 +28,7 @@ from celery.worker.worker import WorkController from celery_library.signals import on_worker_init, on_worker_shutdown from celery_library.task_manager import CeleryTaskManager -from celery_library.utils import get_celery_worker +from celery_library.utils import get_task_manager from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI @@ -1031,7 +1031,7 @@ def with_storage_celery_worker( with_storage_celery_worker_controller: TestWorkController, ) -> CeleryTaskManager: assert isinstance(with_storage_celery_worker_controller.app, Celery) - return get_celery_worker(with_storage_celery_worker_controller.app) + return get_task_manager(with_storage_celery_worker_controller.app) @pytest.fixture diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index 866f8f74481..d9f56978b58 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ b/services/storage/tests/unit/test_modules_celery.py @@ -26,7 +26,7 @@ register_task, ) from celery_library.task_manager import CeleryTaskManager -from celery_library.utils import get_celery_worker, get_fastapi_app +from celery_library.utils import get_fastapi_app, get_task_manager from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI from models_library.progress_bar import ProgressReport @@ -50,7 +50,7 @@ def celery_client( async def _fake_file_processor( celery_app: Celery, task_name: str, task_id: str, files: list[str] ) -> str: - worker = get_celery_worker(celery_app) + worker = get_task_manager(celery_app) def sleep_for(seconds: float) -> None: time.sleep(seconds) From 3d0bbb549e257d1d07cfe46dc6378495a419812a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo 
Date: Fri, 6 Jun 2025 16:18:33 +0200 Subject: [PATCH 55/91] move method --- .../src/celery_library/__init__.py | 19 ++---------- .../src/celery_library/common.py | 30 ++++++++++++++----- .../src/celery_library/signals.py | 19 +++--------- 3 files changed, 30 insertions(+), 38 deletions(-) diff --git a/packages/celery-library/src/celery_library/__init__.py b/packages/celery-library/src/celery_library/__init__.py index 818f272a73e..bb7e467d1cf 100644 --- a/packages/celery-library/src/celery_library/__init__.py +++ b/packages/celery-library/src/celery_library/__init__.py @@ -2,12 +2,9 @@ from asyncio import AbstractEventLoop from fastapi import FastAPI -from servicelib.redis._client import RedisClientSDK from settings_library.celery import CelerySettings -from settings_library.redis import RedisDatabase -from .backends._redis import RedisTaskInfoStore -from .common import create_app +from .common import create_app, create_task_manager from .task_manager import CeleryTaskManager from .types import register_celery_types @@ -16,18 +13,8 @@ def setup_celery_client(app: FastAPI, celery_settings: CelerySettings) -> None: async def on_startup() -> None: - celery_app = create_app(celery_settings) - redis_client_sdk = RedisClientSDK( - celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS - ), - client_name=f"{app.title}.celery_tasks", - ) - - app.state.celery_client = CeleryTaskManager( - celery_app, - celery_settings, - RedisTaskInfoStore(redis_client_sdk), + app.state.celery_client = create_task_manager( + create_app(celery_settings), celery_settings ) register_celery_types() diff --git a/packages/celery-library/src/celery_library/common.py b/packages/celery-library/src/celery_library/common.py index 545bb98f682..4b38f0aacd1 100644 --- a/packages/celery-library/src/celery_library/common.py +++ b/packages/celery-library/src/celery_library/common.py @@ -1,12 +1,13 @@ -import logging import ssl from typing import Any from celery import Celery # type: ignore[import-untyped] +from servicelib.redis import RedisClientSDK from settings_library.celery import CelerySettings from settings_library.redis import RedisDatabase -_logger = logging.getLogger(__name__) +from .backends._redis import RedisTaskInfoStore +from .task_manager import CeleryTaskManager def _celery_configure(celery_settings: CelerySettings) -> dict[str, Any]: @@ -25,13 +26,28 @@ def _celery_configure(celery_settings: CelerySettings) -> dict[str, Any]: return base_config -def create_app(celery_settings: CelerySettings) -> Celery: - assert celery_settings +def create_app(settings: CelerySettings) -> Celery: + assert settings return Celery( - broker=celery_settings.CELERY_RABBIT_BROKER.dsn, - backend=celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + broker=settings.CELERY_RABBIT_BROKER.dsn, + backend=settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( RedisDatabase.CELERY_TASKS, ), - **_celery_configure(celery_settings), + **_celery_configure(settings), + ) + + +def create_task_manager(app: Celery, settings: CelerySettings) -> CeleryTaskManager: + redis_client_sdk = RedisClientSDK( + settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS + ), + client_name="celery_tasks", + ) + + return CeleryTaskManager( + app, + settings, + RedisTaskInfoStore(redis_client_sdk), ) diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index fe19a473209..245322217f1 100644 --- 
a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -10,13 +10,10 @@ from celery.worker.worker import WorkController # type: ignore[import-untyped] from fastapi import FastAPI from servicelib.logging_utils import log_context -from servicelib.redis._client import RedisClientSDK from settings_library.celery import CelerySettings -from settings_library.redis import RedisDatabase from . import set_event_loop -from .backends._redis import RedisTaskInfoStore -from .task_manager import CeleryTaskManager +from .common import create_task_manager from .utils import ( get_fastapi_app, set_fastapi_app, @@ -45,22 +42,14 @@ def _init(startup_complete_event: threading.Event) -> None: fastapi_app = app_factory() assert isinstance(fastapi_app, FastAPI) # nosec - async def setup_task_worker(): - redis_client_sdk = RedisClientSDK( - celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS - ), - client_name=f"{fastapi_app.title}.celery_tasks", - ) - + async def setup_task_manager(): assert sender.app # nosec assert isinstance(sender.app, Celery) # nosec set_task_manager( sender.app, - CeleryTaskManager( + create_task_manager( sender.app, celery_settings, - RedisTaskInfoStore(redis_client_sdk), ), ) @@ -83,7 +72,7 @@ async def fastapi_lifespan( set_event_loop(fastapi_app, loop) set_fastapi_app(sender.app, fastapi_app) - loop.run_until_complete(setup_task_worker()) + loop.run_until_complete(setup_task_manager()) loop.run_until_complete( fastapi_lifespan(startup_complete_event, shutdown_event) ) From cb91384c1b60c2435126ffe2756921a9532882ae Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 09:39:38 +0200 Subject: [PATCH 56/91] fix: typo --- packages/celery-library/src/celery_library/errors.py | 2 +- packages/celery-library/src/celery_library/task.py | 6 +++--- packages/celery-library/tests/unit/test_errors.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/celery-library/src/celery_library/errors.py b/packages/celery-library/src/celery_library/errors.py index 0e340f35e71..37b174189f8 100644 --- a/packages/celery-library/src/celery_library/errors.py +++ b/packages/celery-library/src/celery_library/errors.py @@ -11,7 +11,7 @@ def __str__(self) -> str: return f"{decode_celery_transferrable_error(self)}" -def encore_celery_transferrable_error(error: Exception) -> TransferrableCeleryError: +def encode_celery_transferrable_error(error: Exception) -> TransferrableCeleryError: # NOTE: Celery modifies exceptions during serialization, which can cause # the original error context to be lost. This mechanism ensures the same # error can be recreated on the caller side exactly as it was raised here. diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index dc0e8454bea..da7c16272cb 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -16,7 +16,7 @@ from servicelib.async_utils import cancel_wait_task from . 
import get_event_loop -from .errors import encore_celery_transferrable_error +from .errors import encode_celery_transferrable_error from .models import TaskID from .utils import get_fastapi_app @@ -120,7 +120,7 @@ def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: if isinstance(exc, dont_autoretry_for): _logger.debug("Not retrying for exception %s", type(exc).__name__) # propagate without retry - raise encore_celery_transferrable_error(exc) from exc + raise encode_celery_transferrable_error(exc) from exc exc_type = type(exc).__name__ exc_message = f"{exc}" @@ -134,7 +134,7 @@ def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: raise task.retry( max_retries=max_retries, countdown=delay_between_retries.total_seconds(), - exc=encore_celery_transferrable_error(exc), + exc=encode_celery_transferrable_error(exc), ) from exc return wrapper diff --git a/packages/celery-library/tests/unit/test_errors.py b/packages/celery-library/tests/unit/test_errors.py index 2808025c099..62ad4b73fc1 100644 --- a/packages/celery-library/tests/unit/test_errors.py +++ b/packages/celery-library/tests/unit/test_errors.py @@ -1,7 +1,7 @@ import pytest from celery_library.errors import ( decode_celery_transferrable_error, - encore_celery_transferrable_error, + encode_celery_transferrable_error, ) from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError @@ -17,7 +17,7 @@ def test_workflow(original_error: Exception): try: raise original_error # noqa: TRY301 except Exception as e: # pylint: disable=broad-exception-caught - result = encore_celery_transferrable_error(e) + result = encode_celery_transferrable_error(e) assert decode_celery_transferrable_error(result).args == original_error.args assert f"{decode_celery_transferrable_error(result)}" == f"{original_error}" From c08faf9062a46c4426cb792fb08871b31e90d3d4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 14:08:46 +0200 Subject: [PATCH 57/91] feat: wrap app server --- .../src/celery_library/__init__.py | 11 ---- .../src/celery_library/signals.py | 62 +++++-------------- .../celery-library/src/celery_library/task.py | 7 +-- .../src/celery_library/utils.py | 28 ++++----- .../src/servicelib/base_app_server.py | 36 +++++++++++ .../src/servicelib/fastapi/app_server.py | 38 ++++++++++++ .../api/_worker_tasks/_files.py | 4 +- .../api/_worker_tasks/_paths.py | 6 +- .../api/_worker_tasks/_simcore_s3.py | 6 +- .../modules/celery/worker_main.py | 6 +- 10 files changed, 117 insertions(+), 87 deletions(-) create mode 100644 packages/service-library/src/servicelib/base_app_server.py create mode 100644 packages/service-library/src/servicelib/fastapi/app_server.py diff --git a/packages/celery-library/src/celery_library/__init__.py b/packages/celery-library/src/celery_library/__init__.py index bb7e467d1cf..1e4838e563a 100644 --- a/packages/celery-library/src/celery_library/__init__.py +++ b/packages/celery-library/src/celery_library/__init__.py @@ -1,5 +1,4 @@ import logging -from asyncio import AbstractEventLoop from fastapi import FastAPI from settings_library.celery import CelerySettings @@ -27,13 +26,3 @@ def get_celery_client(app: FastAPI) -> CeleryTaskManager: celery_client = app.state.celery_client assert isinstance(celery_client, CeleryTaskManager) return celery_client - - -def get_event_loop(app: FastAPI) -> AbstractEventLoop: - event_loop = app.state.event_loop - assert isinstance(event_loop, AbstractEventLoop) - return event_loop - - -def set_event_loop(app: FastAPI, event_loop: 
AbstractEventLoop) -> None: - app.state.event_loop = event_loop diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 245322217f1..591ac1213d4 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -1,50 +1,39 @@ import asyncio -import datetime import logging import threading -from collections.abc import Callable -from typing import Final -from asgi_lifespan import LifespanManager from celery import Celery # type: ignore[import-untyped] from celery.worker.worker import WorkController # type: ignore[import-untyped] -from fastapi import FastAPI +from servicelib.base_app_server import BaseAppServer from servicelib.logging_utils import log_context from settings_library.celery import CelerySettings -from . import set_event_loop from .common import create_task_manager from .utils import ( - get_fastapi_app, - set_fastapi_app, + get_app_server, + set_app_server, set_task_manager, ) _logger = logging.getLogger(__name__) -_SHUTDOWN_TIMEOUT: Final[float] = datetime.timedelta(seconds=10).total_seconds() -_STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() - def on_worker_init( - app_factory: Callable[[], FastAPI], + app_server: BaseAppServer, celery_settings: CelerySettings, sender: WorkController, **_kwargs, ) -> None: - startup_complete_event = threading.Event() - - def _init(startup_complete_event: threading.Event) -> None: + def _init() -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) - shutdown_event = asyncio.Event() - fastapi_app = app_factory() - assert isinstance(fastapi_app, FastAPI) # nosec + app_server.event_loop = loop async def setup_task_manager(): assert sender.app # nosec assert isinstance(sender.app, Celery) # nosec + set_task_manager( sender.app, create_task_manager( @@ -53,45 +42,22 @@ async def setup_task_manager(): ), ) - async def fastapi_lifespan( - startup_complete_event: threading.Event, shutdown_event: asyncio.Event - ) -> None: - async with LifespanManager( - fastapi_app, - startup_timeout=_STARTUP_TIMEOUT, - shutdown_timeout=_SHUTDOWN_TIMEOUT, - ): - try: - _logger.info("fastapi APP started!") - startup_complete_event.set() - await shutdown_event.wait() - except asyncio.CancelledError: - _logger.warning("Lifespan task cancelled") - - fastapi_app.state.shutdown_event = shutdown_event - set_event_loop(fastapi_app, loop) - - set_fastapi_app(sender.app, fastapi_app) + set_app_server(sender.app, app_server) loop.run_until_complete(setup_task_manager()) - loop.run_until_complete( - fastapi_lifespan(startup_complete_event, shutdown_event) - ) + loop.run_until_complete(app_server.startup()) thread = threading.Thread( group=None, target=_init, - name="fastapi_app", - args=(startup_complete_event,), + name="app_server_init", daemon=True, ) thread.start() - # ensure the fastapi app is ready before going on - startup_complete_event.wait(_STARTUP_TIMEOUT * 1.1) def on_worker_shutdown(sender, **_kwargs) -> None: - with log_context(_logger, logging.INFO, "Worker Shuts-down"): + with log_context(_logger, logging.INFO, "Worker shutdown"): assert isinstance(sender.app, Celery) - fastapi_app = get_fastapi_app(sender.app) - assert isinstance(fastapi_app, FastAPI) - fastapi_app.state.shutdown_event.set() + app_server = get_app_server(sender.app) + + app_server.event_loop.run_until_complete(app_server.shutdown()) diff --git a/packages/celery-library/src/celery_library/task.py 
b/packages/celery-library/src/celery_library/task.py index da7c16272cb..339cee5a907 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -15,10 +15,9 @@ from pydantic import NonNegativeInt from servicelib.async_utils import cancel_wait_task -from . import get_event_loop from .errors import encode_celery_transferrable_error from .models import TaskID -from .utils import get_fastapi_app +from .utils import get_app_server _logger = logging.getLogger(__name__) @@ -48,7 +47,7 @@ def decorator( ) -> Callable[Concatenate[AbortableTask, P], R]: @wraps(coro) def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: - fastapi_app = get_fastapi_app(app) + app_server = get_app_server(app) # NOTE: task.request is a thread local object, so we need to pass the id explicitly assert task.request.id is not None # nosec @@ -90,7 +89,7 @@ async def abort_monitor(): return asyncio.run_coroutine_threadsafe( run_task(task.request.id), - get_event_loop(fastapi_app), + app_server.event_loop, ).result() return wrapper diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py index f840004e533..174268a9a2e 100644 --- a/packages/celery-library/src/celery_library/utils.py +++ b/packages/celery-library/src/celery_library/utils.py @@ -1,27 +1,27 @@ from celery import Celery # type: ignore[import-untyped] -from fastapi import FastAPI +from servicelib.base_app_server import BaseAppServer from .task_manager import CeleryTaskManager -_WORKER_KEY = "celery_worker" -_FASTAPI_APP_KEY = "fastapi_app" +_APP_SERVER_KEY = "app_server" +_TASK_MANAGER_KEY = "task_manager" -def set_task_manager(celery_app: Celery, worker: CeleryTaskManager) -> None: - celery_app.conf[_WORKER_KEY] = worker +def get_app_server(app: Celery) -> BaseAppServer: + app_server = app.conf[_APP_SERVER_KEY] + assert isinstance(app_server, BaseAppServer) + return app_server + + +def set_app_server(app: Celery, app_server: BaseAppServer) -> None: + app.conf[_APP_SERVER_KEY] = app_server def get_task_manager(celery_app: Celery) -> CeleryTaskManager: - worker = celery_app.conf[_WORKER_KEY] + worker = celery_app.conf[_TASK_MANAGER_KEY] assert isinstance(worker, CeleryTaskManager) return worker -def set_fastapi_app(celery_app: Celery, fastapi_app: FastAPI) -> None: - celery_app.conf[_FASTAPI_APP_KEY] = fastapi_app - - -def get_fastapi_app(celery_app: Celery) -> FastAPI: - fastapi_app = celery_app.conf[_FASTAPI_APP_KEY] - assert isinstance(fastapi_app, FastAPI) - return fastapi_app +def set_task_manager(celery_app: Celery, worker: CeleryTaskManager) -> None: + celery_app.conf[_TASK_MANAGER_KEY] = worker diff --git a/packages/service-library/src/servicelib/base_app_server.py b/packages/service-library/src/servicelib/base_app_server.py new file mode 100644 index 00000000000..e9f8e1d8cbd --- /dev/null +++ b/packages/service-library/src/servicelib/base_app_server.py @@ -0,0 +1,36 @@ +from abc import ABC, abstractmethod +from asyncio import AbstractEventLoop +from contextlib import suppress +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + with suppress(ImportError): + from fastapi import FastAPI + with suppress(ImportError): + from aiohttp.web import Application + + +class BaseAppServer(ABC): + @property + def fastapi_app(self) -> "FastAPI": + raise NotImplementedError + + @property + def aiohttp_app(self) -> "Application": + raise NotImplementedError + + @abstractmethod + async def startup(self): + pass + + @property + 
def event_loop(self) -> AbstractEventLoop: + return self._event_loop + + @event_loop.setter + def event_loop(self, loop: AbstractEventLoop) -> None: + self._event_loop = loop + + @abstractmethod + async def shutdown(self): + pass diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/app_server.py new file mode 100644 index 00000000000..9268f0d65c8 --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/app_server.py @@ -0,0 +1,38 @@ +import asyncio +from datetime import timedelta +from typing import Final + +from asgi_lifespan import LifespanManager +from fastapi import FastAPI + +from ..base_app_server import BaseAppServer + +_SHUTDOWN_TIMEOUT: Final[float] = timedelta(seconds=10).total_seconds() +_STARTUP_TIMEOUT: Final[float] = timedelta(minutes=1).total_seconds() + + +class FastAPIAppServer(BaseAppServer): + def __init__(self, app: FastAPI): + self._app = app + self._lifespan_manager = None + self._shutdown_event = asyncio.Event() + + @property + def fastapi_app(self) -> FastAPI: + assert isinstance(self._app, FastAPI) # nosec + return self._app + + async def startup(self): + self._lifespan_manager = LifespanManager( + self.fastapi_app, + startup_timeout=_STARTUP_TIMEOUT, + shutdown_timeout=_SHUTDOWN_TIMEOUT, + ) + await self._lifespan_manager.__aenter__() + await self._shutdown_event.wait() + + async def shutdown(self): + self._shutdown_event.set() + if self._lifespan_manager is None: + return + await self._lifespan_manager.__aexit__(None, None, None) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index 15837f54e4a..651948d5b8a 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -2,7 +2,7 @@ from celery import Task # type: ignore[import-untyped] from celery_library.models import TaskID -from celery_library.utils import get_fastapi_app +from celery_library.utils import get_app_server from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, ) @@ -30,7 +30,7 @@ async def complete_upload_file( logging.INFO, msg=f"completing upload of file {user_id=}, {location_id=}, {file_id=}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get(location_id) # NOTE: completing a multipart upload on AWS can take up to several minutes # if it returns slow we return a 202 - Accepted, the client will have to check later # for completeness diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 6028112d9a4..34e18617e0c 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -3,7 +3,7 @@ from celery import Task # type: ignore[import-untyped] from celery_library.models import TaskID -from celery_library.utils import get_fastapi_app +from celery_library.utils import get_app_server from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter @@ -25,7 +25,7 @@ async def compute_path_size( logging.INFO, msg=f"computing path size {user_id=}, {location_id=}, {path=}", ): - dsm = 
get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get(location_id) return await dsm.compute_path_size(user_id, path=Path(path)) @@ -42,7 +42,7 @@ async def delete_paths( logging.INFO, msg=f"delete {paths=} in {location_id=} for {user_id=}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get(location_id) files_ids: set[StorageFileID] = { TypeAdapter(StorageFileID).validate_python(f"{path}") for path in paths } diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index e06dfd884c6..246f7c08368 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -5,7 +5,7 @@ from aws_library.s3._models import S3ObjectKey from celery import Task # type: ignore[import-untyped] from celery_library.models import TaskID -from celery_library.utils import get_fastapi_app, get_task_manager +from celery_library.utils import get_app_server, get_task_manager from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport from models_library.progress_bar import ProgressReport @@ -40,7 +40,7 @@ async def deep_copy_files_from_project( logging.INFO, msg=f"copying {body.source['uuid']} -> {body.destination['uuid']} with {task.request.id}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get( + dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get( SimcoreS3DataManager.get_location_id() ) assert isinstance(dsm, SimcoreS3DataManager) # nosec @@ -75,7 +75,7 @@ async def export_data( logging.INFO, f"'{task_id}' export data (for {user_id=}) fom selection: {paths_to_export}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get( + dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get( SimcoreS3DataManager.get_location_id() ) assert isinstance(dsm, SimcoreS3DataManager) # nosec diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index 2ac9aed23c0..0d922b58d81 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -9,6 +9,7 @@ on_worker_init, on_worker_shutdown, ) +from servicelib.fastapi.app_server import FastAPIAppServer from servicelib.logging_utils import config_all_loggers from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks @@ -28,11 +29,12 @@ assert _settings.STORAGE_CELERY # nosec app = create_celery_app(_settings.STORAGE_CELERY) -app_factory = partial(create_app, _settings) + +app_server = FastAPIAppServer(app=create_app(_settings)) def worker_init_wrapper(sender, **_kwargs): - return partial(on_worker_init, app_factory, _settings.STORAGE_CELERY)( + return partial(on_worker_init, app_server, _settings.STORAGE_CELERY)( sender, **_kwargs ) From a5c933e7e76f98671014eeb74faabbe734f9fc83 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 14:29:12 +0200 Subject: [PATCH 58/91] fix: tests --- services/storage/tests/conftest.py | 9 +++++---- services/storage/tests/unit/test_modules_celery.py | 6 +++--- 2 files changed, 8 insertions(+), 7 deletions(-) 
diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 9cbede09968..8707276a622 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -65,6 +65,7 @@ ) from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status +from servicelib.fastapi.app_server import FastAPIAppServer from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.utils import limited_gather from settings_library.rabbit import RabbitSettings @@ -1001,11 +1002,11 @@ async def with_storage_celery_worker_controller( # Signals must be explicitily connected monkeypatch.setenv("STORAGE_WORKER_MODE", "true") app_settings = ApplicationSettings.create_from_envs() - app_factory = partial(create_app, app_settings) - def _on_worker_init_wrapper(sender: WorkController, **_kwargs) -> None: - assert app_settings.STORAGE_CELERY # nosec - return partial(on_worker_init, app_factory, app_settings.STORAGE_CELERY)( + app_server = FastAPIAppServer(app=create_app(app_settings)) + + def _on_worker_init_wrapper(sender: WorkController, **_kwargs): + return partial(on_worker_init, app_server, app_settings.STORAGE_CELERY)( sender, **_kwargs ) diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index d9f56978b58..72e9727fede 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ b/services/storage/tests/unit/test_modules_celery.py @@ -13,7 +13,7 @@ import pytest from celery import Celery, Task from celery.contrib.abortable import AbortableTask -from celery_library import get_celery_client, get_event_loop +from celery_library import get_celery_client from celery_library.errors import TransferrableCeleryError from celery_library.models import ( TaskContext, @@ -26,7 +26,7 @@ register_task, ) from celery_library.task_manager import CeleryTaskManager -from celery_library.utils import get_fastapi_app, get_task_manager +from celery_library.utils import get_app_server, get_task_manager from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI from models_library.progress_bar import ProgressReport @@ -72,7 +72,7 @@ def fake_file_processor(task: Task, task_id: TaskID, files: list[str]) -> str: _logger.info("Calling _fake_file_processor") return asyncio.run_coroutine_threadsafe( _fake_file_processor(task.app, task.name, task.request.id, files), - get_event_loop(get_fastapi_app(task.app)), + get_app_server(task.app).event_loop, ).result() From 72a0ce98dfca3f1bf9d1653f16a416e665e70b3c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 14:32:43 +0200 Subject: [PATCH 59/91] fix: typecheck --- packages/service-library/src/servicelib/fastapi/app_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/app_server.py index 9268f0d65c8..d1d6280db7a 100644 --- a/packages/service-library/src/servicelib/fastapi/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/app_server.py @@ -14,7 +14,7 @@ class FastAPIAppServer(BaseAppServer): def __init__(self, app: FastAPI): self._app = app - self._lifespan_manager = None + self._lifespan_manager: LifespanManager | None = None self._shutdown_event = asyncio.Event() @property From 1c4f7ec775fd290fe8547ad2ce2fece8b1296693 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 15:37:11 +0200 Subject: [PATCH 60/91] fix: startup 
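
Block worker start-up until the embedded app server is actually serving:
on_worker_init hands a threading.Event to app_server.startup(), which sets it
once the FastAPI lifespan has been entered, and the signal handler waits on
that event before returning.

The snippet below is an illustrative sketch of this handshake pattern only, not
part of the patch: _DummyAppServer and init_app_server are made-up names
standing in for FastAPIAppServer and on_worker_init.

    import asyncio
    import threading


    class _DummyAppServer:
        # Minimal stand-in: startup() reports readiness, then idles until shutdown().
        def __init__(self) -> None:
            self.event_loop: asyncio.AbstractEventLoop | None = None
            self._shutdown = asyncio.Event()

        async def startup(self, completed_event: threading.Event) -> None:
            completed_event.set()        # signal the spawning thread: ready to serve
            await self._shutdown.wait()  # keep the event loop alive until shutdown

        async def shutdown(self) -> None:
            self._shutdown.set()


    def init_app_server(app_server: _DummyAppServer) -> threading.Thread:
        started = threading.Event()

        def _init() -> None:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            app_server.event_loop = loop
            loop.run_until_complete(app_server.startup(started))

        thread = threading.Thread(target=_init, name="app_server_init", daemon=True)
        thread.start()
        started.wait()  # block, as on_worker_init does, until the server reports ready
        return thread

Calling init_app_server(_DummyAppServer()) returns only after the stand-in
server has signalled readiness, mirroring the behaviour restored here for the
Celery worker.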
--- .../celery-library/src/celery_library/signals.py | 15 ++++++++++----- .../src/servicelib/base_app_server.py | 3 ++- .../src/servicelib/fastapi/app_server.py | 3 ++- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 591ac1213d4..2186b2e99e8 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -24,16 +24,19 @@ def on_worker_init( sender: WorkController, **_kwargs, ) -> None: - def _init() -> None: + startup_complete_event = threading.Event() + + def _init(startup_complete_event: threading.Event) -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) app_server.event_loop = loop - async def setup_task_manager(): + async def _setup(): assert sender.app # nosec assert isinstance(sender.app, Celery) # nosec + set_app_server(sender.app, app_server) set_task_manager( sender.app, create_task_manager( @@ -42,18 +45,20 @@ async def setup_task_manager(): ), ) - set_app_server(sender.app, app_server) - loop.run_until_complete(setup_task_manager()) - loop.run_until_complete(app_server.startup()) + loop.run_until_complete(_setup()) + loop.run_until_complete(app_server.startup(startup_complete_event)) thread = threading.Thread( group=None, target=_init, name="app_server_init", + args=(startup_complete_event,), daemon=True, ) thread.start() + startup_complete_event.wait() + def on_worker_shutdown(sender, **_kwargs) -> None: with log_context(_logger, logging.INFO, "Worker shutdown"): diff --git a/packages/service-library/src/servicelib/base_app_server.py b/packages/service-library/src/servicelib/base_app_server.py index e9f8e1d8cbd..6c6ae685466 100644 --- a/packages/service-library/src/servicelib/base_app_server.py +++ b/packages/service-library/src/servicelib/base_app_server.py @@ -1,3 +1,4 @@ +import threading from abc import ABC, abstractmethod from asyncio import AbstractEventLoop from contextlib import suppress @@ -20,7 +21,7 @@ def aiohttp_app(self) -> "Application": raise NotImplementedError @abstractmethod - async def startup(self): + async def startup(self, completed: threading.Event): pass @property diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/app_server.py index d1d6280db7a..a048af2e02a 100644 --- a/packages/service-library/src/servicelib/fastapi/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/app_server.py @@ -22,13 +22,14 @@ def fastapi_app(self) -> FastAPI: assert isinstance(self._app, FastAPI) # nosec return self._app - async def startup(self): + async def startup(self, completed: asyncio.Event): self._lifespan_manager = LifespanManager( self.fastapi_app, startup_timeout=_STARTUP_TIMEOUT, shutdown_timeout=_SHUTDOWN_TIMEOUT, ) await self._lifespan_manager.__aenter__() + completed.set() await self._shutdown_event.wait() async def shutdown(self): From c8515a00964a0385404a40bb902e1054ce469c18 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 15:47:23 +0200 Subject: [PATCH 61/91] fix typecheck --- packages/service-library/src/servicelib/fastapi/app_server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/app_server.py index a048af2e02a..4152c7f5081 100644 --- a/packages/service-library/src/servicelib/fastapi/app_server.py +++ 
b/packages/service-library/src/servicelib/fastapi/app_server.py @@ -1,4 +1,5 @@ import asyncio +import threading from datetime import timedelta from typing import Final @@ -22,7 +23,7 @@ def fastapi_app(self) -> FastAPI: assert isinstance(self._app, FastAPI) # nosec return self._app - async def startup(self, completed: asyncio.Event): + async def startup(self, completed: threading.Event): self._lifespan_manager = LifespanManager( self.fastapi_app, startup_timeout=_STARTUP_TIMEOUT, From 88f99220d530f39eee9a8ed0a964df8e69876d22 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 15:49:29 +0200 Subject: [PATCH 62/91] fix: rename --- packages/service-library/src/servicelib/base_app_server.py | 2 +- packages/service-library/src/servicelib/fastapi/app_server.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/base_app_server.py b/packages/service-library/src/servicelib/base_app_server.py index 6c6ae685466..8e8ddfbd00a 100644 --- a/packages/service-library/src/servicelib/base_app_server.py +++ b/packages/service-library/src/servicelib/base_app_server.py @@ -21,7 +21,7 @@ def aiohttp_app(self) -> "Application": raise NotImplementedError @abstractmethod - async def startup(self, completed: threading.Event): + async def startup(self, completed_event: threading.Event): pass @property diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/app_server.py index 4152c7f5081..adc511d8f73 100644 --- a/packages/service-library/src/servicelib/fastapi/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/app_server.py @@ -23,14 +23,14 @@ def fastapi_app(self) -> FastAPI: assert isinstance(self._app, FastAPI) # nosec return self._app - async def startup(self, completed: threading.Event): + async def startup(self, completed_event: threading.Event): self._lifespan_manager = LifespanManager( self.fastapi_app, startup_timeout=_STARTUP_TIMEOUT, shutdown_timeout=_SHUTDOWN_TIMEOUT, ) await self._lifespan_manager.__aenter__() - completed.set() + completed_event.set() await self._shutdown_event.wait() async def shutdown(self): From d204914fd48c8e499a5ee216c084d0ae76872e66 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 15:52:30 +0200 Subject: [PATCH 63/91] fix: update reqs --- packages/celery-library/requirements/_base.in | 1 - packages/celery-library/requirements/_base.txt | 11 ++++++----- packages/celery-library/requirements/_test.txt | 4 +--- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/celery-library/requirements/_base.in b/packages/celery-library/requirements/_base.in index 27d9cc883b0..3fcf6a9e24c 100644 --- a/packages/celery-library/requirements/_base.in +++ b/packages/celery-library/requirements/_base.in @@ -7,5 +7,4 @@ --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in -asgi_lifespan celery[redis] diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt index af8a4cd518a..6954b4092b4 100644 --- a/packages/celery-library/requirements/_base.txt +++ b/packages/celery-library/requirements/_base.txt @@ -42,8 +42,6 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/_base.in -asgi-lifespan==2.1.0 - # via -r requirements/_base.in attrs==25.3.0 # via # aiohttp @@ -139,6 +137,7 @@ opentelemetry-api==1.33.1 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests @@ -156,9 +155,12 @@ opentelemetry-exporter-otlp-proto-http==1.33.1 # via opentelemetry-exporter-otlp opentelemetry-instrumentation==0.54b1 # via + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-aio-pika==0.54b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-logging==0.54b1 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.54b1 @@ -356,9 +358,7 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # asgi-lifespan + # via anyio stream-zip==0.0.83 # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.1.2 @@ -415,6 +415,7 @@ wrapt==1.17.2 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis yarl==1.20.0 # via diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 1bf99eeff9f..059125f415c 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -77,9 +77,7 @@ pytest==8.3.5 # pytest-mock # pytest-sugar pytest-asyncio==0.23.8 - # via - # -c requirements/../../../requirements/constraints.txt - # -r requirements/_test.in + # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in pytest-cov==6.1.1 From 4691989818765bebefeee4d5d77061bc9187b04b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 16:07:01 +0200 Subject: [PATCH 64/91] fix: assert --- services/storage/tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 8707276a622..e3ab99d6605 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -1006,6 +1006,7 @@ async def with_storage_celery_worker_controller( app_server = FastAPIAppServer(app=create_app(app_settings)) def _on_worker_init_wrapper(sender: WorkController, **_kwargs): + assert app_settings.STORAGE_CELERY # nosec return partial(on_worker_init, app_server, app_settings.STORAGE_CELERY)( sender, **_kwargs ) From b1da2eb100201b6805a05ad7bc97f5e23a5815ca Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 11 Jun 2025 16:43:24 +0200 Subject: [PATCH 65/91] fix: typecheck --- .../src/simcore_service_storage/modules/celery/worker_main.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index 0d922b58d81..913bef6c6b8 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -34,6 +34,7 @@ def worker_init_wrapper(sender, **_kwargs): + assert 
_settings.STORAGE_CELERY # nosec return partial(on_worker_init, app_server, _settings.STORAGE_CELERY)( sender, **_kwargs ) From bf18f60a998a09c483510aa87dc3fb63236f0aa7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 13 Jun 2025 11:18:29 +0200 Subject: [PATCH 66/91] refactor --- .../src/celery_library/__init__.py | 28 ------------------- .../api/rest/_files.py | 6 ++-- .../api/rest/dependencies/celery.py | 7 +++-- .../api/rpc/_async_jobs.py | 13 +++++---- .../simcore_service_storage/api/rpc/_paths.py | 6 ++-- .../api/rpc/_simcore_s3.py | 6 ++-- .../core/application.py | 4 +-- .../modules/celery/__init__.py | 23 +++++++++++++++ services/storage/tests/conftest.py | 2 +- .../storage/tests/unit/test_async_jobs.py | 6 ++-- .../storage/tests/unit/test_modules_celery.py | 4 +-- 11 files changed, 51 insertions(+), 54 deletions(-) diff --git a/packages/celery-library/src/celery_library/__init__.py b/packages/celery-library/src/celery_library/__init__.py index 1e4838e563a..e69de29bb2d 100644 --- a/packages/celery-library/src/celery_library/__init__.py +++ b/packages/celery-library/src/celery_library/__init__.py @@ -1,28 +0,0 @@ -import logging - -from fastapi import FastAPI -from settings_library.celery import CelerySettings - -from .common import create_app, create_task_manager -from .task_manager import CeleryTaskManager -from .types import register_celery_types - -_logger = logging.getLogger(__name__) - - -def setup_celery_client(app: FastAPI, celery_settings: CelerySettings) -> None: - async def on_startup() -> None: - app.state.celery_client = create_task_manager( - create_app(celery_settings), celery_settings - ) - - register_celery_types() - - app.add_event_handler("startup", on_startup) - - -def get_celery_client(app: FastAPI) -> CeleryTaskManager: - assert hasattr(app.state, "celery_client") # nosec - celery_client = app.state.celery_client - assert isinstance(celery_client, CeleryTaskManager) - return celery_client diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 59415bf70df..4f176d45d00 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -38,7 +38,7 @@ ) from ...simcore_s3_dsm import SimcoreS3DataManager from .._worker_tasks._files import complete_upload_file as remote_complete_upload_file -from .dependencies.celery import get_celery_client +from .dependencies.celery import get_task_manager _logger = logging.getLogger(__name__) @@ -270,7 +270,7 @@ async def abort_upload_file( status_code=status.HTTP_202_ACCEPTED, ) async def complete_upload_file( - celery_client: Annotated[CeleryTaskManager, Depends(get_celery_client)], + celery_client: Annotated[CeleryTaskManager, Depends(get_task_manager)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, @@ -326,7 +326,7 @@ async def complete_upload_file( response_model=Envelope[FileUploadCompleteFutureResponse], ) async def is_completed_upload_file( - celery_client: Annotated[CeleryTaskManager, Depends(get_celery_client)], + celery_client: Annotated[CeleryTaskManager, Depends(get_task_manager)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py index 
7f93f519b07..1c6cce503aa 100644 --- a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py +++ b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py @@ -1,12 +1,13 @@ from typing import Annotated -from celery_library import get_celery_client as _get_celery_client_from_app from celery_library.task_manager import CeleryTaskManager from fastapi import Depends, FastAPI from servicelib.fastapi.dependencies import get_app +from ....modules.celery import get_task_manager_from_app -def get_celery_client( + +def get_task_manager( app: Annotated[FastAPI, Depends(get_app)], ) -> CeleryTaskManager: - return _get_celery_client_from_app(app) + return get_task_manager_from_app(app) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py index 10c485f3644..68cc5977811 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py @@ -3,7 +3,6 @@ import logging from celery.exceptions import CeleryError # type: ignore[import-untyped] -from celery_library import get_celery_client from celery_library.errors import ( TransferrableCeleryError, decode_celery_transferrable_error, @@ -26,6 +25,8 @@ from servicelib.logging_utils import log_catch from servicelib.rabbitmq import RPCRouter +from ...modules.celery import get_task_manager_from_app + _logger = logging.getLogger(__name__) router = RPCRouter() @@ -35,7 +36,7 @@ async def cancel(app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData assert app # nosec assert job_id_data # nosec try: - await get_celery_client(app).cancel_task( + await get_task_manager_from_app(app).cancel_task( task_context=job_id_data.model_dump(), task_uuid=job_id, ) @@ -51,7 +52,7 @@ async def status( assert job_id_data # nosec try: - task_status = await get_celery_client(app).get_task_status( + task_status = await get_task_manager_from_app(app).get_task_status( task_context=job_id_data.model_dump(), task_uuid=job_id, ) @@ -81,13 +82,13 @@ async def result( assert job_id_data # nosec try: - _status = await get_celery_client(app).get_task_status( + _status = await get_task_manager_from_app(app).get_task_status( task_context=job_id_data.model_dump(), task_uuid=job_id, ) if not _status.is_done: raise JobNotDoneError(job_id=job_id) - _result = await get_celery_client(app).get_task_result( + _result = await get_task_manager_from_app(app).get_task_result( task_context=job_id_data.model_dump(), task_uuid=job_id, ) @@ -126,7 +127,7 @@ async def list_jobs( _ = filter_ assert app # nosec try: - tasks = await get_celery_client(app).list_tasks( + tasks = await get_task_manager_from_app(app).list_tasks( task_context=job_id_data.model_dump(), ) except CeleryError as exc: diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py index ea8491877aa..fa69da125c7 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -1,7 +1,6 @@ import logging from pathlib import Path -from celery_library import get_celery_client from celery_library.models import TaskMetadata from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( @@ -11,6 +10,7 @@ from models_library.projects_nodes_io import LocationID from servicelib.rabbitmq import RPCRouter +from ...modules.celery 
import get_task_manager_from_app from .._worker_tasks._paths import compute_path_size as remote_compute_path_size from .._worker_tasks._paths import delete_paths as remote_delete_paths @@ -26,7 +26,7 @@ async def compute_path_size( path: Path, ) -> AsyncJobGet: task_name = remote_compute_path_size.__name__ - task_uuid = await get_celery_client(app).submit_task( + task_uuid = await get_task_manager_from_app(app).submit_task( task_metadata=TaskMetadata( name=task_name, ), @@ -47,7 +47,7 @@ async def delete_paths( paths: set[Path], ) -> AsyncJobGet: task_name = remote_delete_paths.__name__ - task_uuid = await get_celery_client(app).submit_task( + task_uuid = await get_task_manager_from_app(app).submit_task( task_metadata=TaskMetadata( name=task_name, ), diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index 18d24cec0f1..1b85893de9e 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -1,4 +1,3 @@ -from celery_library import get_celery_client from celery_library.models import TaskMetadata, TasksQueue from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( @@ -9,6 +8,7 @@ from models_library.api_schemas_webserver.storage import PathToExport from servicelib.rabbitmq import RPCRouter +from ...modules.celery import get_task_manager_from_app from .._worker_tasks._simcore_s3 import deep_copy_files_from_project, export_data router = RPCRouter() @@ -21,7 +21,7 @@ async def copy_folders_from_project( body: FoldersBody, ) -> AsyncJobGet: task_name = deep_copy_files_from_project.__name__ - task_uuid = await get_celery_client(app).submit_task( + task_uuid = await get_task_manager_from_app(app).submit_task( task_metadata=TaskMetadata( name=task_name, ), @@ -38,7 +38,7 @@ async def start_export_data( app: FastAPI, job_id_data: AsyncJobNameData, paths_to_export: list[PathToExport] ) -> AsyncJobGet: task_name = export_data.__name__ - task_uuid = await get_celery_client(app).submit_task( + task_uuid = await get_task_manager_from_app(app).submit_task( task_metadata=TaskMetadata( name=task_name, ephemeral=False, diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py index 9468a0d0e35..987878d32cf 100644 --- a/services/storage/src/simcore_service_storage/core/application.py +++ b/services/storage/src/simcore_service_storage/core/application.py @@ -5,7 +5,6 @@ import logging -from celery_library import setup_celery_client from common_library.basic_types import BootModeEnum from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware @@ -37,6 +36,7 @@ from ..dsm import setup_dsm from ..dsm_cleaner import setup_dsm_cleaner from ..exceptions.handlers import set_exception_handlers +from ..modules.celery import setup_task_manager from ..modules.db import setup_db from ..modules.long_running_tasks import setup_rest_api_long_running_tasks_for_uploads from ..modules.rabbitmq import setup as setup_rabbitmq @@ -95,7 +95,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 setup_rpc_api_routes(app) assert settings.STORAGE_CELERY # nosec - setup_celery_client(app, celery_settings=settings.STORAGE_CELERY) + setup_task_manager(app, celery_settings=settings.STORAGE_CELERY) setup_rest_api_long_running_tasks_for_uploads(app) setup_rest_api_routes(app, API_VTAG) 
set_exception_handlers(app) diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py index e69de29bb2d..ee01f352ae7 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/__init__.py +++ b/services/storage/src/simcore_service_storage/modules/celery/__init__.py @@ -0,0 +1,23 @@ +from celery_library.common import create_app, create_task_manager +from celery_library.task_manager import CeleryTaskManager +from celery_library.types import register_celery_types +from fastapi import FastAPI +from settings_library.celery import CelerySettings + + +def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: + async def on_startup() -> None: + app.state.celery_client = create_task_manager( + create_app(celery_settings), celery_settings + ) + + register_celery_types() + + app.add_event_handler("startup", on_startup) + + +def get_task_manager_from_app(app: FastAPI) -> CeleryTaskManager: + assert hasattr(app.state, "celery_client") # nosec + celery_client = app.state.celery_client + assert isinstance(celery_client, CeleryTaskManager) # nosec + return celery_client diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index e3ab99d6605..2b3f1940416 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -977,7 +977,7 @@ def celery_config() -> dict[str, Any]: def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: celery_app = Celery(**celery_config) - for module in ("celery_library.create_app",): + for module in ("celery_library.common.create_app",): mocker.patch(module, return_value=celery_app) return celery_app diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 9a2c60cd596..ef164c8bbe7 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -10,7 +10,6 @@ import pytest from celery import Celery, Task -from celery_library import get_celery_client from celery_library.models import TaskID, TaskMetadata from celery_library.task import register_task from celery_library.task_manager import CeleryTaskManager @@ -31,6 +30,7 @@ from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server +from simcore_service_storage.modules.celery import get_task_manager_from_app from tenacity import ( AsyncRetrying, retry_if_exception_type, @@ -53,7 +53,7 @@ async def rpc_sync_job( app: FastAPI, *, job_id_data: AsyncJobNameData, **kwargs: Any ) -> AsyncJobGet: task_name = sync_job.__name__ - task_uuid = await get_celery_client(app).submit_task( + task_uuid = await get_task_manager_from_app(app).submit_task( TaskMetadata(name=task_name), task_context=job_id_data.model_dump(), **kwargs ) @@ -65,7 +65,7 @@ async def rpc_async_job( app: FastAPI, *, job_id_data: AsyncJobNameData, **kwargs: Any ) -> AsyncJobGet: task_name = async_job.__name__ - task_uuid = await get_celery_client(app).submit_task( + task_uuid = await get_task_manager_from_app(app).submit_task( TaskMetadata(name=task_name), task_context=job_id_data.model_dump(), **kwargs ) diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index 72e9727fede..b81d1275fae 100644 --- 
a/services/storage/tests/unit/test_modules_celery.py +++ b/services/storage/tests/unit/test_modules_celery.py @@ -13,7 +13,6 @@ import pytest from celery import Celery, Task from celery.contrib.abortable import AbortableTask -from celery_library import get_celery_client from celery_library.errors import TransferrableCeleryError from celery_library.models import ( TaskContext, @@ -31,6 +30,7 @@ from fastapi import FastAPI from models_library.progress_bar import ProgressReport from servicelib.logging_utils import log_context +from simcore_service_storage.modules.celery import get_task_manager_from_app from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed _logger = logging.getLogger(__name__) @@ -44,7 +44,7 @@ def celery_client( initialized_app: FastAPI, with_storage_celery_worker: CeleryTaskManager, ) -> CeleryTaskManager: - return get_celery_client(initialized_app) + return get_task_manager_from_app(initialized_app) async def _fake_file_processor( From 578b30d332dead0aa77c971fdc3dcd05e3175cf5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 13 Jun 2025 12:39:13 +0200 Subject: [PATCH 67/91] remove unused fixture --- services/storage/tests/conftest.py | 16 +++------------- services/storage/tests/unit/test_async_jobs.py | 8 ++++---- .../storage/tests/unit/test_handlers_files.py | 4 ++-- .../storage/tests/unit/test_modules_celery.py | 3 ++- .../tests/unit/test_rpc_handlers_paths.py | 8 ++++---- .../tests/unit/test_rpc_handlers_simcore_s3.py | 4 ++-- 6 files changed, 17 insertions(+), 26 deletions(-) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 2b3f1940416..080d52bc181 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -27,8 +27,6 @@ from celery.signals import worker_init, worker_shutdown from celery.worker.worker import WorkController from celery_library.signals import on_worker_init, on_worker_shutdown -from celery_library.task_manager import CeleryTaskManager -from celery_library.utils import get_task_manager from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI @@ -365,7 +363,7 @@ def upload_file( create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], create_file_of_size: Callable[[ByteSize, str | None], Path], create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ) -> Callable[ [ByteSize, str, SimcoreS3FileID | None], Awaitable[tuple[Path, SimcoreS3FileID]] ]: @@ -480,7 +478,7 @@ async def create_empty_directory( create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], client: httpx.AsyncClient, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ) -> Callable[[str, ProjectID, NodeID], Awaitable[SimcoreS3FileID]]: async def _directory_creator( dir_name: str, project_id: ProjectID, node_id: NodeID @@ -993,7 +991,7 @@ def _(celery_app: Celery) -> None: ... 
@pytest.fixture -async def with_storage_celery_worker_controller( +async def with_storage_celery_worker( app_environment: EnvVarsDict, celery_app: Celery, monkeypatch: pytest.MonkeyPatch, @@ -1028,14 +1026,6 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs): yield worker -@pytest.fixture -def with_storage_celery_worker( - with_storage_celery_worker_controller: TestWorkController, -) -> CeleryTaskManager: - assert isinstance(with_storage_celery_worker_controller.app, Celery) - return get_task_manager(with_storage_celery_worker_controller.app) - - @pytest.fixture async def storage_rabbitmq_rpc_client( rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index ef164c8bbe7..9f3cd04529d 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -10,9 +10,9 @@ import pytest from celery import Celery, Task +from celery.contrib.testing.worker import TestWorkController from celery_library.models import TaskID, TaskMetadata from celery_library.task import register_task -from celery_library.task_manager import CeleryTaskManager from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -202,7 +202,7 @@ async def test_async_jobs_workflow( initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -250,7 +250,7 @@ async def test_async_jobs_cancel( initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, @@ -315,7 +315,7 @@ async def test_async_jobs_raises( initialized_app: FastAPI, register_rpc_routes: None, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 10073b92770..f07b63cdbe9 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -23,7 +23,7 @@ from aiohttp import ClientSession from aws_library.s3 import S3KeyNotFoundError, S3ObjectKey, SimcoreS3API from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE -from celery_library.task_manager import CeleryTaskManager +from celery.contrib.testing.worker import TestWorkController from faker import Faker from fastapi import FastAPI from models_library.api_schemas_storage.storage_schemas import ( @@ -683,7 +683,7 @@ async def test_upload_real_file_with_s3_client( node_id: NodeID, faker: Faker, s3_client: S3Client, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ): file_size = TypeAdapter(ByteSize).validate_python("500Mib") file_name = faker.file_name() diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py index b81d1275fae..f3eb5d21e7e 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ 
b/services/storage/tests/unit/test_modules_celery.py @@ -13,6 +13,7 @@ import pytest from celery import Celery, Task from celery.contrib.abortable import AbortableTask +from celery.contrib.testing.worker import TestWorkController from celery_library.errors import TransferrableCeleryError from celery_library.models import ( TaskContext, @@ -42,7 +43,7 @@ @pytest.fixture def celery_client( initialized_app: FastAPI, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ) -> CeleryTaskManager: return get_task_manager_from_app(initialized_app) diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index afe5b62ddd8..cd5db414037 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -13,7 +13,7 @@ from typing import Any, TypeAlias import pytest -from celery_library.task_manager import CeleryTaskManager +from celery.contrib.testing.worker import TestWorkController from faker import Faker from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( @@ -265,7 +265,7 @@ async def test_path_compute_size_inexistent_path( mock_celery_app: None, initialized_app: FastAPI, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, location_id: LocationID, user_id: UserID, faker: Faker, @@ -294,7 +294,7 @@ async def test_delete_paths_empty_set( user_id: UserID, location_id: LocationID, product_name: ProductName, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ): await _assert_delete_paths( storage_rabbitmq_rpc_client, @@ -333,7 +333,7 @@ async def test_delete_paths( ], project_params: ProjectWithFilesParams, product_name: ProductName, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ): assert ( len(project_params.allowed_file_sizes) == 1 diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 4f52884179b..b16b4f2e4a0 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -113,7 +113,7 @@ async def test_copy_folders_from_non_existing_project( product_name: ProductName, create_project: Callable[..., Awaitable[dict[str, Any]]], faker: Faker, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ): src_project = await create_project() incorrect_src_project = deepcopy(src_project) @@ -154,7 +154,7 @@ async def test_copy_folders_from_empty_project( product_name: ProductName, create_project: Callable[[], Awaitable[dict[str, Any]]], sqlalchemy_async_engine: AsyncEngine, - with_storage_celery_worker: CeleryTaskManager, + with_storage_celery_worker: TestWorkController, ): # we will copy from src to dst src_project = await create_project() From 22513d879dba9dbb54b9b7c0b840a1aefe62021d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 13 Jun 2025 12:48:31 +0200 Subject: [PATCH 68/91] fix: mock --- services/storage/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 080d52bc181..5c622ce0c04 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ 
-975,7 +975,7 @@ def celery_config() -> dict[str, Any]: def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: celery_app = Celery(**celery_config) - for module in ("celery_library.common.create_app",): + for module in ("simcore_service_storage.modules.celery.create_app",): mocker.patch(module, return_value=celery_app) return celery_app From 07e37d0710fecaf2447139dc76f86ef562c5925b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 13 Jun 2025 14:15:51 +0200 Subject: [PATCH 69/91] rename fixture --- services/storage/tests/unit/test_rpc_handlers_simcore_s3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index b16b4f2e4a0..7f0d87667f6 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -575,7 +575,7 @@ def task_progress_spy(mocker: MockerFixture) -> Mock: async def test_start_export_data( initialized_app: FastAPI, short_dsm_cleaner_interval: int, - with_storage_celery_worker_controller: TestWorkController, + with_storage_celery_worker: TestWorkController, storage_rabbitmq_rpc_client: RabbitMQRPCClient, user_id: UserID, product_name: ProductName, @@ -621,7 +621,7 @@ async def test_start_export_data( async def test_start_export_data_access_error( initialized_app: FastAPI, short_dsm_cleaner_interval: int, - with_storage_celery_worker_controller: TestWorkController, + with_storage_celery_worker: TestWorkController, storage_rabbitmq_rpc_client: RabbitMQRPCClient, user_id: UserID, product_name: ProductName, From 146b08057a5e457e441d4f02aff3428283c4775b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 13 Jun 2025 15:40:33 +0200 Subject: [PATCH 70/91] add deps --- packages/celery-library/requirements/_test.in | 1 + .../celery-library/requirements/_test.txt | 96 +++++++++++++++++++ .../celery-library/requirements/_tools.txt | 7 +- 3 files changed, 102 insertions(+), 2 deletions(-) diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index fdf47680fb2..9de06104bf0 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -17,6 +17,7 @@ pint pytest pytest-asyncio pytest-benchmark +pytest-celery pytest-cov pytest-icdiff pytest-instafail diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index 059125f415c..dcd135db6d7 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -1,3 +1,7 @@ +amqp==5.3.1 + # via + # -c requirements/_base.txt + # kombu annotated-types==0.7.0 # via # -c requirements/_base.txt @@ -7,16 +11,54 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx # starlette +billiard==4.2.1 + # via + # -c requirements/_base.txt + # celery +celery==5.5.2 + # via + # -c requirements/_base.txt + # pytest-celery certifi==2025.4.26 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # httpcore # httpx + # requests +charset-normalizer==3.4.2 + # via + # -c requirements/_base.txt + # requests +click==8.1.8 + # via + # -c requirements/_base.txt + # celery + # click-didyoumean + # click-plugins + # click-repl +click-didyoumean==0.3.1 + # via + # -c requirements/_base.txt + # celery +click-plugins==1.1.1 + # via + # -c requirements/_base.txt + # celery 
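For illustration, the resulting fixture shape once the patch target points at the storage module; the key point is that the symbol is patched where the application looks it up, not where it is defined. The layout below is an assumption reconstructed from the hunk above.

```python
# Minimal sketch of the mocked Celery app fixture, assuming pytest-mock and a
# celery_config fixture like the one above. Patching the storage module's own
# "create_app" name is what makes the service pick up the in-memory app.
from typing import Any

import pytest
from celery import Celery
from pytest_mock import MockerFixture


@pytest.fixture
def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery:
    celery_app = Celery(**celery_config)
    # patch the lookup site used by the application code, not the defining module
    mocker.patch(
        "simcore_service_storage.modules.celery.create_app",
        return_value=celery_app,
    )
    return celery_app
```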
+click-repl==0.3.0 + # via + # -c requirements/_base.txt + # celery coverage==7.8.0 # via # -r requirements/_test.in # pytest-cov +debugpy==1.8.14 + # via pytest-celery +docker==7.1.0 + # via + # pytest-celery + # pytest-docker-tools faker==37.3.0 # via -r requirements/_test.in fastapi==0.115.12 @@ -40,8 +82,14 @@ idna==3.10 # -c requirements/_base.txt # anyio # httpx + # requests iniconfig==2.1.0 # via pytest +kombu==5.5.3 + # via + # -c requirements/_base.txt + # celery + # pytest-celery packaging==25.0 # via # -c requirements/_base.txt @@ -55,6 +103,14 @@ pluggy==1.6.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff +prompt-toolkit==3.0.51 + # via + # -c requirements/_base.txt + # click-repl +psutil==7.0.0 + # via + # -c requirements/_base.txt + # pytest-celery py-cpuinfo==9.0.0 # via pytest-benchmark pydantic==2.11.4 @@ -72,6 +128,7 @@ pytest==8.3.5 # pytest-asyncio # pytest-benchmark # pytest-cov + # pytest-docker-tools # pytest-icdiff # pytest-instafail # pytest-mock @@ -80,8 +137,12 @@ pytest-asyncio==0.23.8 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in +pytest-celery==1.2.0 + # via -r requirements/_test.in pytest-cov==6.1.1 # via -r requirements/_test.in +pytest-docker-tools==3.1.9 + # via pytest-celery pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 @@ -92,6 +153,10 @@ pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via + # -c requirements/_base.txt + # celery python-dotenv==1.1.0 # via # -c requirements/_base.txt @@ -101,6 +166,16 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in +requests==2.32.3 + # via + # -c requirements/_base.txt + # docker +setuptools==80.9.0 + # via pytest-celery +six==1.17.0 + # via + # -c requirements/_base.txt + # python-dateutil sniffio==1.3.1 # via # -c requirements/_base.txt @@ -109,6 +184,10 @@ starlette==0.46.2 # via # -c requirements/../../../requirements/constraints.txt # fastapi +tenacity==9.1.2 + # via + # -c requirements/_base.txt + # pytest-celery termcolor==3.1.0 # via pytest-sugar typing-extensions==4.13.2 @@ -130,3 +209,20 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker + # kombu +urllib3==2.4.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # docker + # requests +vine==5.1.0 + # via + # -c requirements/_base.txt + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via + # -c requirements/_base.txt + # prompt-toolkit diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt index e0213f1353c..f2f7f688aac 100644 --- a/packages/celery-library/requirements/_tools.txt +++ b/packages/celery-library/requirements/_tools.txt @@ -11,6 +11,7 @@ cfgv==3.4.0 click==8.1.8 # via # -c requirements/_base.txt + # -c requirements/_test.txt # black # pip-tools dill==0.4.0 @@ -69,8 +70,10 @@ pyyaml==6.0.2 # pre-commit ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 - # via pip-tools +setuptools==80.9.0 + # via + # -c requirements/_test.txt + # pip-tools tomlkit==0.13.2 # via pylint typing-extensions==4.13.2 From e8a4510cd733fda90cfdedfc33e7da40fe1fd510 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Jun 2025 12:59:02 +0200 Subject: [PATCH 71/91] upgrade req --- packages/celery-library/requirements/_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt index dcd135db6d7..06df05f2779 100644 --- a/packages/celery-library/requirements/_test.txt +++ b/packages/celery-library/requirements/_test.txt @@ -133,7 +133,7 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.23.8 +pytest-asyncio==0.26.0 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in From 687d84870eb93529de40414e44cb6ec3919904f6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Jun 2025 15:07:11 +0200 Subject: [PATCH 72/91] tests: refactor --- .../src/celery_library/backends/_memory.py | 71 ++++++++++ .../src/celery_library/signals.py | 10 +- packages/celery-library/tests/conftest.py | 130 ++++++++++++++++++ .../celery-library/tests/unit/test_tasks.py | 56 ++++---- .../src/servicelib/base_app_server.py | 11 +- .../src/servicelib/fastapi/app_server.py | 12 +- 6 files changed, 248 insertions(+), 42 deletions(-) create mode 100644 packages/celery-library/src/celery_library/backends/_memory.py create mode 100644 packages/celery-library/tests/conftest.py rename services/storage/tests/unit/test_modules_celery.py => packages/celery-library/tests/unit/test_tasks.py (76%) diff --git a/packages/celery-library/src/celery_library/backends/_memory.py b/packages/celery-library/src/celery_library/backends/_memory.py new file mode 100644 index 00000000000..4d1f8aaae3f --- /dev/null +++ b/packages/celery-library/src/celery_library/backends/_memory.py @@ -0,0 +1,71 @@ +from dataclasses import dataclass +from datetime import timedelta + +from models_library.progress_bar import ProgressReport + +from ..models import ( + Task, + TaskContext, + TaskID, + TaskMetadata, + TaskUUID, + build_task_id_prefix, +) + + +@dataclass +class MemoryTaskInfo: + metadata: TaskMetadata + progress: ProgressReport + + +class MemoryTaskInfoStore: + def __init__(self) -> None: + self._tasks: dict[TaskID, MemoryTaskInfo] = {} + + async def create_task( + self, + task_id: TaskID, + task_metadata: TaskMetadata, + expiry: timedelta, + ) -> None: + self._tasks[task_id] = MemoryTaskInfo( + metadata=task_metadata, + progress=ProgressReport(actual_value=0.0), + ) + + async def exists_task(self, task_id: TaskID) -> bool: + return task_id in self._tasks + + async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: + task_info = self._tasks.get(task_id) + if task_info is None: + return None + return task_info.metadata + + async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: + task_info = self._tasks.get(task_id) + if task_info is None: + return None + return task_info.progress + + async def list_tasks(self, task_context: TaskContext) -> list[Task]: + tasks = [] + task_id_prefix = build_task_id_prefix(task_context) + for task_id, task_info in self._tasks.items(): + if task_id.startswith(task_id_prefix): + tasks.append( + Task( + uuid=TaskUUID(task_id[len(task_id_prefix) + 1 :]), + metadata=task_info.metadata, + ) + ) + return tasks + + async def remove_task(self, task_id: TaskID) -> None: + self._tasks.pop(task_id, None) + + async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: + task_info = self._tasks.get(task_id) + if task_info is not None: + task_info.progress = report diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 2186b2e99e8..c43ecb3c8a5 100644 --- 
a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -4,7 +4,7 @@ from celery import Celery # type: ignore[import-untyped] from celery.worker.worker import WorkController # type: ignore[import-untyped] -from servicelib.base_app_server import BaseAppServer +from servicelib.base_app_server import STARTUP_TIMEOUT, BaseAppServer from servicelib.logging_utils import log_context from settings_library.celery import CelerySettings @@ -30,6 +30,8 @@ def _init(startup_complete_event: threading.Event) -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) + shutdown_event = asyncio.Event() + app_server.event_loop = loop async def _setup(): @@ -46,7 +48,9 @@ async def _setup(): ) loop.run_until_complete(_setup()) - loop.run_until_complete(app_server.startup(startup_complete_event)) + loop.run_until_complete( + app_server.startup(startup_complete_event, shutdown_event) + ) thread = threading.Thread( group=None, @@ -57,7 +61,7 @@ async def _setup(): ) thread.start() - startup_complete_event.wait() + startup_complete_event.wait(STARTUP_TIMEOUT * 1.1) def on_worker_shutdown(sender, **_kwargs) -> None: diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py new file mode 100644 index 00000000000..609415099d1 --- /dev/null +++ b/packages/celery-library/tests/conftest.py @@ -0,0 +1,130 @@ +import asyncio +import datetime +from collections.abc import AsyncIterator, Callable +from functools import partial +from threading import Event +from typing import Any + +import pytest +from celery import Celery # type: ignore[import-untyped] +from celery.contrib.testing.worker import TestWorkController, start_worker +from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController +from celery_library.backends._memory import MemoryTaskInfoStore +from celery_library.signals import on_worker_init, on_worker_shutdown +from celery_library.utils import CeleryTaskManager, get_task_manager +from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.base_app_server import BaseAppServer +from settings_library.celery import CelerySettings + +pytest_plugins = [ + "pytest_simcore.environment_configs", + "pytest_simcore.repository_paths", +] + + +class FakeAppServer(BaseAppServer): + def __init__(self): + self._shutdown_event: asyncio.Event | None = None + + async def startup( + self, completed_event: Event, shutdown_event: asyncio.Event + ) -> None: + completed_event.set() + await shutdown_event.wait() + + async def shutdown(self) -> None: + if self._shutdown_event is not None: + self._shutdown_event.set() + + +@pytest.fixture +def celery_config() -> dict[str, Any]: + return { + "broker_connection_retry_on_startup": True, + "broker_url": "memory://localhost//", + "result_backend": "cache+memory://localhost//", + "result_expires": datetime.timedelta(days=7), + "result_extended": True, + "pool": "threads", + "task_default_queue": "default", + "task_send_sent_event": True, + "task_track_started": True, + "worker_send_task_events": True, + } + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + """override if tasks are needed""" + + def _(celery_app: Celery) -> None: ... 
+ + return _ + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + env_devel_dict: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **env_devel_dict, + }, + ) + + +@pytest.fixture +def celery_settings( + app_environment: EnvVarsDict, +) -> CelerySettings: + return CelerySettings.create_from_envs() + + +@pytest.fixture +async def with_storage_celery_worker( + celery_app: Celery, + celery_settings: CelerySettings, + register_celery_tasks: Callable[[Celery], None], + mocker: MockerFixture, +) -> AsyncIterator[TestWorkController]: + mocker.patch( + "celery_library.signals.create_task_manager", + return_value=CeleryTaskManager( + celery_app, celery_settings, MemoryTaskInfoStore() + ), + ) + + def _on_worker_init_wrapper(sender: WorkController, **_kwargs): + return partial(on_worker_init, FakeAppServer(), celery_settings)( + sender, **_kwargs + ) + + worker_init.connect(_on_worker_init_wrapper) + worker_shutdown.connect(on_worker_shutdown) + + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + pool="threads", + concurrency=1, + loglevel="info", + perform_ping_check=False, + queues="default", + ) as worker: + yield worker + + +@pytest.fixture +def celery_task_manager( + with_storage_celery_worker: TestWorkController, +) -> CeleryTaskManager: + assert with_storage_celery_worker.app # nosec + assert isinstance(with_storage_celery_worker.app, Celery) # nosec + + return get_task_manager(with_storage_celery_worker.app) diff --git a/services/storage/tests/unit/test_modules_celery.py b/packages/celery-library/tests/unit/test_tasks.py similarity index 76% rename from services/storage/tests/unit/test_modules_celery.py rename to packages/celery-library/tests/unit/test_tasks.py index f3eb5d21e7e..9f5d43c25e3 100644 --- a/services/storage/tests/unit/test_modules_celery.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -13,7 +13,6 @@ import pytest from celery import Celery, Task from celery.contrib.abortable import AbortableTask -from celery.contrib.testing.worker import TestWorkController from celery_library.errors import TransferrableCeleryError from celery_library.models import ( TaskContext, @@ -28,25 +27,12 @@ from celery_library.task_manager import CeleryTaskManager from celery_library.utils import get_app_server, get_task_manager from common_library.errors_classes import OsparcErrorMixin -from fastapi import FastAPI from models_library.progress_bar import ProgressReport from servicelib.logging_utils import log_context -from simcore_service_storage.modules.celery import get_task_manager_from_app from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed _logger = logging.getLogger(__name__) -pytest_simcore_core_services_selection = ["postgres", "rabbit"] -pytest_simcore_ops_services_selection = [] - - -@pytest.fixture -def celery_client( - initialized_app: FastAPI, - with_storage_celery_worker: TestWorkController, -) -> CeleryTaskManager: - return get_task_manager_from_app(initialized_app) - async def _fake_file_processor( celery_app: Celery, task_name: str, task_id: str, files: list[str] @@ -110,11 +96,11 @@ def _(celery_app: Celery) -> None: async def test_submitting_task_calling_async_function_results_with_success_state( - celery_client: CeleryTaskManager, + celery_task_manager: CeleryTaskManager, ): task_context = TaskContext(user_id=42) - task_uuid = await celery_client.submit_task( + task_uuid = await celery_task_manager.submit_task( TaskMetadata( name=fake_file_processor.__name__, 
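As a usage sketch, a test module built on these fixtures would override `register_celery_tasks` to register its own task and then drive it through `celery_task_manager`. The task body and names below are invented, and the registration call assumes `register_task(celery_app, fn)` as used elsewhere in this series.

```python
# Hypothetical test module relying on the conftest fixtures shown above.
import time

import pytest
from celery import Celery, Task
from celery_library.models import TaskContext, TaskID, TaskMetadata
from celery_library.task import register_task
from celery_library.task_manager import CeleryTaskManager


def quick_task(task: Task, task_id: TaskID) -> str:
    time.sleep(0.1)  # stand-in for real work
    return "done"


@pytest.fixture
def register_celery_tasks():
    # override of the conftest fixture: register the task under test
    def _(celery_app: Celery) -> None:
        register_task(celery_app, quick_task)

    return _


async def test_quick_task(celery_task_manager: CeleryTaskManager):
    task_uuid = await celery_task_manager.submit_task(
        TaskMetadata(name=quick_task.__name__),
        task_context=TaskContext(user_id=1),
    )
    assert task_uuid is not None
```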
), @@ -128,23 +114,23 @@ async def test_submitting_task_calling_async_function_results_with_success_state stop=stop_after_delay(30), ): with attempt: - status = await celery_client.get_task_status(task_context, task_uuid) + status = await celery_task_manager.get_task_status(task_context, task_uuid) assert status.task_state == TaskState.SUCCESS assert ( - await celery_client.get_task_status(task_context, task_uuid) + await celery_task_manager.get_task_status(task_context, task_uuid) ).task_state == TaskState.SUCCESS assert ( - await celery_client.get_task_result(task_context, task_uuid) + await celery_task_manager.get_task_result(task_context, task_uuid) ) == "archive.zip" async def test_submitting_task_with_failure_results_with_error( - celery_client: CeleryTaskManager, + celery_task_manager: CeleryTaskManager, ): task_context = TaskContext(user_id=42) - task_uuid = await celery_client.submit_task( + task_uuid = await celery_task_manager.submit_task( TaskMetadata( name=failure_task.__name__, ), @@ -158,26 +144,28 @@ async def test_submitting_task_with_failure_results_with_error( ): with attempt: - raw_result = await celery_client.get_task_result(task_context, task_uuid) + raw_result = await celery_task_manager.get_task_result( + task_context, task_uuid + ) assert isinstance(raw_result, TransferrableCeleryError) - raw_result = await celery_client.get_task_result(task_context, task_uuid) + raw_result = await celery_task_manager.get_task_result(task_context, task_uuid) assert f"{raw_result}" == "Something strange happened: BOOM!" async def test_cancelling_a_running_task_aborts_and_deletes( - celery_client: CeleryTaskManager, + celery_task_manager: CeleryTaskManager, ): task_context = TaskContext(user_id=42) - task_uuid = await celery_client.submit_task( + task_uuid = await celery_task_manager.submit_task( TaskMetadata( name=dreamer_task.__name__, ), task_context=task_context, ) - await celery_client.cancel_task(task_context, task_uuid) + await celery_task_manager.cancel_task(task_context, task_uuid) for attempt in Retrying( retry=retry_if_exception_type(AssertionError), @@ -185,22 +173,24 @@ async def test_cancelling_a_running_task_aborts_and_deletes( stop=stop_after_delay(30), ): with attempt: - progress = await celery_client.get_task_status(task_context, task_uuid) + progress = await celery_task_manager.get_task_status( + task_context, task_uuid + ) assert progress.task_state == TaskState.ABORTED assert ( - await celery_client.get_task_status(task_context, task_uuid) + await celery_task_manager.get_task_status(task_context, task_uuid) ).task_state == TaskState.ABORTED - assert task_uuid not in await celery_client.list_tasks(task_context) + assert task_uuid not in await celery_task_manager.list_tasks(task_context) async def test_listing_task_uuids_contains_submitted_task( - celery_client: CeleryTaskManager, + celery_task_manager: CeleryTaskManager, ): task_context = TaskContext(user_id=42) - task_uuid = await celery_client.submit_task( + task_uuid = await celery_task_manager.submit_task( TaskMetadata( name=dreamer_task.__name__, ), @@ -213,10 +203,10 @@ async def test_listing_task_uuids_contains_submitted_task( stop=stop_after_delay(10), ): with attempt: - tasks = await celery_client.list_tasks(task_context) + tasks = await celery_task_manager.list_tasks(task_context) assert len(tasks) == 1 assert task_uuid == tasks[0].uuid - tasks = await celery_client.list_tasks(task_context) + tasks = await celery_task_manager.list_tasks(task_context) assert len(tasks) == 1 assert task_uuid == 
tasks[0].uuid diff --git a/packages/service-library/src/servicelib/base_app_server.py b/packages/service-library/src/servicelib/base_app_server.py index 8e8ddfbd00a..8b83fde3b41 100644 --- a/packages/service-library/src/servicelib/base_app_server.py +++ b/packages/service-library/src/servicelib/base_app_server.py @@ -1,8 +1,10 @@ +import asyncio +import datetime import threading from abc import ABC, abstractmethod from asyncio import AbstractEventLoop from contextlib import suppress -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Final if TYPE_CHECKING: with suppress(ImportError): @@ -11,6 +13,9 @@ from aiohttp.web import Application +STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() + + class BaseAppServer(ABC): @property def fastapi_app(self) -> "FastAPI": @@ -21,7 +26,9 @@ def aiohttp_app(self) -> "Application": raise NotImplementedError @abstractmethod - async def startup(self, completed_event: threading.Event): + async def startup( + self, completed_event: threading.Event, shutdown_event: asyncio.Event + ) -> None: pass @property diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/app_server.py index adc511d8f73..d7bfa29e3eb 100644 --- a/packages/service-library/src/servicelib/fastapi/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/app_server.py @@ -16,14 +16,16 @@ class FastAPIAppServer(BaseAppServer): def __init__(self, app: FastAPI): self._app = app self._lifespan_manager: LifespanManager | None = None - self._shutdown_event = asyncio.Event() + self._shutdown_event: asyncio.Event | None = None @property def fastapi_app(self) -> FastAPI: assert isinstance(self._app, FastAPI) # nosec return self._app - async def startup(self, completed_event: threading.Event): + async def startup( + self, completed_event: threading.Event, shutdown_event: asyncio.Event + ): self._lifespan_manager = LifespanManager( self.fastapi_app, startup_timeout=_STARTUP_TIMEOUT, @@ -31,10 +33,12 @@ async def startup(self, completed_event: threading.Event): ) await self._lifespan_manager.__aenter__() completed_event.set() - await self._shutdown_event.wait() + await shutdown_event.wait() async def shutdown(self): - self._shutdown_event.set() + if self._shutdown_event is not None: + self._shutdown_event.set() + if self._lifespan_manager is None: return await self._lifespan_manager.__aexit__(None, None, None) From d1bed3aceeb3166b837dd360f78ca555659bb331 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 11:28:17 +0200 Subject: [PATCH 73/91] tests: fix --- .../src/celery_library/backends/_memory.py | 71 ------------------- .../src/celery_library/common.py | 4 +- .../src/celery_library/models.py | 6 +- .../src/celery_library/signals.py | 21 +++--- .../src/celery_library/utils.py | 2 +- packages/celery-library/tests/conftest.py | 71 ++++++++++--------- .../celery-library/tests/unit/test_tasks.py | 3 + .../app_server.py} | 12 ++-- .../fastapi/{ => celery}/app_server.py | 5 +- .../modules/celery/__init__.py | 2 +- .../modules/celery/worker_main.py | 4 +- services/storage/tests/conftest.py | 2 +- 12 files changed, 71 insertions(+), 132 deletions(-) delete mode 100644 packages/celery-library/src/celery_library/backends/_memory.py rename packages/service-library/src/servicelib/{base_app_server.py => celery/app_server.py} (100%) rename packages/service-library/src/servicelib/fastapi/{ => celery}/app_server.py (90%) diff --git 
a/packages/celery-library/src/celery_library/backends/_memory.py b/packages/celery-library/src/celery_library/backends/_memory.py deleted file mode 100644 index 4d1f8aaae3f..00000000000 --- a/packages/celery-library/src/celery_library/backends/_memory.py +++ /dev/null @@ -1,71 +0,0 @@ -from dataclasses import dataclass -from datetime import timedelta - -from models_library.progress_bar import ProgressReport - -from ..models import ( - Task, - TaskContext, - TaskID, - TaskMetadata, - TaskUUID, - build_task_id_prefix, -) - - -@dataclass -class MemoryTaskInfo: - metadata: TaskMetadata - progress: ProgressReport - - -class MemoryTaskInfoStore: - def __init__(self) -> None: - self._tasks: dict[TaskID, MemoryTaskInfo] = {} - - async def create_task( - self, - task_id: TaskID, - task_metadata: TaskMetadata, - expiry: timedelta, - ) -> None: - self._tasks[task_id] = MemoryTaskInfo( - metadata=task_metadata, - progress=ProgressReport(actual_value=0.0), - ) - - async def exists_task(self, task_id: TaskID) -> bool: - return task_id in self._tasks - - async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: - task_info = self._tasks.get(task_id) - if task_info is None: - return None - return task_info.metadata - - async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: - task_info = self._tasks.get(task_id) - if task_info is None: - return None - return task_info.progress - - async def list_tasks(self, task_context: TaskContext) -> list[Task]: - tasks = [] - task_id_prefix = build_task_id_prefix(task_context) - for task_id, task_info in self._tasks.items(): - if task_id.startswith(task_id_prefix): - tasks.append( - Task( - uuid=TaskUUID(task_id[len(task_id_prefix) + 1 :]), - metadata=task_info.metadata, - ) - ) - return tasks - - async def remove_task(self, task_id: TaskID) -> None: - self._tasks.pop(task_id, None) - - async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: - task_info = self._tasks.get(task_id) - if task_info is not None: - task_info.progress = report diff --git a/packages/celery-library/src/celery_library/common.py b/packages/celery-library/src/celery_library/common.py index 4b38f0aacd1..3b7c9cd22ab 100644 --- a/packages/celery-library/src/celery_library/common.py +++ b/packages/celery-library/src/celery_library/common.py @@ -38,7 +38,9 @@ def create_app(settings: CelerySettings) -> Celery: ) -def create_task_manager(app: Celery, settings: CelerySettings) -> CeleryTaskManager: +async def create_task_manager( + app: Celery, settings: CelerySettings +) -> CeleryTaskManager: redis_client_sdk = RedisClientSDK( settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( RedisDatabase.CELERY_TASKS diff --git a/packages/celery-library/src/celery_library/models.py b/packages/celery-library/src/celery_library/models.py index 56901b33f7d..8aa442a2416 100644 --- a/packages/celery-library/src/celery_library/models.py +++ b/packages/celery-library/src/celery_library/models.py @@ -13,17 +13,17 @@ ] TaskUUID: TypeAlias = UUID -_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" +_TASK_ID_KEY_DELIMITATOR: Final[str] = ":" def build_task_id_prefix(task_context: TaskContext) -> str: - return _CELERY_TASK_ID_KEY_SEPARATOR.join( + return _TASK_ID_KEY_DELIMITATOR.join( [f"{task_context[key]}" for key in sorted(task_context)] ) def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: - return _CELERY_TASK_ID_KEY_SEPARATOR.join( + return _TASK_ID_KEY_DELIMITATOR.join( [build_task_id_prefix(task_context), f"{task_uuid}"] ) diff 
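The renamed delimiter above is used by the two helpers that compose task ids from a task context. A standalone sketch of that composition, with illustrative values, shows that the prefix is built from the context values ordered by key name:

```python
# Standalone illustration of the task-id helpers (mirrors the functions in the
# diff; the context values are made up).
from uuid import UUID

_TASK_ID_KEY_DELIMITATOR = ":"


def build_task_id_prefix(task_context: dict) -> str:
    # keys are sorted, so the prefix is stable regardless of insertion order
    return _TASK_ID_KEY_DELIMITATOR.join(
        f"{task_context[key]}" for key in sorted(task_context)
    )


def build_task_id(task_context: dict, task_uuid: UUID) -> str:
    return _TASK_ID_KEY_DELIMITATOR.join(
        [build_task_id_prefix(task_context), f"{task_uuid}"]
    )


task_context = {"user_id": 42, "product_name": "osparc"}
task_uuid = UUID("00000000-0000-0000-0000-000000000001")

assert build_task_id_prefix(task_context) == "osparc:42"  # product_name sorts before user_id
assert build_task_id(task_context, task_uuid) == f"osparc:42:{task_uuid}"
```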
--git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index c43ecb3c8a5..36f1e028c7d 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -3,8 +3,8 @@ import threading from celery import Celery # type: ignore[import-untyped] -from celery.worker.worker import WorkController # type: ignore[import-untyped] -from servicelib.base_app_server import STARTUP_TIMEOUT, BaseAppServer +from celery.worker.worker import WorkController +from servicelib.celery.app_server import STARTUP_TIMEOUT, BaseAppServer from servicelib.logging_utils import log_context from settings_library.celery import CelerySettings @@ -32,25 +32,26 @@ def _init(startup_complete_event: threading.Event) -> None: shutdown_event = asyncio.Event() - app_server.event_loop = loop - - async def _setup(): + async def _setup_task_manager(): assert sender.app # nosec assert isinstance(sender.app, Celery) # nosec set_app_server(sender.app, app_server) set_task_manager( sender.app, - create_task_manager( + await create_task_manager( sender.app, celery_settings, ), ) - loop.run_until_complete(_setup()) - loop.run_until_complete( - app_server.startup(startup_complete_event, shutdown_event) - ) + async def _setup_app_server(): + await app_server.startup(startup_complete_event, shutdown_event) + + app_server.event_loop = loop + + loop.run_until_complete(_setup_task_manager()) + loop.run_until_complete(_setup_app_server()) thread = threading.Thread( group=None, diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py index 174268a9a2e..7a63d9fbcde 100644 --- a/packages/celery-library/src/celery_library/utils.py +++ b/packages/celery-library/src/celery_library/utils.py @@ -1,5 +1,5 @@ from celery import Celery # type: ignore[import-untyped] -from servicelib.base_app_server import BaseAppServer +from servicelib.celery.app_server import BaseAppServer from .task_manager import CeleryTaskManager diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 609415099d1..0bf92d2434d 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -10,17 +10,20 @@ from celery.contrib.testing.worker import TestWorkController, start_worker from celery.signals import worker_init, worker_shutdown from celery.worker.worker import WorkController -from celery_library.backends._memory import MemoryTaskInfoStore +from celery_library.common import create_task_manager from celery_library.signals import on_worker_init, on_worker_shutdown -from celery_library.utils import CeleryTaskManager, get_task_manager -from pytest_mock import MockerFixture +from celery_library.utils import CeleryTaskManager from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.base_app_server import BaseAppServer +from servicelib.celery.app_server import BaseAppServer from settings_library.celery import CelerySettings +from settings_library.redis import RedisSettings pytest_plugins = [ + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.redis_service", "pytest_simcore.repository_paths", ] @@ -32,30 +35,15 @@ def __init__(self): async def startup( self, completed_event: Event, shutdown_event: asyncio.Event ) -> None: + self._shutdown_event = shutdown_event 
completed_event.set() - await shutdown_event.wait() + await self._shutdown_event.wait() async def shutdown(self) -> None: if self._shutdown_event is not None: self._shutdown_event.set() -@pytest.fixture -def celery_config() -> dict[str, Any]: - return { - "broker_connection_retry_on_startup": True, - "broker_url": "memory://localhost//", - "result_backend": "cache+memory://localhost//", - "result_expires": datetime.timedelta(days=7), - "result_extended": True, - "pool": "threads", - "task_default_queue": "default", - "task_send_sent_event": True, - "task_track_started": True, - "worker_send_task_events": True, - } - - @pytest.fixture def register_celery_tasks() -> Callable[[Celery], None]: """override if tasks are needed""" @@ -68,12 +56,16 @@ def _(celery_app: Celery) -> None: ... @pytest.fixture def app_environment( monkeypatch: pytest.MonkeyPatch, + redis_service: RedisSettings, env_devel_dict: EnvVarsDict, ) -> EnvVarsDict: return setenvs_from_dict( monkeypatch, { **env_devel_dict, + "REDIS_HOST": redis_service.REDIS_HOST, + "REDIS_PORT": f"{redis_service.REDIS_PORT}", + "REDIS_PASSWORD": redis_service.REDIS_PASSWORD.get_secret_value(), }, ) @@ -85,20 +77,29 @@ def celery_settings( return CelerySettings.create_from_envs() +@pytest.fixture(scope="session") +def celery_config() -> dict[str, Any]: + return { + "broker_connection_retry_on_startup": True, + "broker_url": "memory://localhost//", + "result_backend": "cache+memory://localhost//", + # "result_backend": celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn(RedisDatabase.CELERY_TASKS), + "result_expires": datetime.timedelta(days=7), + "result_extended": True, + "pool": "threads", + "task_default_queue": "default", + "task_send_sent_event": True, + "task_track_started": True, + "worker_send_task_events": True, + } + + @pytest.fixture async def with_storage_celery_worker( celery_app: Celery, celery_settings: CelerySettings, register_celery_tasks: Callable[[Celery], None], - mocker: MockerFixture, ) -> AsyncIterator[TestWorkController]: - mocker.patch( - "celery_library.signals.create_task_manager", - return_value=CeleryTaskManager( - celery_app, celery_settings, MemoryTaskInfoStore() - ), - ) - def _on_worker_init_wrapper(sender: WorkController, **_kwargs): return partial(on_worker_init, FakeAppServer(), celery_settings)( sender, **_kwargs @@ -121,10 +122,12 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs): @pytest.fixture -def celery_task_manager( +async def celery_task_manager( + celery_app: Celery, + celery_settings: CelerySettings, with_storage_celery_worker: TestWorkController, ) -> CeleryTaskManager: - assert with_storage_celery_worker.app # nosec - assert isinstance(with_storage_celery_worker.app, Celery) # nosec - - return get_task_manager(with_storage_celery_worker.app) + return await create_task_manager( + celery_app, + celery_settings, + ) diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index 9f5d43c25e3..1c40d812dcc 100644 --- a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -33,6 +33,9 @@ _logger = logging.getLogger(__name__) +pytest_simcore_core_services_selection = ["redis"] +pytest_simcore_ops_services_selection = [] + async def _fake_file_processor( celery_app: Celery, task_name: str, task_id: str, files: list[str] diff --git a/packages/service-library/src/servicelib/base_app_server.py b/packages/service-library/src/servicelib/celery/app_server.py similarity index 100% 
rename from packages/service-library/src/servicelib/base_app_server.py rename to packages/service-library/src/servicelib/celery/app_server.py index 8b83fde3b41..9b01f5e35ca 100644 --- a/packages/service-library/src/servicelib/base_app_server.py +++ b/packages/service-library/src/servicelib/celery/app_server.py @@ -25,12 +25,6 @@ def fastapi_app(self) -> "FastAPI": def aiohttp_app(self) -> "Application": raise NotImplementedError - @abstractmethod - async def startup( - self, completed_event: threading.Event, shutdown_event: asyncio.Event - ) -> None: - pass - @property def event_loop(self) -> AbstractEventLoop: return self._event_loop @@ -39,6 +33,12 @@ def event_loop(self) -> AbstractEventLoop: def event_loop(self, loop: AbstractEventLoop) -> None: self._event_loop = loop + @abstractmethod + async def startup( + self, completed_event: threading.Event, shutdown_event: asyncio.Event + ) -> None: + pass + @abstractmethod async def shutdown(self): pass diff --git a/packages/service-library/src/servicelib/fastapi/app_server.py b/packages/service-library/src/servicelib/fastapi/celery/app_server.py similarity index 90% rename from packages/service-library/src/servicelib/fastapi/app_server.py rename to packages/service-library/src/servicelib/fastapi/celery/app_server.py index d7bfa29e3eb..d6b9310d0eb 100644 --- a/packages/service-library/src/servicelib/fastapi/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/celery/app_server.py @@ -6,7 +6,7 @@ from asgi_lifespan import LifespanManager from fastapi import FastAPI -from ..base_app_server import BaseAppServer +from ...celery.app_server import BaseAppServer _SHUTDOWN_TIMEOUT: Final[float] = timedelta(seconds=10).total_seconds() _STARTUP_TIMEOUT: Final[float] = timedelta(minutes=1).total_seconds() @@ -31,9 +31,10 @@ async def startup( startup_timeout=_STARTUP_TIMEOUT, shutdown_timeout=_SHUTDOWN_TIMEOUT, ) + self._shutdown_event = shutdown_event await self._lifespan_manager.__aenter__() completed_event.set() - await shutdown_event.wait() + await self._shutdown_event.wait() async def shutdown(self): if self._shutdown_event is not None: diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py index ee01f352ae7..5e6ff2b0b93 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/__init__.py +++ b/services/storage/src/simcore_service_storage/modules/celery/__init__.py @@ -7,7 +7,7 @@ def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: async def on_startup() -> None: - app.state.celery_client = create_task_manager( + app.state.celery_client = await create_task_manager( create_app(celery_settings), celery_settings ) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index 913bef6c6b8..ebd9832b9e1 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -9,10 +9,10 @@ on_worker_init, on_worker_shutdown, ) -from servicelib.fastapi.app_server import FastAPIAppServer +from servicelib.fastapi.celery.app_server import FastAPIAppServer from servicelib.logging_utils import config_all_loggers -from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks +from ...api._worker_tasks.tasks import setup_worker_tasks from ...core.application import create_app from 
...core.settings import ApplicationSettings diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 5c622ce0c04..ad37ba752e6 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -63,7 +63,7 @@ ) from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status -from servicelib.fastapi.app_server import FastAPIAppServer +from servicelib.fastapi.celery.app_server import FastAPIAppServer from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.utils import limited_gather from settings_library.rabbit import RabbitSettings From 60de6c667dd5594b6f8c84c1eb8990d096db399b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 11:31:35 +0200 Subject: [PATCH 74/91] tests: fix --- packages/celery-library/src/celery_library/signals.py | 2 +- packages/service-library/src/servicelib/celery/__init__.py | 0 .../service-library/src/servicelib/fastapi/celery/__init__.py | 0 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 packages/service-library/src/servicelib/celery/__init__.py create mode 100644 packages/service-library/src/servicelib/fastapi/celery/__init__.py diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 36f1e028c7d..4a973d69cb4 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -3,7 +3,7 @@ import threading from celery import Celery # type: ignore[import-untyped] -from celery.worker.worker import WorkController +from celery.worker.worker import WorkController # type: ignore[import-untyped] from servicelib.celery.app_server import STARTUP_TIMEOUT, BaseAppServer from servicelib.logging_utils import log_context from settings_library.celery import CelerySettings diff --git a/packages/service-library/src/servicelib/celery/__init__.py b/packages/service-library/src/servicelib/celery/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/service-library/src/servicelib/fastapi/celery/__init__.py b/packages/service-library/src/servicelib/fastapi/celery/__init__.py new file mode 100644 index 00000000000..e69de29bb2d From 54ea767a17511029d9075fc387fe0b1e3a9a1469 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 11:38:24 +0200 Subject: [PATCH 75/91] tests: fix list --- packages/celery-library/tests/unit/test_tasks.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index 1c40d812dcc..8b2dbb08a3b 100644 --- a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -207,9 +207,7 @@ async def test_listing_task_uuids_contains_submitted_task( ): with attempt: tasks = await celery_task_manager.list_tasks(task_context) - assert len(tasks) == 1 - assert task_uuid == tasks[0].uuid + assert any(task.uuid == task_uuid for task in tasks) tasks = await celery_task_manager.list_tasks(task_context) - assert len(tasks) == 1 - assert task_uuid == tasks[0].uuid + assert any(task.uuid == task_uuid for task in tasks) From 4d408c63f86b2931ab2fccf94bf8bdb312bc8ca1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 12:28:51 +0200 Subject: [PATCH 76/91] tests: fix typecheck --- packages/celery-library/tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
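The worker entrypoint above pulls these pieces together. A sketch of that wiring, using the module paths as they stand after this patch; the FastAPI application created here is a stand-in, since the real entrypoint builds the storage application and registers its worker tasks.

```python
# Sketch of a Celery worker entrypoint wiring FastAPIAppServer into the
# worker lifecycle signals. The FastAPI application below is a stand-in.
from functools import partial

from celery.signals import worker_init, worker_shutdown
from celery_library.common import create_app as create_celery_app
from celery_library.signals import on_worker_init, on_worker_shutdown
from fastapi import FastAPI
from servicelib.fastapi.celery.app_server import FastAPIAppServer
from settings_library.celery import CelerySettings

celery_settings = CelerySettings.create_from_envs()

# the Celery application object the worker process runs
app = create_celery_app(celery_settings)

# wrap the (stand-in) FastAPI application so its lifespan runs inside the worker
app_server = FastAPIAppServer(app=FastAPI(title="worker-side app"))

# keep module-level references so the connected receivers are not garbage collected
_on_worker_init = partial(on_worker_init, app_server, celery_settings)
worker_init.connect(_on_worker_init)
worker_shutdown.connect(on_worker_shutdown)
```

A worker could then be started by pointing the Celery CLI at such a module, for example `celery -A this_module:app worker --pool threads`.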
a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 0bf92d2434d..0ac391f2b13 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -1,4 +1,5 @@ -import asyncio +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argumentimport asyncio import datetime from collections.abc import AsyncIterator, Callable from functools import partial From 6e1fc9f867509503ff15b4b55636c6c058ae8ef5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 12:33:09 +0200 Subject: [PATCH 77/91] tests: fix import --- packages/celery-library/tests/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 0ac391f2b13..4a7c974144f 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -1,5 +1,7 @@ # pylint: disable=redefined-outer-name -# pylint: disable=unused-argumentimport asyncio +# pylint: disable=unused-argument + +import asyncio import datetime from collections.abc import AsyncIterator, Callable from functools import partial From 614df7d93344ac9292f87a49dd44414c68c7d16a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 13:07:02 +0200 Subject: [PATCH 78/91] use hooks --- packages/celery-library/requirements/_test.in | 1 - packages/celery-library/tests/conftest.py | 29 +++++++------------ .../celery-library/tests/unit/test_errors.py | 2 +- .../src/servicelib/celery/app_server.py | 21 ++++++++++++-- .../servicelib/fastapi/celery/app_server.py | 16 ++-------- 5 files changed, 33 insertions(+), 36 deletions(-) diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in index 9de06104bf0..e85e3cb5177 100644 --- a/packages/celery-library/requirements/_test.in +++ b/packages/celery-library/requirements/_test.in @@ -11,7 +11,6 @@ # testing coverage faker -fastapi httpx pint pytest diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 4a7c974144f..6b87f7090a9 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -1,11 +1,9 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument -import asyncio import datetime from collections.abc import AsyncIterator, Callable from functools import partial -from threading import Event from typing import Any import pytest @@ -32,19 +30,11 @@ class FakeAppServer(BaseAppServer): - def __init__(self): - self._shutdown_event: asyncio.Event | None = None + async def on_startup(self) -> None: + pass - async def startup( - self, completed_event: Event, shutdown_event: asyncio.Event - ) -> None: - self._shutdown_event = shutdown_event - completed_event.set() - await self._shutdown_event.wait() - - async def shutdown(self) -> None: - if self._shutdown_event is not None: - self._shutdown_event.set() + async def on_shutdown(self) -> None: + pass @pytest.fixture @@ -80,13 +70,17 @@ def celery_settings( return CelerySettings.create_from_envs() +@pytest.fixture +def app_server() -> BaseAppServer: + return FakeAppServer() + + @pytest.fixture(scope="session") def celery_config() -> dict[str, Any]: return { "broker_connection_retry_on_startup": True, "broker_url": "memory://localhost//", "result_backend": "cache+memory://localhost//", - # "result_backend": 
celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn(RedisDatabase.CELERY_TASKS), "result_expires": datetime.timedelta(days=7), "result_extended": True, "pool": "threads", @@ -100,13 +94,12 @@ def celery_config() -> dict[str, Any]: @pytest.fixture async def with_storage_celery_worker( celery_app: Celery, + app_server: BaseAppServer, celery_settings: CelerySettings, register_celery_tasks: Callable[[Celery], None], ) -> AsyncIterator[TestWorkController]: def _on_worker_init_wrapper(sender: WorkController, **_kwargs): - return partial(on_worker_init, FakeAppServer(), celery_settings)( - sender, **_kwargs - ) + return partial(on_worker_init, app_server, celery_settings)(sender, **_kwargs) worker_init.connect(_on_worker_init_wrapper) worker_shutdown.connect(on_worker_shutdown) diff --git a/packages/celery-library/tests/unit/test_errors.py b/packages/celery-library/tests/unit/test_errors.py index 62ad4b73fc1..c18e3bb50d6 100644 --- a/packages/celery-library/tests/unit/test_errors.py +++ b/packages/celery-library/tests/unit/test_errors.py @@ -13,7 +13,7 @@ AccessRightError(user_id=1, file_id="a/path/to/a/file.txt", location_id=0), ], ) -def test_workflow(original_error: Exception): +def test_error(original_error: Exception): try: raise original_error # noqa: TRY301 except Exception as e: # pylint: disable=broad-exception-caught diff --git a/packages/service-library/src/servicelib/celery/app_server.py b/packages/service-library/src/servicelib/celery/app_server.py index 9b01f5e35ca..f34dd58a183 100644 --- a/packages/service-library/src/servicelib/celery/app_server.py +++ b/packages/service-library/src/servicelib/celery/app_server.py @@ -17,6 +17,9 @@ class BaseAppServer(ABC): + def __init__(self) -> None: + self._shutdown_event: asyncio.Event | None = None + @property def fastapi_app(self) -> "FastAPI": raise NotImplementedError @@ -34,11 +37,23 @@ def event_loop(self, loop: AbstractEventLoop) -> None: self._event_loop = loop @abstractmethod + async def on_startup(self) -> None: + raise NotImplementedError + async def startup( self, completed_event: threading.Event, shutdown_event: asyncio.Event ) -> None: - pass + self._shutdown_event = shutdown_event + completed_event.set() + await self.on_startup() + await self._shutdown_event.wait() @abstractmethod - async def shutdown(self): - pass + async def on_shutdown(self) -> None: + raise NotImplementedError + + async def shutdown(self) -> None: + if self._shutdown_event is not None: + self._shutdown_event.set() + + await self.on_shutdown() diff --git a/packages/service-library/src/servicelib/fastapi/celery/app_server.py b/packages/service-library/src/servicelib/fastapi/celery/app_server.py index d6b9310d0eb..9839fe721c4 100644 --- a/packages/service-library/src/servicelib/fastapi/celery/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/celery/app_server.py @@ -1,5 +1,3 @@ -import asyncio -import threading from datetime import timedelta from typing import Final @@ -14,32 +12,24 @@ class FastAPIAppServer(BaseAppServer): def __init__(self, app: FastAPI): + super().__init__() self._app = app self._lifespan_manager: LifespanManager | None = None - self._shutdown_event: asyncio.Event | None = None @property def fastapi_app(self) -> FastAPI: assert isinstance(self._app, FastAPI) # nosec return self._app - async def startup( - self, completed_event: threading.Event, shutdown_event: asyncio.Event - ): + async def on_startup(self) -> None: self._lifespan_manager = LifespanManager( self.fastapi_app, startup_timeout=_STARTUP_TIMEOUT, 
shutdown_timeout=_SHUTDOWN_TIMEOUT, ) - self._shutdown_event = shutdown_event await self._lifespan_manager.__aenter__() - completed_event.set() - await self._shutdown_event.wait() - - async def shutdown(self): - if self._shutdown_event is not None: - self._shutdown_event.set() + async def on_shutdown(self) -> None: if self._lifespan_manager is None: return await self._lifespan_manager.__aexit__(None, None, None) From 0ff0e3cdb7236b9798f71d958a4d3f28bb43a50d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 14:30:53 +0200 Subject: [PATCH 79/91] refactor models --- .../src/celery_library/backends/_redis.py | 8 ++--- .../src/celery_library/signals.py | 23 +++++------- .../celery-library/src/celery_library/task.py | 2 +- .../src/celery_library/task_manager.py | 8 ++--- .../src/celery_library/utils.py | 32 +++++++++-------- packages/celery-library/tests/conftest.py | 4 +-- .../celery-library/tests/unit/test_tasks.py | 18 +++++----- .../queued_tasks}/__init__.py | 0 .../{celery => queued_tasks}/app_server.py | 2 +- .../celery => queued_tasks}/__init__.py | 0 .../{celery => queued_tasks}/app_server.py | 10 ++++++ .../src/servicelib/queued_tasks}/models.py | 20 ++--------- .../servicelib/queued_tasks/task_manager.py | 35 +++++++++++++++++++ .../api/_worker_tasks/_files.py | 2 +- .../api/_worker_tasks/_paths.py | 2 +- .../api/_worker_tasks/_simcore_s3.py | 10 +++--- .../api/rest/_files.py | 14 ++++---- .../api/rpc/_async_jobs.py | 2 +- .../simcore_service_storage/api/rpc/_paths.py | 2 +- .../api/rpc/_simcore_s3.py | 2 +- .../modules/celery/worker_main.py | 2 +- services/storage/tests/conftest.py | 2 +- .../storage/tests/unit/test_async_jobs.py | 5 ++- 23 files changed, 116 insertions(+), 89 deletions(-) rename packages/service-library/src/servicelib/{celery => fastapi/queued_tasks}/__init__.py (100%) rename packages/service-library/src/servicelib/fastapi/{celery => queued_tasks}/app_server.py (95%) rename packages/service-library/src/servicelib/{fastapi/celery => queued_tasks}/__init__.py (100%) rename packages/service-library/src/servicelib/{celery => queued_tasks}/app_server.py (85%) rename packages/{celery-library/src/celery_library => service-library/src/servicelib/queued_tasks}/models.py (76%) create mode 100644 packages/service-library/src/servicelib/queued_tasks/task_manager.py diff --git a/packages/celery-library/src/celery_library/backends/_redis.py b/packages/celery-library/src/celery_library/backends/_redis.py index 3fd9984fb2a..07abc789747 100644 --- a/packages/celery-library/src/celery_library/backends/_redis.py +++ b/packages/celery-library/src/celery_library/backends/_redis.py @@ -5,16 +5,16 @@ from models_library.progress_bar import ProgressReport from pydantic import ValidationError -from servicelib.redis._client import RedisClientSDK - -from ..models import ( +from servicelib.queued_tasks.models import ( Task, TaskContext, TaskID, TaskMetadata, TaskUUID, - build_task_id_prefix, ) +from servicelib.redis._client import RedisClientSDK + +from ..utils import build_task_id_prefix _CELERY_TASK_INFO_PREFIX: Final[str] = "celery-task-info-" _CELERY_TASK_ID_KEY_ENCODING = "utf-8" diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py index 4a973d69cb4..56ae218c885 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -4,16 +4,12 @@ from celery import Celery # type: ignore[import-untyped] from celery.worker.worker import 
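With the hook-based base class above, concrete servers only implement `on_startup` and `on_shutdown`; the event handshake stays in one place and the worker-init hook drives it from a dedicated thread. A compact, self-contained sketch of that handshake follows (class and helper names are invented, the timeout value is illustrative):

```python
# Illustration of the startup handshake: a threading.Event tells the
# worker-init thread that startup finished, while an asyncio.Event keeps the
# server parked on its private event loop until shutdown is requested.
import asyncio
import threading


class EchoAppServer:
    """Stand-in: real code derives from BaseAppServer and only fills the hooks."""

    def __init__(self) -> None:
        self._shutdown_event: asyncio.Event | None = None

    async def on_startup(self) -> None:
        print("worker resources ready")

    async def on_shutdown(self) -> None:
        print("worker resources released")

    async def startup(
        self, completed_event: threading.Event, shutdown_event: asyncio.Event
    ) -> None:
        self._shutdown_event = shutdown_event
        completed_event.set()  # unblock the waiting worker-init thread
        await self.on_startup()  # subclass hook
        await self._shutdown_event.wait()  # park until shutdown() is requested

    async def shutdown(self) -> None:
        if self._shutdown_event is not None:
            self._shutdown_event.set()
        await self.on_shutdown()


def start_in_worker_thread(app_server: EchoAppServer) -> threading.Thread:
    """Mimic the worker-init hook: run the app server on its own loop in a thread."""
    startup_complete = threading.Event()

    def _run() -> None:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(app_server.startup(startup_complete, asyncio.Event()))

    thread = threading.Thread(target=_run, daemon=True)
    thread.start()
    startup_complete.wait(timeout=60)  # cf. STARTUP_TIMEOUT * 1.1 in the signals module
    return thread
```

Shutting down would then schedule `app_server.shutdown()` onto that same loop from another thread, for example via `asyncio.run_coroutine_threadsafe`.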
WorkController # type: ignore[import-untyped] -from servicelib.celery.app_server import STARTUP_TIMEOUT, BaseAppServer from servicelib.logging_utils import log_context +from servicelib.queued_tasks.app_server import STARTUP_TIMEOUT, BaseAppServer from settings_library.celery import CelerySettings from .common import create_task_manager -from .utils import ( - get_app_server, - set_app_server, - set_task_manager, -) +from .utils import get_app_server, set_app_server _logger = logging.getLogger(__name__) @@ -36,22 +32,19 @@ async def _setup_task_manager(): assert sender.app # nosec assert isinstance(sender.app, Celery) # nosec - set_app_server(sender.app, app_server) - set_task_manager( + app_server.task_manager = await create_task_manager( sender.app, - await create_task_manager( - sender.app, - celery_settings, - ), + celery_settings, ) - async def _setup_app_server(): - await app_server.startup(startup_complete_event, shutdown_event) + set_app_server(sender.app, app_server) app_server.event_loop = loop loop.run_until_complete(_setup_task_manager()) - loop.run_until_complete(_setup_app_server()) + loop.run_until_complete( + app_server.startup(startup_complete_event, shutdown_event) + ) thread = threading.Thread( group=None, diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index 339cee5a907..004419ca6e3 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -14,9 +14,9 @@ from celery.exceptions import Ignore # type: ignore[import-untyped] from pydantic import NonNegativeInt from servicelib.async_utils import cancel_wait_task +from servicelib.queued_tasks.models import TaskID from .errors import encode_celery_transferrable_error -from .models import TaskID from .utils import get_app_server _logger = logging.getLogger(__name__) diff --git a/packages/celery-library/src/celery_library/task_manager.py b/packages/celery-library/src/celery_library/task_manager.py index 977c66bcdf3..bc9cd3b46fe 100644 --- a/packages/celery-library/src/celery_library/task_manager.py +++ b/packages/celery-library/src/celery_library/task_manager.py @@ -10,9 +10,7 @@ from common_library.async_tools import make_async from models_library.progress_bar import ProgressReport from servicelib.logging_utils import log_context -from settings_library.celery import CelerySettings - -from .models import ( +from servicelib.queued_tasks.models import ( Task, TaskContext, TaskID, @@ -21,8 +19,10 @@ TaskState, TaskStatus, TaskUUID, - build_task_id, ) +from settings_library.celery import CelerySettings + +from .utils import build_task_id _logger = logging.getLogger(__name__) diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py index 7a63d9fbcde..175e9d32316 100644 --- a/packages/celery-library/src/celery_library/utils.py +++ b/packages/celery-library/src/celery_library/utils.py @@ -1,10 +1,24 @@ -from celery import Celery # type: ignore[import-untyped] -from servicelib.celery.app_server import BaseAppServer +from typing import Final -from .task_manager import CeleryTaskManager +from celery import Celery # type: ignore[import-untyped] +from servicelib.queued_tasks.app_server import BaseAppServer +from servicelib.queued_tasks.models import TaskContext, TaskID, TaskUUID _APP_SERVER_KEY = "app_server" -_TASK_MANAGER_KEY = "task_manager" + +_TASK_ID_KEY_DELIMITATOR: Final[str] = ":" + + +def build_task_id_prefix(task_context: TaskContext) -> 
str: + return _TASK_ID_KEY_DELIMITATOR.join( + [f"{task_context[key]}" for key in sorted(task_context)] + ) + + +def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: + return _TASK_ID_KEY_DELIMITATOR.join( + [build_task_id_prefix(task_context), f"{task_uuid}"] + ) def get_app_server(app: Celery) -> BaseAppServer: @@ -15,13 +29,3 @@ def get_app_server(app: Celery) -> BaseAppServer: def set_app_server(app: Celery, app_server: BaseAppServer) -> None: app.conf[_APP_SERVER_KEY] = app_server - - -def get_task_manager(celery_app: Celery) -> CeleryTaskManager: - worker = celery_app.conf[_TASK_MANAGER_KEY] - assert isinstance(worker, CeleryTaskManager) - return worker - - -def set_task_manager(celery_app: Celery, worker: CeleryTaskManager) -> None: - celery_app.conf[_TASK_MANAGER_KEY] = worker diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 6b87f7090a9..17a923179b6 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -13,10 +13,10 @@ from celery.worker.worker import WorkController from celery_library.common import create_task_manager from celery_library.signals import on_worker_init, on_worker_shutdown -from celery_library.utils import CeleryTaskManager +from celery_library.task_manager import CeleryTaskManager from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.celery.app_server import BaseAppServer +from servicelib.queued_tasks.app_server import BaseAppServer from settings_library.celery import CelerySettings from settings_library.redis import RedisSettings diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index 8b2dbb08a3b..d9d0bd4522d 100644 --- a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -14,21 +14,21 @@ from celery import Celery, Task from celery.contrib.abortable import AbortableTask from celery_library.errors import TransferrableCeleryError -from celery_library.models import ( - TaskContext, - TaskID, - TaskMetadata, - TaskState, -) from celery_library.task import ( AbortableAsyncResult, register_task, ) from celery_library.task_manager import CeleryTaskManager -from celery_library.utils import get_app_server, get_task_manager +from celery_library.utils import get_app_server from common_library.errors_classes import OsparcErrorMixin from models_library.progress_bar import ProgressReport from servicelib.logging_utils import log_context +from servicelib.queued_tasks.models import ( + TaskContext, + TaskID, + TaskMetadata, + TaskState, +) from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed _logger = logging.getLogger(__name__) @@ -40,14 +40,12 @@ async def _fake_file_processor( celery_app: Celery, task_name: str, task_id: str, files: list[str] ) -> str: - worker = get_task_manager(celery_app) - def sleep_for(seconds: float) -> None: time.sleep(seconds) for n, file in enumerate(files, start=1): with log_context(_logger, logging.INFO, msg=f"Processing file {file}"): - await worker.set_task_progress( + await get_app_server(celery_app).task_manager.set_task_progress( task_id=task_id, report=ProgressReport(actual_value=n / len(files)), ) diff --git a/packages/service-library/src/servicelib/celery/__init__.py b/packages/service-library/src/servicelib/fastapi/queued_tasks/__init__.py similarity index 100% rename from 
packages/service-library/src/servicelib/celery/__init__.py rename to packages/service-library/src/servicelib/fastapi/queued_tasks/__init__.py diff --git a/packages/service-library/src/servicelib/fastapi/celery/app_server.py b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py similarity index 95% rename from packages/service-library/src/servicelib/fastapi/celery/app_server.py rename to packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py index 9839fe721c4..3e4f048a1ab 100644 --- a/packages/service-library/src/servicelib/fastapi/celery/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py @@ -4,7 +4,7 @@ from asgi_lifespan import LifespanManager from fastapi import FastAPI -from ...celery.app_server import BaseAppServer +from ...queued_tasks.app_server import BaseAppServer _SHUTDOWN_TIMEOUT: Final[float] = timedelta(seconds=10).total_seconds() _STARTUP_TIMEOUT: Final[float] = timedelta(minutes=1).total_seconds() diff --git a/packages/service-library/src/servicelib/fastapi/celery/__init__.py b/packages/service-library/src/servicelib/queued_tasks/__init__.py similarity index 100% rename from packages/service-library/src/servicelib/fastapi/celery/__init__.py rename to packages/service-library/src/servicelib/queued_tasks/__init__.py diff --git a/packages/service-library/src/servicelib/celery/app_server.py b/packages/service-library/src/servicelib/queued_tasks/app_server.py similarity index 85% rename from packages/service-library/src/servicelib/celery/app_server.py rename to packages/service-library/src/servicelib/queued_tasks/app_server.py index f34dd58a183..0616f58721a 100644 --- a/packages/service-library/src/servicelib/celery/app_server.py +++ b/packages/service-library/src/servicelib/queued_tasks/app_server.py @@ -6,6 +6,8 @@ from contextlib import suppress from typing import TYPE_CHECKING, Final +from servicelib.queued_tasks.task_manager import TaskManager + if TYPE_CHECKING: with suppress(ImportError): from fastapi import FastAPI @@ -36,6 +38,14 @@ def event_loop(self) -> AbstractEventLoop: def event_loop(self, loop: AbstractEventLoop) -> None: self._event_loop = loop + @property + def task_manager(self) -> TaskManager: + return self._task_manager + + @task_manager.setter + def task_manager(self, manager: TaskManager) -> None: + self._task_manager = manager + @abstractmethod async def on_startup(self) -> None: raise NotImplementedError diff --git a/packages/celery-library/src/celery_library/models.py b/packages/service-library/src/servicelib/queued_tasks/models.py similarity index 76% rename from packages/celery-library/src/celery_library/models.py rename to packages/service-library/src/servicelib/queued_tasks/models.py index 8aa442a2416..8bc744fcb3e 100644 --- a/packages/celery-library/src/celery_library/models.py +++ b/packages/service-library/src/servicelib/queued_tasks/models.py @@ -1,6 +1,6 @@ -from datetime import timedelta +import datetime from enum import StrEnum -from typing import Annotated, Any, Final, Protocol, TypeAlias +from typing import Annotated, Any, Protocol, TypeAlias from uuid import UUID from models_library.progress_bar import ProgressReport @@ -13,20 +13,6 @@ ] TaskUUID: TypeAlias = UUID -_TASK_ID_KEY_DELIMITATOR: Final[str] = ":" - - -def build_task_id_prefix(task_context: TaskContext) -> str: - return _TASK_ID_KEY_DELIMITATOR.join( - [f"{task_context[key]}" for key in sorted(task_context)] - ) - - -def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: - 
return _TASK_ID_KEY_DELIMITATOR.join( - [build_task_id_prefix(task_context), f"{task_uuid}"] - ) - class TaskState(StrEnum): PENDING = "PENDING" @@ -61,7 +47,7 @@ async def create_task( self, task_id: TaskID, task_metadata: TaskMetadata, - expiry: timedelta, + expiry: datetime.timedelta, ) -> None: ... async def exists_task(self, task_id: TaskID) -> bool: ... diff --git a/packages/service-library/src/servicelib/queued_tasks/task_manager.py b/packages/service-library/src/servicelib/queued_tasks/task_manager.py new file mode 100644 index 00000000000..625c410e96e --- /dev/null +++ b/packages/service-library/src/servicelib/queued_tasks/task_manager.py @@ -0,0 +1,35 @@ +from typing import Any, Protocol + +from ..queued_tasks.models import ( + ProgressReport, + Task, + TaskContext, + TaskID, + TaskMetadata, + TaskStatus, + TaskUUID, +) + + +class TaskManager(Protocol): + async def submit_task( + self, task_metadata: TaskMetadata, *, task_context: TaskContext, **task_param + ) -> TaskUUID: ... + + async def cancel_task( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> None: ... + + async def get_task_result( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> Any: ... + + async def get_task_status( + self, task_context: TaskContext, task_uuid: TaskUUID + ) -> TaskStatus: ... + + async def list_tasks(self, task_context: TaskContext) -> list[Task]: ... + + async def set_task_progress( + self, task_id: TaskID, report: ProgressReport + ) -> None: ... diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index 651948d5b8a..3cea617b7f3 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -1,7 +1,6 @@ import logging from celery import Task # type: ignore[import-untyped] -from celery_library.models import TaskID from celery_library.utils import get_app_server from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, @@ -9,6 +8,7 @@ from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from servicelib.logging_utils import log_context +from servicelib.queued_tasks.models import TaskID from ...dsm import get_dsm_provider from ...models import FileMetaData diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 34e18617e0c..2c0d107c71a 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -2,12 +2,12 @@ from pathlib import Path from celery import Task # type: ignore[import-untyped] -from celery_library.models import TaskID from celery_library.utils import get_app_server from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from servicelib.logging_utils import log_context +from servicelib.queued_tasks.models import TaskID from servicelib.utils import limited_gather from ...constants import MAX_CONCURRENT_S3_TASKS diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 246f7c08368..13c52b77b60 100644 --- 
a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -4,8 +4,7 @@ from aws_library.s3._models import S3ObjectKey from celery import Task # type: ignore[import-untyped] -from celery_library.models import TaskID -from celery_library.utils import get_app_server, get_task_manager +from celery_library.utils import get_app_server from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport from models_library.progress_bar import ProgressReport @@ -14,6 +13,7 @@ from pydantic import TypeAdapter from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData +from servicelib.queued_tasks.models import TaskID from ...dsm import get_dsm_provider from ...simcore_s3_dsm import SimcoreS3DataManager @@ -24,7 +24,7 @@ async def _task_progress_cb( task: Task, task_id: TaskID, report: ProgressReport ) -> None: - worker = get_task_manager(task.app) + worker = get_app_server(task.app).task_manager assert task.name # nosec await worker.set_task_progress( task_id=task_id, @@ -87,7 +87,9 @@ async def export_data( async def _progress_cb(report: ProgressReport) -> None: assert task.name # nosec - await get_task_manager(task.app).set_task_progress(task_id, report) + await get_app_server(task.app).task_manager.set_task_progress( + task_id, report + ) _logger.debug("'%s' progress %s", task_id, report.percent_value) async with ProgressBarData( diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 4f176d45d00..0d91e10d392 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -2,8 +2,6 @@ from typing import Annotated, Final, cast from urllib.parse import quote -from celery_library.models import TaskMetadata, TaskUUID -from celery_library.task_manager import CeleryTaskManager from fastapi import APIRouter, Depends, Header, Request from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData from models_library.api_schemas_storage.storage_schemas import ( @@ -22,6 +20,8 @@ from models_library.projects_nodes_io import LocationID, StorageFileID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status +from servicelib.queued_tasks.app_server import TaskManager +from servicelib.queued_tasks.models import TaskMetadata, TaskUUID from yarl import URL from ...dsm import get_dsm_provider @@ -270,7 +270,7 @@ async def abort_upload_file( status_code=status.HTTP_202_ACCEPTED, ) async def complete_upload_file( - celery_client: Annotated[CeleryTaskManager, Depends(get_task_manager)], + task_manager: Annotated[TaskManager, Depends(get_task_manager)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, @@ -284,7 +284,7 @@ async def complete_upload_file( user_id=query_params.user_id, product_name=_UNDEFINED_PRODUCT_NAME_FOR_WORKER_TASKS, # NOTE: I would need to change the API here ) - task_uuid = await celery_client.submit_task( + task_uuid = await task_manager.submit_task( TaskMetadata( name=remote_complete_upload_file.__name__, ), @@ -326,7 +326,7 @@ async def complete_upload_file( response_model=Envelope[FileUploadCompleteFutureResponse], ) async def is_completed_upload_file( - celery_client: 
Annotated[CeleryTaskManager, Depends(get_task_manager)], + task_manager: Annotated[TaskManager, Depends(get_task_manager)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, @@ -340,13 +340,13 @@ async def is_completed_upload_file( user_id=query_params.user_id, product_name=_UNDEFINED_PRODUCT_NAME_FOR_WORKER_TASKS, # NOTE: I would need to change the API here ) - task_status = await celery_client.get_task_status( + task_status = await task_manager.get_task_status( task_context=async_job_name_data.model_dump(), task_uuid=TaskUUID(future_id) ) # first check if the task is in the app if task_status.is_done: task_result = TypeAdapter(FileMetaData).validate_python( - await celery_client.get_task_result( + await task_manager.get_task_result( task_context=async_job_name_data.model_dump(), task_uuid=TaskUUID(future_id), ) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py index 68cc5977811..e89c32de291 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py @@ -7,7 +7,6 @@ TransferrableCeleryError, decode_celery_transferrable_error, ) -from celery_library.models import TaskState from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -23,6 +22,7 @@ JobSchedulerError, ) from servicelib.logging_utils import log_catch +from servicelib.queued_tasks.models import TaskState from servicelib.rabbitmq import RPCRouter from ...modules.celery import get_task_manager_from_app diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py index fa69da125c7..8a5170ad102 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -1,13 +1,13 @@ import logging from pathlib import Path -from celery_library.models import TaskMetadata from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, AsyncJobNameData, ) from models_library.projects_nodes_io import LocationID +from servicelib.queued_tasks.models import TaskMetadata from servicelib.rabbitmq import RPCRouter from ...modules.celery import get_task_manager_from_app diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index 1b85893de9e..dde7fc52932 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -1,4 +1,3 @@ -from celery_library.models import TaskMetadata, TasksQueue from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, @@ -6,6 +5,7 @@ ) from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport +from servicelib.queued_tasks.models import TaskMetadata, TasksQueue from servicelib.rabbitmq import RPCRouter from ...modules.celery import get_task_manager_from_app diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index ebd9832b9e1..0be4ae04433 100644 --- 
a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -9,7 +9,7 @@ on_worker_init, on_worker_shutdown, ) -from servicelib.fastapi.celery.app_server import FastAPIAppServer +from servicelib.fastapi.queued_tasks.app_server import FastAPIAppServer from servicelib.logging_utils import config_all_loggers from ...api._worker_tasks.tasks import setup_worker_tasks diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index ad37ba752e6..76015251c34 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -63,7 +63,7 @@ ) from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status -from servicelib.fastapi.celery.app_server import FastAPIAppServer +from servicelib.fastapi.queued_tasks.app_server import FastAPIAppServer from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.utils import limited_gather from settings_library.rabbit import RabbitSettings diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index 9f3cd04529d..c6546aaade6 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -11,7 +11,6 @@ import pytest from celery import Celery, Task from celery.contrib.testing.worker import TestWorkController -from celery_library.models import TaskID, TaskMetadata from celery_library.task import register_task from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( @@ -25,8 +24,8 @@ from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID +from servicelib.queued_tasks.models import TaskID, TaskMetadata from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server @@ -126,7 +125,7 @@ async def _start_task_via_rpc( async_job_get = await async_jobs.submit( rabbitmq_rpc_client=client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName(rpc_task_name), + method_name=rpc_task_name, job_id_data=job_id_data, **kwargs, ) From baff971f10e05496ef2ecaf1802007a1af533a07 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 14:35:24 +0200 Subject: [PATCH 80/91] fix: typecheck --- services/storage/src/simcore_service_storage/api/rest/_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 0d91e10d392..601adbc5381 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -20,8 +20,8 @@ from models_library.projects_nodes_io import LocationID, StorageFileID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status -from servicelib.queued_tasks.app_server import TaskManager from servicelib.queued_tasks.models import TaskMetadata, TaskUUID +from servicelib.queued_tasks.task_manager import TaskManager from yarl import URL from ...dsm import get_dsm_provider 
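For orientation between patches: the task-id helpers moved into celery_library/utils.py above derive a stable identifier from the task context plus the task UUID, and build_task_id_prefix is also imported by backends/_redis.py. Below is a minimal standalone sketch of that behaviour, illustrative only and not part of the patches; TaskContext and TaskID are simplified to plain dict/str here, and the context values are made up.

from typing import Any, Final, TypeAlias
from uuid import UUID, uuid4

TaskContext: TypeAlias = dict[str, Any]  # simplified; the real alias lives in the servicelib models module
TaskUUID: TypeAlias = UUID
TaskID: TypeAlias = str  # simplified; the real TaskID is a constrained/annotated str

_DELIMITER: Final[str] = ":"  # mirrors _TASK_ID_KEY_DELIMITATOR in utils.py


def build_task_id_prefix(task_context: TaskContext) -> str:
    # keys are sorted so the same context always produces the same prefix
    return _DELIMITER.join(f"{task_context[key]}" for key in sorted(task_context))


def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID:
    # full id = context-derived prefix + the task's UUID
    return _DELIMITER.join([build_task_id_prefix(task_context), f"{task_uuid}"])


if __name__ == "__main__":
    context: TaskContext = {"user_id": 42, "product_name": "osparc"}  # hypothetical values
    print(build_task_id(context, uuid4()))  # e.g. "osparc:42:<uuid>" (keys sorted alphabetically)

Sorting the context keys is what makes the prefix deterministic, so two calls with the same context always address the same group of task keys.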
From 912d041082a42f79f96c3bd656937bfd89cfc9b5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 14:56:49 +0200 Subject: [PATCH 81/91] fix: typecheck --- .../src/servicelib/queued_tasks/task_manager.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/queued_tasks/task_manager.py b/packages/service-library/src/servicelib/queued_tasks/task_manager.py index 625c410e96e..6b50fe521b0 100644 --- a/packages/service-library/src/servicelib/queued_tasks/task_manager.py +++ b/packages/service-library/src/servicelib/queued_tasks/task_manager.py @@ -1,7 +1,8 @@ from typing import Any, Protocol +from models_library.progress_bar import ProgressReport + from ..queued_tasks.models import ( - ProgressReport, Task, TaskContext, TaskID, From 4b403b08af7a36822b6ed8d361abb513d7c7a0af Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 21:38:58 +0200 Subject: [PATCH 82/91] fix findings --- packages/celery-library/tests/conftest.py | 4 ++-- packages/celery-library/tests/unit/test_tasks.py | 4 ++-- .../storage/src/simcore_service_storage/api/rest/_files.py | 4 ++-- .../src/simcore_service_storage/api/rpc/_simcore_s3.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 17a923179b6..569755ebe67 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -92,7 +92,7 @@ def celery_config() -> dict[str, Any]: @pytest.fixture -async def with_storage_celery_worker( +async def with_celery_worker( celery_app: Celery, app_server: BaseAppServer, celery_settings: CelerySettings, @@ -121,7 +121,7 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs): async def celery_task_manager( celery_app: Celery, celery_settings: CelerySettings, - with_storage_celery_worker: TestWorkController, + with_celery_worker: TestWorkController, ) -> CeleryTaskManager: return await create_task_manager( celery_app, diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index d9d0bd4522d..f081492e078 100644 --- a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -55,7 +55,7 @@ def sleep_for(seconds: float) -> None: def fake_file_processor(task: Task, task_id: TaskID, files: list[str]) -> str: - _ = task_id + assert task_id assert task.name _logger.info("Calling _fake_file_processor") return asyncio.run_coroutine_threadsafe( @@ -69,7 +69,7 @@ class MyError(OsparcErrorMixin, Exception): def failure_task(task: Task, task_id: TaskID) -> None: - _ = task_id + assert task_id assert task msg = "BOOM!" 
raise MyError(msg=msg) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 601adbc5381..8c3ab307226 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -292,7 +292,7 @@ async def complete_upload_file( user_id=async_job_name_data.user_id, location_id=location_id, file_id=file_id, - body=body.model_dump(), + body=body, ) route = ( @@ -356,7 +356,7 @@ async def is_completed_upload_file( assert new_fmd.file_id == file_id # nosec response = FileUploadCompleteFutureResponse( state=FileUploadCompleteState.OK, - e_tag=FileMetaData.model_validate(new_fmd).entity_tag, + e_tag=new_fmd.entity_tag, ) else: # the task is still running diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index dde7fc52932..f3db8f061ca 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -27,7 +27,7 @@ async def copy_folders_from_project( ), task_context=job_id_data.model_dump(), user_id=job_id_data.user_id, - body=body.model_dump(mode="json"), + body=body, ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) From 1c2800918f96322e9c7e1855f4b680fcece5cde9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 21:53:15 +0200 Subject: [PATCH 83/91] fix: pydantic types --- .../simcore_service_storage/api/_worker_tasks/tasks.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py index b8dc131c98f..02c6ff60dc8 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -2,10 +2,15 @@ from celery import Celery # type: ignore[import-untyped] from celery_library.task import register_task -from celery_library.types import register_celery_types +from celery_library.types import register_celery_types, register_pydantic_types from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) from servicelib.logging_utils import log_context +from ...models import FileMetaData from ._files import complete_upload_file from ._paths import compute_path_size, delete_paths from ._simcore_s3 import deep_copy_files_from_project, export_data @@ -15,6 +20,7 @@ def setup_worker_tasks(app: Celery) -> None: register_celery_types() + register_pydantic_types(FileUploadCompletionBody, FileMetaData, FoldersBody) with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, export_data, dont_autoretry_for=(AccessRightError,)) From 7de918a7476634e81844273fdde9fef3fbbb52dd Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 23:37:31 +0200 Subject: [PATCH 84/91] fix abortable task test --- packages/celery-library/tests/unit/test_tasks.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index f081492e078..09f5a48fb56 100644 --- 
a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -14,10 +14,7 @@ from celery import Celery, Task from celery.contrib.abortable import AbortableTask from celery_library.errors import TransferrableCeleryError -from celery_library.task import ( - AbortableAsyncResult, - register_task, -) +from celery_library.task import register_task from celery_library.task_manager import CeleryTaskManager from celery_library.utils import get_app_server from common_library.errors_classes import OsparcErrorMixin @@ -78,11 +75,8 @@ def failure_task(task: Task, task_id: TaskID) -> None: async def dreamer_task(task: AbortableTask, task_id: TaskID) -> list[int]: numbers = [] for _ in range(30): - if AbortableAsyncResult(task_id, app=task.app).is_aborted(): - _logger.warning("Alarm clock") - return numbers numbers.append(randint(1, 90)) # noqa: S311 - await asyncio.sleep(0.1) + await asyncio.sleep(0.5) return numbers @@ -166,6 +160,8 @@ async def test_cancelling_a_running_task_aborts_and_deletes( task_context=task_context, ) + await asyncio.sleep(3.0) + await celery_task_manager.cancel_task(task_context, task_uuid) for attempt in Retrying( From 9981610d9208e227573fd454d12ee35bbff8e142 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Jun 2025 23:48:15 +0200 Subject: [PATCH 85/91] add redis secure --- packages/celery-library/tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 569755ebe67..fc089fba092 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -56,6 +56,7 @@ def app_environment( monkeypatch, { **env_devel_dict, + "REDIS_SECURE": redis_service.REDIS_SECURE, "REDIS_HOST": redis_service.REDIS_HOST, "REDIS_PORT": f"{redis_service.REDIS_PORT}", "REDIS_PASSWORD": redis_service.REDIS_PASSWORD.get_secret_value(), From 0ad9d0a2eaabd797f691ffdf2e467976e668a252 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Jun 2025 13:08:25 +0200 Subject: [PATCH 86/91] fix --- .../src/celery_library/backends/_redis.py | 2 +- .../src/servicelib/fastapi/queued_tasks/app_server.py | 11 +++++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/celery-library/src/celery_library/backends/_redis.py b/packages/celery-library/src/celery_library/backends/_redis.py index 07abc789747..c9ce0a84568 100644 --- a/packages/celery-library/src/celery_library/backends/_redis.py +++ b/packages/celery-library/src/celery_library/backends/_redis.py @@ -12,7 +12,7 @@ TaskMetadata, TaskUUID, ) -from servicelib.redis._client import RedisClientSDK +from servicelib.redis import RedisClientSDK from ..utils import build_task_id_prefix diff --git a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py index 3e4f048a1ab..42e946c3a59 100644 --- a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py @@ -14,7 +14,11 @@ class FastAPIAppServer(BaseAppServer): def __init__(self, app: FastAPI): super().__init__() self._app = app - self._lifespan_manager: LifespanManager | None = None + self._lifespan_manager = LifespanManager( + self.fastapi_app, + startup_timeout=_STARTUP_TIMEOUT, + shutdown_timeout=_SHUTDOWN_TIMEOUT, + ) @property def fastapi_app(self) -> FastAPI: @@ -22,11 +26,6 @@ def fastapi_app(self) -> 
FastAPI: return self._app async def on_startup(self) -> None: - self._lifespan_manager = LifespanManager( - self.fastapi_app, - startup_timeout=_STARTUP_TIMEOUT, - shutdown_timeout=_SHUTDOWN_TIMEOUT, - ) await self._lifespan_manager.__aenter__() async def on_shutdown(self) -> None: From f8596178209c279319e492ca7a2deb73f40e8397 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Jun 2025 14:20:21 +0200 Subject: [PATCH 87/91] use generics --- packages/celery-library/tests/conftest.py | 2 +- .../fastapi/queued_tasks/app_server.py | 12 +++------ .../src/servicelib/queued_tasks/app_server.py | 25 ++++++------------- .../api/_worker_tasks/_files.py | 4 +-- .../api/_worker_tasks/_paths.py | 4 +-- .../api/_worker_tasks/_simcore_s3.py | 4 +-- 6 files changed, 18 insertions(+), 33 deletions(-) diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index fc089fba092..7c61ee93b56 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -73,7 +73,7 @@ def celery_settings( @pytest.fixture def app_server() -> BaseAppServer: - return FakeAppServer() + return FakeAppServer(app=None) @pytest.fixture(scope="session") diff --git a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py index 42e946c3a59..0129e06ed8a 100644 --- a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py @@ -10,21 +10,15 @@ _STARTUP_TIMEOUT: Final[float] = timedelta(minutes=1).total_seconds() -class FastAPIAppServer(BaseAppServer): +class FastAPIAppServer(BaseAppServer[FastAPI]): def __init__(self, app: FastAPI): - super().__init__() - self._app = app + super().__init__(app) self._lifespan_manager = LifespanManager( - self.fastapi_app, + app, startup_timeout=_STARTUP_TIMEOUT, shutdown_timeout=_SHUTDOWN_TIMEOUT, ) - @property - def fastapi_app(self) -> FastAPI: - assert isinstance(self._app, FastAPI) # nosec - return self._app - async def on_startup(self) -> None: await self._lifespan_manager.__aenter__() diff --git a/packages/service-library/src/servicelib/queued_tasks/app_server.py b/packages/service-library/src/servicelib/queued_tasks/app_server.py index 0616f58721a..cd43179929b 100644 --- a/packages/service-library/src/servicelib/queued_tasks/app_server.py +++ b/packages/service-library/src/servicelib/queued_tasks/app_server.py @@ -3,32 +3,23 @@ import threading from abc import ABC, abstractmethod from asyncio import AbstractEventLoop -from contextlib import suppress -from typing import TYPE_CHECKING, Final +from typing import Final, Generic, TypeVar from servicelib.queued_tasks.task_manager import TaskManager -if TYPE_CHECKING: - with suppress(ImportError): - from fastapi import FastAPI - with suppress(ImportError): - from aiohttp.web import Application - - STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() +AppType = TypeVar("AppType") -class BaseAppServer(ABC): - def __init__(self) -> None: - self._shutdown_event: asyncio.Event | None = None - @property - def fastapi_app(self) -> "FastAPI": - raise NotImplementedError +class BaseAppServer(ABC, Generic[AppType]): + def __init__(self, app: AppType) -> None: + self._app: AppType = app + self._shutdown_event: asyncio.Event | None = None @property - def aiohttp_app(self) -> "Application": - raise NotImplementedError + def app(self) -> AppType: + return 
self._app @property def event_loop(self) -> AbstractEventLoop: diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index 3cea617b7f3..afa60c6cf8b 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -1,6 +1,6 @@ import logging -from celery import Task # type: ignore[import-untyped] +from celery import Task from celery_library.utils import get_app_server from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, @@ -30,7 +30,7 @@ async def complete_upload_file( logging.INFO, msg=f"completing upload of file {user_id=}, {location_id=}, {file_id=}", ): - dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) # NOTE: completing a multipart upload on AWS can take up to several minutes # if it returns slow we return a 202 - Accepted, the client will have to check later # for completeness diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 2c0d107c71a..2b0adc3405e 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -25,7 +25,7 @@ async def compute_path_size( logging.INFO, msg=f"computing path size {user_id=}, {location_id=}, {path=}", ): - dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) return await dsm.compute_path_size(user_id, path=Path(path)) @@ -42,7 +42,7 @@ async def delete_paths( logging.INFO, msg=f"delete {paths=} in {location_id=} for {user_id=}", ): - dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) files_ids: set[StorageFileID] = { TypeAdapter(StorageFileID).validate_python(f"{path}") for path in paths } diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 13c52b77b60..6d87d4db246 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -40,7 +40,7 @@ async def deep_copy_files_from_project( logging.INFO, msg=f"copying {body.source['uuid']} -> {body.destination['uuid']} with {task.request.id}", ): - dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get( + dsm = get_dsm_provider(get_app_server(task.app).app).get( SimcoreS3DataManager.get_location_id() ) assert isinstance(dsm, SimcoreS3DataManager) # nosec @@ -75,7 +75,7 @@ async def export_data( logging.INFO, f"'{task_id}' export data (for {user_id=}) fom selection: {paths_to_export}", ): - dsm = get_dsm_provider(get_app_server(task.app).fastapi_app).get( + dsm = get_dsm_provider(get_app_server(task.app).app).get( SimcoreS3DataManager.get_location_id() ) assert isinstance(dsm, SimcoreS3DataManager) # nosec From 6ea49fefc6a45f080bf4d8fa2060fc24007b29da Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Jun 2025 14:55:06 +0200 Subject: [PATCH 88/91] fix: typecheck --- 
.../src/simcore_service_storage/api/_worker_tasks/_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index afa60c6cf8b..e7b31d8f1dc 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -1,6 +1,6 @@ import logging -from celery import Task +from celery import Task # type: ignore[import-untyped] from celery_library.utils import get_app_server from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, From d7d7a3e83250f50a0fa003d81f9d796d8e3d9c3f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Jun 2025 22:51:20 +0200 Subject: [PATCH 89/91] tests: fix --- .../src/servicelib/fastapi/queued_tasks/app_server.py | 7 ++++--- .../src/servicelib/queued_tasks/app_server.py | 10 +++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py index 0129e06ed8a..5cac911f6ab 100644 --- a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py +++ b/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py @@ -13,13 +13,14 @@ class FastAPIAppServer(BaseAppServer[FastAPI]): def __init__(self, app: FastAPI): super().__init__(app) + self._lifespan_manager: LifespanManager | None = None + + async def on_startup(self) -> None: self._lifespan_manager = LifespanManager( - app, + self.app, startup_timeout=_STARTUP_TIMEOUT, shutdown_timeout=_SHUTDOWN_TIMEOUT, ) - - async def on_startup(self) -> None: await self._lifespan_manager.__aenter__() async def on_shutdown(self) -> None: diff --git a/packages/service-library/src/servicelib/queued_tasks/app_server.py b/packages/service-library/src/servicelib/queued_tasks/app_server.py index cd43179929b..72f82fcc70e 100644 --- a/packages/service-library/src/servicelib/queued_tasks/app_server.py +++ b/packages/service-library/src/servicelib/queued_tasks/app_server.py @@ -9,16 +9,16 @@ STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() -AppType = TypeVar("AppType") +T = TypeVar("T") -class BaseAppServer(ABC, Generic[AppType]): - def __init__(self, app: AppType) -> None: - self._app: AppType = app +class BaseAppServer(ABC, Generic[T]): + def __init__(self, app: T) -> None: + self._app: T = app self._shutdown_event: asyncio.Event | None = None @property - def app(self) -> AppType: + def app(self) -> T: return self._app @property From b02e11a62fdf73c3eb211bc20c3cd30161a6a751 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Jun 2025 09:12:32 +0200 Subject: [PATCH 90/91] fix: pydantic types registration --- .../simcore_service_storage/modules/celery/__init__.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py index 5e6ff2b0b93..45795d329a3 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/__init__.py +++ b/services/storage/src/simcore_service_storage/modules/celery/__init__.py @@ -1,9 +1,15 @@ from celery_library.common import create_app, create_task_manager from celery_library.task_manager import CeleryTaskManager -from 
celery_library.types import register_celery_types +from celery_library.types import register_celery_types, register_pydantic_types from fastapi import FastAPI +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) from settings_library.celery import CelerySettings +from ...models import FileMetaData + def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: async def on_startup() -> None: @@ -12,6 +18,7 @@ async def on_startup() -> None: ) register_celery_types() + register_pydantic_types(FileUploadCompletionBody, FileMetaData, FoldersBody) app.add_event_handler("startup", on_startup) From 73c03ccc4c3290f9134b65c6e77a1eff1769f026 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Jun 2025 15:31:55 +0200 Subject: [PATCH 91/91] fix: rename --- packages/celery-library/src/celery_library/backends/_redis.py | 2 +- packages/celery-library/src/celery_library/signals.py | 2 +- packages/celery-library/src/celery_library/task.py | 2 +- packages/celery-library/src/celery_library/task_manager.py | 4 ++-- packages/celery-library/src/celery_library/utils.py | 4 ++-- packages/celery-library/tests/conftest.py | 2 +- packages/celery-library/tests/unit/test_tasks.py | 4 ++-- .../servicelib/{fastapi/queued_tasks => celery}/__init__.py | 0 .../src/servicelib/{queued_tasks => celery}/app_server.py | 2 +- .../src/servicelib/{queued_tasks => celery}/models.py | 0 .../src/servicelib/{queued_tasks => celery}/task_manager.py | 2 +- .../servicelib/{queued_tasks => fastapi/celery}/__init__.py | 0 .../servicelib/fastapi/{queued_tasks => celery}/app_server.py | 2 +- .../src/simcore_service_storage/api/_worker_tasks/_files.py | 2 +- .../src/simcore_service_storage/api/_worker_tasks/_paths.py | 2 +- .../simcore_service_storage/api/_worker_tasks/_simcore_s3.py | 2 +- .../storage/src/simcore_service_storage/api/rest/_files.py | 4 ++-- .../src/simcore_service_storage/api/rpc/_async_jobs.py | 2 +- .../storage/src/simcore_service_storage/api/rpc/_paths.py | 2 +- .../src/simcore_service_storage/api/rpc/_simcore_s3.py | 2 +- .../src/simcore_service_storage/modules/celery/worker_main.py | 2 +- services/storage/tests/conftest.py | 2 +- services/storage/tests/unit/test_async_jobs.py | 2 +- 23 files changed, 24 insertions(+), 24 deletions(-) rename packages/service-library/src/servicelib/{fastapi/queued_tasks => celery}/__init__.py (100%) rename packages/service-library/src/servicelib/{queued_tasks => celery}/app_server.py (96%) rename packages/service-library/src/servicelib/{queued_tasks => celery}/models.py (100%) rename packages/service-library/src/servicelib/{queued_tasks => celery}/task_manager.py (96%) rename packages/service-library/src/servicelib/{queued_tasks => fastapi/celery}/__init__.py (100%) rename packages/service-library/src/servicelib/fastapi/{queued_tasks => celery}/app_server.py (94%) diff --git a/packages/celery-library/src/celery_library/backends/_redis.py b/packages/celery-library/src/celery_library/backends/_redis.py index c9ce0a84568..37a9a415cd5 100644 --- a/packages/celery-library/src/celery_library/backends/_redis.py +++ b/packages/celery-library/src/celery_library/backends/_redis.py @@ -5,7 +5,7 @@ from models_library.progress_bar import ProgressReport from pydantic import ValidationError -from servicelib.queued_tasks.models import ( +from servicelib.celery.models import ( Task, TaskContext, TaskID, diff --git a/packages/celery-library/src/celery_library/signals.py 
b/packages/celery-library/src/celery_library/signals.py index 56ae218c885..09a29f3b0dc 100644 --- a/packages/celery-library/src/celery_library/signals.py +++ b/packages/celery-library/src/celery_library/signals.py @@ -4,8 +4,8 @@ from celery import Celery # type: ignore[import-untyped] from celery.worker.worker import WorkController # type: ignore[import-untyped] +from servicelib.celery.app_server import STARTUP_TIMEOUT, BaseAppServer from servicelib.logging_utils import log_context -from servicelib.queued_tasks.app_server import STARTUP_TIMEOUT, BaseAppServer from settings_library.celery import CelerySettings from .common import create_task_manager diff --git a/packages/celery-library/src/celery_library/task.py b/packages/celery-library/src/celery_library/task.py index 004419ca6e3..f14771cf207 100644 --- a/packages/celery-library/src/celery_library/task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -14,7 +14,7 @@ from celery.exceptions import Ignore # type: ignore[import-untyped] from pydantic import NonNegativeInt from servicelib.async_utils import cancel_wait_task -from servicelib.queued_tasks.models import TaskID +from servicelib.celery.models import TaskID from .errors import encode_celery_transferrable_error from .utils import get_app_server diff --git a/packages/celery-library/src/celery_library/task_manager.py b/packages/celery-library/src/celery_library/task_manager.py index bc9cd3b46fe..7f14d4ddd34 100644 --- a/packages/celery-library/src/celery_library/task_manager.py +++ b/packages/celery-library/src/celery_library/task_manager.py @@ -9,8 +9,7 @@ ) from common_library.async_tools import make_async from models_library.progress_bar import ProgressReport -from servicelib.logging_utils import log_context -from servicelib.queued_tasks.models import ( +from servicelib.celery.models import ( Task, TaskContext, TaskID, @@ -20,6 +19,7 @@ TaskStatus, TaskUUID, ) +from servicelib.logging_utils import log_context from settings_library.celery import CelerySettings from .utils import build_task_id diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py index 175e9d32316..64da3e0c248 100644 --- a/packages/celery-library/src/celery_library/utils.py +++ b/packages/celery-library/src/celery_library/utils.py @@ -1,8 +1,8 @@ from typing import Final from celery import Celery # type: ignore[import-untyped] -from servicelib.queued_tasks.app_server import BaseAppServer -from servicelib.queued_tasks.models import TaskContext, TaskID, TaskUUID +from servicelib.celery.app_server import BaseAppServer +from servicelib.celery.models import TaskContext, TaskID, TaskUUID _APP_SERVER_KEY = "app_server" diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 7c61ee93b56..c513d262d7f 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -16,7 +16,7 @@ from celery_library.task_manager import CeleryTaskManager from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.queued_tasks.app_server import BaseAppServer +from servicelib.celery.app_server import BaseAppServer from settings_library.celery import CelerySettings from settings_library.redis import RedisSettings diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index 09f5a48fb56..4270efcc065 100644 --- 
a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -19,13 +19,13 @@ from celery_library.utils import get_app_server from common_library.errors_classes import OsparcErrorMixin from models_library.progress_bar import ProgressReport -from servicelib.logging_utils import log_context -from servicelib.queued_tasks.models import ( +from servicelib.celery.models import ( TaskContext, TaskID, TaskMetadata, TaskState, ) +from servicelib.logging_utils import log_context from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed _logger = logging.getLogger(__name__) diff --git a/packages/service-library/src/servicelib/fastapi/queued_tasks/__init__.py b/packages/service-library/src/servicelib/celery/__init__.py similarity index 100% rename from packages/service-library/src/servicelib/fastapi/queued_tasks/__init__.py rename to packages/service-library/src/servicelib/celery/__init__.py diff --git a/packages/service-library/src/servicelib/queued_tasks/app_server.py b/packages/service-library/src/servicelib/celery/app_server.py similarity index 96% rename from packages/service-library/src/servicelib/queued_tasks/app_server.py rename to packages/service-library/src/servicelib/celery/app_server.py index 72f82fcc70e..0c55c0ed919 100644 --- a/packages/service-library/src/servicelib/queued_tasks/app_server.py +++ b/packages/service-library/src/servicelib/celery/app_server.py @@ -5,7 +5,7 @@ from asyncio import AbstractEventLoop from typing import Final, Generic, TypeVar -from servicelib.queued_tasks.task_manager import TaskManager +from servicelib.celery.task_manager import TaskManager STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() diff --git a/packages/service-library/src/servicelib/queued_tasks/models.py b/packages/service-library/src/servicelib/celery/models.py similarity index 100% rename from packages/service-library/src/servicelib/queued_tasks/models.py rename to packages/service-library/src/servicelib/celery/models.py diff --git a/packages/service-library/src/servicelib/queued_tasks/task_manager.py b/packages/service-library/src/servicelib/celery/task_manager.py similarity index 96% rename from packages/service-library/src/servicelib/queued_tasks/task_manager.py rename to packages/service-library/src/servicelib/celery/task_manager.py index 6b50fe521b0..f8e178348c0 100644 --- a/packages/service-library/src/servicelib/queued_tasks/task_manager.py +++ b/packages/service-library/src/servicelib/celery/task_manager.py @@ -2,7 +2,7 @@ from models_library.progress_bar import ProgressReport -from ..queued_tasks.models import ( +from ..celery.models import ( Task, TaskContext, TaskID, diff --git a/packages/service-library/src/servicelib/queued_tasks/__init__.py b/packages/service-library/src/servicelib/fastapi/celery/__init__.py similarity index 100% rename from packages/service-library/src/servicelib/queued_tasks/__init__.py rename to packages/service-library/src/servicelib/fastapi/celery/__init__.py diff --git a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py b/packages/service-library/src/servicelib/fastapi/celery/app_server.py similarity index 94% rename from packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py rename to packages/service-library/src/servicelib/fastapi/celery/app_server.py index 5cac911f6ab..3a5ab7303de 100644 --- a/packages/service-library/src/servicelib/fastapi/queued_tasks/app_server.py +++ 
b/packages/service-library/src/servicelib/fastapi/celery/app_server.py @@ -4,7 +4,7 @@ from asgi_lifespan import LifespanManager from fastapi import FastAPI -from ...queued_tasks.app_server import BaseAppServer +from ...celery.app_server import BaseAppServer _SHUTDOWN_TIMEOUT: Final[float] = timedelta(seconds=10).total_seconds() _STARTUP_TIMEOUT: Final[float] = timedelta(minutes=1).total_seconds() diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index e7b31d8f1dc..56a0343fdae 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -7,8 +7,8 @@ ) from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID +from servicelib.celery.models import TaskID from servicelib.logging_utils import log_context -from servicelib.queued_tasks.models import TaskID from ...dsm import get_dsm_provider from ...models import FileMetaData diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 2b0adc3405e..142c0f3968b 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -6,8 +6,8 @@ from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter +from servicelib.celery.models import TaskID from servicelib.logging_utils import log_context -from servicelib.queued_tasks.models import TaskID from servicelib.utils import limited_gather from ...constants import MAX_CONCURRENT_S3_TASKS diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 6d87d4db246..83f93f6fc4c 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -11,9 +11,9 @@ from models_library.projects_nodes_io import StorageFileID from models_library.users import UserID from pydantic import TypeAdapter +from servicelib.celery.models import TaskID from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData -from servicelib.queued_tasks.models import TaskID from ...dsm import get_dsm_provider from ...simcore_s3_dsm import SimcoreS3DataManager diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index 8c3ab307226..84857370260 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -20,8 +20,8 @@ from models_library.projects_nodes_io import LocationID, StorageFileID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status -from servicelib.queued_tasks.models import TaskMetadata, TaskUUID -from servicelib.queued_tasks.task_manager import TaskManager +from servicelib.celery.models import TaskMetadata, TaskUUID +from servicelib.celery.task_manager import TaskManager from yarl import URL from ...dsm import get_dsm_provider diff --git 
a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py index e89c32de291..8628413e83c 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py @@ -21,8 +21,8 @@ JobNotDoneError, JobSchedulerError, ) +from servicelib.celery.models import TaskState from servicelib.logging_utils import log_catch -from servicelib.queued_tasks.models import TaskState from servicelib.rabbitmq import RPCRouter from ...modules.celery import get_task_manager_from_app diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py index 8a5170ad102..9c704d46820 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -7,7 +7,7 @@ AsyncJobNameData, ) from models_library.projects_nodes_io import LocationID -from servicelib.queued_tasks.models import TaskMetadata +from servicelib.celery.models import TaskMetadata from servicelib.rabbitmq import RPCRouter from ...modules.celery import get_task_manager_from_app diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index f3db8f061ca..6b0b27f87a5 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -5,7 +5,7 @@ ) from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport -from servicelib.queued_tasks.models import TaskMetadata, TasksQueue +from servicelib.celery.models import TaskMetadata, TasksQueue from servicelib.rabbitmq import RPCRouter from ...modules.celery import get_task_manager_from_app diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index 0be4ae04433..ebd9832b9e1 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -9,7 +9,7 @@ on_worker_init, on_worker_shutdown, ) -from servicelib.fastapi.queued_tasks.app_server import FastAPIAppServer +from servicelib.fastapi.celery.app_server import FastAPIAppServer from servicelib.logging_utils import config_all_loggers from ...api._worker_tasks.tasks import setup_worker_tasks diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 76015251c34..ad37ba752e6 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -63,7 +63,7 @@ ) from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status -from servicelib.fastapi.queued_tasks.app_server import FastAPIAppServer +from servicelib.fastapi.celery.app_server import FastAPIAppServer from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.utils import limited_gather from settings_library.rabbit import RabbitSettings diff --git a/services/storage/tests/unit/test_async_jobs.py b/services/storage/tests/unit/test_async_jobs.py index c6546aaade6..26140eb037c 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/services/storage/tests/unit/test_async_jobs.py @@ -25,7 +25,7 @@ from 
models_library.api_schemas_storage.export_data_async_jobs import AccessRightError from models_library.products import ProductName from models_library.users import UserID -from servicelib.queued_tasks.models import TaskID, TaskMetadata +from servicelib.celery.models import TaskID, TaskMetadata from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server
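After the final rename back to servicelib.celery, callers are meant to depend only on the TaskManager protocol rather than on CeleryTaskManager directly, as the storage REST handlers above already do. The following is a minimal usage sketch under that assumption: the task name "my_task", the context values, the extra files keyword argument and the polling interval are all made up for illustration; only the protocol methods, TaskMetadata(name=...) and TaskStatus.is_done come from the series itself.

import asyncio
from typing import Any

from servicelib.celery.models import TaskMetadata, TaskUUID
from servicelib.celery.task_manager import TaskManager


async def run_to_completion(task_manager: TaskManager) -> Any:
    # hypothetical context; real callers build it e.g. from AsyncJobNameData.model_dump()
    task_context = {"user_id": 42, "product_name": "osparc"}

    task_uuid: TaskUUID = await task_manager.submit_task(
        TaskMetadata(name="my_task"),  # hypothetical task, registered on the worker via register_task
        task_context=task_context,
        files=["a.txt", "b.txt"],  # extra keyword arguments are forwarded to the worker task
    )

    # poll the status until the worker reports the task as done, then fetch the result
    while not (await task_manager.get_task_status(task_context, task_uuid)).is_done:
        await asyncio.sleep(1)
    return await task_manager.get_task_result(task_context, task_uuid)

This mirrors the flow of complete_upload_file / is_completed_upload_file in the storage REST API, which submit via the protocol and later check get_task_status before reading get_task_result.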