@@ -1,16 +1,22 @@
 import asyncio
+import datetime
 import logging
 import time
-from collections.abc import Callable
+from collections.abc import Callable, Iterable
 from random import randint
+from typing import Any
 
 import pytest
 from celery import Celery, Task
 from celery.contrib.abortable import AbortableTask
+from celery.contrib.testing.worker import TestWorkController, start_worker
+from celery.signals import worker_init, worker_shutdown
 from common_library.errors_classes import OsparcErrorMixin
 from models_library.progress_bar import ProgressReport
 from pydantic import TypeAdapter, ValidationError
-from servicelib.logging_utils import log_context
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from servicelib.logging_utils import config_all_loggers, log_context
+from simcore_service_storage.core.settings import ApplicationSettings
 from simcore_service_storage.modules.celery import get_event_loop
 from simcore_service_storage.modules.celery._task import define_task
 from simcore_service_storage.modules.celery.client import CeleryTaskQueueClient
@@ -19,6 +25,10 @@
     TaskError,
     TaskState,
 )
+from simcore_service_storage.modules.celery.signals import (
+    on_worker_init,
+    on_worker_shutdown,
+)
 from simcore_service_storage.modules.celery.utils import (
     get_celery_worker,
     get_fastapi_app,
@@ -31,6 +41,67 @@
 pytest_simcore_ops_services_selection = []
 
 
+@pytest.fixture
+def celery_conf() -> dict[str, Any]:
+    return {
+        "broker_url": "memory://",
+        "result_backend": "cache+memory://",
+        "result_expires": datetime.timedelta(days=7),
+        "result_extended": True,
+        "pool": "threads",
+        "worker_send_task_events": True,
+        "task_track_started": True,
+        "task_send_sent_event": True,
+        "broker_connection_retry_on_startup": True,
+    }
+
+
+@pytest.fixture
+def celery_app(celery_conf: dict[str, Any]):
+    return Celery(**celery_conf)
+
+
+@pytest.fixture
+def celery_client(
+    app_environment: EnvVarsDict, celery_app: Celery
+) -> CeleryTaskQueueClient:
+    return CeleryTaskQueueClient(celery_app)
+
+
+@pytest.fixture
+def celery_worker_controller(
+    app_environment: EnvVarsDict,
+    app_settings: ApplicationSettings,
+    register_celery_tasks: Callable[[Celery], None],
+    celery_app: Celery,
+) -> Iterable[TestWorkController]:
+    # Signals must be explicitly connected
+    logging.basicConfig(level=logging.WARNING)  # NOSONAR
+    logging.root.setLevel(app_settings.log_level)
+    config_all_loggers(
+        log_format_local_dev_enabled=app_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED,
+        logger_filter_mapping=app_settings.STORAGE_LOG_FILTER_MAPPING,
+        tracing_settings=app_settings.STORAGE_TRACING,
+    )
+    worker_init.connect(on_worker_init)
+    worker_shutdown.connect(on_worker_shutdown)
+
+    register_celery_tasks(celery_app)
+
+    with start_worker(
+        celery_app,
+        pool="threads",
+        loglevel="info",
+        perform_ping_check=False,
+        worker_kwargs={"hostname": "celery@worker1"},
+    ) as worker:
+        worker_init.send(sender=worker)
+
+        yield worker
+
+        worker_shutdown.send(sender=worker)
+
+
 async def _async_archive(
     celery_app: Celery, task_name: str, task_id: str, files: list[str]
 ) -> str:
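
The fixtures above follow the stock Celery in-process testing pattern: an in-memory broker (memory://), a cache+memory result backend, and a real worker started in a background thread of the test process via start_worker. For reference, the snippet below is a minimal, self-contained sketch of that pattern using only public Celery APIs; the test name and the add task are illustrative and are not part of this change.

from celery import Celery
from celery.contrib.testing.worker import start_worker


def test_in_memory_worker_roundtrip() -> None:
    # Same transport choices as the celery_conf fixture: nothing leaves the test process.
    app = Celery(broker="memory://", backend="cache+memory://")
    app.conf.update(task_track_started=True, result_extended=True)

    @app.task(name="tests.add")  # illustrative task, not defined in this PR
    def add(x: int, y: int) -> int:
        return x + y

    # start_worker runs a real worker in a background thread until the block exits;
    # perform_ping_check=False avoids requiring the celery.ping test task.
    with start_worker(app, pool="threads", loglevel="info", perform_ping_check=False):
        assert add.delay(2, 3).get(timeout=10) == 5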