|
6 | 6 |
|
7 | 7 | import asyncio |
8 | 8 | import concurrent.futures |
| 9 | +import logging |
9 | 10 | import time |
10 | 11 | from collections.abc import AsyncIterator, Callable, Coroutine |
11 | 12 | from typing import Any |
| 13 | +from unittest import mock |
12 | 14 |
|
13 | 15 | import distributed |
14 | 16 | import pytest |
15 | 17 | from dask_task_models_library.container_tasks.errors import TaskCancelledError |
16 | 18 | from dask_task_models_library.container_tasks.events import TaskProgressEvent |
17 | 19 | from dask_task_models_library.container_tasks.io import TaskCancelEventName |
18 | 20 | from dask_task_models_library.container_tasks.protocol import TaskOwner |
| 21 | +from pytest_simcore.helpers.logging_tools import log_context |
19 | 22 | from simcore_service_dask_sidecar.utils.dask import ( |
20 | 23 | _DEFAULT_MAX_RESOURCES, |
21 | 24 | TaskPublisher, |
|
38 | 41 | ] |
39 | 42 |
|
40 | 43 |
|
@pytest.mark.parametrize("handler", [mock.Mock(), mock.AsyncMock()])
async def test_publish_event(
    dask_client: distributed.Client,
    job_id: str,
    task_owner: TaskOwner,
    monkeypatch: pytest.MonkeyPatch,
    handler: mock.Mock | mock.AsyncMock,
):
    """Publish a ``TaskProgressEvent`` from within a dask worker task and
    verify that a subscribed handler (sync ``Mock`` or ``AsyncMock``)
    receives it on the event topic.

    Parametrized over both mock kinds because ``subscribe_topic`` may be
    given either a plain callable or a coroutine function.
    """
    monkeypatch.setenv("DASK_SIDECAR_LOGLEVEL", "DEBUG")
    event_to_publish = TaskProgressEvent(
        job_id=job_id,
        msg="the log",
        progress=1,
        task_owner=task_owner,
    )

    # NOTE: only 1 handler per topic is allowed
    dask_client.subscribe_topic(TaskProgressEvent.topic_name(), handler)

    def _worker_task() -> int:
        # Runs on the dask worker process: publish_event is async, so spin up
        # a private event loop there and return a sentinel value on success.
        with log_context(logging.INFO, "_worker_task"):

            async def _() -> int:
                with log_context(logging.INFO, "_worker_task_async"):
                    await publish_event(event_to_publish)
                    return 2

            return asyncio.run(_())

    future = dask_client.submit(_worker_task)
    assert future.result(timeout=DASK_TESTING_TIMEOUT_S) == 2

    # Event delivery from worker to client is asynchronous: poll until the
    # handler has been called, retrying only on our own failed assertions.
    for attempt in Retrying(
        wait=wait_fixed(0.2),
        stop=stop_after_delay(15),
        reraise=True,
        retry=retry_if_exception_type(AssertionError),
    ):
        with attempt:
            events = dask_client.get_events(TaskProgressEvent.topic_name())
            assert events is not None, "No events received"
            assert isinstance(events, tuple)

            handler.assert_called_with(events[-1])

    assert isinstance(events, tuple)
    assert len(events) == 1
|
0 commit comments