@respx.mock
async def test_redirected_logs(caplog) -> None:
    """Test that redirected logs are formatted correctly."""
-    mocked_actor_logs_logs = (b"INFO a", b"WARNING b", b"DEBUG c")
+    mocked_actor_logs_logs = (
+        b"2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.\n"
+        b"2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.\n"
+        b"2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.",  # Several logs merged into one message
+        b"2025-05-13T07:24:14.132Z [apify] INFO multiline \n log",
+        b"2025-05-13T07:25:14.132Z [apify] WARNING some warning",
+        b"2025-05-13T07:26:14.132Z [apify] DEBUG c")
    mocked_actor_name = "mocked_actor"
+    mocked_run_id = "mocked_run_id"
+
+    expected_logs_and_levels = [
+        ("2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.", logging.INFO),
+        ("2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.", logging.INFO),
+        ("2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.", logging.INFO),
+        ("2025-05-13T07:24:14.132Z [apify] INFO multiline \n log", logging.INFO),
+        ("2025-05-13T07:25:14.132Z [apify] WARNING some warning", logging.WARNING),
+        ("2025-05-13T07:26:14.132Z [apify] DEBUG c", logging.DEBUG),
+    ]

    class AsyncByteStream:
        async def __aiter__(self) -> AsyncIterator[bytes]:
@@ -27,27 +43,25 @@ async def __aiter__(self) -> AsyncIterator[bytes]:
        async def aclose(self) -> None:
            pass

-    run_client = ApifyClientAsync(token="mocked_token", api_url='https://example.com').run(run_id="run_is_mocked")
-    respx.get(url='https://example.com/v2/actor-runs/run_is_mocked').mock(
-        return_value=httpx.Response(content=json.dumps({"data": {'actId': 'SbjD4JEucMevUdQAH'}}), status_code=200))
-    respx.get(url='https://example.com/v2/actor-runs/run_is_mocked/log?stream=1').mock(
+    respx.get(url=f'https://example.com/v2/actor-runs/{mocked_run_id}').mock(
+        return_value=httpx.Response(content=json.dumps({"data": {'id': mocked_run_id}}), status_code=200))
+    respx.get(url=f'https://example.com/v2/actor-runs/{mocked_run_id}/log?stream=1').mock(
        return_value=httpx.Response(stream=AsyncByteStream(), status_code=200))
-    # {'http_version': b'HTTP/1.1', 'network_stream': <httpcore._backends.anyio.AnyIOStream object at 0x7fc82543db70>, 'reason_phrase': b'OK'}
-    # [(b'Date', b'Mon, 12 May 2025 13:24:41 GMT'), (b'Content-Type', b'application/json; charset=utf-8'), (b'Transfer-Encoding', b'chunked'), (b'Connection', b'keep-alive'), (b'Cache-Control', b'no-cache, no-store, must-revalidate'), (b'Pragma', b'no-cache'), (b'Expires', b'0'), (b'Access-Control-Allow-Origin', b'*'), (b'Access-Control-Allow-Headers', b'User-Agent, Content-Type, Authorization, X-Apify-Request-Origin, openai-conversation-id, openai-ephemeral-user-id'), (b'Access-Control-Allow-Methods', b'GET, POST'), (b'Access-Control-Expose-Headers', b'X-Apify-Pagination-Total, X-Apify-Pagination-Offset, X-Apify-Pagination-Desc, X-Apify-Pagination-Count, X-Apify-Pagination-Limit'), (b'Referrer-Policy', b'no-referrer'), (b'X-Robots-Tag', b'none'), (b'X-RateLimit-Limit', b'200'), (b'Location', b'https://api.apify.com/v2/actor-runs/ywNUnFFbOksQLa4mH'), (b'Vary', b'Accept-Encoding'), (b'Content-Encoding', b'gzip')]
+
+    run_client = ApifyClientAsync(token="mocked_token", api_url='https://example.com').run(run_id=mocked_run_id)
    streamed_log = await run_client.get_streamed_log(actor_name=mocked_actor_name)

-    with caplog.at_level(logging.DEBUG):
+    # Set `propagate=True` during the tests, so that caplog can see the logs.
+    logger_name = f"apify.{mocked_actor_name}-{mocked_run_id}"
+    logging.getLogger(logger_name).propagate = True
+
+    with caplog.at_level(logging.DEBUG, logger=logger_name):
        async with streamed_log:
+            # Do stuff while the log from the other actor is being redirected to the logs.
            await asyncio.sleep(1)
-            # do some stuff
-            pass

    records = caplog.records
-    assert len(records) == 2
-
-
-
-    """
-
-    {'actId': 'SbjD4JEucMevUdQAH', 'buildId': 'Jv7iIjo1JV0gEXQEm', 'buildNumber': '0.0.5', 'containerUrl': 'https://tlo2axp6qbc7.runs.apify.net', 'defaultDatasetId': 'DZq6uDwZ4gSXev8h2', 'defaultKeyValueStoreId': '7UswAGyvNKFGlddHS', 'defaultRequestQueueId': 'Gk4ye89GRCoqFNdsM', 'finishedAt': None, 'generalAccess': 'FOLLOW_USER_SETTING', 'id': 'u6Q52apBHWO09NjDP', 'meta': {'origin': 'API', 'userAgent': 'ApifyClient/1.9.0 (linux; Python/3.10.12); isAtHome/False'}, 'options': {'build': 'latest', 'diskMbytes': 2048, 'memoryMbytes': 1024, 'timeoutSecs': 3600}, 'startedAt': '2025-05-12T13:54:23.028Z', 'stats': {'computeUnits': 0, 'inputBodyLen': 15, 'migrationCount': 0, 'rebootCount': 0, 'restartCount': 0, 'resurrectCount': 0}, 'status': 'READY', 'usage': {'ACTOR_COMPUTE_UNITS': 0, 'DATASET_READS': 0, 'DATASET_WRITES': 0, 'DATA_TRANSFER_EXTERNAL_GBYTES': 0, 'DATA_TRANSFER_INTERNAL_GBYTES': 0, 'KEY_VALUE_STORE_LISTS': 0, 'KEY_VALUE_STORE_READS': 0, 'KEY_VALUE_STORE_WRITES': 1, 'PROXY_RESIDENTIAL_TRANSFER_GBYTES': 0, 'PROXY_SERPS': 0, 'REQUEST_QUEUE_READS': 0, 'REQUEST_QUEUE_WRITES': 0}, 'usageTotalUsd': 5e-05, 'usageUsd': {'ACTOR_COMPUTE_UNITS': 0, 'DATASET_READS': 0, 'DATASET_WRITES': 0, 'DATA_TRANSFER_EXTERNAL_GBYTES': 0, 'DATA_TRANSFER_INTERNAL_GBYTES': 0, 'KEY_VALUE_STORE_LISTS': 0, 'KEY_VALUE_STORE_READS': 0, 'KEY_VALUE_STORE_WRITES': 5e-05, 'PROXY_RESIDENTIAL_TRANSFER_GBYTES': 0, 'PROXY_SERPS': 0, 'REQUEST_QUEUE_READS': 0, 'REQUEST_QUEUE_WRITES': 0}, 'userId': 'LjAzEG1CadliECnrn'}
-    """
+    assert len(records) == 6
+    for expected_log_and_level, record in zip(expected_logs_and_levels, records):
+        assert expected_log_and_level[0] == record.message
+        assert expected_log_and_level[1] == record.levelno
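
The `@@ -27,27 +43,25 @@` hunk header above elides the body of `__aiter__`. As a hedged illustration only (not the hidden code from this PR), a mock stream of this shape could drive the test by yielding each prepared byte chunk as a separate piece of the streamed log response; the `chunks` tuple and the short sleep between chunks are assumptions added here for the sketch, so the redirector has time to treat each chunk as its own message.

import asyncio
from collections.abc import AsyncIterator

# Stand-in for `mocked_actor_logs_logs` from the test above (assumed content).
chunks = (b"2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.\n",)

class AsyncByteStream:
    """Minimal mock stream that yields each prepared chunk in order."""

    async def __aiter__(self) -> AsyncIterator[bytes]:
        for chunk in chunks:
            yield chunk
            await asyncio.sleep(0.01)  # assumed pause so chunks arrive as separate messages

    async def aclose(self) -> None:
        pass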