Skip to content

Commit c26dd9f

Browse files
committed
Merge branch 'master' into adapt-to-apify-client-v3
2 parents e8a6186 + 4b7032d commit c26dd9f

File tree

6 files changed: +165 additions, −197 deletions

codecov.yaml

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+coverage:
+  status:
+    project:
+      default:
+        target: auto
+        threshold: 0.10% # tolerate up to 0.10% decrease

src/apify/_actor.py

Lines changed: 1 addition & 1 deletion

@@ -175,7 +175,7 @@ async def __aenter__(self) -> Self:
         self.log.debug('Configuration initialized')

         # Update the global Actor proxy to refer to this instance.
-        cast('Proxy', Actor).__wrapped__ = self
+        cast('Proxy', Actor).__wrapped__ = self  # ty: ignore[invalid-assignment]
         self._is_exiting = False
         self._was_final_persist_state_emitted = False

tests/integration/apify_api/test_request_queue.py

Lines changed: 1 addition & 1 deletion

@@ -1098,7 +1098,7 @@ async def test_pre_existing_request_with_user_data(
     rq = request_queue_apify
     request = Request.from_url(
         'https://example.com',
-        user_data=custom_data,
+        user_data=custom_data.copy(),
     )

     # Add request by a different producer

tests/unit/actor/test_request_list.py

Lines changed: 5 additions & 1 deletion

@@ -8,7 +8,7 @@
 import pytest
 from yarl import URL

-from crawlee._request import UserData
+from crawlee._request import CrawleeRequestData, UserData
 from crawlee._types import HttpMethod

 from apify.request_loaders import ApifyRequestList
@@ -65,6 +65,8 @@ async def test_request_list_open_request_types(
     assert request.url == request_dict_input['url']
     assert request.payload == request_dict_input.get('payload', '').encode('utf-8')
     expected_user_data = UserData()
+    # `crawlee_data` must be present in user_data
+    expected_user_data.crawlee_data = CrawleeRequestData()
     if 'userData' in optional_input:
         for key, value in optional_input['userData'].items():
             expected_user_data[key] = value
@@ -181,6 +183,8 @@ async def test_request_list_open_from_url_additional_inputs(httpserver: HTTPServer
     assert request.headers.root == example_start_url_input['headers']
     assert request.payload == str(example_start_url_input['payload']).encode('utf-8')
     expected_user_data = UserData()
+    # `crawlee_data` must be present in user_data
+    expected_user_data.crawlee_data = CrawleeRequestData()
     for key, value in example_start_url_input['userData'].items():
         expected_user_data[key] = value
     assert request.user_data == expected_user_data

Comments (0)