Commit 325660d

Merge remote-tracking branch 'origin/master' into unique-key

2 parents: 1b68d1a + f7dad25

File tree: 14 files changed (+100, −117 lines)

.github/workflows/release.yaml

Lines changed: 1 addition & 8 deletions

@@ -61,16 +61,9 @@ jobs:
     with:
       python-versions: '["3.10", "3.11", "3.12", "3.13"]'
 
-  integration_tests:
-    name: Integration tests
-    uses: apify/workflows/.github/workflows/python_integration_tests.yaml@main
-    secrets: inherit
-    with:
-      python-versions: '["3.10", "3.13"]'
-
   update_changelog:
     name: Update changelog
-    needs: [release_metadata, lint_check, type_check, unit_tests, integration_tests]
+    needs: [release_metadata, lint_check, type_check, unit_tests]
     uses: apify/workflows/.github/workflows/python_bump_and_update_changelog.yaml@main
     with:
       version_number: ${{ needs.release_metadata.outputs.version_number }}

CHANGELOG.md

Lines changed: 26 additions & 0 deletions

@@ -22,6 +22,8 @@ All notable changes to this project will be documented in this file.
 ### Refactor
 
 - [**breaking**] Adapt to the Crawlee v1.0 ([#470](https://github.com/apify/apify-sdk-python/pull/470)) ([f7e3320](https://github.com/apify/apify-sdk-python/commit/f7e33206cf3e4767faacbdc43511b45b6785f929)) by [@vdusek](https://github.com/vdusek), closes [#469](https://github.com/apify/apify-sdk-python/issues/469), [#540](https://github.com/apify/apify-sdk-python/issues/540)
+- [**breaking**] Replace `httpx` with `impit` ([#560](https://github.com/apify/apify-sdk-python/pull/560)) ([cca3869](https://github.com/apify/apify-sdk-python/commit/cca3869e85968865e56aafcdcb36fbccba27aef0)) by [@Mantisus](https://github.com/Mantisus), closes [#558](https://github.com/apify/apify-sdk-python/issues/558)
+- [**breaking**] Remove `Request.id` field ([#553](https://github.com/apify/apify-sdk-python/pull/553)) ([445ab5d](https://github.com/apify/apify-sdk-python/commit/445ab5d752b785fc2018b35c8adbe779253d7acd)) by [@Pijukatel](https://github.com/Pijukatel)
 
 
 <!-- git-cliff-unreleased-end -->
@@ -121,6 +123,30 @@ All notable changes to this project will be documented in this file.
 - Tagline overlap ([#501](https://github.com/apify/apify-sdk-python/pull/501)) ([bae8340](https://github.com/apify/apify-sdk-python/commit/bae8340c46fea756ea35ea4d591da84c09d478e2)) by [@katzino](https://github.com/katzino)
 
 
+## [2.7.0](https://github.com/apify/apify-sdk-python/releases/tag/v2.7.0) (2025-07-14)
+
+### 🚀 Features
+
+- **crypto:** Decrypt secret objects ([#482](https://github.com/apify/apify-sdk-python/pull/482)) ([ce9daf7](https://github.com/apify/apify-sdk-python/commit/ce9daf7381212b8dc194e8a643e5ca0dedbc0078)) by [@MFori](https://github.com/MFori)
+
+### 🐛 Bug Fixes
+
+- Sync `@docusaurus` theme version [internal] ([#500](https://github.com/apify/apify-sdk-python/pull/500)) ([a7485e7](https://github.com/apify/apify-sdk-python/commit/a7485e7d2276fde464ce862573d5b95e7d4d836a)) by [@katzino](https://github.com/katzino)
+- Tagline overlap ([#501](https://github.com/apify/apify-sdk-python/pull/501)) ([bae8340](https://github.com/apify/apify-sdk-python/commit/bae8340c46fea756ea35ea4d591da84c09d478e2)) by [@katzino](https://github.com/katzino)
+
+
+## [2.7.0](https://github.com/apify/apify-sdk-python/releases/tag/v2.7.0) (2025-07-14)
+
+### 🚀 Features
+
+- **crypto:** Decrypt secret objects ([#482](https://github.com/apify/apify-sdk-python/pull/482)) ([ce9daf7](https://github.com/apify/apify-sdk-python/commit/ce9daf7381212b8dc194e8a643e5ca0dedbc0078)) by [@MFori](https://github.com/MFori)
+
+### 🐛 Bug Fixes
+
+- Sync `@docusaurus` theme version [internal] ([#500](https://github.com/apify/apify-sdk-python/pull/500)) ([a7485e7](https://github.com/apify/apify-sdk-python/commit/a7485e7d2276fde464ce862573d5b95e7d4d836a)) by [@katzino](https://github.com/katzino)
+- Tagline overlap ([#501](https://github.com/apify/apify-sdk-python/pull/501)) ([bae8340](https://github.com/apify/apify-sdk-python/commit/bae8340c46fea756ea35ea4d591da84c09d478e2)) by [@katzino](https://github.com/katzino)
+
+
 ## [2.7.3](https://github.com/apify/apify-sdk-python/releases/tag/v2.7.3) (2025-08-11)
 
 ### 🐛 Bug Fixes

pyproject.toml

Lines changed: 9 additions & 9 deletions

@@ -36,16 +36,17 @@ keywords = [
 dependencies = [
     "apify-client>=2.0.0,<3.0.0",
     "apify-shared>=2.0.0,<3.0.0",
-    "crawlee@git+https://github.com/apify/crawlee-python.git@master",
+    "crawlee==1.0.0rc1",
     "cachetools>=5.5.0",
     "cryptography>=42.0.0",
-    "httpx>=0.27.0",
     # TODO: ensure compatibility with the latest version of lazy-object-proxy
     # https://github.com/apify/apify-sdk-python/issues/460
+    "impit>=0.5.3",
     "lazy-object-proxy<1.11.0",
     "more_itertools>=10.2.0",
     "typing-extensions>=4.1.0",
     "websockets>=14.0",
+    "yarl>=1.18.0",
 ]
 
 [project.optional-dependencies]
@@ -81,7 +82,6 @@ dev = [
     "types-cachetools~=6.0.0.20250525",
     "uvicorn[standard]",
     "werkzeug~=3.1.0", # Werkzeug is used by httpserver
-    "yarl~=1.20.0", # yarl is used by crawlee
 ]
 
 [tool.hatch.build.targets.wheel]
@@ -213,12 +213,12 @@ exclude = []
 
 [[tool.mypy.overrides]]
 module = [
-    'bs4',
-    'lazy_object_proxy',
-    'nest_asyncio',
-    'playwright.*',
-    'scrapy.*',
-    'selenium.*',
+    'bs4', # Documentation
+    'httpx', # Documentation
+    'lazy_object_proxy', # Untyped and stubs not available
+    'playwright.*', # Documentation
+    'scrapy.*', # Untyped and stubs not available
+    'selenium.*', # Documentation
 ]
 ignore_missing_imports = true
 
src/apify/_proxy_configuration.py

Lines changed: 6 additions & 4 deletions

@@ -1,13 +1,15 @@
 from __future__ import annotations
 
 import ipaddress
+import json
 import re
 from dataclasses import dataclass, field
 from re import Pattern
 from typing import TYPE_CHECKING, Any
 from urllib.parse import urljoin, urlparse
 
-import httpx
+import impit
+from yarl import URL
 
 from apify_shared.consts import ApifyEnvVars
 from crawlee.proxy_configuration import ProxyConfiguration as CrawleeProxyConfiguration
@@ -231,7 +233,7 @@ async def new_proxy_info(
             return None
 
         if self._uses_apify_proxy:
-            parsed_url = httpx.URL(proxy_info.url)
+            parsed_url = URL(proxy_info.url)
             username = self._get_username(session_id)
 
             return ProxyInfo(
@@ -275,11 +277,11 @@ async def _check_access(self) -> None:
             return
 
         status = None
-        async with httpx.AsyncClient(proxy=proxy_info.url, timeout=10) as client:
+        async with impit.AsyncClient(proxy=proxy_info.url, timeout=10) as client:
             for _ in range(2):
                 try:
                     response = await client.get(proxy_status_url)
-                    status = response.json()
+                    status = json.loads(response.text)
                     break
                 except Exception:  # noqa: S110
                     # retry on connection errors
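
For context, a minimal standalone sketch of the migrated access check, assuming only the `impit` surface visible in this diff (`AsyncClient(proxy=..., timeout=...)`, `get()`, and a `text` attribute on the response). The proxy URL and status URL below are placeholders for illustration, not the values the SDK actually resolves:

import asyncio
import json

import impit

# Hypothetical values for illustration only; the real ones come from ProxyConfiguration.
PROXY_URL = 'http://username:password@proxy.example.com:8000'
PROXY_STATUS_URL = 'http://proxy.example.com/?format=json'


async def check_proxy_access() -> dict | None:
    """Fetch the proxy status page through the proxy, retrying once, as in the diff."""
    status = None
    async with impit.AsyncClient(proxy=PROXY_URL, timeout=10) as client:
        for _ in range(2):
            try:
                response = await client.get(PROXY_STATUS_URL)
                # Unlike httpx's response.json(), the diff parses the body manually
                # with json.loads(response.text).
                status = json.loads(response.text)
                break
            except Exception:  # retry on connection errors
                pass
    return status


if __name__ == '__main__':
    print(asyncio.run(check_proxy_access()))
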

src/apify/log.py

Lines changed: 0 additions & 7 deletions

@@ -27,13 +27,6 @@ def _configure_logging() -> None:
     else:
         apify_client_logger.setLevel(level)
 
-    # Silence HTTPX logger unless debug logging is requested
-    httpx_logger = logging.getLogger('httpx')
-    if level > logging.DEBUG:
-        httpx_logger.setLevel(logging.WARNING)
-    else:
-        httpx_logger.setLevel(level)
-
     # Use configured log level for apify logger
     apify_logger = logging.getLogger('apify')
     configure_logger(apify_logger, remove_old_handlers=True)

src/apify/scrapy/_logging_config.py

Lines changed: 1 addition & 4 deletions

@@ -10,7 +10,7 @@
 
 # Define logger names.
 _PRIMARY_LOGGERS = ['apify', 'apify_client', 'scrapy']
-_SUPPLEMENTAL_LOGGERS = ['filelock', 'hpack', 'httpcore', 'httpx', 'protego', 'twisted']
+_SUPPLEMENTAL_LOGGERS = ['filelock', 'hpack', 'httpcore', 'protego', 'twisted']
 _ALL_LOGGERS = _PRIMARY_LOGGERS + _SUPPLEMENTAL_LOGGERS
 
 
@@ -37,9 +37,6 @@ def initialize_logging() -> None:
     for logger_name in [None, *_ALL_LOGGERS]:
         _configure_logger(logger_name, logging_level, handler)
 
-    # Set the 'httpx' logger to a less verbose level.
-    logging.getLogger('httpx').setLevel('WARNING')
-
     # Monkey-patch Scrapy's logging configuration to re-apply our settings.
     original_configure_logging = scrapy_logging.configure_logging
 
src/apify/storage_clients/_apify/_request_queue_client.py

Lines changed: 23 additions & 23 deletions

@@ -30,6 +30,29 @@
 logger = getLogger(__name__)
 
 
+def unique_key_to_request_id(unique_key: str, *, request_id_length: int = 15) -> str:
+    """Generate a deterministic request ID based on a unique key.
+
+    Args:
+        unique_key: The unique key to convert into a request ID.
+        request_id_length: The length of the request ID.
+
+    Returns:
+        A URL-safe, truncated request ID based on the unique key.
+    """
+    # Encode the unique key and compute its SHA-256 hash
+    hashed_key = sha256(unique_key.encode('utf-8')).digest()
+
+    # Encode the hash in base64 and decode it to get a string
+    base64_encoded = b64encode(hashed_key).decode('utf-8')
+
+    # Remove characters that are not URL-safe ('+', '/', or '=')
+    url_safe_key = re.sub(r'(\+|\/|=)', '', base64_encoded)
+
+    # Truncate the key to the desired length
+    return url_safe_key[:request_id_length]
+
+
 class ApifyRequestQueueClient(RequestQueueClient):
     """An Apify platform implementation of the request queue client."""
 
@@ -760,26 +783,3 @@ def _cache_request(
             hydrated=hydrated_request,
             lock_expires_at=None,
         )
-
-
-def unique_key_to_request_id(unique_key: str, *, request_id_length: int = 15) -> str:
-    """Generate a deterministic request ID based on a unique key.
-
-    Args:
-        unique_key: The unique key to convert into a request ID.
-        request_id_length: The length of the request ID.
-
-    Returns:
-        A URL-safe, truncated request ID based on the unique key.
-    """
-    # Encode the unique key and compute its SHA-256 hash
-    hashed_key = sha256(unique_key.encode('utf-8')).digest()
-
-    # Encode the hash in base64 and decode it to get a string
-    base64_encoded = b64encode(hashed_key).decode('utf-8')
-
-    # Remove characters that are not URL-safe ('+', '/', or '=')
-    url_safe_key = re.sub(r'(\+|\/|=)', '', base64_encoded)
-
-    # Truncate the key to the desired length
-    return url_safe_key[:request_id_length]
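
Since this diff only moves the helper above the class, here is a self-contained sketch of how it behaves; the imports are added here for completeness and in the module itself they come from its existing import block:

import re
from base64 import b64encode
from hashlib import sha256


def unique_key_to_request_id(unique_key: str, *, request_id_length: int = 15) -> str:
    """Generate a deterministic request ID based on a unique key."""
    # SHA-256 hash of the unique key, base64-encoded, stripped of non-URL-safe
    # characters ('+', '/', '='), then truncated to the requested length.
    hashed_key = sha256(unique_key.encode('utf-8')).digest()
    base64_encoded = b64encode(hashed_key).decode('utf-8')
    url_safe_key = re.sub(r'(\+|\/|=)', '', base64_encoded)
    return url_safe_key[:request_id_length]


# The same unique key always maps to the same ID; different keys map to different IDs.
assert unique_key_to_request_id('https://example.com') == unique_key_to_request_id('https://example.com')
assert unique_key_to_request_id('https://example.com') != unique_key_to_request_id('https://example.org')
assert len(unique_key_to_request_id('https://example.com')) == 15
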
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
 # The test fixture will put the Apify SDK wheel path on the next line
 APIFY_SDK_WHEEL_PLACEHOLDER
 uvicorn[standard]
-crawlee[parsel] @ git+https://github.com/apify/crawlee-python.git@master
+crawlee[parsel]==1.0.0rc1

tests/integration/conftest.py

Lines changed: 2 additions & 7 deletions

@@ -98,14 +98,9 @@ def apify_token() -> str:
     return api_token
 
 
-@pytest.fixture
+@pytest.fixture(scope='session')
 def apify_client_async(apify_token: str) -> ApifyClientAsync:
-    """Create an instance of the ApifyClientAsync.
-
-    This fixture can't be session-scoped, because then you start getting `RuntimeError: Event loop is closed` errors,
-    because `httpx.AsyncClient` in `ApifyClientAsync` tries to reuse the same event loop across requests,
-    but `pytest-asyncio` closes the event loop after each test, and uses a new one for the next test.
-    """
+    """Create an instance of the ApifyClientAsync."""
    api_url = os.getenv(_API_URL_ENV_VAR)
 
     return ApifyClientAsync(apify_token, api_url=api_url)
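
A hypothetical test illustrating how the now session-scoped fixture is consumed; the `user().get()` call assumes the standard `apify-client` API, and pytest-asyncio still creates a fresh event loop per test, which is safe once the client no longer pins itself to a single loop the way the httpx-based client did:

import pytest
from apify_client import ApifyClientAsync


@pytest.mark.asyncio
async def test_client_is_reused(apify_client_async: ApifyClientAsync) -> None:
    # With scope='session', every test receives the same client instance.
    me = await apify_client_async.user().get()
    assert me is not None
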

tests/unit/actor/test_actor_create_proxy_configuration.py

Lines changed: 2 additions & 2 deletions

@@ -25,7 +25,7 @@ def patched_apify_client(apify_client_async_patcher: ApifyClientAsyncPatcher) ->
     return ApifyClientAsync()
 
 
-@pytest.mark.usefixtures('patched_httpx_client')
+@pytest.mark.usefixtures('patched_impit_client')
 async def test_basic_proxy_configuration_creation(
     monkeypatch: pytest.MonkeyPatch,
     httpserver: HTTPServer,
@@ -68,7 +68,7 @@ def request_handler(request: Request, response: Response) -> Response:
     await Actor.exit()
 
 
-@pytest.mark.usefixtures('patched_httpx_client')
+@pytest.mark.usefixtures('patched_impit_client')
 async def test_proxy_configuration_with_actor_proxy_input(
     monkeypatch: pytest.MonkeyPatch,
     httpserver: HTTPServer,
