
Commit 2923c87

chore: Fix some types ignoring (#1667)
### Description

- Resolving some cases of type ignoring for `ty`
1 parent 72c26e1 commit 2923c87

File tree

5 files changed: +17 -9 lines changed

src/crawlee/_browserforge_workaround.py

Lines changed: 7 additions & 3 deletions

```diff
@@ -1,4 +1,8 @@
 # ruff: noqa: N802, PLC0415
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
 
 
 def patch_browserforge() -> None:
@@ -12,15 +16,15 @@ def patch_browserforge() -> None:
     import apify_fingerprint_datapoints
     from browserforge import download
 
-    download.DATA_DIRS: dict[str, Path] = {  # type:ignore[misc]
+    download.DATA_DIRS = {
         'headers': apify_fingerprint_datapoints.get_header_network().parent,
         'fingerprints': apify_fingerprint_datapoints.get_fingerprint_network().parent,
     }
 
     def DownloadIfNotExists(**flags: bool) -> None:
         pass
 
-    download.DownloadIfNotExists = DownloadIfNotExists  # ty: ignore[invalid-assignment]
+    download.DownloadIfNotExists: Callable[..., None] = DownloadIfNotExists
 
     import browserforge.bayesian_network
 
@@ -33,7 +37,7 @@ def __init__(self, path: Path) -> None:
             path = download.DATA_DIRS['fingerprints'] / download.DATA_FILES['fingerprints'][path.name]
             super().__init__(path)
 
-    browserforge.bayesian_network.BayesianNetwork = BayesianNetwork  # type:ignore[misc]
+    browserforge.bayesian_network.BayesianNetwork: BayesianNetwork = BayesianNetwork
     import browserforge.headers.generator
 
     browserforge.headers.generator.DATA_DIR = download.DATA_DIRS['headers']
```
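The replaced assignments above switch to PEP 526 annotated assignments on module attributes, so `ty` accepts the monkey-patch without an ignore comment. A minimal sketch of that pattern, using a hypothetical throwaway module and a `no_op` stand-in rather than the real `browserforge.download`:

```python
from collections.abc import Callable
from types import ModuleType

# Hypothetical stand-in module; the real code patches `browserforge.download`.
fake_download = ModuleType('fake_download')


def no_op(**flags: bool) -> None:
    """Stand-in that skips the real download step."""


# Annotated assignment: declare the attribute's type on the patching line
# instead of silencing the checker with an ignore comment.
fake_download.DownloadIfNotExists: Callable[..., None] = no_op
```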

src/crawlee/_utils/context.py

Lines changed: 2 additions & 2 deletions

```diff
@@ -3,7 +3,7 @@
 import inspect
 from collections.abc import Callable
 from functools import wraps
-from typing import Any, TypeVar
+from typing import Any, TypeVar, cast
 
 T = TypeVar('T', bound=Callable[..., Any])
 
@@ -44,4 +44,4 @@ async def async_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
 
         return await method(self, *args, **kwargs)
 
-    return async_wrapper if inspect.iscoroutinefunction(method) else sync_wrapper  # ty: ignore[invalid-return-type]
+    return cast('T', async_wrapper if inspect.iscoroutinefunction(method) else sync_wrapper)
```
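Here the decorator returns either a sync or an async wrapper, which the checker previously flagged as an invalid return type. The commit swaps the ignore comment for `typing.cast`, which narrows the result back to the bound type variable `T`. A minimal sketch of that pattern under a hypothetical `log_calls` decorator (not part of crawlee):

```python
import inspect
from collections.abc import Callable
from functools import wraps
from typing import Any, TypeVar, cast

T = TypeVar('T', bound=Callable[..., Any])


def log_calls(method: T) -> T:
    @wraps(method)
    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
        print(f'calling {method.__name__}')
        return method(*args, **kwargs)

    @wraps(method)
    async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
        print(f'calling {method.__name__}')
        return await method(*args, **kwargs)

    # cast tells the checker the chosen wrapper still matches T,
    # so no `# ty: ignore[invalid-return-type]` is needed.
    return cast('T', async_wrapper if inspect.iscoroutinefunction(method) else sync_wrapper)


@log_calls
def add(a: int, b: int) -> int:
    return a + b  # the decorated function keeps its original signature for the checker
```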

src/crawlee/fingerprint_suite/_browserforge_adapter.py

Lines changed: 1 addition & 1 deletion

```diff
@@ -154,7 +154,7 @@ def _get_single_browser_type(self, browser: Iterable[str | Browser] | None) -> s
 class PatchedFingerprintGenerator(bf_FingerprintGenerator):
     """Browserforge `FingerprintGenerator` that contains patches not accepted in upstream repo."""
 
-    def __init__(  # type:ignore[no-untyped-def]  # Upstream repo types missing.
+    def __init__(
         self,
         *,
         screen: Screen | None = None,
```

src/crawlee/sessions/_cookies.py

Lines changed: 6 additions & 2 deletions

```diff
@@ -10,6 +10,7 @@
 
 if TYPE_CHECKING:
     from collections.abc import Iterator
+    from typing import TypeGuard
 
 
 @docs_group('Session management')
@@ -152,8 +153,8 @@ def _convert_cookie_to_dict(self, cookie: Cookie) -> CookieParam:
         if cookie.expires:
             cookie_dict['expires'] = cookie.expires
 
-        if (same_site := cookie.get_nonstandard_attr('SameSite')) and same_site in {'Lax', 'None', 'Strict'}:
-            cookie_dict['same_site'] = same_site  # ty: ignore[invalid-assignment]
+        if (same_site := cookie.get_nonstandard_attr('SameSite')) and self._is_valid_same_site(same_site):
+            cookie_dict['same_site'] = same_site
 
         return cookie_dict
 
@@ -274,3 +275,6 @@ def __hash__(self) -> int:
         """Return hash based on the cookies key attributes."""
         cookie_tuples = frozenset((cookie.name, cookie.value, cookie.domain, cookie.path) for cookie in self._jar)
         return hash(cookie_tuples)
+
+    def _is_valid_same_site(self, value: str | None) -> TypeGuard[Literal['Lax', 'None', 'Strict']]:
+        return value in {'Lax', 'None', 'Strict'}
```
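The `same_site` fix replaces the ignore comment with a `TypeGuard` helper: once the guard returns `True`, the checker narrows the plain `str` to the `Literal` union that the `same_site` field expects. A minimal self-contained sketch of that narrowing, with a simplified `CookieParam` stand-in rather than crawlee's real typed dict:

```python
from typing import Literal, TypedDict, TypeGuard


class CookieParam(TypedDict, total=False):
    # Simplified stand-in for illustration only.
    name: str
    same_site: Literal['Lax', 'None', 'Strict']


def is_valid_same_site(value: str | None) -> TypeGuard[Literal['Lax', 'None', 'Strict']]:
    return value in {'Lax', 'None', 'Strict'}


cookie: CookieParam = {'name': 'session'}
same_site: str | None = 'Lax'
if is_valid_same_site(same_site):
    # Inside this branch the checker treats `same_site` as the Literal union,
    # so the assignment type-checks without an ignore comment.
    cookie['same_site'] = same_site
```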

tests/unit/crawlers/_playwright/test_playwright_crawler.py

Lines changed: 1 addition & 1 deletion

```diff
@@ -424,7 +424,7 @@ async def test_save_cookies_after_handler_processing(server_url: URL) -> None:
     @crawler.router.default_handler
     async def request_handler(context: PlaywrightCrawlingContext) -> None:
         # Simulate cookies installed from an external source in the browser
-        await context.page.context.add_cookies([{'name': 'check', 'value': 'test', 'url': str(server_url)}])  # ty: ignore[invalid-argument-type]
+        await context.page.context.add_cookies([{'name': 'check', 'value': 'test', 'url': str(server_url)}])
 
         if context.session:
             session_ids.append(context.session.id)
```
