
Commit 9843350

chore(deps): update dependency ruff to ~=0.13.0 (#1404)
Co-authored-by: Vlada Dusek <[email protected]>
1 parent: d2f32fc

23 files changed (+70 -67 lines)

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -99,7 +99,7 @@ dev = [
     "pytest-timeout~=2.4.0",
     "pytest-xdist~=3.8.0",
     "pytest~=8.4.0",
-    "ruff~=0.12.0",
+    "ruff~=0.13.0",
     "setuptools", # setuptools are used by pytest, but not explicitly required
     "types-beautifulsoup4~=4.12.0.20240229",
     "types-cachetools~=6.2.0.20250827",

src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py

Lines changed: 3 additions & 1 deletion
@@ -34,7 +34,9 @@

 @docs_group('Crawlers')
 class AbstractHttpCrawler(
-    Generic[TCrawlingContext, TParseResult, TSelectResult], BasicCrawler[TCrawlingContext, StatisticsState], ABC
+    BasicCrawler[TCrawlingContext, StatisticsState],
+    ABC,
+    Generic[TCrawlingContext, TParseResult, TSelectResult],
 ):
     """A web crawler for performing HTTP requests.
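
Most of the remaining changes reorder base classes so that `Generic[...]` comes last. This looks like a response to a lint rule in the newer ruff release (ruff ships a `generic-not-last-base-class` check, PYI059, that flags `Generic` appearing before other bases); placing `Generic` last is also the conventional ordering. A minimal, self-contained sketch of the pattern with made-up names:

```python
from abc import ABC, abstractmethod
from typing import Generic, TypeVar

TResult = TypeVar('TResult')


class BaseParser(ABC, Generic[TResult]):
    """Hypothetical parser base; Generic[...] is listed last, as in this commit."""

    @abstractmethod
    def parse(self, raw: str) -> TResult: ...


class IntParser(BaseParser[int]):
    def parse(self, raw: str) -> int:
        return int(raw)


print(IntParser().parse('42'))  # -> 42
```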

src/crawlee/crawlers/_abstract_http/_abstract_http_parser.py

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@


 @docs_group('HTTP parsers')
-class AbstractHttpParser(Generic[TParseResult, TSelectResult], ABC):
+class AbstractHttpParser(ABC, Generic[TParseResult, TSelectResult]):
     """Parser used for parsing HTTP response and inspecting parsed result to find links or detect blocking."""

     @abstractmethod

src/crawlee/crawlers/_abstract_http/_http_crawling_context.py

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ async def get_snapshot(self) -> PageSnapshot:

 @dataclass(frozen=True)
 @docs_group('Crawling contexts')
-class ParsedHttpCrawlingContext(Generic[TParseResult], HttpCrawlingContext):
+class ParsedHttpCrawlingContext(HttpCrawlingContext, Generic[TParseResult]):
     """The crawling context used by `AbstractHttpCrawler`.

     It provides access to key objects as well as utility functions for handling crawling tasks.

src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py

Lines changed: 1 addition & 1 deletion
@@ -85,8 +85,8 @@ async def __aexit__(

 @docs_group('Crawlers')
 class AdaptivePlaywrightCrawler(
-    Generic[TStaticCrawlingContext, TStaticParseResult, TStaticSelectResult],
     BasicCrawler[AdaptivePlaywrightCrawlingContext, AdaptivePlaywrightCrawlerStatisticState],
+    Generic[TStaticCrawlingContext, TStaticParseResult, TStaticSelectResult],
 ):
     """An adaptive web crawler capable of using both static HTTP request based crawling and browser based crawling.

src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawling_context.py

Lines changed: 2 additions & 1 deletion
@@ -31,7 +31,8 @@ class AdaptiveContextError(RuntimeError):
 @dataclass(frozen=True)
 @docs_group('Crawling contexts')
 class AdaptivePlaywrightCrawlingContext(
-    Generic[TStaticParseResult, TStaticSelectResult], ParsedHttpCrawlingContext[TStaticParseResult]
+    ParsedHttpCrawlingContext[TStaticParseResult],
+    Generic[TStaticParseResult, TStaticSelectResult],
 ):
     _static_parser: AbstractHttpParser[TStaticParseResult, TStaticSelectResult]
     """The crawling context used by `AdaptivePlaywrightCrawler`.

src/crawlee/crawlers/_basic/_basic_crawler.py

Lines changed: 2 additions & 2 deletions
@@ -204,7 +204,7 @@ class _BasicCrawlerOptions(TypedDict):
     Returning `None` suppresses the status message."""


-class _BasicCrawlerOptionsGeneric(Generic[TCrawlingContext, TStatisticsState], TypedDict):
+class _BasicCrawlerOptionsGeneric(TypedDict, Generic[TCrawlingContext, TStatisticsState]):
     """Generic options the `BasicCrawler` constructor."""

     request_handler: NotRequired[Callable[[TCrawlingContext], Awaitable[None]]]
@@ -219,9 +219,9 @@ class _BasicCrawlerOptionsGeneric(Generic[TCrawlingContext, TStatisticsState], T


 class BasicCrawlerOptions(
-    Generic[TCrawlingContext, TStatisticsState],
     _BasicCrawlerOptions,
     _BasicCrawlerOptionsGeneric[TCrawlingContext, TStatisticsState],
+    Generic[TCrawlingContext, TStatisticsState],
 ):
     """Arguments for the `BasicCrawler` constructor.
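
The options classes apply the same convention to generic `TypedDict`s: the `TypedDict` base (or other `TypedDict` parents) first, `Generic[...]` last. Generic `TypedDict`s need Python 3.11+ (or `typing_extensions` on older interpreters). A rough sketch with hypothetical names, not the actual crawlee definitions:

```python
from typing import Generic, NotRequired, TypedDict, TypeVar

TContext = TypeVar('TContext')


class _CommonOptions(TypedDict):
    max_retries: int


class HandlerOptions(_CommonOptions, Generic[TContext]):
    # Generic[...] is the last base, mirroring the ordering used in this commit.
    context: NotRequired[TContext]


options: HandlerOptions[int] = {'max_retries': 3, 'context': 7}
print(options)
```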

src/crawlee/crawlers/_playwright/_playwright_crawler.py

Lines changed: 1 addition & 1 deletion
@@ -509,9 +509,9 @@ class _PlaywrightCrawlerAdditionalOptions(TypedDict):


 class PlaywrightCrawlerOptions(
-    Generic[TCrawlingContext, TStatisticsState],
     _PlaywrightCrawlerAdditionalOptions,
     BasicCrawlerOptions[TCrawlingContext, StatisticsState],
+    Generic[TCrawlingContext, TStatisticsState],
 ):
     """Arguments for the `AbstractHttpCrawler` constructor.

src/crawlee/storage_clients/_file_system/_utils.py

Whitespace-only changes.

tests/unit/_autoscaling/test_autoscaled_pool.py

Lines changed: 2 additions & 2 deletions
@@ -111,7 +111,7 @@ async def run() -> None:
         ),
     )

-    with pytest.raises(RuntimeError, match='Scheduled crash'):
+    with pytest.raises(RuntimeError, match=r'Scheduled crash'):
         await pool.run()

     assert done_count < 20
@@ -139,7 +139,7 @@ async def run() -> None:
         ),
     )

-    with pytest.raises(RuntimeError, match='Scheduled crash'):
+    with pytest.raises(RuntimeError, match=r'Scheduled crash'):
         await pool.run()
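
The test tweaks only add an `r` prefix to the `match=` argument. `pytest.raises(..., match=...)` treats that string as a regular expression (it is matched with `re.search` against the exception message), so raw strings avoid surprises once a pattern contains backslashes or metacharacters; presumably the updated ruff flags non-raw patterns here. A small illustration, not taken from this repository:

```python
import pytest


def crash() -> None:
    raise RuntimeError('Scheduled crash (attempt 1)')


def test_crash_message() -> None:
    # match= is a regex, so '(' and ')' must be escaped; a raw string keeps
    # the escaping readable and survives stricter lint rules unchanged.
    with pytest.raises(RuntimeError, match=r'Scheduled crash \(attempt \d+\)'):
        crash()
```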

0 commit comments
