Skip to content

Commit 44c8a70

Browse files
committed
More ruff rules.
1 parent 5b9cd49 commit 44c8a70

26 files changed

+561
-395
lines changed

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
repos:
22
- repo: https://github.com/astral-sh/ruff-pre-commit
3-
rev: v0.11.0
3+
rev: v0.14.13
44
hooks:
5-
- id: ruff
5+
- id: ruff-check
66
args: [ --fix ]
77
- id: ruff-format
88
- repo: https://github.com/adamchainz/blacken-docs

pyproject.toml

Lines changed: 166 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,3 +129,169 @@ filterwarnings = [
129129
"ignore:RetryMiddleware\\.process_spider_exception\\(\\):scrapy.exceptions.ScrapyDeprecationWarning",
130130
"ignore::scrapy.exceptions.ScrapyDeprecationWarning:scrapy_poet",
131131
]
132+
133+
[tool.ruff.lint]
134+
extend-select = [
135+
# flake8-builtins
136+
"A",
137+
# flake8-async
138+
"ASYNC",
139+
# flake8-bugbear
140+
"B",
141+
# flake8-comprehensions
142+
"C4",
143+
# flake8-commas
144+
"COM",
145+
# pydocstyle
146+
"D",
147+
# flake8-future-annotations
148+
"FA",
149+
# flynt
150+
"FLY",
151+
# refurb
152+
"FURB",
153+
# isort
154+
"I",
155+
# flake8-implicit-str-concat
156+
"ISC",
157+
# flake8-logging
158+
"LOG",
159+
# Perflint
160+
"PERF",
161+
# pygrep-hooks
162+
"PGH",
163+
# flake8-pie
164+
"PIE",
165+
# pylint
166+
"PL",
167+
# flake8-pytest-style
168+
"PT",
169+
# flake8-use-pathlib
170+
"PTH",
171+
# flake8-pyi
172+
"PYI",
173+
# flake8-quotes
174+
"Q",
175+
# flake8-return
176+
"RET",
177+
# flake8-raise
178+
"RSE",
179+
# Ruff-specific rules
180+
"RUF",
181+
# flake8-bandit
182+
"S",
183+
# flake8-simplify
184+
"SIM",
185+
# flake8-slots
186+
"SLOT",
187+
# flake8-debugger
188+
"T10",
189+
# flake8-type-checking
190+
"TC",
191+
# pyupgrade
192+
"UP",
193+
# pycodestyle warnings
194+
"W",
195+
# flake8-2020
196+
"YTT",
197+
]
198+
ignore = [
199+
# Trailing comma missing
200+
"COM812",
201+
# Missing docstring in public module
202+
"D100",
203+
# Missing docstring in public class
204+
"D101",
205+
# Missing docstring in public method
206+
"D102",
207+
# Missing docstring in public function
208+
"D103",
209+
# Missing docstring in public package
210+
"D104",
211+
# Missing docstring in magic method
212+
"D105",
213+
# Missing docstring in public nested class
214+
"D106",
215+
# Missing docstring in __init__
216+
"D107",
217+
# One-line docstring should fit on one line with quotes
218+
"D200",
219+
# No blank lines allowed after function docstring
220+
"D202",
221+
# 1 blank line required between summary line and description
222+
"D205",
223+
# Multi-line docstring closing quotes should be on a separate line
224+
"D209",
225+
# First line should end with a period
226+
"D400",
227+
# First line should be in imperative mood; try rephrasing
228+
"D401",
229+
# First line should not be the function's "signature"
230+
"D402",
231+
# First word of the first line should be properly capitalized
232+
"D403",
233+
# No blank lines allowed between a section header and its content
234+
"D412",
235+
# `try`-`except` within a loop incurs performance overhead
236+
"PERF203",
237+
# Too many return statements
238+
"PLR0911",
239+
# Too many branches
240+
"PLR0912",
241+
# Too many arguments in function definition
242+
"PLR0913",
243+
# Too many statements
244+
"PLR0915",
245+
# Magic value used in comparison
246+
"PLR2004",
247+
# String contains ambiguous {}.
248+
"RUF001",
249+
# Docstring contains ambiguous {}.
250+
"RUF002",
251+
# Comment contains ambiguous {}.
252+
"RUF003",
253+
# Mutable class attributes should be annotated with `typing.ClassVar`
254+
"RUF012",
255+
# Use of `assert` detected
256+
"S101",
257+
258+
# TODO
259+
"A001",
260+
"A002",
261+
"B006",
262+
"B007",
263+
"B011",
264+
"B028",
265+
"B904",
266+
"B905",
267+
"C401",
268+
"C408",
269+
"PERF401",
270+
"PGH003",
271+
"PIE810",
272+
"PLC0206",
273+
"PLC0415",
274+
"PLW2901",
275+
"PT003",
276+
"PT011",
277+
"PT015",
278+
"PT017",
279+
"PYI049",
280+
"RUF005",
281+
"RUF059",
282+
"S324",
283+
"S603",
284+
"SIM102",
285+
"SIM103",
286+
"SIM105",
287+
"SIM108",
288+
"SIM211",
289+
"UP007",
290+
"UP031",
291+
]
292+
293+
[tool.ruff.lint.isort]
294+
split-on-trailing-comma = false
295+
296+
[tool.ruff.lint.pydocstyle]
297+
convention = "pep257"

scrapy_zyte_api/__init__.py

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77

88
# Register web-poet serializers
99
from . import _serialization # noqa: F401
10-
1110
from ._annotations import ExtractFrom, actions, custom_attrs
1211
from ._middlewares import (
1312
ScrapyZyteAPIDownloaderMiddleware,
@@ -51,25 +50,25 @@
5150
session_config_registry = _session_config_registry
5251

5352
__all__ = [
54-
"ExtractFrom",
53+
"SESSION_AGGRESSIVE_RETRY_POLICY",
54+
"SESSION_DEFAULT_RETRY_POLICY",
5555
"Actions",
56+
"Addon",
57+
"ExtractFrom",
5658
"Geolocation",
57-
"Screenshot",
59+
"LocationSessionConfig",
5860
"ScrapyZyteAPIDownloadHandler",
59-
"ScrapyZyteAPIRequestFingerprinter",
6061
"ScrapyZyteAPIDownloaderMiddleware",
6162
"ScrapyZyteAPIRefererSpiderMiddleware",
62-
"ScrapyZyteAPISpiderMiddleware",
63+
"ScrapyZyteAPIRequestFingerprinter",
6364
"ScrapyZyteAPISessionDownloaderMiddleware",
64-
"Addon",
65+
"ScrapyZyteAPISpiderMiddleware",
66+
"Screenshot",
67+
"SessionConfig",
6568
"actions",
6669
"custom_attrs",
6770
"get_request_session_id",
6871
"is_session_init_request",
6972
"session_config",
7073
"session_config_registry",
71-
"LocationSessionConfig",
72-
"SessionConfig",
73-
"SESSION_DEFAULT_RETRY_POLICY",
74-
"SESSION_AGGRESSIVE_RETRY_POLICY",
7574
]

scrapy_zyte_api/_annotations.py

Lines changed: 32 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
from collections.abc import Iterable
12
from enum import Enum
2-
from typing import Any, Dict, FrozenSet, Iterable, List, Optional, Tuple, TypedDict
3+
from typing import Any, TypedDict
34

45

56
class ExtractFrom(str, Enum):
@@ -18,48 +19,48 @@ class ExtractFrom(str, Enum):
1819
class _Selector(TypedDict, total=False):
1920
type: str
2021
value: str
21-
state: Optional[str]
22+
state: str | None
2223

2324

2425
class Action(TypedDict, total=False):
2526
action: str
26-
address: Optional[dict]
27-
args: Optional[dict]
28-
button: Optional[str]
29-
delay: Optional[float]
30-
id: Optional[str]
31-
key: Optional[str]
32-
keyword: Optional[str]
33-
left: Optional[int]
34-
maxPageHeight: Optional[int]
35-
maxScrollCount: Optional[int]
36-
maxScrollDelay: Optional[float]
37-
onError: Optional[str]
38-
options: Optional[dict]
39-
selector: Optional[_Selector]
40-
source: Optional[str]
41-
text: Optional[str]
42-
timeout: Optional[float]
43-
top: Optional[int]
44-
url: Optional[str]
45-
urlMatchingOptions: Optional[str]
46-
urlPattern: Optional[str]
47-
values: Optional[List[str]]
48-
waitForNavigationTimeout: Optional[float]
49-
waitUntil: Optional[str]
27+
address: dict | None
28+
args: dict | None
29+
button: str | None
30+
delay: float | None
31+
id: str | None
32+
key: str | None
33+
keyword: str | None
34+
left: int | None
35+
maxPageHeight: int | None
36+
maxScrollCount: int | None
37+
maxScrollDelay: float | None
38+
onError: str | None
39+
options: dict | None
40+
selector: _Selector | None
41+
source: str | None
42+
text: str | None
43+
timeout: float | None
44+
top: int | None
45+
url: str | None
46+
urlMatchingOptions: str | None
47+
urlPattern: str | None
48+
values: list[str] | None
49+
waitForNavigationTimeout: float | None
50+
waitUntil: str | None
5051

5152

5253
class _ActionResult(TypedDict, total=False):
5354
action: str
5455
elapsedTime: float
5556
status: str
56-
error: Optional[str]
57+
error: str | None
5758

5859

5960
def make_hashable(obj: Any) -> Any:
6061
"""Converts input into hashable form, to use in ``Annotated``."""
6162
if isinstance(obj, (tuple, list)):
62-
return tuple((make_hashable(e) for e in obj))
63+
return tuple(make_hashable(e) for e in obj)
6364

6465
if isinstance(obj, dict):
6566
return frozenset((make_hashable(k), make_hashable(v)) for k, v in obj.items())
@@ -78,15 +79,15 @@ def _from_hashable(obj: Any) -> Any:
7879
return obj
7980

8081

81-
def actions(value: Iterable[Action]) -> Tuple[Any, ...]:
82+
def actions(value: Iterable[Action]) -> tuple[Any, ...]:
8283
"""Convert an iterable of :class:`~scrapy_zyte_api.Action` dicts into a hashable value."""
8384
# both lists and dicts are not hashable and we need dep types to be hashable
8485
return tuple(make_hashable(action) for action in value)
8586

8687

8788
def custom_attrs(
88-
input: Dict[str, Any], options: Optional[Dict[str, Any]] = None
89-
) -> Tuple[FrozenSet[Any], Optional[FrozenSet[Any]]]:
89+
input: dict[str, Any], options: dict[str, Any] | None = None
90+
) -> tuple[frozenset[Any], frozenset[Any] | None]:
9091
input_wrapped = make_hashable(input)
9192
options_wrapped = make_hashable(options) if options else None
9293
return input_wrapped, options_wrapped

scrapy_zyte_api/_cookies.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
from http.cookiejar import Cookie
2-
from typing import Any, Dict, List, Optional
2+
from typing import Any
33
from urllib.parse import urlparse
44

55
from scrapy.http import Request
66
from scrapy.http.cookies import CookieJar
77

88

9-
def _get_cookie_jar(request: Request, cookie_jars: Dict[Any, CookieJar]) -> CookieJar:
9+
def _get_cookie_jar(request: Request, cookie_jars: dict[Any, CookieJar]) -> CookieJar:
1010
jar_id = request.meta.get("cookiejar")
1111
return cookie_jars[jar_id]
1212

@@ -24,9 +24,9 @@ def _get_cookie_domain(cookie, url):
2424

2525

2626
def _process_cookies(
27-
api_response: Dict[str, Any],
27+
api_response: dict[str, Any],
2828
request: Request,
29-
cookie_jars: Optional[Dict[Any, CookieJar]],
29+
cookie_jars: dict[Any, CookieJar] | None,
3030
):
3131
if not cookie_jars:
3232
return
@@ -64,7 +64,7 @@ def _process_cookies(
6464

6565

6666
def _get_all_cookies(
67-
request: Request, cookie_jars: Dict[Any, CookieJar]
68-
) -> List[Cookie]:
67+
request: Request, cookie_jars: dict[Any, CookieJar]
68+
) -> list[Cookie]:
6969
cookie_jar = _get_cookie_jar(request, cookie_jars)
7070
return list(cookie_jar.jar)

scrapy_zyte_api/_page_inputs.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
from base64 import b64decode
2-
from typing import List, Optional
32

43
import attrs
54

@@ -15,7 +14,7 @@ class Actions:
1514
"""
1615

1716
#: Results of actions.
18-
results: Optional[List[_ActionResult]]
17+
results: list[_ActionResult] | None
1918

2019

2120
@attrs.define
@@ -26,8 +25,6 @@ class Geolocation:
2625
<geolocation>`.
2726
"""
2827

29-
pass
30-
3128

3229
@attrs.define
3330
class Screenshot:

0 commit comments

Comments (0)