Skip to content

Commit fe29fd8

Browse files
authored
chore: Use runtime-evaluated-base-classes for ruff (#711)
This allows removing ruff exceptions without breaking the ruff check. Also, some exceptions were not necessary in the first place, so they are fixed.
1 parent a17b3b1 commit fe29fd8

File tree

14 files changed

+23
-25
lines changed

14 files changed

+23
-25
lines changed

pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -168,6 +168,9 @@ indent-style = "space"
168168
docstring-quotes = "double"
169169
inline-quotes = "single"
170170

171+
[tool.ruff.lint.flake8-type-checking]
172+
runtime-evaluated-base-classes = ["pydantic.BaseModel", "pydantic_settings.BaseSettings"]
173+
171174
[tool.ruff.lint.flake8-builtins]
172175
builtins-ignorelist = ["id"]
173176

src/crawlee/_request.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,22 @@
1-
# ruff: noqa: TCH001, TCH002, TCH003 (because of Pydantic)
2-
31
from __future__ import annotations
42

53
from collections.abc import Iterator, MutableMapping
64
from datetime import datetime
75
from decimal import Decimal
86
from enum import IntEnum
9-
from typing import Annotated, Any, cast
7+
from typing import TYPE_CHECKING, Annotated, Any, cast
108

119
from pydantic import BaseModel, BeforeValidator, ConfigDict, Field, PlainSerializer, PlainValidator, TypeAdapter
12-
from typing_extensions import Self
1310

1411
from crawlee._types import EnqueueStrategy, HttpHeaders, HttpMethod, HttpPayload, JsonSerializable
1512
from crawlee._utils.crypto import crypto_random_object_id
1613
from crawlee._utils.docs import docs_group
1714
from crawlee._utils.requests import compute_unique_key, unique_key_to_request_id
1815
from crawlee._utils.urls import extract_query_params, validate_http_url
1916

17+
if TYPE_CHECKING:
18+
from typing_extensions import Self
19+
2020

2121
class RequestState(IntEnum):
2222
"""Crawlee-specific request handling state."""

src/crawlee/_utils/system.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
# ruff: noqa: TCH001, TCH002, TCH003 (because of Pydantic)
2-
31
from __future__ import annotations
42

53
import os

src/crawlee/base_storage_client/_models.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
# ruff: noqa: TCH001, TCH002, TCH003 (because of Pydantic)
2-
31
from __future__ import annotations
42

53
from datetime import datetime

src/crawlee/beautifulsoup_crawler/_beautifulsoup_crawler.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66

77
from bs4 import BeautifulSoup, Tag
88
from pydantic import ValidationError
9-
from typing_extensions import Unpack
109

1110
from crawlee import EnqueueStrategy
1211
from crawlee._request import BaseRequestData
@@ -20,6 +19,8 @@
2019
from crawlee.http_crawler import HttpCrawlingContext
2120

2221
if TYPE_CHECKING:
22+
from typing_extensions import Unpack
23+
2324
from crawlee._types import BasicCrawlingContext, EnqueueLinksKwargs
2425

2526
BeautifulSoupParser = Literal['html.parser', 'lxml', 'xml', 'html5lib']

src/crawlee/configuration.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
1-
# ruff: noqa: TCH003 TCH002 TCH001
2-
31
from __future__ import annotations
42

53
from datetime import timedelta
6-
from typing import Annotated, Literal
4+
from typing import TYPE_CHECKING, Annotated, Literal
75

86
from pydantic import AliasChoices, BeforeValidator, Field
97
from pydantic_settings import BaseSettings, SettingsConfigDict
10-
from typing_extensions import Self
118

129
from crawlee._utils.docs import docs_group
1310
from crawlee._utils.models import timedelta_ms
1411

12+
if TYPE_CHECKING:
13+
from typing_extensions import Self
14+
1515
__all__ = ['Configuration']
1616

1717

src/crawlee/events/_event_manager.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
from typing import TYPE_CHECKING, TypedDict
1111

1212
from pyee.asyncio import AsyncIOEventEmitter
13-
from typing_extensions import NotRequired
1413

1514
from crawlee._utils.docs import docs_group
1615
from crawlee._utils.recurring_task import RecurringTask
@@ -20,6 +19,8 @@
2019
if TYPE_CHECKING:
2120
from types import TracebackType
2221

22+
from typing_extensions import NotRequired
23+
2324
from crawlee.events._types import EventData, Listener, WrappedListener
2425

2526
logger = getLogger(__name__)

src/crawlee/events/_local_event_manager.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,6 @@
77
from logging import getLogger
88
from typing import TYPE_CHECKING
99

10-
from typing_extensions import Unpack
11-
1210
from crawlee._utils.docs import docs_group
1311
from crawlee._utils.recurring_task import RecurringTask
1412
from crawlee._utils.system import get_cpu_info, get_memory_info
@@ -18,6 +16,8 @@
1816
if TYPE_CHECKING:
1917
from types import TracebackType
2018

19+
from typing_extensions import Unpack
20+
2121
logger = getLogger(__name__)
2222

2323

src/crawlee/events/_types.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
# ruff: noqa: TCH001 TCH002
21
from __future__ import annotations
32

43
from collections.abc import Callable, Coroutine

src/crawlee/http_crawler/_http_crawler.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,15 +3,15 @@
33
import logging
44
from typing import TYPE_CHECKING, AsyncGenerator, Iterable
55

6-
from typing_extensions import Unpack
7-
86
from crawlee._utils.docs import docs_group
97
from crawlee.basic_crawler import BasicCrawler, BasicCrawlerOptions, ContextPipeline
108
from crawlee.errors import SessionError
119
from crawlee.http_clients import HttpxHttpClient
1210
from crawlee.http_crawler._http_crawling_context import HttpCrawlingContext
1311

1412
if TYPE_CHECKING:
13+
from typing_extensions import Unpack
14+
1515
from crawlee._types import BasicCrawlingContext
1616

1717

0 commit comments

Comments
 (0)