
Commit b41ab81

chore(deps): update dependency mypy to ~=1.19.0 (#1586)
Co-authored-by: Vlada Dusek <[email protected]>
1 parent 991873f commit b41ab81

7 files changed: +120, −46 lines

docs/deployment/code_examples/google/cloud_run_example.py

Lines changed: 1 addition & 1 deletion

@@ -9,7 +9,7 @@
 from crawlee.storage_clients import MemoryStorageClient


-@get('/')
+@get('/')  # type: ignore[untyped-decorator]
 async def main() -> str:
     """The crawler entry point that will be called when the HTTP endpoint is accessed."""
     # highlight-start
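
Aside (not part of the commit): mypy reports an `untyped-decorator` error when `disallow_untyped_decorators` is enabled and a typed function is wrapped by a decorator mypy cannot see types for; the ignores added throughout this commit suppress exactly that code at each affected decorator. A minimal sketch of the pattern, using a hypothetical decorator rather than anything from Crawlee:

# Hypothetical example; `log_calls` is illustrative, not from the Crawlee codebase.
def log_calls(func):  # no annotations, so mypy treats the decorator as untyped
    def wrapper(*args, **kwargs):
        print(f'calling {func.__name__}')
        return func(*args, **kwargs)
    return wrapper


# With `disallow_untyped_decorators = true`, mypy would flag the decorated line;
# the error-code-specific ignore suppresses only `untyped-decorator`, nothing else.
@log_calls  # type: ignore[untyped-decorator]
def greet(name: str) -> str:
    return f'Hello, {name}!'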

docs/deployment/code_examples/google/google_example.py

Lines changed: 2 additions & 5 deletions

@@ -6,10 +6,7 @@
 import functions_framework
 from flask import Request, Response

-from crawlee.crawlers import (
-    BeautifulSoupCrawler,
-    BeautifulSoupCrawlingContext,
-)
+from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext
 from crawlee.storage_clients import MemoryStorageClient


@@ -51,7 +48,7 @@ async def request_handler(context: BeautifulSoupCrawlingContext) -> None:
         # highlight-end


-@functions_framework.http
+@functions_framework.http  # type: ignore[untyped-decorator]
 def crawlee_run(request: Request) -> Response:
     # You can pass data to your crawler using `request`
     function_id = request.headers['Function-Execution-Id']

docs/guides/code_examples/running_in_web_server/server.py

Lines changed: 2 additions & 2 deletions

@@ -14,7 +14,7 @@
 app = FastAPI(lifespan=lifespan, title='Crawler app')


-@app.get('/', response_class=HTMLResponse)
+@app.get('/', response_class=HTMLResponse)  # type: ignore[untyped-decorator]
 def index() -> str:
     return """
     <!DOCTYPE html>

@@ -32,7 +32,7 @@ def index() -> str:
     """


-@app.get('/scrape')
+@app.get('/scrape')  # type: ignore[untyped-decorator]
 async def scrape_url(request: Request, url: str | None = None) -> dict:
     if not url:
         return {'url': 'missing', 'scrape result': 'no results'}

pyproject.toml

Lines changed: 1 addition & 1 deletion

@@ -101,7 +101,7 @@ dev = [
     "build<2.0.0", # For e2e tests.
     "dycw-pytest-only<3.0.0",
     "fakeredis[probabilistic,json,lua]<3.0.0",
-    "mypy~=1.18.0",
+    "mypy~=1.19.0",
     "pre-commit<5.0.0",
     "proxy-py<3.0.0",
     "pydoc-markdown<5.0.0",

tests/unit/_utils/test_system.py

Lines changed: 3 additions & 0 deletions

@@ -54,6 +54,7 @@ def no_extra_memory_child(ready: synchronize.Barrier, measured: synchronize.Barr

 def extra_memory_child(ready: synchronize.Barrier, measured: synchronize.Barrier) -> None:
     memory = SharedMemory(size=extra_memory_size, create=True)
+    assert memory.buf is not None
     memory.buf[:] = bytearray([255 for _ in range(extra_memory_size)])
     print(f'Using the memory... {memory.buf[-1]}')
     ready.wait()

@@ -64,6 +65,7 @@ def extra_memory_child(ready: synchronize.Barrier, measured: synchronize.Barrier
 def shared_extra_memory_child(
     ready: synchronize.Barrier, measured: synchronize.Barrier, memory: SharedMemory
 ) -> None:
+    assert memory.buf is not None
     print(f'Using the memory... {memory.buf[-1]}')
     ready.wait()
     measured.wait()

@@ -79,6 +81,7 @@ def get_additional_memory_estimation_while_running_processes(

     if use_shared_memory:
         shared_memory = SharedMemory(size=extra_memory_size, create=True)
+        assert shared_memory.buf is not None
         shared_memory.buf[:] = bytearray([255 for _ in range(extra_memory_size)])
         extra_args = [shared_memory]
     else:
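
Aside (not part of the commit): the added asserts read as Optional narrowing for mypy, presumably because the typeshed stubs bundled with newer mypy type `SharedMemory.buf` as `memoryview | None`. A minimal sketch of the pattern under that assumption:

from multiprocessing.shared_memory import SharedMemory


def first_byte(memory: SharedMemory) -> int:
    # Assumption: `memory.buf` is typed as `memoryview | None`, so mypy rejects
    # indexing it until the None case is ruled out; the assert narrows the type.
    assert memory.buf is not None
    return memory.buf[0]


shm = SharedMemory(size=16, create=True)
try:
    print(first_byte(shm))
finally:
    shm.close()
    shm.unlink()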

tests/unit/crawlers/_basic/test_basic_crawler.py

Lines changed: 1 addition & 1 deletion

@@ -305,7 +305,7 @@ async def request_handler(context: BasicCrawlingContext) -> None:
         raise RuntimeError('Arbitrary crash for testing purposes')

     # Apply one of the handlers
-    @getattr(crawler, handler)  # type:ignore[misc]  # Untyped decorator is ok to make the test concise
+    @getattr(crawler, handler)  # type: ignore[untyped-decorator]
     async def handler_implementation(context: BasicCrawlingContext, error: Exception) -> None:
         await context.push_data(test_data)
         await context.add_requests(requests=[test_request], rq_alias=rq_alias)
