@@ -1,4 +1,9 @@
-from tests.integration.conftest import MakeActorFunction, RunActorFunction
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .conftest import MakeActorFunction, RunActorFunction
 
 
 async def test_actor_on_platform_max_crawl_depth(
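For reference, the "if TYPE_CHECKING:" guard introduced above keeps typing-only imports out of the runtime import graph; combined with "from __future__ import annotations", annotations are stored as strings, so the guarded names never have to be importable when the module actually runs. A minimal, self-contained sketch of the pattern (the call_twice helper is illustrative, not part of this test suite):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated only by static type checkers, never at import time.
        from collections.abc import Callable


    def call_twice(func: Callable[[], int]) -> int:
        # The annotation is a plain string at runtime, so Callable need not be imported.
        return func() + func()


    print(call_twice(lambda: 21))  # prints 42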
@@ -79,8 +84,7 @@ async def test_actor_on_platform_max_request_retries(
 
     async def main() -> None:
         """The crawler entry point."""
-        from crawlee._types import BasicCrawlingContext
-        from crawlee.crawlers import ParselCrawler, ParselCrawlingContext
+        from crawlee.crawlers import BasicCrawlingContext, ParselCrawler, ParselCrawlingContext
 
         from apify import Actor
 
@@ -99,7 +103,8 @@ async def default_handler(_: ParselCrawlingContext) -> None:
             raise RuntimeError('Some error')
 
         await crawler.run(['http://localhost:8080/'])
-        assert failed_counter == max_retries, f'{failed_counter=}'  # TODO max_retries + 1
+        # https://github.com/apify/crawlee-python/issues/1326, should be max_retries + 1
+        assert failed_counter == max_retries, f'{failed_counter=}'
 
     actor = await make_actor(label='crawler-max-retries', main_func=main)
     run_result = await run_actor(actor)
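The linked issue concerns the expected off-by-one here: with max_request_retries set to N, a request gets one initial attempt plus up to N retries, so a handler that always raises should run N + 1 times, while the test currently observes N. A rough sketch of that counting, with illustrative names (run_with_retries and flaky_handler are not crawlee APIs):

    import asyncio


    async def run_with_retries(handler, max_retries: int) -> int:
        """Call handler once, then retry up to max_retries more times on failure."""
        attempts = 0
        for _ in range(max_retries + 1):  # initial attempt + max_retries retries
            attempts += 1
            try:
                await handler()
            except RuntimeError:
                continue
            return attempts
        return attempts


    async def main() -> None:
        async def flaky_handler() -> None:
            raise RuntimeError('Some error')

        attempts = await run_with_retries(flaky_handler, max_retries=3)
        assert attempts == 3 + 1  # the handler runs max_retries + 1 times in total


    asyncio.run(main())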