@@ -799,7 +799,7 @@ async def test_max_requests_per_crawl() -> None:
 
     # Set max_concurrency to 1 to ensure testing max_requests_per_crawl accurately
     crawler = BasicCrawler(
-        concurrency_settings=ConcurrencySettings(max_concurrency=1),
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
         max_requests_per_crawl=3,
     )
 
@@ -820,7 +820,7 @@ async def test_max_crawl_depth() -> None:
 
     # Set max_concurrency to 1 to ensure testing max_requests_per_crawl accurately
     crawler = BasicCrawler(
-        concurrency_settings=ConcurrencySettings(max_concurrency=1),
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
         max_crawl_depth=2,
     )
 
@@ -859,7 +859,10 @@ async def test_abort_on_error(
 ) -> None:
     starts_urls = []
 
-    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(max_concurrency=1), abort_on_error=True)
+    crawler = BasicCrawler(
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
+        abort_on_error=True,
+    )
 
     @crawler.router.default_handler
     async def handler(context: BasicCrawlingContext) -> None:
@@ -991,7 +994,7 @@ async def test_crawler_manual_stop() -> None:
     processed_urls = []
 
     # Set max_concurrency to 1 to ensure testing urls are visited one by one in order.
-    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(max_concurrency=1))
+    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1))
 
     @crawler.router.default_handler
     async def handler(context: BasicCrawlingContext) -> None:
@@ -1018,8 +1021,8 @@ async def test_crawler_multiple_stops_in_parallel() -> None:
     ]
     processed_urls = []
 
-    # Set max_concurrency to 2 to ensure two urls are being visited in parallel.
-    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(max_concurrency=2))
+    # Set concurrency to 2 to ensure two urls are being visited in parallel.
+    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(desired_concurrency=2, max_concurrency=2))
 
     both_handlers_started = asyncio.Barrier(2)  # type:ignore[attr-defined]  # Test is skipped in older Python versions.
     only_one_handler_at_a_time = asyncio.Semaphore(1)
@@ -1298,7 +1301,7 @@ async def test_keep_alive(
         keep_alive=keep_alive,
         max_requests_per_crawl=max_requests_per_crawl,
         # If more request can run in parallel, then max_requests_per_crawl is not deterministic.
-        concurrency_settings=ConcurrencySettings(max_concurrency=1),
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
     )
     mocked_handler = Mock()
 
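For orientation, here is a minimal sketch (not part of the diff above) of the pattern these tests converge on: pinning both desired_concurrency and max_concurrency so requests are processed strictly one at a time, which keeps limits such as max_requests_per_crawl deterministic. The ConcurrencySettings and BasicCrawler parameters come from the diff itself; the import paths, URLs, and handler body are assumptions for illustration and may differ between crawlee versions.

# Illustrative sketch, not the test code itself.
# Import paths assume a recent crawlee release; adjust for older versions.
import asyncio

from crawlee import ConcurrencySettings
from crawlee.crawlers import BasicCrawler, BasicCrawlingContext


async def main() -> None:
    processed_urls: list[str] = []

    # Pin both desired and maximum concurrency to 1 so requests run one at a
    # time; max_requests_per_crawl=3 then stops the crawl deterministically.
    crawler = BasicCrawler(
        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
        max_requests_per_crawl=3,
    )

    @crawler.router.default_handler
    async def handler(context: BasicCrawlingContext) -> None:
        # Record the visit order, mirroring what the tests assert on.
        processed_urls.append(context.request.url)

    await crawler.run(['https://example.com/', 'https://example.com/a', 'https://example.com/b'])
    print(processed_urls)


if __name__ == '__main__':
    asyncio.run(main())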