@@ -799,7 +799,7 @@ async def test_max_requests_per_crawl() -> None:
 
     # Set max_concurrency to 1 to ensure testing max_requests_per_crawl accurately
     crawler = BasicCrawler(
-        concurrency_settings=ConcurrencySettings(max_concurrency=1),
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
         max_requests_per_crawl=3,
     )
 
@@ -820,7 +820,7 @@ async def test_max_crawl_depth() -> None:
 
     # Set max_concurrency to 1 to ensure testing max_requests_per_crawl accurately
     crawler = BasicCrawler(
-        concurrency_settings=ConcurrencySettings(max_concurrency=1),
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
         max_crawl_depth=2,
     )
 
@@ -859,7 +859,10 @@ async def test_abort_on_error(
 ) -> None:
     starts_urls = []
 
-    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(max_concurrency=1), abort_on_error=True)
+    crawler = BasicCrawler(
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
+        abort_on_error=True,
+    )
 
     @crawler.router.default_handler
     async def handler(context: BasicCrawlingContext) -> None:
@@ -991,7 +994,7 @@ async def test_crawler_manual_stop() -> None:
     processed_urls = []
 
     # Set max_concurrency to 1 to ensure testing urls are visited one by one in order.
-    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(max_concurrency=1))
+    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1))
 
     @crawler.router.default_handler
     async def handler(context: BasicCrawlingContext) -> None:
@@ -1018,8 +1021,8 @@ async def test_crawler_multiple_stops_in_parallel() -> None:
     ]
     processed_urls = []
 
-    # Set max_concurrency to 2 to ensure two urls are being visited in parallel.
-    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(max_concurrency=2))
+    # Set concurrency to 2 to ensure two urls are being visited in parallel.
+    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(desired_concurrency=2, max_concurrency=2))
 
     both_handlers_started = asyncio.Barrier(2)  # type:ignore[attr-defined]  # Test is skipped in older Python versions.
     only_one_handler_at_a_time = asyncio.Semaphore(1)
@@ -1298,7 +1301,7 @@ async def test_keep_alive(
         keep_alive=keep_alive,
         max_requests_per_crawl=max_requests_per_crawl,
         # If more request can run in parallel, then max_requests_per_crawl is not deterministic.
-        concurrency_settings=ConcurrencySettings(max_concurrency=1),
+        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
     )
     mocked_handler = Mock()
 
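
A minimal standalone sketch of the pattern this diff applies across the tests: pinning both `desired_concurrency` and `max_concurrency` to 1 so the autoscaled pool never runs more than one handler at a time, which keeps `max_requests_per_crawl` assertions deterministic. This is an illustrative sketch, not code from the PR: the URLs, `main`, and `processed_urls` names are made up, and the import paths follow recent crawlee releases and may need adjusting to match the version in this repo.

```python
import asyncio

# Assumed import paths (recent crawlee releases); adjust to the repo's version if they differ.
from crawlee import ConcurrencySettings
from crawlee.crawlers import BasicCrawler, BasicCrawlingContext


async def main() -> None:
    # Pin both the starting (desired) and the upper-bound concurrency to 1 so
    # requests are handled strictly one at a time.
    crawler = BasicCrawler(
        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
        max_requests_per_crawl=3,
    )

    processed_urls: list[str] = []

    @crawler.router.default_handler
    async def handler(context: BasicCrawlingContext) -> None:
        # With concurrency fixed at 1, exactly max_requests_per_crawl requests
        # are processed before the crawler stops, regardless of how many were enqueued.
        processed_urls.append(context.request.url)

    # Hypothetical start URLs for illustration only.
    await crawler.run(
        [
            'https://example.com/1',
            'https://example.com/2',
            'https://example.com/3',
            'https://example.com/4',
        ]
    )
    assert len(processed_urls) == 3


if __name__ == '__main__':
    asyncio.run(main())
```

Passing only `max_concurrency=1` leaves the pool free to start below that value and scale within it, whereas also setting `desired_concurrency=1` fixes the target the pool aims for, which is why the tests set both.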