@@ -361,6 +361,7 @@ async def function_raising_cancelled_error():
 
 @pytest.mark.heavy_load
 async def test_with_large_capacity(
+    with_slow_redis_socket_timeout: None,
     redis_client_sdk: RedisClientSDK,
     semaphore_name: str,
 ):
@@ -377,16 +378,18 @@ async def test_with_large_capacity(
         blocking=True,
         blocking_timeout=None,
     )
-    async def limited_function() -> None:
+    async def limited_function(task_id: int) -> None:
         nonlocal concurrent_count, max_concurrent
         concurrent_count += 1
         max_concurrent = max(max_concurrent, concurrent_count)
-        with log_context(logging.INFO, f"task with {concurrent_count=}"):
+        with log_context(logging.INFO, f"{task_id=}") as ctx:
+            ctx.logger.info("started %s with %s", task_id, concurrent_count)
             await asyncio.sleep(sleep_time_s)
+            ctx.logger.info("done %s with %s", task_id, concurrent_count)
         concurrent_count -= 1
 
     # Start tasks equal to the large capacity
-    tasks = [asyncio.create_task(limited_function()) for _ in range(num_tasks)]
+    tasks = [asyncio.create_task(limited_function(i)) for i in range(num_tasks)]
     done, pending = await asyncio.wait(
         tasks,
         timeout=float(num_tasks) / float(large_capacity) * 10.0 * float(sleep_time_s),
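
For reference, below is a minimal, self-contained sketch of the concurrency-capping pattern this test exercises. A plain asyncio.Semaphore stands in for the Redis-backed distributed semaphore decorator, and CAPACITY, NUM_TASKS, and SLEEP_TIME_S are illustrative placeholders, not the test's real fixtures:

import asyncio

CAPACITY = 10       # illustrative stand-in for large_capacity
NUM_TASKS = 50      # illustrative stand-in for num_tasks
SLEEP_TIME_S = 0.1  # illustrative stand-in for sleep_time_s

# Local stand-in for the Redis-backed distributed semaphore.
semaphore = asyncio.Semaphore(CAPACITY)
concurrent_count = 0
max_concurrent = 0

async def limited_function(task_id: int) -> None:
    global concurrent_count, max_concurrent
    async with semaphore:  # the decorator wraps each call in an acquire/release like this
        concurrent_count += 1
        max_concurrent = max(max_concurrent, concurrent_count)
        await asyncio.sleep(SLEEP_TIME_S)  # simulated work
        concurrent_count -= 1

async def main() -> None:
    tasks = [asyncio.create_task(limited_function(i)) for i in range(NUM_TASKS)]
    await asyncio.gather(*tasks)
    # concurrency must never exceed the semaphore's capacity
    assert max_concurrent <= CAPACITY

asyncio.run(main())

Tracking max_concurrent inside the critical section and asserting it never exceeds the capacity is what lets the test detect over-admission by the semaphore; the diff's task_id parameter only makes the per-task log lines distinguishable.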