@@ -1454,16 +1454,16 @@ def _folder_with_files(
         target_folder,
     )
 
-    s3_keys_to_files = get_files_info_from_path(folder_path)
+    relative_names_to_paths = get_files_info_from_path(folder_path)
 
-    yield s3_keys_to_files
+    yield relative_names_to_paths
 
-    for file in s3_keys_to_files.values():
+    for file in relative_names_to_paths.values():
         file.unlink()
 
 
 @pytest.fixture
-def files_stored_locally(
+def path_local_files_for_archive(
     tmp_path: Path,
     create_folder_of_size_with_multiple_files: Callable[
         [ByteSize, ByteSize, ByteSize, Path | None], Path
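Note: get_files_info_from_path is a helper defined elsewhere in this test module. Judging by how the renamed mapping is used (its keys become S3 object keys and names inside the archive), it plausibly walks a folder and maps each file's relative name to its absolute path. A minimal sketch under that assumption, not the PR's actual implementation:

from pathlib import Path

def get_files_info_from_path(folder_path: Path) -> dict[str, Path]:
    # Assumed behavior: map "relative/posix/name" -> absolute Path
    # for every file found under folder_path.
    return {
        path.relative_to(folder_path).as_posix(): path
        for path in folder_path.rglob("*")
        if path.is_file()
    }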
@@ -1475,7 +1475,7 @@ def files_stored_locally(
 
 
 @pytest.fixture
-async def files_stored_in_s3(
+async def path_s3_files_for_archive(
     tmp_path: Path,
     create_folder_of_size_with_multiple_files: Callable[
         [ByteSize, ByteSize, ByteSize, Path | None], Path
@@ -1486,20 +1486,20 @@ async def files_stored_in_s3(
     dir_path = tmp_path / "stored_in_s3"
     with _folder_with_files(
         create_folder_of_size_with_multiple_files, dir_path
-    ) as s3_keys_to_files:
+    ) as relative_names_to_paths:
         await limited_gather(
             *(
                 s3_client.upload_file(
-                    Filename=f"{file_path}", Bucket=with_s3_bucket, Key=s3_key
+                    Filename=f"{file}", Bucket=with_s3_bucket, Key=s3_object_key
                 )
-                for s3_key, file_path in s3_keys_to_files.items()
+                for s3_object_key, file in relative_names_to_paths.items()
             ),
             limit=10,
         )
         yield dir_path
 
         await delete_all_object_versions(
-            s3_client, with_s3_bucket, s3_keys_to_files.keys()
+            s3_client, with_s3_bucket, relative_names_to_paths.keys()
         )
 
 
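Note: limited_gather caps how many upload_file coroutines run at once (limit=10 here), so the fixture does not open an unbounded number of concurrent S3 connections. A rough standalone equivalent of that behavior, assuming a semaphore-based implementation; the project's actual helper may differ:

import asyncio
from collections.abc import Awaitable
from typing import TypeVar

T = TypeVar("T")

async def limited_gather_sketch(*awaitables: Awaitable[T], limit: int) -> list[T]:
    # Allow at most `limit` awaitables to be in flight at any moment.
    semaphore = asyncio.Semaphore(limit)

    async def _bounded(awaitable: Awaitable[T]) -> T:
        async with semaphore:
            return await awaitable

    return await asyncio.gather(*(_bounded(a) for a in awaitables))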
@@ -1540,8 +1540,8 @@ def _progress_cb(*args, **kwargs) -> None:
 
 async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_then_upload_to_s3(
     mocked_s3_server_envs: EnvVarsDict,
-    files_stored_locally: Path,
-    files_stored_in_s3: Path,
+    path_local_files_for_archive: Path,
+    path_s3_files_for_archive: Path,
     archive_download_path: Path,
     extracted_archive_path: Path,
     simcore_s3_api: SimcoreS3API,
@@ -1560,7 +1560,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
 
     archive_file_entries: ArchiveEntries = []
 
-    local_files = get_files_info_from_path(files_stored_locally)
+    local_files = get_files_info_from_path(path_local_files_for_archive)
     for file_name, file_path in local_files.items():
         archive_file_entries.append(
             (
@@ -1569,7 +1569,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
             )
         )
 
-    s3_files = get_files_info_from_path(files_stored_in_s3)
+    s3_files = get_files_info_from_path(path_s3_files_for_archive)
 
     for s3_object_key in s3_files:
         archive_file_entries.append(
@@ -1614,9 +1614,7 @@ async def test_workflow_compress_s3_objects_and_local_files_in_a_single_archive_
 
     # 4. compare
     print("comparing files")
-    all_files_in_zip = get_files_info_from_path(
-        files_stored_locally
-    ) | get_files_info_from_path(files_stored_in_s3)
+    all_files_in_zip = get_files_info_from_path(path_local_files_for_archive) | s3_files
 
     await assert_same_contents(
         all_files_in_zip, get_files_info_from_path(extracted_archive_path)
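Note: besides reusing the s3_files mapping computed earlier instead of calling get_files_info_from_path a second time, the new line relies on PEP 584 dict union: local_map | s3_map merges the two relative-name-to-path mappings, with the right-hand side winning on duplicate keys (which cannot occur here, since the two fixture folders are distinct). A toy illustration with made-up names:

from pathlib import Path

local_map = {"a.txt": Path("/local/a.txt")}
s3_map = {"b.txt": Path("/s3/b.txt")}
merged = local_map | s3_map  # PEP 584 dict union, Python 3.9+
assert merged.keys() == {"a.txt", "b.txt"}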