 from concurrent.futures import Future
 from concurrent.futures import ThreadPoolExecutor
 from typing import Any
-from typing import Dict
-from typing import List
 from typing import Optional
-from typing import Set
 from typing import Union
 
 import pandas as pd
@@ -66,7 +63,7 @@ class AsyncComputationResult:
   def __init__(
       self,
       future: Future,
-      pcolls: Set[beam.pvalue.PCollection],
+      pcolls: set[beam.pvalue.PCollection],
       user_pipeline: beam.Pipeline,
       recording_manager: 'RecordingManager',
   ):
@@ -323,7 +320,7 @@ class Recording:
   def __init__(
       self,
       user_pipeline: beam.Pipeline,
-      pcolls: List[beam.pvalue.PCollection],  # noqa: F821
+      pcolls: list[beam.pvalue.PCollection],  # noqa: F821
       result: 'beam.runner.PipelineResult',
       max_n: int,
       max_duration_secs: float,
@@ -416,7 +413,7 @@ def wait_until_finish(self) -> None:
     self._mark_computed.join()
     return self._result.state
 
-  def describe(self) -> Dict[str, int]:
+  def describe(self) -> dict[str, int]:
     """Returns a dictionary describing the cache and recording."""
     cache_manager = ie.current_env().get_cache_manager(self._user_pipeline)
 
@@ -431,7 +428,7 @@ def __init__(
       self,
       user_pipeline: beam.Pipeline,
       pipeline_var: str = None,
-      test_limiters: List['Limiter'] = None) -> None:  # noqa: F821
+      test_limiters: list['Limiter'] = None) -> None:  # noqa: F821
 
     self.user_pipeline: beam.Pipeline = user_pipeline
     self.pipeline_var: str = pipeline_var if pipeline_var else ''
@@ -440,12 +437,12 @@ def __init__(
     self._test_limiters = test_limiters if test_limiters else []
     self._executor = ThreadPoolExecutor(max_workers=os.cpu_count())
     self._env = ie.current_env()
-    self._async_computations: Dict[str, AsyncComputationResult] = {}
+    self._async_computations: dict[str, AsyncComputationResult] = {}
     self._pipeline_graph = PipelineGraph(self.user_pipeline)
 
   def _execute_pipeline_fragment(
       self,
-      pcolls_to_compute: Set[beam.pvalue.PCollection],
+      pcolls_to_compute: set[beam.pvalue.PCollection],
       async_result: Optional['AsyncComputationResult'] = None,
       runner: runner.PipelineRunner = None,
       options: pipeline_options.PipelineOptions = None,
@@ -483,7 +480,7 @@ def _execute_pipeline_fragment(
 
   def _run_async_computation(
       self,
-      pcolls_to_compute: Set[beam.pvalue.PCollection],
+      pcolls_to_compute: set[beam.pvalue.PCollection],
       async_result: 'AsyncComputationResult',
       wait_for_inputs: bool,
       runner: runner.PipelineRunner = None,
@@ -522,7 +519,7 @@ def _run_async_computation(
     # finally:
     #   self._env.unmark_pcollection_computing(pcolls_to_compute)
 
-  def _watch(self, pcolls: List[beam.pvalue.PCollection]) -> None:
+  def _watch(self, pcolls: list[beam.pvalue.PCollection]) -> None:
     """Watch any pcollections not being watched.
 
     This allows for the underlying caching layer to identify the PCollection as
@@ -592,7 +589,7 @@ def cancel(self: None) -> None:
     # evict the BCJ after they complete.
     ie.current_env().evict_background_caching_job(self.user_pipeline)
 
-  def describe(self) -> Dict[str, int]:
+  def describe(self) -> dict[str, int]:
     """Returns a dictionary describing the cache and recording."""
 
     cache_manager = ie.current_env().get_cache_manager(self.user_pipeline)
@@ -643,7 +640,7 @@ def record_pipeline(self) -> bool:
 
   def compute_async(
       self,
-      pcolls: Set[beam.pvalue.PCollection],
+      pcolls: set[beam.pvalue.PCollection],
       wait_for_inputs: bool = True,
       blocking: bool = False,
       runner: runner.PipelineRunner = None,
@@ -721,7 +718,7 @@ def _get_pcoll_id_map(self):
 
   def _get_all_dependencies(
       self,
-      pcolls: Set[beam.pvalue.PCollection]) -> Set[beam.pvalue.PCollection]:
+      pcolls: set[beam.pvalue.PCollection]) -> set[beam.pvalue.PCollection]:
     """Gets all upstream PCollection dependencies
     for the given set of PCollections."""
     if not self._pipeline_graph:
@@ -780,13 +777,13 @@ def _get_all_dependencies(
 
   def _wait_for_dependencies(
       self,
-      pcolls: Set[beam.pvalue.PCollection],
+      pcolls: set[beam.pvalue.PCollection],
       async_result: Optional[AsyncComputationResult] = None,
   ) -> bool:
     """Waits for any dependencies of the given
     PCollections that are currently being computed."""
     dependencies = self._get_all_dependencies(pcolls)
-    computing_deps: Dict[beam.pvalue.PCollection, AsyncComputationResult] = {}
+    computing_deps: dict[beam.pvalue.PCollection, AsyncComputationResult] = {}
 
     for dep in dependencies:
       if self._env.is_pcollection_computing(dep):
@@ -829,7 +826,7 @@ def _wait_for_dependencies(
 
   def record(
       self,
-      pcolls: List[beam.pvalue.PCollection],
+      pcolls: list[beam.pvalue.PCollection],
       *,
       max_n: int,
       max_duration: Union[int, str],
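Every hunk above applies the same pattern: the built-in generics `dict`, `list`, and `set` (subscriptable since Python 3.9 under PEP 585) replace the `typing.Dict`, `typing.List`, and `typing.Set` aliases, while `Optional` and `Union` remain imported from `typing`. A minimal sketch of the resulting annotation style, using hypothetical helper names rather than anything from this module:

from typing import Optional

# Hypothetical helpers illustrating built-in generic annotations (PEP 585).
def describe_cache(sizes: dict[str, int]) -> list[str]:
  """Formats cache sizes, e.g. {'pcoll_1': 10} -> ['pcoll_1: 10']."""
  return [f'{name}: {size}' for name, size in sizes.items()]

def merge_ids(
    computed: set[str],
    pending: Optional[set[str]] = None) -> set[str]:
  """Unions computed and pending ids; pending may be omitted."""
  return computed | (pending or set())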