7 | 7 | from cluster_tools.executors.pickle_ import PickleExecutor |
8 | 8 | from cluster_tools.executors.sequential import SequentialExecutor |
9 | 9 | from cluster_tools.schedulers.cluster_executor import ( |
10 | | - ClusterExecutor, |
11 | | - RemoteOutOfMemoryException, |
12 | | - RemoteResourceLimitException, |
13 | | - RemoteTimeLimitException, |
| 10 | + ClusterExecutor, # noqa: F401 `cluster_tools.schedulers.cluster_executor.ClusterExecutor` imported but unused
| 11 | + RemoteOutOfMemoryException, # noqa: F401 `cluster_tools.schedulers.cluster_executor.RemoteOutOfMemoryException` imported but unused
| 12 | + RemoteResourceLimitException, # noqa: F401 `cluster_tools.schedulers.cluster_executor.RemoteResourceLimitException` imported but unused
| 13 | + RemoteTimeLimitException, # noqa: F401 `cluster_tools.schedulers.cluster_executor.RemoteTimeLimitException` imported but unused
14 | 14 | ) |
15 | 15 | from cluster_tools.schedulers.kube import KubernetesExecutor |
16 | 16 | from cluster_tools.schedulers.pbs import PBSExecutor |
@@ -54,53 +54,45 @@ def _test_valid_multiprocessing() -> None: |
54 | 54 |
55 | 55 |
56 | 56 | @overload |
57 | | -def get_executor(environment: Literal["slurm"], **kwargs: Any) -> SlurmExecutor: |
58 | | - ... |
| 57 | +def get_executor(environment: Literal["slurm"], **kwargs: Any) -> SlurmExecutor: ... |
59 | 58 |
60 | 59 |
61 | 60 | @overload |
62 | | -def get_executor(environment: Literal["pbs"], **kwargs: Any) -> PBSExecutor: |
63 | | - ... |
| 61 | +def get_executor(environment: Literal["pbs"], **kwargs: Any) -> PBSExecutor: ... |
64 | 62 |
65 | 63 |
66 | 64 | @overload |
67 | 65 | def get_executor( |
68 | 66 | environment: Literal["kubernetes"], **kwargs: Any |
69 | | -) -> KubernetesExecutor: |
70 | | - ... |
| 67 | +) -> KubernetesExecutor: ... |
71 | 68 |
72 | 69 |
73 | 70 | @overload |
74 | | -def get_executor(environment: Literal["dask"], **kwargs: Any) -> DaskExecutor: |
75 | | - ... |
| 71 | +def get_executor(environment: Literal["dask"], **kwargs: Any) -> DaskExecutor: ... |
76 | 72 |
77 | 73 |
78 | 74 | @overload |
79 | 75 | def get_executor( |
80 | 76 | environment: Literal["multiprocessing"], **kwargs: Any |
81 | | -) -> MultiprocessingExecutor: |
82 | | - ... |
| 77 | +) -> MultiprocessingExecutor: ... |
83 | 78 |
84 | 79 |
85 | 80 | @overload |
86 | 81 | def get_executor( |
87 | 82 | environment: Literal["sequential"], **kwargs: Any |
88 | | -) -> SequentialExecutor: |
89 | | - ... |
| 83 | +) -> SequentialExecutor: ... |
90 | 84 |
91 | 85 |
92 | 86 | @overload |
93 | 87 | def get_executor( |
94 | 88 | environment: Literal["debug_sequential"], **kwargs: Any |
95 | | -) -> DebugSequentialExecutor: |
96 | | - ... |
| 89 | +) -> DebugSequentialExecutor: ... |
97 | 90 |
98 | 91 |
99 | 92 | @overload |
100 | 93 | def get_executor( |
101 | 94 | environment: Literal["test_pickling"], **kwargs: Any |
102 | | -) -> PickleExecutor: |
103 | | - ... |
| 95 | +) -> PickleExecutor: ... |
104 | 96 |
105 | 97 |
106 | 98 | def get_executor(environment: str, **kwargs: Any) -> "Executor": |
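
Aside (not part of the diff): the `@overload` stubs above let a static type checker narrow the return type of `get_executor` from the `environment` literal, so call sites need no casts. A minimal usage sketch, assuming `get_executor` is exported from the package root as this module suggests, that the executors follow the standard `concurrent.futures.Executor` submit/map protocol, and that the keyword arguments shown are accepted (treat `max_workers` as hypothetical):

from cluster_tools import get_executor


def square(x: int) -> int:
    return x * x


# Inferred as MultiprocessingExecutor from the "multiprocessing" literal,
# so attributes specific to that executor type-check without a cast.
# max_workers is a hypothetical keyword argument used for illustration.
with get_executor("multiprocessing", max_workers=4) as executor:
    print(list(executor.map(square, range(5))))

# The same call with "slurm" is inferred as SlurmExecutor instead.
slurm_executor = get_executor("slurm")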