from .builder import Builder
from .cwlrdf import printdot, printrdf
from .errors import UnsupportedRequirement, WorkflowException
+from .executors import SingleJobExecutor, MultithreadedJobExecutor
from .load_tool import (FetcherConstructorType, resolve_tool_uri,
                        fetch_document, make_tool, validate_document, jobloaderctx,
                        resolve_overrides, load_overrides)
@@ -64,7 +65,8 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
    parser.add_argument("--no-container", action="store_false", default=True,
                        help="Do not execute jobs in a Docker container, even when specified by the CommandLineTool",
                        dest="use_container")
-
+    parser.add_argument("--executor", type=Text, choices={"single", "parallel"}, default="single",
+                        help="Workflow executor: 'single' (sequential, the default) or 'parallel' (multithreaded)")
    parser.add_argument("--preserve-environment", type=Text, action="append",
                        help="Preserve specific environment variable when running CommandLineTools. May be provided multiple times.",
                        metavar="ENVVAR",
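Note on the new flag: because `choices={"single", "parallel"}` is passed, argparse rejects any other value before `main()` ever sees it, which makes the defensive `else` branch added further down in this diff effectively unreachable. A minimal standalone sketch of the parsing behaviour (using plain `str` in place of the `Text` alias):

```python
import argparse

parser = argparse.ArgumentParser()
# Mirrors the flag added above; `str` stands in for `Text`.
parser.add_argument("--executor", type=str, choices={"single", "parallel"},
                    default="single")

print(parser.parse_args([]).executor)                          # -> single
print(parser.parse_args(["--executor", "parallel"]).executor)  # -> parallel
# parser.parse_args(["--executor", "bogus"]) exits with an "invalid choice" error.
```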
@@ -253,100 +255,6 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
    return parser


-def job_executor(t,  # type: Process
-                 job_order_object,  # type: Dict[Text, Any]
-                 **kwargs  # type: Any
-                 ):
-    # type: (...) -> Tuple[Dict[Text, Any], Text]
-    final_output = []
-    final_status = []
-
-    def output_callback(out, processStatus):
-        final_status.append(processStatus)
-        final_output.append(out)
-
-    if "basedir" not in kwargs:
-        raise WorkflowException("Must provide 'basedir' in kwargs")
-
-    output_dirs = set()
-    finaloutdir = os.path.abspath(kwargs.get("outdir")) if kwargs.get("outdir") else None
-    kwargs["outdir"] = tempfile.mkdtemp(prefix=kwargs["tmp_outdir_prefix"]) if kwargs.get(
-        "tmp_outdir_prefix") else tempfile.mkdtemp()
-    output_dirs.add(kwargs["outdir"])
-    kwargs["mutation_manager"] = MutationManager()
-
-    jobReqs = None
-    if "cwl:requirements" in job_order_object:
-        jobReqs = job_order_object["cwl:requirements"]
-    elif ("cwl:defaults" in t.metadata and "cwl:requirements" in t.metadata["cwl:defaults"]):
-        jobReqs = t.metadata["cwl:defaults"]["cwl:requirements"]
-    if jobReqs:
-        for req in jobReqs:
-            t.requirements.append(req)
-
-    fetch_iter_lock = threading.Lock()
-    threads = set()
-    exceptions = []
-
-    def run_job(job):
-        def runner():
-            try:
-                job.run(**kwargs)
-            except WorkflowException as e:
-                exceptions.append(e)
-            except Exception as e:
-                exceptions.append(WorkflowException(Text(e)))
-
-            threads.remove(thread)
-
-            if fetch_iter_lock.locked():
-                fetch_iter_lock.release()
-
-        thread = threading.Thread(target=runner)
-        thread.daemon = True
-        threads.add(thread)
-        thread.start()
-
-    def wait_for_next_completion():
-        fetch_iter_lock.acquire()
-        fetch_iter_lock.acquire()
-        fetch_iter_lock.release()
-        if exceptions:
-            raise exceptions[0]
-
-    jobiter = t.job(job_order_object, output_callback, **kwargs)
-
-    for r in jobiter:
-        if r:
-            builder = kwargs.get("builder", None)  # type: Builder
-            if builder is not None:
-                r.builder = builder
-            if r.outdir:
-                output_dirs.add(r.outdir)
-            run_job(r)
-        else:
-            if len(threads):
-                wait_for_next_completion()
-            else:
-                _logger.error("Workflow cannot make any more progress.")
-                break
-
-    while len(threads) > 0:
-        wait_for_next_completion()
-
-    if final_output and final_output[0] and finaloutdir:
-        final_output[0] = relocateOutputs(final_output[0], finaloutdir,
-                                          output_dirs, kwargs.get("move_outputs"),
-                                          kwargs["make_fs_access"](""))
-
-    if kwargs.get("rm_tmpdir"):
-        cleanIntermediate(output_dirs)
-
-    if final_output and final_status:
-        return (final_output[0], final_status[0])
-    else:
-        return (None, "permanentFail")
-

class FSAction(argparse.Action):
    objclass = None  # type: Text
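For readers skimming the deletion above: the removed `job_executor` ran each job on a daemon thread and used a double `acquire()` on a plain `threading.Lock` as a makeshift completion signal — the first `acquire()` takes the free lock, the second blocks until a finishing worker calls `release()`. A self-contained sketch of that pattern (the names here are illustrative, not from the codebase):

```python
import threading
import time

done_signal = threading.Lock()  # plays the role of fetch_iter_lock

def worker(delay):
    time.sleep(delay)  # stand-in for job.run(**kwargs)
    # Wake up whoever is blocked in wait_for_next_completion().
    if done_signal.locked():
        done_signal.release()

def wait_for_next_completion():
    done_signal.acquire()  # succeeds immediately: the lock starts free
    done_signal.acquire()  # blocks here until a worker release()s
    done_signal.release()  # restore the lock for the next wait

threading.Thread(target=worker, args=(0.1,), daemon=True).start()
wait_for_next_completion()
print("one worker finished")
```

This machinery now presumably lives behind the `SingleJobExecutor` and `MultithreadedJobExecutor` classes imported at the top of the diff.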
@@ -756,7 +664,7 @@ def supportedCWLversions(enable_dev):

def main(argsl=None,  # type: List[str]
         args=None,  # type: argparse.Namespace
-        executor=job_executor,  # type: Callable[..., Tuple[Dict[Text, Any], Text]]
+        executor=None,  # type: Callable[..., Tuple[Dict[Text, Any], Text]]
         makeTool=workflow.defaultMakeTool,  # type: Callable[..., Process]
         selectResources=None,  # type: Callable[[Dict[Text, int]], Dict[Text, int]]
         stdin=sys.stdin,  # type: IO[Any]
@@ -991,6 +899,17 @@ def main(argsl=None, # type: List[str]
        except SystemExit as e:
            return e.code

+        if not executor:
+            if args.executor == "single":
+                executor = SingleJobExecutor()
+            elif args.executor == "parallel":
+                executor = MultithreadedJobExecutor()
+            else:
+                _logger.error("Unknown executor type: {0}".format(args.executor))
+                arg_parser().print_help()
+                return 1
+
+
        if isinstance(job_order_object, int):
            return job_order_object

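With `executor` now defaulting to `None` and resolved from `args.executor`, embedders can still inject an executor explicitly. A hedged sketch of programmatic use — the `cwltool.main` / `cwltool.executors` import paths are an assumption read off the relative imports at the top of this diff, and the workflow/job filenames are placeholders:

```python
from cwltool.main import main
from cwltool.executors import MultithreadedJobExecutor

# Passing an executor object bypasses the --executor flag entirely.
exit_code = main(argsl=["my-workflow.cwl", "my-job.json"],
                 executor=MultithreadedJobExecutor())
```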
@@ -999,6 +918,7 @@ def main(argsl=None, # type: List[str]
        del args.workflow
        del args.job_order
        (out, status) = executor(tool, job_order_object,
+                                 logger=_logger,
                                 makeTool=makeTool,
                                 select_resources=selectResources,
                                 make_fs_access=make_fs_access,
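Taken together with the updated type comment on the `executor` parameter, the call site above pins down the interface a custom executor has to satisfy. A stub sketch (the keyword list is read off this hunk and may be incomplete):

```python
from typing import Any, Dict, Text, Tuple

def noop_executor(tool,              # the loaded Process to run
                  job_order_object,  # type: Dict[Text, Any]
                  **kwargs           # logger, makeTool, select_resources,
                                     # make_fs_access, and the rest
                  ):
    # type: (...) -> Tuple[Dict[Text, Any], Text]
    # Runs nothing; returns the (output_object, status) pair the caller
    # unpacks as (out, status). "permanentFail" matches the status string
    # used by the removed job_executor.
    return ({}, "permanentFail")
```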