@@ -307,7 +307,7 @@ def run(self, updatehash=False):
        updatehash: boolean
            Update the hash stored in the output directory
        """
-        cwd = os.getcwd()
+        cwd = os.getcwd()  # First thing, keep track of where we are

        if self.config is None:
            self.config = {}
@@ -327,6 +327,7 @@ def run(self, updatehash=False):
        makedirs(outdir, exist_ok=True)
        os.chdir(outdir)

+        # Check hash, check whether run should be enforced
        logger.info('[Node] Setting-up "%s" in "%s".', self.fullname, outdir)
        hash_info = self.hash_exists(updatehash=updatehash)
        hash_exists, hashvalue, hashfile, hashed_inputs = hash_info
@@ -359,7 +360,7 @@ def run(self, updatehash=False):
        if need_rerun:
            log_debug = config.get('logging', 'workflow_level') == 'DEBUG'
            logger.debug('[Node] Rerunning "%s"', self.fullname)
-            if log_debug and not hash_exists:
+            if log_debug and not hash_exists:  # Lazy logging - only debug
                exp_hash_paths = glob(json_pat)
                if len(exp_hash_paths) == 1:
                    split_out = split_filename(exp_hash_paths[0])
@@ -375,9 +376,10 @@ def run(self, updatehash=False):
                                                          hashed_inputs)
            if not force_run and str2bool(self.config['execution']['stop_on_first_rerun']):
                raise Exception('Cannot rerun when "stop_on_first_rerun" is set to True')
-        hashfile_unfinished = op.join(outdir,
-                                      '_0x%s_unfinished.json' %
-                                      hashvalue)
+
+        # Hashfile while running, remove if exists already
+        hashfile_unfinished = op.join(
+            outdir, '_0x%s_unfinished.json' % hashvalue)
        if op.exists(hashfile):
            os.remove(hashfile)

@@ -396,6 +398,7 @@ def run(self, updatehash=False):
        for filename in glob(op.join(outdir, '_0x*.json')):
            os.remove(filename)

+        # Store runtime-hashfile, pre-execution report, the node and the inputs set.
        self._save_hashfile(hashfile_unfinished, hashed_inputs)
        self.write_report(report_type='preexec', cwd=outdir)
        savepkl(op.join(outdir, '_node.pklz'), self)
@@ -405,11 +408,12 @@ def run(self, updatehash=False):
            self._run_interface(execute=True)
        except:
            logger.warning('[Node] Exception "%s" (%s)', self.fullname, outdir)
+            # Tear-up after error
            os.remove(hashfile_unfinished)
            os.chdir(cwd)
            raise

-        # Tear-up
+        # Tear-up after success
        shutil.move(hashfile_unfinished, hashfile)
        self.write_report(report_type='postexec', cwd=outdir)
        logger.info('[Node] Finished "%s".', self.fullname)
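The hunks above implement an "unfinished hashfile" lifecycle: a `_0x<hash>_unfinished.json` marker is written before the interface runs, removed if execution raises, and renamed to the final `_0x<hash>.json` once the node finishes. The sketch below is a minimal, self-contained illustration of that pattern only, not nipype's API; the names `run_with_hashfile`, `workdir`, and `do_work` are hypothetical, and the marker contents are simplified to a plain string.

```python
# Minimal sketch of the "unfinished hashfile" pattern from the diff above.
# Names (run_with_hashfile, workdir, do_work) are illustrative, not nipype's API.
import os
import os.path as op
import shutil


def run_with_hashfile(workdir, hashvalue, hashed_inputs, do_work):
    """Mark a run as in-progress, then finalize or clean up the marker."""
    hashfile = op.join(workdir, '_0x%s.json' % hashvalue)
    hashfile_unfinished = op.join(workdir, '_0x%s_unfinished.json' % hashvalue)

    # Drop any stale final hashfile so a crash cannot look like a finished run
    if op.exists(hashfile):
        os.remove(hashfile)

    # Write the "unfinished" marker before doing any work
    with open(hashfile_unfinished, 'w') as fhandle:
        fhandle.write(hashed_inputs)

    try:
        do_work()
    except Exception:
        # Tear-up after error: remove the marker and re-raise
        os.remove(hashfile_unfinished)
        raise

    # Tear-up after success: promote the marker to the final hashfile
    shutil.move(hashfile_unfinished, hashfile)


if __name__ == '__main__':
    import tempfile
    with tempfile.TemporaryDirectory() as tmp:
        run_with_hashfile(tmp, 'deadbeef', '{"a": 1}', lambda: None)
        print(os.listdir(tmp))  # ['_0xdeadbeef.json']
```

Because the final hashfile only appears through the rename at the very end, its presence is an all-or-nothing signal that a previous run completed, which is what the rerun checks earlier in `run()` rely on.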