Skip to content

Commit 88dad85

Browse files
committed
Merge pull request #641 from satra/fix/mapnode (branch: fix/mapnode)
2 parents: 94779b4 and eb21a65 — merge commit 88dad85

File tree

2 files changed

+55
-0
lines changed

2 files changed

+55
-0
lines changed

nipype/pipeline/engine.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1205,6 +1205,11 @@ def hash_exists(self, updatehash=False):
12051205
# of the dictionary itself.
12061206
hashed_inputs, hashvalue = self._get_hashval()
12071207
outdir = self.output_dir()
1208+
hashfiles = glob(os.path.join(outdir, '_0x*.json'))
1209+
if len(hashfiles) > 1:
1210+
warn('Removing multiple hashfiles and forcing node to rerun')
1211+
for hashfile in hashfiles:
1212+
os.unlink(hashfile)
12081213
hashfile = os.path.join(outdir, '_0x%s.json' % hashvalue)
12091214
if updatehash and os.path.exists(outdir):
12101215
logger.debug("Updating hash: %s" % hashvalue)
@@ -1298,6 +1303,10 @@ def run(self, updatehash=False):
12981303
logger.debug(("%s found and can_resume is True or Node is a "
12991304
"MapNode - resuming execution") %
13001305
hashfile_unfinished)
1306+
if isinstance(self, MapNode):
1307+
# remove old json files
1308+
for filename in glob(os.path.join(outdir, '_0x*.json')):
1309+
os.unlink(filename)
13011310
outdir = make_output_dir(outdir)
13021311
self._save_hashfile(hashfile_unfinished, hashed_inputs)
13031312
self.write_report(report_type='preexec', cwd=outdir)

nipype/pipeline/tests/test_engine.py

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
"""Tests for the engine module
44
"""
55
from copy import deepcopy
6+
from glob import glob
67
import os
78
from shutil import rmtree
89
from tempfile import mkdtemp
@@ -13,6 +14,7 @@
1314
assert_false)
1415
import nipype.interfaces.base as nib
1516
import nipype.pipeline.engine as pe
17+
from nipype import logging
1618

1719
class InputSpec(nib.TraitedSpec):
1820
input1 = nib.traits.Int(desc='a random int')
@@ -439,3 +441,47 @@ def func2(a):
439441
yield assert_false, error_raised
440442
os.chdir(cwd)
441443
rmtree(wd)
444+
445+
446+
def test_mapnode_json():
    """Test that a MapNode does not accumulate stale hashfiles.

    Regression test for the fix in this commit: running a MapNode, changing
    its inputs, and running again must leave exactly one ``_0x*.json``
    hashfile in the node's output directory, and the presence of unrelated
    ``*.json`` files must not force a rerun.

    This is a nose-style generator test: it ``yield``s (assert_func, args)
    pairs rather than asserting directly.
    """
    cwd = os.getcwd()
    wd = mkdtemp()
    os.chdir(wd)
    from nipype import MapNode, Function, Workflow

    def func1(in1):
        return in1 + 1

    n1 = MapNode(Function(input_names=['in1'],
                          output_names=['out'],
                          function=func1),
                 iterfield=['in1'],
                 name='n1')
    n1.inputs.in1 = [1]
    w1 = Workflow(name='test')
    w1.base_dir = wd
    w1.config = {'crashdump_dir': wd}
    w1.add_nodes([n1])
    w1.run()
    # Change the input so the node reruns and writes a new hashfile ...
    n1.inputs.in1 = [2]
    w1.run()
    # ... then change it back: should rerun again, and the fix ensures the
    # old hashfiles from previous runs are removed, not accumulated.
    n1.inputs.in1 = [1]
    eg = w1.run()

    node = eg.nodes()[0]
    outjson = glob(os.path.join(node.output_dir(), '_0x*.json'))
    yield assert_equal, len(outjson), 1

    # A stray non-hash json file in the output dir must not be mistaken for
    # a hashfile and must not trigger a rerun (stop_on_first_rerun would
    # raise if a rerun were attempted).
    with open(os.path.join(node.output_dir(), 'test.json'), 'wt') as fp:
        fp.write('dummy file')
    w1.config['execution'].update(**{'stop_on_first_rerun': True})
    error_raised = False
    try:
        w1.run()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; any ordinary exception marks the test as failed.
        error_raised = True
    yield assert_false, error_raised
    os.chdir(cwd)
    rmtree(wd)

0 commit comments

Comments
 (0)