Skip to content

Commit d2ac3f2

Browse files
committed
Merge branch 'master' into enh/MRTrix3
Conflicts: CHANGES
2 parents 19effb6 + b6e83d8 commit d2ac3f2

File tree

10 files changed

+341
-125
lines changed

10 files changed

+341
-125
lines changed

CHANGES

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,10 @@ Next release
22
============
33

44
* ENH: New interfaces for MRTrix3 (https://github.com/nipy/nipype/pull/1126)
5+
* ENH: Added interface to simulate DWIs using the multi-tensor model
6+
(https://github.com/nipy/nipype/pull/1085)
57
* ENH: New interface for FSL fslcpgeom utility (https://github.com/nipy/nipype/pull/1152)
8+
* ENH: Added SLURMGraph plugin for submitting jobs to SLURM with dependencies (https://github.com/nipy/nipype/pull/1136)
69
* FIX: Enable absolute path definitions in DCMStack (https://github.com/nipy/nipype/pull/1089,
710
replaced by https://github.com/nipy/nipype/pull/1093)
811
* ENH: New mesh.MeshWarpMaths to operate on surface-defined warpings

doc/users/plugins.rst

Lines changed: 30 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -139,9 +139,11 @@ particular node might use more resources than other nodes in a workflow.
139139
140140
SGEGraph
141141
~~~~~~~~
142-
SGEGraph_ is a exuction plugin working with Sun Grid Engine that allows for
142+
SGEGraph_ is an execution plugin working with Sun Grid Engine that allows for
143143
submitting entire graphs of dependent jobs at once. This way Nipype does not
144-
need to run a monitoring process - SGE takes care of this.
144+
need to run a monitoring process - SGE takes care of this. The use of SGEGraph_
145+
is preferred over SGE_ since the latter adds unnecessary load on the submit
146+
machine.
145147

146148
.. note::
147149

@@ -175,6 +177,26 @@ Optional arguments::
175177

176178
template: custom template file to use
177179
sbatch_args: any other command line args to be passed to bsub.
180+
181+
182+
SLURMGraph
183+
~~~~~~~~~~
184+
SLURMGraph_ is an execution plugin working with SLURM that allows for
185+
submitting entire graphs of dependent jobs at once. This way Nipype does not
186+
need to run a monitoring process - SLURM takes care of this. The use of SLURMGraph_
187+
plugin is preferred over the vanilla SLURM_ plugin since the latter adds
188+
unnecessary load on the submit machine.
189+
190+
191+
.. note::
192+
193+
When rerunning unfinished workflows using SLURMGraph you may decide not to
194+
submit jobs for Nodes that previously finished running. This can speed up
195+
execution, but new or modified inputs that would previously trigger a Node
196+
to rerun will be ignored. The following option turns on this functionality::
197+
198+
workflow.run(plugin='SLURMGraph', plugin_args = {'dont_resubmit_completed_jobs': True})
199+
178200

179201
HTCondor
180202
--------
@@ -183,12 +205,12 @@ DAGMan
183205
~~~~~~
184206

185207
With its DAGMan_ component HTCondor_ (previously Condor) allows for submitting
186-
entire graphs of dependent jobs at once (similar to SGEGraph_). With the ``CondorDAGMan`` plug-in
187-
Nipype can utilize this functionality to submit complete workflows directly and
188-
in a single step. Consequently, and in contrast to other plug-ins, workflow
189-
execution returns almost instantaneously -- Nipype is only used to generate the
190-
workflow graph, while job scheduling and dependency resolution are entirely
191-
managed by HTCondor_.
208+
entire graphs of dependent jobs at once (similar to SGEGraph_ and SLURMGraph_).
209+
With the ``CondorDAGMan`` plug-in Nipype can utilize this functionality to
210+
submit complete workflows directly and in a single step. Consequently, and
211+
in contrast to other plug-ins, workflow execution returns almost
212+
instantaneously -- Nipype is only used to generate the workflow graph,
213+
while job scheduling and dependency resolution are entirely managed by HTCondor_.
192214

193215
Please note that although DAGMan_ supports specification of data dependencies
194216
as well as data provisioning on compute nodes this functionality is currently

nipype/interfaces/ants/registration.py

Lines changed: 128 additions & 96 deletions
Large diffs are not rendered by default.

nipype/interfaces/ants/segmentation.py

Lines changed: 17 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -574,6 +574,7 @@ def _list_outputs(self):
574574
'brainvols.csv')
575575
return outputs
576576

577+
577578
class JointFusionInputSpec(ANTSCommandInputSpec):
578579
dimension = traits.Enum(3, 2, 4, argstr='%d', position=0, usedefault=True, mandatory=True,
579580
desc='image dimension (2, 3, or 4)')
@@ -588,21 +589,23 @@ class JointFusionInputSpec(ANTSCommandInputSpec):
588589
patch_radius = traits.ListInt(minlen=3, maxlen=3, argstr='-rp %s', desc='Patch radius for similarity measures, scalar or vector. Default: 2x2x2')
589590
search_radius = traits.ListInt(minlen=3, maxlen=3, argstr='-rs %s', desc='Local search radius. Default: 3x3x3')
590591
exclusion_region = File(exists=True, argstr='-x %s', desc='Specify an exclusion region for the given label.')
591-
output_posteriors_name_template = traits.Str('POSTERIOR_%02d.nii.gz', argstr='-p %s',
592-
desc="Save the posterior maps (probability that each voxel belongs to each " +\
593-
"label) as images. The number of images saved equals the number of labels. " +\
594-
"The filename pattern must be in C printf format, e.g. posterior%04d.nii.gz")
595-
output_voting_weights_name_template = traits.Str('WEIGHTED_%04d.nii.gz', argstr='-w %s', desc="Save the voting weights as " +\
596-
"images. The number of images saved equals the number of atlases. The " +\
597-
"filename pattern must be in C printf format, e.g. weight%04d.nii.gz")
592+
# TODO: These are almost never needed except for debugging
593+
# output_posteriors_name_template = traits.Str('POSTERIOR_%02d.nii.gz', argstr='-p %s',
594+
# desc="Save the posterior maps (probability that each voxel belongs to each " +\
595+
# "label) as images. The number of images saved equals the number of labels. " +\
596+
# "The filename pattern must be in C printf format, e.g. posterior%04d.nii.gz")
597+
# output_voting_weights_name_template = traits.Str('WEIGHTED_%04d.nii.gz', argstr='-w %s', desc="Save the voting weights as " +\
598+
# "images. The number of images saved equals the number of atlases. The " +\
599+
# "filename pattern must be in C printf format, e.g. weight%04d.nii.gz")
598600
atlas_group_id = traits.ListInt(argstr='-gp %d...', desc='Assign a group ID for each atlas')
599601
atlas_group_weights = traits.ListInt(argstr='-gpw %d...', desc='Assign the voting weights to each atlas group')
600602

601603

602604
class JointFusionOutputSpec(TraitedSpec):
603605
output_label_image = File(exists=True)
604-
# TODO: optional outputs - output_posteriors, output_voting_weights
605-
606+
# TODO: These are almost never needed except for debugging,
607+
# so delay complicated implementation until a need arises
608+
# optional outputs - output_posteriors, output_voting_weights
606609

607610
class JointFusion(ANTSCommand):
608611
"""
@@ -616,22 +619,24 @@ class JointFusion(ANTSCommand):
616619
>>> at.inputs.method = 'Joint[0.1,2]'
617620
>>> at.inputs.output_label_image ='fusion_labelimage_output.nii'
618621
>>> at.inputs.warped_intensity_images = ['im1.nii',
619-
... 'im2.nii']
622+
... 'im2.nii',
623+
... 'im3.nii']
620624
>>> at.inputs.warped_label_images = ['segmentation0.nii.gz',
625+
... 'segmentation1.nii.gz',
621626
... 'segmentation1.nii.gz']
622627
>>> at.inputs.target_image = 'T1.nii'
623628
>>> at.inputs.patch_radius = [3,2,1]
624629
>>> at.inputs.search_radius = [1,2,3]
625630
>>> at.cmdline
626-
'jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -l segmentation0.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii'
631+
'jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii'
627632
628633
Alternately, you can specify the voting method and parameters more 'Pythonically':
629634
630635
>>> at.inputs.method = 'Joint'
631636
>>> at.inputs.alpha = 0.5
632637
>>> at.inputs.beta = 1
633638
>>> at.cmdline
634-
'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -l segmentation0.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii'
639+
'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii'
635640
"""
636641
input_spec = JointFusionInputSpec
637642
output_spec = JointFusionOutputSpec

nipype/interfaces/ants/tests/test_auto_Registration.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,9 @@ def test_Registration_inputs():
3333
initial_moving_transform_com=dict(argstr='%s',
3434
xor=['initial_moving_transform'],
3535
),
36+
initialize_transforms_per_stage=dict(argstr='--initialize-transforms-per-stage %d',
37+
usedefault=True,
38+
),
3639
interpolation=dict(argstr='%s',
3740
usedefault=True,
3841
),
@@ -70,6 +73,8 @@ def test_Registration_inputs():
7073
radius_or_number_of_bins=dict(requires=['metric_weight'],
7174
usedefault=True,
7275
),
76+
restore_state=dict(argstr='--restore-state %s',
77+
),
7378
sampling_percentage=dict(requires=['sampling_strategy'],
7479
),
7580
sampling_percentage_item_trait=dict(),
@@ -78,6 +83,8 @@ def test_Registration_inputs():
7883
),
7984
sampling_strategy_item_trait=dict(),
8085
sampling_strategy_stage_trait=dict(),
86+
save_state=dict(argstr='--save-state %s',
87+
),
8188
shrink_factors=dict(mandatory=True,
8289
),
8390
sigma_units=dict(requires=['smoothing_sigmas'],
@@ -117,6 +124,7 @@ def test_Registration_outputs():
117124
inverse_warped_image=dict(),
118125
reverse_invert_flags=dict(),
119126
reverse_transforms=dict(),
127+
save_state=dict(),
120128
warped_image=dict(),
121129
)
122130
outputs = Registration.output_spec()

nipype/interfaces/slicer/registration/brainsfit.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@ class BRAINSFitInputSpec(CommandLineInputSpec):
6767
NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 = traits.Bool(desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ")
6868
permitParameterVariation = InputMultiPath(traits.Int, desc="A bit vector to permit linear transform parameters to vary under optimization. The vector order corresponds with transform parameters, and beyond the end ones fill in as a default. For instance, you can choose to rotate only in x (pitch) with 1,0,0; this is mostly for expert use in turning on and off individual degrees of freedom in rotation, translation or scaling without multiplying the number of transform representations; this trick is probably meaningless when tried with the general affine transform.", sep=",", argstr="--permitParameterVariation %s")
6969
costMetric = traits.Enum("MMI", "MSE", "NC", "MC", desc="The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", argstr="--costMetric %s")
70+
writeOutputTransformInFloat = traits.Bool(desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", argstr="--writeOutputTransformInFloat ")
7071

7172

7273
class BRAINSFitOutputSpec(TraitedSpec):

nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -146,6 +146,8 @@ def test_BRAINSFit_inputs():
146146
),
147147
writeTransformOnFailure=dict(argstr='--writeTransformOnFailure ',
148148
),
149+
writeOutputTransformInFloat=dict(argstr='--writeOutputTransformInFloat ',
150+
),
149151
)
150152
inputs = BRAINSFit.input_spec()
151153

nipype/pipeline/plugins/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,3 +15,4 @@
1515
from .sgegraph import SGEGraphPlugin
1616
from .lsf import LSFPlugin
1717
from .slurm import SLURMPlugin
18+
from .slurmgraph import SLURMGraphPlugin

nipype/pipeline/plugins/sgegraph.py

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -80,14 +80,6 @@ def make_job_name(jobnumber, nodeslist):
8080
for idx, pyscript in enumerate(pyfiles):
8181
node = nodes[idx]
8282
node_status_done = node_completed_status(node)
83-
## If a node has no dependencies, and it is requested to run_without_submitting
84-
## then run this node in place
85-
if (not node_status_done) and (len(dependencies[idx]) == 0 ) and (node.run_without_submitting == True):
86-
try:
87-
node.run()
88-
except Exception:
89-
node._clean_queue(idx, nodes)
90-
node_status_done = True # if successfully run locally, then claim true
9183

9284
#if the node itself claims done, then check to ensure all
9385
#dependancies are also done
@@ -130,7 +122,7 @@ def make_job_name(jobnumber, nodeslist):
130122
values = ' '
131123
for jobid in dependencies[idx]:
132124
## Avoid dependencies of done jobs
133-
if cache_doneness_per_node[jobid] == False:
125+
if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False:
134126
values += "${{{0}}},".format(make_job_name(jobid, nodes))
135127
if values != ' ': # i.e. if some jobs were added to dependency list
136128
values = values.rstrip(',')

0 commit comments

Comments
 (0)