Skip to content

Commit c409154

Browse files
committed
enh: allow for quick or fusion labeling
1 parent 10695c1 commit c409154

File tree

2 files changed

+112
-47
lines changed

2 files changed

+112
-47
lines changed

mindboggle/mindboggle

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,6 +182,8 @@ adv_args.add_argument("--plugin", dest="plugin",
182182
help="nipype plugin (see nipype documentation)")
183183
adv_args.add_argument("--plugin_args", dest="plugin_args",
184184
help="plugin arguments (see nipype documentation)")
185+
adv_args.add_argument("--prov", action='store_true',
186+
help="Capture provenance")
185187
args = parser.parse_args()
186188

187189
# ----------------------------------------------------------------------------
@@ -2361,7 +2363,8 @@ if __name__ == '__main__':
23612363
# ------------------------------------------------------------------------
23622364
# Workflow configuration: provenance tracking, content hashing, etc.:
23632365
# ------------------------------------------------------------------------
2364-
# config.enable_provenance()
2366+
if args.prov:
2367+
config.enable_provenance()
23652368
mbFlow.config['execution']['hash_method'] = 'content'
23662369
# mbFlow.config['execution']['use_relative_paths'] = True
23672370

mindboggle/mindboggle123

Lines changed: 108 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ from nipype.interfaces.utility import Merge
3838
from nipype.interfaces.freesurfer import ReconAll
3939
from nipype.interfaces.ants.segmentation import CorticalThickness
4040
from nipype.interfaces.ants import (ApplyTransforms, AntsJointFusion,
41-
LabelGeometry)
41+
LabelGeometry, Registration, RegistrationSynQuick)
4242
from nipype.utils.misc import human_order_sorted
4343

4444

@@ -101,9 +101,21 @@ adv_args.add_argument("--fs_flags", dest="fs_flags",
101101
adv_args.add_argument("--ants_num_threads", dest="num_threads",
102102
default=1, type=int,
103103
help="Number of threads to use with ANTs")
104+
def SegOptions(option):
    """Validate the --ants_seg choice.

    Used as the ``type=`` callable for the ``--ants_seg`` argument, so it
    receives the raw command-line string and must either return the
    validated value or raise ``argparse.ArgumentTypeError``.

    Parameters
    ----------
    option : str
        value passed on the command line

    Returns
    -------
    str
        the validated option, one of 'quick' or 'fusion'

    Raises
    ------
    argparse.ArgumentTypeError
        if option is not 'quick' or 'fusion'
    """
    if option in ['quick', 'fusion']:
        return option
    # argparse.ArgumentError requires an Action as its first argument and
    # would raise a TypeError here; type= callables must use
    # ArgumentTypeError so argparse reports a clean usage error.
    raise argparse.ArgumentTypeError(
        'ants_seg value must be one of "quick" or "fusion".')
109+
adv_args.add_argument("--ants_seg", dest="seg",
110+
default="quick", type=SegOptions,
111+
help="Use ANTs 'quick' or 'fusion' to label subcortical structures")
112+
adv_args.add_argument("--ants_segN", dest="segN", type=int,
113+
help="Number of images to use for joint fusion (2-20)")
104114
adv_args.add_argument("--mb_num_threads", dest="mb_num_threads",
105115
default=1, type=int,
106116
help="Number of threads to use with mindboggle")
117+
adv_args.add_argument("--prov", action='store_true',
118+
help="Capture provenance")
107119
args = parser.parse_args()
108120

109121
# ----------------------------------------------------------------------------
@@ -204,7 +216,8 @@ if args.num_threads and args.num_threads > 1:
204216
# ----------------------------------------------------------------------------
205217
# Create function to call mindboggle
206218
# ----------------------------------------------------------------------------
207-
def mindboggle(subjectid, fsdir, antsdir, antsseg, out, args, num_threads=1):
219+
def mindboggle(subjectid, fsdir, antsdir, antsseg, out, prov, args,
220+
num_threads=1):
208221
"""
209222
Run the mindboggle morphology pipeline (see http://mindboggle.info).
210223
@@ -220,6 +233,8 @@ def mindboggle(subjectid, fsdir, antsdir, antsseg, out, args, num_threads=1):
220233
name of antsCorticalThickness.sh output segmentation file
221234
out : string
222235
path to mindboggle output directory
236+
prov : boolean
237+
capture provenance
223238
args : string
224239
extra arguments
225240
num_threads
@@ -239,7 +254,9 @@ def mindboggle(subjectid, fsdir, antsdir, antsseg, out, args, num_threads=1):
239254
ants = os.path.join(antsdir, subjectid, antsseg)
240255

241256
all_args = ' '.join([DATA, '--out', out, '--ants', ants,
242-
'--working', os.getcwd(), args])
257+
'--working', os.getcwd()] +
258+
(['--prov'] if prov else []) +
259+
[args])
243260

244261
if num_threads > 1:
245262
all_args += ' --plugin MultiProc --plugin_args "dict(n_procs={0})"'.\
@@ -268,6 +285,7 @@ Mindboggle = Node(name='mindboggle',
268285
'antsdir',
269286
'antsseg',
270287
'out',
288+
'prov',
271289
'args',
272290
'num_threads'],
273291
output_names=['command']))
@@ -287,35 +305,7 @@ else:
287305
['OASIS-TRT_labels_to_OASIS_Atropos_template'])
288306
T1s = human_order_sorted(glob(os.path.join(IDIR, '*.nii.gz')))
289307
labels = human_order_sorted(glob(os.path.join(LDIR, '*.nii.gz')))
290-
291-
# ----------------------------------------------------------------------
292-
# Create workflow to label ANTs output with JointFusion and derive stats
293-
# ----------------------------------------------------------------------
294-
merge_transforms = Node(Merge(2), name="merge_transforms")
295-
296-
# label_wf = Workflow('labelflow')
297-
transformer = MapNode(ApplyTransforms(), iterfield=['input_image'],
298-
name="transformer")
299-
transformer.inputs.reference_image = IMAGE
300-
transformer.inputs.input_image = T1s
301-
transformer.inputs.dimension = 3
302-
transformer.inputs.invert_transform_flags = [False, False]
303-
transformer.inputs.interpolation = 'BSpline'
304-
305-
transformer_nn = MapNode(ApplyTransforms(), iterfield=['input_image'],
306-
name="transformer_nn")
307-
transformer_nn.inputs.reference_image = IMAGE
308-
transformer_nn.inputs.dimension = 3
309-
transformer_nn.inputs.invert_transform_flags = [False, False]
310-
transformer_nn.inputs.input_image = labels
311-
transformer_nn.inputs.interpolation = 'NearestNeighbor'
312-
313-
labeler = Node(AntsJointFusion(), name='labeler')
314-
labeler.inputs.dimension = 3
315-
labeler.inputs.target_image = [IMAGE]
316-
labeler.inputs.out_label_fusion = 'label.nii.gz'
317-
if args.num_threads and args.num_threads > 1:
318-
labeler.inputs.num_threads = args.num_threads
308+
N = args.segN or len(T1s)
319309

320310
def mask_labels(intensity_image, label_image, output_dir=None):
321311
import nibabel as nb
@@ -344,23 +334,93 @@ else:
344334
tocsv.inputs.output_file = os.path.join(ants_output, ID,
345335
'antslabelstats.csv')
346336

347-
mbFlow.connect(corticalthickness, 'TemplateToSubject1GenericAffine',
348-
merge_transforms, 'in1')
349-
mbFlow.connect(corticalthickness, 'TemplateToSubject0Warp',
350-
merge_transforms, 'in2')
351-
mbFlow.connect(merge_transforms, 'out', transformer, 'transforms')
352-
mbFlow.connect(merge_transforms, 'out', transformer_nn, 'transforms')
353-
mbFlow.connect(corticalthickness, 'BrainExtractionMask', labeler,
354-
'mask_image')
337+
if args.seg and args.seg == "quick":
338+
# -----------------------------------------------------
339+
# Label ANTs output with Labels in template space
340+
# -----------------------------------------------------
341+
merge_transforms = Node(Merge(2), name="merge_transforms")
342+
transformer_nn = Node(ApplyTransforms(), name="transformer_nn")
343+
transformer_nn.inputs.reference_image = IMAGE
344+
transformer_nn.inputs.dimension = 3
345+
transformer_nn.inputs.invert_transform_flags = [False, False]
346+
transformer_nn.inputs.input_image = '/opt/data/OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_OASIS-30_v2.nii.gz'
347+
transformer_nn.inputs.interpolation = 'NearestNeighbor'
348+
349+
mbFlow.connect(corticalthickness, 'TemplateToSubject1GenericAffine',
350+
merge_transforms, 'in1')
351+
mbFlow.connect(corticalthickness, 'TemplateToSubject0Warp',
352+
merge_transforms, 'in2')
353+
357+
mbFlow.connect(merge_transforms, 'out', transformer_nn, 'transforms')
358+
mbFlow.connect(transformer_nn, 'output_image', masker, 'label_image')
359+
elif args.seg:
360+
# -----------------------------------------------------
361+
# Create workflow to label ANTs output with JointFusion
362+
# -----------------------------------------------------
363+
reg = MapNode(Registration(), iterfield=['moving_image'],
364+
name="register")
365+
reg.inputs.fixed_image = IMAGE
366+
reg.inputs.moving_image = T1s[:N]
367+
reg.inputs.output_transform_prefix = "output_"
368+
reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
369+
reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.2, 3.0, 0.0)]
370+
reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[
371+
100, 30, 20
372+
]]
373+
reg.inputs.dimension = 3
374+
reg.inputs.write_composite_transform = True
375+
reg.inputs.collapse_output_transforms = True
376+
reg.inputs.initial_moving_transform_com = True
377+
reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
378+
reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
379+
reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
380+
reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
381+
reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
382+
reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
383+
reg.inputs.convergence_window_size = [20] * 2 + [5]
384+
reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
385+
reg.inputs.sigma_units = ['vox'] * 3
386+
reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
387+
reg.inputs.use_estimate_learning_rate_once = [True] * 3
388+
reg.inputs.use_histogram_matching = [False] * 2 + [True]
389+
reg.inputs.winsorize_lower_quantile = 0.005
390+
reg.inputs.winsorize_upper_quantile = 0.995
391+
reg.inputs.float = True
392+
reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
393+
if args.num_threads and args.num_threads > 1:
394+
reg.inputs.num_threads = args.num_threads
395+
396+
transformer_nn = MapNode(ApplyTransforms(), iterfield=['input_image'],
397+
name="transformer_nn")
398+
transformer_nn.inputs.reference_image = IMAGE
399+
transformer_nn.inputs.dimension = 3
400+
transformer_nn.inputs.invert_transform_flags = [False, False]
401+
transformer_nn.inputs.input_image = labels[:N]
402+
transformer_nn.inputs.interpolation = 'NearestNeighbor'
403+
404+
labeler = Node(AntsJointFusion(), name='labeler')
405+
labeler.inputs.dimension = 3
406+
labeler.inputs.target_image = [IMAGE]
407+
labeler.inputs.out_label_fusion = 'label.nii.gz'
408+
if args.num_threads and args.num_threads > 1:
409+
labeler.inputs.num_threads = args.num_threads
410+
411+
mbFlow.connect(reg, 'composite_transform', transformer_nn, 'transforms')
412+
mbFlow.connect(corticalthickness, 'BrainExtractionMask', labeler,
413+
'mask_image')
414+
mbFlow.connect(reg, 'warped_image', labeler, 'atlas_image')
415+
mbFlow.connect(transformer_nn, 'output_image', labeler,
416+
'atlas_segmentation_image')
417+
mbFlow.connect(labeler, 'out_label_fusion', masker, 'label_image')
418+
419+
355420
mbFlow.connect(corticalthickness, 'CorticalThickness',
356421
tocsv, 'intensity_image')
357422
mbFlow.connect(corticalthickness, 'CorticalThickness',
358423
masker, 'intensity_image')
359-
360-
mbFlow.connect(transformer, 'output_image', labeler, 'atlas_image')
361-
mbFlow.connect(transformer_nn, 'output_image', labeler,
362-
'atlas_segmentation_image')
363-
mbFlow.connect(labeler, 'out_label_fusion', masker, 'label_image')
364424
mbFlow.connect(masker, 'new_label_file', tocsv, 'label_image')
365425

366426
# ----------------------------------------------------------------------------
@@ -369,6 +429,7 @@ else:
369429
mbFlow.connect(corticalthickness, 'BrainSegmentation',
370430
Mindboggle, 'antsseg')
371431
Mindboggle.inputs.out = mindboggle_output
432+
Mindboggle.inputs.prov = args.prov
372433
Mindboggle.inputs.args = '--roygbiv' # ' --graph hier'
373434
if args.mb_num_threads:
374435
Mindboggle.inputs.num_threads = args.mb_num_threads
@@ -384,7 +445,8 @@ if __name__ == '__main__':
384445
# --------------------------------------------------------------------
385446
# Workflow configuration: provenance tracking, content hashing, etc.:
386447
# --------------------------------------------------------------------
387-
# config.enable_provenance()
448+
if args.prov:
449+
config.enable_provenance()
388450
mbFlow.config['execution']['hash_method'] = 'content'
389451
# mbFlow.config['execution']['use_relative_paths'] = True
390452
mbFlow.config['execution']['crashfile_format'] = 'txt'

0 commit comments

Comments
 (0)