Commit abb8e04

add inputnode.in_data to all test workflows #1538
1 parent 460f691 commit abb8e04

8 files changed (+74, -71 lines)
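Every example workflow in this commit gets the same treatment: the hard-coded data directory previously assigned to `DataGrabber.base_directory` is replaced by an `IdentityInterface` node named `inputnode` with a single `in_data` field, which is connected into the grabber so the data location becomes a regular workflow input. A minimal, self-contained sketch of the pattern, assuming node names and template values from the FEEDS diff below (the workflow name is illustrative):

```python
import os
from nipype.interfaces import io as nio
from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe

# Expose the data location as a workflow input rather than hard-coding it.
inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']), name='inputnode')

# A DataGrabber whose base_directory is left unset; it arrives through the
# connection below. Template values mirror the FEEDS example.
datasource = pe.Node(nio.DataGrabber(outfields=['func']), name='datasource')
datasource.inputs.template = '%s.nii.gz'
datasource.inputs.template_args = dict(func=[['fmri']])
datasource.inputs.sort_filelist = True

wf = pe.Workflow(name='example')  # illustrative workflow name
wf.connect(inputnode, 'in_data', datasource, 'base_directory')

# Callers can now point the same workflow at any data root:
wf.inputs.inputnode.in_data = os.path.abspath('feeds/data')
```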

circle.yml

Lines changed: 5 additions & 5 deletions
```diff
@@ -19,13 +19,13 @@ dependencies:
 test:
   override:
     - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch --entrypoint="/usr/bin/run_builddocs.sh" nipype/testbench
-    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench test_spm Linear workflow3d workflow4d :
+    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench test_spm Linear /root/examples/ workflow3d workflow4d :
         timeout: 1600
-    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_fsl_feeds Linear l1pipeline
-    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_spm_dartel Linear level1 l2pipeline :
+    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_fsl_feeds Linear /root/examples/ l1pipeline
+    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_spm_dartel Linear /root/examples/ level1 l2pipeline :
         timeout: 1600
-    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_fsl_reuse Linear level1_workflow
-    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_spm_nested Linear level1 l2pipeline
+    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_fsl_reuse Linear /root/examples/ level1_workflow
+    - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch nipype/testbench fmri_spm_nested Linear /root/examples/ level1 l2pipeline
     - docker run -i -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch --entrypoint="/usr/bin/run_nosetests.sh" nipype/testbench :
         timeout: 2600
   post:
```
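Each test command gains a new positional argument, `/root/examples/`, between the plugin name and the workflow names: the data root the container should hand to each workflow's new `inputnode.in_data`. The entrypoint inside `nipype/testbench` is not part of this commit, so the following is only a sketch of how a runner might consume that argument list; every name in it is illustrative:

```python
# Illustrative only: hypothetical runner parsing
# "<example> <plugin> <data_dir> <workflows...>", as in the commands above.
import sys
import importlib

example, plugin, data_dir = sys.argv[1:4]
workflow_names = sys.argv[4:]

module = importlib.import_module(example)  # e.g. fmri_fsl_feeds
for name in workflow_names:
    wf = getattr(module, name)
    wf.inputs.inputnode.in_data = data_dir  # feed the new workflow input
    wf.run(plugin=plugin)
```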

docker/test-image-nipype/Dockerfile

Lines changed: 8 additions & 5 deletions
```diff
@@ -53,18 +53,21 @@ RUN conda config --add channels conda-forge
 RUN conda create -y -n nipypetests-2.7 lockfile nipype && \
     echo '#!/bin/bash' >> /etc/profile.d/nipype.sh && \
     echo '#!/bin/bash' >> /etc/bashrc && \
-    echo 'source activate nipypetests-2.7' >> /etc/profile.d/nipype.sh && \
-    echo 'source activate nipypetests-2.7' >> /etc/bashrc
+    echo 'source activate nipypetests-2.7' >> /etc/profile.d/nipype.sh

 # Create conda environment
 RUN conda create -y -n nipypetests-3.4 lockfile nipype python=3.4

 # Create conda environment
 RUN conda create -y -n nipypetests-3.5 lockfile nipype python=3.5

-# Install pip
-# RUN source activate nipypetests-2.7 && \
-#     pip install --upgrade pip && \
+# Install dipy
+RUN source activate nipypetests-2.7 && \
+    pip install dipy && \
+    source activate nipypetests-3.4 && \
+    pip install dipy && \
+    source activate nipypetests-3.5 && \
+    pip install dipy

 RUN echo "source /etc/profile.d/nipype.sh" >> /etc/bash.bashrc
 CMD ["/bin/bash"]
```

examples/fmri_fsl_feeds.py

Lines changed: 11 additions & 8 deletions
```diff
@@ -16,12 +16,12 @@
 from __future__ import division
 from builtins import range

-import os                                    # system functions
-
-import nipype.interfaces.io as nio           # Data i/o
-import nipype.interfaces.fsl as fsl          # fsl
-import nipype.pipeline.engine as pe          # pypeline engine
-import nipype.algorithms.modelgen as model   # model generation
+import os                                        # system functions
+from nipype.interfaces import io as nio          # Data i/o
+from nipype.interfaces import utility as niu     # Utilities
+from nipype.interfaces import fsl                # fsl
+from nipype.pipeline import engine as pe         # pypeline engine
+from nipype.algorithms import modelgen as model  # model generation
 from nipype.workflows.fmri.fsl import (create_featreg_preproc,
                                        create_modelfit_workflow,
                                        create_reg_workflow)
@@ -48,7 +48,9 @@
 """

 # Specify the location of the FEEDS data. You can find it at http://www.fmrib.ox.ac.uk/fsl/feeds/doc/index.html
-feeds_data_dir = os.path.abspath('feeds/data')
+
+
+inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']), name='inputnode')
 # Specify the subject directories
 # Map field names to individual subject runs.
 info = dict(func=[['fmri']],
@@ -63,7 +65,6 @@
 datasource = pe.Node(interface=nio.DataGrabber(outfields=['func', 'struct']),
                      name='datasource')
-datasource.inputs.base_directory = feeds_data_dir
 datasource.inputs.template = '%s.nii.gz'
 datasource.inputs.template_args = info
 datasource.inputs.sort_filelist = True
@@ -110,6 +111,7 @@
 l1pipeline.base_dir = os.path.abspath('./fsl_feeds/workingdir')
 l1pipeline.config = {"execution": {"crashdump_dir": os.path.abspath('./fsl_feeds/crashdumps')}}

+l1pipeline.connect(inputnode, 'in_data', datasource, 'base_directory')
 l1pipeline.connect(datasource, 'func', preproc, 'inputspec.func')
 l1pipeline.connect(preproc, 'outputspec.highpassed_files', modelspec, 'functional_runs')
 l1pipeline.connect(preproc, 'outputspec.motion_parameters', modelspec, 'realignment_parameters')
@@ -142,4 +144,5 @@
 """

 if __name__ == '__main__':
+    l1pipeline.inputs.inputnode.in_data = os.path.abspath('feeds/data')
     l1pipeline.run()
```
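With the connection in place, the FEEDS data location is just another workflow input: the `__main__` block keeps the old default (`feeds/data`), but a caller such as the CI commands above can point the same pipeline elsewhere without editing the script. A sketch, with a hypothetical path:

```python
# Hypothetical override: run the unchanged pipeline against another data root.
l1pipeline.inputs.inputnode.in_data = '/data/feeds/data'  # illustrative path
l1pipeline.run()
```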

examples/fmri_fsl_reuse.py

Lines changed: 12 additions & 13 deletions
```diff
@@ -20,10 +20,9 @@
 from builtins import range

 import os                                   # system functions
-
 import nipype.interfaces.io as nio          # Data i/o
 import nipype.interfaces.fsl as fsl         # fsl
-import nipype.interfaces.utility as util    # utility
+from nipype.interfaces import utility as niu  # Utilities
 import nipype.pipeline.engine as pe         # pypeline engine
 import nipype.algorithms.modelgen as model  # model generation
 import nipype.algorithms.rapidart as ra     # artifact detection
@@ -56,7 +55,7 @@
 and modelfitting workflows.
 """

-art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
+art = pe.MapNode(ra.ArtifactDetect(use_differences=[True, False],
                                    use_norm=True,
                                    norm_threshold=1,
                                    zintensity_threshold=3,
@@ -65,7 +64,7 @@
                  iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
                  name="art")

-modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
+modelspec = pe.Node(model.SpecifyModel(), name="modelspec")

 level1_workflow.connect([(preproc, art, [('outputspec.motion_parameters',
                                           'realignment_parameters'),
@@ -139,15 +138,15 @@ def num_copes(files):
 """

-# Specify the location of the data.
-data_dir = os.path.abspath('data')
+inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']), name='inputnode')
+
 # Specify the subject directories
 subject_list = ['s1']  # , 's3']
 # Map field names to individual subject runs.
 info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]],
             struct=[['subject_id', 'struct']])

-infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
+infosource = pe.Node(niu.IdentityInterface(fields=['subject_id']),
                      name="infosource")

 """Here we set up iteration over all the subjects. The following line
@@ -169,10 +168,9 @@ def num_copes(files):
 functionality.
 """

-datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
+datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
                                      outfields=['func', 'struct']),
                      name='datasource')
-datasource.inputs.base_directory = data_dir
 datasource.inputs.template = '%s/%s.nii'
 datasource.inputs.template_args = info
 datasource.inputs.sort_filelist = True
@@ -182,12 +180,12 @@ def num_copes(files):
 iterables on this node to perform two different extents of smoothing.
 """

-inputnode = level1_workflow.get_node('featpreproc.inputspec')
-inputnode.iterables = ('fwhm', [5., 10.])
+featinput = level1_workflow.get_node('featpreproc.inputspec')
+featinput.iterables = ('fwhm', [5., 10.])

 hpcutoff = 120.
 TR = 3.
-inputnode.inputs.highpass = hpcutoff / (2. * TR)
+featinput.inputs.highpass = hpcutoff / (2. * TR)

 """
 Setup a function that returns subject-specific information about the
@@ -239,7 +237,8 @@ def subjectinfo(subject_id):
 level1_workflow.base_dir = os.path.abspath('./fsl/workingdir')
 level1_workflow.config['execution'] = dict(crashdump_dir=os.path.abspath('./fsl/crashdumps'))

-level1_workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
+level1_workflow.connect([(inputnode, datasource, [('in_data', 'base_directory')]),
+                         (infosource, datasource, [('subject_id', 'subject_id')]),
                          (infosource, modelspec, [(('subject_id', subjectinfo),
                                                    'subject_info')]),
                          (datasource, preproc, [('func', 'inputspec.func')]),
```
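Note the rename of the local variable `inputnode` (the handle on the nested `featpreproc.inputspec` node) to `featinput`: it frees the name `inputnode` for the new top-level identity node without touching the nested workflow itself. Running the example then mirrors the CI invocation above; a sketch, with the data root and plugin name taken from this commit:

```python
# Sketch of a manual run matching the CI command for this example.
import os
level1_workflow.inputs.inputnode.in_data = os.path.abspath('data')
level1_workflow.run(plugin='Linear')
```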

examples/fmri_spm_dartel.py

Lines changed: 27 additions & 26 deletions
```diff
@@ -22,7 +22,7 @@
 import nipype.interfaces.spm as spm          # spm
 import nipype.workflows.fmri.spm as spm_wf   # spm
 import nipype.interfaces.fsl as fsl          # fsl
-import nipype.interfaces.utility as util     # utility
+from nipype.interfaces import utility as niu  # Utilities
 import nipype.pipeline.engine as pe          # pypeline engine
 import nipype.algorithms.rapidart as ra      # artifact detection
 import nipype.algorithms.modelgen as model   # model specification
@@ -68,15 +68,15 @@
 and register all images to the mean image.
 """

-realign = pe.Node(interface=spm.Realign(), name="realign")
+realign = pe.Node(spm.Realign(), name="realign")
 realign.inputs.register_to_mean = True

 """Use :class:`nipype.algorithms.rapidart` to determine which of the
 images in the functional series are outliers based on deviations in
 intensity or movement.
 """

-art = pe.Node(interface=ra.ArtifactDetect(), name="art")
+art = pe.Node(ra.ArtifactDetect(), name="art")
 art.inputs.use_differences = [True, False]
 art.inputs.use_norm = True
 art.inputs.norm_threshold = 1
@@ -88,14 +88,14 @@
 :class:`nipype.interfaces.fsl.BET`.
 """

-skullstrip = pe.Node(interface=fsl.BET(), name="skullstrip")
+skullstrip = pe.Node(fsl.BET(), name="skullstrip")
 skullstrip.inputs.mask = True

 """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
 body registration of the functional data to the structural data.
 """

-coregister = pe.Node(interface=spm.Coregister(), name="coregister")
+coregister = pe.Node(spm.Coregister(), name="coregister")
 coregister.inputs.jobtype = 'estimate'

@@ -134,40 +134,40 @@
 :class:`nipype.interfaces.spm.SpecifyModel`.
 """

-modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec")
+modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec")
 modelspec.inputs.concatenate_runs = True

 """Generate a first level SPM.mat file for analysis
 :class:`nipype.interfaces.spm.Level1Design`.
 """

-level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
+level1design = pe.Node(spm.Level1Design(), name="level1design")
 level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}

 """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the
 parameters of the model.
 """

-level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
+level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate")
 level1estimate.inputs.estimation_method = {'Classical': 1}

 """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the
 first level contrasts specified in a few steps above.
 """

-contrastestimate = pe.Node(interface=spm.EstimateContrast(), name="contrastestimate")
+contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate")

 """Use :class: `nipype.interfaces.utility.Select` to select each contrast for
 reporting.
 """

-selectcontrast = pe.Node(interface=util.Select(), name="selectcontrast")
+selectcontrast = pe.Node(niu.Select(), name="selectcontrast")

 """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of
 the contrast estimate and a background image into one volume.
 """

-overlaystats = pe.Node(interface=fsl.Overlay(), name="overlaystats")
+overlaystats = pe.Node(fsl.Overlay(), name="overlaystats")
 overlaystats.inputs.stat_thresh = (3, 10)
 overlaystats.inputs.show_negative_stats = True
 overlaystats.inputs.auto_thresh_bg = True
@@ -176,7 +176,7 @@
 statistical volumes for a report of the first-level results.
 """

-slicestats = pe.Node(interface=fsl.Slicer(), name="slicestats")
+slicestats = pe.Node(fsl.Slicer(), name="slicestats")
 slicestats.inputs.all_axial = True
 slicestats.inputs.image_width = 750

@@ -232,14 +232,14 @@
 """

 # Specify the location of the data.
-data_dir = os.path.abspath('data')
+# data_dir = os.path.abspath('data')
 # Specify the subject directories
 subject_list = ['s1', 's3']
 # Map field names to individual subject runs.
 info = dict(func=[['subject_id', ['f3', 'f5', 'f7', 'f10']]],
             struct=[['subject_id', 'struct']])

-infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource")
+infosource = pe.Node(niu.IdentityInterface(fields=['subject_id']), name="infosource")

 """Here we set up iteration over all the subjects. The following line
 is a particular example of the flexibility of the system. The
@@ -260,22 +260,21 @@
 functionality.
 """

-datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
+inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']), name='inputnode')
+datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
                                      outfields=['func', 'struct']),
                      name='datasource')
-datasource.inputs.base_directory = data_dir
 datasource.inputs.template = '%s/%s.nii'
 datasource.inputs.template_args = info
 datasource.inputs.sort_filelist = True

 """We need to create a separate workflow to make the DARTEL template
 """

-datasource_dartel = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
+datasource_dartel = pe.MapNode(nio.DataGrabber(infields=['subject_id'],
                                                outfields=['struct']),
                                name='datasource_dartel',
                                iterfield=['subject_id'])
-datasource_dartel.inputs.base_directory = data_dir
 datasource_dartel.inputs.template = '%s/%s.nii'
 datasource_dartel.inputs.template_args = dict(struct=[['subject_id', 'struct']])
 datasource_dartel.inputs.sort_filelist = True
@@ -285,7 +284,7 @@
 This way we will be able to pick the right field flows later.
 """

-rename_dartel = pe.MapNode(util.Rename(format_string="subject_id_%(subject_id)s_struct"),
+rename_dartel = pe.MapNode(niu.Rename(format_string="subject_id_%(subject_id)s_struct"),
                            iterfield=['in_file', 'subject_id'],
                            name='rename_dartel')
 rename_dartel.inputs.subject_id = subject_list
@@ -307,7 +306,7 @@ def pickFieldFlow(dartel_flow_fields, subject_id):

     raise Exception

-pick_flow = pe.Node(util.Function(input_names=['dartel_flow_fields',
+pick_flow = pe.Node(niu.Function(input_names=['dartel_flow_fields',
                                               'subject_id'],
                                  output_names=['dartel_flow_field'],
                                  function=pickFieldFlow),
@@ -399,7 +398,9 @@ def subjectinfo(subject_id):
 level1 = pe.Workflow(name="level1")
 level1.base_dir = os.path.abspath('spm_dartel_tutorial/workingdir')

-level1.connect([(datasource_dartel, rename_dartel, [('struct', 'in_file')]),
+level1.connect([(inputnode, datasource, [('in_data', 'base_directory')]),
+                (inputnode, datasource_dartel, [('in_data', 'base_directory')]),
+                (datasource_dartel, rename_dartel, [('struct', 'in_file')]),
                 (rename_dartel, dartel_workflow, [('out_file', 'inputspec.structural_files')]),

                 (infosource, datasource, [('subject_id', 'subject_id')]),
@@ -437,9 +438,9 @@ def subjectinfo(subject_id):
 the mean image would be copied to that directory.
 """

-datasink = pe.Node(interface=nio.DataSink(), name="datasink")
+datasink = pe.Node(nio.DataSink(), name="datasink")
 datasink.inputs.base_directory = os.path.abspath('spm_dartel_tutorial/l1output')
-report = pe.Node(interface=nio.DataSink(), name='report')
+report = pe.Node(nio.DataSink(), name='report')
 report.inputs.base_directory = os.path.abspath('spm_dartel_tutorial/report')
 report.inputs.parameterization = False

@@ -501,10 +502,10 @@ def getstripdir(subject_id):
 """

 # setup a 1-sample t-test node
-onesamplettestdes = pe.Node(interface=spm.OneSampleTTestDesign(), name="onesampttestdes")
-l2estimate = pe.Node(interface=spm.EstimateModel(), name="level2estimate")
+onesamplettestdes = pe.Node(spm.OneSampleTTestDesign(), name="onesampttestdes")
+l2estimate = pe.Node(spm.EstimateModel(), name="level2estimate")
 l2estimate.inputs.estimation_method = {'Classical': 1}
-l2conestimate = pe.Node(interface=spm.EstimateContrast(), name="level2conestimate")
+l2conestimate = pe.Node(spm.EstimateContrast(), name="level2conestimate")
 cont1 = ('Group', 'T', ['mean'], [1])
 l2conestimate.inputs.contrasts = [cont1]
 l2conestimate.inputs.group_contrast = True
```
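In this example the new `inputnode` fans out to two consumers: both `datasource` and `datasource_dartel` take their `base_directory` from the same `in_data` field, so a single value set on the workflow reaches both grabbers. A sketch of running the first-level workflow after this change, with the old default data root and the plugin name from circle.yml:

```python
# One in_data value reaches both datasource and datasource_dartel through
# the fan-out connections added above. Sketch of a manual run.
import os
level1.inputs.inputnode.in_data = os.path.abspath('data')
level1.run(plugin='Linear')
```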
