
Commit 5e780bf (2 parents: cda31e9 + 8a5a190)

Merge branch 'master' into enh/AddCSVRow

Conflicts:
    CHANGES
    nipype/algorithms/misc.py

Merge after updating master to upstream.


57 files changed: +2433, -546 lines (only a subset of the changed files is shown below).

.travis.yml

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ before_install:
 - sudo rm -rf /dev/shm
 - sudo ln -s /run/shm /dev/shm
 - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh)
-- travis_retry sudo apt-get install -qq --no-install-recommends fsl afni
+- travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix
 - travis_retry sudo apt-get install -qq fsl-atlases
 - source /etc/fsl/fsl.sh

CHANGES

Lines changed: 17 additions & 2 deletions
@@ -2,10 +2,24 @@ Next Release
 ============
 
 * ENH: New miscelaneous interface: AddCSVRow
-* API: Interfaces to external packages are no longer available in the top-level ``nipype`` namespace, and must be imported directly (e.g. ``from nipype.interfaces import fsl``).
+* API: Interfaces to external packages are no longer available in the top-level
+  ``nipype`` namespace, and must be imported directly (e.g.
+  ``from nipype.interfaces import fsl``).
+* ENH: New FSL interface: ProbTrackX2
+* ENH: New misc algorithm: NormalizeProbabilityMapSet
+* ENH: Support for elastix via a set of new interfaces: Registration, ApplyWarp,
+  AnalyzeWarp, PointsWarp, and EditTransform
 * ENH: New ANTs interface: ApplyTransformsToPoints
+* ENH: New metrics group in algorithms. Now Distance, Overlap, and FuzzyOverlap
+  are found in nipype.algorithms.metrics instead of misc
+* ENH: New interface in algorithms.metrics: ErrorMap (a voxel-wise diff map).
 * ENH: New FreeSurfer workflow: create_skullstripped_recon_flow()
+* ENH: New data grabbing interface that works over SSH connections, SSHDataGrabber
+* ENH: New color mode for write_graph
+* ENH: You can now force MapNodes to be run serially
+* ENH: New ANTs interface: LaplacianThickness
 * FIX: MRTrix tracking algorithms were ignoring mask parameters.
+* FIX: FNIRT registration pathway and associated OpenFMRI example script
 
 Release 0.9.2 (January 31, 2014)
 ============

@@ -26,7 +40,8 @@ Release 0.9.0 (December 20, 2013)
 * ENH: new tools for defining workflows: JoinNode, synchronize and itersource
 * ENH: W3C PROV support with optional RDF export built into Nipype
 * ENH: Added support for Simple Linux Utility Resource Management (SLURM)
-* ENH: AFNI interfaces refactor, prefix, suffix are replaced by "flexible_%s_templates"
+* ENH: AFNI interfaces refactor, prefix, suffix are replaced by
+  "flexible_%s_templates"
 * ENH: New SPM interfaces:
   - spm.ResliceToReference,
   - spm.DicomImport
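
Note: the API entry above removes the implicit re-exports, so interface modules must be imported explicitly. A minimal sketch of the new style (the BET usage is illustrative, not part of this diff):

    # Top-level `import nipype` no longer exposes interface subpackages;
    # import the module you need explicitly.
    from nipype.interfaces import fsl      # explicit import now required
    from nipype.interfaces import elastix  # interfaces added in this release

    bet = fsl.BET()                        # FSL brain-extraction interface
    bet.inputs.in_file = 'structural.nii'  # hypothetical input file
    print(bet.cmdline)                     # shell command nipype would run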

Vagrantfile

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ $HOME/miniconda/bin/conda install --yes pip numpy scipy nose traits networkx
 $HOME/miniconda/bin/conda install --yes dateutil ipython-notebook matplotlib
 $HOME/miniconda/bin/pip install nibabel --use-mirrors
 $HOME/miniconda/bin/pip install https://github.com/RDFLib/rdflib/archive/master.zip
-$HOME/miniconda/bin/pip install https://github.com/satra/prov/archive/enh/rdf.zip
+$HOME/miniconda/bin/pip install https://github.com/trungdong/prov/archive/rdf.zip
 $HOME/miniconda/bin/pip install https://github.com/nipy/nipype/archive/master.zip
 SCRIPT

doc/users/install.rst

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ Download
 --------
 
 Release 0.9.2: [`zip <https://github.com/nipy/nipype/archive/0.9.2.zip>`__ `tar.gz
-<https://github.com/nipy/nipype/archive/0.9.1.tar.gz>`__]
+<https://github.com/nipy/nipype/archive/0.9.2.tar.gz>`__]
 
 Development: [`zip <http://github.com/nipy/nipype/zipball/master>`__ `tar.gz
 <http://github.com/nipy/nipype/tarball/master>`__]

examples/fmri_openfmri.py

Lines changed: 61 additions & 24 deletions
@@ -87,7 +87,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
 
 
 def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
-                             task_id=None, output_dir=None):
+                             task_id=None, output_dir=None, subj_prefix='*'):
     """Analyzes an open fmri dataset
 
     Parameters
@@ -121,22 +121,22 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
     Set up openfmri data specific components
     """
 
-    subjects = [path.split(os.path.sep)[-1] for path in
-                glob(os.path.join(data_dir, 'sub*'))]
+    subjects = sorted([path.split(os.path.sep)[-1] for path in
+                       glob(os.path.join(data_dir, subj_prefix))])
 
     infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                        'model_id',
                                                        'task_id']),
                          name='infosource')
-    if subject is None:
-        infosource.iterables = [('subject_id', subjects[:2]),
+    if len(subject) == 0:
+        infosource.iterables = [('subject_id', subjects),
                                 ('model_id', [model_id]),
-                                ('task_id', [task_id])]
+                                ('task_id', task_id)]
     else:
         infosource.iterables = [('subject_id',
-                                 [subjects[subjects.index(subject)]]),
+                                 [subjects[subjects.index(subj)] for subj in subject]),
                                 ('model_id', [model_id]),
-                                ('task_id', [task_id])]
+                                ('task_id', task_id)]
 
     subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                  'task_id', 'model_id'],
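
Note: the subject listing above is now sorted and prefix-configurable; a standalone sketch of that selection logic (the directory layout is hypothetical):

    import os
    from glob import glob

    def list_subjects(data_dir, subj_prefix='sub*'):
        # Glob with a configurable prefix, then sort so the iteration
        # order over subjects is deterministic across runs.
        return sorted(path.split(os.path.sep)[-1]
                      for path in glob(os.path.join(data_dir, subj_prefix)))

    # e.g. list_subjects('/data/ds107') -> ['sub001', 'sub002', ...]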
@@ -156,7 +156,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                          name='datasource')
     datasource.inputs.base_directory = data_dir
     datasource.inputs.template = '*'
-    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
+    datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                         'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                         'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                   'run%03d/cond*.txt'),
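
Note: the field_template values are ordinary %-format patterns that the datasource fills in per subject/task; a toy expansion of the updated 'anat' entry (subject and task values invented):

    field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                      'bold': '%s/BOLD/task%03d_r*/bold.nii.gz'}

    print(field_template['anat'] % 'sub001')
    # -> 'sub001/anatomy/T1_001.nii.gz'
    print(field_template['bold'] % ('sub001', 1))
    # -> 'sub001/BOLD/task001_r*/bold.nii.gz' (the * is left for globbing)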
@@ -200,13 +200,19 @@ def get_highpass(TR, hpcutoff):
     def get_contrasts(contrast_file, task_id, conds):
         import numpy as np
         contrast_def = np.genfromtxt(contrast_file, dtype=object)
+        if len(contrast_def.shape) == 1:
+            contrast_def = contrast_def[None, :]
         contrasts = []
         for row in contrast_def:
             if row[0] != 'task%03d' % task_id:
                 continue
-            con = [row[1], 'T', ['cond%03d' % i for i in range(len(conds))],
+            con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                    row[2:].astype(float).tolist()]
             contrasts.append(con)
+        # add auto contrasts for each column
+        for i, cond in enumerate(conds):
+            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
+            contrasts.append(con)
         return contrasts
 
     contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
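
Note: the new [None, :] guard keeps a single-row contrast file from degenerating into a 1-D array; a self-contained check (the contrast values are made up):

    import numpy as np

    # A one-line contrast file parses as a 1-D array; iterating over it
    # would then yield scalars rather than rows.
    contrast_def = np.array(['task001', 'faces>houses', 1.0, -1.0], dtype=object)
    if len(contrast_def.shape) == 1:
        contrast_def = contrast_def[None, :]  # promote to a single 2-D row

    for row in contrast_def:
        # row[2:] holds the contrast weights for cond001, cond002, ...
        print(row[1], row[2:].astype(float).tolist())
    # -> faces>houses [1.0, -1.0]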
@@ -229,8 +235,19 @@ def get_contrasts(contrast_file, task_id, conds):
                          name="modelspec")
     modelspec.inputs.input_units = 'secs'
 
+    def check_behav_list(behav):
+        out_behav = []
+        if isinstance(behav, basestring):
+            behav = [behav]
+        for val in behav:
+            if not isinstance(val, list):
+                out_behav.append([val])
+            else:
+                out_behav.append(val)
+        return out_behav
+
     wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
-    wf.connect(datasource, 'behav', modelspec, 'event_files')
+    wf.connect(datasource, ('behav', check_behav_list), modelspec, 'event_files')
     wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
     wf.connect(subjinfo, 'conds', contrastgen, 'conds')
     wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
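
Note: check_behav_list normalizes whatever the grabber returns into a list of per-run lists before it reaches modelspec; a quick illustration with a condensed copy of the helper (Python 2, matching the basestring check above; file names invented):

    def check_behav_list(behav):
        # Condensed copy of the helper above: wrap bare strings and
        # bare items into lists, leave existing lists untouched.
        if isinstance(behav, basestring):
            behav = [behav]
        return [val if isinstance(val, list) else [val] for val in behav]

    print(check_behav_list('cond001.txt'))
    # -> [['cond001.txt']]
    print(check_behav_list(['cond001.txt', 'cond002.txt']))
    # -> [['cond001.txt'], ['cond002.txt']]
    print(check_behav_list([['cond001.txt', 'cond002.txt']]))
    # -> [['cond001.txt', 'cond002.txt']]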
@@ -288,33 +305,41 @@ def num_copes(files):
     wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
     wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
     registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
+    registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
+    registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'
 
-    def merge_files(copes, varcopes):
+    def merge_files(copes, varcopes, zstats):
         out_files = []
         splits = []
         out_files.extend(copes)
         splits.append(len(copes))
         out_files.extend(varcopes)
         splits.append(len(varcopes))
+        out_files.extend(zstats)
+        splits.append(len(zstats))
         return out_files, splits
 
-    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
+    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
+                                                  'zstats'],
                                      output_names=['out_files', 'splits'],
                                      function=merge_files),
                         name='merge_files')
     wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                  [('copes', 'copes'),
                   ('varcopes', 'varcopes'),
+                  ('zstats', 'zstats'),
                   ])])
     wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')
 
     def split_files(in_files, splits):
-        copes = in_files[:splits[1]]
-        varcopes = in_files[splits[1]:]
-        return copes, varcopes
+        copes = in_files[:splits[0]]
+        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
+        zstats = in_files[(splits[0] + splits[1]):]
+        return copes, varcopes, zstats
 
     splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
-                                     output_names=['copes', 'varcopes'],
+                                     output_names=['copes', 'varcopes',
+                                                   'zstats'],
                                      function=split_files),
                         name='split_files')
     wf.connect(mergefunc, 'splits', splitfunc, 'splits')
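
Note: besides carrying zstats through, split_files fixes an indexing slip: the old slices used splits[1] (the varcope count) where splits[0] (the cope count) was meant, which only worked when the two counts happened to match. A self-contained round-trip check of the new logic (file names invented):

    def merge_files(copes, varcopes, zstats):
        # Concatenate the groups and record each group's length.
        out_files, splits = [], []
        for group in (copes, varcopes, zstats):
            out_files.extend(group)
            splits.append(len(group))
        return out_files, splits

    def split_files(in_files, splits):
        # Invert merge_files using the recorded segment lengths.
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    merged, splits = merge_files(['c1', 'c2'], ['v1', 'v2'], ['z1', 'z2'])
    assert split_files(merged, splits) == (['c1', 'c2'], ['v1', 'v2'], ['z1', 'z2'])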
@@ -330,18 +355,23 @@ def get_subs(subject_id, conds, model_id, task_id):
         subs = [('_subject_id_%s_' % subject_id, '')]
         subs.append(('_model_id_%d' % model_id, 'model%03d' %model_id))
         subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
-        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
+        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                      'mean'))
+        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
+                     'affine'))
+
         for i in range(len(conds)):
             subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
             subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
             subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
             subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
             subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
-            subs.append(('_warpall%d/cope1_warp_warp.' % i,
+            subs.append(('_warpall%d/cope1_warp.' % i,
                          'cope%02d.' % (i + 1)))
-            subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
+            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                          'varcope%02d.' % (i + 1)))
+            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
+                         'zstat%02d.' % (i + 1)))
         return subs
 
     subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
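
Note: the pairs built by get_subs are plain (pattern, replacement) strings that the datasink applies to its output paths; a minimal sketch of that renaming (the path and pairs are invented):

    # Substitutions are applied as sequential string replacements.
    subs = [('_subject_id_sub001_', ''),
            ('_flameo0/zstat1.', 'zstat01.')]

    path = '_subject_id_sub001_/_flameo0/zstat1.nii.gz'
    for pattern, replacement in subs:
        path = path.replace(pattern, replacement)
    print(path)  # -> '/zstat01.nii.gz'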
@@ -368,8 +398,11 @@ def get_subs(subject_id, conds, model_id, task_id):
     wf.connect([(splitfunc, datasink,
                  [('copes', 'copes.mni'),
                   ('varcopes', 'varcopes.mni'),
+                  ('zstats', 'zstats.mni'),
                   ])])
     wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
+    wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat')
+    wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target')
 
     """
     Set processing parameters
@@ -392,12 +425,15 @@ def get_subs(subject_id, conds, model_id, task_id):
     parser = argparse.ArgumentParser(prog='fmri_openfmri.py',
                                      description=__doc__)
     parser.add_argument('-d', '--datasetdir', required=True)
-    parser.add_argument('-s', '--subject', default=None,
+    parser.add_argument('-s', '--subject', default=[],
+                        nargs='+', type=str,
                         help="Subject name (e.g. 'sub001')")
     parser.add_argument('-m', '--model', default=1,
                         help="Model index" + defstr)
-    parser.add_argument('-t', '--task', default=1,
-                        help="Task index" + defstr)
+    parser.add_argument('-x', '--subjectprefix', default='sub*',
+                        help="Subject prefix" + defstr)
+    parser.add_argument('-t', '--task', default=1, #nargs='+',
+                        type=int, help="Task index" + defstr)
     parser.add_argument("-o", "--output_dir", dest="outdir",
                         help="Output directory base")
     parser.add_argument("-w", "--work_dir", dest="work_dir",
@@ -421,7 +457,8 @@ def get_subs(subject_id, conds, model_id, task_id):
     wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir),
                                   subject=args.subject,
                                   model_id=int(args.model),
-                                  task_id=int(args.task),
+                                  task_id=[int(args.task)],
+                                  subj_prefix=args.subjectprefix,
                                   output_dir=outdir)
     wf.base_dir = work_dir
     if args.plugin_args:
