Commit 4089ead

FIX: PEP8 - E128 continuation line under-indented for visual indent
1 parent 452ca79 commit 4089ead
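
For context, pycodestyle's E128 flags a continuation line that starts to the left of the opening bracket it should line up with. A minimal illustration of the pattern fixed throughout this commit (the names below are invented for illustration, not taken from the diff):

    # Triggers E128: the wrapped argument sits left of the visual indent.
    result = some_function(first_argument,
        second_argument)

    # Fixed: the wrapped argument lines up with the first argument after the
    # opening parenthesis.
    result = some_function(first_argument,
                           second_argument)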

118 files changed: +1868 additions, -1868 deletions

build_docs.py
Lines changed: 1 addition & 1 deletion

@@ -153,7 +153,7 @@ def zip_docs(self):
             if not relative.startswith('.doctrees'):
                 for f in files:
                     zf.write(os.path.join(root, f),
-                        os.path.join(relative, 'html_docs', f))
+                             os.path.join(relative, 'html_docs', f))
         zf.close()

examples/dmri_camino_dti.py
Lines changed: 2 additions & 2 deletions

@@ -265,7 +265,7 @@ def get_affine(volume):
 tractography.connect([(dtifit, fa,[("tensor_fitted","in_file")])])
 tractography.connect([(fa, analyzeheader_fa,[("fa","in_file")])])
 tractography.connect([(inputnode, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'),
-                       (('dwi', get_data_dims), 'data_dims')])])
+                                                    (('dwi', get_data_dims), 'data_dims')])])
 tractography.connect([(fa, fa2nii,[('fa','data_file')])])
 tractography.connect([(inputnode, fa2nii,[(('dwi', get_affine), 'affine')])])
 tractography.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])])

@@ -274,7 +274,7 @@ def get_affine(volume):
 tractography.connect([(dtifit, trace,[("tensor_fitted","in_file")])])
 tractography.connect([(trace, analyzeheader_trace,[("trace","in_file")])])
 tractography.connect([(inputnode, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'),
-                       (('dwi', get_data_dims), 'data_dims')])])
+                                                       (('dwi', get_data_dims), 'data_dims')])])
 tractography.connect([(trace, trace2nii,[('trace','data_file')])])
 tractography.connect([(inputnode, trace2nii,[(('dwi', get_affine), 'affine')])])
 tractography.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])])

examples/dmri_connectivity.py
Lines changed: 3 additions & 3 deletions

@@ -459,7 +459,7 @@ def select_aparc_annot(list_of_files):
 mapping.connect([(dtifit, fa,[("tensor_fitted","in_file")])])
 mapping.connect([(fa, analyzeheader_fa,[("fa","in_file")])])
 mapping.connect([(inputnode, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'),
-                       (('dwi', get_data_dims), 'data_dims')])])
+                                               (('dwi', get_data_dims), 'data_dims')])])
 mapping.connect([(fa, fa2nii,[('fa','data_file')])])
 mapping.connect([(inputnode, fa2nii,[(('dwi', get_affine), 'affine')])])
 mapping.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])])

@@ -468,7 +468,7 @@ def select_aparc_annot(list_of_files):
 mapping.connect([(dtifit, trace,[("tensor_fitted","in_file")])])
 mapping.connect([(trace, analyzeheader_trace,[("trace","in_file")])])
 mapping.connect([(inputnode, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'),
-                       (('dwi', get_data_dims), 'data_dims')])])
+                                                  (('dwi', get_data_dims), 'data_dims')])])
 mapping.connect([(trace, trace2nii,[('trace','data_file')])])
 mapping.connect([(inputnode, trace2nii,[(('dwi', get_affine), 'affine')])])
 mapping.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])])

@@ -485,7 +485,7 @@ def select_aparc_annot(list_of_files):
                  (camino2trackvis, trk2camino,[['trackvis','in_file']])
                  ])
 mapping.connect([(inputnode, camino2trackvis,[(('dwi', get_vox_dims), 'voxel_dims'),
-                       (('dwi', get_data_dims), 'data_dims')])])
+                                              (('dwi', get_data_dims), 'data_dims')])])
 
 """
 Here the CMTK connectivity mapping nodes are connected.

examples/dmri_group_connectivity_camino.py
Lines changed: 1 addition & 1 deletion

@@ -59,7 +59,7 @@
 import cmp
 from nipype.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline
 from nipype.workflows.dmri.connectivity.group_connectivity import (create_merge_networks_by_group_workflow,
-    create_merge_group_networks_workflow, create_average_networks_by_group_workflow)
+                                                                   create_merge_group_networks_workflow, create_average_networks_by_group_workflow)
 
 """
 Set the proper directories
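
The fix above lines the wrapped import names up with the opening parenthesis of the import group. PEP 8 also accepts a hanging indent, where nothing follows the opening bracket and each wrapped name gets one extra level of indentation; a sketch of that alternative (not what this commit does), using the same import:

    from nipype.workflows.dmri.connectivity.group_connectivity import (
        create_merge_networks_by_group_workflow,
        create_merge_group_networks_workflow,
        create_average_networks_by_group_workflow)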

examples/fmri_ants_openfmri.py
Lines changed: 26 additions & 26 deletions

@@ -350,7 +350,7 @@ def create_fs_reg_workflow(name='registration'):
 
     # Coregister the median to the surface
     bbregister = Node(freesurfer.BBRegister(registered_file=True),
-                  name='bbregister')
+                      name='bbregister')
     bbregister.inputs.init = 'fsl'
     bbregister.inputs.contrast_type = 't2'
     bbregister.inputs.out_fsl_file = True

@@ -577,8 +577,8 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
         TR = data['global']['const']['RepetitionTime'] / 1000.
     else:
         task_scan_key = os.path.join(base_dir, subject_id, 'BOLD',
-                    'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]),
-                    'scan_key.txt')
+                                     'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]),
+                                     'scan_key.txt')
     if os.path.exists(task_scan_key):
         TR = np.genfromtxt(task_scan_key)[1]
     else:

@@ -662,15 +662,15 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
     has_contrast = os.path.exists(contrast_file)
     if has_contrast:
         datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
-                                           'task_id', 'model_id'],
-                                outfields=['anat', 'bold', 'behav',
-                                           'contrasts']),
-                     name='datasource')
+                                                       'task_id', 'model_id'],
+                                             outfields=['anat', 'bold', 'behav',
+                                                        'contrasts']),
+                             name='datasource')
     else:
         datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
-                                           'task_id', 'model_id'],
-                                outfields=['anat', 'bold', 'behav']),
-                     name='datasource')
+                                                       'task_id', 'model_id'],
+                                             outfields=['anat', 'bold', 'behav']),
+                             name='datasource')
     datasource.inputs.base_directory = data_dir
     datasource.inputs.template = '*'
 

@@ -682,19 +682,19 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                                             'contrasts': ('models/model%03d/'
                                                           'task_contrasts.txt')}
         datasource.inputs.template_args = {'anat': [['subject_id']],
-                                'bold': [['subject_id', 'task_id']],
-                                'behav': [['subject_id', 'model_id',
-                                           'task_id', 'run_id']],
-                                'contrasts': [['model_id']]}
+                                           'bold': [['subject_id', 'task_id']],
+                                           'behav': [['subject_id', 'model_id',
+                                                      'task_id', 'run_id']],
+                                           'contrasts': [['model_id']]}
     else:
         datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                             'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                             'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                       'run%03d/cond*.txt')}
         datasource.inputs.template_args = {'anat': [['subject_id']],
-                                'bold': [['subject_id', 'task_id']],
-                                'behav': [['subject_id', 'model_id',
-                                           'task_id', 'run_id']]}
+                                           'bold': [['subject_id', 'task_id']],
+                                           'behav': [['subject_id', 'model_id',
+                                                      'task_id', 'run_id']]}
 
     datasource.inputs.sort_filelist = True
 

@@ -777,9 +777,9 @@ def check_behav_list(behav, run_id, conds):
                                  num_conds).tolist()
 
     reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id', 'conds'],
-                               output_names=['behav'],
-                               function=check_behav_list),
-                     name='reshape_behav')
+                                         output_names=['behav'],
+                                         function=check_behav_list),
+                            name='reshape_behav')
 
     wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
     wf.connect(datasource, 'behav', reshape_behav, 'behav')

@@ -891,9 +891,9 @@ def merge_files(copes, varcopes, zstats):
 
     mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                   'zstats'],
-                            output_names=['out_files', 'splits'],
-                            function=merge_files),
-                    name='merge_files')
+                                     output_names=['out_files', 'splits'],
+                                     function=merge_files),
+                        name='merge_files')
     wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                  [('copes', 'copes'),
                   ('varcopes', 'varcopes'),

@@ -911,7 +911,7 @@ def split_files(in_files, splits):
                                      output_names=['copes', 'varcopes',
                                                    'zstats'],
                                      function=split_files),
-                         name='split_files')
+                        name='split_files')
     wf.connect(mergefunc, 'splits', splitfunc, 'splits')
     wf.connect(registration, 'outputspec.transformed_files',
                splitfunc, 'in_files')

@@ -938,9 +938,9 @@ def get_subs(subject_id, conds, run_id, model_id, task_id):
     subs.append(('_model_id_%d' % model_id, 'model%03d' %model_id))
     subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
     subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
-                'mean'))
+                 'mean'))
     subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
-                'affine'))
+                 'affine'))
 
     for i in range(len(conds)):
         subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))

examples/fmri_freesurfer_smooth.py
Lines changed: 14 additions & 14 deletions

@@ -177,7 +177,7 @@
                 (realign, ApplyVolTransform,[('mean_image', 'source_file')]),
                 (ApplyVolTransform, Threshold,[('transformed_file','in_file')]),
                 (realign, art,[('realignment_parameters','realignment_parameters'),
-                    ('realigned_files','realigned_files')]),
+                               ('realigned_files','realigned_files')]),
                 (Threshold, art, [('binary_file', 'mask_file')]),
                 (realign, volsmooth, [('realigned_files', 'in_files')]),
                 (realign, surfsmooth, [('realigned_files', 'in_file')]),

@@ -225,10 +225,10 @@
 contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate")
 
 volanalysis.connect([(modelspec,level1design,[('session_info','session_info')]),
-                    (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]),
-                    (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'),
-                        ('beta_images','beta_images'),
-                        ('residual_image','residual_image')]),
+                     (level1design,level1estimate,[('spm_mat_file','spm_mat_file')]),
+                     (level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'),
+                                                       ('beta_images','beta_images'),
+                                                       ('residual_image','residual_image')]),
                      ])
 
 """

@@ -338,13 +338,13 @@
 
 # attach volume and surface model specification and estimation components
 l1pipeline.connect([(preproc, volanalysis, [('realign.realignment_parameters',
-                        'modelspec.realignment_parameters'),
-                    ('volsmooth.smoothed_files',
-                        'modelspec.functional_runs'),
-                    ('art.outlier_files',
-                        'modelspec.outlier_files'),
-                    ('threshold.binary_file',
-                        'level1design.mask_image')]),
+                                             'modelspec.realignment_parameters'),
+                                            ('volsmooth.smoothed_files',
+                                             'modelspec.functional_runs'),
+                                            ('art.outlier_files',
+                                             'modelspec.outlier_files'),
+                                            ('threshold.binary_file',
+                                             'level1design.mask_image')]),
                     (preproc, surfanalysis, [('realign.realignment_parameters',
                                               'modelspec.realignment_parameters'),
                                              ('surfsmooth.smoothed_file',

@@ -520,8 +520,8 @@ def subjectinfo(subject_id):
 level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                 (datasource,l1pipeline,[('func','inputnode.func')]),
                 (infosource,l1pipeline,[('subject_id','inputnode.subject_id'),
-                    (('subject_id', subjectinfo),
-                        'inputnode.session_info')]),
+                                        (('subject_id', subjectinfo),
+                                         'inputnode.session_info')]),
                 ])
 
 
examples/fmri_fsl.py
Lines changed: 6 additions & 6 deletions

@@ -73,8 +73,8 @@
 """
 
 img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
-                                    op_string = '',
-                                    suffix='_dtype'),
+                                                op_string = '',
+                                                suffix='_dtype'),
                        iterfield=['in_file'],
                        name='img2float')
 preproc.connect(inputnode, 'func', img2float, 'in_file')

@@ -129,8 +129,8 @@ def getmiddlevolume(func):
 """
 
 plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'),
-                        name='plot_motion',
-                        iterfield=['in_file'])
+                         name='plot_motion',
+                         iterfield=['in_file'])
 plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
 preproc.connect(motion_correct, 'par_file', plot_motion, 'in_file')
 

@@ -430,8 +430,8 @@ def getinormscale(medianvals):
                        name="copemerge")
 
 varcopemerge = pe.MapNode(interface=fsl.Merge(dimension='t'),
-                       iterfield=['in_files'],
-                       name="varcopemerge")
+                          iterfield=['in_files'],
+                          name="varcopemerge")
 
 """
 Use :class:`nipype.interfaces.fsl.L2Model` to generate subject and condition

examples/fmri_nipy_glm.py
Lines changed: 22 additions & 22 deletions

@@ -242,28 +242,28 @@ def subjectinfo(subject_id):
 l1pipeline.base_dir = os.path.abspath('nipy_tutorial/workingdir')
 
 l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
-                (datasource,realign,[('func','in_files')]),
-                (realign, compute_mask, [('mean_image','mean_volume')]),
-                (realign, coregister,[('mean_image', 'source'),
-                    ('realigned_files','apply_to_files')]),
-                (datasource, coregister,[('struct', 'target')]),
-                (coregister, smooth, [('coregistered_files', 'in_files')]),
-                (realign, modelspec,[('realignment_parameters','realignment_parameters')]),
-                (smooth, modelspec,[('smoothed_files','functional_runs')]),
-                (realign, art,[('realignment_parameters','realignment_parameters')]),
-                (coregister, art,[('coregistered_files','realigned_files')]),
-                (compute_mask,art,[('brain_mask','mask_file')]),
-                (art, modelspec,[('outlier_files','outlier_files')]),
-                (infosource, modelspec, [(("subject_id", subjectinfo), "subject_info")]),
-                (modelspec, model_estimate,[('session_info','session_info')]),
-                (compute_mask, model_estimate, [('brain_mask','mask')]),
-                (model_estimate, contrast_estimate, [("beta","beta"),
-                    ("nvbeta","nvbeta"),
-                    ("s2","s2"),
-                    ("dof", "dof"),
-                    ("axis", "axis"),
-                    ("constants", "constants"),
-                    ("reg_names", "reg_names")])
+                    (datasource,realign,[('func','in_files')]),
+                    (realign, compute_mask, [('mean_image','mean_volume')]),
+                    (realign, coregister,[('mean_image', 'source'),
+                                          ('realigned_files','apply_to_files')]),
+                    (datasource, coregister,[('struct', 'target')]),
+                    (coregister, smooth, [('coregistered_files', 'in_files')]),
+                    (realign, modelspec,[('realignment_parameters','realignment_parameters')]),
+                    (smooth, modelspec,[('smoothed_files','functional_runs')]),
+                    (realign, art,[('realignment_parameters','realignment_parameters')]),
+                    (coregister, art,[('coregistered_files','realigned_files')]),
+                    (compute_mask,art,[('brain_mask','mask_file')]),
+                    (art, modelspec,[('outlier_files','outlier_files')]),
+                    (infosource, modelspec, [(("subject_id", subjectinfo), "subject_info")]),
+                    (modelspec, model_estimate,[('session_info','session_info')]),
+                    (compute_mask, model_estimate, [('brain_mask','mask')]),
+                    (model_estimate, contrast_estimate, [("beta","beta"),
+                                                         ("nvbeta","nvbeta"),
+                                                         ("s2","s2"),
+                                                         ("dof", "dof"),
+                                                         ("axis", "axis"),
+                                                         ("constants", "constants"),
+                                                         ("reg_names", "reg_names")])
                     ])
 
 if __name__ == '__main__':