
Commit a663c5d

FIX: PEP8 - E231 missing whitespace after ','
1 parent 3951685 commit a663c5d

159 files changed: 1908 additions, 1908 deletions

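The rule behind this commit is pycodestyle's E231, which flags a comma (or colon/semicolon) that is not followed by whitespace; the fix is purely mechanical and changes no behaviour. As a rough illustration (not code from this commit), a violation and its fix look like the first two lines below, and the check can be re-run programmatically with the pycodestyle package, assuming it is installed and using one of the files touched here as an example path. A fixer such as autopep8 can apply the same whitespace change automatically.

    # E231: missing whitespace after ','
    point = (1,2)    # flagged by pycodestyle
    point = (1, 2)   # fixed

    # Minimal sketch for re-running the check (assumes `pip install pycodestyle`).
    import pycodestyle

    style = pycodestyle.StyleGuide(select=['E231'])
    report = style.check_files(['doc/conf.py'])  # example path from this commit
    print(report.total_errors)                   # expected to be 0 after this commit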

doc/conf.py

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@
 import sys, os
 
 nipypepath = os.path.abspath('..')
-sys.path.insert(1,nipypepath)
+sys.path.insert(1, nipypepath)
 
 import nipype
 
@@ -168,7 +168,7 @@
 #html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-html_sidebars = {'**': ['gse.html','localtoc.html', 'sidebar_versions.html', 'indexsidebar.html'],
+html_sidebars = {'**': ['gse.html', 'localtoc.html', 'sidebar_versions.html', 'indexsidebar.html'],
                  'searchresults': ['sidebar_versions.html', 'indexsidebar.html'],
                  'version': []}
 

doc/sphinxext/numpy_ext/docscrape.py

Lines changed: 19 additions & 19 deletions
@@ -28,7 +28,7 @@ def __init__(self, data):
            String with lines separated by '\n'.
 
         """
-        if isinstance(data,list):
+        if isinstance(data, list):
             self._str = data
         else:
             self._str = data.split('\n') # store string as list of lines
@@ -80,7 +80,7 @@ def is_unindented(line):
             return (line.strip() and (len(line.lstrip()) == len(line)))
         return self.read_to_condition(is_unindented)
 
-    def peek(self,n=0):
+    def peek(self, n=0):
         if self._l + n < len(self._str):
             return self[self._l + n]
         else:
@@ -116,10 +116,10 @@ def __init__(self, docstring, config={}):
 
         self._parse()
 
-    def __getitem__(self,key):
+    def __getitem__(self, key):
         return self._parsed_data[key]
 
-    def __setitem__(self,key,val):
+    def __setitem__(self, key, val):
         if key not in self._parsed_data:
             warn("Unknown section %s" % key)
         else:
@@ -139,13 +139,13 @@ def _is_at_section(self):
         l2 = self._doc.peek(1).strip() # ---------- or ==========
         return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))
 
-    def _strip(self,doc):
+    def _strip(self, doc):
         i = 0
         j = 0
-        for i,line in enumerate(doc):
+        for i, line in enumerate(doc):
             if line.strip(): break
 
-        for j,line in enumerate(doc[::-1]):
+        for j, line in enumerate(doc[::-1]):
             if line.strip(): break
 
         return doc[i:len(doc)-j]
@@ -173,7 +173,7 @@ def _read_sections(self):
         else:
             yield name, self._strip(data[2:])
 
-    def _parse_param_list(self,content):
+    def _parse_param_list(self, content):
         r = Reader(content)
         params = []
         while not r.eof():
@@ -186,7 +186,7 @@ def _parse_param_list(self,content):
             desc = r.read_to_next_unindented_line()
             desc = dedent_lines(desc)
 
-            params.append((arg_name,arg_type,desc))
+            params.append((arg_name, arg_type, desc))
 
         return params
 
@@ -288,7 +288,7 @@ def _parse(self):
         self._doc.reset()
         self._parse_summary()
 
-        for (section,content) in self._read_sections():
+        for (section, content) in self._read_sections():
             if not section.startswith('..'):
                 section = ' '.join([s.capitalize() for s in section.split(' ')])
                 if section in ('Parameters', 'Returns', 'Raises', 'Warns',
@@ -314,7 +314,7 @@ def _str_indent(self, doc, indent=4):
 
     def _str_signature(self):
         if self['Signature']:
-            return [self['Signature'].replace('*','\*')] + ['']
+            return [self['Signature'].replace('*', '\*')] + ['']
         else:
             return ['']
 
@@ -334,7 +334,7 @@ def _str_param_list(self, name):
         out = []
         if self[name]:
             out += self._str_header(name)
-            for param,param_type,desc in self[name]:
+            for param, param_type, desc in self[name]:
                 out += ['%s : %s' % (param, param_type)]
                 out += self._str_indent(desc)
             out += ['']
@@ -376,7 +376,7 @@ def _str_see_also(self, func_role):
     def _str_index(self):
         idx = self['index']
         out = []
-        out += ['.. index:: %s' % idx.get('default','')]
+        out += ['.. index:: %s' % idx.get('default', '')]
         for section, references in list(idx.items()):
             if section == 'default':
                 continue
@@ -393,15 +393,15 @@ def __str__(self, func_role=''):
             out += self._str_param_list(param_list)
         out += self._str_section('Warnings')
         out += self._str_see_also(func_role)
-        for s in ('Notes','References','Examples'):
+        for s in ('Notes', 'References', 'Examples'):
             out += self._str_section(s)
         for param_list in ('Attributes', 'Methods'):
            out += self._str_param_list(param_list)
         out += self._str_index()
         return '\n'.join(out)
 
 
-def indent(str,indent=4):
+def indent(str, indent=4):
     indent_str = ' '*indent
     if str is None:
         return indent_str
@@ -433,7 +433,7 @@ def __init__(self, func, role='func', doc=None, config={}):
                 # try to read signature
                 argspec = inspect.getargspec(func)
                 argspec = inspect.formatargspec(*argspec)
-                argspec = argspec.replace('*','\*')
+                argspec = argspec.replace('*', '\*')
                 signature = '%s%s' % (func_name, argspec)
             except TypeError as e:
                 signature = '%s()' % func_name
@@ -459,7 +459,7 @@ def __str__(self):
         if self._role:
             if self._role not in roles:
                 print("Warning: invalid role %s" % self._role)
-            out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''),
+            out += '.. %s:: %s\n \n\n' % (roles.get(self._role, ''),
                                           func_name)
 
         out += super(FunctionDoc, self).__str__(func_role=self._role)
@@ -499,7 +499,7 @@ def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
     def methods(self):
         if self._cls is None:
             return []
-        return [name for name,func in inspect.getmembers(self._cls)
+        return [name for name, func in inspect.getmembers(self._cls)
                 if ((not name.startswith('_')
                      or name in self.extra_public_methods)
                     and callable(func))]
@@ -508,5 +508,5 @@ def methods(self):
     def properties(self):
        if self._cls is None:
            return []
-        return [name for name,func in inspect.getmembers(self._cls)
+        return [name for name, func in inspect.getmembers(self._cls)
                 if not name.startswith('_') and func is None]
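
For context, docscrape.py parses numpydoc-style docstrings into named sections, and the methods touched above (_parse_param_list, _str_param_list, __getitem__, and friends) are the ones that build and render the (name, type, description) triples; docscrape_sphinx.py, changed next, renders the same parsed data as reST for Sphinx. A minimal usage sketch, assuming doc/sphinxext/numpy_ext is on sys.path and that this vintage of the module returns plain tuples as the diff shows:

    # Hypothetical example, not part of the commit.
    from numpy_ext.docscrape import NumpyDocString  # doc/sphinxext/numpy_ext/docscrape.py

    doc = NumpyDocString("""
        Add two numbers.

        Parameters
        ----------
        a : int
            First operand.
        b : int
            Second operand.
        """)

    # Sections are exposed via __getitem__; 'Parameters' is the list of
    # (name, type, description-lines) tuples built by _parse_param_list.
    for name, type_, desc in doc['Parameters']:
        print(name, type_, desc)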

doc/sphinxext/numpy_ext/docscrape_sphinx.py

Lines changed: 6 additions & 6 deletions
@@ -42,11 +42,11 @@ def _str_param_list(self, name):
         if self[name]:
             out += self._str_field_list(name)
             out += ['']
-            for param,param_type,desc in self[name]:
+            for param, param_type, desc in self[name]:
                 out += self._str_indent(['**%s** : %s' % (param.strip(),
                                                           param_type)])
                 out += ['']
-                out += self._str_indent(desc,8)
+                out += self._str_indent(desc, 8)
                 out += ['']
         return out
 
@@ -130,7 +130,7 @@ def _str_index(self):
         if len(idx) == 0:
             return out
 
-        out += ['.. index:: %s' % idx.get('default','')]
+        out += ['.. index:: %s' % idx.get('default', '')]
         for section, references in list(idx.items()):
             if section == 'default':
                 continue
@@ -151,9 +151,9 @@ def _str_references(self):
             # Latex collects all references to a separate bibliography,
             # so we need to insert links to it
             if sphinx.__version__ >= "0.6":
-                out += ['.. only:: latex','']
+                out += ['.. only:: latex', '']
             else:
-                out += ['.. latexonly::','']
+                out += ['.. latexonly::', '']
             items = []
             for line in self['References']:
                 m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
@@ -192,7 +192,7 @@ def __str__(self, indent=0, func_role="obj"):
         out += self._str_examples()
         for param_list in ('Attributes', 'Methods'):
             out += self._str_member_list(param_list)
-        out = self._str_indent(out,indent)
+        out = self._str_indent(out, indent)
         return '\n'.join(out)
 
 class SphinxFunctionDoc(SphinxDocString, FunctionDoc):

examples/dmri_camino_dti.py

Lines changed: 38 additions & 38 deletions
@@ -65,8 +65,8 @@ def get_affine(volume):
 """
 
 info = dict(dwi=[['subject_id', 'data']],
-            bvecs=[['subject_id','bvecs']],
-            bvals=[['subject_id','bvals']])
+            bvecs=[['subject_id', 'bvecs']],
+            bvals=[['subject_id', 'bvals']])
 
 infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                      name="infosource")
@@ -125,7 +125,7 @@ def get_affine(volume):
 Second, diffusion tensors are fit to the voxel-order data.
 """
 
-dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit')
+dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit')
 
 """
 Next, a lookup table is generated from the schemefile and the
@@ -196,15 +196,15 @@ def get_affine(volume):
 fractional anisotropy and diffusivity trace maps and their associated headers.
 """
 
-fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa')
-trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace')
+fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa')
+trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace')
 dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig')
 
 analyzeheader_fa = pe.Node(interface= camino.AnalyzeHeader(), name = "analyzeheader_fa")
 analyzeheader_fa.inputs.datatype = "double"
 analyzeheader_trace = analyzeheader_fa.clone('analyzeheader_trace')
 
-fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii')
+fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii')
 trace2nii = fa2nii.clone("trace2nii")
 
 """
@@ -213,7 +213,7 @@ def get_affine(volume):
 
 tractography = pe.Workflow(name='tractography')
 
-tractography.connect([(inputnode, bet,[("dwi","in_file")])])
+tractography.connect([(inputnode, bet, [("dwi", "in_file")])])
 
 """
 File format conversion
@@ -228,29 +228,29 @@ def get_affine(volume):
 Tensor fitting
 """
 
-tractography.connect([(image2voxel, dtifit,[['voxel_order','in_file']]),
-                      (fsl2scheme, dtifit,[['scheme','scheme_file']])
+tractography.connect([(image2voxel, dtifit, [['voxel_order', 'in_file']]),
+                      (fsl2scheme, dtifit, [['scheme', 'scheme_file']])
                      ])
 
 """
 Workflow for applying DT streamline tractogpahy
 """
 
-tractography.connect([(bet, trackdt,[("mask_file","seed_file")])])
-tractography.connect([(dtifit, trackdt,[("tensor_fitted","in_file")])])
+tractography.connect([(bet, trackdt, [("mask_file", "seed_file")])])
+tractography.connect([(dtifit, trackdt, [("tensor_fitted", "in_file")])])
 
 """
 Workflow for applying PICo
 """
 
-tractography.connect([(bet, trackpico,[("mask_file","seed_file")])])
-tractography.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])])
-tractography.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])])
-tractography.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])])
-tractography.connect([(picopdfs, trackpico,[("pdfs","in_file")])])
+tractography.connect([(bet, trackpico, [("mask_file", "seed_file")])])
+tractography.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])])
+tractography.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])])
+tractography.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])])
+tractography.connect([(picopdfs, trackpico, [("pdfs", "in_file")])])
 
 # ProcStreamlines might throw memory errors - comment this line out in such case
-tractography.connect([(trackdt, procstreamlines,[("tracked","in_file")])])
+tractography.connect([(trackdt, procstreamlines, [("tracked", "in_file")])])
 
 
 """
@@ -262,31 +262,31 @@ def get_affine(volume):
 will be correct and readable.
 """
 
-tractography.connect([(dtifit, fa,[("tensor_fitted","in_file")])])
-tractography.connect([(fa, analyzeheader_fa,[("fa","in_file")])])
-tractography.connect([(inputnode, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'),
+tractography.connect([(dtifit, fa, [("tensor_fitted", "in_file")])])
+tractography.connect([(fa, analyzeheader_fa, [("fa", "in_file")])])
+tractography.connect([(inputnode, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'),
                        (('dwi', get_data_dims), 'data_dims')])])
-tractography.connect([(fa, fa2nii,[('fa','data_file')])])
-tractography.connect([(inputnode, fa2nii,[(('dwi', get_affine), 'affine')])])
-tractography.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])])
+tractography.connect([(fa, fa2nii, [('fa', 'data_file')])])
+tractography.connect([(inputnode, fa2nii, [(('dwi', get_affine), 'affine')])])
+tractography.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])])
 
 
-tractography.connect([(dtifit, trace,[("tensor_fitted","in_file")])])
-tractography.connect([(trace, analyzeheader_trace,[("trace","in_file")])])
-tractography.connect([(inputnode, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'),
+tractography.connect([(dtifit, trace, [("tensor_fitted", "in_file")])])
+tractography.connect([(trace, analyzeheader_trace, [("trace", "in_file")])])
+tractography.connect([(inputnode, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'),
                        (('dwi', get_data_dims), 'data_dims')])])
-tractography.connect([(trace, trace2nii,[('trace','data_file')])])
-tractography.connect([(inputnode, trace2nii,[(('dwi', get_affine), 'affine')])])
-tractography.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])])
+tractography.connect([(trace, trace2nii, [('trace', 'data_file')])])
+tractography.connect([(inputnode, trace2nii, [(('dwi', get_affine), 'affine')])])
+tractography.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])])
 
-tractography.connect([(dtifit, dteig,[("tensor_fitted","in_file")])])
+tractography.connect([(dtifit, dteig, [("tensor_fitted", "in_file")])])
 
-tractography.connect([(trackpico, cam2trk_pico, [('tracked','in_file')])])
-tractography.connect([(trackdt, cam2trk_dt, [('tracked','in_file')])])
-tractography.connect([(inputnode, cam2trk_pico,[(('dwi', get_vox_dims), 'voxel_dims'),
+tractography.connect([(trackpico, cam2trk_pico, [('tracked', 'in_file')])])
+tractography.connect([(trackdt, cam2trk_dt, [('tracked', 'in_file')])])
+tractography.connect([(inputnode, cam2trk_pico, [(('dwi', get_vox_dims), 'voxel_dims'),
                        (('dwi', get_data_dims), 'data_dims')])])
 
-tractography.connect([(inputnode, cam2trk_dt,[(('dwi', get_vox_dims), 'voxel_dims'),
+tractography.connect([(inputnode, cam2trk_dt, [(('dwi', get_vox_dims), 'voxel_dims'),
                        (('dwi', get_data_dims), 'data_dims')])])
 
 
@@ -298,10 +298,10 @@ def get_affine(volume):
 
 workflow = pe.Workflow(name="workflow")
 workflow.base_dir = os.path.abspath('camino_dti_tutorial')
-workflow.connect([(infosource,datasource,[('subject_id', 'subject_id')]),
-                  (datasource,tractography,[('dwi','inputnode.dwi'),
-                                            ('bvals','inputnode.bvals'),
-                                            ('bvecs','inputnode.bvecs')
+workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
+                  (datasource, tractography, [('dwi', 'inputnode.dwi'),
                                               ('bvals', 'inputnode.bvals'),
                                               ('bvecs', 'inputnode.bvecs')
                   ])
                   ])
 """
