
Commit ca8d229

PEP8
1 parent 546665c commit ca8d229

4 files changed: +55 -48 lines

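The hunks below are mechanical PEP8 style cleanups rather than behaviour changes: backslash continuations are dropped in favour of implicit continuation inside parentheses, wrapped arguments are re-indented, and spaces are added around assignments. A minimal, self-contained illustration of that pattern (the names here are made up for illustration, not code from this commit):

# Hypothetical example of the continuation style this commit moves toward.
some_value, another_value = 1, 2

# Before: explicit backslash continuation
total = some_value + \
    another_value

# After: implicit continuation inside parentheses (PEP 8)
total = (some_value +
         another_value)
print(total)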

nipype/pipeline/plugins/base.py

17 additions, 15 deletions
@@ -218,7 +218,7 @@ def run(self, graph, config, updatehash=False):
         # setup polling - TODO: change to threaded model
         notrun = []
         while np.any(self.proc_done == False) | \
-            np.any(self.proc_pending == True):
+                np.any(self.proc_pending == True):
             toappend = []
             # trigger callbacks for any pending results
             while self.pending_tasks:
@@ -297,11 +297,12 @@ def _submit_mapnode(self, jobid):
         self.procs.extend(mapnodesubids)
         self.depidx = ssp.vstack((self.depidx,
                                   ssp.lil_matrix(np.zeros((numnodes,
-                                                          self.depidx.shape[1])))),
+                                                           self.depidx.shape[1])))),
                                  'lil')
         self.depidx = ssp.hstack((self.depidx,
-                                  ssp.lil_matrix(np.zeros((self.depidx.shape[0],
-                                                           numnodes)))),
+                                  ssp.lil_matrix(
+                                      np.zeros((self.depidx.shape[0],
+                                                numnodes)))),
                                  'lil')
         self.depidx[-numnodes:, jobid] = 1
         self.proc_done = np.concatenate((self.proc_done,
@@ -315,7 +316,7 @@ def _send_procs_to_workers(self, updatehash=False, slots=None, graph=None):
         """
         while np.any(self.proc_done == False):
             # Check to see if a job is available
-            jobids = np.flatnonzero((self.proc_done == False) & \
+            jobids = np.flatnonzero((self.proc_done == False) &
                                     (self.depidx.sum(axis=0) == 0).__array__())
             if len(jobids) > 0:
                 # send all available jobs
@@ -336,20 +337,21 @@ def _send_procs_to_workers(self, updatehash=False, slots=None, graph=None):
                 self.proc_done[jobid] = True
                 self.proc_pending[jobid] = True
                 # Send job to task manager and add to pending tasks
-                logger.info('Executing: %s ID: %d' % \
-                            (self.procs[jobid]._id, jobid))
+                logger.info('Executing: %s ID: %d' %
+                            (self.procs[jobid]._id, jobid))
                 if self._status_callback:
                     self._status_callback(self.procs[jobid], 'start')
                 continue_with_submission = True
                 if str2bool(self.procs[jobid].config['execution']['local_hash_check']):
                     logger.debug('checking hash locally')
                     try:
-                        hash_exists, _, _, _ = self.procs[jobid].hash_exists()
+                        hash_exists, _, _, _ = self.procs[
+                            jobid].hash_exists()
                         logger.debug('Hash exists %s' % str(hash_exists))
                         if (hash_exists and
-                            (self.procs[jobid].overwrite == False or
-                             (self.procs[jobid].overwrite == None and
-                              not self.procs[jobid]._interface.always_run))):
+                                (self.procs[jobid].overwrite == False or
+                                 (self.procs[jobid].overwrite == None and
+                                  not self.procs[jobid]._interface.always_run))):
                             continue_with_submission = False
                             self._task_finished_cb(jobid)
                             self._remove_node_dirs()
@@ -385,7 +387,7 @@ def _task_finished_cb(self, jobid):

         This is called when a job is completed.
         """
-        logger.info('[Job finished] jobname: %s jobid: %d' % \
+        logger.info('[Job finished] jobname: %s jobid: %d' %
                     (self.procs[jobid]._id, jobid))
         if self._status_callback:
             self._status_callback(self.procs[jobid], 'end')
@@ -431,7 +433,7 @@ def _remove_node_dirs(self):
                 self.refidx[idx, idx] = -1
                 outdir = self.procs[idx]._output_directory()
                 logger.info(('[node dependencies finished] '
-                             'removing node: %s from directory %s') % \
+                             'removing node: %s from directory %s') %
                             (self.procs[idx]._id, outdir))
                 shutil.rmtree(outdir)

@@ -563,14 +565,14 @@ def run(self, graph, config, updatehash=False):
             dependencies[idx] = [nodes.index(prevnode) for prevnode in
                                  graph.predecessors(node)]
         self._submit_graph(pyfiles, dependencies, nodes)
-
+
    def _get_args(self, node, keywords):
        values = ()
        for keyword in keywords:
            value = getattr(self, "_" + keyword)
            if hasattr(node, "plugin_args") and isinstance(node.plugin_args, dict) and keyword in node.plugin_args:
                if 'overwrite' in node.plugin_args and node.plugin_args['overwrite']:
-                   value = node.plugin_args[keyword]
+                   value = node.plugin_args[keyword]
                else:
                    value += node.plugin_args[keyword]
            else:
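For context on the `_send_procs_to_workers` hunk above, here is a minimal standalone sketch of how the ready-job selection works. The arrays below are toy stand-ins for the plugin's attributes (four hypothetical jobs), not nipype's real state; the selection line itself mirrors the reformatted code.

import numpy as np
import scipy.sparse as ssp

# Toy bookkeeping: job 0 is already dispatched and finished (its row cleared),
# jobs 2 and 3 still depend on job 1.
proc_done = np.array([True, False, False, False])
depidx = ssp.lil_matrix(np.array([[0, 0, 0, 0],
                                  [0, 0, 1, 1],
                                  [0, 0, 0, 0],
                                  [0, 0, 0, 0]]))

# Same pattern as the reformatted line: not yet dispatched AND no pending
# dependencies (column sum of the dependency matrix is zero).
jobids = np.flatnonzero((proc_done == False) &
                        (depidx.sum(axis=0) == 0).__array__())
print(jobids)  # [1] -- only job 1 is ready; jobs 2 and 3 still wait on it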

nipype/pipeline/plugins/dagman.py

17 additions, 17 deletions
@@ -59,28 +59,29 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
         # as jobs in the DAG
         for idx, pyscript in enumerate(pyfiles):
             node = nodes[idx]
-            template, submit_specs = self._get_args(node, ["template", "submit_specs"])
+            template, submit_specs = self._get_args(
+                node, ["template", "submit_specs"])
             # XXX redundant with previous value? or could it change between
-            # scripts?
+            # scripts?
             batch_dir, name = os.path.split(pyscript)
             name = '.'.join(name.split('.')[:-1])
             submitspec = '\n'.join(
-                    (template,
-                     'executable = %s' % sys.executable,
-                     'arguments = %s' % pyscript,
-                     'output = %s' % os.path.join(batch_dir,
-                                                  '%s.out' % name),
-                     'error = %s' % os.path.join(batch_dir,
-                                                 '%s.err' % name),
-                     'log = %s' % os.path.join(batch_dir,
-                                               '%s.log' % name),
-                     'getenv = True',
-                     submit_specs,
-                     'queue'
-                     ))
+                (template,
+                 'executable = %s' % sys.executable,
+                 'arguments = %s' % pyscript,
+                 'output = %s' % os.path.join(batch_dir,
+                                              '%s.out' % name),
+                 'error = %s' % os.path.join(batch_dir,
+                                             '%s.err' % name),
+                 'log = %s' % os.path.join(batch_dir,
+                                           '%s.log' % name),
+                 'getenv = True',
+                 submit_specs,
+                 'queue'
+                 ))
             # write submit spec for this job
             submitfile = os.path.join(batch_dir,
-                                     '%s.submit' % name)
+                                      '%s.submit' % name)
             with open(submitfile, 'wt') as submitfileprt:
                 submitfileprt.writelines(submitspec)
                 submitfileprt.close()
@@ -100,4 +101,3 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
                           self._dagman_args)
         cmd.run()
         logger.info('submitted all jobs to Condor DAGMan')
-
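For reference, a small sketch of the Condor submit spec that the reformatted `'\n'.join((...))` block assembles. The `template`, `submit_specs`, and script path below are placeholder values, not taken from a real workflow; the join pattern matches the code above.

import os
import sys

# Placeholder inputs (in the plugin these come from the workflow and plugin args).
template = 'universe = vanilla'
submit_specs = 'request_memory = 1GB'
pyscript = '/tmp/batch/pyscript_node0.py'
batch_dir, name = os.path.split(pyscript)
name = '.'.join(name.split('.')[:-1])

# Same join pattern as dagman.py after this commit.
submitspec = '\n'.join(
    (template,
     'executable = %s' % sys.executable,
     'arguments = %s' % pyscript,
     'output = %s' % os.path.join(batch_dir, '%s.out' % name),
     'error = %s' % os.path.join(batch_dir, '%s.err' % name),
     'log = %s' % os.path.join(batch_dir, '%s.log' % name),
     'getenv = True',
     submit_specs,
     'queue'))
print(submitspec)  # one DAGMan job description per node script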

nipype/pipeline/plugins/pbsgraph.py

5 additions, 4 deletions
@@ -32,8 +32,9 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
             fp.writelines('#!/usr/bin/env sh\n')
             for idx, pyscript in enumerate(pyfiles):
                 node = nodes[idx]
-                template, qsub_args = self._get_args(node, ["template", "qsub_args"])
-
+                template, qsub_args = self._get_args(
+                    node, ["template", "qsub_args"])
+
                 batch_dir, name = os.path.split(pyscript)
                 name = '.'.join(name.split('.')[:-1])
                 batchscript = '\n'.join((template,
@@ -45,7 +46,8 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
                     batchfp.close()
                 deps = ''
                 if idx in dependencies:
-                    values = ['$job%05d' % jobid for jobid in dependencies[idx]]
+                    values = ['$job%05d' %
+                              jobid for jobid in dependencies[idx]]
                     if len(values):
                         deps = '-W depend=afterok:%s' % ':'.join(values)
                 fp.writelines('job%05d=`qsub %s %s %s`\n' % (idx, deps,
@@ -55,4 +57,3 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
         cmd.inputs.args = '%s' % submitjobsfile
         cmd.run()
         logger.info('submitted all jobs to queue')
-
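As context for the dependency hunk above, a minimal sketch of how the `-W depend=afterok:` argument for qsub is assembled. The `dependencies` mapping below is toy data: it maps a job index to the indices of jobs it must wait for.

# Toy stand-in: job 2 depends on jobs 0 and 1.
dependencies = {2: [0, 1]}
idx = 2

deps = ''
if idx in dependencies:
    values = ['$job%05d' %
              jobid for jobid in dependencies[idx]]
    if len(values):
        deps = '-W depend=afterok:%s' % ':'.join(values)
print(deps)  # -W depend=afterok:$job00000:$job00001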

nipype/pipeline/plugins/sgegraph.py

16 additions, 12 deletions
@@ -25,6 +25,7 @@ class SGEGraphPlugin(GraphPluginBase):
 #$ -V
 #$ -S /bin/bash
 """
+
     def __init__(self, **kwargs):
         self._qsub_args = ''
         if 'plugin_args' in kwargs:
@@ -44,8 +45,9 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
             fp.writelines('#!/usr/bin/env bash\n')
             for idx, pyscript in enumerate(pyfiles):
                 node = nodes[idx]
-                template, qsub_args = self._get_args(node, ["template", "qsub_args"])
-
+                template, qsub_args = self._get_args(
+                    node, ["template", "qsub_args"])
+
                 batch_dir, name = os.path.split(pyscript)
                 name = '.'.join(name.split('.')[:-1])
                 batchscript = '\n'.join((template,
@@ -67,22 +69,24 @@ def _submit_graph(self, pyfiles, dependencies, nodes):
                 if 'job' in values:
                     values = values.rstrip(',')
                     deps = '-hold_jid%s' % values
-                jobname = 'job%05d' % ( idx )
+                jobname = 'job%05d' % (idx)
                 ## Do not use default output locations if they are set in self._qsub_args
                 stderrFile = ''
                 if self._qsub_args.count('-e ') == 0:
-                    stderrFile='-e {errFile}'.format(errFile=batchscripterrfile)
+                    stderrFile = '-e {errFile}'.format(
+                        errFile=batchscripterrfile)
                 stdoutFile = ''
                 if self._qsub_args.count('-o ') == 0:
-                    stdoutFile='-o {outFile}'.format(outFile=batchscriptoutfile)
+                    stdoutFile = '-o {outFile}'.format(
+                        outFile=batchscriptoutfile)
                 full_line = '{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript})\n'.format(
-                    jobNm=jobname,
-                    outFileOption=stdoutFile,
-                    errFileOption=stderrFile,
-                    extraQSubArgs=qsub_args,
-                    dependantIndex=deps,
-                    batchscript=batchscriptfile)
-                fp.writelines( full_line )
+                    jobNm=jobname,
+                    outFileOption=stdoutFile,
+                    errFileOption=stderrFile,
+                    extraQSubArgs=qsub_args,
+                    dependantIndex=deps,
+                    batchscript=batchscriptfile)
+                fp.writelines(full_line)

         cmd = CommandLine('bash', environ=os.environ.data)
         cmd.inputs.args = '%s' % submitjobsfile
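For context, a sketch of the qsub line the reformatted block writes for one job. Every value below (job name, hold list, queue argument, file paths) is a placeholder for illustration; only the format string and keyword names match the code above.

# Placeholder inputs for one hypothetical job (index 3).
jobname = 'job%05d' % (3)
deps = '-hold_jid job00001,job00002'
qsub_args = '-q short.q'
stdoutFile = '-o /tmp/batch/batchscript_node3.out'
stderrFile = '-e /tmp/batch/batchscript_node3.err'
batchscriptfile = '/tmp/batch/batchscript_node3.sh'

# Same substitution as sgegraph.py after this commit.
full_line = '{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript})\n'.format(
    jobNm=jobname,
    outFileOption=stdoutFile,
    errFileOption=stderrFile,
    extraQSubArgs=qsub_args,
    dependantIndex=deps,
    batchscript=batchscriptfile)
print(full_line)  # one shell line per node in the generated submit script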
