Skip to content

Commit a9224dd

Browse files
authored
Merge pull request #464 from satra/sty/black
Style update files with latest release of black
2 parents 0027552 + 9c3b20e commit a9224dd

30 files changed

+865
-870
lines changed

.github/workflows/teststyle.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,5 +25,5 @@ jobs:
2525

2626
- name: Check Style
2727
run: |
28-
pip install black==19.3b0 codecov
28+
pip install black==21.4b2 codecov
2929
black --check pydra setup.py

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@ repos:
99
- id: check-yaml
1010
- id: check-added-large-files
1111
- repo: https://github.com/psf/black
12-
rev: 19.3b0
12+
rev: 21.4b2
1313
hooks:
1414
- id: black

pydra/engine/boutiques.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -107,8 +107,8 @@ def _download_spec(self, zenodo_id):
107107
return zenodo_file
108108

109109
def _prepare_input_spec(self, names_subset=None):
110-
""" creating input spec from the zenodo file
111-
if name_subset provided, only names from the subset will be used in the spec
110+
"""creating input spec from the zenodo file
111+
if name_subset provided, only names from the subset will be used in the spec
112112
"""
113113
binputs = self.bosh_spec["inputs"]
114114
self._input_spec_keys = {}
@@ -148,8 +148,8 @@ def _prepare_input_spec(self, names_subset=None):
148148
return spec
149149

150150
def _prepare_output_spec(self, names_subset=None):
151-
""" creating output spec from the zenodo file
152-
if name_subset provided, only names from the subset will be used in the spec
151+
"""creating output spec from the zenodo file
152+
if name_subset provided, only names from the subset will be used in the spec
153153
"""
154154
boutputs = self.bosh_spec["output-files"]
155155
fields = []

pydra/engine/core.py

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -238,10 +238,10 @@ def errored(self):
238238

239239
@property
240240
def checksum(self):
241-
""" Calculates the unique checksum of the task.
242-
Used to create specific directory name for task that are run;
243-
and to create nodes checksums needed for graph checkums
244-
(before the tasks have inputs etc.)
241+
"""Calculates the unique checksum of the task.
242+
Used to create specific directory name for task that are run;
243+
and to create nodes checksums needed for graph checksums
244+
(before the tasks have inputs etc.)
245245
"""
246246
input_hash = self.inputs.hash
247247
if self.state is None:
@@ -305,9 +305,9 @@ def checksum_states(self, state_index=None):
305305

306306
@property
307307
def uid(self):
308-
""" the unique id number for the task
309-
It will be used to create unique names for slurm scripts etc.
310-
without a need to run checksum
308+
"""the unique id number for the task
309+
It will be used to create unique names for slurm scripts etc.
310+
without a need to run checksum
311311
"""
312312
return self._uid
313313

@@ -334,16 +334,16 @@ def set_state(self, splitter, combiner=None):
334334
@property
335335
def output_names(self):
336336
"""Get the names of the outputs from the task's output_spec
337-
(not everything has to be generated, see generated_output_names).
337+
(not everything has to be generated, see generated_output_names).
338338
"""
339339
return [f.name for f in attr.fields(make_klass(self.output_spec))]
340340

341341
@property
342342
def generated_output_names(self):
343-
""" Get the names of the outputs generated by the task.
344-
If the spec doesn't have generated_output_names method,
345-
it uses output_names.
346-
The results depends on the input provided to the task
343+
"""Get the names of the outputs generated by the task.
344+
If the spec doesn't have generated_output_names method,
345+
it uses output_names.
346+
The results depends on the input provided to the task
347347
"""
348348
output_klass = make_klass(self.output_spec)
349349
if hasattr(output_klass, "generated_output_names"):
@@ -606,7 +606,7 @@ def get_input_el(self, ind):
606606
return None, inputs_dict
607607

608608
def pickle_task(self):
609-
""" Pickling the tasks with full inputs"""
609+
"""Pickling the tasks with full inputs"""
610610
pkl_files = self.cache_dir / "pkl_files"
611611
pkl_files.mkdir(exist_ok=True, parents=True)
612612
task_main_path = pkl_files / f"{self.name}_{self.uid}_task.pklz"
@@ -862,10 +862,10 @@ def graph_sorted(self):
862862

863863
@property
864864
def checksum(self):
865-
""" Calculates the unique checksum of the task.
866-
Used to create specific directory name for task that are run;
867-
and to create nodes checksums needed for graph checkums
868-
(before the tasks have inputs etc.)
865+
"""Calculates the unique checksum of the task.
866+
Used to create specific directory name for task that are run;
867+
and to create nodes checksums needed for graph checksums
868+
(before the tasks have inputs etc.)
869869
"""
870870
# if checksum is called before run the _graph_checksums is not ready
871871
if is_workflow(self) and self.inputs._graph_checksums is attr.NOTHING:
@@ -884,8 +884,8 @@ def checksum(self):
884884
return self._checksum
885885

886886
def _checksum_wf(self, input_hash, with_splitter=False):
887-
""" creating hash value for workflows
888-
includes connections and splitter if with_splitter is True
887+
"""creating hash value for workflows
888+
includes connections and splitter if with_splitter is True
889889
"""
890890
connection_hash = hash_function(self._connections)
891891
hash_list = [input_hash, connection_hash]

pydra/engine/graph.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -99,9 +99,9 @@ def edges_names(self):
9999

100100
@property
101101
def nodes_details(self):
102-
""" dictionary with details of the nodes
103-
for each task, there are inputs/outputs and connections
104-
(with input/output fields names)
102+
"""dictionary with details of the nodes
103+
for each task, there are inputs/outputs and connections
104+
(with input/output fields names)
105105
"""
106106
# removing repeated fields from inputs and outputs
107107
for el in self._nodes_details.values():
@@ -156,7 +156,7 @@ def add_edges(self, new_edges):
156156
self.sorting(presorted=self.sorted_nodes + [])
157157

158158
def add_edges_description(self, new_edge_details):
159-
""" adding detailed description of the connections, filling _nodes_details"""
159+
"""adding detailed description of the connections, filling _nodes_details"""
160160
in_nd, in_fld, out_nd, out_fld = new_edge_details
161161
for key in [in_nd, out_nd]:
162162
self._nodes_details.setdefault(
@@ -309,7 +309,7 @@ def _checking_successors_nodes(self, node, remove=True):
309309
return True
310310

311311
def remove_successors_nodes(self, node):
312-
""" Removing all the nodes that follow the node"""
312+
"""Removing all the nodes that follow the node"""
313313
self._successors_all = []
314314
self._checking_successors_nodes(node=node, remove=False)
315315
self.remove_nodes_connections(nodes=node)
@@ -352,7 +352,7 @@ def calculate_max_paths(self):
352352
self._checking_path(node_name=nm, first_name=nm)
353353

354354
def create_dotfile_simple(self, outdir, name="graph"):
355-
""" creates a simple dotfile (no nested structure)"""
355+
"""creates a simple dotfile (no nested structure)"""
356356
from .core import is_workflow
357357

358358
dotstr = "digraph G {\n"
@@ -384,7 +384,7 @@ def create_dotfile_simple(self, outdir, name="graph"):
384384
return dotfile
385385

386386
def create_dotfile_detailed(self, outdir, name="graph_det"):
387-
""" creates a detailed dotfile (detailed connections - input/output fields,
387+
"""creates a detailed dotfile (detailed connections - input/output fields,
388388
but no nested structure)
389389
"""
390390
dotstr = "digraph structs {\n"
@@ -492,7 +492,7 @@ def _create_dotfile_single_graph(self, nodes, edges):
492492
return dotstr
493493

494494
def export_graph(self, dotfile, ext="png"):
495-
""" exporting dotfile to other format, equires the dot command"""
495+
"""exporting dotfile to other format, equires the dot command"""
496496
available_ext = [
497497
"bmp",
498498
"canon",

pydra/engine/helpers.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ def save(task_path: Path, result=None, task=None, name_prefix=None):
159159

160160

161161
def copyfile_workflow(wf_path, result):
162-
""" if file in the wf results, the file will be copied to the workflow directory"""
162+
"""if file in the wf results, the file will be copied to the workflow directory"""
163163
for field in attr_fields(result.output):
164164
value = getattr(result.output, field.name)
165165
# if the field is a path or it can contain a path _copyfile_single_value is run
@@ -175,7 +175,7 @@ def copyfile_workflow(wf_path, result):
175175

176176

177177
def _copyfile_single_value(wf_path, value):
178-
""" checking a single value for files that need to be copied to the wf dir"""
178+
"""checking a single value for files that need to be copied to the wf dir"""
179179
if isinstance(value, (tuple, list)):
180180
return [_copyfile_single_value(wf_path, val) for val in value]
181181
elif isinstance(value, dict):
@@ -369,7 +369,7 @@ def custom_validator(instance, attribute, value):
369369

370370

371371
def _type_validator(instance, attribute, value, tp, cont_type):
372-
""" creating a customized type validator,
372+
"""creating a customized type validator,
373373
uses validator.deep_iterable/mapping if the field is a container
374374
(i.e. ty.List or ty.Dict),
375375
it also tries to guess when the value is a list due to the splitter
@@ -421,9 +421,9 @@ def _types_updates(tp_list, name):
421421

422422

423423
def _single_type_update(tp, name, simplify=False):
424-
""" updating a single type with other related types - e.g. adding bytes for str
425-
if simplify is True, than changing typing.List to list etc.
426-
(assuming that I validate only one depth, so have to simplify at some point)
424+
"""updating a single type with other related types - e.g. adding bytes for str
425+
if simplify is True, then changing typing.List to list etc.
426+
(assuming that I validate only one depth, so have to simplify at some point)
427427
"""
428428
if isinstance(tp, type) or tp in [File, Directory]:
429429
if tp is str:
@@ -456,7 +456,7 @@ def _single_type_update(tp, name, simplify=False):
456456

457457

458458
def _check_special_type(tp, name):
459-
"""checking if the type is a container: ty.List, ty.Dict or ty.Union """
459+
"""checking if the type is a container: ty.List, ty.Dict or ty.Union"""
460460
if sys.version_info.minor >= 8:
461461
return ty.get_origin(tp), ty.get_args(tp)
462462
else:
@@ -477,7 +477,7 @@ def _check_special_type(tp, name):
477477

478478

479479
def _allowed_values_validator(instance, attribute, value):
480-
""" checking if the values is in allowed_values"""
480+
"""checking if the values is in allowed_values"""
481481
allowed = attribute.metadata["allowed_values"]
482482
if value is attr.NOTHING:
483483
pass
@@ -769,9 +769,9 @@ def load_and_run(
769769
task_pkl, ind=None, rerun=False, submitter=None, plugin=None, **kwargs
770770
):
771771
"""
772-
loading a task from a pickle file, settings proper input
773-
and running the task
774-
"""
772+
loading a task from a pickle file, setting proper input
773+
and running the task
774+
"""
775775
try:
776776
task = load_task(task_pkl=task_pkl, ind=ind)
777777
except Exception as excinfo:
@@ -813,7 +813,7 @@ async def load_and_run_async(task_pkl, ind=None, submitter=None, rerun=False, **
813813

814814

815815
def load_task(task_pkl, ind=None):
816-
""" loading a task from a pickle file, settings proper input for the specific ind"""
816+
"""loading a task from a pickle file, settings proper input for the specific ind"""
817817
if isinstance(task_pkl, str):
818818
task_pkl = Path(task_pkl)
819819
task = cp.loads(task_pkl.read_bytes())
@@ -863,7 +863,7 @@ def position_sort(args):
863863

864864

865865
def argstr_formatting(argstr, inputs, value_updates=None):
866-
""" formatting argstr that have form {field_name},
866+
"""formatting argstr that have form {field_name},
867867
using values from inputs and updating with value_update if provided
868868
"""
869869
inputs_dict = attr.asdict(inputs)

pydra/engine/helpers_file.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -741,7 +741,7 @@ def is_local_file(f):
741741

742742

743743
def is_existing_file(value):
744-
""" checking if an object is an existing file"""
744+
"""checking if an object is an existing file"""
745745
if isinstance(value, str) and value == "":
746746
return False
747747
try:

pydra/engine/helpers_state.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,7 @@ def rpn2splitter(splitter_rpn):
307307

308308

309309
def add_name_combiner(combiner, name):
310-
""" adding a node's name to each field from the combiner"""
310+
"""adding a node's name to each field from the combiner"""
311311
combiner_changed = []
312312
for comb in combiner:
313313
if "." not in comb:
@@ -318,7 +318,7 @@ def add_name_combiner(combiner, name):
318318

319319

320320
def add_name_splitter(splitter, name):
321-
""" adding a node's name to each field from the splitter"""
321+
"""adding a node's name to each field from the splitter"""
322322
if isinstance(splitter, str):
323323
return _add_name([splitter], name)[0]
324324
elif isinstance(splitter, list):
@@ -329,7 +329,7 @@ def add_name_splitter(splitter, name):
329329

330330

331331
def _add_name(mlist, name):
332-
""" adding anem to each element from the list"""
332+
"""adding anem to each element from the list"""
333333
for i, elem in enumerate(mlist):
334334
if isinstance(elem, str):
335335
if "." in elem or elem.startswith("_"):
@@ -371,7 +371,7 @@ def iter_splits(iterable, keys):
371371

372372

373373
def input_shape(inp, cont_dim=1):
374-
"""Get input shape, depends on the container dimension, if not specify it is assumed to be 1 """
374+
"""Get input shape, depends on the container dimension, if not specify it is assumed to be 1"""
375375
# TODO: have to be changed for inner splitter (sometimes different length)
376376
cont_dim -= 1
377377
shape = [len(inp)]
@@ -536,7 +536,7 @@ def splits(splitter_rpn, inputs, inner_inputs=None, cont_dim=None):
536536
def _single_op_splits(
537537
op_single, inputs, inner_inputs, previous_states_ind, cont_dim=None
538538
):
539-
""" splits function if splitter is a singleton"""
539+
"""splits function if splitter is a singleton"""
540540
if op_single.startswith("_"):
541541
return (previous_states_ind[op_single][0], previous_states_ind[op_single][1])
542542
if cont_dim is None:
@@ -562,8 +562,8 @@ def _single_op_splits(
562562

563563

564564
def splits_groups(splitter_rpn, combiner=None, inner_inputs=None):
565-
""" splits inputs to groups (axes) and creates stacks for these groups
566-
This is used to specify which input can be combined.
565+
"""splits inputs to groups (axes) and creates stacks for these groups
566+
This is used to specify which input can be combined.
567567
"""
568568
if not splitter_rpn:
569569
return [], {}, [], []
@@ -696,7 +696,7 @@ def splits_groups(splitter_rpn, combiner=None, inner_inputs=None):
696696

697697

698698
def _single_op_splits_groups(op_single, combiner, inner_inputs, groups):
699-
""" splits_groups function if splitter is a singleton"""
699+
"""splits_groups function if splitter is a singleton"""
700700
if op_single in inner_inputs:
701701
# TODO: have to be changed if differ length
702702
# TODO: i think I don't want to add here from left part

pydra/engine/specs.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -476,9 +476,9 @@ def collect_additional_outputs(self, inputs, output_dir, outputs):
476476
return additional_out
477477

478478
def generated_output_names(self, inputs, output_dir):
479-
""" Returns a list of all outputs that will be generated by the task.
480-
Takes into account the task input and the requires list for the output fields.
481-
TODO: should be in all Output specs?
479+
"""Returns a list of all outputs that will be generated by the task.
480+
Takes into account the task input and the requires list for the output fields.
481+
TODO: should be in all Output specs?
482482
"""
483483
# checking the input (if all mandatory fields are provided, etc.)
484484
inputs.check_fields_input_spec()
@@ -575,8 +575,8 @@ def _field_metadata(self, fld, inputs, output_dir, outputs=None):
575575
raise Exception("(_field_metadata) is not a current valid metadata key.")
576576

577577
def _check_requires(self, fld, inputs):
578-
""" checking if all fields from the requires and template are set in the input
579-
if requires is a list of list, checking if at least one list has all elements set
578+
"""checking if all fields from the requires and template are set in the input
579+
if requires is a list of list, checking if at least one list has all elements set
580580
"""
581581
from .helpers import ensure_list
582582

0 commit comments

Comments
 (0)