Skip to content

Commit 364fc12

Browse files
committed
mass-format with longer lines
1 parent ae9fa45 commit 364fc12

File tree

112 files changed

+588
-1686
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

112 files changed

+588
-1686
lines changed

cwltool/argparser.py

Lines changed: 11 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -126,9 +126,7 @@ def arg_parser() -> argparse.ArgumentParser:
126126
help="Path prefix for temporary directories. If --tmpdir-prefix is not "
127127
"provided, then the prefix for temporary directories is influenced by "
128128
"the value of the TMPDIR, TEMP, or TMP environment variables. Taking "
129-
"those into consideration, the current default is {}.".format(
130-
DEFAULT_TMP_PREFIX
131-
),
129+
"those into consideration, the current default is {}.".format(DEFAULT_TMP_PREFIX),
132130
default=DEFAULT_TMP_PREFIX,
133131
)
134132

@@ -318,12 +316,8 @@ def arg_parser() -> argparse.ArgumentParser:
318316
action="store_true",
319317
help="Combine components into single document and print.",
320318
)
321-
printgroup.add_argument(
322-
"--version", action="store_true", help="Print version and exit"
323-
)
324-
printgroup.add_argument(
325-
"--validate", action="store_true", help="Validate CWL document only."
326-
)
319+
printgroup.add_argument("--version", action="store_true", help="Print version and exit")
320+
printgroup.add_argument("--validate", action="store_true", help="Validate CWL document only.")
327321
printgroup.add_argument(
328322
"--print-supported-versions",
329323
action="store_true",
@@ -384,12 +378,8 @@ def arg_parser() -> argparse.ArgumentParser:
384378

385379
volumegroup = parser.add_mutually_exclusive_group()
386380
volumegroup.add_argument("--verbose", action="store_true", help="Default logging")
387-
volumegroup.add_argument(
388-
"--quiet", action="store_true", help="Only print warnings and errors."
389-
)
390-
volumegroup.add_argument(
391-
"--debug", action="store_true", help="Print even more logging"
392-
)
381+
volumegroup.add_argument("--quiet", action="store_true", help="Only print warnings and errors.")
382+
volumegroup.add_argument("--debug", action="store_true", help="Print even more logging")
393383

394384
parser.add_argument(
395385
"--write-summary",
@@ -494,12 +484,9 @@ def arg_parser() -> argparse.ArgumentParser:
494484
"Default root directory used by dependency resolvers configuration."
495485
)
496486
use_biocontainers_help = (
497-
"Use biocontainers for tools without an "
498-
"explicitly annotated Docker container."
499-
)
500-
conda_dependencies = (
501-
"Short cut to use Conda to resolve 'SoftwareRequirement' packages."
487+
"Use biocontainers for tools without an " "explicitly annotated Docker container."
502488
)
489+
conda_dependencies = "Short cut to use Conda to resolve 'SoftwareRequirement' packages."
503490

504491
parser.add_argument(
505492
"--beta-dependency-resolvers-configuration",
@@ -522,9 +509,7 @@ def arg_parser() -> argparse.ArgumentParser:
522509
action="store_true",
523510
)
524511

525-
parser.add_argument(
526-
"--tool-help", action="store_true", help="Print command line help for tool"
527-
)
512+
parser.add_argument("--tool-help", action="store_true", help="Print command line help for tool")
528513

529514
parser.add_argument(
530515
"--relative-deps",
@@ -537,8 +522,7 @@ def arg_parser() -> argparse.ArgumentParser:
537522
parser.add_argument(
538523
"--enable-dev",
539524
action="store_true",
540-
help="Enable loading and running unofficial development versions of "
541-
"the CWL standards.",
525+
help="Enable loading and running unofficial development versions of " "the CWL standards.",
542526
default=False,
543527
)
544528

@@ -647,8 +631,7 @@ def arg_parser() -> argparse.ArgumentParser:
647631
"--relax-path-checks",
648632
action="store_true",
649633
default=False,
650-
help="Relax requirements on path names to permit "
651-
"spaces and hash characters.",
634+
help="Relax requirements on path names to permit " "spaces and hash characters.",
652635
dest="relax_path_checks",
653636
)
654637

@@ -933,9 +916,7 @@ def add_argument(
933916
fieldtype,
934917
records,
935918
fielddescription,
936-
default=default.get(shortname(field["name"]), None)
937-
if default
938-
else None,
919+
default=default.get(shortname(field["name"]), None) if default else None,
939920
input_required=required,
940921
)
941922
return

cwltool/builder.py

Lines changed: 13 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -191,9 +191,7 @@ def bind_input(
191191
bindings: List[MutableMapping[str, Union[str, List[int]]]] = []
192192
binding: Union[MutableMapping[str, Union[str, List[int]]], CommentedMap] = {}
193193
value_from_expression = False
194-
if "inputBinding" in schema and isinstance(
195-
schema["inputBinding"], MutableMapping
196-
):
194+
if "inputBinding" in schema and isinstance(schema["inputBinding"], MutableMapping):
197195
binding = CommentedMap(schema["inputBinding"].items())
198196

199197
bp = list(aslist(lead_pos))
@@ -303,8 +301,7 @@ def bind_input(
303301
else:
304302
schema["type"] = "record"
305303
schema["fields"] = [
306-
{"name": field_name, "type": "Any"}
307-
for field_name in datum.keys()
304+
{"name": field_name, "type": "Any"} for field_name in datum.keys()
308305
]
309306
elif isinstance(datum, list):
310307
schema["type"] = "array"
@@ -378,14 +375,10 @@ def _capture_files(f: CWLObjectType) -> CWLObjectType:
378375
debug,
379376
):
380377
try:
381-
with self.fs_access.open(
382-
cast(str, datum["location"]), "rb"
383-
) as f2:
378+
with self.fs_access.open(cast(str, datum["location"]), "rb") as f2:
384379
datum["contents"] = content_limit_respected_read(f2)
385380
except Exception as e:
386-
raise Exception(
387-
"Reading {}\n{}".format(datum["location"], e)
388-
) from e
381+
raise Exception("Reading {}\n{}".format(datum["location"], e)) from e
389382

390383
if "secondaryFiles" in schema:
391384
if "secondaryFiles" not in datum:
@@ -398,13 +391,8 @@ def _capture_files(f: CWLObjectType) -> CWLObjectType:
398391

399392
for num, sf_entry in enumerate(sf_schema):
400393
if "required" in sf_entry and sf_entry["required"] is not None:
401-
required_result = self.do_eval(
402-
sf_entry["required"], context=datum
403-
)
404-
if not (
405-
isinstance(required_result, bool)
406-
or required_result is None
407-
):
394+
required_result = self.do_eval(sf_entry["required"], context=datum)
395+
if not (isinstance(required_result, bool) or required_result is None):
408396
if sf_schema == schema["secondaryFiles"]:
409397
sf_item: Any = sf_schema[num]
410398
else:
@@ -425,9 +413,7 @@ def _capture_files(f: CWLObjectType) -> CWLObjectType:
425413
if "$(" in sf_entry["pattern"] or "${" in sf_entry["pattern"]:
426414
sfpath = self.do_eval(sf_entry["pattern"], context=datum)
427415
else:
428-
sfpath = substitute(
429-
cast(str, datum["basename"]), sf_entry["pattern"]
430-
)
416+
sfpath = substitute(cast(str, datum["basename"]), sf_entry["pattern"])
431417

432418
for sfname in aslist(sfpath):
433419
if not sfname:
@@ -438,8 +424,7 @@ def _capture_files(f: CWLObjectType) -> CWLObjectType:
438424
d_location = cast(str, datum["location"])
439425
if "/" in d_location:
440426
sf_location = (
441-
d_location[0 : d_location.rindex("/") + 1]
442-
+ sfname
427+
d_location[0 : d_location.rindex("/") + 1] + sfname
443428
)
444429
else:
445430
sf_location = d_location + sfname
@@ -462,9 +447,7 @@ def _capture_files(f: CWLObjectType) -> CWLObjectType:
462447
datum["secondaryFiles"],
463448
):
464449
if not d.get("basename"):
465-
d["basename"] = d["location"][
466-
d["location"].rindex("/") + 1 :
467-
]
450+
d["basename"] = d["location"][d["location"].rindex("/") + 1 :]
468451
if d["basename"] == sfbasename:
469452
found = True
470453

@@ -488,9 +471,7 @@ def addsf(
488471
),
489472
sfname,
490473
)
491-
elif discover_secondaryFiles and self.fs_access.exists(
492-
sf_location
493-
):
474+
elif discover_secondaryFiles and self.fs_access.exists(sf_location):
494475
addsf(
495476
cast(
496477
MutableSequence[CWLObjectType],
@@ -550,9 +531,7 @@ def addsf(
550531
).makeError(message)
551532
evaluated_format = cast(List[str], eval_format)
552533
else:
553-
raise SourceLine(
554-
schema, "format", WorkflowException, debug
555-
).makeError(
534+
raise SourceLine(schema, "format", WorkflowException, debug).makeError(
556535
"An expression in the 'format' field must "
557536
"evaluate to a string, or list of strings. "
558537
"However the type of the expression result was "
@@ -642,9 +621,7 @@ def generate_arg(self, binding: CWLObjectType) -> List[str]:
642621
WorkflowException,
643622
debug,
644623
):
645-
raise WorkflowException(
646-
"'separate' option can not be specified without prefix"
647-
)
624+
raise WorkflowException("'separate' option can not be specified without prefix")
648625

649626
argl: MutableSequence[CWLOutputType] = []
650627
if isinstance(value, MutableSequence):
@@ -653,9 +630,7 @@ def generate_arg(self, binding: CWLObjectType) -> List[str]:
653630
argl = [itemSeparator.join([self.tostr(v) for v in value])]
654631
elif binding.get("valueFrom"):
655632
value = [self.tostr(v) for v in value]
656-
return cast(List[str], ([prefix] if prefix else [])) + cast(
657-
List[str], value
658-
)
633+
return cast(List[str], ([prefix] if prefix else [])) + cast(List[str], value)
659634
elif prefix and value:
660635
return [prefix]
661636
else:

cwltool/checker.py

Lines changed: 14 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -59,9 +59,7 @@ def check_types(
5959
None,
6060
)
6161
if linkMerge == "merge_flattened":
62-
return check_types(
63-
merge_flatten_type(_get_type(srctype)), _get_type(sinktype), None, None
64-
)
62+
return check_types(merge_flatten_type(_get_type(srctype)), _get_type(sinktype), None, None)
6563
raise WorkflowException(f"Unrecognized linkMerge enum {linkMerge!r}")
6664

6765

@@ -74,9 +72,7 @@ def merge_flatten_type(src: SinkType) -> CWLOutputType:
7472
return {"items": src, "type": "array"}
7573

7674

77-
def can_assign_src_to_sink(
78-
src: SinkType, sink: Optional[SinkType], strict: bool = False
79-
) -> bool:
75+
def can_assign_src_to_sink(src: SinkType, sink: Optional[SinkType], strict: bool = False) -> bool:
8076
"""
8177
Check for identical type specifications, ignoring extra keys like inputBinding.
8278
@@ -104,9 +100,7 @@ def can_assign_src_to_sink(
104100
for sinksf in cast(List[CWLObjectType], sink.get("secondaryFiles", [])):
105101
if not [
106102
1
107-
for srcsf in cast(
108-
List[CWLObjectType], src.get("secondaryFiles", [])
109-
)
103+
for srcsf in cast(List[CWLObjectType], src.get("secondaryFiles", []))
110104
if sinksf == srcsf
111105
]:
112106
if strict:
@@ -122,9 +116,7 @@ def can_assign_src_to_sink(
122116
return False
123117
return True
124118
for this_src in src:
125-
if this_src != "null" and can_assign_src_to_sink(
126-
cast(SinkType, this_src), sink
127-
):
119+
if this_src != "null" and can_assign_src_to_sink(cast(SinkType, this_src), sink):
128120
return True
129121
return False
130122
if isinstance(sink, MutableSequence):
@@ -135,9 +127,7 @@ def can_assign_src_to_sink(
135127
return bool(src == sink)
136128

137129

138-
def _compare_records(
139-
src: CWLObjectType, sink: CWLObjectType, strict: bool = False
140-
) -> bool:
130+
def _compare_records(src: CWLObjectType, sink: CWLObjectType, strict: bool = False) -> bool:
141131
"""
142132
Compare two records, ensuring they have compatible fields.
143133
@@ -219,9 +209,7 @@ def static_checker(
219209
sink = warning.sink
220210
linkMerge = warning.linkMerge
221211
sinksf = sorted(
222-
p["pattern"]
223-
for p in sink.get("secondaryFiles", [])
224-
if p.get("required", True)
212+
p["pattern"] for p in sink.get("secondaryFiles", []) if p.get("required", True)
225213
)
226214
srcsf = sorted(p["pattern"] for p in src.get("secondaryFiles", []))
227215
# Every secondaryFile required by the sink, should be declared
@@ -233,16 +221,13 @@ def static_checker(
233221
missing,
234222
)
235223
msg3 = SourceLine(src, "id").makeError(
236-
"source '%s' does not provide those secondaryFiles."
237-
% (shortname(src["id"]))
224+
"source '%s' does not provide those secondaryFiles." % (shortname(src["id"]))
238225
)
239226
msg4 = SourceLine(src.get("_tool_entry", src), "secondaryFiles").makeError(
240227
"To resolve, add missing secondaryFiles patterns to definition of '%s' or"
241228
% (shortname(src["id"]))
242229
)
243-
msg5 = SourceLine(
244-
sink.get("_tool_entry", sink), "secondaryFiles"
245-
).makeError(
230+
msg5 = SourceLine(sink.get("_tool_entry", sink), "secondaryFiles").makeError(
246231
"mark missing secondaryFiles in definition of '%s' as optional."
247232
% shortname(sink["id"])
248233
)
@@ -303,17 +288,14 @@ def static_checker(
303288
)
304289
+ "\n"
305290
+ SourceLine(sink, "type").makeError(
306-
" with sink '%s' of type %s"
307-
% (shortname(sink["id"]), json_dumps(sink["type"]))
291+
" with sink '%s' of type %s" % (shortname(sink["id"]), json_dumps(sink["type"]))
308292
)
309293
)
310294
if extra_message is not None:
311295
msg += "\n" + SourceLine(sink).makeError(" " + extra_message)
312296

313297
if linkMerge is not None:
314-
msg += "\n" + SourceLine(sink).makeError(
315-
" source has linkMerge method %s" % linkMerge
316-
)
298+
msg += "\n" + SourceLine(sink).makeError(" source has linkMerge method %s" % linkMerge)
317299
exception_msgs.append(msg)
318300

319301
for sink in step_inputs:
@@ -358,7 +340,6 @@ def check_all_types(
358340
validation = {"warning": [], "exception": []} # type: Dict[str, List[SrcSink]]
359341
for sink in sinks:
360342
if sourceField in sink:
361-
362343
valueFrom = cast(Optional[str], sink.get("valueFrom"))
363344
pickValue = cast(Optional[str], sink.get("pickValue"))
364345

@@ -371,11 +352,7 @@ def check_all_types(
371352
Optional[str],
372353
sink.get(
373354
"linkMerge",
374-
(
375-
"merge_nested"
376-
if len(cast(Sized, sink[sourceField])) > 1
377-
else None
378-
),
355+
("merge_nested" if len(cast(Sized, sink[sourceField])) > 1 else None),
379356
),
380357
) # type: Optional[str]
381358

@@ -385,10 +362,7 @@ def check_all_types(
385362
srcs_of_sink = [] # type: List[CWLObjectType]
386363
for parm_id in cast(MutableSequence[str], sink[sourceField]):
387364
srcs_of_sink += [src_dict[parm_id]]
388-
if (
389-
is_conditional_step(param_to_step, parm_id)
390-
and pickValue is None
391-
):
365+
if is_conditional_step(param_to_step, parm_id) and pickValue is None:
392366
validation["warning"].append(
393367
SrcSink(
394368
src_dict[parm_id],
@@ -490,9 +464,7 @@ def get_dependency_tree(step_inputs: List[CWLObjectType]) -> Dict[str, List[str]
490464
for step_input in step_inputs:
491465
if "source" in step_input:
492466
if isinstance(step_input["source"], list):
493-
vertices_in = [
494-
get_step_id(cast(str, src)) for src in step_input["source"]
495-
]
467+
vertices_in = [get_step_id(cast(str, src)) for src in step_input["source"]]
496468
else:
497469
vertices_in = [get_step_id(cast(str, step_input["source"]))]
498470
vertex_out = get_step_id(cast(str, step_input["id"]))
@@ -542,9 +514,7 @@ def is_conditional_step(param_to_step: Dict[str, CWLObjectType], parm_id: str) -
542514
return False
543515

544516

545-
def is_all_output_method_loop_step(
546-
param_to_step: Dict[str, CWLObjectType], parm_id: str
547-
) -> bool:
517+
def is_all_output_method_loop_step(param_to_step: Dict[str, CWLObjectType], parm_id: str) -> bool:
548518
"""Check if a step contains a http://commonwl.org/cwltool#Loop requirement with `all` outputMethod."""
549519
source_step: Optional[MutableMapping[str, Any]] = param_to_step.get(parm_id)
550520
if source_step is not None:

0 commit comments

Comments (0)