
Commit 5f07079

fix for Python 3.5.{0,1} (#860)
typing.Text doesn't exist in these versions of Python, plus many pylint-inspired cleanups.
1 parent 8056a64 commit 5f07079

33 files changed: +435 −425 lines changed

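The substantive change, repeated across the modules below, is to stop importing Text from typing (absent on Python 3.5.0 and 3.5.1, which predate typing.Text) and take it from the typing_extensions backport instead. A minimal sketch of that pattern follows; the greet function is illustrative only and not cwltool code:

from typing_extensions import Text  # pylint: disable=unused-import
# move to a regular typing import when Python 3.3-3.6 is no longer supported


def greet(name):
    # type: (Text) -> Text
    # Text behaves like typing.Text (str on Python 3, unicode on Python 2),
    # but is importable even on interpreters where typing.Text is missing.
    return u"Hello, " + name


print(greet(u"world"))
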
cwltool/__main__.py

Lines changed: 0 additions & 2 deletions
@@ -1,7 +1,5 @@
 from __future__ import absolute_import
 
-import sys
-
 from . import main
 
 main.run()

cwltool/argparser.py

Lines changed: 33 additions & 30 deletions
@@ -2,8 +2,10 @@
 
 import argparse
 import os
-from typing import (Any, AnyStr, Dict, List,  # pylint: disable=unused-import
-                    Optional, Sequence, Text, Union, cast)
+from typing import Any, AnyStr, Dict, List, Optional, Sequence, Union, cast
+
+from typing_extensions import Text  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
 
 from schema_salad.ref_resolver import file_uri
 
@@ -25,44 +27,43 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
                         help="[experimental] Run jobs in parallel. ")
     envgroup = parser.add_mutually_exclusive_group()
     envgroup.add_argument("--preserve-environment", type=Text, action="append",
-                          help="Preserve specific environment variable when "
-                          "running CommandLineTools. May be provided multiple "
-                          "times.", metavar="ENVVAR", default=["PATH"],
-                          dest="preserve_environment")
+                          help="Preserve specific environment variable when "
+                          "running CommandLineTools. May be provided multiple "
+                          "times.", metavar="ENVVAR", default=["PATH"],
+                          dest="preserve_environment")
     envgroup.add_argument("--preserve-entire-environment", action="store_true",
-                          help="Preserve all environment variable when running "
-                          "CommandLineTools.", default=False,
-                          dest="preserve_entire_environment")
+                          help="Preserve all environment variable when running "
+                          "CommandLineTools.", default=False,
+                          dest="preserve_entire_environment")
 
     exgroup = parser.add_mutually_exclusive_group()
     exgroup.add_argument("--rm-container", action="store_true", default=True,
                          help="Delete Docker container used by jobs after they exit (default)",
                          dest="rm_container")
 
-    exgroup.add_argument("--leave-container", action="store_false",
-                         default=True, help="Do not delete Docker container used by jobs after they exit",
-                         dest="rm_container")
+    exgroup.add_argument(
+        "--leave-container", action="store_false", default=True,
+        help="Do not delete Docker container used by jobs after they exit",
+        dest="rm_container")
 
-    cidgroup = parser.add_argument_group("Options for recording the Docker "
-                                         "container identifier into a file")
+    cidgroup = parser.add_argument_group(
+        "Options for recording the Docker container identifier into a file")
     cidgroup.add_argument("--record-container-id", action="store_true",
                           default=False,
                           help="If enabled, store the Docker container ID into a file. "
                           "See --cidfile-dir to specify the directory.",
                           dest="record_container_id")
 
-    cidgroup.add_argument("--cidfile-dir", type=Text,
-                          help="Directory for storing the Docker container ID file. "
-                          "The default is the current directory",
-                          default="",
-                          dest="cidfile_dir")
+    cidgroup.add_argument(
+        "--cidfile-dir", type=Text, help="Directory for storing the Docker "
+        "container ID file. The default is the current directory",
+        default="", dest="cidfile_dir")
 
-    cidgroup.add_argument("--cidfile-prefix", type=Text,
-                          help="Specify a prefix to the container ID filename. "
-                          "Final file name will be followed by a timestamp. "
-                          "The default is no prefix.",
-                          default="",
-                          dest="cidfile_prefix")
+    cidgroup.add_argument(
+        "--cidfile-prefix", type=Text,
+        help="Specify a prefix to the container ID filename. "
+        "Final file name will be followed by a timestamp. The default is no prefix.",
+        default="", dest="cidfile_prefix")
 
     parser.add_argument("--tmpdir-prefix", type=Text,
                         help="Path prefix for temporary directories",
@@ -73,8 +74,9 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
                         help="Path prefix for intermediate output directories",
                         default=DEFAULT_TMP_PREFIX)
 
-    exgroup.add_argument("--cachedir", type=Text, default="",
-                         help="Directory to cache intermediate workflow outputs to avoid recomputing steps.")
+    exgroup.add_argument(
+        "--cachedir", type=Text, default="",
+        help="Directory to cache intermediate workflow outputs to avoid recomputing steps.")
 
     exgroup = parser.add_mutually_exclusive_group()
     exgroup.add_argument("--rm-tmpdir", action="store_true", default=True,
@@ -86,9 +88,10 @@ def arg_parser(): # type: () -> argparse.ArgumentParser
                         dest="rm_tmpdir")
 
     exgroup = parser.add_mutually_exclusive_group()
-    exgroup.add_argument("--move-outputs", action="store_const", const="move", default="move",
-                         help="Move output files to the workflow output directory and delete intermediate output directories (default).",
-                         dest="move_outputs")
+    exgroup.add_argument(
+        "--move-outputs", action="store_const", const="move", default="move",
+        help="Move output files to the workflow output directory and delete "
+        "intermediate output directories (default).", dest="move_outputs")
 
     exgroup.add_argument("--leave-outputs", action="store_const", const="leave", default="move",
                         help="Leave output files in intermediate output directories.",

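For context on the --preserve-environment block re-wrapped above: action="append" is what allows the flag to be given multiple times, with each use added on top of the default ["PATH"] entry. A stripped-down sketch (plain argparse, without the Text type used in cwltool):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--preserve-environment", action="append", metavar="ENVVAR",
                    default=["PATH"], dest="preserve_environment",
                    help="May be provided multiple times.")

# Each occurrence appends to the (shared) default list.
args = parser.parse_args(["--preserve-environment", "HOME",
                          "--preserve-environment", "LANG"])
print(args.preserve_environment)  # ['PATH', 'HOME', 'LANG']
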
cwltool/builder.py

Lines changed: 4 additions & 3 deletions
@@ -2,13 +2,14 @@
 
 import copy
 import logging
-from typing import (Any, Callable, Dict, List,  # pylint: disable=unused-import
-                    Optional, Set, Text, Type, Union, Tuple, TYPE_CHECKING)
+from typing import Any, Callable, Dict, List, Optional, Set, Union, Tuple
+from typing_extensions import Text, Type, TYPE_CHECKING  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
 
 from rdflib import Graph, URIRef  # pylint: disable=unused-import
 from rdflib.namespace import OWL, RDFS
 import schema_salad.schema  # pylint: disable=unused-import
-import schema_salad.validate as validate
+from schema_salad import validate
 from schema_salad.schema import AvroSchemaFromJSONData
 from schema_salad.sourceline import SourceLine
 from six import iteritems, string_types

cwltool/checker.py

Lines changed: 25 additions & 27 deletions
@@ -1,9 +1,9 @@
-import logging
 from collections import namedtuple
-from typing import (Any, Callable, Dict,  # pylint: disable=unused-import
-                    Generator, Iterable, List, Optional, Text, Union, cast)
+from typing import Any, Dict, List, Optional
+from typing_extensions import Text  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
 
-import schema_salad.validate as validate
+from schema_salad import validate
 from schema_salad.sourceline import SourceLine, bullets, strip_dup_lineno
 import six
 
@@ -27,19 +27,18 @@ def check_types(srctype, sinktype, linkMerge, valueFrom):
 
     if valueFrom:
         return "pass"
-    elif not linkMerge:
+    if not linkMerge:
         if can_assign_src_to_sink(srctype, sinktype, strict=True):
             return "pass"
-        elif can_assign_src_to_sink(srctype, sinktype, strict=False):
+        if can_assign_src_to_sink(srctype, sinktype, strict=False):
             return "warning"
-        else:
-            return "exception"
-    elif linkMerge == "merge_nested":
-        return check_types({"items": _get_type(srctype), "type": "array"}, _get_type(sinktype), None, None)
-    elif linkMerge == "merge_flattened":
+        return "exception"
+    if linkMerge == "merge_nested":
+        return check_types({"items": _get_type(srctype), "type": "array"},
+                           _get_type(sinktype), None, None)
+    if linkMerge == "merge_flattened":
         return check_types(merge_flatten_type(_get_type(srctype)), _get_type(sinktype), None, None)
-    else:
-        raise WorkflowException(u"Unrecognized linkMerge enu_m '%s'" % linkMerge)
+    raise WorkflowException(u"Unrecognized linkMerge enu_m '{}'".format(linkMerge))
 
 
 def merge_flatten_type(src):
@@ -49,10 +48,9 @@ def merge_flatten_type(src):
 
     if isinstance(src, list):
         return [merge_flatten_type(t) for t in src]
-    elif isinstance(src, dict) and src.get("type") == "array":
+    if isinstance(src, dict) and src.get("type") == "array":
         return src
-    else:
-        return {"items": src, "type": "array"}
+    return {"items": src, "type": "array"}
 
 
 def can_assign_src_to_sink(src, sink, strict=False):  # type: (Any, Any, bool) -> bool
@@ -72,16 +70,15 @@ def can_assign_src_to_sink(src, sink, strict=False):  # type: (Any, Any, bool) -> bool
             return False
         if src["type"] == "array" and sink["type"] == "array":
             return can_assign_src_to_sink(src["items"], sink["items"], strict)
-        elif src["type"] == "record" and sink["type"] == "record":
+        if src["type"] == "record" and sink["type"] == "record":
             return _compare_records(src, sink, strict)
-        elif src["type"] == "File" and sink["type"] == "File":
+        if src["type"] == "File" and sink["type"] == "File":
             for sinksf in sink.get("secondaryFiles", []):
                 if not [1 for srcsf in src.get("secondaryFiles", []) if sinksf == srcsf]:
                     if strict:
                         return False
             return True
-        else:
-            return can_assign_src_to_sink(src["type"], sink["type"], strict)
+        return can_assign_src_to_sink(src["type"], sink["type"], strict)
     elif isinstance(src, list):
         if strict:
             for t in src:
@@ -122,11 +119,11 @@ def _rec_fields(rec): # type: (Dict[Text, Any]) -> Dict[Text, Any]
     for key in six.iterkeys(sinkfields):
         if (not can_assign_src_to_sink(
                 srcfields.get(key, "null"), sinkfields.get(key, "null"), strict)
-            and sinkfields.get(key) is not None):
+                and sinkfields.get(key) is not None):
             _logger.info("Record comparison failure for %s and %s\n"
-                         "Did not match fields for %s: %s and %s" %
-                         (src["name"], sink["name"], key, srcfields.get(key),
-                          sinkfields.get(key)))
+                         "Did not match fields for %s: %s and %s",
+                         src["name"], sink["name"], key, srcfields.get(key),
+                         sinkfields.get(key))
             return False
     return True
 
@@ -156,7 +153,8 @@ def static_checker(workflow_inputs, workflow_outputs, step_inputs, step_outputs,
         src = warning.src
         sink = warning.sink
         linkMerge = warning.linkMerge
-        if sink.get("secondaryFiles") and sorted(sink.get("secondaryFiles",[])) != sorted(src.get("secondaryFiles",[])):
+        if sink.get("secondaryFiles") and sorted(
+                sink.get("secondaryFiles", [])) != sorted(src.get("secondaryFiles", [])):
             msg1 = "Sink '%s'" % (shortname(sink["id"]))
             msg2 = SourceLine(sink.get("_tool_entry", sink), "secondaryFiles").makeError(
                 "expects secondaryFiles: %s but" % (sink.get("secondaryFiles")))
@@ -202,7 +200,7 @@ def static_checker(workflow_inputs, workflow_outputs, step_inputs, step_outputs,
 
     for sink in step_inputs:
         if ('null' != sink["type"] and 'null' not in sink["type"]
-            and "source" not in sink and "default" not in sink and "valueFrom" not in sink):
+                and "source" not in sink and "default" not in sink and "valueFrom" not in sink):
             msg = SourceLine(sink).makeError(
                 "Required parameter '%s' does not have source, default, or valueFrom expression"
                 % shortname(sink["id"]))
@@ -212,7 +210,7 @@ def static_checker(workflow_inputs, workflow_outputs, step_inputs, step_outputs,
     all_exception_msg = strip_dup_lineno("\n".join(exception_msgs))
 
     if warnings:
-        _logger.warning("Workflow checker warning:\n%s" % all_warning_msg)
+        _logger.warning("Workflow checker warning:\n%s", all_warning_msg)
     if exceptions:
         raise validate.ValidationException(all_exception_msg)
 

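Most of the checker.py churn above is pylint's no-else-return cleanup: when every branch returns, the elif/else ladders are flattened into plain if statements plus a trailing return. A small before/after sketch using a hypothetical classify helper, not cwltool code:

def classify_before(value):
    if value < 0:
        return "negative"
    elif value == 0:  # flagged by pylint: the elif/else add no information
        return "zero"
    else:
        return "positive"


def classify_after(value):
    if value < 0:
        return "negative"
    if value == 0:  # every branch returns, so plain ifs read flatter
        return "zero"
    return "positive"


assert classify_before(-3) == classify_after(-3) == "negative"
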
cwltool/command_line_tool.py

Lines changed: 19 additions & 17 deletions
@@ -11,11 +11,12 @@
 import tempfile
 import threading
 from functools import cmp_to_key, partial
-from typing import (Any, Callable, Dict,  # pylint: disable=unused-import
-                    Generator, List, Optional, Set, Text, Type, TYPE_CHECKING,
-                    Union, cast)
+from typing import (Any, Callable, Dict, Generator, List, Optional, Set, Union,
+                    cast)
+from typing_extensions import Text, Type, TYPE_CHECKING  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
 
-import schema_salad.validate as validate
+from schema_salad import validate
 from schema_salad.ref_resolver import file_uri, uri_file_path
 from schema_salad.sourceline import SourceLine
 import shellescape
@@ -92,9 +93,9 @@ def run(self, runtimeContext): # type: (RuntimeContext) -> None
             ev = self.builder.do_eval(self.script)
             normalizeFilesDirs(ev)
             self.output_callback(ev, "success")
-        except Exception as e:
+        except Exception as err:
             _logger.warning(u"Failed to evaluate expression:\n%s",
-                            e, exc_info=runtimeContext.debug)
+                            err, exc_info=runtimeContext.debug)
             self.output_callback({}, "permanentFail")
 
     def job(self,
@@ -238,21 +239,22 @@ def make_job_runner(self,
                     "dockerPull": default_container
                 })
                 dockerReq = self.requirements[0]
-                if default_container == windows_default_container_id and runtimeContext.use_container and onWindows():
-                    _logger.warning(DEFAULT_CONTAINER_MSG % (windows_default_container_id, windows_default_container_id))
+                if default_container == windows_default_container_id \
+                        and runtimeContext.use_container and onWindows():
+                    _logger.warning(
+                        DEFAULT_CONTAINER_MSG, windows_default_container_id,
+                        windows_default_container_id)
 
         if dockerReq and runtimeContext.use_container:
             if runtimeContext.singularity:
                 return SingularityCommandLineJob
-            else:
-                return DockerCommandLineJob
-        else:
-            for t in reversed(self.requirements):
-                if t["class"] == "DockerRequirement":
-                    raise UnsupportedRequirement(
-                        "--no-container, but this CommandLineTool has "
-                        "DockerRequirement under 'requirements'.")
-            return CommandLineJob
+            return DockerCommandLineJob
+        for t in reversed(self.requirements):
+            if t["class"] == "DockerRequirement":
+                raise UnsupportedRequirement(
+                    "--no-container, but this CommandLineTool has "
+                    "DockerRequirement under 'requirements'.")
+        return CommandLineJob
 
     def make_path_mapper(self, reffiles, stagedir, runtimeContext, separateDirs):
         # type: (List[Any], Text, RuntimeContext, bool) -> PathMapper

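Another recurring cleanup, visible here and in checker.py, is pylint's logging-not-lazy: the logger is handed the format string and its arguments separately, so interpolation only happens when the record is actually emitted. A hedged sketch with a throwaway logger, not cwltool code:

import logging

logging.basicConfig(level=logging.ERROR)  # WARNING records will be discarded
_logger = logging.getLogger("example")

detail = "a potentially expensive message body"

# Before: the full string is built even though the record is then dropped.
_logger.warning("Workflow checker warning:\n%s" % detail)

# After: arguments are passed separately and formatted only if emitted.
_logger.warning("Workflow checker warning:\n%s", detail)
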
cwltool/context.py

Lines changed: 6 additions & 7 deletions
@@ -1,20 +1,19 @@
 import copy
 import threading  # pylint: disable=unused-import
+from typing import Any, Callable, Dict, Iterable, List, Optional
+from typing_extensions import Text, TYPE_CHECKING  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
+from schema_salad.ref_resolver import (  # pylint: disable=unused-import
+    ContextType, Fetcher, Loader)
+from schema_salad import schema
 
 from .utils import DEFAULT_TMP_PREFIX
 from .stdfsaccess import StdFsAccess
-from typing import (Any, Callable, Dict,  # pylint: disable=unused-import
-                    Generator, Iterable, List, Optional, Text, Union, AnyStr)
-from schema_salad.ref_resolver import (  # pylint: disable=unused-import
-    ContextType, Fetcher, Loader)
-import schema_salad.schema as schema
 from .builder import Builder, HasReqsHints
 from .mutation import MutationManager
 from .software_requirements import DependenciesConfiguration
 from .secrets import SecretStore
-import six
 
-from typing import TYPE_CHECKING
 if TYPE_CHECKING:
     from .process import Process
     from .provenance import (ResearchObject,  # pylint: disable=unused-import

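context.py now takes TYPE_CHECKING from typing_extensions as well and keeps imports that only serve type comments behind the if TYPE_CHECKING: guard, so they are never executed at runtime and cannot introduce import cycles. A minimal sketch of the guard; the Process import mirrors the cwltool name shown above, but the describe function is purely illustrative:

from typing_extensions import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers, never at runtime, so a
    # circular or heavyweight import costs nothing when the program runs.
    from .process import Process  # pylint: disable=unused-import


def describe(process):
    # type: (Process) -> str
    return "running " + process.__class__.__name__
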
cwltool/cwlrdf.py

Lines changed: 5 additions & 4 deletions
@@ -1,6 +1,8 @@
 from __future__ import absolute_import
 
-from typing import IO, Any, Dict, Text
+from typing import IO, Any, Dict
+from typing_extensions import Text  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
 
 from rdflib import Graph
 from schema_salad.jsonld_context import makerdf
@@ -33,8 +35,7 @@ def lastpart(uri): # type: (Any) -> Text
     uri = Text(uri)
     if "/" in uri:
         return uri[uri.rindex("/") + 1:]
-    else:
-        return uri
+    return uri
 
 
 def dot_with_parameters(g, stdout): # type: (Graph, IO[Any]) -> None
@@ -45,7 +46,7 @@ def dot_with_parameters(g, stdout): # type: (Graph, IO[Any]) -> None
        ?run rdf:type ?runtype .
        }""")
 
-    for step, run, runtype in qres:
+    for step, run, _ in qres:
        stdout.write(u'"%s" [label="%s"]\n' % (lastpart(step), "%s (%s)" % (lastpart(step), lastpart(run))))
 
     qres = g.query(

cwltool/docker.py

Lines changed: 4 additions & 3 deletions
@@ -8,8 +8,9 @@
 import tempfile
 import threading
 from io import open  # pylint: disable=redefined-builtin
-from typing import (Any, Dict, List,  # pylint: disable=unused-import
-                    MutableMapping, Optional, Set, Text)
+from typing import Dict, List, MutableMapping, Optional, Set
+from typing_extensions import Text  # pylint: disable=unused-import
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
 
 import requests
 
@@ -132,7 +133,7 @@ def get_image(docker_requirement, # type: Dict[Text, Text]
     elif "dockerFile" in docker_requirement:
         dockerfile_dir = str(tempfile.mkdtemp(prefix=tmp_outdir_prefix))
         with open(os.path.join(
-            dockerfile_dir, "Dockerfile"), "wb") as dfile:
+                dockerfile_dir, "Dockerfile"), "wb") as dfile:
             dfile.write(docker_requirement["dockerFile"].encode('utf-8'))
         cmd = ["docker", "build", "--tag=%s" %
                str(docker_requirement["dockerImageId"]), dockerfile_dir]
