 import copy
 import datetime
 import hashlib
-import io
 import logging
 import os
 import os.path
…
 import uuid
 from collections import OrderedDict
 from getpass import getuser
-from io import open
+from io import BytesIO, FileIO, TextIOWrapper, open
 from socket import getfqdn
-from typing import (IO, Any, Callable, Dict, List, MutableMapping, Optional,
-                    Set, Tuple, Union, cast)
+from typing import (IO, Any, Callable, Dict, List, Generator, MutableMapping,
+                    Optional, Set, Tuple, Union, cast)

 import prov.model as provM
 import six
@@ -149,7 +148,7 @@ def _whoami():
     return (username, fullname)


-class WritableBagFile(io.FileIO):
+class WritableBagFile(FileIO):
     """Writes files in research object."""

     def __init__(self, research_object, rel_path):

@@ -209,7 +208,7 @@ def readable(self):

     def truncate(self, size=None):
         # type: (Optional[int]) -> int
-        # FIXME: This breaks contract io.IOBase,
+        # FIXME: This breaks contract IOBase,
         # as it means we would have to recalculate the hash
         if size is not None:
             raise IOError("WritableBagFile can't truncate")
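
The truncate() restriction follows from how the bag writer tracks checksums: bytes are hashed as they are written, so shrinking the file afterwards would invalidate the running digest. Below is a hypothetical, minimal sketch of that checksum-on-write pattern; HashingFileIO and its SHA-1 choice are illustrative only, not the actual WritableBagFile implementation.

import hashlib
from io import FileIO


class HashingFileIO(FileIO):
    """Illustrative only: hash bytes as they are written to disk."""

    def __init__(self, path):
        super(HashingFileIO, self).__init__(path, mode="w")
        self.checksum = hashlib.sha1()

    def write(self, b):
        # type: (bytes) -> int
        total = super(HashingFileIO, self).write(b)
        self.checksum.update(b)
        return total

    def truncate(self, size=None):
        # Shrinking the file would make the running digest wrong, so only a
        # no-op truncate at the current position is allowed.
        if size is not None:
            raise IOError("cannot truncate without recomputing the checksum")
        return super(HashingFileIO, self).truncate()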

@@ -680,7 +679,7 @@ def declare_directory(self, value):  # type: (MutableMapping) -> ProvEntity
     def declare_string(self, value):
         # type: (Union[Text, str]) -> Tuple[ProvEntity,Text]
         """Save as string in UTF-8."""
-        byte_s = io.BytesIO(str(value).encode(ENCODING))
+        byte_s = BytesIO(str(value).encode(ENCODING))
         data_file = self.research_object.add_data_file(byte_s, content_type=TEXT_PLAIN)
         checksum = posixpath.basename(data_file)
         # FIXME: Don't naively assume add_data_file uses hash in filename!

@@ -716,7 +715,7 @@ def declare_artefact(self, value):

         if isinstance(value, bytes):
             # If we got here then we must be in Python 3
-            byte_s = io.BytesIO(value)
+            byte_s = BytesIO(value)
             data_file = self.research_object.add_data_file(byte_s)
             # FIXME: Don't naively assume add_data_file uses hash in filename!
             data_id = "data:%s" % posixpath.split(data_file)[1]
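
BytesIO wraps in-memory bytes in a readable file object, so literal strings and bytes can go through the same hashing and storage path as real files. A standalone sketch of that stdlib pattern follows, assuming a UTF-8 ENCODING constant like the module's; add_data_file itself is not reproduced here.

import hashlib
from io import BytesIO

ENCODING = "utf-8"  # assumed to mirror the module-level ENCODING constant

byte_s = BytesIO(u"Hello provenance".encode(ENCODING))
checksum = hashlib.sha1()
for block in iter(lambda: byte_s.read(8192), b""):
    checksum.update(block)
print(checksum.hexdigest())  # a content-derived data/<sha1> name could reuse this digest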

@@ -1051,13 +1050,13 @@ def write_bag_file(self, path, encoding=ENCODING):
         # type: (Text, Optional[str]) -> IO
         """Write the bag file into our research object."""
         # For some reason below throws BlockingIOError
-        #fp = io.BufferedWriter(WritableBagFile(self, path))
+        #fp = BufferedWriter(WritableBagFile(self, path))
         bag_file = cast(IO, WritableBagFile(self, path))
         if encoding:
             # encoding: match Tag-File-Character-Encoding: UTF-8
             # newline: ensure LF also on Windows
             return cast(IO,
-                        io.TextIOWrapper(bag_file, encoding=encoding, newline="\n"))
+                        TextIOWrapper(bag_file, encoding=encoding, newline="\n"))
         return bag_file

     def add_tagfile(self, path, when=None):
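
write_bag_file layers a TextIOWrapper over the raw binary writer so tag files come out as UTF-8 with LF newlines even on Windows. A minimal stdlib-only sketch of that layering, with BytesIO standing in for WritableBagFile and an illustrative BagIt tag line:

from io import BytesIO, TextIOWrapper

raw = BytesIO()  # stands in for WritableBagFile(self, path)
tag_file = TextIOWrapper(raw, encoding="utf-8", newline="\n")
tag_file.write(u"Tag-File-Character-Encoding: UTF-8\n")
tag_file.flush()
assert raw.getvalue() == b"Tag-File-Character-Encoding: UTF-8\n"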

@@ -1505,7 +1504,7 @@ def jdefault(o):
             rel_path = posixpath.join(_posix_path(WORKFLOW), "primary-output.json")
         else:
             rel_path = posixpath.join(_posix_path(WORKFLOW), "primary-job.json")
-        j = json.dumps(copied, indent=4, ensure_ascii=False, default=jdefault)
+        j = json_dumps(copied, indent=4, ensure_ascii=False, default=jdefault)
         with self.write_bag_file(rel_path) as file_path:
             file_path.write(j + u"\n")
             _logger.debug(u"[provenance] Generated customised job file: %s",