Skip to content

Commit a477905

Browse files
authored
apply pyupgrade (#3026)
* apply pyupgrade Signed-off-by: Jirka <[email protected]> * exclude monai/_version.py Signed-off-by: Jirka <[email protected]>
1 parent 453a9c0 commit a477905

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

72 files changed

+164
-162
lines changed

.pre-commit-config.yaml

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,17 @@ repos:
2222
args: ['--maxkb=1024']
2323
- id: detect-private-key
2424

25-
#- repo: https://github.com/asottile/pyupgrade
26-
# rev: v2.23.2
27-
# hooks:
28-
# - id: pyupgrade
29-
# args: [--py36-plus]
30-
# name: Upgrade code
25+
- repo: https://github.com/asottile/pyupgrade
26+
rev: v2.27.0
27+
hooks:
28+
- id: pyupgrade
29+
args: [--py36-plus]
30+
name: Upgrade code
31+
exclude: |
32+
(?x)^(
33+
versioneer.py|
34+
monai/_version.py
35+
)$
3136
3237
#- repo: https://github.com/asottile/yesqa
3338
# rev: v1.2.3

monai/apps/datasets.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def _generate_data_list(self, dataset_dir: str) -> List[Dict]:
111111
ValueError: When ``section`` is not one of ["training", "validation", "test"].
112112
113113
"""
114-
class_names = sorted((x for x in os.listdir(dataset_dir) if os.path.isdir(os.path.join(dataset_dir, x))))
114+
class_names = sorted(x for x in os.listdir(dataset_dir) if os.path.isdir(os.path.join(dataset_dir, x)))
115115
self.num_class = len(class_names)
116116
image_files = [
117117
[

monai/apps/deepgrow/dataset.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ def create_dataset(
9797
image = os.path.abspath(image)
9898
label = os.path.abspath(label) if label else None
9999

100-
logging.info("Image: {}; Label: {}".format(image, label if label else None))
100+
logging.info(f"Image: {image}; Label: {label if label else None}")
101101
data = transforms({image_key: image, label_key: label})
102102
if dimension == 2:
103103
data = _save_data_2d(
@@ -154,7 +154,7 @@ def _save_data_2d(vol_idx, vol_image, vol_label, dataset_dir, relative_path):
154154
if vol_label is not None and np.sum(label) == 0:
155155
continue
156156

157-
image_file_prefix = "vol_idx_{:0>4d}_slice_{:0>3d}".format(vol_idx, sid)
157+
image_file_prefix = f"vol_idx_{vol_idx:0>4d}_slice_{sid:0>3d}"
158158
image_file = os.path.join(dataset_dir, "images", image_file_prefix)
159159
image_file += ".npy"
160160

@@ -177,7 +177,7 @@ def _save_data_2d(vol_idx, vol_image, vol_label, dataset_dir, relative_path):
177177
unique_labels_count = max(unique_labels_count, len(unique_labels))
178178

179179
for idx in unique_labels:
180-
label_file_prefix = "{}_region_{:0>2d}".format(image_file_prefix, int(idx))
180+
label_file_prefix = f"{image_file_prefix}_region_{int(idx):0>2d}"
181181
label_file = os.path.join(dataset_dir, "labels", label_file_prefix)
182182
label_file += ".npy"
183183

@@ -226,7 +226,7 @@ def _save_data_3d(vol_idx, vol_image, vol_label, dataset_dir, relative_path):
226226
label_count = 0
227227
unique_labels_count = 0
228228

229-
image_file_prefix = "vol_idx_{:0>4d}".format(vol_idx)
229+
image_file_prefix = f"vol_idx_{vol_idx:0>4d}"
230230
image_file = os.path.join(dataset_dir, "images", image_file_prefix)
231231
image_file += ".npy"
232232

@@ -248,7 +248,7 @@ def _save_data_3d(vol_idx, vol_image, vol_label, dataset_dir, relative_path):
248248
unique_labels_count = max(unique_labels_count, len(unique_labels))
249249

250250
for idx in unique_labels:
251-
label_file_prefix = "{}_region_{:0>2d}".format(image_file_prefix, int(idx))
251+
label_file_prefix = f"{image_file_prefix}_region_{int(idx):0>2d}"
252252
label_file = os.path.join(dataset_dir, "labels", label_file_prefix)
253253
label_file += ".npy"
254254

monai/apps/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ def update_to(self, b: int = 1, bsize: int = 1, tsize: Optional[int] = None):
8181
if not has_tqdm and progress:
8282
warnings.warn("tqdm is not installed, will not show the downloading progress bar.")
8383
urlretrieve(url, filepath)
84-
except (URLError, HTTPError, ContentTooShortError, IOError) as e:
84+
except (URLError, HTTPError, ContentTooShortError, OSError) as e:
8585
print(f"Download failed from {url} to {filepath}.")
8686
raise e
8787

monai/config/deviceconfig.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ def get_system_info() -> OrderedDict:
122122
elif output["System"] == "Darwin":
123123
_dict_append(output, "Mac version", lambda: platform.mac_ver()[0])
124124
else:
125-
with open("/etc/os-release", "r") as rel_f:
125+
with open("/etc/os-release") as rel_f:
126126
linux_ver = re.search(r'PRETTY_NAME="(.*)"', rel_f.read())
127127
if linux_ver:
128128
_dict_append(output, "Linux version", lambda: linux_ver.group(1))

monai/data/dataset.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -989,7 +989,7 @@ def __init__(self, datasets: Sequence, transform: Optional[Callable] = None) ->
989989
super().__init__(list(datasets), transform=transform)
990990

991991
def __len__(self) -> int:
992-
return min((len(dataset) for dataset in self.data))
992+
return min(len(dataset) for dataset in self.data)
993993

994994
def _transform(self, index: int):
995995
def to_list(x):

monai/data/dataset_summary.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ def __init__(
6060
self.image_key = image_key
6161
self.label_key = label_key
6262
if image_key:
63-
self.meta_key = "{}_{}".format(image_key, meta_key_postfix)
63+
self.meta_key = f"{image_key}_{meta_key_postfix}"
6464
self.all_meta_data: List = []
6565

6666
def collect_meta_data(self):

monai/data/utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1029,7 +1029,7 @@ def json_hashing(item) -> bytes:
10291029
"""
10301030
# TODO: Find way to hash transforms content as part of the cache
10311031
cache_key = hashlib.md5(json.dumps(item, sort_keys=True).encode("utf-8")).hexdigest()
1032-
return f"{cache_key}".encode("utf-8")
1032+
return f"{cache_key}".encode()
10331033

10341034

10351035
def pickle_hashing(item, protocol=pickle.HIGHEST_PROTOCOL) -> bytes:
@@ -1044,7 +1044,7 @@ def pickle_hashing(item, protocol=pickle.HIGHEST_PROTOCOL) -> bytes:
10441044
10451045
"""
10461046
cache_key = hashlib.md5(pickle.dumps(sorted_dict(item), protocol=protocol)).hexdigest()
1047-
return f"{cache_key}".encode("utf-8")
1047+
return f"{cache_key}".encode()
10481048

10491049

10501050
def sorted_dict(item, key=None, reverse=False):

monai/losses/deform.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ def __init__(
6565
- ``"mean"``: the sum of the output will be divided by the number of elements in the output.
6666
- ``"sum"``: the output will be summed.
6767
"""
68-
super(BendingEnergyLoss, self).__init__(reduction=LossReduction(reduction).value)
68+
super().__init__(reduction=LossReduction(reduction).value)
6969

7070
def forward(self, pred: torch.Tensor) -> torch.Tensor:
7171
"""

monai/losses/dice.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -419,7 +419,7 @@ def __init__(
419419
wass_loss(pred_score, grnd) # 0
420420
421421
"""
422-
super(GeneralizedWassersteinDiceLoss, self).__init__(reduction=LossReduction(reduction).value)
422+
super().__init__(reduction=LossReduction(reduction).value)
423423

424424
if dist_matrix.shape[0] != dist_matrix.shape[1]:
425425
raise ValueError(f"dist_matrix must be C x C, got {dist_matrix.shape[0]} x {dist_matrix.shape[1]}.")

0 commit comments

Comments (0)