Skip to content

Commit 0830349

Browse files
DimitriPapadopoulos authored and FrancescAlted committed
Modernise string formatting
Use f-strings or format() instead of old string formatting. https://docs.python.org/3/tutorial/inputoutput.html#fancier-output-formatting Suggested by pyupgrade.
1 parent f02871b commit 0830349

File tree

14 files changed

+71
-71
lines changed

14 files changed

+71
-71
lines changed

bench/compress_numpy.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424

2525
blosc2.print_versions()
2626

27-
print("Creating NumPy arrays with 10**%d int64/float64 elements:" % Nexp)
27+
print(f"Creating NumPy arrays with 10**{Nexp} int64/float64 elements:")
2828
arrays = (
2929
(np.arange(N, dtype=np.int64), "the arange linear distribution"),
3030
(np.linspace(0, 10_000, N), "the linspace linear distribution"),
@@ -38,11 +38,11 @@
3838
for i in range(NREP):
3939
np.copyto(out_, in_)
4040
tcpy = (time.time() - t0) / NREP
41-
print(" *** np.copyto() *** Time for memcpy():\t%.3f s\t(%.2f GB/s)" % (tcpy, (N * 8 / tcpy) / 2**30))
41+
print(" *** np.copyto() *** Time for memcpy():\t{:.3f} s\t({:.2f} GB/s)".format(tcpy, (N * 8 / tcpy) / 2**30))
4242

4343
print("\nTimes for compressing/decompressing:")
4444
for in_, label in arrays:
45-
print("\n*** %s ***" % label)
45+
print(f"\n*** {label} ***")
4646
for codec in blosc2.Codec:
4747
for filter in (
4848
blosc2.Filter.NOFILTER,
@@ -61,8 +61,8 @@
6161
td = (time.time() - t0) / NREP
6262
assert np.array_equal(in_, out)
6363
print(
64-
" *** %-7s, %-10s *** %6.3f s (%.2f GB/s) / %5.3f s (%.2f GB/s)"
65-
% (
64+
" *** {:7s}, {:10s} *** {:6.3f} s ({:.2f} GB/s) / {:5.3f} s ({:.2f} GB/s)"
65+
.format(
6666
codec,
6767
filter,
6868
tc,
@@ -72,4 +72,4 @@
7272
),
7373
end="",
7474
)
75-
print("\tcr: %5.1fx" % (N * 8.0 / len(c)))
75+
print("\tcr: {:5.1f}x".format(N * 8.0 / len(c)))

bench/get_slice.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,6 @@
4949
for i in range(shape // blocksize):
5050
_ = schunk.get_slice(start=i * blocksize, stop=(i + 1) * blocksize - 1)
5151
t1 = time()
52-
print("Time for reading with get_slice: %.3fs" % (t1 - t0))
52+
print("Time for reading with get_slice: {:.3f}s".format(t1 - t0))
5353

5454
blosc2.remove_urlpath(urlpath)

bench/ndarray/compare_getslice.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@
117117

118118
# Create and fill an HDF5 array (PyTables)
119119
t0 = time()
120-
filters = tables.Filters(complevel=clevel, complib="blosc2:%s" % cname, shuffle=True)
120+
filters = tables.Filters(complevel=clevel, complib=f"blosc2:{cname}", shuffle=True)
121121
tables.set_blosc_max_threads(nthreads)
122122
if persistent:
123123
h5f = tables.open_file(fname_tables, "w")

bench/ndarray/download_data.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212

1313
def open_zarr(year, month, datestart, dateend, dset):
1414
fs = s3fs.S3FileSystem(anon=True)
15-
datestring = "era5-pds/zarr/{year}/{month:02d}/data/".format(year=year, month=month)
15+
datestring = f"era5-pds/zarr/{year}/{month:02d}/data/"
1616
s3map = s3fs.S3Map(datestring + dset + ".zarr/", s3=fs)
1717
arr = xr.open_dataset(s3map, engine="zarr")
1818
if dset[:3] in ("air", "sno", "eas"):
@@ -37,4 +37,4 @@ def open_zarr(year, month, datestart, dateend, dset):
3737
print(f"Fetching dataset {dset} from S3 (era5-pds)...")
3838
precip_m0 = open_zarr(1987, 10, "1987-10-01", "1987-10-30 23:59", dset)
3939
cparams = {"codec": blosc2.Codec.ZSTD, "clevel": 6}
40-
blosc2.asarray(precip_m0.values, urlpath="%s/%s.b2nd" % (dir_path, short), mode="w", cparams=cparams)
40+
blosc2.asarray(precip_m0.values, urlpath=f"{dir_path}/{short}.b2nd", mode="w", cparams=cparams)

bench/pack_compress.py

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,8 @@
3939
toc = time.time()
4040
tcpy = (toc - tic) / NREP
4141
print(
42-
" Time for copying array with np.copy: %.3f s (%.2f GB/s))"
43-
% (tcpy, ((N * 8 / tcpy) / 2**30))
42+
" Time for copying array with np.copy: {:.3f} s ({:.2f} GB/s))"
43+
.format(tcpy, ((N * 8 / tcpy) / 2**30))
4444
)
4545

4646
if comprehensive_copy_timing:
@@ -51,8 +51,8 @@
5151
toc = time.time()
5252
tcpy = (toc - tic) / NREP
5353
print(
54-
" Time for copying array with np.copyto and empty_like: %.3f s (%.2f GB/s))"
55-
% (tcpy, ((N * 8 / tcpy) / 2**30))
54+
" Time for copying array with np.copyto and empty_like: {:.3f} s ({:.2f} GB/s))"
55+
.format(tcpy, ((N * 8 / tcpy) / 2**30))
5656
)
5757

5858
# Unlike numpy.zeros, numpy.zeros_like doesn't use calloc, but instead uses
@@ -66,8 +66,8 @@
6666
toc = time.time()
6767
tcpy = (toc - tic) / NREP
6868
print(
69-
" Time for copying array with np.copyto and zeros: %.3f s (%.2f GB/s))"
70-
% (tcpy, ((N * 8 / tcpy) / 2**30))
69+
" Time for copying array with np.copyto and zeros: {:.3f} s ({:.2f} GB/s))"
70+
.format(tcpy, ((N * 8 / tcpy) / 2**30))
7171
)
7272

7373
# Cause a page fault before the benchmark
@@ -78,8 +78,8 @@
7878
toc = time.time()
7979
tcpy = (toc - tic) / NREP
8080
print(
81-
" Time for copying array with np.copyto and full_like: %.3f s (%.2f GB/s))"
82-
% (tcpy, ((N * 8 / tcpy) / 2**30))
81+
" Time for copying array with np.copyto and full_like: {:.3f} s ({:.2f} GB/s))"
82+
.format(tcpy, ((N * 8 / tcpy) / 2**30))
8383
)
8484

8585
tic = time.time()
@@ -89,16 +89,16 @@
8989
toc = time.time()
9090
tcpy = (toc - tic) / NREP
9191
print(
92-
" Time for copying array with numpy assignment: %.3f s (%.2f GB/s))"
93-
% (tcpy, ((N * 8 / tcpy) / 2**30))
92+
" Time for copying array with numpy assignment: {:.3f} s ({:.2f} GB/s))"
93+
.format(tcpy, ((N * 8 / tcpy) / 2**30))
9494
)
9595

9696
print()
9797
filters = [blosc2.Filter.SHUFFLE, blosc2.Filter.BYTEDELTA]
9898
print(f"Using {filters=}")
9999

100100
for in_, label in arrays:
101-
print("\n*** %s ***" % label)
101+
print(f"\n*** {label} ***")
102102
for codec in blosc2.compressor_list():
103103
clevel = 6
104104
print(f"Using *** {codec} (clevel {clevel}) *** :")
@@ -122,11 +122,11 @@
122122
tc = (ctoc - ctic) / NREP
123123
td = (dtoc - dtic) / NREP
124124
print(
125-
" Time for compress/decompress: %.3f/%.3f s (%.2f/%.2f GB/s)) "
126-
% (tc, td, ((N * 8 / tc) / 2**30), ((N * 8 / td) / 2**30)),
125+
" Time for compress/decompress: {:.3f}/{:.3f} s ({:.2f}/{:.2f} GB/s)) "
126+
.format(tc, td, ((N * 8 / tc) / 2**30), ((N * 8 / td) / 2**30)),
127127
end="",
128128
)
129-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
129+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
130130

131131
ctic = time.time()
132132
for i in range(NREP):
@@ -141,11 +141,11 @@
141141
tc = (ctoc - ctic) / NREP
142142
td = (dtoc - dtic) / NREP
143143
print(
144-
" Time for pack_array2/unpack_array2: %.3f/%.3f s (%.2f/%.2f GB/s)) "
145-
% (tc, td, ((N * 8 / tc) / 2**30), ((N * 8 / td) / 2**30)),
144+
" Time for pack_array2/unpack_array2: {:.3f}/{:.3f} s ({:.2f}/{:.2f} GB/s)) "
145+
.format(tc, td, ((N * 8 / tc) / 2**30), ((N * 8 / td) / 2**30)),
146146
end="",
147147
)
148-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
148+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
149149

150150
ctic = time.time()
151151
for i in range(NREP):
@@ -160,8 +160,8 @@
160160
tc = (ctoc - ctic) / NREP
161161
td = (dtoc - dtic) / NREP
162162
print(
163-
" Time for pack_tensor/unpack_tensor: %.3f/%.3f s (%.2f/%.2f GB/s)) "
164-
% (tc, td, ((N * 8 / tc) / 2**30), ((N * 8 / td) / 2**30)),
163+
" Time for pack_tensor/unpack_tensor: {:.3f}/{:.3f} s ({:.2f}/{:.2f} GB/s)) "
164+
.format(tc, td, ((N * 8 / tc) / 2**30), ((N * 8 / td) / 2**30)),
165165
end="",
166166
)
167-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
167+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))

bench/pack_large.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -41,9 +41,9 @@
4141
ctoc = time.time()
4242
tc = (ctoc - ctic) / NREP
4343
print(
44-
" Time for pack_tensor: %.3f (%.2f GB/s)) " % (tc, ((N * 8 / tc) / 2**30)),
44+
" Time for pack_tensor: {:.3f} ({:.2f} GB/s)) ".format(tc, ((N * 8 / tc) / 2**30)),
4545
)
46-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
46+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
4747

4848
out = None
4949
dtic = time.time()
@@ -53,6 +53,6 @@
5353

5454
td = (dtoc - dtic) / NREP
5555
print(
56-
" Time for unpack_tensor: %.3f s (%.2f GB/s)) " % (td, ((N * 8 / td) / 2**30)),
56+
" Time for unpack_tensor: {:.3f} s ({:.2f} GB/s)) ".format(td, ((N * 8 / td) / 2**30)),
5757
)
5858
assert np.array_equal(in_, out)

bench/pack_tensor.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -48,10 +48,10 @@
4848
ctoc = time.time()
4949
tc = (ctoc - ctic) / NREP
5050
print(
51-
" Time for tensorflow (tf.io.serialize):\t%.3f s (%.2f GB/s)) " % (tc, ((N * 8 / tc) / 2**30)),
51+
" Time for tensorflow (tf.io.serialize):\t{:.3f} s ({:.2f} GB/s)) ".format(tc, ((N * 8 / tc) / 2**30)),
5252
end="",
5353
)
54-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
54+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
5555

5656
with open("serialize_tensorflow.bin", "wb") as f:
5757
f.write(c)
@@ -65,11 +65,11 @@
6565
ctoc = time.time()
6666
tc = (ctoc - ctic) / NREP
6767
print(
68-
" Time for torch (torch.save):\t\t\t%.3f s (%.2f GB/s)) " % (tc, ((N * 8 / tc) / 2**30)), end=""
68+
" Time for torch (torch.save):\t\t\t{:.3f} s ({:.2f} GB/s)) ".format(tc, ((N * 8 / tc) / 2**30)), end=""
6969
)
7070
buff.seek(0)
7171
c = buff.read()
72-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
72+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
7373

7474
with open("serialize_torch.bin", "wb") as f:
7575
f.write(c)
@@ -85,10 +85,10 @@
8585
ctoc = time.time()
8686
tc = (ctoc - ctic) / NREP
8787
print(
88-
" Time for tensorflow (blosc2.pack_tensor):\t%.3f s (%.2f GB/s)) " % (tc, ((N * 8 / tc) / 2**30)),
88+
" Time for tensorflow (blosc2.pack_tensor):\t{:.3f} s ({:.2f} GB/s)) ".format(tc, ((N * 8 / tc) / 2**30)),
8989
end="",
9090
)
91-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
91+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
9292

9393
with open("pack_tensorflow.bl2", "wb") as f:
9494
f.write(c)
@@ -101,10 +101,10 @@
101101
ctoc = time.time()
102102
tc = (ctoc - ctic) / NREP
103103
print(
104-
" Time for torch (blosc2.pack_tensor):\t\t%.3f s (%.2f GB/s)) " % (tc, ((N * 8 / tc) / 2**30)),
104+
" Time for torch (blosc2.pack_tensor):\t\t{:.3f} s ({:.2f} GB/s)) ".format(tc, ((N * 8 / tc) / 2**30)),
105105
end="",
106106
)
107-
print("\tcr: %5.1fx" % (in_.size * in_.dtype.itemsize * 1.0 / len(c)))
107+
print("\tcr: {:5.1f}x".format(in_.size * in_.dtype.itemsize * 1.0 / len(c)))
108108

109109
with open("pack_torch.bl2", "wb") as f:
110110
f.write(c)
@@ -120,7 +120,7 @@
120120
dtoc = time.time()
121121
td = (dtoc - dtic) / NREP
122122
print(
123-
" Time for tensorflow (tf.io.parse_tensor):\t%.3f s (%.2f GB/s)) " % (td, ((N * 8 / td) / 2**30)),
123+
" Time for tensorflow (tf.io.parse_tensor):\t{:.3f} s ({:.2f} GB/s)) ".format(td, ((N * 8 / td) / 2**30)),
124124
)
125125

126126
with open("serialize_torch.bin", "rb") as f:
@@ -134,7 +134,7 @@
134134
dtoc = time.time()
135135
td = (dtoc - dtic) / NREP
136136
print(
137-
" Time for torch (torch.load):\t\t\t%.3f s (%.2f GB/s)) " % (td, ((N * 8 / td) / 2**30)),
137+
" Time for torch (torch.load):\t\t\t{:.3f} s ({:.2f} GB/s)) ".format(td, ((N * 8 / td) / 2**30)),
138138
)
139139

140140
with open("pack_tensorflow.bl2", "rb") as f:
@@ -147,8 +147,8 @@
147147
dtoc = time.time()
148148
td = (dtoc - dtic) / NREP
149149
print(
150-
" Time for tensorflow (blosc2.unpack_tensor):\t%.3f s (%.2f GB/s)) "
151-
% (td, ((N * 8 / td) / 2**30)),
150+
" Time for tensorflow (blosc2.unpack_tensor):\t{:.3f} s ({:.2f} GB/s)) "
151+
.format(td, ((N * 8 / td) / 2**30)),
152152
)
153153
assert np.array_equal(in_, out)
154154

@@ -164,6 +164,6 @@
164164

165165
td = (dtoc - dtic) / NREP
166166
print(
167-
" Time for torch (blosc2.unpack_tensor):\t%.3f s (%.2f GB/s)) " % (td, ((N * 8 / td) / 2**30)),
167+
" Time for torch (blosc2.unpack_tensor):\t{:.3f} s ({:.2f} GB/s)) ".format(td, ((N * 8 / td) / 2**30)),
168168
)
169169
assert np.array_equal(in_, out)

bench/set_slice.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,6 @@
5252
t0 = time()
5353
schunk[start:stop] = val
5454
t1 = time()
55-
print("Time for setting with setitem: %.3fs" % (t1 - t0))
55+
print("Time for setting with setitem: {:.3f}s".format(t1 - t0))
5656

5757
blosc2.remove_urlpath(urlpath)

blosc2/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,7 @@ class SpecialValue(Enum):
130130
# Registry for user-defined filters
131131
ufilters_registry = {}
132132

133-
blosclib_version = "%s (%s)" % (VERSION_STRING, VERSION_DATE)
133+
blosclib_version = f"{VERSION_STRING} ({VERSION_DATE})"
134134

135135
# Internal Blosc threading
136136
nthreads = ncores = detect_number_of_cores() // 2

blosc2/blosc2_ext.pyx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -504,7 +504,7 @@ DEFAULT_DTYPE_FORMAT = B2ND_DEFAULT_DTYPE_FORMAT
504504

505505
def _check_comp_length(comp_name, comp_len):
506506
if comp_len < BLOSC_MIN_HEADER_LENGTH:
507-
raise ValueError("%s cannot be less than %d bytes" % (comp_name, BLOSC_MIN_HEADER_LENGTH))
507+
raise ValueError(f"{comp_name} cannot be less than {BLOSC_MIN_HEADER_LENGTH} bytes")
508508

509509

510510
blosc2_init()

0 commit comments

Comments
 (0)