Skip to content

Commit 91fb8fb

Browse files
committed
update ruff version and format files.
1 parent 9ca60a3 commit 91fb8fb

File tree

9 files changed

+33
-33
lines changed

9 files changed

+33
-33
lines changed

dev-requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ regex==2024.11.6
181181
# via mkdocs-material
182182
requests==2.32.3
183183
# via mkdocs-material
184-
ruff==0.8.2
184+
ruff==0.9.2
185185
# via pynxtools (pyproject.toml)
186186
scipy==1.14.1
187187
# via ase

src/pynxtools/dataconverter/readers/example/reader.py

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -106,22 +106,16 @@ def read(
106106

107107
# virtual datasets slicing
108108
my_path = str(f"{os.path.dirname(__file__)}/../../../data/")
109-
template[("/ENTRY[entry]" "/test_virtual" "_dataset/sliced" "_dataset")] = {
110-
"link": (
111-
f"{my_path}/xarray_saved_small_" "calibration.h5:/binned/BinnedData"
112-
),
109+
template[("/ENTRY[entry]/test_virtual_dataset/sliced_dataset")] = {
110+
"link": (f"{my_path}/xarray_saved_small_calibration.h5:/binned/BinnedData"),
113111
"shape": np.index_exp[:, 1, :, :],
114112
}
115-
template[("/ENTRY[entry]" "/test_virtual" "_dataset/slic" "ed_dataset2")] = {
116-
"link": (
117-
f"{my_path}/xarray_saved_small" "_calibration.h5:/binned/BinnedData"
118-
),
113+
template[("/ENTRY[entry]/test_virtual_dataset/sliced_dataset2")] = {
114+
"link": (f"{my_path}/xarray_saved_small_calibration.h5:/binned/BinnedData"),
119115
"shape": np.index_exp[:, :, :, 1],
120116
}
121-
template[("/ENTRY[entry]" "/test_virtual" "_dataset/slic" "ed_dataset3")] = {
122-
"link": (
123-
f"{my_path}/xarray_saved_small" "_calibration.h5:/binned/BinnedData"
124-
),
117+
template[("/ENTRY[entry]/test_virtual_dataset/sliced_dataset3")] = {
118+
"link": (f"{my_path}/xarray_saved_small_calibration.h5:/binned/BinnedData"),
125119
"shape": np.index_exp[:, :, :, 2:4],
126120
}
127121

src/pynxtools/dataconverter/readers/json_map/reader.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ def get_val_nested_keystring_from_dict(keystring, data):
5656
return data[current_key].values
5757
if isinstance(data[current_key], xarray.core.dataset.Dataset):
5858
raise NotImplementedError(
59-
"Xarray datasets are not supported. " "You can only use xarray dataarrays."
59+
"Xarray datasets are not supported. You can only use xarray dataarrays."
6060
)
6161

6262
return data[current_key]

src/pynxtools/dataconverter/writer.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,11 +21,11 @@
2121

2222
import copy
2323
import logging
24-
import sys
2524
import xml.etree.ElementTree as ET
2625

2726
import h5py
2827
import numpy as np
28+
import pint
2929

3030
from pynxtools.dataconverter import helpers
3131
from pynxtools.dataconverter.exceptions import InvalidDictProvided
@@ -183,6 +183,9 @@ def handle_dicts_entries(data, grp, entry_name, output_path, path):
183183
return None
184184

185185

186+
xx = []
187+
188+
186189
class Writer:
187190
"""The writer class for writing a NeXus file in accordance with a given NXDL.
188191
@@ -259,8 +262,11 @@ def _put_data_into_hdf5(self):
259262

260263
def add_units_key(dataset, path):
261264
units_key = f"{path}/@units"
262-
if units_key in self.data.keys() and self.data[units_key] is not None:
263-
dataset.attrs["units"] = self.data[units_key]
265+
units = self.data.get(units_key, None)
266+
units = str(units) if isinstance(units, pint.Unit) else units
267+
if units:
268+
dataset.attrs["units"] = units
269+
xx.append(units)
264270

265271
for path, value in self.data.items():
266272
try:

src/pynxtools/nexus/nexus.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -439,7 +439,7 @@ def process_node(hdf_node, hdf_path, parser, logger, doc=True):
439439
if len(hdf_node.shape) <= 1
440440
else str(decode_if_string(hdf_node[0])).split("\n")
441441
)
442-
logger.debug(f'value: {val[0]} {"..." if len(val) > 1 else ""}')
442+
logger.debug(f"value: {val[0]} {'...' if len(val) > 1 else ''}")
443443
else:
444444
logger.debug(
445445
f"===== GROUP (/{hdf_path} "
@@ -460,7 +460,7 @@ def process_node(hdf_node, hdf_path, parser, logger, doc=True):
460460
for key, value in hdf_node.attrs.items():
461461
logger.debug(f"===== ATTRS (/{hdf_path}@{key})")
462462
val = str(decode_if_string(value)).split("\n")
463-
logger.debug(f'value: {val[0]} {"..." if len(val) > 1 else ""}')
463+
logger.debug(f"value: {val[0]} {'...' if len(val) > 1 else ''}")
464464
(req_str, nxdef, nxdl_path) = get_nxdl_doc(hdf_info, logger, doc, attr=key)
465465
if (
466466
parser is not None

src/pynxtools/testing/nexus_conversion.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -103,9 +103,9 @@ def convert_to_nexus(
103103
"""
104104
Test the example data for the reader plugin.
105105
"""
106-
assert hasattr(
107-
self.reader, "supported_nxdls"
108-
), f"Reader{self.reader} must have supported_nxdls attribute"
106+
assert hasattr(self.reader, "supported_nxdls"), (
107+
f"Reader{self.reader} must have supported_nxdls attribute"
108+
)
109109
assert callable(self.reader.read), f"Reader{self.reader} must have read method"
110110

111111
if isinstance(self.files_or_dir, (list, tuple)):

src/pynxtools/testing/nomad_example.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -124,6 +124,6 @@ def example_upload_entry_point_valid(
124124
os.path.abspath(os.path.join(dirpath, filename))
125125
)
126126

127-
assert (
128-
sorted(real_upload_files) == sorted(expected_upload_files)
129-
), f"Uploaded files {real_upload_files} do not match the expected files: {expected_upload_files}"
127+
assert sorted(real_upload_files) == sorted(expected_upload_files), (
128+
f"Uploaded files {real_upload_files} do not match the expected files: {expected_upload_files}"
129+
)

tests/dataconverter/test_helpers.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -172,13 +172,13 @@ def fixture_filled_test_data(template, tmp_path):
172172
template["/ENTRY[my_entry]/program_name"] = "Testing program"
173173
template["/ENTRY[my_entry]/NXODD_name[nxodd_name]/type"] = "2nd type"
174174
template["/ENTRY[my_entry]/NXODD_name[nxodd_name]/date_value"] = (
175-
"2022-01-22T12" ":14:12.05018+00:00"
175+
"2022-01-22T12:14:12.05018+00:00"
176176
)
177177
template["/ENTRY[my_entry]/required_group/description"] = "An example description"
178178
template["/ENTRY[my_entry]/required_group2/description"] = "An example description"
179179
template["/ENTRY[my_entry]/does/not/exist"] = "random"
180180
template["/ENTRY[my_entry]/links/ext_link"] = {
181-
"link": f"{tmp_path}/" f"xarray_saved_small_cali" f"bration.h5:/axes/ax3"
181+
"link": f"{tmp_path}/xarray_saved_small_calibration.h5:/axes/ax3"
182182
}
183183
return template
184184

tests/nexus/test_nexus.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -124,12 +124,12 @@ def test_decode_if_string(string_obj, decode, expected):
124124

125125
# Handle np.ndarray outputs
126126
if isinstance(expected, np.ndarray):
127-
assert isinstance(
128-
result, np.ndarray
129-
), f"Expected ndarray, but got {type(result)}"
130-
assert (
131-
result == expected
132-
).all(), f"Failed for {string_obj} with decode={decode}"
127+
assert isinstance(result, np.ndarray), (
128+
f"Expected ndarray, but got {type(result)}"
129+
)
130+
assert (result == expected).all(), (
131+
f"Failed for {string_obj} with decode={decode}"
132+
)
133133
# Handle list outputs
134134
elif isinstance(expected, list):
135135
assert isinstance(result, list), f"Expected list, but got {type(result)}"

0 commit comments

Comments (0)