Skip to content

Commit 0d9ee78

Browse files
committed
Bump the minimum version of h5netcdf in accordance with NEP 29
| version | date | | :-----: | :--------: | | 1.7.0 | 2025-10-15 | | 1.6.0 | 2025-03-07 | | 1.5.0 | 2025-01-26 | | 1.4.0 | 2024-10-07 |
1 parent 8bed33f commit 0d9ee78

File tree

5 files changed

+11
-75
lines changed

5 files changed

+11
-75
lines changed

doc/whats-new.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,10 @@ New Features
2323
:py:class:`~xarray.indexes.PandasIndex` to perform the selection
2424
(:issue:`9703`, :pull:`11029`).
2525
By `Ian Hunt-Isaak <https://github.com/ianhi>`_.
26+
- The minimum supported version of ``h5netcdf`` is now 1.4. Version 1.4.0
27+
brings improved alignment between h5netcdf and libnetcdf4 in the storage of
28+
complex numbers (:pull:`11068`). By `Mark Harfouche
29+
<https://github.com/hmaarrfk>`_.
2630

2731

2832
Breaking Changes

pixi.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ cftime = "1.6.*"
117117
dask-core = "2024.6.*"
118118
distributed = "2024.6.*"
119119
flox = "0.9.*"
120-
h5netcdf = "1.3.*"
120+
h5netcdf = "1.4.*"
121121
# h5py and hdf5 tend to cause conflicts
122122
# for e.g. hdf5 1.12 conflicts with h5py=3.1
123123
# prioritize bumping other packages instead

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ accel = [
3737
complete = ["xarray[accel,etc,io,parallel,viz]"]
3838
io = [
3939
"netCDF4>=1.6.0",
40-
"h5netcdf",
40+
"h5netcdf>=1.4.0",
4141
"pydap",
4242
"scipy>=1.13",
4343
"zarr>=2.18",

xarray/tests/__init__.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -230,10 +230,6 @@ def _importorskip_h5netcdf_ros3(has_h5netcdf: bool):
230230
"netCDF4", "1.6.2"
231231
)
232232

233-
has_h5netcdf_1_4_0_or_above, requires_h5netcdf_1_4_0_or_above = _importorskip(
234-
"h5netcdf", "1.4.0.dev"
235-
)
236-
237233
has_h5netcdf_1_7_0_or_above, requires_h5netcdf_1_7_0_or_above = _importorskip(
238234
"h5netcdf", "1.7.0.dev"
239235
)

xarray/tests/test_backends.py

Lines changed: 5 additions & 69 deletions
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,6 @@
7474
assert_identical,
7575
assert_no_warnings,
7676
has_dask,
77-
has_h5netcdf_1_4_0_or_above,
7877
has_netCDF4,
7978
has_numpy_2,
8079
has_scipy,
@@ -89,7 +88,6 @@
8988
requires_dask,
9089
requires_fsspec,
9190
requires_h5netcdf,
92-
requires_h5netcdf_1_4_0_or_above,
9391
requires_h5netcdf_1_7_0_or_above,
9492
requires_h5netcdf_or_netCDF4,
9593
requires_h5netcdf_ros3,
@@ -2124,20 +2122,14 @@ def test_encoding_enum__no_fill_value(self, recwarn):
21242122
)
21252123
v[:] = 1
21262124
with open_dataset(tmp_file, engine="netcdf4") as original:
2127-
save_kwargs = {}
21282125
# We don't expect any errors.
21292126
# This is effectively a void context manager
21302127
expected_warnings = 0
21312128
if self.engine == "h5netcdf":
2132-
if not has_h5netcdf_1_4_0_or_above:
2133-
save_kwargs["invalid_netcdf"] = True
2134-
expected_warnings = 1
2135-
expected_msg = "You are writing invalid netcdf features to file"
2136-
else:
2137-
expected_warnings = 1
2138-
expected_msg = "Creating variable with default fill_value 0 which IS defined in enum type"
2139-
2140-
with self.roundtrip(original, save_kwargs=save_kwargs) as actual:
2129+
expected_warnings = 1
2130+
expected_msg = "Creating variable with default fill_value 0 which IS defined in enum type"
2131+
2132+
with self.roundtrip(original) as actual:
21412133
assert len(recwarn) == expected_warnings
21422134
if expected_warnings:
21432135
assert issubclass(recwarn[0].category, UserWarning)
@@ -2147,14 +2139,6 @@ def test_encoding_enum__no_fill_value(self, recwarn):
21472139
actual.clouds.encoding["dtype"].metadata["enum"]
21482140
== cloud_type_dict
21492141
)
2150-
if not (
2151-
self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above
2152-
):
2153-
# not implemented in h5netcdf yet
2154-
assert (
2155-
actual.clouds.encoding["dtype"].metadata["enum_name"]
2156-
== "cloud_type"
2157-
)
21582142

21592143
@requires_netCDF4
21602144
def test_encoding_enum__multiple_variable_with_enum(self):
@@ -2176,10 +2160,7 @@ def test_encoding_enum__multiple_variable_with_enum(self):
21762160
fill_value=255,
21772161
)
21782162
with open_dataset(tmp_file, engine="netcdf4") as original:
2179-
save_kwargs = {}
2180-
if self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above:
2181-
save_kwargs["invalid_netcdf"] = True
2182-
with self.roundtrip(original, save_kwargs=save_kwargs) as actual:
2163+
with self.roundtrip(original) as actual:
21832164
assert_equal(original, actual)
21842165
assert (
21852166
actual.clouds.encoding["dtype"] == actual.tifa.encoding["dtype"]
@@ -2192,14 +2173,6 @@ def test_encoding_enum__multiple_variable_with_enum(self):
21922173
actual.clouds.encoding["dtype"].metadata["enum"]
21932174
== cloud_type_dict
21942175
)
2195-
if not (
2196-
self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above
2197-
):
2198-
# not implemented in h5netcdf yet
2199-
assert (
2200-
actual.clouds.encoding["dtype"].metadata["enum_name"]
2201-
== "cloud_type"
2202-
)
22032176

22042177
@requires_netCDF4
22052178
def test_encoding_enum__error_multiple_variable_with_changing_enum(self):
@@ -2235,17 +2208,6 @@ def test_encoding_enum__error_multiple_variable_with_changing_enum(self):
22352208
"u1",
22362209
metadata={"enum": modified_enum, "enum_name": "cloud_type"},
22372210
)
2238-
if not (self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above):
2239-
# not implemented yet in h5netcdf
2240-
with pytest.raises(
2241-
ValueError,
2242-
match=(
2243-
r"Cannot save variable .*"
2244-
r" because an enum `cloud_type` already exists in the Dataset .*"
2245-
),
2246-
):
2247-
with self.roundtrip(original):
2248-
pass
22492211

22502212
@pytest.mark.parametrize("create_default_indexes", [True, False])
22512213
def test_create_default_indexes(self, tmp_path, create_default_indexes) -> None:
@@ -4927,31 +4889,6 @@ def create_store(self):
49274889
with create_tmp_file() as tmp_file:
49284890
yield backends.H5NetCDFStore.open(tmp_file, "w")
49294891

4930-
@pytest.mark.skipif(
4931-
has_h5netcdf_1_4_0_or_above, reason="only valid for h5netcdf < 1.4.0"
4932-
)
4933-
def test_complex(self) -> None:
4934-
expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
4935-
save_kwargs = {"invalid_netcdf": True}
4936-
with pytest.warns(UserWarning, match="You are writing invalid netcdf features"):
4937-
with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
4938-
assert_equal(expected, actual)
4939-
4940-
@pytest.mark.skipif(
4941-
has_h5netcdf_1_4_0_or_above, reason="only valid for h5netcdf < 1.4.0"
4942-
)
4943-
@pytest.mark.parametrize("invalid_netcdf", [None, False])
4944-
def test_complex_error(self, invalid_netcdf) -> None:
4945-
import h5netcdf
4946-
4947-
expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
4948-
save_kwargs = {"invalid_netcdf": invalid_netcdf}
4949-
with pytest.raises(
4950-
h5netcdf.CompatibilityError, match="are not a supported NetCDF feature"
4951-
):
4952-
with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
4953-
assert_equal(expected, actual)
4954-
49554892
def test_numpy_bool_(self) -> None:
49564893
# h5netcdf loads booleans as numpy.bool_, this type needs to be supported
49574894
# when writing invalid_netcdf datasets in order to support a roundtrip
@@ -5105,7 +5042,6 @@ def test_byte_attrs(self, byte_attrs_dataset: dict[str, Any]) -> None:
51055042
with pytest.raises(ValueError, match=byte_attrs_dataset["h5netcdf_error"]):
51065043
super().test_byte_attrs(byte_attrs_dataset)
51075044

5108-
@requires_h5netcdf_1_4_0_or_above
51095045
def test_roundtrip_complex(self):
51105046
expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
51115047
with self.roundtrip(expected) as actual:

0 commit comments

Comments (0)