
Commit 60c4a86

Bump the minimum version of h5netcdf in accordance with NEP 29

| version |    date    |
| :-----: | :--------: |
|  1.7.0  | 2025-10-15 |
|  1.6.0  | 2025-03-07 |
|  1.5.0  | 2025-01-26 |
|  1.4.0  | 2024-10-07 |

1 parent a5caa3a commit 60c4a86
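
For context, a minimal sketch of how a version floor can be read off the release-date table above under a rolling support window. The window length and cutoff date below are illustrative assumptions, not the project's exact deprecation policy:

from datetime import date, timedelta

# Release dates taken from the table in the commit message.
releases = {
    "1.7.0": date(2025, 10, 15),
    "1.6.0": date(2025, 3, 7),
    "1.5.0": date(2025, 1, 26),
    "1.4.0": date(2024, 10, 7),
}

# Assumed values for illustration only: an 18-month window measured back
# from an assumed cutoff date around the time of this commit.
window = timedelta(days=18 * 30)
cutoff = date(2025, 10, 20)

# Keep the releases still inside the window and pick the oldest one as the floor.
supported = {v: d for v, d in releases.items() if cutoff - d <= window}
print(min(supported, key=supported.get))  # -> "1.4.0" for these inputs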

File tree

4 files changed (+4, -70 lines):

- pixi.toml
- pyproject.toml
- xarray/tests/__init__.py
- xarray/tests/test_backends.py


pixi.toml

Lines changed: 1 addition & 1 deletion

@@ -117,7 +117,7 @@ cftime = "1.6.*"
 dask-core = "2024.6.*"
 distributed = "2024.6.*"
 flox = "0.9.*"
-h5netcdf = "1.3.*"
+h5netcdf = "1.4.*"
 # h5py and hdf5 tend to cause conflicts
 # for e.g. hdf5 1.12 conflicts with h5py=3.1
 # prioritize bumping other packages instead
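
A quick runtime check that an environment actually satisfies the new floor; this is just a sketch (it assumes the third-party packaging library is available) and is not part of the commit:

from importlib.metadata import version

from packaging.version import Version  # assumes `packaging` is installed

installed = Version(version("h5netcdf"))
# The pin above allows 1.4.x; anything older should be treated as unsupported.
assert installed >= Version("1.4.0"), f"h5netcdf {installed} is below the 1.4.0 floor"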

pyproject.toml

Lines changed: 1 addition & 1 deletion

@@ -37,7 +37,7 @@ accel = [
 complete = ["xarray[accel,etc,io,parallel,viz]"]
 io = [
   "netCDF4>=1.6.0",
-  "h5netcdf",
+  "h5netcdf>=1.4.0",
   "pydap",
   "scipy>=1.13",
   "zarr>=2.18",

xarray/tests/__init__.py

Lines changed: 0 additions & 4 deletions

@@ -230,10 +230,6 @@ def _importorskip_h5netcdf_ros3(has_h5netcdf: bool):
     "netCDF4", "1.6.2"
 )

-has_h5netcdf_1_4_0_or_above, requires_h5netcdf_1_4_0_or_above = _importorskip(
-    "h5netcdf", "1.4.0.dev"
-)
-
 has_h5netcdf_1_7_0_or_above, requires_h5netcdf_1_7_0_or_above = _importorskip(
     "h5netcdf", "1.7.0.dev"
 )
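
The deleted gate follows the same has_*/requires_* pattern as the 1.7.0 gate that remains. Roughly, and only as a simplified sketch of that pattern (importorskip_sketch is a hypothetical name, not xarray's actual _importorskip implementation):

import importlib

import pytest
from packaging.version import Version  # assumes `packaging` is installed


def importorskip_sketch(modname, minversion=None):
    # Try to import the module and compare its version against the minimum.
    try:
        mod = importlib.import_module(modname)
        has = minversion is None or Version(mod.__version__) >= Version(minversion)
    except ImportError:
        has = False
    reason = f"requires {modname}" + (f">={minversion}" if minversion else "")
    return has, pytest.mark.skipif(not has, reason=reason)


# With the project-wide floor now at 1.4.0, a dedicated 1.4.0 gate is redundant;
# only newer feature gates such as the 1.7.0 one still carry information.
has_h5netcdf_1_7, requires_h5netcdf_1_7 = importorskip_sketch("h5netcdf", "1.7.0")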

xarray/tests/test_backends.py

Lines changed: 2 additions & 64 deletions

@@ -74,7 +74,6 @@
 assert_identical,
 assert_no_warnings,
 has_dask,
-has_h5netcdf_1_4_0_or_above,
 has_netCDF4,
 has_numpy_2,
 has_scipy,
@@ -89,7 +88,6 @@
 requires_dask,
 requires_fsspec,
 requires_h5netcdf,
-requires_h5netcdf_1_4_0_or_above,
 requires_h5netcdf_1_7_0_or_above,
 requires_h5netcdf_or_netCDF4,
 requires_h5netcdf_ros3,
@@ -2129,13 +2127,8 @@ def test_encoding_enum__no_fill_value(self, recwarn):
 # This is effectively a void context manager
 expected_warnings = 0
 if self.engine == "h5netcdf":
-    if not has_h5netcdf_1_4_0_or_above:
-        save_kwargs["invalid_netcdf"] = True
-        expected_warnings = 1
-        expected_msg = "You are writing invalid netcdf features to file"
-    else:
-        expected_warnings = 1
-        expected_msg = "Creating variable with default fill_value 0 which IS defined in enum type"
+    expected_warnings = 1
+    expected_msg = "Creating variable with default fill_value 0 which IS defined in enum type"

 with self.roundtrip(original, save_kwargs=save_kwargs) as actual:
     assert len(recwarn) == expected_warnings
@@ -2147,14 +2140,6 @@ def test_encoding_enum__no_fill_value(self, recwarn):
     actual.clouds.encoding["dtype"].metadata["enum"]
     == cloud_type_dict
 )
-if not (
-    self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above
-):
-    # not implemented in h5netcdf yet
-    assert (
-        actual.clouds.encoding["dtype"].metadata["enum_name"]
-        == "cloud_type"
-    )

 @requires_netCDF4
 def test_encoding_enum__multiple_variable_with_enum(self):
@@ -2177,8 +2162,6 @@ def test_encoding_enum__multiple_variable_with_enum(self):
 )
 with open_dataset(tmp_file, engine="netcdf4") as original:
     save_kwargs = {}
-    if self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above:
-        save_kwargs["invalid_netcdf"] = True
     with self.roundtrip(original, save_kwargs=save_kwargs) as actual:
         assert_equal(original, actual)
         assert (
@@ -2192,14 +2175,6 @@ def test_encoding_enum__multiple_variable_with_enum(self):
     actual.clouds.encoding["dtype"].metadata["enum"]
     == cloud_type_dict
 )
-if not (
-    self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above
-):
-    # not implemented in h5netcdf yet
-    assert (
-        actual.clouds.encoding["dtype"].metadata["enum_name"]
-        == "cloud_type"
-    )

 @requires_netCDF4
 def test_encoding_enum__error_multiple_variable_with_changing_enum(self):
@@ -2235,17 +2210,6 @@ def test_encoding_enum__error_multiple_variable_with_changing_enum(self):
 "u1",
 metadata={"enum": modified_enum, "enum_name": "cloud_type"},
 )
-if not (self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above):
-    # not implemented yet in h5netcdf
-    with pytest.raises(
-        ValueError,
-        match=(
-            r"Cannot save variable .*"
-            r" because an enum `cloud_type` already exists in the Dataset .*"
-        ),
-    ):
-        with self.roundtrip(original):
-            pass

 @pytest.mark.parametrize("create_default_indexes", [True, False])
 def test_create_default_indexes(self, tmp_path, create_default_indexes) -> None:
@@ -4927,31 +4891,6 @@ def create_store(self):
 with create_tmp_file() as tmp_file:
     yield backends.H5NetCDFStore.open(tmp_file, "w")

-@pytest.mark.skipif(
-    has_h5netcdf_1_4_0_or_above, reason="only valid for h5netcdf < 1.4.0"
-)
-def test_complex(self) -> None:
-    expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
-    save_kwargs = {"invalid_netcdf": True}
-    with pytest.warns(UserWarning, match="You are writing invalid netcdf features"):
-        with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
-            assert_equal(expected, actual)
-
-@pytest.mark.skipif(
-    has_h5netcdf_1_4_0_or_above, reason="only valid for h5netcdf < 1.4.0"
-)
-@pytest.mark.parametrize("invalid_netcdf", [None, False])
-def test_complex_error(self, invalid_netcdf) -> None:
-    import h5netcdf
-
-    expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
-    save_kwargs = {"invalid_netcdf": invalid_netcdf}
-    with pytest.raises(
-        h5netcdf.CompatibilityError, match="are not a supported NetCDF feature"
-    ):
-        with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
-            assert_equal(expected, actual)
-
 def test_numpy_bool_(self) -> None:
     # h5netcdf loads booleans as numpy.bool_, this type needs to be supported
     # when writing invalid_netcdf datasets in order to support a roundtrip
@@ -5105,7 +5044,6 @@ def test_byte_attrs(self, byte_attrs_dataset: dict[str, Any]) -> None:
 with pytest.raises(ValueError, match=byte_attrs_dataset["h5netcdf_error"]):
     super().test_byte_attrs(byte_attrs_dataset)

-@requires_h5netcdf_1_4_0_or_above
 def test_roundtrip_complex(self):
     expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
     with self.roundtrip(expected) as actual:
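
The deleted test_complex/test_complex_error cases only covered h5netcdf < 1.4.0, where complex values required invalid_netcdf=True; with the new floor, a plain roundtrip along the lines of test_roundtrip_complex is expected to work without that flag. A standalone sketch (the filename is a placeholder):

import numpy as np
import xarray as xr

expected = xr.Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})

# On h5netcdf >= 1.4.0 complex values no longer need invalid_netcdf=True.
expected.to_netcdf("complex_roundtrip.nc", engine="h5netcdf")

with xr.open_dataset("complex_roundtrip.nc", engine="h5netcdf") as actual:
    xr.testing.assert_equal(expected, actual)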
