
Commit 614f62e

Merge branch 'main' into netcdf4-memory

2 parents: d633686 + 6336ffb

7 files changed: 47 additions & 15 deletions

CLAUDE.md

Lines changed: 7 additions & 0 deletions
@@ -19,3 +19,10 @@ uv run pytest xarray/tests/test_dataarray.py # Specific file
 pre-commit run --all-files # Includes ruff and other checks
 uv run dmypy run # Type checking with mypy
 ```
+
+## GitHub Interaction Guidelines
+
+- **NEVER impersonate the user on GitHub** - Do not post comments, create issues, or interact with the xarray GitHub repository unless explicitly instructed
+- Never create GitHub issues or PRs unless explicitly requested by the user
+- Never post "update" messages, progress reports, or explanatory comments on GitHub issues/PRs unless specifically asked
+- Always require explicit user direction before creating pull requests or pushing to the xarray GitHub repository

doc/whats-new.rst

Lines changed: 4 additions & 0 deletions
@@ -17,6 +17,10 @@ New Features
 Breaking changes
 ~~~~~~~~~~~~~~~~
 
+- :py:meth:`Dataset.update` now returns ``None``, instead of the updated dataset. This
+  completes the deprecation cycle started in version 0.17. The method still updates the
+  dataset in-place. (:issue:`10167`)
+  By `Maximilian Roos <https://github.com/max-sixty>`_.
 
 Deprecations
 ~~~~~~~~~~~~
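
A minimal sketch of the behavior change described in the entry above; the dataset and variable names are illustrative, not taken from the diff:

import xarray as xr

ds = xr.Dataset({"a": ("x", [1, 2, 3])})

# update() still modifies ds in place, but it now returns None,
# so the call can no longer be chained or reassigned.
result = ds.update({"b": ("x", [4, 5, 6])})
assert result is None
assert "b" in ds.data_vars  # the in-place update still happened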

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -65,6 +65,7 @@ types = [
     "types-openpyxl",
     "types-python-dateutil",
     "types-pytz",
+    "types-requests",
     "types-setuptools",
 ]
xarray/core/coordinates.py

Lines changed: 4 additions & 2 deletions
@@ -180,11 +180,13 @@ def to_index(self, ordered_dims: Sequence[Hashable] | None = None) -> pd.Index:
                     np.tile(np.repeat(code, repeat_counts[i]), tile_counts[i])
                     for code in codes
                 ]
-                level_list += [list(level) for level in levels]
+                level_list += levels
                 names += index.names
 
         return pd.MultiIndex(
-            levels=level_list, codes=[list(c) for c in code_list], names=names
+            levels=level_list,  # type: ignore[arg-type,unused-ignore]
+            codes=[list(c) for c in code_list],
+            names=names,
         )
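
For context, a small standalone sketch of the pd.MultiIndex construction this hunk touches; the level values and names below are made up for illustration:

import pandas as pd

# pd.MultiIndex accepts the level arrays and integer codes directly,
# so the levels can be passed through without wrapping each one in list().
idx = pd.MultiIndex(
    levels=[["a", "b"], [10, 20]],
    codes=[[0, 0, 1, 1], [0, 1, 0, 1]],
    names=["letter", "number"],
)
print(idx)  # MultiIndex of (a, 10), (a, 20), (b, 10), (b, 20)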

xarray/core/dataset.py

Lines changed: 2 additions & 10 deletions
@@ -5592,7 +5592,7 @@ def unstack(
         result = result._unstack_once(d, stacked_indexes[d], fill_value, sparse)
         return result
 
-    def update(self, other: CoercibleMapping) -> Self:
+    def update(self, other: CoercibleMapping) -> None:
         """Update this dataset's variables with those from another dataset.
 
         Just like :py:meth:`dict.update` this is a in-place operation.
@@ -5609,14 +5609,6 @@ def update(self, other: CoercibleMapping) -> Self:
             - mapping {var name: (dimension name, array-like)}
             - mapping {var name: (tuple of dimension names, array-like)}
 
-        Returns
-        -------
-        updated : Dataset
-            Updated dataset. Note that since the update is in-place this is the input
-            dataset.
-
-            It is deprecated since version 0.17 and scheduled to be removed in 0.21.
-
         Raises
         ------
         ValueError
@@ -5629,7 +5621,7 @@ def update(self, other: CoercibleMapping) -> Self:
         Dataset.merge
         """
         merge_result = dataset_update_method(self, other)
-        return self._replace(inplace=True, **merge_result._asdict())
+        self._replace(inplace=True, **merge_result._asdict())
 
     def merge(
         self,
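
A minimal sketch of the two mapping forms listed in the docstring above; the dataset and variable names are illustrative:

import numpy as np
import xarray as xr

ds = xr.Dataset({"temperature": (("x", "y"), np.zeros((2, 3)))})

# Both docstring forms: {var name: (dimension name, array-like)} and
# {var name: (tuple of dimension names, array-like)}. The update is
# applied in place and, after this change, the method returns None.
ds.update(
    {
        "pressure": ("x", np.array([1.0, 2.0])),
        "humidity": (("x", "y"), np.ones((2, 3))),
    }
)
assert set(ds.data_vars) == {"temperature", "pressure", "humidity"}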

xarray/tests/test_backends.py

Lines changed: 1 addition & 3 deletions
@@ -5222,9 +5222,7 @@ def test_memoryview_write_netcdf4_read_h5netcdf() -> None:
 @requires_h5netcdf_ros3
 class TestH5NetCDFDataRos3Driver(TestCommon):
     engine: T_NetcdfEngine = "h5netcdf"
-    test_remote_dataset: str = (
-        "https://www.unidata.ucar.edu/software/netcdf/examples/OMI-Aura_L2-example.nc"
-    )
+    test_remote_dataset: str = "https://archive.unidata.ucar.edu/software/netcdf/examples/OMI-Aura_L2-example.nc"
 
     @pytest.mark.filterwarnings("ignore:Duplicate dimension names")
     def test_get_variable_list(self) -> None:

xarray/tests/test_dataarray.py

Lines changed: 28 additions & 0 deletions
@@ -3528,6 +3528,34 @@ def test_to_dataframe_0length(self) -> None:
         assert len(actual) == 0
         assert_array_equal(actual.index.names, list("ABC"))
 
+    @pytest.mark.parametrize(
+        "x_dtype,y_dtype,v_dtype",
+        [
+            (np.uint32, np.float32, np.uint32),
+            (np.int16, np.float64, np.int64),
+            (np.uint8, np.float32, np.uint16),
+            (np.int32, np.float32, np.int8),
+        ],
+    )
+    def test_to_dataframe_coord_dtypes_2d(self, x_dtype, y_dtype, v_dtype) -> None:
+        x = np.array([1], dtype=x_dtype)
+        y = np.array([1.0], dtype=y_dtype)
+        v = np.array([[42]], dtype=v_dtype)
+
+        da = DataArray(v, dims=["x", "y"], coords={"x": x, "y": y})
+        df = da.to_dataframe(name="v").reset_index()
+
+        # Check that coordinate dtypes are preserved
+        assert df["x"].dtype == np.dtype(x_dtype), (
+            f"x coord: expected {x_dtype}, got {df['x'].dtype}"
+        )
+        assert df["y"].dtype == np.dtype(y_dtype), (
+            f"y coord: expected {y_dtype}, got {df['y'].dtype}"
+        )
+        assert df["v"].dtype == np.dtype(v_dtype), (
+            f"v data: expected {v_dtype}, got {df['v'].dtype}"
+        )
+
     @requires_dask_expr
     @requires_dask
     @pytest.mark.xfail(not has_dask_ge_2025_1_0, reason="dask-expr is broken")
