
Commit bb0ef8c

Added dataless rolling window, _intersect (#6757)
* added rolling window (tests)
* added rolling window (code)
* added intersection (code)
* added intersection (tests)
* removed intersection (CML tests)
* added whatsnew
* Update lib/iris/cube.py
  Co-authored-by: Chris Bunney <48915820+ukmo-ccbunney@users.noreply.github.com>
* review comments
* split whatsnew entry, in expectation this won't make 3.14
* fixed failing test
* extracted dataless param
* sneakily adding in some missed unittest artifacts
* swapped yield for return

---------

Co-authored-by: Chris Bunney <48915820+ukmo-ccbunney@users.noreply.github.com>
1 parent de58e15 commit bb0ef8c

File tree

3 files changed (+483, -293 lines)


docs/src/whatsnew/latest.rst

Lines changed: 3 additions & 0 deletions
@@ -44,6 +44,9 @@ This document explains the changes made to Iris for this release
 
    (:issue:`5819`, :pull:`6854`)
 
+#. `@ESadek-MO`_ added functionality to allow :func:`~iris.cube.Cube.rolling_window` and
+   :func:`~iris.cube.Cube.intersection` to work with dataless cubes. (:pull:`6757`)
+
 
 🐛 Bugs Fixed
 =============
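
Note (not part of the commit): the entry above concerns dataless cubes, i.e. cubes that carry a shape and metadata but no data payload. The sketch below is a rough, untested illustration of constructing one; the `shape=` keyword and `is_dataless()` are taken from the diff in this commit, while the coordinate names, values and units are invented for the example.

import numpy as np

from iris.coords import DimCoord
from iris.cube import Cube

# Build a dataless cube: a shape is supplied, but no data array is attached.
# (Assumes Cube(shape=...) with no data yields a dataless cube, as the
# Cube(data=data, shape=new_shape) call in the diff suggests.)
cube = Cube(shape=(4, 360))
cube.add_dim_coord(
    DimCoord(np.arange(4.0), standard_name="time", units="hours since 1970-01-01"), 0
)
cube.add_dim_coord(
    DimCoord(
        np.arange(360.0), standard_name="longitude", units="degrees", circular=True
    ),
    1,
)
print(cube.shape)          # (4, 360)
print(cube.is_dataless())  # True -- there is no data payload to load or compute
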

lib/iris/cube.py

Lines changed: 40 additions & 32 deletions
@@ -3482,8 +3482,6 @@ def intersection(self, *args, **kwargs) -> Cube:
             which intersects with the requested coordinate intervals.
 
         """
-        if self.is_dataless():
-            raise iris.exceptions.DatalessError("intersection")
         result = self
         ignore_bounds = kwargs.pop("ignore_bounds", False)
         threshold = kwargs.pop("threshold", 0)
@@ -3555,9 +3553,17 @@ def make_chunk(key):
         if len(chunks) == 1:
             result = chunks[0]
         else:
-            chunk_data = [chunk.core_data() for chunk in chunks]
-            data = _lazy.concatenate(chunk_data, axis=dim)
-            result = iris.cube.Cube(data)
+            if self.is_dataless():
+                old_shape = list(self.shape)
+                newlen = sum(chunk.coord(coord).shape[0] for chunk in chunks)
+                old_shape[dim] = newlen
+                new_shape = tuple(old_shape)
+                data = None
+            else:
+                chunk_data = [chunk.core_data() for chunk in chunks]
+                data = _lazy.concatenate(chunk_data, axis=dim)
+                new_shape = None
+            result = iris.cube.Cube(data=data, shape=new_shape)
         result.metadata = deepcopy(self.metadata)
 
         # Record a mapping from old coordinate IDs to new coordinates,
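
Note (not part of the commit): a hedged usage sketch of the new dataless branch in intersection. Per the diff, the result should keep data as None and take its new length along the intersected dimension from the summed chunk coordinate lengths; the longitude setup and the expected values below are illustrative assumptions.

import numpy as np

from iris.coords import DimCoord
from iris.cube import Cube

# Dataless cube spanning the full globe in longitude (illustrative setup).
lon = DimCoord(
    np.arange(360.0), standard_name="longitude", units="degrees", circular=True
)
cube = Cube(shape=(360,))
cube.add_dim_coord(lon, 0)

# With this commit, intersection no longer raises DatalessError for a
# dataless cube; the result stays dataless and its shape follows the coords.
subset = cube.intersection(longitude=(-90, 90))
print(subset.shape)          # expected (181,) -- points from -90 to 90 inclusive
print(subset.is_dataless())  # expected True
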
@@ -5158,8 +5164,7 @@ def rolling_window(
         """  # noqa: D214, D406, D407, D410, D411
         # Update weights kwargs (if necessary) to handle different types of
         # weights
-        if self.is_dataless():
-            raise iris.exceptions.DatalessError("rolling_window")
+        dataless = self.is_dataless()
         weights_info = None
         if kwargs.get("weights") is not None:
             weights_info = _Weights(kwargs["weights"], self)
@@ -5198,13 +5203,13 @@ def rolling_window(
         key = [slice(None, None)] * self.ndim
         key[dimension] = slice(None, self.shape[dimension] - window + 1)
         new_cube = new_cube[tuple(key)]
-
-        # take a view of the original data using the rolling_window function
-        # this will add an extra dimension to the data at dimension + 1 which
-        # represents the rolled window (i.e. will have a length of window)
-        rolling_window_data = iris.util.rolling_window(
-            self.core_data(), window=window, axis=dimension
-        )
+        if not dataless:
+            # take a view of the original data using the rolling_window function
+            # this will add an extra dimension to the data at dimension + 1 which
+            # represents the rolled window (i.e. will have a length of window)
+            rolling_window_data = iris.util.rolling_window(
+                self.core_data(), window=window, axis=dimension
+            )
 
         # now update all of the coordinates to reflect the aggregation
         for coord_ in self.coords(dimensions=dimension):
@@ -5253,27 +5258,30 @@ def rolling_window(
             )
         # and perform the data transformation, generating weights first if
         # needed
-        if isinstance(
-            aggregator, iris.analysis.WeightedAggregator
-        ) and aggregator.uses_weighting(**kwargs):
-            if "weights" in kwargs:
-                weights = kwargs["weights"]
-                if weights.ndim > 1 or weights.shape[0] != window:
-                    raise ValueError(
-                        "Weights for rolling window aggregation "
-                        "must be a 1d array with the same length "
-                        "as the window."
+        if not dataless:
+            if isinstance(
+                aggregator, iris.analysis.WeightedAggregator
+            ) and aggregator.uses_weighting(**kwargs):
+                if "weights" in kwargs:
+                    weights = kwargs["weights"]
+                    if weights.ndim > 1 or weights.shape[0] != window:
+                        raise ValueError(
+                            "Weights for rolling window aggregation "
+                            "must be a 1d array with the same length "
+                            "as the window."
+                        )
+                    kwargs = dict(kwargs)
+                    kwargs["weights"] = iris.util.broadcast_to_shape(
+                        weights, rolling_window_data.shape, (dimension + 1,)
                     )
-                kwargs = dict(kwargs)
-                kwargs["weights"] = iris.util.broadcast_to_shape(
-                    weights, rolling_window_data.shape, (dimension + 1,)
-                )
 
-        if aggregator.lazy_func is not None and self.has_lazy_data():
-            agg_method = aggregator.lazy_aggregate
+            if aggregator.lazy_func is not None and self.has_lazy_data():
+                agg_method = aggregator.lazy_aggregate
+            else:
+                agg_method = aggregator.aggregate
+            data_result = agg_method(rolling_window_data, axis=dimension + 1, **kwargs)
         else:
-            agg_method = aggregator.aggregate
-        data_result = agg_method(rolling_window_data, axis=dimension + 1, **kwargs)
+            data_result = None
         result = aggregator.post_process(new_cube, data_result, [coord], **kwargs)
         return result
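
Note (not part of the commit): a hedged usage sketch of the new dataless path in rolling_window. Per the diff, coordinates and shape are windowed as usual but no aggregation runs (data_result is None); the time coordinate, MEAN aggregator and expected values below are illustrative assumptions.

import numpy as np

import iris.analysis
from iris.coords import DimCoord
from iris.cube import Cube

# Dataless cube with a simple time axis (illustrative setup).
time = DimCoord(np.arange(10.0), standard_name="time", units="days since 2000-01-01")
cube = Cube(shape=(10,))
cube.add_dim_coord(time, 0)

# With this commit, rolling_window no longer raises DatalessError: the
# windowed coordinates and shape are produced while the data stays absent.
means = cube.rolling_window("time", iris.analysis.MEAN, window=3)
print(means.shape)          # expected (8,) -- 10 - 3 + 1 window positions
print(means.is_dataless())  # expected True
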
52795287
