Skip to content

Commit 3c9112c

Browse files
Bugfix array equality (#6665)
* Protect _constraints from elementwise equality. * Protect cube.py from elementwise equality. * Fix dumb Constraint eq mistake. * Protect util from elementwise equality. * Correct return type for masked_array_equal. * Protect _structured_array_identification from elementwise equality. * Fixes for saver.py * Protect pp.py from elementwise equality. * Protect pp_load_rules from elementwise equality. * Revert "Protect pp.py from elementwise equality." This reverts commit 864f0d7. * Protect _concatenate from elementwise NOT equality. * Protect _constraints from elementwise NOT equality. * Protect coords.py from elementwise NOT equality. * Protect cube.py from elementwise NOT equality. * Protect util from elementwise NOT equality. * Add structured array test for NumPy elementwise broadcasting error. * Extra updates for pp_load_rules.py * What's New entry. * Expose users to array comparison errors when requested directly, including helpful errors. * Back out new support for NumPy arrays in AttributeConstraint - inappropriate for patch release. * Revert change to intersection edge_wrap - will never be comparing 2 arrays. * Add test coverage for structured array eq with incompatible shapes. * Test coverage for broadcasting errors in pp_load_rules. * Common code for comparing attributes in case of np arrays. * Temporary fix for TestLicenseHeaders. * Don't create a file when testing describe_diff. * Final tests for attribute comparison. * Fix any versus all confusion. * Temporary Nox negation pin - see wntrblm/nox#961. (#6441) --------- Co-authored-by: ukmo-ccbunney <[email protected]>
1 parent ffaaab9 commit 3c9112c

File tree

19 files changed

+224
-29
lines changed

19 files changed

+224
-29
lines changed

.github/workflows/benchmarks_run.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ jobs:
7878

7979
- name: Install Nox
8080
run: |
81-
pip install nox
81+
pip install nox!=2025.05.01
8282
8383
- name: Cache environment directories
8484
id: cache-env-dir

docs/src/whatsnew/3.12.rst

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -215,6 +215,12 @@ v3.12.2 (09 May 2025)
215215
#. `@trexfeathers`_ refactored Iris loading and saving to make it compatible
216216
with Dask version ``2025.4.0`` and above. (:pull:`6451`)
217217

218+
#. `@trexfeathers`_ and `@ukmo-ccbunney`_ adapted array comparison in response
219+
to NumPy v1.25 deprecating comparison of un-broadcastable arrays. It is
220+
hoped that users will see no difference in behaviour, but please get in touch
221+
if you notice anything. See `NumPy v1.25 expired deprecations`_ and
222+
`numpy#22707`_ for more. (:pull:`6665`)
223+
218224

219225
📚 Documentation
220226
================
@@ -271,3 +277,5 @@ v3.12.2 (09 May 2025)
271277
272278
.. _SPEC 0: https://scientific-python.org/specs/spec-0000/
273279
.. _Running setuptools commands: https://setuptools.pypa.io/en/latest/deprecated/commands.html
280+
.. _NumPy v1.25 expired deprecations: https://numpy.org/doc/stable/release/1.25.0-notes.html#expired-deprecations
281+
.. _numpy#22707: https://github.com/numpy/numpy/pull/22707

lib/iris/_concatenate.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from xxhash import xxh3_64
1717

1818
from iris._lazy_data import concatenate as concatenate_arrays
19+
from iris.common.metadata import hexdigest
1920
import iris.coords
2021
from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord
2122
import iris.cube
@@ -786,7 +787,7 @@ def _coordinate_differences(self, other, attr, reason="metadata"):
786787
diff_names = []
787788
for self_key, self_value in self_dict.items():
788789
other_value = other_dict[self_key]
789-
if self_value != other_value:
790+
if hexdigest(self_value) != hexdigest(other_value):
790791
diff_names.append(self_key)
791792
result = (
792793
" " + reason,

lib/iris/_constraints.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -531,6 +531,8 @@ def __init__(self, **attributes):
531531
super().__init__(cube_func=self._cube_func)
532532

533533
def __eq__(self, other):
534+
# Note: equality means that NumPy arrays are not supported for
535+
# AttributeConstraints (get the truth ambiguity error).
534536
eq = (
535537
isinstance(other, AttributeConstraint)
536538
and self._attributes == other._attributes
@@ -553,6 +555,8 @@ def _cube_func(self, cube):
553555
match = False
554556
break
555557
else:
558+
# Note: equality means that NumPy arrays are not supported
559+
# for AttributeConstraints (get the truth ambiguity error).
556560
if cube_attr != value:
557561
match = False
558562
break

lib/iris/common/mixin.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,9 @@ def __eq__(self, other):
115115
match = set(self.keys()) == set(other.keys())
116116
if match:
117117
for key, value in self.items():
118+
# TODO: should this use the iris.common.metadata approach of
119+
# using hexdigest? Might be a breaking change for some corner
120+
# cases, so would need a major release.
118121
match = np.array_equal(
119122
np.array(value, ndmin=1), np.array(other[key], ndmin=1)
120123
)

lib/iris/coords.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -760,7 +760,9 @@ def is_compatible(self, other, ignore=None):
760760
ignore = (ignore,)
761761
common_keys = common_keys.difference(ignore)
762762
for key in common_keys:
763-
if np.any(self.attributes[key] != other.attributes[key]):
763+
if not iris.util._attribute_equal(
764+
self.attributes[key], other.attributes[key]
765+
):
764766
compatible = False
765767
break
766768

lib/iris/cube.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1435,7 +1435,9 @@ def is_compatible(
14351435
ignore = (ignore,)
14361436
common_keys = common_keys.difference(ignore)
14371437
for key in common_keys:
1438-
if np.any(self.attributes[key] != other.attributes[key]):
1438+
if not iris.util._attribute_equal(
1439+
self.attributes[key], other.attributes[key]
1440+
):
14391441
compatible = False
14401442
break
14411443

lib/iris/fileformats/_structured_array_identification.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,9 @@ def __eq__(self, other):
111111

112112
result = NotImplemented
113113
if stride is not None or arr is not None:
114-
result = stride == self.stride and np.all(self.unique_ordered_values == arr)
114+
result = stride == self.stride and np.array_equal(
115+
self.unique_ordered_values, arr
116+
)
115117
return result
116118

117119
def __ne__(self, other):
@@ -284,7 +286,7 @@ def from_array(cls, arr):
284286
# Do one last sanity check - does the array we've just described
285287
# actually compute the correct array?
286288
constructed_array = structure.construct_array(arr.size)
287-
if not np.all(constructed_array == arr):
289+
if not np.array_equal(constructed_array, arr):
288290
structure = None
289291

290292
return structure

lib/iris/fileformats/netcdf/saver.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2589,13 +2589,7 @@ def save(
25892589
# Find any global attributes which are not the same on *all* cubes.
25902590
def attr_values_equal(val1, val2):
25912591
# An equality test which also works when some values are numpy arrays (!)
2592-
# As done in :meth:`iris.common.mixin.LimitedAttributeDict.__eq__`.
2593-
match = val1 == val2
2594-
try:
2595-
match = bool(match)
2596-
except ValueError:
2597-
match = match.all()
2598-
return match
2592+
return iris.util._attribute_equal(val1, val2)
25992593

26002594
cube0 = cubes[0]
26012595
invalid_globals = set(
@@ -2682,7 +2676,9 @@ def attr_values_equal(val1, val2):
26822676
common_keys.intersection_update(keys)
26832677
different_value_keys = []
26842678
for key in common_keys:
2685-
if np.any(attributes[key] != cube.attributes[key]):
2679+
if not iris.util._attribute_equal(
2680+
attributes[key], cube.attributes[key]
2681+
):
26862682
different_value_keys.append(key)
26872683
common_keys.difference_update(different_value_keys)
26882684
local_keys.update(different_value_keys)

lib/iris/fileformats/pp_load_rules.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -139,8 +139,15 @@ def _convert_vertical_coords(
139139
)
140140
coords_and_dims.append((coord, dim))
141141

142+
# Common calc for Depth
143+
try:
144+
svd_lev_eq = brsvd1 == brlev
145+
except ValueError:
146+
# In case of broadcasting errors.
147+
svd_lev_eq = False
148+
142149
# Depth - unbound.
143-
if (len(lbcode) != 5) and (lbvc == 2) and np.all(brsvd1 == brlev):
150+
if (len(lbcode) != 5) and (lbvc == 2) and np.all(svd_lev_eq):
144151
coord = _dim_or_aux(
145152
blev,
146153
standard_name="depth",
@@ -150,7 +157,7 @@ def _convert_vertical_coords(
150157
coords_and_dims.append((coord, dim))
151158

152159
# Depth - bound.
153-
if (len(lbcode) != 5) and (lbvc == 2) and np.all(brsvd1 != brlev):
160+
if (len(lbcode) != 5) and (lbvc == 2) and np.all(~svd_lev_eq):
154161
coord = _dim_or_aux(
155162
blev,
156163
standard_name="depth",
@@ -164,10 +171,10 @@ def _convert_vertical_coords(
164171
if (
165172
(len(lbcode) != 5)
166173
and (lbvc == 2)
167-
and (np.any(brsvd1 == brlev) and np.any(brsvd1 != brlev))
174+
and (np.any(svd_lev_eq) and np.any(~svd_lev_eq))
168175
):
169-
lower = np.where(brsvd1 == brlev, blev, brsvd1)
170-
upper = np.where(brsvd1 == brlev, blev, brlev)
176+
lower = np.where(svd_lev_eq, blev, brsvd1)
177+
upper = np.where(svd_lev_eq, blev, brlev)
171178
coord = _dim_or_aux(
172179
blev,
173180
standard_name="depth",
@@ -189,7 +196,7 @@ def _convert_vertical_coords(
189196
units="1",
190197
)
191198
coords_and_dims.append((coord, dim))
192-
elif np.any(brsvd1 != brlev):
199+
elif np.any(~svd_lev_eq):
193200
# UM populates metadata CORRECTLY,
194201
# so treat it as the expected (bounded) soil depth.
195202
coord = _dim_or_aux(

0 commit comments

Comments
 (0)