12 changes: 6 additions & 6 deletions cf_xarray/accessor.py
@@ -18,7 +18,6 @@
     Any,
     Literal,
     TypeVar,
-    Union,
     cast,
     overload,
 )
@@ -89,7 +88,7 @@
 ATTRS["vertical"] = ATTRS["Z"]
 
 # Type for Mapper functions
-Mapper = Callable[[Union[DataArray, Dataset], Hashable], list[Hashable]]
+Mapper = Callable[[DataArray | Dataset, Hashable], list[Hashable]]
 
 # Type for decorators
 F = TypeVar("F", bound=Callable[..., Any])
@@ -1150,9 +1149,10 @@ def create_flag_dict(da) -> Mapping[Hashable, FlagParam]:
         )
 
     flag_params = tuple(
-        FlagParam(mask, value) for mask, value in zip(flag_masks, flag_values)
+        FlagParam(mask, value)
+        for mask, value in zip(flag_masks, flag_values, strict=False)
     )
-    return dict(zip(flag_meanings, flag_params))
+    return dict(zip(flag_meanings, flag_params, strict=False))
 
 
 class CFAccessor:
@@ -1368,7 +1368,7 @@ def curvefit(
         kwargs: dict[str, Any] | None = None,
     ):
         if coords is not None:
-            if isinstance(coords, (Hashable, DataArray)):
+            if isinstance(coords, Hashable | DataArray):
                 coords_iter: Iterable[Hashable | DataArray] = [coords]
             else:
                 coords_iter = coords
@@ -3024,7 +3024,7 @@ def _extract_flags(self, flags: Sequence[Hashable] | None = None) -> Dataset:
         x = self._obj.astype("i")
         bit_comp = x & bit_mask
 
-        for i, (flag, value) in enumerate(zip(flags_reduced, values)):
+        for i, (flag, value) in enumerate(zip(flags_reduced, values, strict=False)):
            bit = bit_comp.isel(_mask=i)
            if value is not None:
                out[flag] = bit == value
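Editor's note (illustrative sketch, not part of this diff): the accessor.py changes rely on PEP 604 union syntax, which works at runtime from Python 3.10, so the `Union` import can be dropped and `isinstance` can take `X | Y` directly instead of a tuple of types. The `DataArray`/`Dataset` classes below are hypothetical stand-ins for the xarray types:

from collections.abc import Callable, Hashable

class DataArray: ...  # stand-in for xarray.DataArray
class Dataset: ...    # stand-in for xarray.Dataset

# Equivalent to Callable[[Union[DataArray, Dataset], Hashable], list[Hashable]]
Mapper = Callable[[DataArray | Dataset, Hashable], list[Hashable]]

# On 3.10+, isinstance() accepts X | Y where it previously needed (X, Y)
print(isinstance(Dataset(), DataArray | Dataset))  # True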
9 changes: 6 additions & 3 deletions cf_xarray/coding.py
@@ -46,7 +46,7 @@ def encode_multi_index_as_compress(ds, idxnames=None):
     encoded = ds.reset_index(idxnames)
     for idxname in idxnames:
         mindex = ds.indexes[idxname]
-        coords = dict(zip(mindex.names, mindex.levels))
+        coords = dict(zip(mindex.names, mindex.levels, strict=False))
         encoded.update(coords)
         for c in coords:
             encoded[c].attrs = ds[c].attrs
@@ -112,13 +112,16 @@ def decode_compress_to_multi_index(encoded, idxnames=None):
 
             variables = {
                 dim: encoded[dim].isel({dim: xr.Variable(data=index, dims=idxname)})
-                for dim, index in zip(names, indices)
+                for dim, index in zip(names, indices, strict=False)
             }
             decoded = decoded.assign_coords(variables).set_xindex(
                 names, PandasMultiIndex
             )
         except ImportError:
-            arrays = [encoded[dim].data[index] for dim, index in zip(names, indices)]
+            arrays = [
+                encoded[dim].data[index]
+                for dim, index in zip(names, indices, strict=False)
+            ]
             mindex = pd.MultiIndex.from_arrays(arrays, names=names)
             decoded.coords[idxname] = mindex
 
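Editor's note (illustrative sketch, not from this PR): every `zip(...)` call touched above now passes `strict=False`, which spells out the pre-3.10 truncating behaviour rather than changing it; `strict=True` would instead raise when the iterables disagree in length. The keyword requires Python 3.10+, matching the target-version bump below. The sample flag lists here are made up:

flag_meanings = ["good", "suspect", "bad"]
flag_values = [1, 2, 4]

# strict=False (the default) silently stops at the shorter iterable
print(dict(zip(flag_meanings, flag_values, strict=False)))
# {'good': 1, 'suspect': 2, 'bad': 4}

# strict=True raises instead of silently dropping items
try:
    list(zip(flag_meanings, [1, 2], strict=True))
except ValueError as exc:
    print(exc)  # zip() argument 2 is shorter than argument 1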
4 changes: 2 additions & 2 deletions cf_xarray/formatting.py
@@ -268,14 +268,14 @@ def _format_flags(accessor, rich):
         table.add_column("Value", justify="right")
         table.add_column("Bits", justify="center")
 
-        for val, bit, key in zip(value_text, bit_text, flag_dict):
+        for val, bit, key in zip(value_text, bit_text, flag_dict, strict=False):
             table.add_row(_format_cf_name(key, rich), val, bit)
 
         return table
 
     else:
         rows = []
-        for val, bit, key in zip(value_text, bit_text, flag_dict):
+        for val, bit, key in zip(value_text, bit_text, flag_dict, strict=False):
             rows.append(
                 f"{TAB}{_format_cf_name(key, rich):>{key_width}}: {TAB} {val} {bit}"
             )
4 changes: 3 additions & 1 deletion cf_xarray/scripts/make_doc.py
@@ -36,7 +36,9 @@ def make_criteria_csv():
     df.transpose().to_csv(os.path.join(csv_dir, "all_criteria.csv"))
 
     # Axes and coordinates
-    for keys, name in zip([_AXIS_NAMES, _COORD_NAMES], ["axes", "coords"]):
+    for keys, name in zip(
+        [_AXIS_NAMES, _COORD_NAMES], ["axes", "coords"], strict=False
+    ):
         subdf = df[sorted(keys)].dropna(axis=1, how="all")
         subdf = subdf.dropna(axis=1, how="all").transpose()
         subdf.transpose().to_csv(os.path.join(csv_dir, f"{name}_criteria.csv"))
3 changes: 2 additions & 1 deletion cf_xarray/sgrid.py
@@ -24,13 +24,14 @@ def parse_axes(ds):
         zip(
             axes_names,
             ({k} for k in grid.attrs["node_dimensions"].split(" ")),
+            strict=False,
         )
     )
     for attr in SGRID_DIM_ATTRS:
         if attr in grid.attrs:
             matches = re.findall(pattern, grid.attrs[attr] + "\n")
             assert len(matches) == ndim, matches
-            for ax, match in zip(axes_names, matches):
+            for ax, match in zip(axes_names, matches, strict=False):
                 axes[ax].update(set(match[:2]))
 
     if ndim == 2 and "vertical_dimensions" in grid.attrs:
2 changes: 1 addition & 1 deletion cf_xarray/tests/test_accessor.py
@@ -1544,7 +1544,7 @@ def test_groupby_special_ops() -> None:
     grouped = airds.groupby_bins("lat", np.arange(20, 50, 10))
 
     # __iter__
-    for (label, group), (cflabel, cfgroup) in zip(grouped, cfgrouped):
+    for (label, group), (cflabel, cfgroup) in zip(grouped, cfgrouped, strict=False):
         assert label == cflabel
         assert_identical(group, cfgroup)
 
4 changes: 3 additions & 1 deletion cf_xarray/utils.py
@@ -81,7 +81,9 @@ def parse_cell_methods_attr(attr: str) -> dict[str, str]:
     if len(strings) % 2 != 0:
         raise ValueError(f"attrs['cell_measures'] = {attr!r} is malformed.")
 
-    return dict(zip(strings[slice(0, None, 2)], strings[slice(1, None, 2)]))
+    return dict(
+        zip(strings[slice(0, None, 2)], strings[slice(1, None, 2)], strict=False)
+    )
 
 
 def invert_mappings(*mappings):
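Editor's sketch (not from the PR): the `strings[slice(0, None, 2)]` / `strings[slice(1, None, 2)]` pairing zips even-indexed names with odd-indexed values; the length check above guarantees the two halves match, so `strict=False` can never truncate here. The sample attribute and the simplified tokenizing below are made up for illustration:

attr = "area: cell_area volume: cell_volume"
strings = [s.strip(":") for s in attr.split(" ") if s]
assert len(strings) % 2 == 0, "malformed attribute"

# even-indexed tokens are names, odd-indexed tokens are the variables they map to
print(dict(zip(strings[0::2], strings[1::2], strict=False)))
# {'area': 'cell_area', 'volume': 'cell_volume'}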
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -16,7 +16,7 @@ classifiers = [
     "Programming Language :: Python :: 3.12",
 ]
 dependencies = [
-    "xarray",
+    "xarray>=2022.03.0",
 ]
 dynamic = ["version"]
 
@@ -54,10 +54,10 @@ write_to_template= '__version__ = "{version}"'
 tag_regex= "^(?P<prefix>v)?(?P<version>[^\\+]+)(?P<suffix>.*)?$"
 
 [tool.black]
-target-version = ["py39"]
+target-version = ["py310"]
 
 [tool.ruff]
-target-version = "py39"
+target-version = "py310"
 builtins = ["ellipsis"]
 exclude = [
     ".eggs",