diff --git a/esmvalcore/preprocessor/__init__.py b/esmvalcore/preprocessor/__init__.py index 9137594945..da5eea2f78 100644 --- a/esmvalcore/preprocessor/__init__.py +++ b/esmvalcore/preprocessor/__init__.py @@ -25,6 +25,7 @@ zonal_statistics, ) from ._compare_with_refs import bias, distance_metric +from ._concatenate import concatenate from ._cycles import amplitude from ._dask_progress import _compute_with_progress from ._derive import derive @@ -32,7 +33,6 @@ from ._io import ( _get_debug_filename, _sort_products, - concatenate, load, save, write_metadata, diff --git a/esmvalcore/preprocessor/_compare_with_refs.py b/esmvalcore/preprocessor/_compare_with_refs.py index c5daab0547..e29cda36d0 100644 --- a/esmvalcore/preprocessor/_compare_with_refs.py +++ b/esmvalcore/preprocessor/_compare_with_refs.py @@ -20,7 +20,7 @@ ignore_iris_vague_metadata_warnings, rechunk_cube, ) -from esmvalcore.preprocessor._io import concatenate +from esmvalcore.preprocessor._concatenate import concatenate from esmvalcore.preprocessor._other import histogram from esmvalcore.preprocessor._shared import ( get_all_coord_dims, diff --git a/esmvalcore/preprocessor/_concatenate.py b/esmvalcore/preprocessor/_concatenate.py new file mode 100644 index 0000000000..f3425c05ec --- /dev/null +++ b/esmvalcore/preprocessor/_concatenate.py @@ -0,0 +1,291 @@ +"""Module containing :func:`esmvalcore.preprocessor.concatenate`.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Any, NamedTuple, Self + +import cftime +import iris.exceptions +import numpy as np +from iris.cube import CubeList + +from esmvalcore.cmor.check import CheckLevels +from esmvalcore.esgf.facets import FACETS +from esmvalcore.iris_helpers import merge_cube_attributes +from esmvalcore.preprocessor._shared import _rechunk_aux_factory_dependencies + +if TYPE_CHECKING: + from collections.abc import Iterable, Sequence + + from iris.coords import Coord, DimCoord + from iris.cube import Cube + +logger 
logger = logging.getLogger(__name__)


def _delete_attributes(iris_object: Cube | Coord, atts: Iterable[str]) -> None:
    """Delete the given attributes from an Iris cube or coordinate.

    Attributes that are not present are silently ignored; the object is
    modified in place.
    """
    for att in atts:
        if att in iris_object.attributes:
            del iris_object.attributes[att]


def _concatenate_cubes(
    cubes: Iterable[Cube],
    check_level: CheckLevels,
) -> CubeList:
    """Concatenate cubes according to the check_level.

    For ``check_level`` stricter than or equal to ``CheckLevels.DEFAULT``
    the full set of Iris concatenation checks is enabled; for more relaxed
    levels all of them are switched off.
    """
    kwargs = {
        "check_aux_coords": True,
        "check_cell_measures": True,
        "check_ancils": True,
        "check_derived_coords": True,
    }

    if check_level > CheckLevels.DEFAULT:
        # Relaxed checking: disable every optional concatenation check.
        kwargs = dict.fromkeys(kwargs, False)
        logger.debug(
            "Concatenation will be performed without checking "
            "auxiliary coordinates, cell measures, ancillaries "
            "and derived coordinates present in the cubes.",
        )

    return CubeList(cubes).concatenate(**kwargs)


class _TimesHelper:
    """Expose a time coordinate's points with the interface expected by
    :func:`cftime.time2index` (sequence access plus a ``units`` string)."""

    def __init__(self, time: DimCoord) -> None:
        self.times = time.core_points()
        self.units = str(time.units)

    def __getattr__(self, name: str) -> Any:
        # Delegate any other attribute access to the underlying points array.
        return getattr(self.times, name)

    def __len__(self) -> int:
        return len(self.times)

    def __getitem__(self, key: Any) -> Any:
        return self.times[key]


def _remove_time_overlaps(cubes: CubeList) -> CubeList:
    """Handle time overlaps.

    Parameters
    ----------
    cubes : iris.cube.CubeList
        A list of cubes belonging to a single timeseries,
        ordered by starting point with possible overlaps.

    Returns
    -------
    iris.cube.CubeList
        A list of cubes belonging to a single timeseries,
        ordered by starting point with no overlaps.
    """
    if len(cubes) < 2:
        return cubes

    class _TrackedCube(NamedTuple):
        cube: Cube
        times: iris.coords.DimCoord
        start: float
        end: float

        @classmethod
        def from_cube(cls, cube: Cube) -> Self:
            """Construct tracked cube."""
            times = cube.coord("time")
            start, end = times.core_points()[[0, -1]]
            return cls(cube, times, start, end)

    new_cubes = CubeList()
    current_cube = _TrackedCube.from_cube(cubes[0])
    for new_cube in map(_TrackedCube.from_cube, cubes[1:]):
        if new_cube.start > current_cube.end:
            # no overlap, use current cube and start again from new cube
            logger.debug("Using %s", current_cube.cube)
            new_cubes.append(current_cube.cube)
            current_cube = new_cube
            continue
        # overlap
        if current_cube.end > new_cube.end:
            # current cube ends after new one, just forget new cube
            logger.debug(
                "Discarding %s because the time range "
                "is already covered by %s",
                new_cube.cube,
                current_cube.cube,
            )
            continue
        if new_cube.start == current_cube.start:
            # new cube completely covers current one
            # forget current cube
            current_cube = new_cube
            logger.debug(
                "Discarding %s because the time range is covered by %s",
                current_cube.cube,
                new_cube.cube,
            )
            continue
        # new cube ends after current one,
        # use all of new cube, and shorten current cube to
        # eliminate overlap with new cube
        cut_index = (
            cftime.time2index(
                new_cube.start,
                _TimesHelper(current_cube.times),
                current_cube.times.units.calendar,
                select="before",
            )
            + 1
        )
        logger.debug(
            "Using %s shortened to %s due to overlap",
            current_cube.cube,
            current_cube.times.cell(cut_index).point,
        )
        new_cubes.append(current_cube.cube[:cut_index])
        current_cube = new_cube

    logger.debug("Using %s", current_cube.cube)
    new_cubes.append(current_cube.cube)

    return new_cubes


def _fix_calendars(cubes: Sequence[Cube]) -> None:
    """Check and homogenise calendars, if possible.

    All cubes are converted in place to the most frequent calendar among
    them. Conversion failures (incompatible calendars) propagate from
    :meth:`cf_units.Unit.change_calendar`.
    """
    calendars = [cube.coord("time").units.calendar for cube in cubes]
    unique_calendars = np.unique(calendars)

    calendar_occurrences = np.array(
        [calendars.count(calendar) for calendar in unique_calendars],
    )
    # np.argmax returns the index of the first maximum, so this also works
    # when several calendars are tied for most frequent. The previous
    # int(np.argwhere(...)) raised TypeError on ties because argwhere
    # returns a multi-element array in that case.
    calendar_index = int(np.argmax(calendar_occurrences))

    for cube in cubes:
        time_coord = cube.coord("time")
        old_calendar = time_coord.units.calendar
        if old_calendar != unique_calendars[calendar_index]:
            new_unit = time_coord.units.change_calendar(
                unique_calendars[calendar_index],
            )
            time_coord.units = new_unit


def _raise_concatenation_exception(cubes: Sequence[Cube]) -> None:
    """Raise a ValueError describing why concatenation failed.

    Re-runs the concatenation with ``concatenate_cube`` to retrieve the
    exact Iris error message, logs the offending cubes, and re-raises as
    :class:`ValueError` with the Iris error chained as the cause.
    """
    # Concatenation not successful -> retrieve exact error message
    try:
        CubeList(cubes).concatenate_cube()
    except iris.exceptions.ConcatenateError as exc:
        msg = str(exc)
        logger.error("Can not concatenate cubes into a single one: %s", msg)
        logger.error("Resulting cubes:")
        for cube in cubes:
            logger.error(cube)
            time = cube.coord("time")
            logger.error("From %s to %s", time.cell(0), time.cell(-1))

        msg = f"Can not concatenate cubes: {msg}"
        raise ValueError(msg) from exc


def _sort_cubes_by_time(cubes: Iterable[Cube]) -> list[Cube]:
    """Sort CubeList by time coordinate.

    Raises
    ------
    ValueError
        If any cube is missing a time coordinate.
    TypeError
        If the cubes' time units are not comparable.
    """
    try:
        cubes = sorted(cubes, key=lambda c: c.coord("time").cell(0).point)
    except iris.exceptions.CoordinateNotFoundError as exc:
        msg = f"One or more cubes {cubes} are missing time coordinate: {exc!s}"
        raise ValueError(msg) from exc
    except TypeError as error:
        msg = f"Cubes cannot be sorted due to differing time units: {error!s}"
        raise TypeError(msg) from error
    return cubes


def _concatenate_cubes_by_experiment(cubes: Sequence[Cube]) -> Sequence[Cube]:
    """Concatenate cubes by experiment.

    This ensures overlapping (branching) experiments are handled correctly.
    """
    # get the possible facet names in CMIP3, 5, 6 for exp
    # currently these are 'experiment', 'experiment_id'
    exp_facet_names = {
        project["exp"] for project in FACETS.values() if "exp" in project
    }

    def get_exp(cube: Cube) -> Any:
        # First matching experiment facet wins; no facet means "no experiment".
        for key in exp_facet_names:
            if key in cube.attributes:
                return cube.attributes[key]
        return ""

    experiments = {get_exp(cube) for cube in cubes}
    if len(experiments) > 1:
        # first do experiment-wise concatenation, then time-based
        cubes = [
            concatenate([cube for cube in cubes if get_exp(cube) == exp])
            for exp in experiments
        ]

    return cubes


def concatenate(
    cubes: Sequence[Cube],
    check_level: CheckLevels = CheckLevels.DEFAULT,
) -> Cube:
    """Concatenate all cubes after fixing metadata.

    Note that the input cubes are modified in place: attributes that
    interfere with concatenation (e.g. ``history``) are removed, and time
    calendars are homogenised.

    Parameters
    ----------
    cubes: iterable of iris.cube.Cube
        Data cubes to be concatenated
    check_level: CheckLevels
        Level of strictness of the checks in the concatenation.

    Returns
    -------
    cube: iris.cube.Cube
        Resulting concatenated cube. An empty input sequence is returned
        unchanged (i.e. not as a cube).

    Raises
    ------
    ValueError
        Concatenation was not possible.
    """
    if not cubes:
        return cubes
    if len(cubes) == 1:
        return cubes[0]

    for cube in cubes:
        # Remove attributes that cause issues with merging and concatenation
        _delete_attributes(
            cube,
            ("creation_date", "tracking_id", "history", "comment"),
        )
        for coord in cube.coords():
            # CMOR sometimes adds a history to the coordinates.
            _delete_attributes(coord, ("history",))

    cubes = _concatenate_cubes_by_experiment(cubes)

    merge_cube_attributes(cubes)
    cubes = _sort_cubes_by_time(cubes)
    _fix_calendars(cubes)
    cubes = _remove_time_overlaps(cubes)
    cubes = [_rechunk_aux_factory_dependencies(cube) for cube in cubes]
    result = _concatenate_cubes(cubes, check_level=check_level)

    if len(result) == 1:
        result = result[0]
    else:
        _raise_concatenation_exception(result)

    return result
ignore_warnings: list[dict[str, Any]] | None = None, @@ -173,260 +158,6 @@ def _load_from_file( return cubes -def _concatenate_cubes(cubes, check_level): - """Concatenate cubes according to the check_level.""" - kwargs = { - "check_aux_coords": True, - "check_cell_measures": True, - "check_ancils": True, - "check_derived_coords": True, - } - - if check_level > CheckLevels.DEFAULT: - kwargs = dict.fromkeys(kwargs, False) - logger.debug( - "Concatenation will be performed without checking " - "auxiliary coordinates, cell measures, ancillaries " - "and derived coordinates present in the cubes.", - ) - - return CubeList(cubes).concatenate(**kwargs) - - -class _TimesHelper: - def __init__(self, time): - self.times = time.core_points() - self.units = str(time.units) - - def __getattr__(self, name): - return getattr(self.times, name) - - def __len__(self): - return len(self.times) - - def __getitem__(self, key): - return self.times[key] - - -def _check_time_overlaps(cubes: CubeList) -> CubeList: - """Handle time overlaps. - - Parameters - ---------- - cubes : iris.cube.CubeList - A list of cubes belonging to a single timeseries, - ordered by starting point with possible overlaps. - - Returns - ------- - iris.cube.CubeList - A list of cubes belonging to a single timeseries, - ordered by starting point with no overlaps. 
- """ - if len(cubes) < 2: - return cubes - - class _TrackedCube(NamedTuple): - cube: Cube - times: iris.coords.DimCoord - start: float - end: float - - @classmethod - def from_cube(cls, cube): - """Construct tracked cube.""" - times = cube.coord("time") - start, end = times.core_points()[[0, -1]] - return cls(cube, times, start, end) - - new_cubes = CubeList() - current_cube = _TrackedCube.from_cube(cubes[0]) - for new_cube in map(_TrackedCube.from_cube, cubes[1:]): - if new_cube.start > current_cube.end: - # no overlap, use current cube and start again from new cube - logger.debug("Using %s", current_cube.cube) - new_cubes.append(current_cube.cube) - current_cube = new_cube - continue - # overlap - if current_cube.end > new_cube.end: - # current cube ends after new one, just forget new cube - logger.debug( - "Discarding %s because the time range " - "is already covered by %s", - new_cube.cube, - current_cube.cube, - ) - continue - if new_cube.start == current_cube.start: - # new cube completely covers current one - # forget current cube - current_cube = new_cube - logger.debug( - "Discarding %s because the time range is covered by %s", - current_cube.cube, - new_cube.cube, - ) - continue - # new cube ends after current one, - # use all of new cube, and shorten current cube to - # eliminate overlap with new cube - cut_index = ( - cftime.time2index( - new_cube.start, - _TimesHelper(current_cube.times), - current_cube.times.units.calendar, - select="before", - ) - + 1 - ) - logger.debug( - "Using %s shortened to %s due to overlap", - current_cube.cube, - current_cube.times.cell(cut_index).point, - ) - new_cubes.append(current_cube.cube[:cut_index]) - current_cube = new_cube - - logger.debug("Using %s", current_cube.cube) - new_cubes.append(current_cube.cube) - - return new_cubes - - -def _fix_calendars(cubes): - """Check and homogenise calendars, if possible.""" - calendars = [cube.coord("time").units.calendar for cube in cubes] - unique_calendars = 
np.unique(calendars) - - calendar_ocurrences = np.array( - [calendars.count(calendar) for calendar in unique_calendars], - ) - calendar_index = int( - np.argwhere(calendar_ocurrences == calendar_ocurrences.max()), - ) - - for cube in cubes: - time_coord = cube.coord("time") - old_calendar = time_coord.units.calendar - if old_calendar != unique_calendars[calendar_index]: - new_unit = time_coord.units.change_calendar( - unique_calendars[calendar_index], - ) - time_coord.units = new_unit - - -def _get_concatenation_error(cubes): - """Raise an error for concatenation.""" - # Concatenation not successful -> retrieve exact error message - try: - CubeList(cubes).concatenate_cube() - except iris.exceptions.ConcatenateError as exc: - msg = str(exc) - logger.error("Can not concatenate cubes into a single one: %s", msg) - logger.error("Resulting cubes:") - for cube in cubes: - logger.error(cube) - time = cube.coord("time") - logger.error("From %s to %s", time.cell(0), time.cell(-1)) - - msg = f"Can not concatenate cubes: {msg}" - raise ValueError(msg) - - -def _sort_cubes_by_time(cubes): - """Sort CubeList by time coordinate.""" - try: - cubes = sorted(cubes, key=lambda c: c.coord("time").cell(0).point) - except iris.exceptions.CoordinateNotFoundError as exc: - msg = f"One or more cubes {cubes} are missing time coordinate: {exc!s}" - raise ValueError(msg) from exc - except TypeError as error: - msg = f"Cubes cannot be sorted due to differing time units: {error!s}" - raise TypeError(msg) from error - return cubes - - -def _concatenate_cubes_by_experiment(cubes: list[Cube]) -> list[Cube]: - """Concatenate cubes by experiment. - - This ensures overlapping (branching) experiments are handled correctly. 
- """ - # get the possible facet names in CMIP3, 5, 6 for exp - # currently these are 'experiment', 'experiment_id' - exp_facet_names = { - project["exp"] for project in FACETS.values() if "exp" in project - } - - def get_exp(cube: Cube) -> str: - for key in exp_facet_names: - if key in cube.attributes: - return cube.attributes[key] - return "" - - experiments = {get_exp(cube) for cube in cubes} - if len(experiments) > 1: - # first do experiment-wise concatenation, then time-based - cubes = [ - concatenate([cube for cube in cubes if get_exp(cube) == exp]) - for exp in experiments - ] - - return cubes - - -def concatenate(cubes, check_level=CheckLevels.DEFAULT): - """Concatenate all cubes after fixing metadata. - - Parameters - ---------- - cubes: iterable of iris.cube.Cube - Data cubes to be concatenated - check_level: CheckLevels - Level of strictness of the checks in the concatenation. - - Returns - ------- - cube: iris.cube.Cube - Resulting concatenated cube. - - Raises - ------ - ValueError - Concatenation was not possible. - """ - if not cubes: - return cubes - if len(cubes) == 1: - return cubes[0] - - for cube in cubes: - # Remove attributes that cause issues with merging and concatenation - _delete_attributes( - cube, - ("creation_date", "tracking_id", "history", "comment"), - ) - for coord in cube.coords(): - # CMOR sometimes adds a history to the coordinates. 
- _delete_attributes(coord, ("history",)) - - cubes = _concatenate_cubes_by_experiment(cubes) - - merge_cube_attributes(cubes) - cubes = _sort_cubes_by_time(cubes) - _fix_calendars(cubes) - cubes = _check_time_overlaps(cubes) - cubes = [_rechunk_aux_factory_dependencies(cube) for cube in cubes] - result = _concatenate_cubes(cubes, check_level=check_level) - - if len(result) == 1: - result = result[0] - else: - _get_concatenation_error(result) - - return result - - def save( # noqa: C901 cubes: Sequence[Cube], filename: Path | str, diff --git a/tests/integration/preprocessor/_concatenate/__init__.py b/tests/integration/preprocessor/_concatenate/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/preprocessor/_io/test_concatenate.py b/tests/integration/preprocessor/_concatenate/test_concatenate.py similarity index 90% rename from tests/integration/preprocessor/_io/test_concatenate.py rename to tests/integration/preprocessor/_concatenate/test_concatenate.py index 32f9d7afa2..ecb1fe99aa 100644 --- a/tests/integration/preprocessor/_io/test_concatenate.py +++ b/tests/integration/preprocessor/_concatenate/test_concatenate.py @@ -1,4 +1,4 @@ -"""Integration tests for :func:`esmvalcore.preprocessor._io.concatenate`.""" +"""Integration tests for :func:`esmvalcore.preprocessor.concatenate`.""" import unittest @@ -10,7 +10,8 @@ from iris.cube import Cube, CubeList from esmvalcore.cmor.check import CheckLevels -from esmvalcore.preprocessor import _io +from esmvalcore.preprocessor import concatenate +from esmvalcore.preprocessor._concatenate import _remove_time_overlaps from tests import assert_array_equal @@ -82,7 +83,7 @@ def real_hybrid_pressure_cube_list(): def test_concatenation_with_aux_factory(real_hybrid_pressure_cube_list): """Test actual concatenation of a cube with a derived coordinate.""" - concatenated = _io.concatenate(real_hybrid_pressure_cube_list) + concatenated = concatenate(real_hybrid_pressure_cube_list) 
air_pressure_coord = concatenated.coord("air_pressure") expected_coord = AuxCoord( [[[[1.0]]], [[[1.0]]]], @@ -100,7 +101,7 @@ def test_concatenation_with_aux_factory(real_hybrid_pressure_cube_list): def test_relax_concatenation(check_level, caplog): caplog.set_level("DEBUG") cubes = get_hybrid_pressure_cube_list() - _io.concatenate(cubes, check_level) + concatenate(cubes, check_level) msg = ( "Concatenation will be performed without checking " "auxiliary coordinates, cell measures, ancillaries " @@ -110,7 +111,7 @@ def test_relax_concatenation(check_level, caplog): class TestConcatenate(unittest.TestCase): - """Tests for :func:`esmvalcore.preprocessor._io.concatenate`.""" + """Tests for :func:`esmvalcore.preprocessor.concatenate`.""" def setUp(self): """Start tests.""" @@ -136,7 +137,7 @@ def _add_cube(self, data, coord): def test_concatenate(self): """Test concatenation of two cubes.""" - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1, 2, 3, 4, 5, 6]), @@ -145,12 +146,12 @@ def test_concatenate(self): def test_concatenate_empty_cubes(self): """Test concatenation with empty :class:`iris.cube.CubeList`.""" empty_cubes = CubeList([]) - result = _io.concatenate(empty_cubes) + result = concatenate(empty_cubes) assert result is empty_cubes def test_concatenate_noop(self): """Test concatenation of a single cube.""" - concatenated = _io.concatenate([self.raw_cubes[0]]) + concatenated = concatenate([self.raw_cubes[0]]) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1, 2]), @@ -161,7 +162,7 @@ def test_concatenate_with_overlap( ): """Test concatenation of time overalapping cubes.""" self._add_cube([6.5, 7.5], [6.0, 7.0]) - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]), 
@@ -171,11 +172,17 @@ def test_concatenate_with_overlap( np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.5, 7.5]), ) + def test_remove_time_overlap_noop(self): + """Test time handling of a single cube.""" + cubes = [self.raw_cubes[0]] + result = _remove_time_overlaps(cubes) + assert result is cubes + def test_concatenate_with_overlap_2(self): """Test a more generic case.""" self._add_cube([65.0, 75.0, 100.0], [9.0, 10.0, 11.0]) self._add_cube([65.0, 75.0, 100.0], [7.0, 8.0, 9.0]) - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array( @@ -190,7 +197,7 @@ def test_concatenate_with_overlap_3(self): [65.0, 75.0, 100.0, 100.0, 100.0, 112.0], [7.0, 8.0, 9.0, 10.0, 11.0, 12.0], ) - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array( @@ -230,13 +237,13 @@ def test_concatenate_with_overlap_same_start(self): dim_coords_and_dims=((time_coord, 0),), ), ) - concatenated = _io.concatenate(raw_cubes) + concatenated = concatenate(raw_cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1.0, 7.0]), ) raw_cubes.reverse() - concatenated = _io.concatenate(raw_cubes) + concatenated = concatenate(raw_cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1.0, 7.0]), @@ -267,7 +274,7 @@ def test_concatenate_with_iris_exception(self): dim_coords_and_dims=((time_coord_2, 0),), ) cubes_single_ovlp = [cube2, cube1] - cubess = _io.concatenate(cubes_single_ovlp) + cubess = concatenate(cubes_single_ovlp) # this tests the scalar to vector cube conversion too time_points = cubess.coord("time").core_points() np.testing.assert_array_equal(time_points, [1.0, 1.5, 5.0, 7.0]) @@ -298,7 +305,7 @@ def test_concatenate_no_time_coords(self): dim_coords_and_dims=((ap_coord_2, 0),), ) with self.assertRaises(ValueError): - 
_io.concatenate([cube1, cube2]) + concatenate([cube1, cube2]) def test_concatenate_with_order(self): """Test a more generic case.""" @@ -325,13 +332,13 @@ def test_concatenate_with_order(self): dim_coords_and_dims=((time_coord_2, 0),), ) cubes_ordered = [cube2, cube1] - concatenated = _io.concatenate(cubes_ordered) + concatenated = concatenate(cubes_ordered) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1.0, 2.0, 5.0, 7.0, 100.0]), ) cubes_reverse = [cube1, cube2] - concatenated = _io.concatenate(cubes_reverse) + concatenated = concatenate(cubes_reverse) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1.0, 2.0, 5.0, 7.0, 100.0]), @@ -363,7 +370,7 @@ def test_concatenate_by_experiment_first(self): ssp585_1.attributes["experiment_id"] = "ssp585" ssp585_2 = ssp585_1.copy() ssp585_2.coord("time").points = np.arange(5, 7) - result = _io.concatenate( + result = concatenate( [historical_1, historical_2, historical_3, ssp585_1, ssp585_2], ) assert_array_equal(result.coord("time").points, np.arange(7)) @@ -375,7 +382,7 @@ def test_concatenate_remove_unwanted_attributes(self): for i, cube in enumerate(self.raw_cubes): for attr in attributes: cube.attributes[attr] = f"{attr}-{i}" - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) assert not set(attributes) & set(concatenated.attributes) def test_concatenate_remove_unwanted_attributes_from_coords(self): @@ -385,7 +392,7 @@ def test_concatenate_remove_unwanted_attributes_from_coords(self): for coord in cube.coords(): for attr in attributes: coord.attributes[attr] = f"{attr}-{i}" - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) for coord in concatenated.coords(): assert not set(attributes) & set(coord.attributes) @@ -397,7 +404,7 @@ def test_concatenate_differing_attributes(self): "equal_attr": 1, "different_attr": 3 - idx, } - concatenated = _io.concatenate(cubes) + concatenated = 
concatenate(cubes) np.testing.assert_array_equal( concatenated.coord("time").points, np.array([1, 2, 3, 4, 5, 6]), @@ -425,7 +432,7 @@ def test_convert_calendar_concatenate_with_overlap(self): dim_coords_and_dims=((time_coord, 0),), ), ) - concatenated = _io.concatenate(self.raw_cubes) + concatenated = concatenate(self.raw_cubes) assert concatenated.coord("time").units.calendar == "standard" def test_fail_on_calendar_concatenate_with_overlap(self): @@ -444,11 +451,11 @@ def test_fail_on_calendar_concatenate_with_overlap(self): ), ) with self.assertRaises(TypeError): - _io.concatenate(self.raw_cubes) + concatenate(self.raw_cubes) def test_fail_metadata_differs(self): """Test exception raised if two cubes have different metadata.""" self.raw_cubes[0].units = "m" self.raw_cubes[1].units = "K" with self.assertRaises(ValueError): - _io.concatenate(self.raw_cubes) + concatenate(self.raw_cubes)