|  | 
|  | 1 | +# Copyright Iris contributors | 
|  | 2 | +# | 
|  | 3 | +# This file is part of Iris and is released under the BSD license. | 
|  | 4 | +# See LICENSE in the root of the repository for full licensing details. | 
|  | 5 | +"""Integration tests for save+load of datales cubes.""" | 
|  | 6 | + | 
|  | 7 | +import numpy as np | 
|  | 8 | +import pytest | 
|  | 9 | + | 
|  | 10 | +import iris | 
|  | 11 | +from iris.coords import DimCoord | 
|  | 12 | +from iris.cube import Cube | 
|  | 13 | +from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper | 
|  | 14 | +from iris.fileformats.netcdf.saver import Saver | 
|  | 15 | + | 
|  | 16 | + | 
class TestDataless:
    """Integration tests for netCDF save + load round-trips of dataless cubes.

    A "dataless" cube has a shape but no data payload.  These tests check how
    such cubes are represented in a netCDF file, that they survive a
    save + load round-trip, and that they do not occupy real disk space.
    """

    @pytest.fixture(autouse=True)
    def setup(self, tmp_path_factory):
        # Build a small dataless test cube (shape only, no data) with simple
        # numeric dim coords, and a per-test-class temporary output path.
        ny, nx = 3, 4
        self.testcube = Cube(
            shape=(ny, nx),
            long_name="testdata",
            dim_coords_and_dims=[
                (DimCoord(np.arange(ny), long_name="y"), 0),
                (DimCoord(np.arange(nx), long_name="x"), 1),
            ],
        )
        self.testdir = tmp_path_factory.mktemp("dataless")
        self.test_path = self.testdir / "test.nc"

    @staticmethod
    def _strip_saveload_additions(reloaded_cube):
        # Remove the extra metadata which a netCDF save + load cycle adds
        # ("Conventions" attribute and var_names), so that a reloaded cube can
        # be compared directly against the in-memory original.
        reloaded_cube.attributes.pop("Conventions", None)
        reloaded_cube.var_name = None
        for co in reloaded_cube.coords():
            co.var_name = None

    def test_dataless_save(self):
        # Check that we can save a dataless cube, and what that looks like in the file.
        iris.save(self.testcube, self.test_path)
        # The dataless marker attribute is file-only: saving must not add it
        # to the in-memory cube.
        assert Saver._DATALESS_ATTRNAME not in self.testcube.attributes
        # Check the content as seen in the file.
        ncds = DatasetWrapper(self.test_path)
        try:
            var = ncds.variables["testdata"]
            assert Saver._DATALESS_ATTRNAME in var.ncattrs()
            assert var.dtype == Saver._DATALESS_DTYPE
            assert "_FillValue" in var.ncattrs()
            assert var._FillValue == Saver._DATALESS_FILLVALUE
            # Every point is masked, i.e. the variable holds no actual data.
            assert np.all(np.ma.getmaskarray(var[:]) == True)  # noqa: E712
        finally:
            # Always release the netCDF file handle, even if an assert fails
            # (an open handle can lock the file on some platforms).
            ncds.close()

    def test_dataless_load(self):
        # Check that we can load a saved dataless cube, and it matches the original.
        iris.save(self.testcube, self.test_path)

        # NB Load with load_raw, since we haven't finished supporting dataless merge.
        (result_cube,) = iris.load_raw(self.test_path)
        assert result_cube.is_dataless()
        # The file-only marker attribute must not appear on the loaded cube.
        # Use the Saver constant (not a hard-coded name) for consistency with
        # test_dataless_save.
        assert Saver._DATALESS_ATTRNAME not in result_cube.attributes

        # Strip off extra things added by netcdf save + load.
        self._strip_saveload_additions(result_cube)

        # Result now == original.
        assert result_cube == self.testcube

    def test_mixture_saveload(self):
        # Check that a mixture of dataless and "normal" cubes can be saved + loaded back.
        dataless = self.testcube
        ny = dataless.shape[0]
        dataful = Cube(
            np.ones((ny, 3)),
            long_name="other",
            dim_coords_and_dims=[(dataless.coord("y"), 0)],
        )
        iris.save([dataless, dataful], self.test_path)
        # NB Load with load_raw, since we haven't finished supporting dataless merge.
        cubes = iris.load_raw(self.test_path)
        assert len(cubes) == 2
        read_dataless = cubes.extract_cube("testdata")
        read_dataful = cubes.extract_cube("other")
        # Each cube retains its dataless / dataful nature through the round-trip.
        assert read_dataless.is_dataless()
        assert not read_dataful.is_dataless()
        for cube in (read_dataless, read_dataful):
            self._strip_saveload_additions(cube)
        assert read_dataless == dataless
        assert read_dataful == dataful

    def test_nodata_size(self):
        # Check that a file saved with a large dataless cube does *not* occupy a large
        # amount of diskspace.
        ny, nx = 10000, 10000
        data_dims = (ny, nx)
        dataless_cube = Cube(shape=data_dims)

        iris.save(dataless_cube, self.test_path)

        data_size_bytes = ny * nx  # bytes, since dtype is "u1" (approx 100Mb)
        filesize_bytes = self.test_path.stat().st_size
        # Check that the file size < 1/10 variable array size.
        # The 0.1 is a bit arbitrary, but it makes the point!
        assert filesize_bytes < 0.1 * data_size_bytes
0 commit comments