diff --git a/esmvalcore/_main.py b/esmvalcore/_main.py index 9c8d413ef7..5e5668865b 100644 --- a/esmvalcore/_main.py +++ b/esmvalcore/_main.py @@ -35,11 +35,15 @@ import sys from importlib.metadata import entry_points from pathlib import Path +from typing import TYPE_CHECKING import fire from esmvalcore.config._config import warn_if_old_extra_facets_exist +if TYPE_CHECKING: + from esmvalcore.config import Session + # set up logging logger = logging.getLogger(__name__) @@ -80,7 +84,7 @@ def parse_resume(resume, recipe): return resume -def process_recipe(recipe_file: Path, session): +def process_recipe(recipe_file: Path, session: Session) -> None: """Process recipe.""" import datetime import shutil @@ -318,7 +322,7 @@ def _copy_config_file( in_file: Path, out_file: Path, overwrite: bool, - ): + ) -> None: """Copy a configuration file.""" import shutil @@ -634,7 +638,7 @@ def _create_session_dir(session): def _run( self, recipe: Path, - session, + session: Session, cli_config_dir: Path | None, ) -> None: """Run `recipe` using `session`.""" @@ -697,7 +701,7 @@ def _clean_preproc(session): shutil.rmtree(session.preproc_dir) @staticmethod - def _get_recipe(recipe) -> Path: + def _get_recipe(recipe: str) -> Path: from esmvalcore.config._diagnostics import DIAGNOSTICS if not os.path.isfile(recipe): diff --git a/esmvalcore/_provenance.py b/esmvalcore/_provenance.py index a4f3b4c79d..1df4022be2 100644 --- a/esmvalcore/_provenance.py +++ b/esmvalcore/_provenance.py @@ -139,7 +139,7 @@ def __init__( attributes: dict[str, Any] | None = None, ancestors: Iterable[TrackedFile] | None = None, prov_filename: str | None = None, - ): + ) -> None: """Create an instance of a file with provenance tracking. 
Arguments @@ -182,7 +182,7 @@ def attributes(self) -> dict[str, Any]: return self._attributes @attributes.setter - def attributes(self, value: dict[str, Any] | None): + def attributes(self, value: dict[str, Any] | None) -> None: """Set attributes describing the file.""" self._attributes = value @@ -194,11 +194,11 @@ def __repr__(self) -> str: """Return representation string (e.g., used by ``pformat``).""" return f"{self.__class__.__name__}: {self.filename}" - def __eq__(self, other) -> bool: + def __eq__(self, other: object) -> bool: """Check if `other` equals `self`.""" return hasattr(other, "filename") and self.filename == other.filename - def __lt__(self, other) -> bool: + def __lt__(self, other: object) -> bool: """Check if `other` should be sorted before `self`.""" return hasattr(other, "filename") and self.filename < other.filename diff --git a/esmvalcore/_recipe/check.py b/esmvalcore/_recipe/check.py index 99e40baff9..55e1b52709 100644 --- a/esmvalcore/_recipe/check.py +++ b/esmvalcore/_recipe/check.py @@ -364,7 +364,7 @@ def _verify_span_value(span: str) -> None: raise RecipeError(msg) -def _verify_groupby(groupby: Any) -> None: +def _verify_groupby(groupby: list[str]) -> None: """Raise error if groupby arguments cannot be verified.""" if not isinstance(groupby, list): msg = ( @@ -375,7 +375,7 @@ def _verify_groupby(groupby: Any) -> None: raise RecipeError(msg) -def _verify_keep_input_datasets(keep_input_datasets: Any) -> None: +def _verify_keep_input_datasets(keep_input_datasets: bool) -> None: if not isinstance(keep_input_datasets, bool): msg = ( f"Invalid value encountered for `keep_input_datasets`." 
@@ -385,7 +385,7 @@ def _verify_keep_input_datasets(keep_input_datasets: Any) -> None: raise RecipeError(msg) -def _verify_ignore_scalar_coords(ignore_scalar_coords: Any) -> None: +def _verify_ignore_scalar_coords(ignore_scalar_coords: bool) -> None: if not isinstance(ignore_scalar_coords, bool): msg = ( f"Invalid value encountered for `ignore_scalar_coords`." diff --git a/esmvalcore/_recipe/from_datasets.py b/esmvalcore/_recipe/from_datasets.py index d0d60e0360..996c81b37a 100644 --- a/esmvalcore/_recipe/from_datasets.py +++ b/esmvalcore/_recipe/from_datasets.py @@ -5,7 +5,7 @@ import itertools import logging import re -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Iterable, Mapping from functools import partial from typing import TYPE_CHECKING, Any @@ -16,6 +16,7 @@ from ._io import _load_recipe if TYPE_CHECKING: + from collections.abc import Sequence from pathlib import Path from esmvalcore.dataset import Dataset @@ -114,7 +115,7 @@ def _to_frozen(item): return item -def _move_one_level_up(base: dict, level: str, target: str): +def _move_one_level_up(base: dict, level: str, target: str) -> None: """Move datasets one level up in the recipe.""" groups = base[level] if not groups: diff --git a/esmvalcore/_recipe/recipe.py b/esmvalcore/_recipe/recipe.py index da159183ba..4d11956467 100644 --- a/esmvalcore/_recipe/recipe.py +++ b/esmvalcore/_recipe/recipe.py @@ -16,7 +16,7 @@ from esmvalcore import __version__, esgf from esmvalcore._provenance import get_recipe_provenance -from esmvalcore._task import BaseTask, DiagnosticTask, ResumeTask, TaskSet +from esmvalcore._task import DiagnosticTask, ResumeTask, TaskSet from esmvalcore.config._config import TASKSEP from esmvalcore.config._dask import validate_dask_config from esmvalcore.config._diagnostics import TAGS @@ -59,6 +59,9 @@ if TYPE_CHECKING: from collections.abc import Iterable, Sequence + from prov.model import ProvEntity + + from esmvalcore._task import BaseTask from 
esmvalcore.config import Session from esmvalcore.io.protocol import DataElement from esmvalcore.typing import Facets @@ -964,7 +967,10 @@ def _need_ncl(raw_diagnostics: dict[str, Diagnostic]) -> bool: return True return False - def _initialize_provenance(self, raw_documentation: dict[str, Any]): + def _initialize_provenance( + self, + raw_documentation: dict[str, Any], + ) -> ProvEntity: """Initialize the recipe provenance.""" doc = deepcopy(raw_documentation) @@ -1380,10 +1386,10 @@ def write_html_summary(self) -> None: RecipeOutput, ) - output = self.get_output() - try: - output = RecipeOutput.from_core_recipe_output(output) + output = RecipeOutput.from_core_recipe_output( + self.get_output(), + ) except LookupError as error: # See https://github.com/ESMValGroup/ESMValCore/issues/28 logger.warning("Could not write HTML report: %s", error) diff --git a/esmvalcore/_recipe/to_datasets.py b/esmvalcore/_recipe/to_datasets.py index e335417175..65951450d1 100644 --- a/esmvalcore/_recipe/to_datasets.py +++ b/esmvalcore/_recipe/to_datasets.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from collections.abc import Iterable, Iterator, Sequence +from collections.abc import Iterable from copy import deepcopy from typing import TYPE_CHECKING, Any @@ -23,6 +23,7 @@ from ._io import _load_recipe if TYPE_CHECKING: + from collections.abc import Iterator, Sequence from pathlib import Path from esmvalcore.config import Session diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index fe77472888..231f119a20 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -1,5 +1,7 @@ """ESMValtool task definition.""" +from __future__ import annotations + import abc import contextlib import datetime @@ -283,7 +285,7 @@ def flatten(self): tasks.add(self) return tasks - def run(self, input_files=None): + def run(self, input_files: list[str] | None = None) -> None: """Run task.""" if not self.output_files: if input_files is None: @@ -308,7 +310,7 @@ def run(self, 
input_files=None): return self.output_files @abc.abstractmethod - def _run(self, input_files): + def _run(self, input_files: list[str]) -> list[str]: """Run task.""" def get_product_attributes(self) -> dict: @@ -362,7 +364,7 @@ def __init__(self, prev_preproc_dir, preproc_dir, name): super().__init__(ancestors=None, name=name, products=products) - def _run(self, _): + def _run(self, _: list[str]) -> list[str]: """Return the result of a previous run.""" metadata = self.get_product_attributes() @@ -810,11 +812,11 @@ def available_cpu_count() -> int: class TaskSet(set): """Container for tasks.""" - def flatten(self) -> "TaskSet": + def flatten(self) -> TaskSet: """Flatten the list of tasks.""" return TaskSet(t for task in self for t in task.flatten()) - def get_independent(self) -> "TaskSet": + def get_independent(self) -> TaskSet: """Return a set of independent tasks.""" independent_tasks = TaskSet() all_tasks = self.flatten() diff --git a/esmvalcore/cmor/_fixes/cmip5/ec_earth.py b/esmvalcore/cmor/_fixes/cmip5/ec_earth.py index ef65f1bbba..d892172c95 100644 --- a/esmvalcore/cmor/_fixes/cmip5/ec_earth.py +++ b/esmvalcore/cmor/_fixes/cmip5/ec_earth.py @@ -1,6 +1,8 @@ """Fixes for EC-Earth model.""" -from collections.abc import Iterable +from __future__ import annotations + +from typing import TYPE_CHECKING import iris import numpy as np @@ -12,6 +14,9 @@ cube_to_aux_coord, ) +if TYPE_CHECKING: + from collections.abc import Iterable + class Sic(Fix): """Fixes for sic.""" diff --git a/esmvalcore/cmor/_fixes/cmip6/cesm2.py b/esmvalcore/cmor/_fixes/cmip6/cesm2.py index 4ce856e3de..7279d0aa54 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cesm2.py +++ b/esmvalcore/cmor/_fixes/cmip6/cesm2.py @@ -1,7 +1,9 @@ """Fixes for CESM2 model.""" -from pathlib import Path +from __future__ import annotations + from shutil import copyfile +from typing import TYPE_CHECKING import iris import iris.coords @@ -18,6 +20,9 @@ fix_ocean_depth_coord, ) +if TYPE_CHECKING: + from pathlib import Path + 
class Cl(Fix): """Fixes for ``cl``.""" diff --git a/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py b/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py index 6918743978..6ba00891eb 100644 --- a/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py +++ b/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py @@ -1,10 +1,15 @@ """Fixes for E3SM-1-1 model.""" -from iris.cube import Cube +from __future__ import annotations + +from typing import TYPE_CHECKING from esmvalcore.cmor.fix import Fix from esmvalcore.preprocessor._shared import get_array_module +if TYPE_CHECKING: + from iris.cube import Cube + def _mask_greater(cube: Cube, value: float) -> Cube: """Mask all data of cube which is greater than ``value``.""" diff --git a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py index fa6ce5e2af..5aafc22cc2 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py +++ b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py @@ -1,12 +1,17 @@ """Fixes for EC-Earth3-Veg-LR model.""" -from collections.abc import Sequence +from __future__ import annotations -import iris.cube +from typing import TYPE_CHECKING from esmvalcore.cmor._fixes.common import OceanFixGrid from esmvalcore.cmor._fixes.fix import Fix +if TYPE_CHECKING: + from collections.abc import Sequence + + import iris.cube + class AllVars(Fix): """Fixes for all variables.""" diff --git a/esmvalcore/cmor/_fixes/fix.py b/esmvalcore/cmor/_fixes/fix.py index 789724ffe3..388577f33d 100644 --- a/esmvalcore/cmor/_fixes/fix.py +++ b/esmvalcore/cmor/_fixes/fix.py @@ -8,13 +8,13 @@ import logging import tempfile from pathlib import Path -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any import dask import numpy as np from cf_units import Unit -from iris.coords import Coord, CoordExtent -from iris.cube import Cube, CubeList +from iris.coords import CoordExtent +from iris.cube import CubeList from iris.exceptions import UnitConversionError from iris.util import reverse @@ -37,6 +37,8 @@ import ncdata 
import xarray as xr + from iris.coords import Coord + from iris.cube import Cube from esmvalcore.cmor.table import CoordinateInfo, VariableInfo from esmvalcore.config import Session @@ -421,12 +423,12 @@ def _msg_suffix(cube: Cube) -> str: return f"\n(for file {cube.attributes['source_file']})" return f"\n(for variable {cube.var_name})" - def _debug_msg(self, cube: Cube, msg: str, *args) -> None: + def _debug_msg(self, cube: Cube, msg: str, *args: Any) -> None: """Print debug message.""" msg += self._msg_suffix(cube) generic_fix_logger.debug(msg, *args) - def _warning_msg(self, cube: Cube, msg: str, *args) -> None: + def _warning_msg(self, cube: Cube, msg: str, *args: Any) -> None: """Print debug message.""" msg += self._msg_suffix(cube) generic_fix_logger.warning(msg, *args) diff --git a/esmvalcore/cmor/_fixes/icon/_base_fixes.py b/esmvalcore/cmor/_fixes/icon/_base_fixes.py index 4023851862..c94b3483c6 100644 --- a/esmvalcore/cmor/_fixes/icon/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/icon/_base_fixes.py @@ -10,7 +10,7 @@ from pathlib import Path from shutil import copyfileobj from tempfile import NamedTemporaryFile -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse import iris @@ -19,8 +19,8 @@ import requests from cf_units import Unit from iris import NameConstraint -from iris.coords import AuxCoord, Coord, DimCoord -from iris.cube import Cube, CubeList +from iris.coords import AuxCoord, DimCoord +from iris.cube import CubeList from iris.mesh import Connectivity, MeshXY import esmvalcore.local @@ -28,6 +28,10 @@ from esmvalcore.config._data_sources import _get_data_sources from esmvalcore.iris_helpers import add_leading_dim_to_cube, date2num +if TYPE_CHECKING: + from iris.coords import Coord + from iris.cube import Cube + logger = logging.getLogger(__name__) diff --git a/esmvalcore/cmor/_fixes/obs4mips/c3s_gto_ecv_9_0.py b/esmvalcore/cmor/_fixes/obs4mips/c3s_gto_ecv_9_0.py index 3252ad0745..608e6919fd 100644 --- 
a/esmvalcore/cmor/_fixes/obs4mips/c3s_gto_ecv_9_0.py +++ b/esmvalcore/cmor/_fixes/obs4mips/c3s_gto_ecv_9_0.py @@ -1,10 +1,16 @@ """Fixes for obs4MIPs dataset C3S-GTO-ECV-9-0.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + import dask.array as da -from iris.cube import Cube from esmvalcore.cmor._fixes.fix import Fix +if TYPE_CHECKING: + from iris.cube import Cube + class Toz(Fix): """Fixes for toz.""" diff --git a/esmvalcore/cmor/_fixes/shared.py b/esmvalcore/cmor/_fixes/shared.py index 7cfb832dcf..13d478ea9f 100644 --- a/esmvalcore/cmor/_fixes/shared.py +++ b/esmvalcore/cmor/_fixes/shared.py @@ -1,9 +1,12 @@ """Shared functions for fixes.""" +from __future__ import annotations + import logging import os from datetime import datetime, timedelta from functools import cache +from typing import TYPE_CHECKING import dask.array as da import iris @@ -11,11 +14,13 @@ import pandas as pd from cf_units import Unit from iris import NameConstraint -from iris.coords import Coord from scipy.interpolate import interp1d from esmvalcore.iris_helpers import date2num +if TYPE_CHECKING: + from iris.coords import Coord + logger = logging.getLogger(__name__) diff --git a/esmvalcore/cmor/_utils.py b/esmvalcore/cmor/_utils.py index c88a5b0b4e..da8eddd759 100644 --- a/esmvalcore/cmor/_utils.py +++ b/esmvalcore/cmor/_utils.py @@ -5,7 +5,7 @@ import logging from typing import TYPE_CHECKING -from esmvalcore.cmor.table import CMOR_TABLES, CoordinateInfo, VariableInfo +from esmvalcore.cmor.table import CMOR_TABLES if TYPE_CHECKING: from collections.abc import Sequence @@ -13,6 +13,8 @@ from iris.coords import Coord from iris.cube import Cube + from esmvalcore.cmor.table import CoordinateInfo, VariableInfo + logger = logging.getLogger(__name__) _ALTERNATIVE_GENERIC_LEV_COORDS = { diff --git a/esmvalcore/cmor/check.py b/esmvalcore/cmor/check.py index e2d40aa05c..a487eb70ee 100644 --- a/esmvalcore/cmor/check.py +++ b/esmvalcore/cmor/check.py @@ -22,7 +22,7 @@ 
_get_new_generic_level_coord, _get_simplified_calendar, ) -from esmvalcore.cmor.table import CoordinateInfo, get_var_info +from esmvalcore.cmor.table import get_var_info from esmvalcore.iris_helpers import has_unstructured_grid if TYPE_CHECKING: @@ -31,6 +31,8 @@ from iris.coords import Coord from iris.cube import Cube + from esmvalcore.cmor.table import CoordinateInfo + class CheckLevels(IntEnum): """Level of strictness of the checks.""" @@ -540,7 +542,10 @@ def _check_coords(self): self._check_coord_ranges(coords) - def _check_coord_ranges(self, coords: list[tuple[CoordinateInfo, Coord]]): + def _check_coord_ranges( + self, + coords: list[tuple[CoordinateInfo, Coord]], + ) -> None: """Check coordinate value are inside valid ranges.""" class Limit(NamedTuple): diff --git a/esmvalcore/cmor/fix.py b/esmvalcore/cmor/fix.py index cbe6aef4b0..4f66ffda25 100644 --- a/esmvalcore/cmor/fix.py +++ b/esmvalcore/cmor/fix.py @@ -9,9 +9,9 @@ import logging from collections import defaultdict -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any -from iris.cube import Cube, CubeList +from iris.cube import CubeList from esmvalcore.cmor._fixes.fix import Fix @@ -21,6 +21,7 @@ import ncdata import xarray as xr + from iris.cube import Cube from esmvalcore.config import Session @@ -37,7 +38,7 @@ def fix_file( # noqa: PLR0913 add_unique_suffix: bool = False, session: Session | None = None, frequency: str | None = None, - **extra_facets, + **extra_facets: Any, ) -> str | Path | xr.Dataset | ncdata.NcData: """Fix files before loading them into a :class:`~iris.cube.CubeList`. @@ -120,7 +121,7 @@ def fix_metadata( mip: str, frequency: str | None = None, session: Session | None = None, - **extra_facets, + **extra_facets: Any, ) -> CubeList: """Fix cube metadata if fixes are required. 
@@ -204,7 +205,7 @@ def fix_data( mip: str, frequency: str | None = None, session: Session | None = None, - **extra_facets, + **extra_facets: Any, ) -> Cube: """Fix cube data if fixes are required. diff --git a/esmvalcore/config/_config.py b/esmvalcore/config/_config.py index 121ee2b126..565ea4a6d4 100644 --- a/esmvalcore/config/_config.py +++ b/esmvalcore/config/_config.py @@ -94,7 +94,7 @@ def warn_if_old_extra_facets_exist() -> None: ) -def load_config_developer(cfg_file) -> dict: +def load_config_developer(cfg_file: Path) -> dict: """Read the developer's configuration file.""" with open(cfg_file, encoding="utf-8") as file: cfg = yaml.safe_load(file) diff --git a/esmvalcore/config/_config_object.py b/esmvalcore/config/_config_object.py index fcdddc9c37..d03c8409b6 100644 --- a/esmvalcore/config/_config_object.py +++ b/esmvalcore/config/_config_object.py @@ -276,7 +276,7 @@ class Session(ValidatedConfig): relative_cmor_log = Path("run", "cmor_log.txt") _relative_fixed_file_dir = Path("preproc", "fixed_files") - def __init__(self, config: dict, name: str = "session"): + def __init__(self, config: dict, name: str = "session") -> None: super().__init__(config) self.session_name: str | None = None self.set_session_name(name) @@ -293,7 +293,7 @@ def __repr__(self) -> str: def __str__(self) -> str: return repr(self) - def set_session_name(self, name: str = "session"): + def set_session_name(self, name: str = "session") -> None: """Set the name for the session. The `name` is used to name the session directory, e.g. 
diff --git a/esmvalcore/config/_config_validators.py b/esmvalcore/config/_config_validators.py index efb5e2538f..ca5045f976 100644 --- a/esmvalcore/config/_config_validators.py +++ b/esmvalcore/config/_config_validators.py @@ -5,11 +5,11 @@ import logging import os.path import warnings -from collections.abc import Callable, Iterable +from collections.abc import Iterable from functools import lru_cache, partial from importlib.resources import files as importlib_files from pathlib import Path -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Literal from packaging import version @@ -22,6 +22,8 @@ ) if TYPE_CHECKING: + from collections.abc import Callable + from esmvalcore.config._validated_config import ValidatedConfig logger = logging.getLogger(__name__) @@ -47,7 +49,11 @@ class ValidationError(ValueError): # to fit the needs of ESMValCore. Matplotlib is licenced under the terms of # the the 'Python Software Foundation License' # (https://www.python.org/psf/license) -def _make_type_validator(cls: Any, *, allow_none: bool = False) -> Any: +def _make_type_validator( + cls: Any, # noqa: ANN401 + *, + allow_none: bool = False, +) -> Callable: """Construct a type validator for `cls`. Return a validator that converts inputs to *cls* or raises (and @@ -361,7 +367,9 @@ def validate_extra_facets_dir(value): return validate_pathlist(value) -def validate_projects(value: Any) -> Any: +def validate_projects( + value: dict, +) -> dict[str, dict[str, Any]]: """Validate projects mapping.""" mapping = validate_dict(value) options_for_project: dict[str, Callable[[Any], Any]] = { @@ -444,8 +452,8 @@ def _handle_deprecation( # TODO: remove in v2.15.0 def deprecate_extra_facets_dir( validated_config: ValidatedConfig, - value: Any, - validated_value: Any, + value: str | Path, + validated_value: str | Path, ) -> None: """Deprecate ``extra_facets_dir`` option. 
@@ -481,8 +489,8 @@ def deprecate_extra_facets_dir( def deprecate_rootpath( validated_config: ValidatedConfig, - value: Any, - validated_value: Any, + value: dict, + validated_value: dict, ) -> None: """Deprecate ``rootpath`` option. @@ -508,8 +516,8 @@ def deprecate_rootpath( def deprecate_drs( validated_config: ValidatedConfig, # noqa: ARG001 - value: Any, # noqa: ARG001 - validated_value: Any, # noqa: ARG001 + value: dict, # noqa: ARG001 + validated_value: dict, # noqa: ARG001 ) -> None: """Deprecate ``drs`` option. @@ -529,8 +537,8 @@ def deprecate_drs( def deprecate_download_dir( validated_config: ValidatedConfig, # noqa: ARG001 - value: Any, # noqa: ARG001 - validated_value: Any, # noqa: ARG001 + value: str | Path, # noqa: ARG001 + validated_value: str | Path, # noqa: ARG001 ) -> None: """Deprecate ``download_dir`` option. @@ -550,8 +558,8 @@ def deprecate_download_dir( def deprecate_search_esgf( validated_config: ValidatedConfig, - value: Any, # noqa: ARG001 - validated_value: Any, + value: Literal["never", "when_missing", "always"], # noqa: ARG001 + validated_value: Literal["never", "when_missing", "always"], ) -> None: """Deprecate ``search_esgf`` option. 
diff --git a/esmvalcore/config/_dask.py b/esmvalcore/config/_dask.py index 6ffed159c4..10c2195101 100644 --- a/esmvalcore/config/_dask.py +++ b/esmvalcore/config/_dask.py @@ -1,9 +1,11 @@ """Configuration for Dask distributed.""" +from __future__ import annotations + import contextlib import importlib import logging -from collections.abc import Generator, Mapping +from collections.abc import Mapping from copy import deepcopy from typing import TYPE_CHECKING @@ -16,6 +18,8 @@ ) if TYPE_CHECKING: + from collections.abc import Generator + from distributed.deploy import Cluster logger = logging.getLogger(__name__) diff --git a/esmvalcore/config/_data_sources.py b/esmvalcore/config/_data_sources.py index 903ae7c5bf..d4f8ad80c3 100644 --- a/esmvalcore/config/_data_sources.py +++ b/esmvalcore/config/_data_sources.py @@ -1,16 +1,21 @@ """Module for configuring data sources.""" +from __future__ import annotations + import logging +from typing import TYPE_CHECKING import yaml import esmvalcore.esgf import esmvalcore.esgf.facets import esmvalcore.local -from esmvalcore.config import Session from esmvalcore.exceptions import InvalidConfigParameter, RecipeError from esmvalcore.io import load_data_sources -from esmvalcore.io.protocol import DataSource + +if TYPE_CHECKING: + from esmvalcore.config import Session + from esmvalcore.io.protocol import DataSource logger = logging.getLogger(__name__) diff --git a/esmvalcore/config/_diagnostics.py b/esmvalcore/config/_diagnostics.py index 16a5d7cfc9..53ee975f94 100644 --- a/esmvalcore/config/_diagnostics.py +++ b/esmvalcore/config/_diagnostics.py @@ -3,6 +3,7 @@ import logging import os from pathlib import Path +from typing import Self import yaml @@ -80,7 +81,7 @@ def __init__(self, *args, **kwargs): self.source_file = None @classmethod - def from_file(cls, filename: str): + def from_file(cls, filename: str) -> Self: """Load the reference tags used for provenance recording.""" if os.path.exists(filename): logger.debug("Loading tags 
from %s", filename) @@ -93,7 +94,7 @@ def from_file(cls, filename: str): logger.debug("No tags loaded, file %s not present", filename) return cls() - def set_tag_value(self, section: str, tag: str, value): + def set_tag_value(self, section: str, tag: str, value: str) -> None: """Set the value of a tag in a section. Parameters @@ -110,7 +111,7 @@ def set_tag_value(self, section: str, tag: str, value): self[section][tag] = value - def set_tag_values(self, tag_values: dict): + def set_tag_values(self, tag_values: dict[str, dict[str, str]]) -> None: """Update tags from dict. Parameters @@ -122,7 +123,7 @@ def set_tag_values(self, tag_values: dict): for tag, value in tags.items(): self.set_tag_value(section, tag, value) - def get_tag_value(self, section: str, tag: str): + def get_tag_value(self, section: str, tag: str) -> str: """Retrieve the value of a tag from a section. Parameters @@ -140,25 +141,23 @@ def get_tag_value(self, section: str, tag: str): if tag not in self[section]: postfix = f" of {self.source_file}" if self.source_file else "" msg = f"Tag '{tag}' does not exist in section '{section}'{postfix}" - raise ValueError( - msg, - ) + raise ValueError(msg) return self[section][tag] - def get_tag_values(self, section: str, tags: tuple): + def get_tag_values(self, section: str, tags: tuple) -> tuple[str, ...]: """Retrieve the values for a list of tags from a section. Parameters ---------- - section : str + section Name of the subsection - tags : tuple[str] or list[str] - List or tuple with tag names + tags + Tuple with tag names """ return tuple(self.get_tag_value(section, tag) for tag in tags) - def replace_tags_in_dict(self, dct: dict): + def replace_tags_in_dict(self, dct: dict) -> None: """Resolve tags and updates the given dict in-place. 
Tags are updated one level deep, and only if the corresponding diff --git a/esmvalcore/config/_logging.py b/esmvalcore/config/_logging.py index 89d1d59851..9332c133f2 100644 --- a/esmvalcore/config/_logging.py +++ b/esmvalcore/config/_logging.py @@ -1,18 +1,22 @@ """Configure logging.""" +from __future__ import annotations + import inspect import logging import logging.config import os import time -from collections.abc import Iterable from pathlib import Path -from typing import Literal +from typing import TYPE_CHECKING, Literal import yaml import esmvalcore.exceptions +if TYPE_CHECKING: + from collections.abc import Iterable + _WARNINGS_SHOWN_IN_MAIN_LOG = [ cls.__name__ for cls in vars(esmvalcore.exceptions).values() @@ -96,7 +100,7 @@ def _get_log_files( return log_files -def _update_stream_level(cfg: dict, level=None): +def _update_stream_level(cfg: dict, level: str | None = None) -> None: """Update the log level for the stream handlers.""" handlers = cfg["handlers"] diff --git a/esmvalcore/config/_validated_config.py b/esmvalcore/config/_validated_config.py index 624068c411..65d42cc5e7 100644 --- a/esmvalcore/config/_validated_config.py +++ b/esmvalcore/config/_validated_config.py @@ -4,10 +4,10 @@ import pprint import warnings -from collections.abc import Callable, Generator, Mapping, MutableMapping +from collections.abc import MutableMapping from contextlib import contextmanager from copy import deepcopy -from typing import Any, ClassVar +from typing import TYPE_CHECKING, Any, ClassVar from esmvalcore.exceptions import ( InvalidConfigParameter, @@ -16,6 +16,9 @@ from ._config_validators import ValidationError +if TYPE_CHECKING: + from collections.abc import Callable, Generator, Mapping + # The code for this class was take from matplotlib (v3.3) and modified to # fit the needs of ESMValCore. 
Matplotlib is licenced under the terms of @@ -57,7 +60,7 @@ class ValidatedConfig(MutableMapping): """ # validate values on the way in - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args, **kwargs): super().__init__() self._mapping: dict[str, Any] = {} self.update(*args, **kwargs) diff --git a/esmvalcore/dataset.py b/esmvalcore/dataset.py index 157bd69bb6..5c799d0843 100644 --- a/esmvalcore/dataset.py +++ b/esmvalcore/dataset.py @@ -19,7 +19,7 @@ from esmvalcore._recipe import check from esmvalcore._recipe.from_datasets import datasets_to_recipe from esmvalcore.cmor.table import _get_mips, _update_cmor_facets -from esmvalcore.config import CFG, Session +from esmvalcore.config import CFG from esmvalcore.config._config import ( get_activity, get_institutes, @@ -27,10 +27,7 @@ ) from esmvalcore.config._data_sources import _get_data_sources from esmvalcore.exceptions import InputFilesNotFound, RecipeError -from esmvalcore.local import ( - _dates_to_timerange, - _get_output_file, -) +from esmvalcore.local import _dates_to_timerange, _get_output_file from esmvalcore.preprocessor import preprocess if TYPE_CHECKING: @@ -38,6 +35,7 @@ from iris.cube import Cube + from esmvalcore.config import Session from esmvalcore.io.protocol import DataElement, DataSource from esmvalcore.preprocessor import PreprocessorItem from esmvalcore.typing import Facets, FacetValue @@ -673,7 +671,7 @@ def augment_facets(self) -> None: supplementary._augment_facets() # noqa: SLF001 @staticmethod - def _pattern_filter(patterns: Iterable[str], name) -> list[str]: + def _pattern_filter(patterns: Iterable[str], name: str) -> list[str]: """Get the subset of the list `patterns` that `name` matches.""" return [pat for pat in patterns if fnmatch.fnmatchcase(name, pat)] @@ -686,16 +684,16 @@ def _get_extra_facets(self) -> dict[str, Any]: .get(self["project"], {}) .get("extra_facets", {}) ) - dataset_names = self._pattern_filter(raw_extra_facets, self["dataset"]) + dataset_names = 
self._pattern_filter(raw_extra_facets, self["dataset"]) # type: ignore[arg-type] for dataset_name in dataset_names: mips = self._pattern_filter( raw_extra_facets[dataset_name], - self["mip"], + self["mip"], # type: ignore[arg-type] ) for mip in mips: variables = self._pattern_filter( raw_extra_facets[dataset_name][mip], - self["short_name"], + self["short_name"], # type: ignore[arg-type] ) for var in variables: facets = raw_extra_facets[dataset_name][mip][var] @@ -709,16 +707,16 @@ def _get_extra_facets(self) -> dict[str, Any]: self.facets["project"], tuple(self.session["extra_facets_dir"]), ) - dataset_names = self._pattern_filter(project_details, self["dataset"]) + dataset_names = self._pattern_filter(project_details, self["dataset"]) # type: ignore[arg-type] for dataset_name in dataset_names: mips = self._pattern_filter( project_details[dataset_name], - self["mip"], + self["mip"], # type: ignore[arg-type] ) for mip in mips: variables = self._pattern_filter( project_details[dataset_name][mip], - self["short_name"], + self["short_name"], # type: ignore[arg-type] ) for var in variables: facets = project_details[dataset_name][mip][var] @@ -927,7 +925,7 @@ def _expand_range(self, input_tag: str) -> list[FacetValue]: expanded: list[FacetValue] = [] regex = re.compile(r"\(\d+:\d+\)") - def expand_range(input_range) -> None: + def expand_range(input_range: str) -> None: match = regex.search(input_range) if match: start, end = match.group(0)[1:-1].split(":") @@ -948,7 +946,7 @@ def expand_range(input_range) -> None: raise RecipeError(msg) expanded.append(tag) else: - expand_range(tag) + expand_range(tag) # type: ignore[arg-type] return expanded diff --git a/esmvalcore/esgf/_download.py b/esmvalcore/esgf/_download.py index ce5f030735..bb565963b9 100644 --- a/esmvalcore/esgf/_download.py +++ b/esmvalcore/esgf/_download.py @@ -37,7 +37,7 @@ from collections.abc import Iterable import iris.cube - from pyesgf.search.results import FileResult + from pyesgf.search.results 
import FileResult, ResultSet from esmvalcore.typing import Facets @@ -331,7 +331,7 @@ def _get_facets(self, results): return facets @staticmethod - def _get_facets_from_dataset_id(results) -> Facets: + def _get_facets_from_dataset_id(results: ResultSet) -> Facets: """Read the facets from the `dataset_id`.""" # This reads the facets from the dataset_id because the facets # provided by ESGF are unreliable. @@ -610,7 +610,7 @@ def download(files, dest_folder=None, n_jobs=4): files = sorted(files) logger.info(get_download_message(files)) - def _download(file: ESGFFile): + def _download(file: ESGFFile) -> None: """Download file to dest_folder.""" file.download(dest_folder) diff --git a/esmvalcore/experimental/_logging.py b/esmvalcore/experimental/_logging.py index 00201909fc..d983cd37c4 100644 --- a/esmvalcore/experimental/_logging.py +++ b/esmvalcore/experimental/_logging.py @@ -1,12 +1,18 @@ """Logging utilities.""" +from __future__ import annotations + import logging from contextlib import contextmanager -from pathlib import Path +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Iterator + from pathlib import Path @contextmanager -def log_to_dir(drc: Path): +def log_to_dir(drc: Path) -> Iterator[None]: """Log messages to the specified directory.
This is a context manager to temporarily redirect the logging when diff --git a/esmvalcore/experimental/recipe.py b/esmvalcore/experimental/recipe.py index 9bb4f513cc..1da67e9ce1 100644 --- a/esmvalcore/experimental/recipe.py +++ b/esmvalcore/experimental/recipe.py @@ -1,20 +1,27 @@ """Recipe metadata.""" +from __future__ import annotations + import logging -import os import pprint import shutil from pathlib import Path +from typing import TYPE_CHECKING import yaml from esmvalcore._recipe.recipe import Recipe as RecipeEngine -from esmvalcore.config import CFG, Session +from esmvalcore.config import CFG from ._logging import log_to_dir from .recipe_info import RecipeInfo from .recipe_output import RecipeOutput +if TYPE_CHECKING: + import os + + from esmvalcore.config import Session + logger = logging.getLogger(__name__) @@ -29,7 +36,7 @@ class Recipe: Path to the recipe. """ - def __init__(self, path: os.PathLike): + def __init__(self, path: os.PathLike) -> None: self.path = Path(path) if not self.path.exists(): msg = f"Cannot find recipe: `{path}`." @@ -104,7 +111,7 @@ def run( self, task: str | None = None, session: Session | None = None, - ): + ) -> RecipeOutput: """Run the recipe. This function loads the recipe into the ESMValCore recipe format @@ -112,17 +119,17 @@ def run( Parameters ---------- - task : str + task Specify the name of the diagnostic or preprocessor to run a single task. - session : :obj:`Session`, optional + session Defines the config parameters and location where the recipe output will be stored. If ``None``, a new session will be started automatically. Returns ------- - output : dict + : Returns output of the recipe as instances of :obj:`OutputItem` grouped by diagnostic task. """ @@ -150,7 +157,7 @@ def get_output(self) -> RecipeOutput: Returns ------- - output : dict + output Returns output of the recipe as instances of :obj:`OutputFile` grouped by diagnostic task. 
""" @@ -163,6 +170,6 @@ def get_output(self) -> RecipeOutput: return RecipeOutput( task_output=task_output, - session=self.last_session, + session=self.last_session, # type: ignore[arg-type] info=self.info, ) diff --git a/esmvalcore/experimental/recipe_info.py b/esmvalcore/experimental/recipe_info.py index 1831d16dd8..e2c61edbe2 100644 --- a/esmvalcore/experimental/recipe_info.py +++ b/esmvalcore/experimental/recipe_info.py @@ -1,13 +1,22 @@ """Handles recipe metadata (under 'documentation' section).""" -import os +from __future__ import annotations + import textwrap from pathlib import Path +from typing import TYPE_CHECKING, Any, Self import yaml -from .recipe_metadata import Contributor, Project, Reference -from .templates import get_template +from esmvalcore.experimental.recipe_metadata import ( + Contributor, + Project, + Reference, +) +from esmvalcore.experimental.templates import get_template + +if TYPE_CHECKING: + import os class RecipeInfo: @@ -21,7 +30,11 @@ class RecipeInfo: Name of recipe file """ - def __init__(self, data, filename: os.PathLike | str): + def __init__( + self, + data: dict[str, Any], + filename: os.PathLike | str, + ) -> None: self.filename = Path(filename).name self.data = data self._authors: tuple[Contributor, ...] 
| None = None @@ -70,7 +83,7 @@ def _repr_html_(self) -> str: return self.render() @classmethod - def from_yaml(cls, path: str): + def from_yaml(cls, path: str) -> Self: """Return instance of 'RecipeInfo' from a recipe in yaml format.""" data = yaml.safe_load(Path(path).read_text(encoding="utf-8")) return cls(data, filename=path) diff --git a/esmvalcore/experimental/recipe_metadata.py b/esmvalcore/experimental/recipe_metadata.py index 6021d8ccf3..f674d33d49 100644 --- a/esmvalcore/experimental/recipe_metadata.py +++ b/esmvalcore/experimental/recipe_metadata.py @@ -1,5 +1,7 @@ """API for recipe metadata.""" +from __future__ import annotations + import pybtex from pybtex.database.input import bibtex @@ -23,7 +25,12 @@ class Contributor: ORCID url """ - def __init__(self, name: str, institute: str, orcid: str | None = None): + def __init__( + self, + name: str, + institute: str, + orcid: str | None = None, + ) -> None: self.name = name self.institute = institute self.orcid = orcid @@ -48,7 +55,7 @@ def _repr_markdown_(self) -> str: return str(self) @classmethod - def from_tag(cls, tag: str) -> "Contributor": + def from_tag(cls, tag: str) -> Contributor: """Return an instance of Contributor from a tag (``TAGS``). Parameters @@ -89,7 +96,7 @@ class Project: The project title. """ - def __init__(self, project: str): + def __init__(self, project: str) -> None: self.project = project def __repr__(self) -> str: @@ -101,7 +108,7 @@ def __str__(self) -> str: return f"{self.project}" @classmethod - def from_tag(cls, tag: str) -> "Project": + def from_tag(cls, tag: str) -> Project: """Return an instance of Project from a tag (``TAGS``). Parameters @@ -127,7 +134,7 @@ class Reference: If the bibtex file contains more than 1 entry. 
""" - def __init__(self, filename: str): + def __init__(self, filename: str) -> None: parser = bibtex.Parser(strict=False) bib_data = parser.parse_file(filename) @@ -145,7 +152,7 @@ def __init__(self, filename: str): self._filename = filename @classmethod - def from_tag(cls, tag: str) -> "Reference": + def from_tag(cls, tag: str) -> Reference: """Return an instance of Reference from a bibtex tag. Parameters diff --git a/esmvalcore/experimental/recipe_output.py b/esmvalcore/experimental/recipe_output.py index caffe01818..d70902ba7a 100644 --- a/esmvalcore/experimental/recipe_output.py +++ b/esmvalcore/experimental/recipe_output.py @@ -1,5 +1,7 @@ """API for handing recipe output.""" +from __future__ import annotations + import base64 import getpass import logging @@ -7,15 +9,19 @@ import sys from collections.abc import Mapping, Sequence from pathlib import Path +from typing import TYPE_CHECKING, Any, Self import iris import xarray as xr from esmvalcore.config._config import TASKSEP +from esmvalcore.experimental.recipe_info import RecipeInfo +from esmvalcore.experimental.recipe_metadata import Contributor, Reference +from esmvalcore.experimental.templates import get_template -from .recipe_info import RecipeInfo -from .recipe_metadata import Contributor, Reference -from .templates import get_template +if TYPE_CHECKING: + import esmvalcore._task + import esmvalcore.config logger = logging.getLogger(__name__) @@ -31,7 +37,7 @@ class TaskOutput: Mapping of the filenames with the associated attributes. 
""" - def __init__(self, name: str, files: dict): + def __init__(self, name: str, files: dict) -> None: self.name = name self.title = name.replace("_", " ").replace(TASKSEP, ": ").title() self.files = tuple( @@ -55,7 +61,7 @@ def __len__(self): """Return number of files.""" return len(self.files) - def __getitem__(self, index: int): + def __getitem__(self, index: int) -> OutputFile: """Get item indexed by `index`.""" return self.files[index] @@ -70,11 +76,8 @@ def data_files(self) -> tuple: return tuple(item for item in self.files if item.kind == "data") @classmethod - def from_task(cls, task) -> "TaskOutput": - """Create an instance of `TaskOutput` from a Task. - - Where task is an instance of `esmvalcore._task.BaseTask`. - """ + def from_task(cls, task: esmvalcore._task.BaseTask) -> TaskOutput: + """Create an instance of `TaskOutput` from a Task.""" product_attributes = task.get_product_attributes() return cls(name=task.name, files=product_attributes) @@ -114,18 +117,18 @@ class RecipeOutput(Mapping): Parameters ---------- - task_output : dict + task_output Dictionary with recipe output grouped by task name. Each task value is a mapping of the filenames with the product attributes. Attributes ---------- - diagnostics : dict + diagnostics Dictionary with recipe output grouped by diagnostic. - info : RecipeInfo - The recipe used to create the output. - session : esmvalcore.config.Session + session The session used to run the recipe. + info + The recipe used to create the output. 
""" FILTER_ATTRS: tuple = ( @@ -135,7 +138,12 @@ class RecipeOutput(Mapping): "long_names", ) - def __init__(self, task_output: dict, session=None, info=None): + def __init__( + self, + task_output: dict[str, dict[str, Any]], + session: esmvalcore.config.Session, + info: RecipeInfo, + ) -> None: self._raw_task_output = task_output self._task_output = {} self.diagnostics = {} @@ -197,7 +205,7 @@ def __repr__(self): """Return canonical string representation.""" return "\n".join(repr(item) for item in self._task_output.values()) - def __getitem__(self, key: str): + def __getitem__(self, key: str) -> TaskOutput: """Get task indexed by `key`.""" return self._task_output[key] @@ -210,8 +218,8 @@ def __len__(self): return len(self._task_output) @classmethod - def from_core_recipe_output(cls, recipe_output: dict): - """Construct instance from `_recipe.Recipe` output. + def from_core_recipe_output(cls, recipe_output: dict) -> Self: + """Construct instance from `esmvalcore._recipe.recipe.Recipe.get_output`. The core recipe format is not directly compatible with the API. 
This constructor converts the raw recipe dict to :obj:`RecipeInfo` @@ -219,7 +227,7 @@ def from_core_recipe_output(cls, recipe_output: dict): Parameters ---------- recipe_output : dict - Output from `_recipe.Recipe.get_product_output` + Output from `esmvalcore._recipe.recipe.Recipe.get_output` """ task_output = recipe_output["task_output"] recipe_data = recipe_output["recipe_data"] @@ -321,7 +329,7 @@ class OutputFile: kind: str | None = None - def __init__(self, path: str, attributes: dict | None = None): + def __init__(self, path: str, attributes: dict | None = None) -> None: if not attributes: attributes = {} @@ -358,14 +366,18 @@ def references(self) -> tuple: self._references = tuple(Reference.from_tag(tag) for tag in tags) return self._references - def _get_derived_path(self, append: str, suffix: str | None = None): + def _get_derived_path( + self, + append: str, + suffix: str | None = None, + ) -> Path: """Return path of related files. Parameters ---------- - append : str + append Add this string to the stem of the path. - suffix : str + suffix The file extension to use (i.e. `.txt`) Returns @@ -396,7 +408,7 @@ def create( cls, path: str, attributes: dict | None = None, - ) -> "OutputFile": + ) -> OutputFile: """Construct new instances of OutputFile. Chooses a derived class if suitable. 
diff --git a/esmvalcore/experimental/utils.py b/esmvalcore/experimental/utils.py index c24525f110..dc2ba5dea5 100644 --- a/esmvalcore/experimental/utils.py +++ b/esmvalcore/experimental/utils.py @@ -1,19 +1,23 @@ """ESMValCore utilities.""" -import os +from __future__ import annotations + import re from pathlib import Path -from re import Pattern +from typing import TYPE_CHECKING from esmvalcore.config._diagnostics import DIAGNOSTICS +from esmvalcore.experimental.recipe import Recipe -from .recipe import Recipe +if TYPE_CHECKING: + import os + from re import Pattern class RecipeList(list): """Container for recipes.""" - def find(self, query: Pattern[str]): + def find(self, query: Pattern[str]) -> RecipeList: """Search for recipes matching the search query or pattern. Searches in the description, authors and project information fields. diff --git a/esmvalcore/io/__init__.py b/esmvalcore/io/__init__.py index 13022f8bf1..ef6e056282 100644 --- a/esmvalcore/io/__init__.py +++ b/esmvalcore/io/__init__.py @@ -44,11 +44,16 @@ :attr:`esmvalcore.io.protocol.DataSource.priority` is chosen. """ +from __future__ import annotations + import importlib import logging +from typing import TYPE_CHECKING + +import esmvalcore.io.protocol -from esmvalcore.config import Session -from esmvalcore.io.protocol import DataSource +if TYPE_CHECKING: + from esmvalcore.config import Session logger = logging.getLogger(__name__) @@ -56,7 +61,7 @@ def load_data_sources( session: Session, project: str | None = None, -) -> list[DataSource]: +) -> list[esmvalcore.io.protocol.DataSource]: """Get the list of available data sources. If no ``priority`` is configured for a data source, the default priority @@ -71,7 +76,7 @@ def load_data_sources( Returns ------- - :obj:`list` of :obj:`DataSource`: + : A list of available data sources. Raises @@ -80,7 +85,7 @@ def load_data_sources( If the project or its settings are not found in the configuration. 
""" - data_sources: list[DataSource] = [] + data_sources: list[esmvalcore.io.protocol.DataSource] = [] if project is not None and project not in session["projects"]: msg = f"Unknown project '{project}', please configure it under 'projects'." raise ValueError(msg) @@ -102,7 +107,7 @@ def load_data_sources( priority=priority, **kwargs, ) - if not isinstance(data_source, DataSource): + if not isinstance(data_source, esmvalcore.io.protocol.DataSource): msg = ( "Expected a data source of type `esmvalcore.io.protocol.DataSource`, " f"but your configuration for project '{project_}' contains " diff --git a/esmvalcore/io/protocol.py b/esmvalcore/io/protocol.py index 6f7108c2a7..37239eef89 100644 --- a/esmvalcore/io/protocol.py +++ b/esmvalcore/io/protocol.py @@ -11,12 +11,16 @@ """ -from collections.abc import Iterable -from typing import Any, Protocol, runtime_checkable +from __future__ import annotations -import iris.cube +from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable -from esmvalcore.typing import FacetValue +if TYPE_CHECKING: + from collections.abc import Iterable + + import iris.cube + + from esmvalcore.typing import FacetValue @runtime_checkable diff --git a/esmvalcore/iris_helpers.py b/esmvalcore/iris_helpers.py index 451720baf8..dfba3ca503 100644 --- a/esmvalcore/iris_helpers.py +++ b/esmvalcore/iris_helpers.py @@ -575,7 +575,7 @@ def ignore_warnings_context( def _get_attribute( data: ncdata.NcData | ncdata.NcVariable | xr.Dataset | xr.DataArray, attribute_name: str, -) -> Any: +) -> Any: # noqa: ANN401 """Get attribute from an ncdata or xarray object.""" if isinstance(data, ncdata.NcData | ncdata.NcVariable): attribute = data.attributes[attribute_name].value diff --git a/esmvalcore/local.py b/esmvalcore/local.py index 2f8ace30db..bb0b2049f1 100644 --- a/esmvalcore/local.py +++ b/esmvalcore/local.py @@ -56,7 +56,7 @@ import iris.fileformats.cf import isodate from cf_units import Unit -from netCDF4 import Dataset, Variable +from netCDF4 import 
Dataset import esmvalcore.io.protocol from esmvalcore.config import CFG @@ -67,6 +67,8 @@ if TYPE_CHECKING: from collections.abc import Iterable + from netCDF4 import Variable + from esmvalcore.typing import Facets, FacetValue logger = logging.getLogger(__name__) @@ -551,7 +553,7 @@ def __post_init__(self) -> None: self.rootpath = Path(os.path.expandvars(self.rootpath)).expanduser() self._regex_pattern = self._templates_to_regex() - def _get_glob_patterns(self, **facets) -> list[Path]: + def _get_glob_patterns(self, **facets: FacetValue) -> list[Path]: """Compose the globs that will be used to look for files.""" dirname_globs = _replace_tags(self.dirname_template, facets) filename_globs = _replace_tags(self.filename_template, facets) @@ -561,7 +563,7 @@ def _get_glob_patterns(self, **facets) -> list[Path]: for f in filename_globs ) - def find_data(self, **facets) -> list[LocalFile]: + def find_data(self, **facets: FacetValue) -> list[LocalFile]: """Find data locally. Parameters @@ -722,7 +724,7 @@ class DataSource(LocalDataSource): Please use :class:`esmvalcore.local.LocalDataSource` instead. """ - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args, **kwargs): msg = ( "The 'esmvalcore.local.LocalDataSource' class is deprecated and will be " "removed in version 2.16.0. 
Please use 'esmvalcore.local.LocalDataSource'" @@ -735,7 +737,7 @@ def regex_pattern(self) -> str: """Get regex pattern that can be used to extract facets from paths.""" return self._regex_pattern - def get_glob_patterns(self, **facets) -> list[Path]: + def get_glob_patterns(self, **facets: FacetValue) -> list[Path]: """Compose the globs that will be used to look for files.""" return self._get_glob_patterns(**facets) @@ -743,7 +745,7 @@ def path2facets(self, path: Path, add_timerange: bool) -> dict[str, str]: """Extract facets from path.""" return self._path2facets(path, add_timerange) - def find_files(self, **facets) -> list[LocalFile]: + def find_files(self, **facets: FacetValue) -> list[LocalFile]: """Find files.""" return self.find_data(**facets) @@ -1028,7 +1030,7 @@ def _get_attr_from_field_coord( ncfield: iris.fileformats.cf.CFVariable, coord_name: str | None, attr: str, -) -> Any: +) -> Any: # noqa: ANN401 """Get attribute from netCDF field coordinate.""" if coord_name is not None: attrs = ncfield.cf_group[coord_name].cf_attrs() diff --git a/esmvalcore/preprocessor/__init__.py b/esmvalcore/preprocessor/__init__.py index 8b48058d64..4cd30efa42 100644 --- a/esmvalcore/preprocessor/__init__.py +++ b/esmvalcore/preprocessor/__init__.py @@ -8,7 +8,7 @@ from pprint import pformat from typing import TYPE_CHECKING, Any -from iris.cube import Cube, CubeList +from iris.cube import Cube from esmvalcore._provenance import TrackedFile from esmvalcore._task import BaseTask @@ -107,6 +107,7 @@ import prov.model from dask.delayed import Delayed + from iris.cube import CubeList from esmvalcore.dataset import Dataset @@ -374,7 +375,7 @@ def _get_multi_model_settings( def _run_preproc_function( function: Callable, items: PreprocessorItem | Sequence[PreprocessorItem], - kwargs: Any, + kwargs: dict[str, Any], input_files: Sequence[DataElement] | None = None, ) -> PreprocessorItem | Sequence[PreprocessorItem]: """Run preprocessor function.""" @@ -728,7 +729,7 @@ def __init__( 
self.debug = debug self.write_ncl_interface = write_ncl_interface - def _run(self, _) -> list[str]: # noqa: C901,PLR0912 + def _run(self, _: list[str]) -> list[str]: # noqa: C901,PLR0912 """Run the preprocessor.""" for product in self.products: product.activity = self.activity diff --git a/esmvalcore/preprocessor/_area.py b/esmvalcore/preprocessor/_area.py index 07ca9d25cc..0c7e08ff2a 100644 --- a/esmvalcore/preprocessor/_area.py +++ b/esmvalcore/preprocessor/_area.py @@ -8,16 +8,17 @@ import logging from pathlib import Path -from typing import TYPE_CHECKING, Literal +from typing import TYPE_CHECKING, Any, Literal import fiona +import fiona.collection import iris import numpy as np import shapely import shapely.ops from dask import array as da from iris.coords import AuxCoord -from iris.cube import Cube, CubeList +from iris.cube import CubeList from iris.exceptions import CoordinateNotFoundError from esmvalcore.iris_helpers import ignore_iris_vague_metadata_warnings @@ -40,6 +41,8 @@ if TYPE_CHECKING: from collections.abc import Iterable + from iris.cube import Cube + from esmvalcore.config import Session logger = logging.getLogger(__name__) @@ -211,7 +214,7 @@ def zonal_statistics( cube: Cube, operator: str, normalize: Literal["subtract", "divide"] | None = None, - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute zonal statistics. @@ -263,7 +266,7 @@ def meridional_statistics( cube: Cube, operator: str, normalize: Literal["subtract", "divide"] | None = None, - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute meridional statistics. @@ -318,7 +321,7 @@ def area_statistics( cube: Cube, operator: str, normalize: Literal["subtract", "divide"] | None = None, - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Apply a statistical operator in the horizontal plane. 
@@ -477,7 +480,7 @@ def _crop_cube( def _select_representative_point( - shape, + shape: shapely.geometry.base.BaseGeometry, lon: np.ndarray, lat: np.ndarray, ) -> np.ndarray: @@ -519,7 +522,10 @@ def _correct_coords_from_shapefile( return lon, lat -def _process_ids(geometries, ids: list | dict | None) -> tuple: +def _process_ids( + geometries: fiona.collection.Collection, + ids: list | dict | None, +) -> tuple: """Read requested IDs and ID keys.""" # If ids is a dict, it needs to have length 1 and all geometries needs to # have the requested attribute key @@ -529,9 +535,7 @@ def _process_ids(geometries, ids: list | dict | None) -> tuple: f"If `ids` is given as dict, it needs exactly one entry, got " f"{ids}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) key = next(iter(ids.keys())) for geometry in geometries: if key not in geometry["properties"]: @@ -539,9 +543,7 @@ def _process_ids(geometries, ids: list | dict | None) -> tuple: f"Geometry {dict(geometry['properties'])} does not have " f"requested attribute {key}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) id_keys: tuple[str, ...] 
= (key,) ids = ids[key] @@ -559,7 +561,7 @@ def _process_ids(geometries, ids: list | dict | None) -> tuple: def _get_requested_geometries( - geometries, + geometries: fiona.collection.Collection, ids: list | dict | None, shapefile: Path, ) -> dict[str, dict]: @@ -594,9 +596,7 @@ def _get_requested_geometries( f"Requested shapes {missing} not found in shapefile " f"{shapefile}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) return requested_geometries diff --git a/esmvalcore/preprocessor/_compare_with_refs.py b/esmvalcore/preprocessor/_compare_with_refs.py index b58c53aab4..f0da4b4521 100644 --- a/esmvalcore/preprocessor/_compare_with_refs.py +++ b/esmvalcore/preprocessor/_compare_with_refs.py @@ -4,7 +4,7 @@ import logging from functools import partial -from typing import TYPE_CHECKING, Literal +from typing import TYPE_CHECKING, Any, Literal import dask import dask.array as da @@ -12,7 +12,7 @@ import iris.analysis.stats import numpy as np from iris.common.metadata import CubeMetadata -from iris.coords import CellMethod, Coord +from iris.coords import CellMethod from iris.cube import Cube, CubeList from scipy.stats import wasserstein_distance @@ -33,6 +33,8 @@ if TYPE_CHECKING: from collections.abc import Iterable + from iris.coords import Coord + from esmvalcore.preprocessor import PreprocessorFile logger = logging.getLogger(__name__) @@ -165,7 +167,10 @@ def bias( return output_products -def _get_ref(products, ref_tag: str) -> tuple[Cube, PreprocessorFile]: +def _get_ref( + products: Iterable[PreprocessorFile], + ref_tag: str, +) -> tuple[Cube, PreprocessorFile]: """Get reference cube and product.""" ref_products = [] for product in products: @@ -230,7 +235,7 @@ def distance_metric( reference: Cube | None = None, coords: Iterable[Coord] | Iterable[str] | None = None, keep_reference_dataset: bool = True, - **kwargs, + **kwargs: Any, ) -> set[PreprocessorFile] | CubeList: r"""Calculate distance metrics. 
@@ -385,7 +390,7 @@ def _calculate_metric( reference: Cube, metric: MetricType, coords: Iterable[Coord] | Iterable[str] | None, - **kwargs, + **kwargs: Any, ) -> Cube: """Calculate metric for a single cube relative to a reference cube.""" # Make sure that dimensional metadata of data and ref data is compatible @@ -475,7 +480,7 @@ def _calculate_pearsonr( coords: Iterable[Coord] | Iterable[str], *, weighted: bool, - **kwargs, + **kwargs: Any, ) -> tuple[np.ndarray | da.Array, CubeMetadata]: """Calculate Pearson correlation coefficient.""" # Here, we want to use common_mask=True in iris.analysis.stats.pearsonr diff --git a/esmvalcore/preprocessor/_concatenate.py b/esmvalcore/preprocessor/_concatenate.py index f3425c05ec..f3142ca18a 100644 --- a/esmvalcore/preprocessor/_concatenate.py +++ b/esmvalcore/preprocessor/_concatenate.py @@ -59,13 +59,13 @@ def __init__(self, time: DimCoord) -> None: self.times = time.core_points() self.units = str(time.units) - def __getattr__(self, name: str) -> Any: + def __getattr__(self, name: str) -> Any: # noqa: ANN401 return getattr(self.times, name) def __len__(self) -> int: return len(self.times) - def __getitem__(self, key: Any) -> Any: + def __getitem__(self, key: Any) -> Any: # noqa: ANN401 return self.times[key] @@ -219,7 +219,7 @@ def _concatenate_cubes_by_experiment(cubes: Sequence[Cube]) -> Sequence[Cube]: project["exp"] for project in FACETS.values() if "exp" in project } - def get_exp(cube: Cube) -> Any: + def get_exp(cube: Cube) -> str: for key in exp_facet_names: if key in cube.attributes: return cube.attributes[key] diff --git a/esmvalcore/preprocessor/_dask_progress.py b/esmvalcore/preprocessor/_dask_progress.py index 6010f76b41..58edde625d 100644 --- a/esmvalcore/preprocessor/_dask_progress.py +++ b/esmvalcore/preprocessor/_dask_progress.py @@ -6,10 +6,11 @@ import logging import threading import time -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any import dask.diagnostics import distributed 
+import distributed.diagnostics.progressbar import rich.progress from esmvalcore.config import CFG @@ -89,7 +90,7 @@ class RichDistributedProgressBar( # Disable warnings about design choices that have been made in the base class. # pylint: disable=too-few-public-methods,unused-argument,useless-suppression - def __init__(self, keys) -> None: + def __init__(self, keys) -> None: # noqa: ANN001 self.progress = rich.progress.Progress( rich.progress.TaskProgressColumn(), rich.progress.BarColumn(bar_width=80), @@ -106,7 +107,7 @@ def _draw_bar( self, remaining: int, all: int, # noqa: A002 # pylint: disable=redefined-builtin - **kwargs, # noqa: ARG002 + **kwargs: Any, # noqa: ARG002 ) -> None: completed = all - remaining self.progress.update(self.task_id, completed=completed, total=all) @@ -163,7 +164,7 @@ class DistributedProgressLogger( def __init__( self, - keys, + keys, # noqa: ANN001 log_interval: str | float = "1s", description: str = "", ) -> None: @@ -179,7 +180,7 @@ def _draw_bar( self, remaining: int, all: int, # noqa: A002 # pylint: disable=redefined-builtin - **kwargs, # noqa: ARG002 + **kwargs: Any, # noqa: ARG002 ) -> None: frac = (1 - remaining / all) if all else 1.0 if ( diff --git a/esmvalcore/preprocessor/_derive/__init__.py b/esmvalcore/preprocessor/_derive/__init__.py index cbf138e2d7..3a25d1f9a4 100644 --- a/esmvalcore/preprocessor/_derive/__init__.py +++ b/esmvalcore/preprocessor/_derive/__init__.py @@ -1,16 +1,23 @@ """Automatically derive variables.""" +from __future__ import annotations + import importlib import logging from copy import deepcopy from pathlib import Path +from typing import TYPE_CHECKING -from cf_units import Unit -from iris.cube import Cube, CubeList +from iris.cube import CubeList -from esmvalcore.preprocessor._derive._baseclass import DerivedVariableBase from esmvalcore.preprocessor._units import convert_units -from esmvalcore.typing import Facets + +if TYPE_CHECKING: + from cf_units import Unit + from iris.cube import Cube + + 
from esmvalcore.preprocessor._derive._baseclass import DerivedVariableBase + from esmvalcore.typing import Facets logger = logging.getLogger(__name__) diff --git a/esmvalcore/preprocessor/_derive/_baseclass.py b/esmvalcore/preprocessor/_derive/_baseclass.py index b8d8bc27da..61d3187acf 100644 --- a/esmvalcore/preprocessor/_derive/_baseclass.py +++ b/esmvalcore/preprocessor/_derive/_baseclass.py @@ -1,10 +1,14 @@ """Contains the base class for derived variables.""" +from __future__ import annotations + from abc import abstractmethod +from typing import TYPE_CHECKING -from iris.cube import Cube, CubeList +if TYPE_CHECKING: + from iris.cube import Cube, CubeList -from esmvalcore.typing import Facets + from esmvalcore.typing import Facets class DerivedVariableBase: diff --git a/esmvalcore/preprocessor/_derive/ohc.py b/esmvalcore/preprocessor/_derive/ohc.py index d9105ffe52..d4db598857 100644 --- a/esmvalcore/preprocessor/_derive/ohc.py +++ b/esmvalcore/preprocessor/_derive/ohc.py @@ -1,14 +1,20 @@ """Derivation of variable `ohc`.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + import iris from cf_units import Unit from iris import Constraint -from iris.cube import Cube, CubeList - -from esmvalcore.typing import Facets from ._baseclass import DerivedVariableBase +if TYPE_CHECKING: + from iris.cube import Cube, CubeList + + from esmvalcore.typing import Facets + RHO_CP = iris.coords.AuxCoord(4.09169e6, units=Unit("kg m-3 J kg-1 K-1")) diff --git a/esmvalcore/preprocessor/_derive/qep.py b/esmvalcore/preprocessor/_derive/qep.py index 407aaba1e0..e755812b24 100644 --- a/esmvalcore/preprocessor/_derive/qep.py +++ b/esmvalcore/preprocessor/_derive/qep.py @@ -1,12 +1,18 @@ """Derivation of variable `qep`.""" -from iris import Constraint -from iris.cube import Cube, CubeList +from __future__ import annotations + +from typing import TYPE_CHECKING -from esmvalcore.typing import Facets +from iris import Constraint from ._baseclass import 
DerivedVariableBase +if TYPE_CHECKING: + from iris.cube import Cube, CubeList + + from esmvalcore.typing import Facets + class DerivedVariable(DerivedVariableBase): """Derivation of variable `qep`.""" diff --git a/esmvalcore/preprocessor/_derive/vegfrac.py b/esmvalcore/preprocessor/_derive/vegfrac.py index 419ae3878c..381c8f741f 100644 --- a/esmvalcore/preprocessor/_derive/vegfrac.py +++ b/esmvalcore/preprocessor/_derive/vegfrac.py @@ -1,15 +1,22 @@ """Derivation of variable `vegFrac`.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + import dask.array as da import iris from iris import NameConstraint -from iris.cube import Cube, CubeList from esmvalcore.preprocessor._regrid import regrid -from esmvalcore.typing import Facets from ._baseclass import DerivedVariableBase +if TYPE_CHECKING: + from iris.cube import Cube, CubeList + + from esmvalcore.typing import Facets + class DerivedVariable(DerivedVariableBase): """Derivation of variable `vegFrac`.""" diff --git a/esmvalcore/preprocessor/_io.py b/esmvalcore/preprocessor/_io.py index 229854cd75..57a79bf0d6 100644 --- a/esmvalcore/preprocessor/_io.py +++ b/esmvalcore/preprocessor/_io.py @@ -205,7 +205,7 @@ def save( # noqa: C901 compress: bool = False, alias: str = "", compute: bool = True, - **kwargs, + **kwargs: Any, ) -> Delayed | None: """Save iris cubes to file. 
diff --git a/esmvalcore/preprocessor/_mask.py b/esmvalcore/preprocessor/_mask.py index 1f363096ee..790a58b3bd 100644 --- a/esmvalcore/preprocessor/_mask.py +++ b/esmvalcore/preprocessor/_mask.py @@ -14,22 +14,25 @@ import cartopy.io.shapereader as shpreader import dask.array as da import iris +import iris.cube import iris.util import numpy as np import shapely.vectorized as shp_vect from iris.analysis import Aggregator -from iris.util import rolling_window from esmvalcore.iris_helpers import ignore_iris_vague_metadata_warnings -from esmvalcore.preprocessor._shared import ( - apply_mask, +from esmvalcore.preprocessor._shared import apply_mask +from esmvalcore.preprocessor._supplementary_vars import ( + register_supplementaries, ) -from ._supplementary_vars import register_supplementaries - if TYPE_CHECKING: + from collections.abc import Sequence + from iris.cube import Cube + from esmvalcore.preprocessor import PreprocessorFile + logger = logging.getLogger(__name__) @@ -217,7 +220,10 @@ def mask_landseaice(cube: Cube, mask_out: Literal["landsea", "ice"]) -> Cube: return cube -def mask_glaciated(cube, mask_out: str = "glaciated"): +def mask_glaciated( + cube: iris.cube.Cube, + mask_out: str = "glaciated", +) -> iris.cube.Cube: """Mask out glaciated areas. It applies a Natural Earth mask. Note that for computational reasons @@ -352,7 +358,7 @@ def count_spells( data: np.ndarray | da.Array, threshold: float | None, axis: int, - spell_length, + spell_length: int, ) -> np.ndarray | da.Array: # Copied from: # https://scitools-iris.readthedocs.io/en/stable/generated/gallery/general/plot_custom_aggregation.html @@ -376,11 +382,11 @@ def count_spells( threshold: threshold point for 'significant' datapoints. - axis: int + axis: number of the array dimension mapping the time sequences. (Can also be negative, e.g. '-1' means last dimension) - spell_length: int + spell_length: number of consecutive times at which value > threshold to "count". 
Returns @@ -406,7 +412,7 @@ def count_spells( # where m is a float ############################################################### with ignore_iris_vague_metadata_warnings(): - hit_windows = rolling_window( + hit_windows = iris.util.rolling_window( data_hits, window=spell_length, step=spell_length, @@ -616,11 +622,11 @@ def mask_multimodel(products): def mask_fillvalues( - products, + products: Sequence[PreprocessorFile], threshold_fraction: float, min_value: float | None = None, time_window: int = 1, -): +) -> Sequence[PreprocessorFile]: """Compute and apply a multi-dataset fillvalues mask. Construct the mask that fills a certain time window with missing values @@ -632,7 +638,7 @@ def mask_fillvalues( Parameters ---------- - products: iris.cube.Cube + products: data products to be masked. threshold_fraction: @@ -648,7 +654,7 @@ def mask_fillvalues( Returns ------- - iris.cube.Cube + : Masked iris cubes. Raises diff --git a/esmvalcore/preprocessor/_multimodel.py b/esmvalcore/preprocessor/_multimodel.py index 974090022d..f15193db4b 100644 --- a/esmvalcore/preprocessor/_multimodel.py +++ b/esmvalcore/preprocessor/_multimodel.py @@ -14,11 +14,13 @@ from datetime import datetime from functools import reduce from pprint import pformat -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any import cf_units import iris -import iris.coord_categorisation +import iris.analysis +import iris.coords +import iris.cube import numpy as np from iris.coords import DimCoord from iris.cube import Cube, CubeList @@ -494,8 +496,8 @@ def _compute_eager( cubes: list, *, operator: iris.analysis.Aggregator, - **kwargs, -): + **kwargs: Any, +) -> iris.cube.Cube: """Compute statistics one slice at a time.""" _ = [cube.data for cube in cubes] # make sure the cubes' data are realized @@ -532,8 +534,8 @@ def _compute( cube: iris.cube.Cube, *, operator: iris.analysis.Aggregator, - **kwargs, -): + **kwargs: Any, +) -> iris.cube.Cube: """Compute statistic.""" # This will always 
return a masked array with ignore_iris_vague_metadata_warnings(): @@ -701,7 +703,7 @@ def multi_model_statistics( products: set[PreprocessorFile] | Iterable[Cube], span: str, statistics: list[str | dict], - output_products=None, + output_products: dict[str, PreprocessorFile] | None = None, groupby: tuple | None = None, keep_input_datasets: bool = True, ignore_scalar_coords: bool = False, @@ -781,7 +783,7 @@ def multi_model_statistics( additional keyword arguments, e.g., ``[{'operator': 'percentile', 'percent': 20}]``. All supported options are are given in :ref:`this table `. - output_products: dict + output_products: For internal use only. A dict with statistics names as keys and preprocessorfiles as values. If products are passed as input, the statistics cubes will be assigned to these output products. @@ -822,7 +824,8 @@ def multi_model_statistics( # Avoid circular input: https://stackoverflow.com/q/16964467 statistics_products = set() for group, input_prods in _group_products(products, by_key=groupby): - sub_output_products = output_products[group] + # Assume that output_products is not None if products are PreprocessorFiles. + sub_output_products = output_products[group] # type: ignore[index] # Compute statistics on a single group group_statistics = _multiproduct_statistics( @@ -850,7 +853,7 @@ def multi_model_statistics( def ensemble_statistics( products: set[PreprocessorFile] | Iterable[Cube], statistics: list[str | dict], - output_products, + output_products: dict[str, PreprocessorFile] | None, span: str = "overlap", ignore_scalar_coords: bool = False, ) -> dict | set: @@ -871,7 +874,7 @@ def ensemble_statistics( additional keyword arguments, e.g., ``[{'operator': 'percentile', 'percent': 20}]``. All supported options are are given in :ref:`this table `. - output_products: dict + output_products: For internal use only. A dict with statistics names as keys and preprocessorfiles as values. 
If products are passed as input, the statistics cubes will be assigned to these output products. diff --git a/esmvalcore/preprocessor/_other.py b/esmvalcore/preprocessor/_other.py index 6568490e1a..972f93acbb 100644 --- a/esmvalcore/preprocessor/_other.py +++ b/esmvalcore/preprocessor/_other.py @@ -10,7 +10,7 @@ import dask.array as da import iris.analysis import numpy as np -from iris.coords import Coord, DimCoord +from iris.coords import DimCoord from iris.cube import Cube from iris.exceptions import CoordinateMultiDimError @@ -31,6 +31,8 @@ if TYPE_CHECKING: from collections.abc import Iterable, Sequence + from iris.coords import Coord + from esmvalcore.cmor.table import VariableInfo logger = logging.getLogger(__name__) @@ -563,7 +565,7 @@ def _get_histogram_cube( coords: Iterable[Coord] | Iterable[str], bin_edges: np.ndarray, normalization: Literal["sum", "integral"] | None, -): +) -> Cube: """Get cube with correct metadata for histogram.""" # Calculate bin centers using 2-window running mean and get corresponding # coordinate diff --git a/esmvalcore/preprocessor/_regrid.py b/esmvalcore/preprocessor/_regrid.py index 11e19b9736..486b831e79 100644 --- a/esmvalcore/preprocessor/_regrid.py +++ b/esmvalcore/preprocessor/_regrid.py @@ -20,7 +20,11 @@ import numpy as np import stratify from geopy.geocoders import Nominatim -from iris.analysis import AreaWeighted, Linear, Nearest +from iris.analysis import ( + AreaWeighted, + Linear, + Nearest, +) from iris.cube import Cube from iris.util import broadcast_to_shape @@ -50,6 +54,7 @@ if TYPE_CHECKING: from collections.abc import Iterable + from iris.analysis import Regridder, RegriddingScheme from numpy.typing import ArrayLike from esmvalcore.dataset import Dataset @@ -580,7 +585,9 @@ def extract_point( return cube.interpolate(point, scheme=loaded_scheme) -def is_dataset(dataset: Any) -> bool: +def is_dataset( + dataset: Any, # noqa: ANN401 +) -> bool: """Test if something is an `esmvalcore.dataset.Dataset`.""" # Use 
this function to avoid circular imports return hasattr(dataset, "facets") @@ -631,7 +638,7 @@ def _load_scheme( src_cube: Cube, tgt_cube: Cube, scheme: NamedHorizontalScheme | dict[str, Any], -): +) -> RegriddingScheme: """Return scheme that can be used in :meth:`iris.cube.Cube.regrid`.""" loaded_scheme: Any = None @@ -664,7 +671,7 @@ def _load_scheme( return loaded_scheme -def _load_generic_scheme(scheme: dict[str, Any]): +def _load_generic_scheme(scheme: dict[str, Any]) -> RegriddingScheme: """Load generic regridding scheme.""" scheme = dict(scheme) # do not overwrite original scheme @@ -706,7 +713,7 @@ def _get_regridder( tgt_cube: Cube, scheme: NamedHorizontalScheme | dict, cache_weights: bool, -): +) -> Regridder: """Get regridder to actually perform regridding. Note diff --git a/esmvalcore/preprocessor/_rolling_window.py b/esmvalcore/preprocessor/_rolling_window.py index 87c9c56711..fc868de2ea 100644 --- a/esmvalcore/preprocessor/_rolling_window.py +++ b/esmvalcore/preprocessor/_rolling_window.py @@ -1,8 +1,9 @@ """Rolling-window operations on data cubes.""" -import logging +from __future__ import annotations -from iris.cube import Cube +import logging +from typing import TYPE_CHECKING, Any from esmvalcore.iris_helpers import ignore_iris_vague_metadata_warnings @@ -11,6 +12,9 @@ preserve_float_dtype, ) +if TYPE_CHECKING: + from iris.cube import Cube + logger = logging.getLogger(__name__) @@ -20,8 +24,8 @@ def rolling_window_statistics( coordinate: str, operator: str, window_length: int, - **operator_kwargs, -): + **operator_kwargs: Any, +) -> Cube: """Compute rolling-window statistics over a coordinate. 
Parameters diff --git a/esmvalcore/preprocessor/_shared.py b/esmvalcore/preprocessor/_shared.py index abb717d2f9..ecf58028e2 100644 --- a/esmvalcore/preprocessor/_shared.py +++ b/esmvalcore/preprocessor/_shared.py @@ -16,7 +16,7 @@ import dask.array as da import iris.analysis import numpy as np -from iris.coords import CellMeasure, Coord, DimCoord +from iris.coords import CellMeasure, DimCoord from iris.cube import Cube from iris.exceptions import CoordinateMultiDimError, CoordinateNotFoundError from iris.util import broadcast_to_shape @@ -29,6 +29,8 @@ if TYPE_CHECKING: from collections.abc import Callable, Iterable + from iris.coords import Coord + from esmvalcore.typing import DataType logger = logging.getLogger(__name__) @@ -45,7 +47,7 @@ def guess_bounds(cube, coords): def get_iris_aggregator( operator: str, - **operator_kwargs, + **operator_kwargs: Any, ) -> tuple[iris.analysis.Aggregator, dict]: """Get :class:`iris.analysis.Aggregator` and keyword arguments. @@ -82,18 +84,14 @@ def get_iris_aggregator( # Check if valid aggregator is found if not hasattr(iris.analysis, cap_operator): msg = f"Aggregator '{operator}' not found in iris.analysis module" - raise ValueError( - msg, - ) + raise ValueError(msg) aggregator = getattr(iris.analysis, cap_operator) if not hasattr(aggregator, "aggregate"): msg = ( f"Aggregator {aggregator} found by '{operator}' is not a valid " f"iris.analysis.Aggregator" ) - raise ValueError( - msg, - ) + raise ValueError(msg) # Use dummy cube to check if aggregator_kwargs are valid x_coord = DimCoord([1.0], bounds=[0.0, 2.0], var_name="x") @@ -109,9 +107,7 @@ def get_iris_aggregator( cube.collapsed("x", aggregator, **test_kwargs) except (ValueError, TypeError) as exc: msg = f"Invalid kwargs for operator '{operator}': {exc!s}" - raise ValueError( - msg, - ) from exc + raise ValueError(msg) from exc return (aggregator, aggregator_kwargs) @@ -141,10 +137,10 @@ def update_weights_kwargs( operator: str, aggregator: iris.analysis.Aggregator, 
kwargs: dict, - weights: Any, + weights: Any, # noqa: ANN401 cube: Cube | None = None, callback: Callable | None = None, - **callback_kwargs, + **callback_kwargs: Any, ) -> dict: """Update weights keyword argument. @@ -248,7 +244,7 @@ def get_normalized_cube( return normalized_cube -def _get_first_arg(func: Callable, *args: Any, **kwargs: Any) -> Any: +def _get_first_arg(func: Callable, *args: Any, **kwargs: Any) -> Any: # noqa: ANN401 """Get first argument given to a function.""" # If positional arguments are given, use the first one if args: diff --git a/esmvalcore/preprocessor/_supplementary_vars.py b/esmvalcore/preprocessor/_supplementary_vars.py index 401498f047..f7a1491d09 100644 --- a/esmvalcore/preprocessor/_supplementary_vars.py +++ b/esmvalcore/preprocessor/_supplementary_vars.py @@ -1,11 +1,16 @@ """Preprocessor functions for ancillary variables and cell measures.""" +from __future__ import annotations + import logging -from collections.abc import Callable, Iterable -from typing import Literal +from typing import TYPE_CHECKING, Literal import iris.coords -from iris.cube import Cube + +if TYPE_CHECKING: + from collections.abc import Callable, Iterable + + from iris.cube import Cube logger = logging.getLogger(__name__) diff --git a/esmvalcore/preprocessor/_time.py b/esmvalcore/preprocessor/_time.py index cb2f5a72e5..ee548afc69 100644 --- a/esmvalcore/preprocessor/_time.py +++ b/esmvalcore/preprocessor/_time.py @@ -11,7 +11,7 @@ import logging import warnings from functools import partial -from typing import TYPE_CHECKING, Literal +from typing import TYPE_CHECKING, Any, Literal from warnings import filterwarnings import dask.array as da @@ -24,8 +24,8 @@ import numpy as np from cf_units import Unit from cftime import datetime as cf_datetime -from iris.coords import AuxCoord, Coord, DimCoord -from iris.cube import Cube, CubeList +from iris.coords import AuxCoord, DimCoord +from iris.cube import CubeList from iris.exceptions import CoordinateMultiDimError, 
CoordinateNotFoundError from iris.time import PartialDateTime from iris.util import broadcast_to_shape @@ -46,6 +46,8 @@ if TYPE_CHECKING: from collections.abc import Iterable + from iris.coords import Coord + from iris.cube import Cube from numpy.typing import DTypeLike logger = logging.getLogger(__name__) @@ -494,7 +496,7 @@ def hourly_statistics( cube: Cube, hours: int, operator: str = "mean", - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute hourly statistics. @@ -552,7 +554,7 @@ def hourly_statistics( def daily_statistics( cube: Cube, operator: str = "mean", - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute daily statistics. @@ -593,7 +595,7 @@ def daily_statistics( def monthly_statistics( cube: Cube, operator: str = "mean", - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute monthly statistics. @@ -637,7 +639,7 @@ def seasonal_statistics( cube: Cube, operator: str = "mean", seasons: Iterable[str] = ("DJF", "MAM", "JJA", "SON"), - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute seasonal statistics. @@ -747,7 +749,7 @@ def spans_full_season(cube: Cube) -> list[bool]: def annual_statistics( cube: Cube, operator: str = "mean", - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute annual statistics. @@ -789,7 +791,7 @@ def annual_statistics( def decadal_statistics( cube: Cube, operator: str = "mean", - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute decadal statistics. @@ -844,7 +846,7 @@ def climate_statistics( operator: str = "mean", period: str = "full", seasons: Iterable[str] = ("DJF", "MAM", "JJA", "SON"), - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Compute climate statistics with the specified granularity. 
@@ -1068,7 +1070,7 @@ def _compute_anomalies( reference: Cube, period: str, seasons: Iterable[str], -): +) -> Cube: cube_coord = _get_period_coord(cube, period, seasons) ref_coord = _get_period_coord(reference, period, seasons) indices = np.empty_like(cube_coord.points, dtype=np.int32) @@ -1340,7 +1342,7 @@ def timeseries_filter( span: int, filter_type: str = "lowpass", filter_stats: str = "sum", - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Apply a timeseries filter. @@ -1723,7 +1725,7 @@ def _transform_to_lst_eager( *, time_dim: int, lon_dim: int, - **__, + **__: Any, ) -> np.ndarray: """Transform array with UTC coord to local solar time (LST) coord. diff --git a/esmvalcore/preprocessor/_units.py b/esmvalcore/preprocessor/_units.py index 5d34d42c9c..413a289066 100644 --- a/esmvalcore/preprocessor/_units.py +++ b/esmvalcore/preprocessor/_units.py @@ -11,12 +11,13 @@ import dask.array as da import iris import numpy as np -from iris.coords import AuxCoord, DimCoord +from iris.coords import AuxCoord from esmvalcore.iris_helpers import _try_special_unit_conversions if TYPE_CHECKING: from cf_units import Unit + from iris.coords import DimCoord from iris.cube import Cube logger = logging.getLogger(__name__) diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index f6f5b1f494..aefd15c8c1 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -7,7 +7,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Literal +from typing import TYPE_CHECKING, Any, Literal import dask import dask.array as da @@ -250,7 +250,7 @@ def volume_statistics( cube: Cube, operator: str, normalize: Literal["subtract", "divide"] | None = None, - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Apply a statistical operation over a volume. 
@@ -346,7 +346,7 @@ def axis_statistics( axis: str, operator: str, normalize: Literal["subtract", "divide"] | None = None, - **operator_kwargs, + **operator_kwargs: Any, ) -> Cube: """Perform statistics along a given axis. diff --git a/esmvalcore/preprocessor/regrid_schemes.py b/esmvalcore/preprocessor/regrid_schemes.py index 01247ebc42..5fbc5f0359 100644 --- a/esmvalcore/preprocessor/regrid_schemes.py +++ b/esmvalcore/preprocessor/regrid_schemes.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from esmvalcore.preprocessor._regrid_iris_esmf_regrid import IrisESMFRegrid from esmvalcore.preprocessor._regrid_unstructured import ( @@ -53,8 +53,8 @@ def __init__( src_cube: Cube, tgt_cube: Cube, func: Callable, - **kwargs, - ): + **kwargs: Any, + ) -> None: """Initialize class instance.""" self.src_cube = src_cube self.tgt_cube = tgt_cube @@ -93,7 +93,7 @@ class GenericFuncScheme: Keyword arguments for the generic regridding function. """ - def __init__(self, func: Callable, **kwargs): + def __init__(self, func: Callable, **kwargs: Any) -> None: """Initialize class instance.""" self.func = func self.kwargs = kwargs diff --git a/pyproject.toml b/pyproject.toml index dfc0cb0b0b..158c455725 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -182,7 +182,6 @@ line-length = 79 select = ["ALL"] ignore = [ # Ignore until someone has time to fix this: - "ANN", # Adding type hints "DTZ", # add tzinfo to datetimes "FBT", # avoid boolean positional arguments "N818", # Exception name should be named with an Error suffix @@ -207,6 +206,7 @@ ignore = [ "TD002", # Missing author in TODO: can be seen from git history "TRY003", # Fixing this would require many exceptions ] +future-annotations = true [tool.ruff.lint.per-file-ignores] "**.ipynb" = [ "T20", # Printing things in notebooks is fine. 
@@ -225,9 +225,9 @@ ignore = [ "D104", # Missing docstring in public package "PT013", # Allow importing fixtures from pytest to avoid repeating 'pytest' many times ] -"doc/gensidebar.py" = [ - "INP001", # File is part of an implicit namespace package -] +[tool.ruff.lint.flake8-annotations] +allow-star-arg-any = true +ignore-fully-untyped = true [tool.ruff.lint.isort] known-first-party = ["esmvalcore"] [tool.ruff.lint.pydocstyle] diff --git a/tests/conftest.py b/tests/conftest.py index 1cc8630b88..46cabf58f9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import warnings from copy import deepcopy from functools import lru_cache -from pathlib import Path +from typing import TYPE_CHECKING import numpy as np import pytest @@ -17,6 +19,11 @@ from esmvalcore.config import CFG, Config +if TYPE_CHECKING: + from pathlib import Path + + from esmvalcore.config import Session + @lru_cache def _load_default_config(): @@ -40,13 +47,20 @@ def cfg_default(): @pytest.fixture(autouse=True) -def ignore_existing_user_config(monkeypatch, cfg_default): +def ignore_existing_user_config( + monkeypatch: pytest.MonkeyPatch, + cfg_default: Config, +) -> None: """Ignore user's configuration when running tests.""" monkeypatch.setattr(CFG, "_mapping", cfg_default._mapping) @pytest.fixture -def session(tmp_path: Path, ignore_existing_user_config, monkeypatch): +def session( + tmp_path: Path, + ignore_existing_user_config: None, + monkeypatch: pytest.MonkeyPatch, +) -> Session: """Session object with default settings.""" monkeypatch.setitem(CFG, "output_dir", tmp_path / "esmvaltool_output") return CFG.start_session("recipe_test") diff --git a/tests/integration/cmor/_fixes/icon/conftest.py b/tests/integration/cmor/_fixes/icon/conftest.py index e8f323c175..961aff4755 100644 --- a/tests/integration/cmor/_fixes/icon/conftest.py +++ b/tests/integration/cmor/_fixes/icon/conftest.py @@ -1,7 +1,9 @@ """Fixtures for ICON fixes tests.""" +from 
__future__ import annotations + import importlib.resources -from pathlib import Path +from typing import TYPE_CHECKING import pytest import yaml @@ -9,6 +11,9 @@ import esmvalcore.config from esmvalcore.cmor._fixes.icon._base_fixes import IconFix +if TYPE_CHECKING: + from pathlib import Path + @pytest.fixture(autouse=True) def tmp_cache_dir(monkeypatch, tmp_path): diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 85b1505866..bb5f4a0324 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import os -from collections.abc import Iterator from pathlib import Path +from typing import TYPE_CHECKING import iris import pytest @@ -12,7 +14,11 @@ _select_drs, _select_files, ) -from esmvalcore.typing import Facets + +if TYPE_CHECKING: + from collections.abc import Callable, Iterator + + from esmvalcore.typing import Facets, FacetValue def create_test_file(filename, tracking_id=None): @@ -123,10 +129,21 @@ def _tracking_ids(i=0): i += 1 -def _get_find_files_func(path: Path, suffix: str = "nc"): +def _get_find_files_func( + path: Path, + suffix: str = "nc", +) -> Callable[ + ..., + tuple[list[LocalFile], list[Path]] | list[LocalFile], +]: tracking_id = _tracking_ids() - def find_files(self, *, debug: bool = False, **facets): + def find_files( + self: esmvalcore.local.LocalDataSource, + *, + debug: bool = False, + **facets: FacetValue, + ) -> tuple[list[LocalFile], list[Path]] | list[LocalFile]: files, file_globs = _get_files(path, facets, tracking_id, suffix) if debug: return files, file_globs @@ -136,7 +153,7 @@ def find_files(self, *, debug: bool = False, **facets): @pytest.fixture -def patched_datafinder(tmp_path, monkeypatch): +def patched_datafinder(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: find_files = _get_find_files_func(tmp_path) monkeypatch.setattr( esmvalcore.local.LocalDataSource, @@ -146,7 +163,7 @@ def patched_datafinder_grib(tmp_path,
monkeypatch): @pytest.fixture -def patched_datafinder_grib(tmp_path, monkeypatch): +def patched_datafinder_grib( + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> None: find_files = _get_find_files_func(tmp_path, suffix="grib") monkeypatch.setattr( esmvalcore.local.LocalDataSource, @@ -156,7 +176,10 @@ def patched_datafinder_grib(tmp_path, monkeypatch): @pytest.fixture -def patched_failing_datafinder(tmp_path, monkeypatch): +def patched_failing_datafinder( + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> None: """Failing data finder. Do not return files for: @@ -168,7 +191,12 @@ def patched_failing_datafinder(tmp_path, monkeypatch): """ tracking_id = _tracking_ids() - def find_files(self, *, debug: bool = False, **facets): + def find_files( + self: esmvalcore.local.LocalDataSource, + *, + debug: bool = False, + **facets: FacetValue, + ) -> tuple[list[LocalFile], list[Path]] | list[LocalFile]: files, file_globs = _get_files(tmp_path, facets, tracking_id) if facets["frequency"] == "fx": files = [] diff --git a/tests/integration/recipe/test_check.py b/tests/integration/recipe/test_check.py index 3879b46724..19fd6b01ca 100644 --- a/tests/integration/recipe/test_check.py +++ b/tests/integration/recipe/test_check.py @@ -1,12 +1,14 @@ """Integration tests for :mod:`esmvalcore._recipe.check`.""" +from __future__ import annotations + import subprocess from pathlib import Path +from typing import TYPE_CHECKING from unittest import mock import pyesgf.search.results import pytest -import pytest_mock import esmvalcore._recipe.check import esmvalcore.esgf @@ -16,6 +18,9 @@ from esmvalcore.local import LocalFile from esmvalcore.preprocessor import PreprocessorFile +if TYPE_CHECKING: + import pytest_mock + def test_ncl_version(mocker): ncl = "/path/to/ncl" diff --git a/tests/integration/recipe/test_recipe.py b/tests/integration/recipe/test_recipe.py index 6b8b788173..10801a671e 100644 --- a/tests/integration/recipe/test_recipe.py +++ 
b/tests/integration/recipe/test_recipe.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib.resources import inspect import os @@ -24,7 +26,6 @@ read_recipe_file, ) from esmvalcore._task import DiagnosticTask -from esmvalcore.config import Session from esmvalcore.config._config import TASKSEP from esmvalcore.config._diagnostics import TAGS from esmvalcore.dataset import Dataset @@ -34,12 +35,16 @@ from tests.integration.test_provenance import check_provenance if TYPE_CHECKING: + from esmvalcore._recipe.recipe import ( + Recipe, + ) + from esmvalcore.config import Session from esmvalcore.typing import Facets @lru_cache def _load_data_sources( - filename, + filename: str, ) -> dict[ str, dict[str, dict[str, dict[str, dict[str, str]]]], @@ -193,7 +198,7 @@ def get_required(short_name, _): """) -def get_recipe(tempdir: Path, content: str, session: Session): +def get_recipe(tempdir: Path, content: str, session: Session) -> Recipe: """Save and load recipe content.""" recipe_file = tempdir / "recipe_test.yml" # Add mandatory documentation section @@ -2538,7 +2543,7 @@ def test_representative_dataset_regular_var( tmp_path: Path, patched_datafinder: None, session: Session, -): +) -> None: """Test ``_representative_dataset`` with regular variable.""" update_data_sources(session, "data-native-icon.yml", tmp_path) @@ -2567,7 +2572,7 @@ def test_representative_dataset_derived_var( patched_datafinder: None, session: Session, force_derivation: bool, -): +) -> None: """Test ``_representative_dataset`` with derived variable.""" update_data_sources(session, "data-native-icon.yml", tmp_path) diff --git a/tests/integration/test_main.py b/tests/integration/test_main.py index c701e35af9..90a86da39a 100644 --- a/tests/integration/test_main.py +++ b/tests/integration/test_main.py @@ -3,13 +3,15 @@ Includes a context manager to temporarily modify sys.argv """ +from __future__ import annotations + import contextlib import copy import functools import sys import warnings 
-from pathlib import Path from textwrap import dedent +from typing import TYPE_CHECKING from unittest.mock import patch import pytest @@ -21,6 +23,9 @@ from esmvalcore._main import Config, ESMValTool, Recipes, run from esmvalcore.exceptions import RecipeError +if TYPE_CHECKING: + from pathlib import Path + def wrapper(f): @functools.wraps(f) diff --git a/tests/sample_data/multimodel_statistics/test_multimodel.py b/tests/sample_data/multimodel_statistics/test_multimodel.py index 84376465d8..a52ba001af 100644 --- a/tests/sample_data/multimodel_statistics/test_multimodel.py +++ b/tests/sample_data/multimodel_statistics/test_multimodel.py @@ -1,9 +1,12 @@ """Test using sample data for :func:`esmvalcore.preprocessor._multimodel`.""" +from __future__ import annotations + import pickle import platform from itertools import groupby from pathlib import Path +from typing import TYPE_CHECKING import cf_units import iris @@ -14,6 +17,9 @@ from esmvalcore.preprocessor import extract_time from esmvalcore.preprocessor._multimodel import multi_model_statistics +if TYPE_CHECKING: + from collections.abc import Sequence + esmvaltool_sample_data = pytest.importorskip("esmvaltool_sample_data") # Increase this number anytime you change the cached input data to the tests. 
@@ -30,7 +36,7 @@ def assert_array_almost_equal(this, other, rtol=1e-7): np.testing.assert_allclose(this, other, rtol=rtol) -def assert_coords_equal(this: list, other: list): +def assert_coords_equal(this: list, other: list) -> None: """Assert coords list `this` equals coords list `other`.""" for this_coord, other_coord in zip(this, other, strict=False): np.testing.assert_equal(this_coord.points, other_coord.points) @@ -53,7 +59,10 @@ def fix_metadata(cubes): cube.coord("air_pressure").bounds = None -def preprocess_data(cubes, time_slice: dict | None = None): +def preprocess_data( + cubes: Sequence[iris.cube.Cube], + time_slice: dict | None = None, +) -> list[iris.cube.Cube]: """Regrid the data to the first cube and optional time-slicing.""" # Increase TEST_REVISION anytime you make changes to this function. if time_slice: diff --git a/tests/unit/config/test_config.py b/tests/unit/config/test_config.py index 266795c616..3bd3fcc566 100644 --- a/tests/unit/config/test_config.py +++ b/tests/unit/config/test_config.py @@ -28,7 +28,7 @@ for f in BUILTIN_CONFIG_DIR.rglob("*.yml") ], ) -def test_builtin_config_files_have_description(config_file) -> None: +def test_builtin_config_files_have_description(config_file: Path) -> None: """Test that all built-in config files have a description.""" # Use the same code to find the description as in the # `esmvaltool config list` command. 
diff --git a/tests/unit/config/test_data_sources.py b/tests/unit/config/test_data_sources.py index 67b364018c..b1e08f69af 100644 --- a/tests/unit/config/test_data_sources.py +++ b/tests/unit/config/test_data_sources.py @@ -1,10 +1,16 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + import pytest import esmvalcore.config._data_sources import esmvalcore.local -from esmvalcore.config import Session from esmvalcore.exceptions import InvalidConfigParameter +if TYPE_CHECKING: + from esmvalcore.config import Session + def test_load_data_sources_no_project_data_sources_configured( session: Session, diff --git a/tests/unit/esgf/test_download.py b/tests/unit/esgf/test_download.py index 9c36477b76..886e76404e 100644 --- a/tests/unit/esgf/test_download.py +++ b/tests/unit/esgf/test_download.py @@ -1,21 +1,26 @@ """Test `esmvalcore.esgf._download`.""" +from __future__ import annotations + import datetime import logging import os import re import textwrap from pathlib import Path +from typing import TYPE_CHECKING import pytest import requests import yaml from pyesgf.search.results import FileResult -from pytest_mock import MockerFixture import esmvalcore.esgf from esmvalcore.esgf import _download +if TYPE_CHECKING: + from pytest_mock import MockerFixture + def test_log_speed(monkeypatch, tmp_path): hosts_file = tmp_path / ".esmvaltool" / "cache" / "esgf-hosts.yml" diff --git a/tests/unit/esgf/test_search.py b/tests/unit/esgf/test_search.py index 5949cc5792..a1b8147e89 100644 --- a/tests/unit/esgf/test_search.py +++ b/tests/unit/esgf/test_search.py @@ -1,18 +1,23 @@ """Test 1esmvalcore.esgf._search`.""" +from __future__ import annotations + import copy import textwrap from pathlib import Path +from typing import TYPE_CHECKING import pyesgf.search import pytest import requests.exceptions from pyesgf.search.results import FileResult -from pytest_mock import MockerFixture import esmvalcore.io.protocol from esmvalcore.esgf import ESGFDataSource, ESGFFile, 
_search, find_files +if TYPE_CHECKING: + from pytest_mock import MockerFixture + OUR_FACETS = ( { "dataset": "cccma_cgcm3_1", diff --git a/tests/unit/io/test_intake_esgf.py b/tests/unit/io/test_intake_esgf.py index a72e145eae..b24fc698b0 100644 --- a/tests/unit/io/test_intake_esgf.py +++ b/tests/unit/io/test_intake_esgf.py @@ -1,6 +1,9 @@ """Unit tests for esmvalcore.io.intake_esgf.""" +from __future__ import annotations + import importlib.resources +from typing import TYPE_CHECKING import intake_esgf import iris.cube @@ -8,13 +11,16 @@ import pytest import xarray as xr import yaml -from pytest import MonkeyPatch -from pytest_mock import MockerFixture import esmvalcore.io.intake_esgf -from esmvalcore.config import Session from esmvalcore.io.intake_esgf import IntakeESGFDataset, IntakeESGFDataSource +if TYPE_CHECKING: + from pytest import MonkeyPatch + from pytest_mock import MockerFixture + + from esmvalcore.config import Session + def test_intakeesgfdataset_repr() -> None: cat = intake_esgf.ESGFCatalog() @@ -134,7 +140,7 @@ def test_find_data_no_results_sets_debug_info(mocker: MockerFixture) -> None: assert data_source.debug_info == expected_debug_info -def test_find_data(mocker: MockerFixture, monkeypatch: MonkeyPatch): +def test_find_data(mocker: MockerFixture, monkeypatch: MonkeyPatch) -> None: """find_data should convert catalog.df rows into IntakeESGFDataset instances.""" cat = intake_esgf.ESGFCatalog() cat.project = intake_esgf.projects.projects["cmip6"] diff --git a/tests/unit/io/test_load_data_sources.py b/tests/unit/io/test_load_data_sources.py index de1f7bad23..d6f7dd380b 100644 --- a/tests/unit/io/test_load_data_sources.py +++ b/tests/unit/io/test_load_data_sources.py @@ -6,7 +6,7 @@ import pytest import esmvalcore.config -import esmvalcore.io +import esmvalcore.io.protocol def test_configurations_valid(cfg_default: esmvalcore.config.Config) -> None: @@ -19,7 +19,7 @@ def test_configurations_valid(cfg_default: esmvalcore.config.Config) -> None: session 
= cfg_default.start_session("test") data_sources = esmvalcore.io.load_data_sources(session) for data_source in data_sources: - assert isinstance(data_source, esmvalcore.io.DataSource) + assert isinstance(data_source, esmvalcore.io.protocol.DataSource) def test_load_data_sources_unknown_project( diff --git a/tests/unit/local/test_get_data_sources.py b/tests/unit/local/test_get_data_sources.py index 4c0e7be5d5..67057562df 100644 --- a/tests/unit/local/test_get_data_sources.py +++ b/tests/unit/local/test_get_data_sources.py @@ -1,12 +1,17 @@ +from __future__ import annotations + from pathlib import Path +from typing import TYPE_CHECKING import pytest -import pytest_mock from esmvalcore.config import CFG from esmvalcore.config._config_validators import validate_config_developer from esmvalcore.local import DataSource, LocalDataSource, _get_data_sources +if TYPE_CHECKING: + import pytest_mock + @pytest.mark.parametrize( "rootpath_drs", diff --git a/tests/unit/local/test_to_iris.py b/tests/unit/local/test_to_iris.py index 44a6a881d3..cae3db2599 100644 --- a/tests/unit/local/test_to_iris.py +++ b/tests/unit/local/test_to_iris.py @@ -1,11 +1,17 @@ -from pathlib import Path +from __future__ import annotations + +from typing import TYPE_CHECKING import iris.cube import pytest -from pytest_mock import MockerFixture from esmvalcore.local import LocalFile, _get_attr_from_field_coord +if TYPE_CHECKING: + from pathlib import Path + + from pytest_mock import MockerFixture + @pytest.fixture def local_file(tmp_path: Path) -> LocalFile: diff --git a/tests/unit/preprocessor/_regrid/__init__.py b/tests/unit/preprocessor/_regrid/__init__.py index 5a99c5115f..c9938e051d 100644 --- a/tests/unit/preprocessor/_regrid/__init__.py +++ b/tests/unit/preprocessor/_regrid/__init__.py @@ -40,7 +40,7 @@ def _make_cube( # noqa: PLR0915,C901 data: np.ndarray, aux_coord: bool = True, dim_coord: bool = True, - dtype=None, + dtype=None, # noqa: ANN001 grid: Literal["regular", "rotated", "mesh"] = 
"regular", ) -> iris.cube.Cube: """Create a 3d synthetic test cube.""" diff --git a/tests/unit/preprocessor/_regrid/test_regrid.py b/tests/unit/preprocessor/_regrid/test_regrid.py index a81cc49504..7b41305c7e 100644 --- a/tests/unit/preprocessor/_regrid/test_regrid.py +++ b/tests/unit/preprocessor/_regrid/test_regrid.py @@ -25,7 +25,13 @@ def clear_regridder_cache(monkeypatch): ) -def _make_coord(start: float, stop: float, step: int, *, name: str): +def _make_coord( + start: float, + stop: float, + step: int, + *, + name: str, +) -> iris.coords.DimCoord: """Create a latitude or longitude coordinate with bounds.""" coord = iris.coords.DimCoord( np.linspace(start, stop, step), @@ -36,7 +42,7 @@ def _make_coord(start: float, stop: float, step: int, *, name: str): return coord -def _make_cube(*, lat: tuple, lon: tuple): +def _make_cube(*, lat: tuple, lon: tuple) -> iris.cube.Cube: """Create a cube with a latitude and longitude dimension.""" lat_coord = _make_coord(*lat, name="latitude") lon_coord = _make_coord(*lon, name="longitude") @@ -224,7 +230,7 @@ def test_regrid_generic_regridding(cache_weights, cube_10x10, cube_30x30): ), ], ) -def test_horizontal_grid_is_close(cube2_spec: dict, expected: bool): +def test_horizontal_grid_is_close(cube2_spec: dict, expected: bool) -> None: """Test for `_horizontal_grid_is_close`.""" cube1 = _make_cube(lat=LAT_SPEC1, lon=LON_SPEC1) cube2 = _make_cube(**cube2_spec) diff --git a/tests/unit/provenance/test_trackedfile.py b/tests/unit/provenance/test_trackedfile.py index 16290ec72b..8035962b49 100644 --- a/tests/unit/provenance/test_trackedfile.py +++ b/tests/unit/provenance/test_trackedfile.py @@ -1,9 +1,9 @@ +from __future__ import annotations + from dataclasses import dataclass from pathlib import Path -from typing import Any +from typing import TYPE_CHECKING, Any -import iris.cube -import prov.model import pytest from prov.model import ProvDocument @@ -11,6 +11,10 @@ from esmvalcore.io.protocol import DataElement from 
esmvalcore.local import LocalFile +if TYPE_CHECKING: + import iris.cube + import prov.model + def test_set() -> None: assert { diff --git a/tests/unit/recipe/test_to_datasets.py b/tests/unit/recipe/test_to_datasets.py index 675c538e69..4369a815fa 100644 --- a/tests/unit/recipe/test_to_datasets.py +++ b/tests/unit/recipe/test_to_datasets.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import textwrap from pathlib import Path +from typing import TYPE_CHECKING import pytest -import pytest_mock import yaml from esmvalcore._recipe import to_datasets @@ -10,6 +12,9 @@ from esmvalcore.exceptions import RecipeError from esmvalcore.local import LocalFile +if TYPE_CHECKING: + import pytest_mock + def test_from_recipe(session): recipe_txt = textwrap.dedent(""" diff --git a/tests/unit/task/test_taskset.py b/tests/unit/task/test_taskset.py index 6e2dd9dccc..9f4c683c74 100644 --- a/tests/unit/task/test_taskset.py +++ b/tests/unit/task/test_taskset.py @@ -1,9 +1,16 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + import dask import pytest from esmvalcore import _task from esmvalcore.preprocessor import PreprocessingTask +if TYPE_CHECKING: + from pytest_mock import MockerFixture + @pytest.mark.parametrize( ( @@ -24,7 +31,7 @@ ], ) def test_taskset_get_dask_config( - mocker, + mocker: MockerFixture, max_parallel_tasks: int, available_cpu_cores: int, n_preproc_tasks: int, @@ -53,7 +60,7 @@ def test_taskset_get_dask_config( assert config == {"num_workers": expected_workers} -def test_taskset_get_dask_config_noop(mocker) -> None: +def test_taskset_get_dask_config_noop(mocker: MockerFixture) -> None: tasks = _task.TaskSet() with dask.config.set({"num_workers": 4, "scheduler": "threads"}): diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py index c1d320b258..63ebd22b91 100644 --- a/tests/unit/test_dataset.py +++ b/tests/unit/test_dataset.py @@ -1,8 +1,11 @@ +from __future__ import annotations + import importlib.resources import 
textwrap from collections import defaultdict from functools import lru_cache from pathlib import Path +from typing import TYPE_CHECKING import pyesgf import pytest @@ -16,7 +19,9 @@ from esmvalcore.dataset import Dataset from esmvalcore.esgf import ESGFFile from esmvalcore.exceptions import InputFilesNotFound, RecipeError -from esmvalcore.typing import Facets + +if TYPE_CHECKING: + from esmvalcore.typing import Facets @lru_cache