diff --git a/CHANGES.md b/CHANGES.md index e250b4a..c61a513 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,6 +2,18 @@ ## Version 0.3.0 (in development) +- Fixed problem where referring to values in modules via + the form `"<module>:<attr>"` raised an error. #21 + +- Introduced factory function `new_plugin`, which simplifies + creating plugin objects. + +- Refactored out a new common mixin class `Operation`, + which reduces the amount of code and simplifies testing + of the operation classes `Rule`, `Processor`, and `Formatter`. + +- Improved overall test coverage. + ## Version 0.2.0 (14.01.2025) diff --git a/docs/api.md b/docs/api.md index 40ec151..40ddcf2 100644 --- a/docs/api.md +++ b/docs/api.md @@ -10,9 +10,10 @@ This chapter provides a plain reference for the XRLint Python API. dataset: [new_linter()][xrlint.linter.new_linter] factory function and the [Linter][xrlint.linter.Linter] class. -- The `plugin` module provides plugin related classes: - [Plugin][xrlint.plugin.Plugin] and its - metadata [PluginMeta][xrlint.plugin.PluginMeta]. +- The `plugin` module provides plugin-related components: + a factory function [new_plugin][xrlint.plugin.new_plugin] that creates + instances of the [Plugin][xrlint.plugin.Plugin] class, whose metadata + is represented by [PluginMeta][xrlint.plugin.PluginMeta]. - The `config` module provides classes that represent configuration information and provide related functionality: [Config][xrlint.config.Config] and [ConfigList][xrlint.config.ConfigList]. @@ -51,6 +52,12 @@ Note: ::: xrlint.linter.Linter +::: xrlint.plugin.new_plugin + +::: xrlint.plugin.Plugin + +::: xrlint.plugin.PluginMeta + ::: xrlint.config.Config ::: xrlint.config.ConfigList @@ -79,10 +86,6 @@ Note: ::: xrlint.node.AttrNode -::: xrlint.plugin.Plugin - -::: xrlint.plugin.PluginMeta - ::: xrlint.processor.define_processor ::: xrlint.processor.Processor diff --git a/environment.yml b/environment.yml index b043546..faf822f 100644 --- a/environment.yml +++ b/environment.yml @@ -1,4 +1,4 @@ -name: xrlint-310 +name: xrlint channels: - conda-forge dependencies: diff --git a/examples/plugin_config.py b/examples/plugin_config.py index 7487fbf..dd72060 100644 --- a/examples/plugin_config.py +++ b/examples/plugin_config.py @@ -5,14 +5,14 @@ from xrlint.config import Config from xrlint.node import DatasetNode -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.rule import RuleContext from xrlint.rule import RuleOp -plugin = Plugin( - meta=PluginMeta(name="hello-plugin", version="1.0.0"), +plugin = new_plugin( + name="hello-plugin", + version="1.0.0", configs={ # "configs" entries must be `Config` objects!
"recommended": Config.from_value( diff --git a/tests/formatters/helpers.py b/tests/formatters/helpers.py index 2b47886..58497a7 100644 --- a/tests/formatters/helpers.py +++ b/tests/formatters/helpers.py @@ -1,7 +1,6 @@ from xrlint.config import Config from xrlint.formatter import FormatterContext -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.result import Message, ResultStats from xrlint.result import Result from xrlint.rule import RuleOp @@ -28,7 +27,7 @@ def get_context(max_warnings: int = -1) -> FormatterContext: def get_test_results(): - plugin = Plugin(meta=PluginMeta(name="test")) + plugin = new_plugin(name="test") @plugin.define_rule( "rule-1", description="Haha", docs_url="https://rules.com/haha.html" diff --git a/tests/formatters/test_markdown.py b/tests/formatters/test_markdown.py deleted file mode 100644 index 62949a3..0000000 --- a/tests/formatters/test_markdown.py +++ /dev/null @@ -1,17 +0,0 @@ -from unittest import TestCase - -import pytest - -from xrlint.formatters.markdown import Markdown -from .helpers import get_test_results, get_context - - -class MarkdownTest(TestCase): - # noinspection PyMethodMayBeStatic - def test_markdown(self): - formatter = Markdown() - with pytest.raises(NotImplementedError): - formatter.format( - context=get_context(), - results=get_test_results(), - ) diff --git a/tests/test_formatters.py b/tests/test_formatters.py index f17076d..fc36600 100644 --- a/tests/test_formatters.py +++ b/tests/test_formatters.py @@ -10,7 +10,6 @@ def test_import_formatters(self): { "html", "json", - "markdown", "simple", }, set(registry.keys()), diff --git a/tests/test_linter.py b/tests/test_linter.py index 029ff84..f0e6939 100644 --- a/tests/test_linter.py +++ b/tests/test_linter.py @@ -7,8 +7,7 @@ from xrlint.constants import CORE_PLUGIN_NAME from xrlint.linter import Linter from xrlint.linter import new_linter -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.node import ( AttrsNode, AttrNode, @@ -62,7 +61,7 @@ class LinterVerifyTest(TestCase): def setUp(self): - plugin = Plugin(meta=PluginMeta(name="test")) + plugin = new_plugin(name="test") @plugin.define_rule("no-space-in-attr-name") class AttrVer(RuleOp): diff --git a/tests/test_operation.py b/tests/test_operation.py new file mode 100644 index 0000000..0b286a5 --- /dev/null +++ b/tests/test_operation.py @@ -0,0 +1,266 @@ +from abc import ABC +from dataclasses import dataclass +from typing import Type +from unittest import TestCase + +import pytest + +from xrlint.operation import Operation, OperationMeta + + +class ThingOp(ABC): + def do_something(self): + pass + + +@dataclass(kw_only=True) +class ThingMeta(OperationMeta): + pass + + +@dataclass(kw_only=True, frozen=True) +class Thing(Operation): + meta: ThingMeta + op_class: Type[ThingOp] + + @classmethod + def meta_class(cls) -> Type: + return ThingMeta + + @classmethod + def op_base_class(cls) -> Type[ThingOp]: + return ThingOp + + @classmethod + def op_name(cls) -> str: + return "thing" + + @classmethod + def define(cls, op_class: Type[ThingOp] | None = None, **kwargs): + return cls.define_operation(op_class, **kwargs) + + +class MyThingOp1(ThingOp): + pass + + +class MyThingOp2(ThingOp): + pass + + +# This is one way of exporting one of many things from a module +thing1 = Thing(meta=ThingMeta(name="my-thing-op-1"), op_class=MyThingOp1) +thing2 = Thing(meta=ThingMeta(name="my-thing-op-2"), op_class=MyThingOp2) + 
+ +# This is the default way of exporting a single thing from a dedicated module +def export_thing() -> Thing: + class MyThingOp3(ThingOp): + pass + + return Thing(meta=ThingMeta(name="my-thing-op-3"), op_class=MyThingOp3) + + +class OperationTest(TestCase): + def test_defaults(self): + self.assertEqual(OperationMeta, Operation.meta_class()) + self.assertEqual(type, Operation.op_base_class()) + self.assertEqual("operation", Operation.op_name()) + self.assertEqual("export_operation", Operation.op_import_attr_name()) + self.assertEqual("operation", Operation.value_name()) + self.assertEqual( + "Operation | Type[type] | dict | str", Operation.value_type_name() + ) + + def test_from_value_ok_rule(self): + thing1_ = Thing.from_value(thing1) + self.assertIs(thing1_, thing1) + + def test_from_value_ok_rule_op(self): + class MyThingOp3(ThingOp): + pass + + meta = ThingMeta(name="my-thing-op-3") + + # This "defines" MyThingOp2 so we can create + # instances from its operation class. + MyThingOp3.meta = meta + + thing3 = Thing.from_value(MyThingOp3) + self.assertIsInstance(thing3, Thing) + self.assertIs(meta, thing3.meta) + self.assertIs(MyThingOp3, thing3.op_class) + + def test_from_value_ok_str(self): + + thing1_ = Thing.from_value("tests.test_operation:thing1") + self.assertIs(thing1, thing1_) + self.assertEqual("tests.test_operation:thing1", thing1_.meta.ref) + + thing2_ = Thing.from_value("tests.test_operation:thing2") + self.assertIs(thing2, thing2_) + self.assertEqual("tests.test_operation:thing2", thing2_.meta.ref) + + # default attribute is "export_thing" + thing3 = Thing.from_value("tests.test_operation") + self.assertIsInstance(thing3, Thing) + self.assertIs("my-thing-op-3", thing3.meta.name) + self.assertIsInstance(thing3.op_class, type) + self.assertEqual("tests.test_operation:export_thing", thing3.meta.ref) + + # noinspection PyMethodMayBeStatic + def test_from_value_fails(self): + with pytest.raises( + TypeError, match="value must be of type Thing | str, but got int" + ): + Thing.from_value(73) + + class MyThing3(ThingOp): + """This is my 3rd thing.""" + + with pytest.raises( + ValueError, + match=r"missing thing metadata, apply define_thing\(\) to class MyThing3", + ): + Thing.from_value(MyThing3) + + with pytest.raises( + TypeError, + match=( + r"thing must be of type Thing \| Type\[ThingOp\] \|" + r" dict | str, but got type" + ), + ): + Thing.from_value(Thing) + + def test_to_json(self): + class MyThingOp3(ThingOp): + """This is my 3rd thing.""" + + thing3 = Thing( + meta=ThingMeta(name="t3", ref="mypkg.things:thing3"), op_class=MyThingOp3 + ) + self.assertEqual("mypkg.things:thing3", thing3.to_json()) + + rule = Thing( + meta=ThingMeta(name="t3", description="What a thing."), op_class=MyThingOp3 + ) + self.assertEqual( + { + "meta": { + "name": "t3", + "version": "0.0.0", + "description": "What a thing.", + }, + "op_class": ".MyThingOp3'>", + }, + rule.to_json(), + ) + + +class OpMixinDefineTest(TestCase): + + def test_define_op(self): + + class MyThingOp3(ThingOp): + """This is my 3rd thing.""" + + value = Thing.define_operation(MyThingOp3, meta_kwargs=dict(version="1.0")) + self.assertIsInstance(value, Thing) + self.assertIsInstance(value.meta, ThingMeta) + self.assertEqual("my-thing-op-3", value.meta.name) + self.assertEqual("1.0", value.meta.version) + self.assertEqual("This is my 3rd thing.", value.meta.description) + self.assertIs(MyThingOp3, value.op_class) + self.assertTrue(hasattr(MyThingOp3, "meta")) + # noinspection PyUnresolvedReferences + self.assertIs(value.meta, 
MyThingOp3.meta) + + def test_define_op_fail(self): + + class MyThingOp3(ThingOp): + """This is my 3rd thing.""" + + with pytest.raises( + TypeError, match="registry must be a MutableMapping, but got int" + ): + # noinspection PyTypeChecker + Thing.define_operation(MyThingOp3, registry=12) + + def test_decorator(self): + class MyThingOp3(ThingOp): + """This is my 3rd thing.""" + + closure = Thing.define() + self.assertTrue(callable(closure)) + op_class = closure(MyThingOp3) + self.assertIs(MyThingOp3, op_class) + self.assertTrue(hasattr(MyThingOp3, "meta")) + # noinspection PyUnresolvedReferences + meta = op_class.meta + self.assertEqual("my-thing-op-3", meta.name) + self.assertEqual("0.0.0", meta.version) + self.assertEqual("This is my 3rd thing.", meta.description) + + # noinspection PyMethodMayBeStatic + def test_decorator_fail(self): + closure = Thing.define() + with pytest.raises( + TypeError, match="decorated thing component must be a class, but got int" + ): + closure(32) + + with pytest.raises( + TypeError, + match=( + "decorated thing component must be a subclass of ThingOp," + " but got Thing" + ), + ): + closure(Thing) + + def test_function(self): + class MyThingOp3(ThingOp): + """This is my 3rd thing.""" + + thing = Thing.define(op_class=MyThingOp3) + self.assertIsInstance(thing, Thing) + self.assertIs(MyThingOp3, thing.op_class) + self.assertTrue(hasattr(MyThingOp3, "meta")) + meta = thing.meta + # noinspection PyUnresolvedReferences + self.assertIs(meta, thing.op_class.meta) + self.assertEqual("my-thing-op-3", meta.name) + self.assertEqual("0.0.0", meta.version) + self.assertEqual("This is my 3rd thing.", meta.description) + + # noinspection PyMethodMayBeStatic + def test_function_fail(self): + class MyThingOp3(ThingOp): + """This is my 3rd thing.""" + + with pytest.raises(TypeError, match="op_class must be a class, but got str"): + # noinspection PyTypeChecker + Thing.define(op_class="Huh!") + + with pytest.raises( + TypeError, + match="op_class must be a subclass of ThingOp, but got TestCase", + ): + # noinspection PyTypeChecker + Thing.define(TestCase) + + def test_with_registry(self): + class Op1(ThingOp): + """This is my 3rd thing.""" + + class Op2(ThingOp): + """This is my 3rd thing.""" + + registry = {} + t1 = Thing.define(op_class=Op1, registry=registry) + t2 = Thing.define(op_class=Op2, registry=registry) + self.assertIs(t1, registry["op-1"]) + self.assertIs(t2, registry["op-2"]) diff --git a/tests/test_plugin.py b/tests/test_plugin.py index b8f2dde..a59dd95 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -3,13 +3,17 @@ import xarray as xr -from xrlint.plugin import Plugin, PluginMeta +from xrlint.plugin import new_plugin, Plugin, PluginMeta from xrlint.processor import ProcessorOp, Processor from xrlint.result import Message from xrlint.rule import Rule, RuleOp, define_rule class PluginTest(TestCase): + def test_new_plugin(self): + plugin = new_plugin(name="hello", version="2.4.5") + self.assertEqual(Plugin(meta=PluginMeta(name="hello", version="2.4.5")), plugin) + def test_from_value_ok_plugin(self): plugin = Plugin(meta=PluginMeta(name="hello")) self.assertIs(plugin, Plugin.from_value(plugin)) diff --git a/tests/test_processor.py b/tests/test_processor.py index 62ca011..927471a 100644 --- a/tests/test_processor.py +++ b/tests/test_processor.py @@ -4,8 +4,7 @@ import pytest import xarray as xr -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.processor import Processor from 
xrlint.processor import ProcessorMeta from xrlint.processor import ProcessorOp @@ -69,8 +68,8 @@ def test_define_processor_as_decorator_fail(self): with pytest.raises( TypeError, match=( - r"component decorated by define_processor\(\)" - r" must be a subclass of ProcessorOp" + "decorated processor component must be a subclass of ProcessorOp," + " but got MyProcessorOp" ), ): @@ -79,7 +78,7 @@ class MyProcessorOp: pass def test_define_processor_with_plugin(self): - plugin = Plugin(meta=PluginMeta(name="my-plugin")) + plugin = new_plugin(name="my-plugin") @plugin.define_processor() class MyProcessorOp(ProcessorOp): diff --git a/tests/test_result.py b/tests/test_result.py index 5192791..4b92830 100644 --- a/tests/test_result.py +++ b/tests/test_result.py @@ -1,7 +1,7 @@ from unittest import TestCase from xrlint.config import Config -from xrlint.plugin import Plugin, PluginMeta +from xrlint.plugin import new_plugin from xrlint.result import ( get_rules_meta_for_results, Result, @@ -17,7 +17,7 @@ class ResultTest(TestCase): # noinspection PyUnusedLocal def test_get_rules_meta_for_results(self): - plugin = Plugin(meta=PluginMeta(name="test")) + plugin = new_plugin(name="test") @plugin.define_rule("my-rule-1") class MyRule1(RuleOp): diff --git a/tests/test_rule.py b/tests/test_rule.py index 1618d3a..b1d402a 100644 --- a/tests/test_rule.py +++ b/tests/test_rule.py @@ -136,10 +136,7 @@ def test_with_registry(self): def test_fail(self): with pytest.raises( TypeError, - match=( - r"component decorated by define_rule\(\)" - r" must be a subclass of RuleOp" - ), + match="op_class must be a subclass of RuleOp, but got DefineRuleTest", ): # noinspection PyTypeChecker define_rule(op_class=DefineRuleTest) diff --git a/tests/test_testing.py b/tests/test_testing.py index 1ab960f..d475a5d 100644 --- a/tests/test_testing.py +++ b/tests/test_testing.py @@ -27,7 +27,7 @@ def dataset(self, ctx: RuleContext, node: DatasetNode): # noinspection PyMethodMayBeStatic class RuleTesterTest(TestCase): def test_ok(self): - tester = RuleTester(rules={"test/force-title": "error"}) + tester = RuleTester(rules={"testing/force-title": "error"}) tester.run( "force-title", ForceTitle, @@ -42,7 +42,7 @@ def test_ok(self): ) def test_raises_valid(self): - tester = RuleTester(rules={"test/force-title": "error"}) + tester = RuleTester(rules={"testing/force-title": "error"}) with pytest.raises( AssertionError, match=( @@ -61,7 +61,7 @@ def test_raises_valid(self): ) def test_raises_invalid(self): - tester = RuleTester(rules={"test/force-title": "error"}) + tester = RuleTester(rules={"testing/force-title": "error"}) with pytest.raises( AssertionError, match=( diff --git a/tests/util/test_codec.py b/tests/util/test_codec.py index d86f819..49ab08f 100644 --- a/tests/util/test_codec.py +++ b/tests/util/test_codec.py @@ -15,12 +15,12 @@ import pytest -from xrlint.util.codec import ( +from xrlint.util.constructible import ( ValueConstructible, - JsonSerializable, MappingConstructible, get_class_parameters, ) +from xrlint.util.serializable import JsonSerializable @dataclass() @@ -80,7 +80,7 @@ class UnresolvedTypesContainer(ComplexTypesContainer, SimpleTypesContainer): plugins: dict[str, "Plugin"] = field(default_factory=dict) @classmethod - def _get_forward_refs(cls) -> Optional[Mapping[str, type]]: + def forward_refs(cls) -> Optional[Mapping[str, type]]: from xrlint.rule import RuleConfig from xrlint.plugin import Plugin @@ -480,10 +480,10 @@ def test_union_fail(self): UnionTypesContainer.from_value({"m": "pippo"}, value_name="utc") def 
test_get_class_parameters_is_cached(self): - ctc_param = ComplexTypesContainer._get_class_parameters() - stc_param = SimpleTypesContainer._get_class_parameters() - self.assertIs(stc_param, SimpleTypesContainer._get_class_parameters()) - self.assertIs(ctc_param, ComplexTypesContainer._get_class_parameters()) + ctc_param = ComplexTypesContainer.class_parameters() + stc_param = SimpleTypesContainer.class_parameters() + self.assertIs(stc_param, SimpleTypesContainer.class_parameters()) + self.assertIs(ctc_param, ComplexTypesContainer.class_parameters()) self.assertIsNot(ctc_param, stc_param) @@ -492,7 +492,7 @@ class GetClassParametersTest(TestCase): def test_resolves_types(self): ctc_params = get_class_parameters( UnresolvedTypesContainer, - forward_refs=UnresolvedTypesContainer._get_forward_refs(), + forward_refs=UnresolvedTypesContainer.forward_refs(), ) # order is important! self.assertEqual( diff --git a/xrlint/all.py b/xrlint/all.py index ade9a6a..d170ab4 100644 --- a/xrlint/all.py +++ b/xrlint/all.py @@ -20,6 +20,7 @@ from xrlint.node import Node from xrlint.plugin import Plugin from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.processor import Processor from xrlint.processor import ProcessorMeta from xrlint.processor import ProcessorOp @@ -58,6 +59,7 @@ "Node", "Plugin", "PluginMeta", + "new_plugin", "Processor", "ProcessorMeta", "ProcessorOp", diff --git a/xrlint/config.py b/xrlint/config.py index b8d3750..c23c934 100644 --- a/xrlint/config.py +++ b/xrlint/config.py @@ -3,12 +3,11 @@ from typing import Any, TYPE_CHECKING, Union, Literal, Sequence from xrlint.constants import CORE_PLUGIN_NAME -from xrlint.util.codec import ( +from xrlint.util.constructible import ( MappingConstructible, ValueConstructible, - JsonSerializable, - JsonValue, ) +from xrlint.util.serializable import JsonSerializable, JsonValue from xrlint.util.filefilter import FileFilter from xrlint.util.merge import ( merge_arrays, @@ -284,7 +283,7 @@ def _from_none(cls, value_name: str) -> "Config": return Config() @classmethod - def _get_forward_refs(cls) -> dict[str, type]: + def forward_refs(cls) -> dict[str, type]: from xrlint.processor import ProcessorOp from xrlint.plugin import Plugin from xrlint.rule import Rule @@ -298,11 +297,11 @@ def _get_forward_refs(cls) -> dict[str, type]: } @classmethod - def _get_value_name(cls) -> str: + def value_name(cls) -> str: return "config" @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "Config | dict | None" def to_dict(self, value_name: str | None = None) -> dict[str, JsonValue]: @@ -396,11 +395,11 @@ def _from_sequence(cls, value: Sequence, value_name: str) -> "ConfigList": return ConfigList(configs) @classmethod - def _get_value_name(cls) -> str: + def value_name(cls) -> str: return "config_list" @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "ConfigList | list[Config | dict]" @classmethod diff --git a/xrlint/formatter.py b/xrlint/formatter.py index 87bce66..eddf6b7 100644 --- a/xrlint/formatter.py +++ b/xrlint/formatter.py @@ -3,9 +3,9 @@ from dataclasses import dataclass from typing import Any, Callable, Type +from xrlint.operation import Operation, OperationMeta from xrlint.result import Result from xrlint.result import ResultStats -from xrlint.util.naming import to_kebab_case class FormatterContext(ABC): @@ -42,7 +42,7 @@ def format( @dataclass(kw_only=True) -class FormatterMeta: +class FormatterMeta(OperationMeta): """Formatter metadata.""" 
name: str @@ -51,9 +51,6 @@ class FormatterMeta: version: str = "0.0.0" """Formatter version.""" - schema: dict[str, Any] | list[dict[str, Any]] | bool | None = None - """Formatter options schema.""" - ref: str | None = None """Formatter reference. Specifies the location from where the formatter can be @@ -61,9 +58,12 @@ class FormatterMeta: Must have the form ":", if given. """ + schema: dict[str, Any] | list[dict[str, Any]] | bool | None = None + """Formatter options schema.""" + @dataclass(frozen=True, kw_only=True) -class Formatter: +class Formatter(Operation): """A formatter for linting results.""" meta: FormatterMeta @@ -72,37 +72,36 @@ class Formatter: op_class: Type[FormatterOp] """The class that implements the format operation.""" + @classmethod + def meta_class(cls) -> Type: + return FormatterMeta + + @classmethod + def op_base_class(cls) -> Type: + return FormatterOp + + @classmethod + def op_name(cls) -> str: + return "formatter" + class FormatterRegistry(Mapping[str, Formatter]): def __init__(self): self._registrations = {} - # TODO: fix this code duplication in define_rule() def define_formatter( self, name: str | None = None, version: str | None = None, schema: dict[str, Any] | list[dict[str, Any]] | bool | None = None, - ) -> Callable[[Any], Type[FormatterOp]]: - - def _define_formatter(op_class: Any) -> Type[FormatterOp]: - from inspect import isclass - - if not isclass(op_class) or not issubclass(op_class, FormatterOp): - raise TypeError( - f"component decorated by define_formatter()" - f" must be a subclass of {FormatterOp.__name__}" - ) - meta = FormatterMeta( - name=name or to_kebab_case(op_class.__name__), - version=version, - schema=schema, - ) - self._registrations[meta.name] = Formatter(meta=meta, op_class=op_class) - return op_class - - return _define_formatter + ) -> Callable[[FormatterOp], Type[FormatterOp]] | Formatter: + """Decorator function.""" + return Formatter.define_operation( + None, + registry=self._registrations, + meta_kwargs=dict(name=name, version=version, schema=schema), + ) def __getitem__(self, key: str) -> Formatter: return self._registrations[key] diff --git a/xrlint/formatters/markdown.py b/xrlint/formatters/markdown.py deleted file mode 100644 index f1a7a46..0000000 --- a/xrlint/formatters/markdown.py +++ /dev/null @@ -1,17 +0,0 @@ -from collections.abc import Iterable - -from xrlint.formatter import FormatterOp, FormatterContext -from xrlint.formatters import registry -from xrlint.result import Result - - -@registry.define_formatter("markdown", version="1.0.0") -class Markdown(FormatterOp): - - def format( - self, - context: FormatterContext, - results: Iterable[Result], - ) -> str: - # TODO: implement "markdown" format - raise NotImplementedError() diff --git a/xrlint/operation.py b/xrlint/operation.py new file mode 100644 index 0000000..2f84a9b --- /dev/null +++ b/xrlint/operation.py @@ -0,0 +1,225 @@ +from abc import abstractmethod +from collections.abc import MutableMapping +from dataclasses import dataclass +from inspect import isclass, getdoc +from typing import Any, Type, Callable + +from xrlint.util.constructible import MappingConstructible +from xrlint.util.serializable import JsonSerializable, JsonValue +from xrlint.util.importutil import import_value +from xrlint.util.naming import to_kebab_case + + +@dataclass(kw_only=True) +class OperationMeta(MappingConstructible["OpMetadata"], JsonSerializable): + """Operation metadata.""" + + name: str + """Operation name.""" + + version: str = "0.0.0" + """Operation version.""" + + 
"""Operation description. Optional.""" + description: str | None = None + + schema: dict[str, JsonValue] | list[dict[str, JsonValue]] | None = None + """JSON Schema used to specify and validate the operation's' + options, if any. + + It can take the following values: + + - Use `None` (the default) to indicate that the operation + as no options at all. + - Use a schema to indicate that the operation + takes keyword arguments only. + The schema's JSON type must be `"object"`. + - Use a list of schemas to indicate that the operation + takes positional-only arguments. + If given, the number of schemas in the list specifies the + number of positional arguments that must be provided by users. + """ + + ref: str | None = None + """Operation reference. + Specifies the location from where the operation can be + dynamically imported. + Must have the form ":", if given. + """ + + def to_dict(self, value_name: str | None = None) -> dict[str, JsonValue]: + return { + k: v + for k, v in super().to_dict(value_name=value_name).items() + if v is not None + } + + +class Operation(MappingConstructible["Operation"], JsonSerializable): + """A mixin class that is used by operation classes. + + An operation class comprises a `meta` property + that provides the operation metadata. See [OpMetadata][] + for its interface definition. + + An `op_class` property holds a class that implements the + operation's logic. + + Rules, processors, and formatters use this mixin. + + Derived classes should provide a constructor that takes at least + two keyword arguments: + + - `meta` - the metadata object that describes the operation + - `op_class` - the class that implements the operation + + The `meta` object's class is expected to be constructible + from keyword arguments with at least a `name: str` argument. + `meta` objects should also have a writable `ref: str | None` + property. + """ + + # noinspection PyUnresolvedReferences + def to_json(self, value_name: str | None = None) -> str: + if self.meta.ref: + return self.meta.ref + return super().to_json(value_name=value_name) + + @classmethod + def _from_class(cls, value: Type, value_name: str) -> "Operation": + # noinspection PyTypeChecker + if issubclass(value, cls.op_base_class()): + op_class = value + try: + # Note, the value.meta attribute is set by + # the define_op + # + # noinspection PyUnresolvedReferences + meta = op_class.meta + except AttributeError: + raise ValueError( + f"missing {cls.op_name()} metadata, apply define_{cls.op_name()}()" + f" to class {op_class.__name__}" + ) + # noinspection PyArgumentList + return cls(meta=meta, op_class=op_class) + return super()._from_class(value, value_name) + + @classmethod + def _from_str(cls, value: str, value_name: str) -> "Operation": + # noinspection PyTypeChecker + operator, operator_ref = import_value( + value, + cls.op_import_attr_name(), + factory=cls.from_value, + ) + # noinspection PyUnresolvedReferences + operator.meta.ref = operator_ref + return operator + + @classmethod + def op_import_attr_name(cls) -> str: + """Get the default name for the attribute that is used to import + instances of this class from modules. + """ + return f"export_{cls.op_name()}" + + @classmethod + def meta_class(cls) -> Type: + """Get the class of the instances of the `meta` field. + Defaults to [OperationMeta][xrlint.operation.OperationMeta]. + """ + return OperationMeta + + @classmethod + def op_base_class(cls) -> Type: + """Get the base class from which all instances of the `op_class` + must derive from. 
+ """ + return type + + @classmethod + def op_name(cls) -> str: + """Get a name that describes the operation, e.g., + "rule", "processor", "formatter". + """ + return "operation" + + @classmethod + def value_name(cls) -> str: + return cls.op_name() + + @classmethod + def value_type_name(cls) -> str: + return f"{cls.__name__} | Type[{cls.op_base_class().__name__}] | dict | str" + + @classmethod + def define_operation( + cls, + op_class: Type | None, + *, + registry: MutableMapping[str, "Operation"] | None = None, + meta_kwargs: dict[str, Any] | None = None, + **kwargs, + ) -> Callable[[Type], Type] | "Operation": + """Defines an operation.""" + meta_kwargs = meta_kwargs or {} + + def _define_op(_op_class: Type, decorated=True) -> Type | "Operation": + cls._assert_op_class_ok(f"decorated {cls.op_name()} component", _op_class) + + name = meta_kwargs.pop("name", None) + if not name: + name = to_kebab_case(_op_class.__name__) + description = meta_kwargs.pop("description", None) + if not description: + description = getdoc(_op_class) + schema = meta_kwargs.pop("schema", None) + if schema is None: + # TODO: if schema not given, + # derive it from _op_class' ctor arguments + # schema = cls._derive_schema(_op_class) + pass + # noinspection PyCallingNonCallable + meta = cls.meta_class()( + name=name, description=description, schema=schema, **meta_kwargs + ) + + # Register rule metadata in rule operation class + _op_class.meta = meta + + # noinspection PyArgumentList + op_instance = cls(meta=meta, op_class=_op_class, **kwargs) + if registry is not None: + # Register rule in rule registry + registry[name] = op_instance + if decorated: + return _op_class + else: + return op_instance + + if registry is not None and not isinstance(registry, MutableMapping): + raise TypeError( + f"registry must be a MutableMapping, but got {type(registry).__name__}" + ) + + if op_class is not None: + # passing the op_class means an operation instance is expected + cls._assert_op_class_ok("op_class", op_class) + return _define_op(op_class, decorated=False) + + # used as decorator, return closure + return _define_op + + @classmethod + def _assert_op_class_ok(cls, value_name: str, op_class: Type): + if not isclass(op_class): + raise TypeError( + f"{value_name} must be a class, but got {type(op_class).__name__}" + ) + # noinspection PyTypeChecker + if not issubclass(op_class, cls.op_base_class()): + raise TypeError( + f"{value_name} must be a subclass of {cls.op_base_class().__name__}," + f" but got {op_class.__name__}" + ) diff --git a/xrlint/plugin.py b/xrlint/plugin.py index 2e5b87b..4b46b48 100644 --- a/xrlint/plugin.py +++ b/xrlint/plugin.py @@ -4,13 +4,14 @@ from xrlint.config import Config from xrlint.processor import Processor, ProcessorOp, define_processor from xrlint.rule import Rule, RuleOp, define_rule -from xrlint.util.codec import MappingConstructible, JsonSerializable, JsonValue +from xrlint.util.constructible import MappingConstructible +from xrlint.util.serializable import JsonSerializable, JsonValue from xrlint.util.importutil import import_value @dataclass(kw_only=True) class PluginMeta(MappingConstructible, JsonSerializable): - """XRLint plugin metadata.""" + """Plugin metadata.""" name: str """Plugin name.""" @@ -26,20 +27,22 @@ class PluginMeta(MappingConstructible, JsonSerializable): """ @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "PluginMeta | dict" @dataclass(frozen=True, kw_only=True) class Plugin(MappingConstructible, JsonSerializable): - """An 
XRLint plugin.""" + """A plugin that can contribute rules, processors, + and predefined configurations to XRLint. + + Use the factory [new_plugin][xrlint.plugin.new_plugin] + to create plugin instances. + """ meta: PluginMeta """Information about the plugin.""" - configs: dict[str, Config] = field(default_factory=dict) - """A dictionary containing named configurations.""" - rules: dict[str, Rule] = field(default_factory=dict) """A dictionary containing the definitions of custom rules.""" @@ -47,12 +50,15 @@ class Plugin(MappingConstructible, JsonSerializable): """A dictionary containing named processors. """ + configs: dict[str, Config] = field(default_factory=dict) + """A dictionary containing named configurations.""" + def define_rule( self, name: str, version: str = "0.0.0", schema: dict[str, Any] | list[dict[str, Any]] | bool | None = None, - type: Literal["problem", "suggestion", "layout"] | None = None, + type: Literal["problem", "suggestion", "layout"] = "problem", description: str | None = None, docs_url: str | None = None, op_class: Type[RuleOp] | None = None, @@ -102,10 +108,37 @@ def _from_str(cls, value: str, value_name: str) -> "Plugin": return plugin @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "Plugin | dict | str" def to_json(self, value_name: str | None = None) -> JsonValue: if self.meta.ref: return self.meta.ref return super().to_json(value_name=value_name) + + +def new_plugin( + name: str, + version: str = "0.0.0", + ref: str | None = None, + rules: dict[str, Rule] | None = None, + processors: dict[str, Processor] | None = None, + configs: dict[str, Config] | None = None, +) -> Plugin: + """Create a new plugin object that can contribute rules, processors, + and predefined configurations to XRLint. + + Args: + name: Plugin name. Required. + version: Plugin version. Defaults to `"0.0.0"`. + ref: Plugin reference. Optional. + rules: A dictionary containing the definitions of custom rules. Optional. + processors: A dictionary containing custom processors. Optional. + configs: A dictionary containing predefined configurations. Optional. 
+ """ + return Plugin( + meta=PluginMeta(name=name, version=version, ref=ref), + rules=rules or {}, + processors=processors or {}, + configs=configs or {}, + ) diff --git a/xrlint/plugins/core/rules/__init__.py b/xrlint/plugins/core/rules/__init__.py index 1282ae3..934d6cf 100644 --- a/xrlint/plugins/core/rules/__init__.py +++ b/xrlint/plugins/core/rules/__init__.py @@ -1,13 +1,10 @@ from xrlint.constants import CORE_PLUGIN_NAME -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.version import version -plugin = Plugin( - meta=PluginMeta( - name=CORE_PLUGIN_NAME, - version=version, - ref="xrlint.plugins.core:export_plugin", - ) +plugin = new_plugin( + name=CORE_PLUGIN_NAME, + version=version, + ref="xrlint.plugins.core:export_plugin", ) diff --git a/xrlint/plugins/xcube/rules/__init__.py b/xrlint/plugins/xcube/rules/__init__.py index 18fe5ff..323ea84 100644 --- a/xrlint/plugins/xcube/rules/__init__.py +++ b/xrlint/plugins/xcube/rules/__init__.py @@ -1,12 +1,9 @@ -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.version import version -plugin = Plugin( - meta=PluginMeta( - name="xcube", - version=version, - ref="xrlint.plugins.xcube:export_plugin", - ) +plugin = new_plugin( + name="xcube", + version=version, + ref="xrlint.plugins.xcube:export_plugin", ) diff --git a/xrlint/processor.py b/xrlint/processor.py index b209075..dea1633 100644 --- a/xrlint/processor.py +++ b/xrlint/processor.py @@ -1,14 +1,11 @@ from abc import abstractmethod, ABC from dataclasses import dataclass -from inspect import isclass from typing import Type, Any, Callable import xarray as xr +from xrlint.operation import OperationMeta, Operation from xrlint.result import Message -from xrlint.util.codec import MappingConstructible -from xrlint.util.importutil import import_value -from xrlint.util.naming import to_kebab_case class ProcessorOp(ABC): @@ -49,7 +46,7 @@ def postprocess( @dataclass(kw_only=True) -class ProcessorMeta(MappingConstructible): +class ProcessorMeta(OperationMeta): """Processor metadata.""" name: str @@ -58,6 +55,9 @@ class ProcessorMeta(MappingConstructible): version: str = "0.0.0" """Processor version.""" + """Processor description. Optional.""" + description: str | None = None + ref: str | None = None """Processor reference. Specifies the location from where the processor can be @@ -66,12 +66,12 @@ class ProcessorMeta(MappingConstructible): """ @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "ProcessorMeta | dict" @dataclass(frozen=True, kw_only=True) -class Processor(MappingConstructible): +class Processor(Operation): """Processors tell XRLint how to process files other than standard xarray datasets. 
""" @@ -87,39 +87,18 @@ class Processor(MappingConstructible): # """`True` if this processor supports auto-fixing of datasets.""" @classmethod - def _from_type(cls, value: Type[ProcessorOp], value_name: str) -> "Processor": - # TODO: no test covers Processor._from_type - if issubclass(value, ProcessorOp): - # TODO: fix code duplication in Rule._from_class() - try: - # Note, the value.meta attribute is set by - # the define_rule - # noinspection PyUnresolvedReferences - return Processor(meta=value.meta, op_class=value) - except AttributeError: - raise ValueError( - f"missing processor metadata, apply define_processor()" - f" to class {value.__name__}" - ) - return super()._from_type(value, value_name) + def meta_class(cls) -> Type: + return ProcessorMeta @classmethod - def _from_str(cls, value: str, value_name: str) -> "Processor": - processor, processor_ref = import_value( - value, - "export_processor", - factory=Processor.from_value, - ) - # noinspection PyUnresolvedReferences - processor.meta.ref = processor_ref - return processor + def op_base_class(cls) -> Type: + return ProcessorOp @classmethod - def _get_value_type_name(cls) -> str: - return "str | dict | Processor | Type[ProcessorOp]" + def op_name(cls) -> str: + return "processor" -# TODO: fix this code duplication in define_rule() def define_processor( name: str | None = None, version: str = "0.0.0", @@ -142,7 +121,7 @@ def define_processor( see [ProcessorMeta][xrlint.processor.ProcessorMeta]. registry: Processor registry. Can be provided to register the defined processor using its `name`. - op_class: Processor operation class. Must not be provided + op_class: Processor operation class. Must be `None` if this function is used as a class decorator. Returns: @@ -153,27 +132,6 @@ def define_processor( TypeError: If either `op_class` or the decorated object is not a a class derived from [ProcessorOp][xrlint.processor.ProcessorOp]. 
""" - - def _define_processor( - _op_class: Any, no_deco=False - ) -> Type[ProcessorOp] | Processor: - if not isclass(_op_class) or not issubclass(_op_class, ProcessorOp): - raise TypeError( - f"component decorated by define_processor()" - f" must be a subclass of {ProcessorOp.__name__}" - ) - meta = ProcessorMeta( - name=name or to_kebab_case(_op_class.__name__), - version=version, - ) - setattr(_op_class, "meta", meta) - processor = Processor(meta=meta, op_class=_op_class) - if registry is not None: - registry[meta.name] = processor - return processor if no_deco else _op_class - - if op_class is None: - # decorator case - return _define_processor - else: - return _define_processor(op_class, no_deco=True) + return Processor.define_operation( + op_class, registry=registry, meta_kwargs=dict(name=name, version=version) + ) diff --git a/xrlint/result.py b/xrlint/result.py index 0203ea2..1309c05 100644 --- a/xrlint/result.py +++ b/xrlint/result.py @@ -8,7 +8,7 @@ from xrlint.constants import SEVERITY_CODE_TO_NAME, MISSING_DATASET_FILE_PATH from xrlint.constants import SEVERITY_ERROR from xrlint.constants import SEVERITY_WARN -from xrlint.util.codec import JsonSerializable +from xrlint.util.serializable import JsonSerializable from xrlint.util.formatting import format_problems from xrlint.util.formatting import format_message_type_of diff --git a/xrlint/rule.py b/xrlint/rule.py index 519b9be..4258154 100644 --- a/xrlint/rule.py +++ b/xrlint/rule.py @@ -1,7 +1,6 @@ from abc import abstractmethod, ABC from collections.abc import MutableMapping, Sequence from dataclasses import dataclass, field -from inspect import isclass from typing import Type, Literal, Any, Callable import xarray as xr @@ -11,15 +10,13 @@ SEVERITY_ENUM_TEXT, ) from xrlint.node import DatasetNode, DataArrayNode, AttrsNode, AttrNode +from xrlint.operation import OperationMeta, Operation from xrlint.result import Suggestion -from xrlint.util.codec import ( - MappingConstructible, +from xrlint.util.constructible import ( ValueConstructible, - JsonSerializable, ) +from xrlint.util.serializable import JsonSerializable from xrlint.util.formatting import format_message_one_of -from xrlint.util.importutil import import_value -from xrlint.util.naming import to_kebab_case class RuleContext(ABC): @@ -129,7 +126,7 @@ def attr(self, context: RuleContext, node: AttrNode) -> None: @dataclass(kw_only=True) -class RuleMeta(MappingConstructible, JsonSerializable): +class RuleMeta(OperationMeta): """Rule metadata.""" name: str @@ -184,27 +181,13 @@ class RuleMeta(MappingConstructible, JsonSerializable): by the rule’s implementation and its configured severity. """ - ref: str | None = None - """Rule reference. - Specifies the location from where the rule can be - dynamically imported. - Must have the form ":", if given. - """ - @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "RuleMeta | dict" - def to_dict(self, value_name: str | None = None) -> dict[str, str]: - return { - k: v - for k, v in super().to_dict(value_name=value_name).items() - if v is not None - } - @dataclass(frozen=True) -class Rule(MappingConstructible, JsonSerializable): +class Rule(Operation): """A rule comprises rule metadata and a reference to the class that implements the rule's logic. @@ -225,37 +208,16 @@ class that implements the rule's logic. 
""" @classmethod - def _from_str(cls, value: str, value_name: str) -> "Rule": - rule, rule_ref = import_value(value, "export_rule", factory=Rule.from_value) - rule.meta.ref = rule_ref - return rule + def meta_class(cls) -> Type: + return RuleMeta @classmethod - def _from_type(cls, value: Type, value_name: str) -> "Rule": - if issubclass(value, RuleOp): - op_class = value - try: - # noinspection PyUnresolvedReferences - # Note, the value.meta attribute is set by - # the define_rule() function. - meta = value.meta - except AttributeError: - raise ValueError( - f"missing rule metadata, apply define_rule()" - f" to class {value.__name__}" - ) - return Rule(meta=meta, op_class=op_class) - super()._from_type(value, value_name) + def op_base_class(cls) -> Type: + return RuleOp @classmethod - def _get_value_type_name(cls) -> str: - return "Rule | dict | str" - - # noinspection PyUnusedLocal - def to_json(self, value_name: str | None = None) -> str: - if self.meta.ref: - return self.meta.ref - return super().to_json(value_name=value_name) + def op_name(cls) -> str: + return "rule" @dataclass(frozen=True) @@ -331,11 +293,11 @@ def _from_sequence(cls, value: Sequence, value_name: str) -> "RuleConfig": return RuleConfig(severity, tuple(args), dict(kwargs)) @classmethod - def _get_value_name(cls) -> str: + def value_name(cls) -> str: return "rule configuration" @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: return "int | str | list" # noinspection PyUnusedLocal @@ -349,10 +311,10 @@ def to_json(self, value_name: str | None = None) -> int | list: def define_rule( name: str | None = None, version: str = "0.0.0", - schema: dict[str, Any] | list[dict[str, Any]] | bool | None = None, - type: Literal["problem", "suggestion", "layout"] | None = None, + type: Literal["problem", "suggestion", "layout"] = "problem", description: str | None = None, docs_url: str | None = None, + schema: dict[str, Any] | list[dict[str, Any]] | bool | None = None, registry: MutableMapping[str, Rule] | None = None, op_class: Type[RuleOp] | None = None, ) -> Callable[[Any], Type[RuleOp]] | Rule: @@ -367,13 +329,13 @@ def define_rule( Args: name: Rule name, see [RuleMeta][xrlint.rule.RuleMeta]. version: Rule version, see [RuleMeta][xrlint.rule.RuleMeta]. - schema: Rule operation arguments schema, - see [RuleMeta][xrlint.rule.RuleMeta]. type: Rule type, see [RuleMeta][xrlint.rule.RuleMeta]. description: Rule description, see [RuleMeta][xrlint.rule.RuleMeta]. docs_url: Rule documentation URL, see [RuleMeta][xrlint.rule.RuleMeta]. + schema: Rule operation arguments schema, + see [RuleMeta][xrlint.rule.RuleMeta]. registry: Rule registry. Can be provided to register the defined rule using its `name`. op_class: Rule operation class. Must not be provided @@ -387,34 +349,15 @@ def define_rule( TypeError: If either `op_class` or the decorated object is not a class derived from [RuleOp][xrlint.rule.RuleOp]. 
""" - - def _define_rule(_op_class: Type[RuleOp], no_deco=False) -> Type[RuleOp] | Rule: - if not isclass(_op_class) or not issubclass(_op_class, RuleOp): - raise TypeError( - f"component decorated by define_rule()" - f" must be a subclass of {RuleOp.__name__}" - ) - meta = RuleMeta( - name=name or to_kebab_case(_op_class.__name__), + return Rule.define_operation( + op_class, + registry=registry, + meta_kwargs=dict( + name=name, version=version, - description=description or _op_class.__doc__, + description=description, docs_url=docs_url, - type=type if type is not None else "problem", - # TODO: if schema not given, - # derive it from _op_class' ctor arguments + type=type if type else "problem", schema=schema, - ) - # Register rule metadata in rule operation class - setattr(_op_class, "meta", meta) - rule = Rule(meta=meta, op_class=_op_class) - if registry is not None: - # Register rule in rule registry - registry[meta.name] = rule - return rule if no_deco else _op_class - - if op_class is None: - # decorator case: return decorated class - return _define_rule - else: - # called as function: return new rule - return _define_rule(op_class, no_deco=True) + ), + ) diff --git a/xrlint/testing.py b/xrlint/testing.py index efeb9c3..d82cc72 100644 --- a/xrlint/testing.py +++ b/xrlint/testing.py @@ -1,13 +1,12 @@ import unittest from dataclasses import dataclass -from typing import Type, Callable, Literal, Any +from typing import Any, Callable, Final, Literal, Type import xarray as xr from xrlint.constants import SEVERITY_ERROR from xrlint.linter import Linter -from xrlint.plugin import Plugin -from xrlint.plugin import PluginMeta +from xrlint.plugin import new_plugin from xrlint.result import Message from xrlint.result import Result from xrlint.rule import Rule @@ -16,6 +15,8 @@ from xrlint.util.naming import to_snake_case from xrlint.util.formatting import format_problems +_PLUGIN_NAME: Final = "testing" + @dataclass(frozen=True, kw_only=True) class RuleTest: @@ -164,9 +165,9 @@ def _test_rule( result = linter.verify_dataset( test.dataset, plugins={ - "test": ( - Plugin( - meta=PluginMeta(name="test"), + _PLUGIN_NAME: ( + new_plugin( + name=_PLUGIN_NAME, rules={ rule_name: Rule( meta=RuleMeta(name=rule_name), op_class=rule_op_class @@ -176,7 +177,7 @@ def _test_rule( ) }, rules={ - f"test/{rule_name}": ( + f"{_PLUGIN_NAME}/{rule_name}": ( [severity, *(test.args or ()), (test.kwargs or {})] if test.args or test.kwargs else severity diff --git a/xrlint/util/codec.py b/xrlint/util/constructible.py similarity index 76% rename from xrlint/util/codec.py rename to xrlint/util/constructible.py index 193e780..1955d24 100644 --- a/xrlint/util/codec.py +++ b/xrlint/util/constructible.py @@ -8,7 +8,6 @@ Generic, TypeVar, Type, - TypeAlias, Union, get_origin, get_args, @@ -19,12 +18,6 @@ from xrlint.util.formatting import format_message_type_of, format_message_one_of -JSON_VALUE_TYPE_NAME = "None | bool | int | float | str | dict | list" - -JsonValue: TypeAlias = ( - None | bool | int | float | str | dict[str, "JsonValue"] | list["JsonValue"] -) - T = TypeVar("T") _IS_PYTHON_3_10 = (3, 10) <= sys.version_info < (3, 11) @@ -34,6 +27,12 @@ class ValueConstructible(Generic[T]): """A mixin that makes your classes constructible from a single value of any type. + Implementing classes override one of the many `_from_()` + class methods to implement support converting from values of + type ``. 
They may use the [_from_typed_value][] to convert values + from values with given type annotations, such as object properties + or constructor parameters. + The factory for this purpose is the class method [from_value][xrlint.util.codec.ValueConstructible.from_value]. """ @@ -59,7 +58,7 @@ def from_value(cls, value: Any, value_name: str | None = None) -> T: Raises: TypeError: If `value` cannot be converted. """ - value_name = value_name or cls._get_value_name() + value_name = value_name or cls.value_name() if isinstance(value, cls): return value if value is None: @@ -77,10 +76,7 @@ def from_value(cls, value: Any, value_name: str | None = None) -> T: if isinstance(value, Sequence): return cls._from_sequence(value, value_name) if isclass(value): - if issubclass(value, cls): - return cls._from_class(value, value_name) - else: - return cls._from_type(value, value_name) + return cls._from_class(value, value_name) return cls._from_other(value, value_name) @classmethod @@ -124,16 +120,7 @@ def _from_str(cls, value: str, value_name: str) -> T: raise TypeError(cls._format_type_error(value, value_name)) @classmethod - def _from_class(cls, value: Type[T], value_name: str) -> T: - """Create an instance of this class from a class value - that is a subclass of `cls`. - The default implementation raises a `TypeError`. - Override to implement a different behaviour. - """ - raise TypeError(cls._format_type_error(value, value_name)) - - @classmethod - def _from_type(cls, value: Type, value_name: str) -> T: + def _from_class(cls, value: Type, value_name: str) -> T: """Create an instance of this class from a type value. The default implementation raises a `TypeError`. Override to implement a different behaviour. @@ -166,7 +153,9 @@ def _from_sequence(cls, value: Sequence, value_name: str) -> T: raise TypeError(cls._format_type_error(value, value_name)) @classmethod - def _convert_value(cls, value: Any, type_annotation: Any, value_name: str) -> Any: + def _convert_typed_value( + cls, value: Any, type_annotation: Any, value_name: str + ) -> Any: """To be used by subclasses that wish to convert a value with known type for the target value. @@ -211,7 +200,7 @@ def _convert_value(cls, value: Any, type_annotation: Any, value_name: str) -> An errors = [] for type_arg in type_args: try: - return cls._convert_value(value, type_arg, value_name) + return cls._convert_typed_value(value, type_arg, value_name) except (TypeError, ValueError) as e: errors.append((type_arg, e)) # Note, the error message constructed here is suboptimal. @@ -247,7 +236,7 @@ def _convert_value(cls, value: Any, type_annotation: Any, value_name: str) -> An f"keys of {value_name}", k, key_type ) ) - mapping_value[k] = cls._convert_value( + mapping_value[k] = cls._convert_typed_value( v, item_type, f"{value_name}[{k!r}]" ) return mapping_value @@ -256,7 +245,7 @@ def _convert_value(cls, value: Any, type_annotation: Any, value_name: str) -> An item_type = type_args[0] if type_args else Any # noinspection PyTypeChecker return [ - cls._convert_value(v, item_type, f"{value_name}[{i}]") + cls._convert_typed_value(v, item_type, f"{value_name}[{i}]") for i, v in enumerate(value) ] return value @@ -267,17 +256,16 @@ def _convert_value(cls, value: Any, type_annotation: Any, value_name: str) -> An @classmethod @lru_cache(maxsize=1000) - def _get_class_parameters(cls) -> Mapping[str, Parameter]: + def class_parameters(cls) -> Mapping[str, Parameter]: """Get the type-resolved parameters of this class' constructor. The method returns a cached value for `cls`. 
Can be used by subclasses to process annotations. """ - forward_refs = cls._get_forward_refs() - return get_class_parameters(cls, forward_refs=forward_refs) + return get_class_parameters(cls, forward_refs=cls.forward_refs()) @classmethod - def _get_forward_refs(cls) -> Optional[Mapping[str, type]]: + def forward_refs(cls) -> Optional[Mapping[str, type]]: """Get an extra namespace to be used for resolving parameter type hints. Called from [ValueConstructible._get_class_parameters][]. @@ -288,7 +276,7 @@ def _get_forward_refs(cls) -> Optional[Mapping[str, type]]: return None @classmethod - def _get_value_name(cls) -> str: + def value_name(cls) -> str: """Get an identifier for values that can be used to create instances of this class. @@ -298,7 +286,7 @@ def _get_value_name(cls) -> str: return "value" @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: """Get a descriptive name for the value types that can be used to create instances of this class, e.g., `"Rule | str"`. @@ -319,7 +307,7 @@ def _process_annotation( type_origin = prop_annotation type_args = () if _IS_PYTHON_3_10: # pragma: no cover - forward_refs = cls._get_forward_refs() + forward_refs = cls.forward_refs() type_origin = cls._resolve_forward_ref(forward_refs, type_origin) type_args = tuple( cls._resolve_forward_ref(forward_refs, type_arg) @@ -336,7 +324,7 @@ def _resolve_forward_ref(cls, namespace, ref: Any) -> Any: # pragma: no cover @classmethod def _format_type_error(cls, value: Any, value_name: str) -> str: - return format_message_type_of(value_name, value, cls._get_value_type_name()) + return format_message_type_of(value_name, value, cls.value_type_name()) class MappingConstructible(Generic[T], ValueConstructible[T]): @@ -362,7 +350,7 @@ def _from_mapping(cls, mapping: Mapping, value_name: str) -> T: """Create an instance of this class from a mapping value.""" mapping_keys = set(mapping.keys()) - properties = cls._get_class_parameters() + properties = cls.class_parameters() args = [] kwargs = {} @@ -375,9 +363,10 @@ def _from_mapping(cls, mapping: Mapping, value_name: str) -> T: else: prop_annotation = prop_param.annotation - prop_value = cls._convert_value( + prop_value = cls._convert_property_value( mapping[prop_name], prop_annotation, + prop_name, value_name=f"{value_name}.{prop_name}", ) if prop_param.kind == Parameter.POSITIONAL_ONLY: @@ -389,7 +378,7 @@ def _from_mapping(cls, mapping: Mapping, value_name: str) -> T: ) or prop_param.kind == Parameter.POSITIONAL_ONLY: raise TypeError( f"missing value for required property {value_name}.{prop_name}" - f" of type {cls._get_value_type_name()}" + f" of type {cls.value_type_name()}" ) if mapping_keys: @@ -414,8 +403,35 @@ def _from_mapping(cls, mapping: Mapping, value_name: str) -> T: # noinspection PyArgumentList return cls(*args, **kwargs) + # noinspection PyUnusedLocal + @classmethod + def _convert_property_value( + cls, value: Any, prop_annotation: Any, prop_name: str, value_name: str + ) -> Any: + """Convert a property value to the given type. + + May be overridden by subclasses that wish to perform a special + conversion for a specific property. + + Args: + value: The mapping's item value to convert to an instance of the + type specified by `prop_annotation`. + prop_annotation: The property annotation representing the + target type. + prop_name: The property name. May be used by overrides. + value_name: An identifier for `value`. + + Returns: + The converted property value. 
+ """ + return cls._convert_typed_value( + value, + prop_annotation, + value_name=value_name, + ) + @classmethod - def _get_value_type_name(cls) -> str: + def value_type_name(cls) -> str: """Get a descriptive name for the value types that can be used to create instances of this class, e.g., `"Rule | str"`. Defaults to `f"{cls.__name__} | dict[str, Any]"`. @@ -453,86 +469,3 @@ def get_class_parameters( ) return resolved_params - - -class JsonSerializable: - """A mixin that makes your classes serializable to JSON values - and JSON-serializable dictionaries. - - It adds two methods: - - * [to_json][JsonSerializable.to_json] converts to JSON values - * [to_dict][JsonSerializable.to_dict] converts to JSON-serializable - dictionaries - - """ - - def to_json(self, value_name: str | None = None) -> JsonValue: - """Convert this object into a JSON value. - - The default implementation calls `self.to_dict()` and returns - its value as-is. - """ - return self.to_dict(value_name=value_name) - - def to_dict(self, value_name: str | None = None) -> dict[str, JsonValue]: - """Convert this object into a JSON-serializable dictionary. - - The default implementation naively serializes the non-protected - attributes of this object's dictionary given by `vars(self)`. - """ - return self._object_to_json(self, value_name or type(self).__name__) - - @classmethod - def _value_to_json(cls, value: Any, value_name: str) -> JsonValue: - if value is None: - return None - if isinstance(value, JsonSerializable): - return value.to_json(value_name=value_name) - if isinstance(value, bool): - return bool(value) - if isinstance(value, int): - return int(value) - if isinstance(value, float): - return float(value) - if isinstance(value, str): - return str(value) - if isinstance(value, Mapping): - return cls._mapping_to_json(value, value_name) - if isinstance(value, Sequence): - return cls._sequence_to_json(value, value_name) - if isinstance(value, type): - return repr(value) - raise TypeError(format_message_type_of(value_name, value, JSON_VALUE_TYPE_NAME)) - - @classmethod - def _object_to_json(cls, value: Any, value_name: str) -> dict[str, JsonValue]: - return { - k: cls._value_to_json(v, f"{value_name}.{k}") - for k, v in vars(value).items() - if cls._is_non_protected_property_name(k) - } - - @classmethod - def _mapping_to_json( - cls, mapping: Mapping, value_name: str - ) -> dict[str, JsonValue]: - return { - str(k): cls._value_to_json(v, f"{value_name}[{k!r}]") - for k, v in mapping.items() - } - - @classmethod - def _sequence_to_json(cls, sequence: Sequence, value_name: str) -> list[JsonValue]: - return [ - cls._value_to_json(v, f"{value_name}[{i}]") for i, v in enumerate(sequence) - ] - - @classmethod - def _is_non_protected_property_name(cls, key: Any) -> bool: - return ( - isinstance(key, str) - and key.isidentifier() - and not key[0].isupper() - and not key[0] == "_" - ) diff --git a/xrlint/util/importutil.py b/xrlint/util/importutil.py index 79d816c..ee4e847 100644 --- a/xrlint/util/importutil.py +++ b/xrlint/util/importutil.py @@ -74,7 +74,7 @@ def import_value( ValueImportError: if the value could not be imported """ if ":" in module_ref: - module_name, attr_ref = module_ref + module_name, attr_ref = module_ref.rsplit(":", maxsplit=1) else: module_name = module_ref if attr_ref: @@ -105,7 +105,7 @@ def import_value( # noinspection PyCallingNonCallable exported_value = attr_value() else: - exported_value = attr_ref + exported_value = attr_value if factory is not None: try: diff --git a/xrlint/util/serializable.py 
b/xrlint/util/serializable.py new file mode 100644 index 0000000..ef26ee7 --- /dev/null +++ b/xrlint/util/serializable.py @@ -0,0 +1,93 @@ +from typing import Any, Final, Mapping, Sequence, TypeAlias + +from xrlint.util.formatting import format_message_type_of + + +JSON_VALUE_TYPE_NAME: Final = "None | bool | int | float | str | dict | list" + +JsonValue: TypeAlias = ( + None | bool | int | float | str | dict[str, "JsonValue"] | list["JsonValue"] +) + + +class JsonSerializable: + """A mixin that makes your classes serializable to JSON values + and JSON-serializable dictionaries. + + It adds two methods: + + * [to_json][JsonSerializable.to_json] converts to JSON values + * [to_dict][JsonSerializable.to_dict] converts to JSON-serializable + dictionaries + + """ + + def to_json(self, value_name: str | None = None) -> JsonValue: + """Convert this object into a JSON value. + + The default implementation calls `self.to_dict()` and returns + its value as-is. + """ + return self.to_dict(value_name=value_name) + + def to_dict(self, value_name: str | None = None) -> dict[str, JsonValue]: + """Convert this object into a JSON-serializable dictionary. + + The default implementation naively serializes the non-protected + attributes of this object's dictionary given by `vars(self)`. + """ + return self._object_to_json(self, value_name or type(self).__name__) + + @classmethod + def _value_to_json(cls, value: Any, value_name: str) -> JsonValue: + if value is None: + return None + if isinstance(value, JsonSerializable): + return value.to_json(value_name=value_name) + if isinstance(value, bool): + return bool(value) + if isinstance(value, int): + return int(value) + if isinstance(value, float): + return float(value) + if isinstance(value, str): + return str(value) + if isinstance(value, Mapping): + return cls._mapping_to_json(value, value_name) + if isinstance(value, Sequence): + return cls._sequence_to_json(value, value_name) + if isinstance(value, type): + return repr(value) + raise TypeError(format_message_type_of(value_name, value, JSON_VALUE_TYPE_NAME)) + + @classmethod + def _object_to_json(cls, value: Any, value_name: str) -> dict[str, JsonValue]: + return { + k: cls._value_to_json(v, f"{value_name}.{k}") + for k, v in vars(value).items() + if cls._is_non_protected_property_name(k) + } + + @classmethod + def _mapping_to_json( + cls, mapping: Mapping, value_name: str + ) -> dict[str, JsonValue]: + return { + str(k): cls._value_to_json(v, f"{value_name}[{k!r}]") + for k, v in mapping.items() + } + + @classmethod + def _sequence_to_json(cls, sequence: Sequence, value_name: str) -> list[JsonValue]: + return [ + cls._value_to_json(v, f"{value_name}[{i}]") for i, v in enumerate(sequence) + ] + + @classmethod + def _is_non_protected_property_name(cls, key: Any) -> bool: + return ( + isinstance(key, str) + and key.isidentifier() + and not key[0].isupper() + and not key[0] == "_" + )
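
Illustrative sketch (not part of the change set above): how the new `new_plugin` factory and the `define_rule` decorator introduced by this change are meant to be combined. The plugin and rule names are made up for illustration, the rule body is a placeholder, and the registration of the rule under `plugin.rules` is assumed behavior not shown in the diff.

```python
from xrlint.node import DatasetNode
from xrlint.plugin import new_plugin
from xrlint.rule import RuleContext, RuleOp

# Create a plugin via the new factory instead of Plugin(meta=PluginMeta(...)).
plugin = new_plugin(name="hello-plugin", version="1.0.0")


@plugin.define_rule(
    "good-title",
    description="Datasets should have a 'title' attribute.",
)
class GoodTitle(RuleOp):
    def dataset(self, ctx: RuleContext, node: DatasetNode):
        # Placeholder: a real rule would inspect the dataset node here and
        # report problems through the RuleContext API.
        pass


# Assumed: define_rule() registers the rule in the plugin's rule registry.
assert "good-title" in plugin.rules
```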