diff --git a/doc/build/dts/macros.bnf b/doc/build/dts/macros.bnf index f5e676f8f44a3..d775d1f35eb59 100644 --- a/doc/build/dts/macros.bnf +++ b/doc/build/dts/macros.bnf @@ -42,6 +42,7 @@ node-macro =/ %s"DT_N" path-id %s"_IRQ_LEVEL" node-macro =/ %s"DT_N" path-id %s"_IRQ_IDX_" DIGIT "_EXISTS" node-macro =/ %s"DT_N" path-id %s"_IRQ_IDX_" DIGIT %s"_VAL_" dt-name [ %s"_EXISTS" ] +node-macro =/ %s"DT_N" path-id %s"_IRQ_IDX_" DIGIT %s"_CONTROLLER" node-macro =/ %s"DT_N" path-id %s"_CONTROLLER" node-macro =/ %s"DT_N" path-id %s"_IRQ_NAME_" dt-name %s"_VAL_" dt-name [ %s"_EXISTS" ] @@ -57,7 +58,7 @@ node-macro =/ %s"DT_N" path-id %s"_RANGES_IDX_" DIGIT node-macro =/ %s"DT_N" path-id %s"_FOREACH_RANGE" ; Subnodes of the fixed-partitions compatible get macros which contain ; a unique ordinal value for each partition -node-macro =/ %s"DT_N" path-id %s"_PARTITION_ID" DIGIT +node-macro =/ %s"DT_N" path-id %s"_PARTITION_ID" ; Macros are generated for each of a node's compatibles; ; dt-name in this case is something like "vnd_device". node-macro =/ %s"DT_N" path-id %s"_COMPAT_MATCHES_" dt-name @@ -74,6 +75,44 @@ node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM" node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM_SEP" node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM_VARGS" node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM_SEP_VARGS" +; Map properties generate additional macros consumed by DT_MAP_* APIs. +; The following examples assume something like this mapping nexus: +; +; connector { +; gpio-map = <1 2 &{/gpio-map-test/parent} 3 +; 4 5 &{/gpio-map-test/parent} 6>; +; }; +; +; Total number of entries in the mapping array. +; +; #define DT_N__P_gpio_map_MAP_LEN 2 +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_LEN" +; Each mapping entry expands to the child specifier cells, the parent node, +; and the parent specifier cells. DT_MAP_BY_IDX() retrieves this list. +; +; #define DT_N__P_gpio_map_MAP_IDX_0 1, 2, DT_N_, 3 +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT +; Offsets for the child specifier cells within an entry. These support +; DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(), which slices out just those cells. +; +; #define DT_N__P_gpio_map_MAP_IDX_0_CHILD_SPECIFIER_POS 0 +; #define DT_N__P_gpio_map_MAP_IDX_0_CHILD_SPECIFIER_LEN 2 +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_CHILD_SPECIFIER_POS" +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_CHILD_SPECIFIER_LEN" +; Offsets for the parent node argument. DT_MAP_PARENT_ARG_BY_IDX() uses +; these to extract the parent node identifier. +; +; #define DT_N__P_gpio_map_MAP_IDX_0_PARENT_POS 2 +; #define DT_N__P_gpio_map_MAP_IDX_0_PARENT_LEN 1 +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_POS" +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_LEN" +; Offsets for the parent specifier cells used by +; DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(). 
+; +; #define DT_N__P_gpio_map_MAP_IDX_0_PARENT_SPECIFIER_POS 3 +; #define DT_N__P_gpio_map_MAP_IDX_0_PARENT_SPECIFIER_LEN 1 +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_SPECIFIER_POS" +node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_SPECIFIER_LEN" ; These are used by DT_CHILD_NUM and DT_CHILD_NUM_STATUS_OKAY macros node-macro =/ %s"DT_N" path-id %s"_CHILD_NUM" node-macro =/ %s"DT_N" path-id %s"_CHILD_NUM_STATUS_OKAY" @@ -106,10 +145,20 @@ node-macro =/ %s"DT_N" path-id %s"_ORD" node-macro =/ %s"DT_N" path-id %s"_PATH" ; The node's name@unit-addr, as a string literal node-macro =/ %s"DT_N" path-id %s"_FULL_NAME" +; The node's name@unit-addr without surrounding quotes. +node-macro =/ %s"DT_N" path-id %s"_FULL_NAME_UNQUOTED" +; Tokenized variants of the node's name@unit-addr. +node-macro =/ %s"DT_N" path-id %s"_FULL_NAME_TOKEN" +node-macro =/ %s"DT_N" path-id %s"_FULL_NAME_UPPER_TOKEN" ; The dependency ordinals of a node's requirements (direct dependencies). node-macro =/ %s"DT_N" path-id %s"_REQUIRES_ORDS" ; The dependency ordinals of a node supports (reverse direct dependencies). node-macro =/ %s"DT_N" path-id %s"_SUPPORTS_ORDS" +; Helper macros that iterate over the node's ancestors. +node-macro =/ %s"DT_N" path-id %s"_FOREACH_ANCESTOR" +; Node specific hash and sortable dependency strings. +node-macro =/ %s"DT_N" path-id %s"_HASH" +node-macro =/ %s"DT_N" path-id %s"_ORD_STR_SORTABLE" ; -------------------------------------------------------------------- ; pinctrl-macro: a macro related to the pinctrl properties in a node @@ -136,6 +185,11 @@ pinctrl-macro = %s"DT_N" path-id %s"_PINCTRL_NUM" ; #define DT_N__PINCTRL_IDX_0_EXISTS 1 ; #define DT_N__PINCTRL_IDX_1_EXISTS 1 pinctrl-macro =/ %s"DT_N" path-id %s"_PINCTRL_IDX_" DIGIT %s"_EXISTS" +; Tokenized variants of a given pinctrl-DIGIT property. +; These mirror the node FULL_NAME token macros to make use of existing +; helpers that expect both lower and upper token forms. +pinctrl-macro =/ %s"DT_N" path-id %s"_PINCTRL_IDX_" DIGIT %s"_TOKEN" +pinctrl-macro =/ %s"DT_N" path-id %s"_PINCTRL_IDX_" DIGIT %s"_UPPER_TOKEN" ; A given pinctrl property name exists. ; ; #define DT_N__PINCTRL_NAME_default_EXISTS 1 @@ -187,12 +241,12 @@ pinctrl-macro =/ %s"DT_N" path-id %s"_PINCTRL_NAME_" dt-name %s"_IDX_" DIGIT %s" ; ; #define DT_N__GPIO_HOGS_EXISTS 1 ; #define DT_N__GPIO_HOGS_EXISTS 1 -gpioshogs-macro = %s"DT_N" path-id %s"_GPIO_HOGS_EXISTS" +gpiohogs-macro = %s"DT_N" path-id %s"_GPIO_HOGS_EXISTS" ; Number of hogged GPIOs in a node. ; ; #define DT_N__GPIO_HOGS_NUM 2 ; #define DT_N__GPIO_HOGS_NUM 1 -gpioshogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_NUM" +gpiohogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_NUM" ; A given logical GPIO hog array index exists. ; ; #define DT_N__GPIO_HOGS_IDX_0_EXISTS 1 @@ -206,6 +260,10 @@ gpiohogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_IDX_" DIGIT %s"_EXISTS" ; #define DT_N__GPIO_HOGS_IDX_1_PH ; #define DT_N__GPIO_HOGS_IDX_0_PH gpiohogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_IDX_" DIGIT %s"_PH" +; Iteration helpers for the specifier cells exposed by DT_FOREACH_* macros. +gpiohogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_IDX_" DIGIT %s"_FOREACH_CELL" +gpiohogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_IDX_" DIGIT %s"_FOREACH_CELL_SEP" +gpiohogs-macro =/ %s"DT_N" path-id %s"_GPIO_HOGS_IDX_" DIGIT %s"_NUM_CELLS" ; The pin cell of a logical index in the GPIO hogs array exists. 
; ; #define DT_N__GPIO_HOGS_IDX_0_VAL_pin_EXISTS 1 @@ -272,8 +330,9 @@ property-macro = %s"DT_N" path-id %s"_P_" prop-id [prop-suf] ; }; ; }; ; -; has path-id "_S_foo_123_S_bar_baz". -path-id = 1*( %s"_S_" dt-name ) +; has path-id "_S_foo_123_S_bar_baz". The root node "/" has an empty +; path-id, which results in a bare "DT_N" identifier. +path-id = *( %s"_S_" dt-name ) ; ---------------------------------------------------------------------- ; prop-id: a property identifier @@ -341,6 +400,7 @@ other-macro =/ %s"DT_FOREACH_OKAY_INST_" dt-name other-macro =/ %s"DT_FOREACH_OKAY_INST_VARGS_" dt-name ; E.g.: #define DT_CHOSEN_zephyr_flash other-macro =/ %s"DT_CHOSEN_" dt-name +other-macro =/ %s"DT_CHOSEN_" dt-name %s"_EXISTS" ; Declares that a compatible has at least one node on a bus. ; Example: ; @@ -354,7 +414,12 @@ other-macro =/ %s"DT_COMPAT_HAS_OKAY_" dt-name ; Currently used to allow mapping a lowercase-and-underscores "label" ; property to a fixed-partitions node. See the flash map API docs ; for an example. -other-macro =/ %s"DT_COMPAT_" dt-name %s"_LABEL_" dt-name +other-macro =/ %s"DT_COMPAT_" dt-name %s"_LABEL_" dt-name [ %s"_EXISTS" ] +; Helper macros expanded while iterating over nodes. +other-macro =/ %s"DT_FOREACH_VARGS_HELPER" +other-macro =/ %s"DT_FOREACH_OKAY_VARGS_HELPER" +; Removes brackets while expanding variadic helper macros. +other-macro =/ %s"DT_DEBRACKET_INTERNAL" ; -------------------------------------------------------------------- ; alternate-id: another way to specify a node besides a path-id diff --git a/doc/tools/check_dt_macros.py b/doc/tools/check_dt_macros.py new file mode 100644 index 0000000000000..52c0f3089a865 --- /dev/null +++ b/doc/tools/check_dt_macros.py @@ -0,0 +1,410 @@ +#!/usr/bin/env python3 +"""Validate generated devicetree macros against macros.bnf grammar.""" + +from __future__ import annotations + +import argparse +import pathlib +import re +import sys +from collections.abc import Callable, Iterable, Iterator +from dataclasses import dataclass +from typing import Final, NamedTuple + +BNF_DEFAULT: Final[pathlib.Path] = pathlib.Path("doc/build/dts/macros.bnf") +RULE_DEFAULT: Final[str] = "dt-macro" + + +@dataclass(frozen=True) +class MacroDefinition: + """Representation of a ``#define`` discovered in an input file.""" + + path: pathlib.Path + line: int + name: str + parameters: tuple[str, ...] 
+ + +@dataclass(frozen=True) +class MacroViolation: + """Information about a macro whose name does not match the grammar.""" + + macro: MacroDefinition + reason: str + + +class _Token(NamedTuple): + kind: str + value: object + + +class _Node: + def to_regex(self, resolve: Callable[[str], str]) -> str: + raise NotImplementedError + + +@dataclass +class _Literal(_Node): + value: str + + def to_regex(self, resolve: Callable[[str], str]) -> str: # noqa: D401 + return re.escape(self.value) + + +@dataclass +class _Range(_Node): + start: int + end: int + + def to_regex(self, resolve: Callable[[str], str]) -> str: + start = re.escape(chr(self.start)) + end = re.escape(chr(self.end)) + if start == end: + return start + return f"[{start}-{end}]" + + +@dataclass +class _RuleRef(_Node): + name: str + + def to_regex(self, resolve: Callable[[str], str]) -> str: + return resolve(self.name) + + +@dataclass +class _Sequence(_Node): + elements: list[_Node] + + def to_regex(self, resolve: Callable[[str], str]) -> str: + return "".join(element.to_regex(resolve) for element in self.elements) + + +@dataclass +class _Alternation(_Node): + options: list[_Node] + + def to_regex(self, resolve: Callable[[str], str]) -> str: + rendered = [option.to_regex(resolve) for option in self.options] + if not rendered: + return "" + if len(rendered) == 1: + return rendered[0] + return f"(?:{'|'.join(rendered)})" + + +@dataclass +class _Repetition(_Node): + element: _Node + minimum: int + maximum: int | None + + def to_regex(self, resolve: Callable[[str], str]) -> str: + body = self.element.to_regex(resolve) + def grouped(text: str) -> str: + if text.startswith("(?:") and text.endswith(")"): + return text + if len(text) == 1: + return text + return f"(?:{text})" + + body_grouped = grouped(body) + if self.minimum == 0 and self.maximum == 1: + return f"{body_grouped}?" + if self.minimum == 0 and self.maximum is None: + return f"{body_grouped}*" + if self.minimum == 1 and self.maximum is None: + return f"{body_grouped}+" + if self.maximum is None: + return f"{body_grouped}{{{self.minimum},}}" + if self.minimum == self.maximum: + return f"{body_grouped}{{{self.minimum}}}" + return f"{body_grouped}{{{self.minimum},{self.maximum}}}" + + +class AbnfGrammar: + """Minimal ABNF parser tailored for ``macros.bnf``.""" + + _PREDEFINED: Final[dict[str, str]] = { + "DIGIT": r"[0-9]", + } + + def __init__(self, rules: dict[str, _Node]): + self._rules = rules + self._cache: dict[str, str] = {} + + @classmethod + def from_path(cls, path: pathlib.Path) -> "AbnfGrammar": + text = path.read_text(encoding="utf-8") + rules = cls._parse_rules(text) + return cls(rules) + + def compile(self, rule: str) -> re.Pattern[str]: + regex = self._resolve(rule) + return re.compile(f"^{regex}$", re.ASCII) + + def _resolve(self, name: str, stack: tuple[str, ...] 
= ()) -> str: + if name in self._cache: + return self._cache[name] + if name in stack: + raise ValueError(f"Recursive rule detected: {' -> '.join(stack + (name,))}") + if name in self._PREDEFINED: + self._cache[name] = self._PREDEFINED[name] + return self._cache[name] + if name not in self._rules: + raise KeyError(f"Unknown rule '{name}' in macros grammar") + regex = self._rules[name].to_regex(lambda ref: self._resolve(ref, stack + (name,))) + self._cache[name] = regex + return regex + + @staticmethod + def _strip_comment(line: str) -> str: + result = [] + in_quote = False + for char in line: + if char == '"': + in_quote = not in_quote + if char == ';' and not in_quote: + break + result.append(char) + return "".join(result) + + @classmethod + def _parse_rules(cls, text: str) -> dict[str, _Node]: + raw_rules: dict[str, list[str]] = {} + current: str | None = None + for raw_line in text.splitlines(): + cleaned = cls._strip_comment(raw_line).strip() + if not cleaned: + continue + match = re.match(r"^([A-Za-z0-9_-]+)\s*(=/|=)\s*(.*)$", cleaned) + if match: + name, operator, rhs = match.groups() + if operator == "=" or name not in raw_rules: + raw_rules[name] = [] + raw_rules[name].append(rhs.strip()) + current = name + else: + if current is None: + continue + raw_rules[current][-1] += " " + cleaned + parsed: dict[str, _Node] = {} + for name, expressions in raw_rules.items(): + alternatives: list[_Node] = [] + for expression in expressions: + tokens = list(_tokenize(expression)) + parser = _ExpressionParser(tokens) + node = parser.parse() + if isinstance(node, _Alternation): + alternatives.extend(node.options) + else: + alternatives.append(node) + if len(alternatives) == 1: + parsed[name] = alternatives[0] + else: + parsed[name] = _Alternation(alternatives) + return parsed + + +class _ExpressionParser: + def __init__(self, tokens: list[_Token]): + self._tokens = tokens + self._index = 0 + + def parse(self) -> _Node: + node = self._parse_expression() + if self._index != len(self._tokens): + raise ValueError("Unexpected tokens at end of expression") + return node + + def _parse_expression(self) -> _Node: + sequences = [self._parse_sequence()] + while self._peek('/'): + self._consume('/') + sequences.append(self._parse_sequence()) + if len(sequences) == 1: + return sequences[0] + return _Alternation(sequences) + + def _parse_sequence(self) -> _Node: + elements: list[_Node] = [] + while self._index < len(self._tokens) and self._tokens[self._index].kind not in {'/', ')', ']'}: + elements.append(self._parse_element()) + return _Sequence(elements) + + def _parse_element(self) -> _Node: + repeat = None + if self._peek('repeat'): + repeat = self._consume('repeat').value + element = self._parse_factor() + if repeat is None: + return element + minimum, maximum = repeat + return _Repetition(element, minimum, maximum) + + def _parse_factor(self) -> _Node: + token = self._tokens[self._index] + if token.kind == '(': + self._consume('(') + node = self._parse_expression() + self._consume(')') + return node + if token.kind == '[': + self._consume('[') + node = self._parse_expression() + self._consume(']') + return _Repetition(node, 0, 1) + if token.kind == 'literal': + self._consume('literal') + return _Literal(token.value) + if token.kind == 'range': + self._consume('range') + start, end = token.value + return _Range(start, end) + if token.kind == 'identifier': + self._consume('identifier') + return _RuleRef(token.value) + raise ValueError(f"Unexpected token {token.kind!r}") + + def _peek(self, kind: str) -> 
bool: + return self._index < len(self._tokens) and self._tokens[self._index].kind == kind + + def _consume(self, kind: str) -> _Token: + if not self._peek(kind): + raise ValueError(f"Expected token {kind!r}") + token = self._tokens[self._index] + self._index += 1 + return token + + +def _tokenize(expression: str) -> Iterator[_Token]: + index = 0 + length = len(expression) + while index < length: + char = expression[index] + if char.isspace(): + index += 1 + continue + if expression.startswith('%s"', index): + index += 3 + end = expression.index('"', index) + literal = expression[index:end] + index = end + 1 + yield _Token('literal', literal) + continue + if expression.startswith('%x', index): + index += 2 + start_index = index + while index < length and expression[index] not in ' []()/': + index += 1 + value = expression[start_index:index] + start_str, _, end_str = value.partition('-') + start = int(start_str, 16) + end = int(end_str, 16) if end_str else start + yield _Token('range', (start, end)) + continue + if char == '"': + index += 1 + end = expression.index('"', index) + literal = expression[index:end] + index = end + 1 + yield _Token('literal', literal) + continue + if char in '()[]/': + index += 1 + yield _Token(char, char) + continue + if char.isdigit() or char == '*': + start_index = index + while index < length and (expression[index].isdigit() or expression[index] == '*'): + index += 1 + repeat = expression[start_index:index] + if '*' in repeat: + minimum, _, maximum = repeat.partition('*') + min_value = int(minimum) if minimum else 0 + max_value = int(maximum) if maximum else None + yield _Token('repeat', (min_value, max_value)) + continue + yield _Token('literal', repeat) + continue + if char.isalpha() or char == '_': + start_index = index + while index < length and (expression[index].isalnum() or expression[index] in {'_', '-'}): + index += 1 + identifier = expression[start_index:index] + yield _Token('identifier', identifier) + continue + raise ValueError(f"Unsupported character '{char}' in expression '{expression}'") + + +def load_macros(paths: Iterable[pathlib.Path]) -> Iterator[MacroDefinition]: + pattern = re.compile(r"^\s*#define\s+(DT_[A-Za-z0-9_]+)\s*(?:\(([^)]*)\))?", re.ASCII) + for path in paths: + with path.open(encoding="utf-8") as stream: + for index, line in enumerate(stream, start=1): + match = pattern.match(line) + if not match: + continue + name = match.group(1) + params_raw = match.group(2) + parameters: tuple[str, ...] 
= () + if params_raw is not None: + parameters = tuple( + part.strip() + for part in params_raw.split(',') + if part.strip() + ) + yield MacroDefinition(path=path, line=index, name=name, parameters=parameters) + + +def validate_macros(definitions: Iterable[MacroDefinition], pattern: re.Pattern[str]) -> list[MacroViolation]: + violations: list[MacroViolation] = [] + for macro in definitions: + if not pattern.fullmatch(macro.name): + violations.append( + MacroViolation( + macro=macro, + reason=f"macro name '{macro.name}' does not match rule", + ) + ) + return violations + + +def parse_arguments(argv: list[str] | None = None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "paths", + nargs="+", + type=pathlib.Path, + help="Files containing generated devicetree macros", + ) + parser.add_argument( + "--bnf", + type=pathlib.Path, + default=BNF_DEFAULT, + help=f"Path to macros.bnf grammar (default: {BNF_DEFAULT})", + ) + parser.add_argument( + "--rule", + default=RULE_DEFAULT, + help=f"Top-level grammar rule to validate against (default: {RULE_DEFAULT})", + ) + return parser.parse_args(argv) + + +def main(argv: list[str] | None = None) -> int: + args = parse_arguments(argv) + grammar = AbnfGrammar.from_path(args.bnf) + pattern = grammar.compile(args.rule) + macros = list(load_macros(args.paths)) + violations = validate_macros(macros, pattern) + if violations: + for violation in violations: + macro = violation.macro + print(f"{macro.path}:{macro.line}: {violation.reason}", file=sys.stderr) + return 1 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/doc/tools/fixtures/sample_devicetree_macros.h b/doc/tools/fixtures/sample_devicetree_macros.h new file mode 100644 index 0000000000000..d7bbc70f972c6 --- /dev/null +++ b/doc/tools/fixtures/sample_devicetree_macros.h @@ -0,0 +1,17 @@ +/* + * Minimal fixture containing a subset of generated devicetree macros. + * Values are intentionally simple; only macro identifiers are relevant + * for the documentation grammar check. + */ + +#define DT_N_PATH "\/" +#define DT_N_FOREACH_CHILD(fn) fn(DT_N) +#define DT_N_FOREACH_CHILD_SEP(fn, sep) fn(DT_N) +#define DT_N_FOREACH_CHILD_VARGS(fn, ...) fn(DT_N, __VA_ARGS__) +#define DT_N_FOREACH_CHILD_SEP_VARGS(fn, sep, ...) fn(DT_N, sep, __VA_ARGS__) + +#define DT_N_S_soc_PATH "\/soc" +#define DT_N_S_soc_FOREACH_CHILD(fn) fn(DT_N_S_soc) +#define DT_N_S_soc_FOREACH_CHILD_SEP(fn, sep) fn(DT_N_S_soc) +#define DT_N_S_soc_FOREACH_CHILD_VARGS(fn, ...) fn(DT_N_S_soc, __VA_ARGS__) +#define DT_N_S_soc_FOREACH_CHILD_SEP_VARGS(fn, sep, ...) fn(DT_N_S_soc, sep, __VA_ARGS__) diff --git a/doc/tools/validate_abnf.py b/doc/tools/validate_abnf.py new file mode 100755 index 0000000000000..2a18429a5c7fa --- /dev/null +++ b/doc/tools/validate_abnf.py @@ -0,0 +1,307 @@ +#!/usr/bin/env python3 +"""Validate RFC 7405 ABNF grammars used in Zephyr documentation. + +This script implements a small ABNF parser that can be used to sanity check +grammar fragments such as ``doc/build/dts/macros.bnf``. It focuses on the +constructs that appear in the Zephyr documentation today (rule concatenation, +repetition, groups, options, literals and numeric values) and reports the first +syntax error it encounters with a helpful line/column location. 
+""" + +from __future__ import annotations + +import argparse +from dataclasses import dataclass +import pathlib +import re +import sys +from typing import Iterable, List, Optional, Sequence, Tuple + + +class ABNFError(RuntimeError): + """Raised when a syntax error is encountered in the grammar.""" + + def __init__(self, message: str, *, line: int, column: int, rule: Optional[str]): + super().__init__(message) + self.line = line + self.column = column + self.rule = rule + + def __str__(self) -> str: # pragma: no cover - human readable error + location = f"line {self.line}, column {self.column}" + if self.rule: + location += f" (in rule '{self.rule}')" + return f"{location}: {super().__str__()}" + + +@dataclass +class Token: + kind: str + value: str + column: int + + +_TOKEN_REGEX = re.compile( + r"(?P\s+)" # whitespace + r"|(?P=/)" + r"|(?P=)" + r"|(?P\()" + r"|(?P\))" + r"|(?P\[)" + r"|(?P\])" + r"|(?P/)" + r"|(?P\*)" + r'|(?P%[si]"(?:\\.|[^"\\])*")' + r"|(?P%[bdx](?:[0-9A-F]+(?:-[0-9A-F]+)?)(?:\.[0-9A-F]+(?:-[0-9A-F]+)?)*)" + r'|(?P"(?:\\.|[^"\\])*")' + r"|(?P<(?:\\.|[^>\\])*>)" + r"|(?P[A-Za-z][A-Za-z0-9-]*)" + r"|(?P\d+)" + , +) + + +class Lexer: + def __init__(self, text: str, *, line_offset: int) -> None: + self.text = text + self.position = 0 + self.line_offset = line_offset + + def __iter__(self) -> Iterable[Token]: + while self.position < len(self.text): + match = _TOKEN_REGEX.match(self.text, self.position) + if not match: + column = self.position + 1 + raise ABNFError( + f"unexpected character {self.text[self.position]!r}", + line=self.line_offset, + column=column, + rule=None, + ) + if match.lastgroup is None: # whitespace + self.position = match.end() + continue + token = Token(match.lastgroup, match.group(match.lastgroup), match.start() + 1) + self.position = match.end() + yield token + yield Token("EOF", "", len(self.text) + 1) + + +class Parser: + def __init__(self, tokens: Sequence[Token], *, rule_name: str, line: int) -> None: + self.tokens = list(tokens) + self.index = 0 + self.rule_name = rule_name + self.line = line + + def current(self) -> Token: + return self.tokens[self.index] + + def consume(self, kind: str) -> Token: + token = self.current() + if token.kind != kind: + raise ABNFError( + f"expected {kind} but found {token.kind}", + line=self.line, + column=token.column, + rule=self.rule_name, + ) + self.index += 1 + return token + + def match(self, kind: str) -> Optional[Token]: + token = self.current() + if token.kind == kind: + self.index += 1 + return token + return None + + def parse(self) -> None: + self._parse_rule() + if self.current().kind != "EOF": + token = self.current() + raise ABNFError( + "unexpected trailing input", + line=self.line, + column=token.column, + rule=self.rule_name, + ) + + def _parse_rule(self) -> None: + self.consume("RULENAME") + self._consume_whitespace() + if self.match("DEFINED_AS") is None: + self.consume("EQUAL") + self._consume_whitespace() + self._parse_elements() + + def _parse_elements(self) -> None: + self._parse_alternation() + + def _parse_alternation(self) -> None: + self._parse_concatenation() + while True: + self._consume_whitespace() + checkpoint = self.index + if self.match("SLASH") is None: + self.index = checkpoint + break + self._consume_whitespace() + self._parse_concatenation() + + def _parse_concatenation(self) -> None: + self._parse_repetition() + while True: + checkpoint = self.index + if not self._consume_whitespace(): + self.index = checkpoint + break + token = self.current() + if token.kind in {"SLASH", 
"RBRK", "RPAREN", "EOF"}: + self.index = checkpoint + break + self._parse_repetition() + + def _consume_whitespace(self) -> bool: + consumed = False + while self.current().kind == "WS": + consumed = True + self.index += 1 + return consumed + + def _parse_repetition(self) -> None: + self._maybe_parse_repeat() + self._consume_whitespace() + self._parse_element() + + def _maybe_parse_repeat(self) -> None: + token = self.current() + if token.kind == "NUMBER": + self.index += 1 + if self.match("STAR") is not None: + if self.current().kind == "NUMBER": + self.index += 1 + else: + return + elif token.kind == "STAR": + self.index += 1 + if self.current().kind == "NUMBER": + self.index += 1 + + def _parse_element(self) -> None: + token = self.current() + if token.kind in {"RULENAME", "CHAR_VAL", "CASE_STRING", "NUM_VAL", "PROSE_VAL"}: + self.index += 1 + return + if token.kind == "LBRK": + self.index += 1 + self._consume_whitespace() + self._parse_alternation() + self._consume_whitespace() + self.consume("RBRK") + return + if token.kind == "LPAREN": + self.index += 1 + self._consume_whitespace() + self._parse_alternation() + self._consume_whitespace() + self.consume("RPAREN") + return + raise ABNFError( + f"unexpected token {token.kind}", + line=self.line, + column=token.column, + rule=self.rule_name, + ) + + +def strip_comments(lines: Iterable[str]) -> List[Tuple[str, int]]: + cleaned: List[Tuple[str, int]] = [] + for line_number, raw_line in enumerate(lines, start=1): + in_string = False + escape = False + result_chars: List[str] = [] + for ch in raw_line.rstrip("\n"): + if ch == "\\" and not escape: + escape = True + result_chars.append(ch) + continue + if ch == '"' and not escape: + in_string = not in_string + if ch == ';' and not in_string: + break + result_chars.append(ch) + escape = False + cleaned_line = "".join(result_chars).rstrip() + if cleaned_line: + cleaned.append((cleaned_line, line_number)) + return cleaned + + +def join_rule_lines(cleaned_lines: Sequence[Tuple[str, int]]) -> List[Tuple[str, int]]: + rules: List[Tuple[str, int]] = [] + current_line = "" + start_number = 0 + for text, line_number in cleaned_lines: + if text and not text[0].isspace(): + if current_line: + rules.append((current_line, start_number)) + current_line = text.strip() + start_number = line_number + else: + if not current_line: + raise ABNFError( + "continuation line encountered without preceding rule", + line=line_number, + column=1, + rule=None, + ) + current_line += " " + text.strip() + if current_line: + rules.append((current_line, start_number)) + return rules + + +def validate_rule(rule_text: str, line_number: int) -> None: + # Tokenise the rule. 
+ tokens: List[Token] = [] + lexer = Lexer(rule_text, line_offset=line_number) + for token in lexer: + if token.kind == "EOF": + tokens.append(Token("EOF", "", token.column)) + elif token.kind == "WS": + tokens.append(Token("WS", token.value, token.column)) + else: + tokens.append(token) + if not tokens: + return + rule_name = tokens[0].value if tokens and tokens[0].kind == "RULENAME" else None + parser = Parser(tokens, rule_name=rule_name or "", line=line_number) + parser.parse() + + +def validate_file(path: pathlib.Path) -> None: + with path.open("r", encoding="utf-8") as fp: + cleaned = strip_comments(fp) + rules = join_rule_lines(cleaned) + for rule_text, line_number in rules: + validate_rule(rule_text, line_number) + + +def main(argv: Optional[Sequence[str]] = None) -> int: + parser = argparse.ArgumentParser(description="Validate RFC 7405 ABNF files.") + parser.add_argument("paths", metavar="FILE", nargs="+", type=pathlib.Path) + args = parser.parse_args(argv) + + had_error = False + for path in args.paths: + try: + validate_file(path) + except ABNFError as exc: # pragma: no cover - CLI entry point + had_error = True + print(f"{path}: {exc}", file=sys.stderr) + return 1 if had_error else 0 + + +if __name__ == "__main__": # pragma: no cover - CLI entry point + sys.exit(main()) diff --git a/dts/bindings/test/vnd,gpio-nexus.yaml b/dts/bindings/test/vnd,gpio-nexus.yaml new file mode 100644 index 0000000000000..2274be7451da1 --- /dev/null +++ b/dts/bindings/test/vnd,gpio-nexus.yaml @@ -0,0 +1,8 @@ +# Copyright (c) 2025, TOKITA Hiroshi +# SPDX-License-Identifier: Apache-2.0 + +description: VND GPIO nexus + +include: [gpio-nexus.yaml] + +compatible: "vnd,gpio-nexus" diff --git a/dts/bindings/test/vnd,intr-nexus.yaml b/dts/bindings/test/vnd,intr-nexus.yaml new file mode 100644 index 0000000000000..f3b194c558385 --- /dev/null +++ b/dts/bindings/test/vnd,intr-nexus.yaml @@ -0,0 +1,8 @@ +# Copyright (c) 2025, TOKITA Hiroshi +# SPDX-License-Identifier: Apache-2.0 + +description: VND interrupt nexus + +include: [interrupt-nexus.yaml] + +compatible: "vnd,intr-nexus" diff --git a/include/zephyr/devicetree.h b/include/zephyr/devicetree.h index e10cb7c8c5fec..c928f98ef72c8 100644 --- a/include/zephyr/devicetree.h +++ b/include/zephyr/devicetree.h @@ -5571,5 +5571,6 @@ #include #include #include +#include #endif /* ZEPHYR_INCLUDE_DEVICETREE_H_ */ diff --git a/include/zephyr/devicetree/map.h b/include/zephyr/devicetree/map.h new file mode 100644 index 0000000000000..3afca063412bb --- /dev/null +++ b/include/zephyr/devicetree/map.h @@ -0,0 +1,331 @@ +/* + * Copyright (c) 2025 TOKITA Hiroshi + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef ZEPHYR_INCLUDE_DEVICETREE_MAP_H_ +#define ZEPHYR_INCLUDE_DEVICETREE_MAP_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @defgroup devicetree-map Devicetree Map API + * + * @brief Helper macros for handling map properties. + * + * This module provides helper macros that facilitate interrupt mapping and + * specifier mapping based on DeviceTree specifications. It enables the extraction + * and interpretation of mapping data represented as phandle-arrays. + * + * In a typical DeviceTree fragment, properties ending with "-map" specify: + * - The child specifier to be mapped. + * - The parent node (phandle) to which the mapping applies. + * - The parent specifier associated with the mapping. 
+ * + * For example, when the following DeviceTree snippet is defined: + * + * @code{.dts} + * n: node { + * gpio-map = <0 1 &gpio0 2 3>, <4 5 &gpio0 6 7>; + * }; + * @endcode + * + * In the first mapping entry: + * - `0 1` are the child specifiers. + * - &gpio0 is the parent node. + * - `2 3` are the parent specifiers. + * + * Since map properties are implemented as phandle-arrays, macros such as + * DT_PHANDLE_BY_IDX() and DT_PHA_BY_IDX() can be used to access individual elements. + * + * Both child and parent specifiers are treated as cells in a phandle-array. + * By default, each group of specifiers is given a sequential cell name + * (child_specifier_0, child_specifier_1, ..., parent_specifier_0, ...). + * + * If cell names are specified in dt-bindings, they will be used for the child specifier cell names. + * Parent specifiers always use the default naming convention. + * + * Example usage: + * + * A mapping entry is a phandle-array whose elements can be referenced as follows: + * - Child specifiers can be accessed via names such as `child_specifier_0`, + * `child_specifier_1`, ... + * - The parent node is accessed via DT_PHANDLE_BY_IDX(). + * - Parent specifiers are accessed via names such as `parent_specifier_0`, + * `parent_specifier_1`, ... + * + * @code{.c} + * int cspec_0 = DT_PHA_BY_IDX(DT_NODELABEL(n), gpio_map, 0, child_specifier_0); // 0 + * int cspec_1 = DT_PHA_BY_IDX(DT_NODELABEL(n), gpio_map, 0, child_specifier_1); // 1 + * const struct device *parent = + * device_get_binding(DT_PHANDLE_BY_IDX(DT_NODELABEL(n), gpio_map, 0)); // &gpio0 + * int pspec_0 = DT_PHA_BY_IDX(DT_NODELABEL(n), gpio_map, 0, parent_specifier_0); // 2 + * int pspec_1 = DT_PHA_BY_IDX(DT_NODELABEL(n), gpio_map, 0, parent_specifier_1); // 3 + * @endcode + * + * The map helper API also provides the following macros for convenient access to + * specific parts of a mapping entry: + * - DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX() + * - DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX() + * - DT_MAP_PARENT_ARG_BY_IDX() + * + * These macros extract, respectively, the child specifier arguments, the parent specifier + * arguments, and the parent node argument from a mapping element identified by its node ID, + * property name, and index. + * + * For instance: + * + * @code{.c} + * #define SRC_AND_DST(node_id, prop, idx) \ + * { GET_ARG_N(1, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(node_id, prop, idx)), \ + * GET_ARG_N(1, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(node_id, prop, idx)) } + * + * int src_and_dst[2][] = { + * DT_FOREACH_PROP_ELEM_SEP(DT_NODELABEL(n), gpio_map, SRC_AND_DST, (,)) + * }; + * @endcode + * + * The above expansion yields: + * + * @code{.c} + * int src_and_dst[2][] = {{0, 2}, {4, 6}}; + * @endcode + * + * @ingroup devicetree + * @{ + */ + +/** + * @brief Extracts a specified range of arguments. + * + * This helper macro first skips a given number of arguments and then selects + * the first @p len arguments from the remaining list. + * + * @param start The number of arguments to skip. + * @param len The number of arguments to extract after skipping. + * @param ... The list of input arguments. + */ +#define DT_MAP_HELPER_DO_ARGS_RANGE(start, len, ...) \ + GET_ARGS_FIRST_N(len, GET_ARGS_LESS_N(start, __VA_ARGS__)) + +/** + * @brief Extracts a range of mapping arguments for a specific field. + * + * This macro concatenates the field name with the appropriate suffixes to determine + * the starting index and length of the arguments for a map entry, and then extracts + * those arguments. 
+ * + * @param name The mapping field name (e.g., CHILD_SPECIFIER, PARENT). + * @param node_id The node identifier. + * @param prop The property name in lowercase and underscores. + * @param idx The index of the mapping entry. + * @param ... Additional arguments corresponding to the mapping entry. + */ +#define DT_MAP_HELPER_ARGS_RANGE(name, node_id, prop, idx, ...) \ + DT_MAP_HELPER_DO_ARGS_RANGE(DT_CAT3(DT_MAP_, name, _POS_BY_IDX)(node_id, prop, idx), \ + DT_CAT3(DT_MAP_, name, _LEN_BY_IDX)(node_id, prop, idx), \ + __VA_ARGS__) + +/** + * @brief Retrieves the mapping entry at the specified index. + * + * @param node_id The node identifier. + * @param prop The property name in lowercase with underscores. + * @param idx The mapping entry index. + * @return The mapping entry as a list of comma-separated values. + */ +#define DT_MAP_BY_IDX(node_id, prop, idx) DT_CAT5(node_id, _P_, prop, _MAP_IDX_, idx) + +/** + * @brief Retrieves the first mapping entry. + * @see DT_MAP_BY_IDX + */ +#define DT_MAP(node_id, prop) DT_MAP_BY_IDX(node_id, prop, 0) + +/** + * @brief Returns the number of mapping entries for the given property. + * + * @param node_id The node identifier. + * @param prop The property name in lowercase with underscores. + * @return The total count of mapping entries. + */ +#define DT_MAP_LEN(node_id, prop) DT_CAT4(node_id, _P_, prop, _MAP_LEN) + +/** + * @brief Retrieves the starting index of the child specifier cell within a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name. + * @param idx The mapping entry index. + * @return The starting index of the child specifier cell. + */ +#define DT_MAP_CHILD_SPECIFIER_POS_BY_IDX(node_id, prop, idx) \ + DT_CAT7(node_id, _P_, prop, _MAP_IDX_, idx, _, CHILD_SPECIFIER_POS) + +/** + * @brief Retrieves the starting index of the child specifier cell within the first mapping entry. + * @see DT_MAP_CHILD_SPECIFIER_POS_BY_IDX + */ +#define DT_MAP_CHILD_SPECIFIER_POS(node_id, prop) \ + DT_MAP_CHILD_SPECIFIER_POS_BY_IDX(node_id, prop, 0) + +/** + * @brief Returns the length (number of cells) of the child specifier within a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name. + * @param idx The mapping entry index. + * @return The length (in cells) of the child specifier. + */ +#define DT_MAP_CHILD_SPECIFIER_LEN_BY_IDX(node_id, prop, idx) \ + DT_CAT7(node_id, _P_, prop, _MAP_IDX_, idx, _, CHILD_SPECIFIER_LEN) + +/** + * @brief Returns the length (number of cells) of the child specifier within the first mapping + * entry. + * @see DT_MAP_CHILD_SPECIFIER_LEN_BY_IDX + */ +#define DT_MAP_CHILD_SPECIFIER_LEN(node_id, prop) \ + DT_MAP_CHILD_SPECIFIER_LEN_BY_IDX(node_id, prop, 0) + +/** + * @brief Retrieves the starting index of the parent cell in a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name. + * @param idx The mapping entry index. + * @return The starting index of the parent cell. + */ +#define DT_MAP_PARENT_POS_BY_IDX(node_id, prop, idx) \ + DT_CAT7(node_id, _P_, prop, _MAP_IDX_, idx, _, PARENT_POS) + +/** + * @brief Retrieves the starting index of the parent cell in the first mapping entry. + * @see DT_MAP_PARENT_POS_BY_IDX + */ +#define DT_MAP_PARENT_POS(node_id, prop) DT_MAP_PARENT_POS_BY_IDX(node_id, prop, 0) + +/** + * @brief Returns the length (number of cells) of the parent cell in a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name. + * @param idx The mapping entry index. 
+ * @return The length (in cells) of the parent cell. + */ +#define DT_MAP_PARENT_LEN_BY_IDX(node_id, prop, idx) \ + DT_CAT7(node_id, _P_, prop, _MAP_IDX_, idx, _, PARENT_LEN) + +/** + * @brief Returns the length (number of cells) of the parent cell in the first mapping entry. + * @see DT_MAP_PARENT_LEN_BY_IDX + */ +#define DT_MAP_PARENT_LEN(node_id, prop) DT_MAP_PARENT_LEN_BY_IDX(node_id, prop, 0) + +/** + * @brief Retrieves the starting index of the parent specifier cell within a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name. + * @param idx The mapping entry index. + * @return The starting index of the parent specifier cell. + */ +#define DT_MAP_PARENT_SPECIFIER_POS_BY_IDX(node_id, prop, idx) \ + DT_CAT7(node_id, _P_, prop, _MAP_IDX_, idx, _, PARENT_SPECIFIER_POS) + +/** + * @brief Retrieves the starting index of the parent specifier cell within the first mapping entry. + * @see DT_MAP_PARENT_SPECIFIER_POS_BY_IDX + */ +#define DT_MAP_PARENT_SPECIFIER_POS(node_id, prop) \ + DT_MAP_PARENT_SPECIFIER_POS_BY_IDX(node_id, prop, 0) + +/** + * @brief Returns the length (number of cells) of the parent specifier in a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name. + * @param idx The mapping entry index. + * @return The length (in cells) of the parent specifier. + */ +#define DT_MAP_PARENT_SPECIFIER_LEN_BY_IDX(node_id, prop, idx) \ + DT_CAT7(node_id, _P_, prop, _MAP_IDX_, idx, _, PARENT_SPECIFIER_LEN) + +/** + * @brief Returns the length (number of cells) of the parent specifier of the first mapping entry. + * @see DT_MAP_PARENT_SPECIFIER_LEN_BY_IDX + */ +#define DT_MAP_PARENT_SPECIFIER_LEN(node_id, prop) \ + DT_MAP_PARENT_SPECIFIER_LEN_BY_IDX(node_id, prop, 0) + +/** + * @brief Extracts the child specifier arguments from a mapping entry. + * + * This macro returns the comma-separated list of arguments for the child specifier. + * + * @param node_id The node identifier. + * @param prop The property name in lowercase with underscores. + * @param idx The mapping entry index. + * @return The child specifier arguments. + */ +#define DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(node_id, prop, idx) \ + DT_MAP_HELPER_ARGS_RANGE(CHILD_SPECIFIER, node_id, prop, idx, \ + DT_MAP_BY_IDX(node_id, prop, idx)) + +/** + * @brief Extracts the child specifier arguments from the first mapping entry. + * @see DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX + */ +#define DT_MAP_CHILD_SPECIFIER_ARGS(node_id, prop) \ + DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(node_id, prop, 0) + +/** + * @brief Extracts the parent node argument from a mapping entry. + * + * @param node_id The node identifier. + * @param prop The property name in lowercase with underscores. + * @param idx The mapping entry index. + * @return The parent node argument. + */ +#define DT_MAP_PARENT_ARG_BY_IDX(node_id, prop, idx) \ + DT_MAP_HELPER_ARGS_RANGE(PARENT, node_id, prop, idx, DT_MAP_BY_IDX(node_id, prop, idx)) + +/** + * @brief Extracts the parent node argument from the first mapping entry. + * @see DT_MAP_PARENT_ARG_BY_IDX + */ +#define DT_MAP_PARENT_ARG(node_id, prop) DT_MAP_PARENT_ARG_BY_IDX(node_id, prop, 0) + +/** + * @brief Extracts the parent specifier arguments from a mapping entry. + * + * This macro returns the comma-separated list of arguments for the parent specifier. + * + * @param node_id The node identifier. + * @param prop The property name in lowercase with underscores. + * @param idx The mapping entry index. + * @return The parent specifier arguments. 
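+ *
+ * For instance, reusing the `gpio-map` example node `n` from the comment at
+ * the top of this file, the parent specifier cells of the second mapping
+ * entry (which expand to `6, 7` for that example) can be retrieved with:
+ *
+ * @code{.c}
+ * DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(DT_NODELABEL(n), gpio_map, 1)
+ * @endcode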
+ */ +#define DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(node_id, prop, idx) \ + DT_MAP_HELPER_ARGS_RANGE(PARENT_SPECIFIER, node_id, prop, idx, \ + DT_MAP_BY_IDX(node_id, prop, idx)) + +/** + * @brief Extracts the parent specifier arguments of the first mapping entry. + * @see DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX + */ +#define DT_MAP_PARENT_SPECIFIER_ARGS(node_id, prop) \ + DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(node_id, prop, 0) + +/** + * @} + */ + +#ifdef __cplusplus +} +#endif + +#endif /* ZEPHYR_INCLUDE_DEVICETREE_MAP_H_ */ diff --git a/scripts/dts/gen_defines.py b/scripts/dts/gen_defines.py index e3913dd585b79..42df47315942a 100755 --- a/scripts/dts/gen_defines.py +++ b/scripts/dts/gen_defines.py @@ -288,6 +288,7 @@ def write_special_props(node: edtlib.Node) -> None: write_pinctrls(node) write_fixed_partitions(node) write_gpio_hogs(node) + write_maps(node) def write_ranges(node: edtlib.Node) -> None: @@ -579,6 +580,72 @@ def write_gpio_hogs(node: edtlib.Node) -> None: out_dt_define(macro, val) +def write_maps(node: edtlib.Node) -> None: + if len(node.maps) == 0: + return + + out_comment("Map properties:") + + basename = str2ident(node.maps[0].basename) + macro = f"{node.z_path_id}_P_{basename}_map" + macro2val = {} + + for i, cd in enumerate(node.maps): + if basename != str2ident(cd.basename): + err(f"Map basename mismatch: {basename} != {str2ident(cd.basename)}") + + macro2val.update(controller_and_data_macros(cd, i, macro, "")) + + prop_id = f"{basename}_map" + plen = len(node.maps) + # DT_N__P__FOREACH_PROP_ELEM + macro2val[f"{macro}_FOREACH_PROP_ELEM(fn)"] = ' \\\n\t'.join( + f'fn(DT_{node.z_path_id}, {prop_id}, {i})' for i in range(plen) + ) + + # DT_N__P__FOREACH_PROP_ELEM_SEP + macro2val[f"{macro}_FOREACH_PROP_ELEM_SEP(fn, sep)"] = ' DT_DEBRACKET_INTERNAL sep \\\n\t'.join( + f'fn(DT_{node.z_path_id}, {prop_id}, {i})' for i in range(plen) + ) + + # DT_N__P__FOREACH_PROP_ELEM_VARGS + macro2val[f"{macro}_FOREACH_PROP_ELEM_VARGS(fn, ...)"] = ' \\\n\t'.join( + f'fn(DT_{node.z_path_id}, {prop_id}, {i}, __VA_ARGS__)' for i in range(plen) + ) + + # DT_N__P__FOREACH_PROP_ELEM_SEP_VARGS + macro2val[f"{macro}_FOREACH_PROP_ELEM_SEP_VARGS(fn, sep, ...)"] = ( + ' DT_DEBRACKET_INTERNAL sep \\\n\t'.join( + f'fn(DT_{node.z_path_id}, {prop_id}, {i}, __VA_ARGS__)' for i in range(plen) + ) + ) + + macro2val[f"{macro}_LEN"] = plen + macro2val[f"{macro}_EXISTS"] = 1 + + for i, cd in enumerate(node.maps): + parent_specifier_len = len([k for k in cd.data if k.startswith('parent_specifier_')]) + child_specifiers = list(cd.data.values())[:-parent_specifier_len] + parent_specifiers = list(cd.data.values())[-parent_specifier_len:] + child_specifier_len = len(child_specifiers) + + args = [] + args.extend([str(v) for v in child_specifiers]) + args.extend(["DT_" + node_z_path_id(cd.controller)]) + args.extend([str(v) for v in parent_specifiers]) + + macro2val[f"{macro}_MAP_IDX_{i}"] = ", ".join(args) + macro2val[f"{macro}_MAP_IDX_{i}_CHILD_SPECIFIER_POS"] = 0 + macro2val[f"{macro}_MAP_IDX_{i}_CHILD_SPECIFIER_LEN"] = child_specifier_len + macro2val[f"{macro}_MAP_IDX_{i}_PARENT_POS"] = child_specifier_len + macro2val[f"{macro}_MAP_IDX_{i}_PARENT_LEN"] = 1 + macro2val[f"{macro}_MAP_IDX_{i}_PARENT_SPECIFIER_POS"] = child_specifier_len + 1 + macro2val[f"{macro}_MAP_IDX_{i}_PARENT_SPECIFIER_LEN"] = parent_specifier_len + + for mc, val in macro2val.items(): + out_dt_define(mc, val) + + def write_vanilla_props(node: edtlib.Node) -> None: # Writes macros for any and all properties defined in the # "properties" section of the 
binding for the node. diff --git a/scripts/dts/python-devicetree/src/devicetree/edtlib.py b/scripts/dts/python-devicetree/src/devicetree/edtlib.py index 916cdc230d236..e9091388312d4 100644 --- a/scripts/dts/python-devicetree/src/devicetree/edtlib.py +++ b/scripts/dts/python-devicetree/src/devicetree/edtlib.py @@ -1321,6 +1321,97 @@ def gpio_hogs(self) -> list[ControllerAndData]: return res + @property + def maps(self) -> list[ControllerAndData]: + res: list[ControllerAndData] = [] + + def count_cells_num(node: dtlib_Node, specifier: str) -> int: + """ + Calculate the number of cells in the node. + When calculating the number of interrupt cells, + add up the values of the address cells. + """ + + if node is None: + _err("node is None.") + + num = node.props[f"#{specifier}-cells"].to_num() + + if specifier == "interrupt": + parent_props = None + if node.parent: + parent_props = node.parent.props + + if "#address-cells" in node.props: + num = num + node.props["#address-cells"].to_num() + elif parent_props and "#address-cells" in parent_props: + num = num + parent_props["#address-cells"].to_num() + else: + _err("Neither the node nor its parent has `#address-cells` property") + + return num + + for prop in [v for k, v in self._node.props.items() if k.endswith("-map")]: + specifier_space = prop.name[:-4] # Strip '-map' + raw = prop.value + while raw: + if len(raw) < 4: + # Not enough room for phandle + _err("bad value for " + repr(prop)) + + child_specifier_num = count_cells_num(prop.node, specifier_space) + + child_specifiers = to_nums(raw[: 4 * child_specifier_num]) + raw = raw[4 * child_specifier_num :] + phandle = to_num(raw[:4]) + raw = raw[4:] + + controller_node = prop.node.dt.phandle2node.get(phandle) + if controller_node is None: + _err(f"controller node cannot be found from phandle:{phandle}") + + controller: Node = self.edt._node2enode[controller_node] + if controller is None: + _err("controller cannot be found from: " + repr(controller_node)) + + parent_specifier_num = count_cells_num(controller_node, specifier_space) + parent_specifiers = to_nums(raw[: 4 * parent_specifier_num]) + raw = raw[4 * parent_specifier_num :] + + # Although this is rare, if a cell-name is specified for the map node, + # it will be reflected. + # If not specified, the name of child_specifier_[i] will be set. + values: dict[str, int] = {} + for i, v in enumerate(child_specifiers): + cell_name = f"child_specifier_{i}" + if (self._binding and + self._binding.specifier2cells and + specifier_space in self._binding.specifier2cells and + i < len(self._binding.specifier2cells[specifier_space])): + cell_name = self._binding.specifier2cells[specifier_space][i] + + values[cell_name] = v + + # The cell name for parent_specifier cannot be determined. + # For convenience, we assign it the name parent_specifier_[i]. 
+ for i, v in enumerate(parent_specifiers): + values[f"parent_specifier_{i}"] = v + + res.append( + ControllerAndData( + node=self, + controller=controller, + data=values, + name=None, + basename=specifier_space, + ) + ) + + if len(raw) != 0: + _err(f"unexpected prop.value remainings: {raw}") + + return res + @property def has_child_binding(self) -> bool: """ diff --git a/scripts/dts/python-devicetree/tests/test_edtlib.py b/scripts/dts/python-devicetree/tests/test_edtlib.py index f26b5fa158150..6e9c5914889b3 100644 --- a/scripts/dts/python-devicetree/tests/test_edtlib.py +++ b/scripts/dts/python-devicetree/tests/test_edtlib.py @@ -130,6 +130,116 @@ def test_interrupts(): edtlib.ControllerAndData(node=node, controller=edt.get_node('/interrupt-map-bitops-test/controller'), data={'one': 3, 'two': 2}, name=None, basename=None) ] + +def test_maps(): + '''Tests for the maps property.''' + with from_here(): + edt = edtlib.EDT("test.dts", ["test-bindings"]) + + nexus = edt.get_node("/interrupt-map-test/nexus") + controller_0 = edt.get_node("/interrupt-map-test/controller-0") + controller_1 = edt.get_node("/interrupt-map-test/controller-1") + controller_2 = edt.get_node("/interrupt-map-test/controller-2") + + assert nexus.maps == [ + edtlib.ControllerAndData( + node=nexus, + controller=controller_0, + data={ + "child_specifier_0": 0, + "child_specifier_1": 0, + "child_specifier_2": 0, + "child_specifier_3": 0, + "parent_specifier_0": 0, + "parent_specifier_1": 0, + }, + name=None, + basename="interrupt", + ), + edtlib.ControllerAndData( + node=nexus, + controller=controller_1, + data={ + "child_specifier_0": 0, + "child_specifier_1": 0, + "child_specifier_2": 0, + "child_specifier_3": 1, + "parent_specifier_0": 0, + "parent_specifier_1": 0, + "parent_specifier_2": 0, + "parent_specifier_3": 1, + }, + name=None, + basename="interrupt", + ), + edtlib.ControllerAndData( + node=nexus, + controller=controller_2, + data={ + "child_specifier_0": 0, + "child_specifier_1": 0, + "child_specifier_2": 0, + "child_specifier_3": 2, + "parent_specifier_0": 0, + "parent_specifier_1": 0, + "parent_specifier_2": 0, + "parent_specifier_3": 0, + "parent_specifier_4": 0, + "parent_specifier_5": 2, + }, + name=None, + basename="interrupt", + ), + edtlib.ControllerAndData( + node=nexus, + controller=controller_0, + data={ + "child_specifier_0": 0, + "child_specifier_1": 1, + "child_specifier_2": 0, + "child_specifier_3": 0, + "parent_specifier_0": 0, + "parent_specifier_1": 3, + }, + name=None, + basename="interrupt", + ), + edtlib.ControllerAndData( + node=nexus, + controller=controller_1, + data={ + "child_specifier_0": 0, + "child_specifier_1": 1, + "child_specifier_2": 0, + "child_specifier_3": 1, + "parent_specifier_0": 0, + "parent_specifier_1": 0, + "parent_specifier_2": 0, + "parent_specifier_3": 4, + }, + name=None, + basename="interrupt", + ), + edtlib.ControllerAndData( + node=nexus, + controller=controller_2, + data={ + "child_specifier_0": 0, + "child_specifier_1": 1, + "child_specifier_2": 0, + "child_specifier_3": 2, + "parent_specifier_0": 0, + "parent_specifier_1": 0, + "parent_specifier_2": 0, + "parent_specifier_3": 0, + "parent_specifier_4": 0, + "parent_specifier_5": 5, + }, + name=None, + basename="interrupt", + ), + ] + def test_ranges(): '''Tests for the ranges property''' with from_here(): diff --git a/scripts/tests/dts/fixtures/devicetree_generated_sample.h b/scripts/tests/dts/fixtures/devicetree_generated_sample.h new file mode 100644 index 0000000000000..4440e7ba11940 --- /dev/null +++ 
b/scripts/tests/dts/fixtures/devicetree_generated_sample.h
@@ -0,0 +1,93 @@
+/* Sample devicetree macros produced by gen_defines.py */
+#define DT_N_S_soc_S_i2c_0_PATH "/soc/i2c@0"
+#define DT_N_S_soc_S_i2c_0_FULL_NAME "i2c@0"
+#define DT_N_S_soc_S_i2c_0_PARENT DT_N_S_soc
+#define DT_N_S_soc_S_i2c_0_P_current_speed 100000
+#define DT_N_S_soc_S_i2c_0_P_current_speed_IDX_0 100000
+#define DT_N_S_soc_S_i2c_0_P_current_speed_ENUM_IDX 0
+#define DT_N_S_soc_S_i2c_0_P_current_speed_ENUM_VAL_default_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_P_current_speed_FOREACH_PROP_ELEM(fn) fn(0)
+#define DT_N_S_soc_S_i2c_0_P_current_speed_FOREACH_PROP_ELEM_SEP(fn, sep) fn(0)
+#define DT_N_S_soc_S_i2c_0_P_current_speed_FOREACH_PROP_ELEM_VARGS(fn, ...) fn(0)
+#define DT_N_S_soc_S_i2c_0_P_current_speed_FOREACH_PROP_ELEM_SEP_VARGS(fn, sep, ...) fn(0)
+#define DT_N_S_soc_S_i2c_0_P_current_speed_IDX_0_VAL_address 0x0
+#define DT_N_S_soc_S_i2c_0_P_current_speed_IDX_0_VAL_address_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_P_reg_LEN 1
+#define DT_N_S_soc_S_i2c_0_PINCTRL_NUM 2
+#define DT_N_S_soc_S_i2c_0_PINCTRL_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_PINCTRL_NAME_default_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_PINCTRL_NAME_default_IDX 0
+#define DT_N_S_soc_S_i2c_0_PINCTRL_NAME_default_IDX_0_PH DT_N_S_pinmux_S_default
+#define DT_N_S_soc_S_i2c_0_GPIO_HOGS_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_GPIO_HOGS_IDX_0_PH DT_N_S_soc_S_gpio_0
+#define DT_N_S_soc_S_i2c_0_GPIO_HOGS_IDX_0_VAL_pin_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_GPIO_HOGS_IDX_0_VAL_pin 3
+#define DT_N_S_soc_S_i2c_0_GPIO_HOGS_IDX_0_VAL_flags_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_GPIO_HOGS_IDX_0_VAL_flags 16
+#define DT_N_S_soc_S_i2c_0_REG_NUM 1
+#define DT_N_S_soc_S_i2c_0_REG_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_REG_IDX_0_VAL_ADDRESS 0x0
+#define DT_N_S_soc_S_i2c_0_REG_IDX_0_VAL_SIZE 0x10
+#define DT_N_S_soc_S_i2c_0_REG_NAME_control_VAL_ADDRESS 0x0
+#define DT_N_S_soc_S_i2c_0_REG_NAME_control_VAL_SIZE 0x10
+#define DT_N_S_soc_S_i2c_0_REG_NAME_control_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_IRQ_NUM 1
+#define DT_N_S_soc_S_i2c_0_IRQ_LEVEL 0
+#define DT_N_S_soc_S_i2c_0_IRQ_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_IRQ_IDX_0_VAL_irq 24
+#define DT_N_S_soc_S_i2c_0_IRQ_IDX_0_VAL_irq_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_IRQ_NAME_main_VAL_irq 24
+#define DT_N_S_soc_S_i2c_0_IRQ_NAME_main_VAL_irq_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_IRQ_NAME_main_CONTROLLER DT_N_S_soc_S_intc
+#define DT_N_S_soc_S_i2c_0_RANGES_NUM 1
+#define DT_N_S_soc_S_i2c_0_RANGES_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_RANGES_IDX_0_VAL_CHILD_BUS_FLAGS_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_RANGES_IDX_0_VAL_CHILD_BUS_FLAGS 2
+#define DT_N_S_soc_S_i2c_0_RANGES_IDX_0_VAL_CHILD_BUS_ADDRESS 0
+#define DT_N_S_soc_S_i2c_0_RANGES_IDX_0_VAL_PARENT_BUS_ADDRESS 0
+#define DT_N_S_soc_S_i2c_0_RANGES_IDX_0_VAL_LENGTH 0
+#define DT_N_S_soc_S_i2c_0_FOREACH_RANGE(fn) fn(0)
+#define DT_N_S_soc_S_i2c_0_PARTITION_ID 0
+#define DT_N_S_soc_S_i2c_0_COMPAT_MATCHES_vnd_device 1
+#define DT_N_S_soc_S_i2c_0_COMPAT_VENDOR_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_COMPAT_VENDOR_IDX_0 0
+#define DT_N_S_soc_S_i2c_0_COMPAT_MODEL_IDX_0_EXISTS 1
+#define DT_N_S_soc_S_i2c_0_COMPAT_MODEL_IDX_0 0
+#define DT_N_S_soc_S_i2c_0_P_status_FOREACH_PROP_ELEM(fn) fn(status)
+#define DT_N_S_soc_S_i2c_0_P_status_FOREACH_PROP_ELEM_SEP(fn, sep) fn(status)
+#define DT_N_S_soc_S_i2c_0_P_status_FOREACH_PROP_ELEM_VARGS(fn, ...) fn(status)
+#define DT_N_S_soc_S_i2c_0_P_status_FOREACH_PROP_ELEM_SEP_VARGS(fn, sep, ...)
fn(status) +#define DT_N_S_soc_S_i2c_0_CHILD_NUM 2 +#define DT_N_S_soc_S_i2c_0_CHILD_NUM_STATUS_OKAY 1 +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD(fn) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_SEP(fn, sep) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_VARGS(fn, ...) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_SEP_VARGS(fn, sep, ...) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_STATUS_OKAY(fn) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_STATUS_OKAY_SEP(fn, sep) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_STATUS_OKAY_VARGS(fn, ...) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_CHILD_STATUS_OKAY_SEP_VARGS(fn, sep, ...) fn(DT_N_S_soc_S_i2c_0_S_child) +#define DT_N_S_soc_S_i2c_0_FOREACH_NODELABEL(fn) fn(i2c0) +#define DT_N_S_soc_S_i2c_0_FOREACH_NODELABEL_VARGS(fn, ...) fn(i2c0, __VA_ARGS__) +#define DT_N_S_soc_S_i2c_0_NODELABEL_NUM 1 +#define DT_N_S_soc_S_i2c_0_CHILD_IDX 0 +#define DT_N_S_soc_S_i2c_0_STATUS_okay 1 +#define DT_N_S_soc_S_i2c_0_ORD 5 +#define DT_N_S_soc_S_i2c_0_REQUIRES_ORDS {5} +#define DT_N_S_soc_S_i2c_0_SUPPORTS_ORDS {1, 2} +#define DT_N_ALIAS_i2c0 DT_N_S_soc_S_i2c_0 +#define DT_N_NODELABEL_i2c0 DT_N_S_soc_S_i2c_0 +#define DT_N_INST_0_vnd_device DT_N_S_soc_S_i2c_0 +#define DT_N_INST_vnd_device_NUM_OKAY 1 +#define DT_FOREACH_HELPER(macro) macro(DT_N_S_soc_S_root) +#define DT_FOREACH_OKAY_HELPER(macro) macro(DT_N_S_soc_S_root) +#define DT_FOREACH_OKAY_vnd_device(macro) macro(DT_N_S_soc_S_i2c_0) +#define DT_FOREACH_OKAY_VARGS_vnd_device(macro, ...) macro(DT_N_S_soc_S_i2c_0, __VA_ARGS__) +#define DT_FOREACH_OKAY_INST_vnd_device(macro) macro(DT_N_S_soc_S_i2c_0) +#define DT_FOREACH_OKAY_INST_VARGS_vnd_device(macro, ...) 
diff --git a/scripts/tests/dts/test_check_dt_macros.py b/scripts/tests/dts/test_check_dt_macros.py
new file mode 100644
index 0000000000000..88e8a07dbd6dd
--- /dev/null
+++ b/scripts/tests/dts/test_check_dt_macros.py
@@ -0,0 +1,52 @@
+"""Tests for doc.tools.check_dt_macros."""
+
+from __future__ import annotations
+
+import importlib.util
+import pathlib
+import sys
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def checker_module():
+    module_path = pathlib.Path(__file__).resolve().parents[3] / "doc" / "tools" / "check_dt_macros.py"
+    spec = importlib.util.spec_from_file_location("doc.tools.check_dt_macros", module_path)
+    module = importlib.util.module_from_spec(spec)
+    assert spec.loader is not None
+    sys.modules[spec.name] = module
+    spec.loader.exec_module(module)  # type: ignore[attr-defined]
+    return module
+
+
+@pytest.fixture(scope="module")
+def grammar(checker_module):
+    grammar_path = pathlib.Path(__file__).resolve().parents[3] / "doc" / "build" / "dts" / "macros.bnf"
+    return checker_module.AbnfGrammar.from_path(grammar_path)
+
+
+def test_fixture_macros_match_abnf(checker_module, grammar):
+    fixture = pathlib.Path(__file__).resolve().parent / "fixtures" / "devicetree_generated_sample.h"
+    pattern = grammar.compile("dt-macro")
+    macros = list(checker_module.load_macros([fixture]))
+    violations = checker_module.validate_macros(macros, pattern)
+    assert not violations, "All macros in the fixture should match the ABNF grammar"
+
+
+def test_invalid_macro_is_reported(tmp_path, checker_module, grammar):
+    invalid = tmp_path / "invalid.h"
+    invalid.write_text("#define DT_invalid_macro 1\n", encoding="utf-8")
+    pattern = grammar.compile("dt-macro")
+    macros = list(checker_module.load_macros([invalid]))
+    violations = checker_module.validate_macros(macros, pattern)
+    assert len(violations) == 1
+    assert violations[0].macro.line == 1
+    assert "DT_invalid_macro" in violations[0].reason
+
+
+def test_macro_parameters_are_tokenized(checker_module):
+    fixture = pathlib.Path(__file__).resolve().parent / "fixtures" / "devicetree_generated_sample.h"
+    macros = list(checker_module.load_macros([fixture]))
+    macro = next(m for m in macros if m.name.endswith("FOREACH_CHILD_SEP_VARGS"))
+    assert macro.parameters == ("fn", "sep", "...")
diff --git a/tests/lib/devicetree/api/app.overlay b/tests/lib/devicetree/api/app.overlay
index 950ca9b403555..a2961f115a13d 100644
--- a/tests/lib/devicetree/api/app.overlay
+++ b/tests/lib/devicetree/api/app.overlay
@@ -921,4 +921,49 @@
 		compatible = "vnd,non-deprecated-label";
 		label = "FOO";
 	};
+
+	gpio-map-test {
+		connector {
+			compatible = "vnd,gpio-nexus";
+			#gpio-cells = <2>;
+			gpio-map = <1 2 &{/gpio-map-test/parent} 3
+				    4 5 &{/gpio-map-test/parent} 6>;
+			gpio-map-mask = <0xffffffff 0xffffffc0>;
+			gpio-map-pass-thru = <0x0 0x3f>;
+		};
+		parent {
+			compatible = "gpio-dst";
+			gpio-controller;
+			#gpio-cells = <1>;
+		};
+	};
+
+	interrupt-map-test {
+		#address-cells = <2>;
+		#size-cells = <0>;
+
+		controller-0@0 {
+			compatible = "vnd,cpu-intc";
+			reg = <0x0 0x0>;
+			#address-cells = <1>;
+			#interrupt-cells = <1>;
+			interrupt-controller;
+		};
+		controller-1@1 {
+			compatible = "vnd,intc";
+			reg = <0x0 0x1>;
+			#address-cells = <2>;
+			#interrupt-cells = <2>;
+			interrupt-controller;
+		};
+		nexus {
+			compatible = "vnd,intr-nexus";
+			#interrupt-cells = <2>;
+			interrupt-map = <
+				0 0 1 2 &{/interrupt-map-test/controller-0@0} 3 4
+				0 0 5 6 &{/interrupt-map-test/controller-1@1} 7 8 9 0
+				0 1 9 8 &{/interrupt-map-test/controller-0@0} 7 6
+				0 1 5 4 &{/interrupt-map-test/controller-1@1} 3 2 1 0>;
+		};
+	};
 };
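The connector's gpio-map above feeds the new DT_MAP_* accessors, which the src/main.c changes below exercise against the interrupt-map nexus. As a hedged sketch only (GPIO_CONNECTOR and the ENTRY0_* names are invented here, not part of this patch), this is how those accessors would slice the first gpio-map entry:

#include <zephyr/devicetree.h>

#define GPIO_CONNECTOR DT_PATH(gpio_map_test, connector)

/* Entry 0 of "gpio-map = <1 2 &{/gpio-map-test/parent} 3 ...>": */
/* child specifier cells -> 1, 2 (connector has #gpio-cells = <2>) */
#define ENTRY0_CHILD_CELLS  DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(GPIO_CONNECTOR, gpio_map, 0)
/* parent node -> the node identifier for /gpio-map-test/parent */
#define ENTRY0_PARENT_NODE  DT_MAP_PARENT_ARG_BY_IDX(GPIO_CONNECTOR, gpio_map, 0)
/* parent specifier cell -> 3 (parent has #gpio-cells = <1>) */
#define ENTRY0_PARENT_CELLS DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(GPIO_CONNECTOR, gpio_map, 0)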
diff --git a/tests/lib/devicetree/api/src/main.c b/tests/lib/devicetree/api/src/main.c
index 98189ebe582cc..38acae5be4387 100644
--- a/tests/lib/devicetree/api/src/main.c
+++ b/tests/lib/devicetree/api/src/main.c
@@ -111,6 +111,9 @@
 #define TEST_SUBPARTITION_1 DT_PATH(test, test_mtd_ffeeddcc, flash_20000000, partitions, \
 			partition_100, partition_40)
 
+#define TEST_GPIO_CONNECTOR DT_PATH(gpio_map_test, connector)
+#define TEST_INTERRUPT_NEXUS DT_PATH(interrupt_map_test, nexus)
+
 #define ZEPHYR_USER DT_PATH(zephyr_user)
 
 #define TA_HAS_COMPAT(compat) DT_NODE_HAS_COMPAT(TEST_ARRAYS, compat)
@@ -3832,4 +3835,87 @@ ZTEST(devicetree_api, test_interrupt_controller)
 	zassert_true(DT_SAME_NODE(DT_INST_IRQ_INTC(0), TEST_INTC), "");
 }
 
+#define INTERRUPT_NEXUS_CHECK_0(n, p, i, ...) \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(GET_ARG_N(3, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 1); \
+	zassert_equal(GET_ARG_N(4, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 2); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 2); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 3); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), 1); \
+	zassert_str_equal(STRINGIFY(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), \
+			  "DT_N_S_interrupt_map_test_S_controller_0_0");
+
+#define INTERRUPT_NEXUS_CHECK_1(n, p, i, ...) \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(GET_ARG_N(3, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 5); \
+	zassert_equal(GET_ARG_N(4, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 6); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 7); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 8); \
+	zassert_equal(GET_ARG_N(3, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 9); \
+	zassert_equal(GET_ARG_N(4, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), 1); \
+	zassert_str_equal(STRINGIFY(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), \
+			  "DT_N_S_interrupt_map_test_S_controller_1_1");
+
+#define INTERRUPT_NEXUS_CHECK_2(n, p, i, ...) \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 1); \
+	zassert_equal(GET_ARG_N(3, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 9); \
+	zassert_equal(GET_ARG_N(4, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 8); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 2); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 7); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 6); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), 1); \
+	zassert_str_equal(STRINGIFY(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), \
+			  "DT_N_S_interrupt_map_test_S_controller_0_0");
+
+#define INTERRUPT_NEXUS_CHECK_3(n, p, i, ...) \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 1); \
+	zassert_equal(GET_ARG_N(3, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 5); \
+	zassert_equal(GET_ARG_N(4, DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(NUM_VA_ARGS(DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 4); \
+	zassert_equal(GET_ARG_N(1, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 3); \
+	zassert_equal(GET_ARG_N(2, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 2); \
+	zassert_equal(GET_ARG_N(3, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 1); \
+	zassert_equal(GET_ARG_N(4, DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX(n, p, i)), 0); \
+	zassert_str_equal(STRINGIFY(DT_MAP_PARENT_ARG_BY_IDX(n, p, i)), \
+			  "DT_N_S_interrupt_map_test_S_controller_1_1");
+
+#define INTERRUPT_NEXUS_CHECK(...) \
+	UTIL_CAT(INTERRUPT_NEXUS_CHECK_, GET_ARG_N(3, __VA_ARGS__))(__VA_ARGS__)
+
+ZTEST(devicetree_api, test_map)
+{
+	zassert_equal(DT_PROP_LEN(TEST_GPIO_CONNECTOR, gpio_map), 2);
+	zassert_equal(DT_PHA_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 0, child_specifier_0), 1);
+	zassert_equal(DT_PHA_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 0, child_specifier_1), 2);
+	zassert_str_equal(STRINGIFY(DT_PHANDLE_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 0)),
+			  "DT_N_S_gpio_map_test_S_parent");
+	zassert_equal(DT_PHA_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 0, parent_specifier_0), 3);
+	zassert_equal(DT_PHA_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 1, child_specifier_0), 4);
+	zassert_equal(DT_PHA_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 1, child_specifier_1), 5);
+	zassert_str_equal(STRINGIFY(DT_PHANDLE_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 1)),
+			  "DT_N_S_gpio_map_test_S_parent");
+	zassert_equal(DT_PHA_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map, 1, parent_specifier_0), 6);
+
+	zassert_equal(DT_PROP_LEN(TEST_GPIO_CONNECTOR, gpio_map_mask), 2);
+	zassert_equal(DT_PROP_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map_mask, 0), 0xffffffff);
+	zassert_equal(DT_PROP_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map_mask, 1), 0xffffffc0);
+	zassert_equal(DT_PROP_LEN(TEST_GPIO_CONNECTOR, gpio_map_pass_thru), 2);
+	zassert_equal(DT_PROP_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map_pass_thru, 0), 0x0);
+	zassert_equal(DT_PROP_BY_IDX(TEST_GPIO_CONNECTOR, gpio_map_pass_thru, 1), 0x3f);
+
+	DT_FOREACH_PROP_ELEM_VARGS(TEST_INTERRUPT_NEXUS, interrupt_map, INTERRUPT_NEXUS_CHECK,
+				   9999);
+}
+
 ZTEST_SUITE(devicetree_api, NULL, NULL, NULL, NULL, NULL);
diff --git a/tests/lib/devicetree/api/testcase.yaml b/tests/lib/devicetree/api/testcase.yaml
index ded886833ac87..2759a047ab7d6 100644
--- a/tests/lib/devicetree/api/testcase.yaml
+++ b/tests/lib/devicetree/api/testcase.yaml
@@ -5,6 +5,7 @@ tests:
     # will mostly likely be the fastest.
     platform_allow:
       - native_sim
+      - native_sim/native/64
      - qemu_x86
      - qemu_x86_64
      - qemu_cortex_m3