Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 38 additions & 0 deletions doc/build/dts/macros.bnf
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,44 @@ node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM"
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM_SEP"
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM_VARGS"
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_FOREACH_PROP_ELEM_SEP_VARGS"
; Map properties generate additional macros consumed by DT_MAP_* APIs.
; The following examples assume something like this mapping nexus:
;
; connector {
; gpio-map = <1 2 &{/gpio-map-test/parent} 3
; 4 5 &{/gpio-map-test/parent} 6>;
; };
;
; Total number of entries in the mapping array.
;
; #define DT_N_<connector path>_P_gpio_map_MAP_LEN 2
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_LEN"
; Each mapping entry expands to the child specifier cells, the parent node,
; and the parent specifier cells. DT_MAP_BY_IDX() retrieves this list.
;
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0 1, 2, DT_N_<parent path>, 3
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT
; Offsets for the child specifier cells within an entry. These support
; DT_MAP_CHILD_SPECIFIER_ARGS_BY_IDX(), which slices out just those cells.
;
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0_CHILD_SPECIFIER_POS 0
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0_CHILD_SPECIFIER_LEN 2
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_CHILD_SPECIFIER_POS"
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_CHILD_SPECIFIER_LEN"
; Offsets for the parent node argument. DT_MAP_PARENT_ARG_BY_IDX() uses
; these to extract the parent node identifier.
;
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0_PARENT_POS 2
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0_PARENT_LEN 1
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_POS"
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_LEN"
; Offsets for the parent specifier cells used by
; DT_MAP_PARENT_SPECIFIER_ARGS_BY_IDX().
;
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0_PARENT_SPECIFIER_POS 3
; #define DT_N_<connector path>_P_gpio_map_MAP_IDX_0_PARENT_SPECIFIER_LEN 1
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_SPECIFIER_POS"
node-macro =/ %s"DT_N" path-id %s"_P_" prop-id %s"_MAP_IDX_" DIGIT %s"_PARENT_SPECIFIER_LEN"
; These are used by DT_CHILD_NUM and DT_CHILD_NUM_STATUS_OKAY macros
node-macro =/ %s"DT_N" path-id %s"_CHILD_NUM"
node-macro =/ %s"DT_N" path-id %s"_CHILD_NUM_STATUS_OKAY"
Expand Down
307 changes: 307 additions & 0 deletions doc/tools/validate_abnf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,307 @@
#!/usr/bin/env python3
"""Validate RFC 7405 ABNF grammars used in Zephyr documentation.

This script implements a small ABNF parser that can be used to sanity check
grammar fragments such as ``doc/build/dts/macros.bnf``. It focuses on the
constructs that appear in the Zephyr documentation today (rule concatenation,
repetition, groups, options, literals and numeric values) and reports the first
syntax error it encounters with a helpful line/column location.
"""

from __future__ import annotations

import argparse
from dataclasses import dataclass
import pathlib
import re
import sys
from typing import Iterable, List, Optional, Sequence, Tuple

Check failure on line 18 in doc/tools/validate_abnf.py

View workflow job for this annotation

GitHub Actions / Run compliance checks on patch series (PR)

Python lint error (UP035) see https://docs.astral.sh/ruff/rules/deprecated-import

doc/tools/validate_abnf.py:18 `typing.Tuple` is deprecated, use `tuple` instead

Check failure on line 18 in doc/tools/validate_abnf.py

View workflow job for this annotation

GitHub Actions / Run compliance checks on patch series (PR)

Python lint error (UP035) see https://docs.astral.sh/ruff/rules/deprecated-import

doc/tools/validate_abnf.py:18 `typing.List` is deprecated, use `list` instead

Check failure on line 18 in doc/tools/validate_abnf.py

View workflow job for this annotation

GitHub Actions / Run compliance checks on patch series (PR)

Python lint error (UP035) see https://docs.astral.sh/ruff/rules/deprecated-import

doc/tools/validate_abnf.py:18 Import from `collections.abc` instead: `Iterable`, `Sequence`

Check failure on line 18 in doc/tools/validate_abnf.py

View workflow job for this annotation

GitHub Actions / Run compliance checks on patch series (PR)

Python lint error (I001) see https://docs.astral.sh/ruff/rules/unsorted-imports

doc/tools/validate_abnf.py:11 Import block is un-sorted or un-formatted


class ABNFError(RuntimeError):
    """Raised when a syntax error is encountered in the grammar.

    Carries the 1-based ``line`` and ``column`` of the offending input and,
    when known, the name of the rule being parsed, so callers can print a
    precise diagnostic.
    """

    # ruff UP007: with ``from __future__ import annotations`` in effect,
    # PEP 604 unions replace typing.Optional.
    def __init__(self, message: str, *, line: int, column: int, rule: str | None):
        super().__init__(message)
        self.line = line
        self.column = column
        self.rule = rule

    def __str__(self) -> str:  # pragma: no cover - human readable error
        location = f"line {self.line}, column {self.column}"
        if self.rule:
            location += f" (in rule '{self.rule}')"
        return f"{location}: {super().__str__()}"


@dataclass
class Token:
    """One lexical token produced by :class:`Lexer`."""

    # ``kind`` is the matching regex group name from _TOKEN_REGEX
    # (e.g. "RULENAME", "WS") or the synthetic "EOF".
    kind: str
    # Matched source text; empty for the synthetic EOF token.
    value: str
    # 1-based column within the rule's (joined) logical line.
    column: int


_TOKEN_REGEX = re.compile(
r"(?P<WS>\s+)" # whitespace
r"|(?P<DEFINED_AS>=/)"
r"|(?P<EQUAL>=)"
r"|(?P<LPAREN>\()"
r"|(?P<RPAREN>\))"
r"|(?P<LBRK>\[)"
r"|(?P<RBRK>\])"
r"|(?P<SLASH>/)"
r"|(?P<STAR>\*)"
r'|(?P<CASE_STRING>%[si]"(?:\\.|[^"\\])*")'
r"|(?P<NUM_VAL>%[bdx](?:[0-9A-F]+(?:-[0-9A-F]+)?)(?:\.[0-9A-F]+(?:-[0-9A-F]+)?)*)"
r'|(?P<CHAR_VAL>"(?:\\.|[^"\\])*")'
r"|(?P<PROSE_VAL><(?:\\.|[^>\\])*>)"
r"|(?P<RULENAME>[A-Za-z][A-Za-z0-9-]*)"
r"|(?P<NUMBER>\d+)"
,
)


class Lexer:
    """Split one logical ABNF rule line into a stream of :class:`Token`.

    ``line_offset`` is the 1-based source line where the rule starts; it is
    used only for error reporting. Iteration always ends with a synthetic
    "EOF" token.
    """

    def __init__(self, text: str, *, line_offset: int) -> None:
        self.text = text
        self.position = 0
        self.line_offset = line_offset

    def __iter__(self) -> Iterable[Token]:
        while self.position < len(self.text):
            match = _TOKEN_REGEX.match(self.text, self.position)
            if not match:
                raise ABNFError(
                    f"unexpected character {self.text[self.position]!r}",
                    line=self.line_offset,
                    column=self.position + 1,
                    rule=None,
                )
            # Every alternative in _TOKEN_REGEX is a named group, so
            # ``match.lastgroup`` is always set here (the original had a dead
            # "lastgroup is None" branch labelled whitespace). WS tokens are
            # yielded like any other; the parser consumes them explicitly.
            token = Token(match.lastgroup, match.group(match.lastgroup), match.start() + 1)
            self.position = match.end()
            yield token
        yield Token("EOF", "", len(self.text) + 1)


class Parser:
    """Recursive-descent parser for a single ABNF rule (RFC 5234/7405 subset).

    The token sequence must end with an "EOF" token. Whitespace appears as
    explicit "WS" tokens, which the grammar methods consume wherever the
    ABNF syntax permits it.
    """

    def __init__(self, tokens: Sequence[Token], *, rule_name: str, line: int) -> None:
        self.tokens = list(tokens)
        self.index = 0
        self.rule_name = rule_name
        self.line = line

    def current(self) -> Token:
        """Return the token at the current position without consuming it."""
        return self.tokens[self.index]

    def consume(self, kind: str) -> Token:
        """Consume and return the current token, which must be of *kind*."""
        token = self.current()
        if token.kind != kind:
            raise ABNFError(
                f"expected {kind} but found {token.kind}",
                line=self.line,
                column=token.column,
                rule=self.rule_name,
            )
        self.index += 1
        return token

    # ruff UP007: PEP 604 union instead of typing.Optional.
    def match(self, kind: str) -> Token | None:
        """Consume and return the current token if it is of *kind*, else None."""
        token = self.current()
        if token.kind == kind:
            self.index += 1
            return token
        return None

    def parse(self) -> None:
        """Parse one complete rule; raise ABNFError on any syntax error."""
        self._parse_rule()
        if self.current().kind != "EOF":
            token = self.current()
            raise ABNFError(
                "unexpected trailing input",
                line=self.line,
                column=token.column,
                rule=self.rule_name,
            )

    def _parse_rule(self) -> None:
        # rule = rulename defined-as elements; "=/" adds alternatives to an
        # existing rule, plain "=" defines it.
        self.consume("RULENAME")
        self._consume_whitespace()
        if self.match("DEFINED_AS") is None:
            self.consume("EQUAL")
        self._consume_whitespace()
        self._parse_elements()

    def _parse_elements(self) -> None:
        self._parse_alternation()

    def _parse_alternation(self) -> None:
        # alternation = concatenation *("/" concatenation)
        self._parse_concatenation()
        while True:
            self._consume_whitespace()
            checkpoint = self.index
            if self.match("SLASH") is None:
                self.index = checkpoint
                break
            self._consume_whitespace()
            self._parse_concatenation()

    def _parse_concatenation(self) -> None:
        # concatenation = repetition *(1*WSP repetition). Backtrack when the
        # whitespace is followed by a token that ends this concatenation.
        self._parse_repetition()
        while True:
            checkpoint = self.index
            if not self._consume_whitespace():
                self.index = checkpoint
                break
            token = self.current()
            if token.kind in {"SLASH", "RBRK", "RPAREN", "EOF"}:
                self.index = checkpoint
                break
            self._parse_repetition()

    def _consume_whitespace(self) -> bool:
        """Skip WS tokens; return True if at least one was consumed."""
        consumed = False
        while self.current().kind == "WS":
            consumed = True
            self.index += 1
        return consumed

    def _parse_repetition(self) -> None:
        # repetition = [repeat] element
        self._maybe_parse_repeat()
        self._consume_whitespace()
        self._parse_element()

    def _maybe_parse_repeat(self) -> None:
        # repeat = 1*DIGIT / (*DIGIT "*" *DIGIT); a bare NUMBER with no "*"
        # is an exact repeat count.
        token = self.current()
        if token.kind == "NUMBER":
            self.index += 1
            if self.match("STAR") is not None:
                if self.current().kind == "NUMBER":
                    self.index += 1
            else:
                return
        elif token.kind == "STAR":
            self.index += 1
            if self.current().kind == "NUMBER":
                self.index += 1

    def _parse_element(self) -> None:
        # element = rulename / group / option / char-val / num-val / prose-val
        token = self.current()
        if token.kind in {"RULENAME", "CHAR_VAL", "CASE_STRING", "NUM_VAL", "PROSE_VAL"}:
            self.index += 1
            return
        if token.kind == "LBRK":
            # option = "[" alternation "]"
            self.index += 1
            self._consume_whitespace()
            self._parse_alternation()
            self._consume_whitespace()
            self.consume("RBRK")
            return
        if token.kind == "LPAREN":
            # group = "(" alternation ")"
            self.index += 1
            self._consume_whitespace()
            self._parse_alternation()
            self._consume_whitespace()
            self.consume("RPAREN")
            return
        raise ABNFError(
            f"unexpected token {token.kind}",
            line=self.line,
            column=token.column,
            rule=self.rule_name,
        )


def strip_comments(lines: Iterable[str]) -> list[tuple[str, int]]:
    """Remove ABNF ``;`` comments and blank lines from *lines*.

    Returns ``(text, line_number)`` pairs (1-based line numbers) for each
    line that still has content after the comment and trailing whitespace
    are removed. A ``;`` inside a double-quoted string does not start a
    comment. Leading whitespace is preserved because it marks rule
    continuation lines for :func:`join_rule_lines`.
    """
    # ruff UP006: builtin generics instead of typing.List/Tuple.
    cleaned: list[tuple[str, int]] = []
    for line_number, raw_line in enumerate(lines, start=1):
        in_string = False
        escape = False
        result_chars: list[str] = []
        for ch in raw_line.rstrip("\n"):
            if ch == "\\" and not escape:
                escape = True
                result_chars.append(ch)
                continue
            if ch == '"' and not escape:
                in_string = not in_string
            if ch == ';' and not in_string:
                break  # rest of the line is a comment
            result_chars.append(ch)
            escape = False
        cleaned_line = "".join(result_chars).rstrip()
        if cleaned_line:
            cleaned.append((cleaned_line, line_number))
    return cleaned


def join_rule_lines(cleaned_lines: Sequence[tuple[str, int]]) -> list[tuple[str, int]]:
    """Merge indented continuation lines into single logical rules.

    Per RFC 5234, a rule starts in column one and indented lines continue
    it. Each returned tuple is ``(rule_text, first_line_number)``.
    Raises ABNFError if the input begins with a continuation line.
    """
    # ruff UP006: builtin generics instead of typing.List/Tuple.
    rules: list[tuple[str, int]] = []
    current_line = ""
    start_number = 0
    for text, line_number in cleaned_lines:
        if text and not text[0].isspace():
            # A new rule starts in column one; flush the previous rule first.
            if current_line:
                rules.append((current_line, start_number))
            current_line = text.strip()
            start_number = line_number
        else:
            if not current_line:
                raise ABNFError(
                    "continuation line encountered without preceding rule",
                    line=line_number,
                    column=1,
                    rule=None,
                )
            current_line += " " + text.strip()
    if current_line:
        rules.append((current_line, start_number))
    return rules


def validate_rule(rule_text: str, line_number: int) -> None:
    """Tokenise and parse one joined rule line; raise ABNFError on error.

    *line_number* is the rule's first source line, used for diagnostics.
    """
    # The lexer always ends with an EOF token, so the list is never empty;
    # the original's per-kind re-wrapping of EOF/WS tokens only produced
    # identical copies and has been dropped.
    tokens = list(Lexer(rule_text, line_offset=line_number))
    rule_name = tokens[0].value if tokens[0].kind == "RULENAME" else None
    parser = Parser(tokens, rule_name=rule_name or "<unknown>", line=line_number)
    parser.parse()


def validate_file(path: pathlib.Path) -> None:
    """Validate every ABNF rule in *path*, raising ABNFError on the first error."""
    with path.open("r", encoding="utf-8") as stream:
        logical_lines = strip_comments(stream)
    for rule_text, first_line in join_rule_lines(logical_lines):
        validate_rule(rule_text, first_line)


# ruff UP007: PEP 604 union instead of typing.Optional.
def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point: validate each FILE argument.

    Reports the first syntax error in each failing file on stderr and
    keeps going, so one bad file does not hide problems in the others.
    Returns 0 when every file validates, 1 otherwise.
    """
    parser = argparse.ArgumentParser(description="Validate RFC 7405 ABNF files.")
    parser.add_argument("paths", metavar="FILE", nargs="+", type=pathlib.Path)
    args = parser.parse_args(argv)

    had_error = False
    for path in args.paths:
        try:
            validate_file(path)
        except ABNFError as exc:  # pragma: no cover - CLI entry point
            had_error = True
            print(f"{path}: {exc}", file=sys.stderr)
    return 1 if had_error else 0


# Script entry: exit with the validator's status code (0 = all files valid).
if __name__ == "__main__": # pragma: no cover - CLI entry point
sys.exit(main())
18 changes: 1 addition & 17 deletions dts/bindings/adc/arduino,uno-adc.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,20 +13,4 @@ description: |

compatible: "arduino,uno-adc"

include: base.yaml

properties:
io-channel-map:
type: compound
required: true

io-channel-map-mask:
type: compound

io-channel-map-pass-thru:
type: compound

"#io-channel-cells":
type: int
required: true
description: Number of items to expect in an ADC specifier
include: [base.yaml, io-channel-nexus.yaml]
4 changes: 2 additions & 2 deletions dts/bindings/gpio/gpio-nexus.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,10 @@ properties:
required: true

gpio-map-mask:
type: compound
type: array

gpio-map-pass-thru:
type: compound
type: array

"#gpio-cells":
type: int
Expand Down
Loading
Loading