Skip to content

Commit 37cd3ab

Browse files
virtualdauscompgeek
and committed
Parse C++20 requirement constraints for functions/classes
Co-authored-by: David Vo <[email protected]>
1 parent 2957e70 commit 37cd3ab

File tree

4 files changed

+524
-1
lines changed

4 files changed

+524
-1
lines changed

cxxheaderparser/lexer.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -188,6 +188,7 @@ class PlyLexer:
188188
"DBL_RBRACKET",
189189
"DBL_COLON",
190190
"DBL_AMP",
191+
"DBL_PIPE",
191192
"ARROW",
192193
"SHIFT_LEFT",
193194
] + list(keywords)
@@ -473,6 +474,7 @@ def t_PP_DIRECTIVE(self, t: LexToken):
473474
t_DBL_RBRACKET = r"\]\]"
474475
t_DBL_COLON = r"::"
475476
t_DBL_AMP = r"&&"
477+
t_DBL_PIPE = r"\|\|"
476478
t_ARROW = r"->"
477479
t_SHIFT_LEFT = r"<<"
478480
# SHIFT_RIGHT introduces ambiguity

cxxheaderparser/parser.py

Lines changed: 104 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -647,6 +647,9 @@ def _parse_template(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
647647
self._parse_friend_decl(tok, doxygen, template)
648648
elif tok.type == "concept":
649649
self._parse_concept(tok, doxygen, template)
650+
elif tok.type == "requires":
651+
template.raw_requires_pre = self._parse_requires(tok)
652+
self._parse_declarations(self.lex.token(), doxygen, template)
650653
else:
651654
self._parse_declarations(tok, doxygen, template)
652655

@@ -783,6 +786,91 @@ def _parse_concept(
783786
),
784787
)
785788

789+
# Operator tokens that may join two primary requires-clauses into a larger
# constraint expression. SHIFT_RIGHT (">>") is deliberately absent -- it is
# ambiguous with closing nested template argument lists.
_expr_operators = set(
    "< > | % ^ ! * - + & = && || <<".split()
)
795+
796+
def _parse_requires(
    self,
    tok: LexToken,
) -> Value:
    """Consume a C++20 requires-clause whose ``requires`` keyword is *tok*
    and return the clause's raw tokens as a Value.

    The constraint is not interpreted -- tokens are captured verbatim so
    callers can store them on the template declaration.
    """
    tok = self.lex.token()

    collected: typing.List[LexToken] = []

    if tok.type == "requires":
        # "requires requires (...) { ... }": a requires-expression used
        # directly as the constraint -- just grab both balanced groups
        collected.append(tok)
        for opener in ("(", "{"):
            tok = self._next_token_must_be(opener)
            collected.extend(self._consume_balanced_tokens(tok))
    elif tok.type == "(":
        # a fully parenthesized constraint expression
        collected.extend(self._consume_balanced_tokens(tok))
    else:
        # one or more primary clauses, possibly joined by operators
        while True:
            if tok.type == "(":
                collected.extend(self._consume_balanced_tokens(tok))
            else:
                tok = self._parse_requires_segment(tok, collected)

            # A non-operator token ends the clause; we don't know how to
            # proceed, so hand it back for the next parser to figure out
            if tok.value not in self._expr_operators:
                break

            collected.append(tok)

            # a second operator token may immediately follow (compound form)
            tok = self.lex.token()
            if tok.value in self._expr_operators:
                collected.append(tok)
                tok = self.lex.token()

        self.lex.return_token(tok)

    return self._create_value(collected)
838+
839+
def _parse_requires_segment(
    self, tok: LexToken, rawtoks: typing.List[LexToken]
) -> LexToken:
    """Consume one primary requires-clause term into *rawtoks*.

    A term is a (possibly ``::``-qualified, possibly specialized) name or a
    ``decltype(...)`` expression. Returns the first token after the term so
    the caller can decide what to do with it.
    """
    # the term may open with a global scope qualifier
    if tok.type == "DBL_COLON":
        rawtoks.append(tok)
        tok = self.lex.token()

    while True:
        # the next token must be a name or something name-like
        if tok.value == "decltype":
            rawtoks.append(tok)
            tok = self._next_token_must_be("(")
            rawtoks.extend(self._consume_balanced_tokens(tok))
        elif tok.type == "NAME":
            rawtoks.append(tok)
        else:
            # not a name: nothing sensible to do but report the error
            raise self._parse_error(tok)

        tok = self.lex.token()

        # an optional template specialization follows the name
        if tok.value == "<":
            rawtoks.extend(self._consume_balanced_tokens(tok))
            tok = self.lex.token()

        # anything but "::" ends the qualified name -- caller decides next
        if tok.type != "DBL_COLON":
            return tok

        # qualified name continues; keep consuming components
        tok = self.lex.token()
873+
786874
#
787875
# Attributes
788876
#
@@ -1816,6 +1904,15 @@ def _parse_fn_end(self, fn: Function) -> None:
18161904
if otok:
18171905
toks = self._consume_balanced_tokens(otok)[1:-1]
18181906
fn.noexcept = self._create_value(toks)
1907+
else:
1908+
rtok = self.lex.token_if("requires")
1909+
if rtok:
1910+
fn_template = fn.template
1911+
if fn_template is None:
1912+
raise self._parse_error(rtok)
1913+
elif isinstance(fn_template, list):
1914+
fn_template = fn_template[0]
1915+
fn_template.raw_requires_post = self._parse_requires(rtok)
18191916

18201917
if self.lex.token_if("{"):
18211918
self._discard_contents("{", "}")
@@ -1876,6 +1973,13 @@ def _parse_method_end(self, method: Method) -> None:
18761973
if otok:
18771974
toks = self._consume_balanced_tokens(otok)[1:-1]
18781975
method.noexcept = self._create_value(toks)
1976+
elif tok_value == "requires":
1977+
method_template = method.template
1978+
if method_template is None:
1979+
raise self._parse_error(tok)
1980+
elif isinstance(method_template, list):
1981+
method_template = method_template[0]
1982+
method_template.raw_requires_post = self._parse_requires(tok)
18791983
else:
18801984
self.lex.return_token(tok)
18811985
break

cxxheaderparser/types.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -520,6 +520,15 @@ class Foo {};
520520

521521
params: typing.List[TemplateParam] = field(default_factory=list)
522522

523+
# Currently don't interpret requires; if that changes in the future
524+
# then this API will change.
525+
526+
#: template <typename T> requires ...
527+
raw_requires_pre: typing.Optional[Value] = None
528+
529+
#: template <typename T> int main() requires ...
530+
raw_requires_post: typing.Optional[Value] = None
531+
523532

524533
#: If no template, this is None. This is a TemplateDecl if this there is a single
525534
#: declaration:

0 commit comments

Comments
 (0)