@@ -647,6 +647,9 @@ def _parse_template(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
             self._parse_friend_decl(tok, doxygen, template)
         elif tok.type == "concept":
             self._parse_concept(tok, doxygen, template)
+        elif tok.type == "requires":
+            template.raw_requires_pre = self._parse_requires(tok)
+            self._parse_declarations(self.lex.token(), doxygen, template)
         else:
             self._parse_declarations(tok, doxygen, template)
 
@@ -783,6 +786,91 @@ def _parse_concept(
                 ),
             )
 
+    # fmt: off
+    _expr_operators = {
+        "<", ">", "|", "%", "^", "!", "*", "-", "+", "&", "=",
+        "&&", "||", "<<"
+    }
+    # fmt: on
+
+    def _parse_requires(
+        self,
+        tok: LexToken,
+    ) -> Value:
+        tok = self.lex.token()
+
+        rawtoks: typing.List[LexToken] = []
+
+        # The easier case -- requires requires
+        if tok.type == "requires":
+            rawtoks.append(tok)
+            for tt in ("(", "{"):
+                tok = self._next_token_must_be(tt)
+                rawtoks.extend(self._consume_balanced_tokens(tok))
+            # .. and that's it?
+
+        # this is either a parenthesized expression or a primary clause
+        elif tok.type == "(":
+            rawtoks.extend(self._consume_balanced_tokens(tok))
+        else:
+            while True:
+                if tok.type == "(":
+                    rawtoks.extend(self._consume_balanced_tokens(tok))
+                else:
+                    tok = self._parse_requires_segment(tok, rawtoks)
+
+                # If this is not an operator of some kind, we don't know how
+                # to proceed so let the next parser figure it out
+                if tok.value not in self._expr_operators:
+                    break
+
+                rawtoks.append(tok)
+
+                # check once more for compound operator?
+                tok = self.lex.token()
+                if tok.value in self._expr_operators:
+                    rawtoks.append(tok)
+                    tok = self.lex.token()
+
+            self.lex.return_token(tok)
+
+        return self._create_value(rawtoks)
+
+    def _parse_requires_segment(
+        self, tok: LexToken, rawtoks: typing.List[LexToken]
+    ) -> LexToken:
+        # first token could be a name or ::
+        if tok.type == "DBL_COLON":
+            rawtoks.append(tok)
+            tok = self.lex.token()
+
+        while True:
+            # This token has to be a name or some other valid name-like thing
+            if tok.value == "decltype":
+                rawtoks.append(tok)
+                tok = self._next_token_must_be("(")
+                rawtoks.extend(self._consume_balanced_tokens(tok))
+            elif tok.type == "NAME":
+                rawtoks.append(tok)
+            else:
+                # not sure what I expected, but I didn't find it
+                raise self._parse_error(tok)
+
+            tok = self.lex.token()
+
+            # Maybe there's a specialization
+            if tok.value == "<":
+                rawtoks.extend(self._consume_balanced_tokens(tok))
+                tok = self.lex.token()
+
+            # Maybe we keep trying to parse this name
+            if tok.type == "DBL_COLON":
+                tok = self.lex.token()
+                continue
+
+            # Let the caller decide
+            return tok
+
     #
     # Attributes
     #
@@ -1816,6 +1904,15 @@ def _parse_fn_end(self, fn: Function) -> None:
             if otok:
                 toks = self._consume_balanced_tokens(otok)[1:-1]
             fn.noexcept = self._create_value(toks)
+        else:
+            rtok = self.lex.token_if("requires")
+            if rtok:
+                fn_template = fn.template
+                if fn_template is None:
+                    raise self._parse_error(rtok)
+                elif isinstance(fn_template, list):
+                    fn_template = fn_template[0]
+                fn_template.raw_requires_post = self._parse_requires(rtok)
 
         if self.lex.token_if("{"):
             self._discard_contents("{", "}")
@@ -1876,6 +1973,13 @@ def _parse_method_end(self, method: Method) -> None:
                 if otok:
                     toks = self._consume_balanced_tokens(otok)[1:-1]
                 method.noexcept = self._create_value(toks)
+            elif tok_value == "requires":
+                method_template = method.template
+                if method_template is None:
+                    raise self._parse_error(tok)
+                elif isinstance(method_template, list):
+                    method_template = method_template[0]
+                method_template.raw_requires_post = self._parse_requires(tok)
             else:
                 self.lex.return_token(tok)
                 break
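
For context, a minimal usage sketch of what the new branches are meant to capture. It assumes the patched parser is cxxheaderparser and that its simple.parse_string() entry point and the ParsedData.namespace.functions layout apply; only raw_requires_pre and raw_requires_post come from this diff, everything else here is an assumption, not part of the change:

# Hypothetical driver, not part of the diff: exercise the new requires-clause
# handling on a template function with both a pre and a trailing clause.
from cxxheaderparser.simple import parse_string

content = """
template <typename T>
    requires std::is_integral_v<T>
T twice(T v) requires (sizeof(T) <= 8);
"""

data = parse_string(content)
fn = data.namespace.functions[0]

# The clauses are captured as raw token Values on the template declaration,
# not evaluated or normalized.
print(fn.template.raw_requires_pre)   # constraint after the template header
print(fn.template.raw_requires_post)  # trailing constraint after the declarator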