     AutoSpecifier,
     BaseClass,
     ClassDecl,
+    Concept,
     DecltypeSpecifier,
     DecoratedType,
     EnumDecl,
@@ -537,7 +538,7 @@ def _on_block_end(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
         self._finish_class_decl(old_state)

     #
-    # Template parsing
+    # Template and concept parsing
     #

     def _parse_template_type_parameter(
@@ -605,9 +606,13 @@ def _parse_template_decl(self) -> TemplateDecl:
                     lex.return_token(ptok)
                     param = self._parse_template_type_parameter(tok, None)
                 else:
-                    param = self._parse_parameter(ptok, TemplateNonTypeParam, ">")
+                    param, _ = self._parse_parameter(
+                        ptok, TemplateNonTypeParam, False, ">"
+                    )
             else:
-                param = self._parse_parameter(tok, TemplateNonTypeParam, ">")
+                param, _ = self._parse_parameter(
+                    tok, TemplateNonTypeParam, concept_ok=False, end=">"
+                )

             params.append(param)

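For context, the calls changed here handle ordinary non-type template parameters; a hypothetical input of the kind this path parses (all identifiers invented):

    // Hypothetical non-type template parameters of the kind this call parses
    template <int N, bool Flag = false>
    struct FixedBuffer {};
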
@@ -640,6 +645,11 @@ def _parse_template(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
             self._parse_using(tok, doxygen, template)
         elif tok.type == "friend":
             self._parse_friend_decl(tok, doxygen, template)
+        elif tok.type == "concept":
+            self._parse_concept(tok, doxygen, template)
+        elif tok.type == "requires":
+            template.raw_requires_pre = self._parse_requires(tok)
+            self._parse_declarations(self.lex.token(), doxygen, template)
         else:
             self._parse_declarations(tok, doxygen, template)

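The two new branches correspond to a concept definition and a requires-clause written between the template header and the declaration. A hypothetical C++20 sketch of each (identifiers invented):

    #include <type_traits>

    // a concept definition after a template header
    template <typename T>
    concept SmallValue = sizeof(T) <= 4;

    // a requires-clause between the template header and the declaration
    template <typename T>
        requires std::is_integral_v<T>
    T clamp_to_byte(T value);
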
@@ -750,6 +760,117 @@ def _parse_template_instantiation(
             self.state, TemplateInst(typename, extern, doxygen)
         )

+    def _parse_concept(
+        self,
+        tok: LexToken,
+        doxygen: typing.Optional[str],
+        template: TemplateDecl,
+    ) -> None:
+        name = self._next_token_must_be("NAME")
+        self._next_token_must_be("=")
+
+        # not trying to understand this for now
+        raw_constraint = self._create_value(self._consume_value_until([], ",", ";"))
+
+        state = self.state
+        if isinstance(state, ClassBlockState):
+            raise CxxParseError("concept cannot be defined in a class")
+
+        self.visitor.on_concept(
+            state,
+            Concept(
+                template=template,
+                name=name.value,
+                raw_constraint=raw_constraint,
+                doxygen=doxygen,
+            ),
+        )
+
+    # fmt: off
+    _expr_operators = {
+        "<", ">", "|", "%", "^", "!", "*", "-", "+", "&", "=",
+        "&&", "||", "<<"
+    }
+    # fmt: on
+
+    def _parse_requires(
+        self,
+        tok: LexToken,
+    ) -> Value:
+        tok = self.lex.token()
+
+        rawtoks: typing.List[LexToken] = []
+
+        # The easier case -- requires requires
+        if tok.type == "requires":
+            rawtoks.append(tok)
+            for tt in ("(", "{"):
+                tok = self._next_token_must_be(tt)
+                rawtoks.extend(self._consume_balanced_tokens(tok))
+            # .. and that's it?
+
+        # this is either a parenthesized expression or a primary clause
+        elif tok.type == "(":
+            rawtoks.extend(self._consume_balanced_tokens(tok))
+        else:
+            while True:
+                if tok.type == "(":
+                    rawtoks.extend(self._consume_balanced_tokens(tok))
+                else:
+                    tok = self._parse_requires_segment(tok, rawtoks)

+                # If this is not an operator of some kind, we don't know how
+                # to proceed so let the next parser figure it out
+                if tok.value not in self._expr_operators:
+                    break
+
+                rawtoks.append(tok)
+
+                # check once more for compound operator?
+                tok = self.lex.token()
+                if tok.value in self._expr_operators:
+                    rawtoks.append(tok)
+                    tok = self.lex.token()
+
+        self.lex.return_token(tok)
+
+        return self._create_value(rawtoks)
+
+    def _parse_requires_segment(
+        self, tok: LexToken, rawtoks: typing.List[LexToken]
+    ) -> LexToken:
+        # first token could be a name or ::
+        if tok.type == "DBL_COLON":
+            rawtoks.append(tok)
+            tok = self.lex.token()
+
+        while True:
+            # This token has to be a name or some other valid name-like thing
+            if tok.value == "decltype":
+                rawtoks.append(tok)
+                tok = self._next_token_must_be("(")
+                rawtoks.extend(self._consume_balanced_tokens(tok))
+            elif tok.type == "NAME":
+                rawtoks.append(tok)
+            else:
+                # not sure what I expected, but I didn't find it
+                raise self._parse_error(tok)
+
+            tok = self.lex.token()
+
+            # Maybe there's a specialization
+            if tok.value == "<":
+                rawtoks.extend(self._consume_balanced_tokens(tok))
+                tok = self.lex.token()
+
+            # Maybe we keep trying to parse this name
+            if tok.type == "DBL_COLON":
+                tok = self.lex.token()
+                continue
+
+            # Let the caller decide
+            return tok
+
     #
     # Attributes
     #
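A rough illustration of the three shapes _parse_requires distinguishes: a `requires requires(...) { ... }` expression, a parenthesized constraint, and a primary clause built from qualified names, specializations, and the listed operators. All identifiers below are invented:

    #include <concepts>

    // requires requires(...) { ... }
    template <typename T>
        requires requires(T t) { t.reset(); }
    void recycle(T &t);

    // a parenthesized constraint expression
    template <typename T>
        requires (sizeof(T) > 1)
    void widen(T value);

    // a primary clause built from qualified names and && / ||
    template <typename T>
        requires std::integral<T> && std::default_initializable<T>
    T zeroed();
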
@@ -1615,23 +1736,43 @@ def _parse_pqname(
     #

     def _parse_parameter(
-        self, tok: typing.Optional[LexToken], cls: typing.Type[PT], end: str = ")"
-    ) -> PT:
+        self,
+        tok: typing.Optional[LexToken],
+        cls: typing.Type[PT],
+        concept_ok: bool,
+        end: str = ")",
+    ) -> typing.Tuple[PT, typing.Optional[Type]]:
         """
         Parses a single parameter (excluding vararg parameters). Also used
         to parse template non-type parameters
+
+        Returns parameter type, abbreviated template type
         """

         param_name = None
         default = None
         param_pack = False
+        parsed_type: typing.Optional[Type]
+        at_type: typing.Optional[Type] = None

-        # required typename + decorators
-        parsed_type, mods = self._parse_type(tok)
-        if parsed_type is None:
-            raise self._parse_error(None)
+        if not tok:
+            tok = self.lex.token()
+
+        # placeholder type, skip typename
+        if tok.type == "auto":
+            at_type = parsed_type = Type(PQName([AutoSpecifier()]))
+        else:
+            # required typename + decorators
+            parsed_type, mods = self._parse_type(tok)
+            if parsed_type is None:
+                raise self._parse_error(None)
+
+            mods.validate(var_ok=False, meth_ok=False, msg="parsing parameter")

-        mods.validate(var_ok=False, meth_ok=False, msg="parsing parameter")
+        # Could be a concept
+        if concept_ok and self.lex.token_if("auto"):
+            at_type = Type(parsed_type.typename)
+            parsed_type.typename = PQName([AutoSpecifier()])

         dtype = self._parse_cv_ptr(parsed_type)

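A hedged sketch of the parameter forms this branch is aimed at: a bare `auto` placeholder and a type-constraint followed by `auto` (function names invented):

    #include <concepts>

    void accept_anything(auto value);            // bare placeholder parameter
    void accept_integral(std::integral auto n);  // constrained placeholder parameter
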
@@ -1659,32 +1800,50 @@ def _parse_parameter(
         if self.lex.token_if("="):
             default = self._create_value(self._consume_value_until([], ",", end))

+        # abbreviated template pack
+        if at_type and self.lex.token_if("ELLIPSIS"):
+            param_pack = True
+
         param = cls(type=dtype, name=param_name, default=default, param_pack=param_pack)
         self.debug_print("parameter: %s", param)
-        return param
+        return param, at_type

-    def _parse_parameters(self) -> typing.Tuple[typing.List[Parameter], bool]:
+    def _parse_parameters(
+        self, concept_ok: bool
+    ) -> typing.Tuple[typing.List[Parameter], bool, typing.List[TemplateParam]]:
         """
-        Consumes function parameters and returns them, and vararg if found
+        Consumes function parameters and returns them, and vararg if found, and
+        promotes abbreviated template parameters to actual template parameters
+        if concept_ok is True
         """

         # starting at a (

         # special case: zero parameters
         if self.lex.token_if(")"):
-            return [], False
+            return [], False, []

         params: typing.List[Parameter] = []
         vararg = False
+        at_params: typing.List[TemplateParam] = []

         while True:
             if self.lex.token_if("ELLIPSIS"):
                 vararg = True
                 self._next_token_must_be(")")
                 break

-            param = self._parse_parameter(None, Parameter)
+            param, at_type = self._parse_parameter(None, Parameter, concept_ok)
             params.append(param)
+            if at_type:
+                at_params.append(
+                    TemplateNonTypeParam(
+                        type=at_type,
+                        param_idx=len(params) - 1,
+                        param_pack=param.param_pack,
+                    )
+                )
+
             tok = self._next_token_must_be(",", ")")
             if tok.value == ")":
                 break
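A hypothetical input exercising the pack handling above: a constrained placeholder followed by an ellipsis, which is recorded as a parameter pack and later promoted to a template parameter (identifiers invented):

    #include <concepts>

    // a constrained placeholder followed by an ellipsis declares a parameter pack
    void print_all(std::convertible_to<int> auto... values);
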
@@ -1699,7 +1858,7 @@ def _parse_parameters(self) -> typing.Tuple[typing.List[Parameter], bool]:
         ):
             params = []

-        return params, vararg
+        return params, vararg, at_params

     _auto_return_typename = PQName([AutoSpecifier()])

@@ -1745,6 +1904,15 @@ def _parse_fn_end(self, fn: Function) -> None:
             if otok:
                 toks = self._consume_balanced_tokens(otok)[1:-1]
             fn.noexcept = self._create_value(toks)
+        else:
+            rtok = self.lex.token_if("requires")
+            if rtok:
+                fn_template = fn.template
+                if fn_template is None:
+                    raise self._parse_error(rtok)
+                elif isinstance(fn_template, list):
+                    fn_template = fn_template[0]
+                fn_template.raw_requires_post = self._parse_requires(rtok)

         if self.lex.token_if("{"):
             self._discard_contents("{", "}")
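The trailing requires-clause recorded here sits after the function declarator of a templated free function; a hypothetical example (names invented):

    #include <concepts>

    template <typename T>
    T duplicate(const T &value) requires std::copyable<T>;
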
@@ -1805,6 +1973,13 @@ def _parse_method_end(self, method: Method) -> None:
                 if otok:
                     toks = self._consume_balanced_tokens(otok)[1:-1]
                 method.noexcept = self._create_value(toks)
+            elif tok_value == "requires":
+                method_template = method.template
+                if method_template is None:
+                    raise self._parse_error(tok)
+                elif isinstance(method_template, list):
+                    method_template = method_template[0]
+                method_template.raw_requires_post = self._parse_requires(tok)
             else:
                 self.lex.return_token(tok)
                 break
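The same trailing requires-clause can appear on a member function of a class template; a hypothetical example (names invented):

    #include <concepts>

    template <typename T>
    struct Box {
        // a trailing requires-clause on a member function
        void reset() requires std::default_initializable<T>;
    };
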
@@ -1846,7 +2021,16 @@ def _parse_function(
         state.location = location
         is_class_block = isinstance(state, ClassBlockState)

-        params, vararg = self._parse_parameters()
+        params, vararg, at_params = self._parse_parameters(True)
+
+        # Promote abbreviated template parameters
+        if at_params:
+            if template is None:
+                template = TemplateDecl(at_params)
+            elif isinstance(template, TemplateDecl):
+                template.params.extend(at_params)
+            else:
+                template[-1].params.extend(at_params)

         # A method outside of a class has multiple name segments
         multiple_name_segments = len(pqname.segments) > 1
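The promotion means an abbreviated signature is reported as if it carried an explicit template header; a hypothetical pair of roughly equivalent spellings (names invented):

    #include <concepts>

    // abbreviated spelling ...
    void store(std::integral auto n);

    // ... behaves like this explicit template
    template <std::integral T>
    void store_explicit(T n);
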
@@ -2019,7 +2203,7 @@ def _parse_cv_ptr_or_fn(
                 toks = self._consume_balanced_tokens(gtok)
                 self.lex.return_tokens(toks[1:-1])

-            fn_params, vararg = self._parse_parameters()
+            fn_params, vararg, _ = self._parse_parameters(False)

             assert not isinstance(dtype, FunctionType)
             dtype = dtype_fn = FunctionType(dtype, fn_params, vararg)
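Here concept_ok is False because placeholder parameters are not promoted inside a nested function type; a hypothetical function-pointer parameter of the kind this path parses:

    // parameters of the nested function type are parsed with concept_ok disabled
    void register_callback(int (*callback)(int, double));
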
@@ -2047,7 +2231,7 @@ def _parse_cv_ptr_or_fn(
                 assert not isinstance(dtype, FunctionType)
                 dtype = self._parse_array_type(aptok, dtype)
             elif aptok.type == "(":
-                fn_params, vararg = self._parse_parameters()
+                fn_params, vararg, _ = self._parse_parameters(False)
                 # the type we already have is the return type of the function pointer

                 assert not isinstance(dtype, FunctionType)