@@ -50,27 +50,32 @@ pub const DEFAULT_REGEX_OPTIONS: RegexOptions = RegexOptions {
 #[derive(Debug)]
 #[doc(hidden)]
 pub struct Rule<StorageT> {
-    /// If `Some`, the ID that lexemes created against this rule will be given (lrlex gives such
-    /// rules a guaranteed unique value, though that value can be overridden by clients who need to
-    /// control the ID). If `None`, then this rule specifies lexemes which should not appear in the
-    /// user's input.
+    /// If `Some`, this specifies the ID that lexemes resulting from this rule will have. Note that
+    /// lrlex gives rules a guaranteed unique value by default, though users can later override
+    /// that, potentially undermining uniqueness if they're not careful.
+    ///
+    /// If `None`, then this rule specifies lexemes which should not appear in the user's input.
     pub(super) tok_id: Option<StorageT>,
     /// This rule's name. If None, then text which matches this rule will be skipped (i.e. will not
     /// create a lexeme).
+    #[deprecated(note = "Use the name() function")]
     pub name: Option<String>,
+    #[deprecated(note = "Use the name_span() function")]
     pub name_span: Span,
     pub(super) re_str: String,
     re: Regex,
     /// Id(s) of permitted start conditions for the lexer to match this rule.
+    #[deprecated(note = "Use the start_states() function")]
     pub start_states: Vec<usize>,
     /// If Some(_), successful matching of this rule will cause the current stack of start
     /// conditions in the lexer to be updated with the enclosed value, using the designated
     /// operation.
     /// If None, successful matching causes no change to the current start condition.
+    #[deprecated(note = "Use the target_state() function")]
     pub target_state: Option<(usize, StartStateOperation)>,
 }
 
-impl<StorageT> Rule<StorageT> {
+impl<StorageT: PrimInt> Rule<StorageT> {
     /// Create a new `Rule`. This interface is unstable and should only be used by code generated
     /// by lrlex itself.
     #[doc(hidden)]
@@ -115,6 +120,7 @@ impl<StorageT> Rule<StorageT> {
         }
 
         let re = re.build()?;
+        #[allow(deprecated)]
         Ok(Rule {
             tok_id,
             name,
@@ -125,6 +131,44 @@ impl<StorageT> Rule<StorageT> {
             target_state,
         })
     }
+
+    /// Return this rule's token ID, if any.
+    ///
+    /// If `Some`, this specifies the ID that lexemes resulting from this rule will have. If
+    /// `None`, then this rule specifies lexemes which should not appear in the user's input.
+    pub fn tok_id(&self) -> Option<StorageT> {
+        self.tok_id
+    }
+
+    /// Return this rule's name. If `None`, then text which matches this rule will be skipped (i.e.
+    /// it will not result in the creation of a [Lexeme]).
+    pub fn name(&self) -> Option<&str> {
+        #[allow(deprecated)]
+        self.name.as_deref()
+    }
+
+    /// Return the [Span] of this rule's name.
+    pub fn name_span(&self) -> Span {
+        #[allow(deprecated)]
+        self.name_span
+    }
+
+    /// Return the original regular expression specified by the user for this [Rule].
+    pub fn re_str(&self) -> &str {
+        &self.re_str
+    }
+
+    /// Return the IDs of the permitted start conditions for the lexer to match this rule.
+    pub fn start_states(&self) -> &[usize] {
+        #[allow(deprecated)]
+        self.start_states.as_slice()
+    }
+
+    /// Return the target state and operation, if any, applied when this rule matches.
+    pub fn target_state(&self) -> Option<(usize, StartStateOperation)> {
+        #[allow(deprecated)]
+        self.target_state.clone()
+    }
 }
 
 /// Methods which all lexer definitions must implement.
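For orientation, here is a minimal, self-contained sketch of the pattern this commit applies to `Rule`: the public field stays readable for now but is marked `#[deprecated]`, a getter becomes the supported access path, and internal uses opt out of the warning with `#[allow(deprecated)]`. The struct and field names below are illustrative only, not lrlex's own definitions.

```rust
// Illustrative sketch only: deprecating a public field in favour of a getter.
pub struct Rule {
    #[deprecated(note = "Use the name() function")]
    pub name: Option<String>,
}

impl Rule {
    /// The supported way to read the rule's name.
    pub fn name(&self) -> Option<&str> {
        // The getter itself must opt out of the deprecation warning it would trigger.
        #[allow(deprecated)]
        self.name.as_deref()
    }
}

fn main() {
    // Constructing via the field still works, but warns unless explicitly allowed.
    #[allow(deprecated)]
    let r = Rule { name: Some("INT".to_string()) };
    assert_eq!(r.name(), Some("INT"));
}
```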
@@ -235,7 +279,7 @@ where
     }
 
     fn get_rule_by_name(&self, n: &str) -> Option<&Rule<LexerTypesT::StorageT>> {
-        self.rules.iter().find(|r| r.name.as_deref() == Some(n))
+        self.rules.iter().find(|r| r.name() == Some(n))
     }
 
     fn set_rule_ids<'a>(
@@ -261,8 +305,8 @@ where
         let mut missing_from_parser_idxs = Vec::new();
         let mut rules_with_names = 0;
         for (i, r) in self.rules.iter_mut().enumerate() {
-            if let Some(ref n) = r.name {
-                match rule_ids_map.get(&**n) {
+            if let Some(n) = r.name() {
+                match rule_ids_map.get(n) {
                     Some(tok_id) => r.tok_id = Some(*tok_id),
                     None => {
                         r.tok_id = None;
@@ -278,10 +322,7 @@ where
         } else {
             let mut mfp = HashSet::with_capacity(missing_from_parser_idxs.len());
             for i in &missing_from_parser_idxs {
-                mfp.insert((
-                    self.rules[*i].name.as_ref().unwrap().as_str(),
-                    self.rules[*i].name_span,
-                ));
+                mfp.insert((self.rules[*i].name().unwrap(), self.rules[*i].name_span()));
             }
             Some(mfp)
         };
@@ -299,8 +340,8 @@ where
             &self
                 .rules
                 .iter()
-                .filter(|x| x.name.is_some())
-                .map(|x| &**x.name.as_ref().unwrap())
+                .filter(|x| x.name().is_some())
+                .map(|x| x.name().unwrap())
                 .collect::<HashSet<&str>>(),
         )
         .cloned()
@@ -375,7 +416,7 @@ where
             Some((_, s)) => s,
         };
         for (ridx, r) in self.iter_rules().enumerate() {
-            if !Self::state_matches(current_state, &r.start_states) {
+            if !Self::state_matches(current_state, r.start_states()) {
                 continue;
             }
             if let Some(m) = r.re.find(&s[old_i..]) {
@@ -390,7 +431,7 @@ where
         }
         if longest > 0 {
             let r = self.get_rule(longest_ridx).unwrap();
-            if r.name.is_some() {
+            if r.name().is_some() {
                 match r.tok_id {
                     Some(tok_id) => {
                         lexemes.push(Ok(Lexeme::new(tok_id, old_i, longest)));
@@ -401,7 +442,7 @@ where
                     }
                 }
             }
-            if let Some((target_state_id, op)) = &r.target_state {
+            if let Some((target_state_id, op)) = &r.target_state() {
                 let state = match self.get_start_state_by_id(*target_state_id) {
                     None => {
                         // TODO: I can see an argument for lexing state to be either `None` or `Some(target_state_id)` here
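The `target_state` handling above updates the lexer's stack of start conditions when a rule matches. As a rough mental model only (the enum and helper below are invented for illustration and are not lrlex's API), the operations amount to push/pop/replace on a stack of start-condition IDs:

```rust
// Illustrative only: one plausible shape for stack-of-start-conditions updates.
#[derive(Debug, PartialEq)]
enum StackOp {
    Push,    // enter the target start condition, remembering the current one
    Pop,     // leave the current start condition, returning to the previous one
    Replace, // swap the current start condition for the target
}

fn apply(stack: &mut Vec<usize>, target: usize, op: &StackOp) {
    match op {
        StackOp::Push => stack.push(target),
        StackOp::Pop => {
            let _ = stack.pop();
        }
        StackOp::Replace => {
            let _ = stack.pop();
            stack.push(target);
        }
    }
}

fn main() {
    let mut stack = vec![0]; // 0 = the initial start condition
    apply(&mut stack, 1, &StackOp::Push);
    assert_eq!(stack, vec![0, 1]);
    apply(&mut stack, 0, &StackOp::Pop);
    assert_eq!(stack, vec![0]);
}
```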
@@ -851,18 +892,18 @@ b 'B'
         .to_string();
         let lexerdef = LRNonStreamingLexerDef::<DefaultLexerTypes<u8>>::from_str(&src).unwrap();
         assert_eq!(
-            lexerdef.get_rule_by_name("A").unwrap().name_span,
+            lexerdef.get_rule_by_name("A").unwrap().name_span(),
             Span::new(6, 7)
         );
         assert_eq!(
-            lexerdef.get_rule_by_name("B").unwrap().name_span,
+            lexerdef.get_rule_by_name("B").unwrap().name_span(),
             Span::new(12, 13)
         );
         let anonymous_rules = lexerdef
             .iter_rules()
-            .filter(|rule| rule.name.is_none())
+            .filter(|rule| rule.name().is_none())
             .collect::<Vec<_>>();
-        assert_eq!(anonymous_rules[0].name_span, Span::new(21, 21));
+        assert_eq!(anonymous_rules[0].name_span(), Span::new(21, 21));
     }
 
     #[test]
@@ -876,11 +917,11 @@ b 'B'
         .to_string();
         let lexerdef = LRNonStreamingLexerDef::<DefaultLexerTypes<u8>>::from_str(&src).unwrap();
         assert_eq!(
-            lexerdef.get_rule_by_name("A").unwrap().name_span,
+            lexerdef.get_rule_by_name("A").unwrap().name_span(),
             Span::new(44, 45)
         );
         assert_eq!(
-            lexerdef.get_rule_by_name("B").unwrap().name_span,
+            lexerdef.get_rule_by_name("B").unwrap().name_span(),
             Span::new(50, 51)
         );
     }
@@ -896,11 +937,11 @@ b 'B'
         .to_string();
         let lexerdef = LRNonStreamingLexerDef::<DefaultLexerTypes<u8>>::from_str(&src).unwrap();
         let a_rule = lexerdef.get_rule_by_name("A").unwrap();
-        assert_eq!(a_rule.name_span, Span::new(61, 62));
+        assert_eq!(a_rule.name_span(), Span::new(61, 62));
         assert_eq!(a_rule.re_str, "a");
 
         let b_rule = lexerdef.get_rule_by_name("B").unwrap();
-        assert_eq!(b_rule.name_span, Span::new(84, 85));
+        assert_eq!(b_rule.name_span(), Span::new(84, 85));
         assert_eq!(b_rule.re_str, "b");
     }
 