@@ -143,7 +143,7 @@ impl<'a> TokenizerState<'a> {
             }

             if !self.settings.white_space.contains_key(&self.current_char) {
-                if self.current_char.is_digit(10) {
+                if self.current_char.is_ascii_digit() {
                     self.scan_number()?;
                 } else if let Some(identifier_end) =
                     self.settings.identifiers.get(&self.current_char)
@@ -202,7 +202,7 @@ impl<'a> TokenizerState<'a> {
     }

     fn char_at(&self, index: usize) -> Result<char, TokenizerError> {
-        self.sql.get(index).map(|c| *c).ok_or_else(|| {
+        self.sql.get(index).copied().ok_or_else(|| {
             self.error(format!(
                 "Index {} is out of bound (size {})",
                 index, self.size
@@ -500,7 +500,7 @@ impl<'a> TokenizerState<'a> {
         let mut scientific = 0;

         loop {
-            if self.peek_char.is_digit(10) {
+            if self.peek_char.is_ascii_digit() {
                 self.advance(1)?;
             } else if self.peek_char == '.' && !decimal {
                 if self.tokens.last().map(|t| t.token_type) == Some(self.token_types.parameter) {
@@ -534,8 +534,7 @@ impl<'a> TokenizerState<'a> {
                         .numeric_literals
                         .get(&literal.to_uppercase())
                         .unwrap_or(&String::from("")),
-                )
-                .map(|x| *x);
+                ).copied();

             let replaced = literal.replace("_", "");

@@ -604,8 +603,7 @@ impl<'a> TokenizerState<'a> {
         } else {
             self.settings
                 .keywords
-                .get(&self.text().to_uppercase())
-                .map(|x| *x)
+                .get(&self.text().to_uppercase()).copied()
                 .unwrap_or(self.token_types.var)
         };
         self.add(token_type, None)
@@ -715,13 +713,13 @@ impl<'a> TokenizerState<'a> {
             if i == 0 {
                 self.is_alphabetic_or_underscore(c)
             } else {
-                self.is_alphabetic_or_underscore(c) || c.is_digit(10)
+                self.is_alphabetic_or_underscore(c) || c.is_ascii_digit()
             }
         })
     }

     fn is_numeric(&mut self, s: &str) -> bool {
-        s.chars().all(|c| c.is_digit(10))
+        s.chars().all(|c| c.is_ascii_digit())
     }

     fn extract_value(&mut self) -> Result<String, TokenizerError> {
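
The substitutions in this diff are mechanical: `Option::<&T>::copied()` replaces the hand-written dereference `.map(|c| *c)` / `.map(|x| *x)`, and `char::is_ascii_digit()` replaces `is_digit(10)`. A minimal standalone sketch (not part of this commit) checking that the two spellings agree:

    // Standalone sketch, not part of the tokenizer: confirms the rewrites are
    // behavior-preserving for the cases the tokenizer relies on.
    fn main() {
        let chars: Vec<char> = "a7_".chars().collect();

        // Option<&char> -> Option<char>: `.copied()` is the idiomatic form of `.map(|c| *c)`.
        assert_eq!(chars.first().copied(), chars.first().map(|c| *c));

        // For radix 10, `is_ascii_digit()` agrees with `is_digit(10)` for every char,
        // including non-ASCII digits such as '٣', which both reject.
        for c in "a7_٣".chars() {
            assert_eq!(c.is_ascii_digit(), c.is_digit(10));
        }
    }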