@@ -782,7 +782,9 @@ where
 
         let mut token_consts = TokenStream::new();
         if let Some(rim) = self.rule_ids_map {
-            for (name, id) in rim {
+            let mut rim_sorted = Vec::from_iter(rim.iter());
+            rim_sorted.sort_by_key(|(k, _)| *k);
+            for (name, id) in rim_sorted {
                 if RE_TOKEN_ID.is_match(&name) {
                     let tok_ident = format_ident!("N_{}", name.to_ascii_uppercase());
                     let storaget =
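The sort above matters because HashMap iteration order is unspecified, so without it the generated token constants could come out in a different order on every build, defeating the "only rewrite the output file when its contents change" logic further down. A minimal sketch of the same pattern, not lrlex's actual code (the function name and the u32 storage type are placeholders):

    use proc_macro2::TokenStream;
    use quote::{format_ident, quote};
    use std::collections::HashMap;

    // Emit one `pub const N_<NAME>` per token, in name order, so the generated
    // source is byte-for-byte identical from one build to the next.
    fn token_consts(rule_ids: &HashMap<String, u32>) -> TokenStream {
        // HashMap iteration order is unspecified; collect and sort by key first.
        let mut sorted = Vec::from_iter(rule_ids.iter());
        sorted.sort_by_key(|(name, _)| *name);
        let mut out = TokenStream::new();
        for (name, id) in sorted {
            let ident = format_ident!("N_{}", name.to_ascii_uppercase());
            out.extend(quote! {
                #[allow(dead_code)]
                pub const #ident: u32 = #id;
            });
        }
        out
    }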
@@ -815,9 +817,17 @@ where
         };
         // Try and run a code formatter on the generated code.
         let unformatted = out_tokens.to_string();
-        let outs = syn::parse_str(&unformatted)
-            .map(|syntax_tree| prettyplease::unparse(&syntax_tree))
-            .unwrap_or(unformatted);
+        let mut outs = String::new();
+        // Record the time that this version of lrlex was built. If the source code changes and rustc
+        // forces a recompile, this will change this value, causing anything which depends on this
+        // build of lrlex to be recompiled too.
+        let timestamp = env!("VERGEN_BUILD_TIMESTAMP");
+        write!(outs, "// lrlex build time: {}\n\n", quote!(#timestamp),).ok();
+        outs.push_str(
+            &syn::parse_str(&unformatted)
+                .map(|syntax_tree| prettyplease::unparse(&syntax_tree))
+                .unwrap_or(unformatted),
+        );
         // If the file we're about to write out already exists with the same contents, then we
         // don't overwrite it (since that will force a recompile of the file, and relinking of the
         // binary etc).
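Two mechanisms interact here: the `// lrlex build time:` header makes the generated file's contents change exactly when lrlex itself was rebuilt (VERGEN_BUILD_TIMESTAMP changes), while the existing "don't overwrite identical contents" check described in the trailing comment keeps the file's mtime stable otherwise, so cargo has nothing to recompile or relink. A rough sketch of that check, using a hypothetical helper name rather than lrlex's real code:

    use std::{fs, io, path::Path};

    // If the file on disk already holds exactly the new contents, leave it (and
    // its mtime) alone; otherwise write it out.
    fn write_if_changed(path: &Path, new_contents: &str) -> io::Result<()> {
        match fs::read_to_string(path) {
            Ok(old) if old == new_contents => Ok(()),
            _ => fs::write(path, new_contents),
        }
    }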
@@ -1205,9 +1215,12 @@ pub fn ct_token_map<StorageT: Display + ToTokens>(
     let timestamp = env!("VERGEN_BUILD_TIMESTAMP");
     let mod_ident = format_ident!("{}", mod_name);
     write!(outs, "// lrlex build time: {}\n\n", quote!(#timestamp),).ok();
-    let tokens = &token_map
-        .borrow()
-        .iter()
+    // Sort the tokens so that they're always in the same order.
+    // This will prevent unneeded rebuilds.
+    let mut token_map_sorted = Vec::from_iter(token_map.borrow().iter());
+    token_map_sorted.sort_by_key(|(k, _)| *k);
+    let tokens = &token_map_sorted
+        .into_iter()
         .map(|(k, id)| {
             let name = match rename_map {
                 Some(rmap) => *rmap.get(k.as_str()).unwrap_or(&k.as_str()),
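The same sort-before-emitting fix is applied in ct_token_map, where each token may additionally be renamed via the optional rename_map before it becomes a constant. A simplified sketch of that lookup with assumed, concrete types (the real function is generic over StorageT and takes the map through borrow()):

    use std::collections::HashMap;

    // Deterministic ordering plus optional renaming, mirroring the pattern used
    // in ct_token_map.
    fn sorted_token_names(
        token_map: &HashMap<String, usize>,
        rename_map: Option<&HashMap<&str, &str>>,
    ) -> Vec<(String, usize)> {
        let mut sorted = Vec::from_iter(token_map.iter());
        sorted.sort_by_key(|(k, _)| *k);
        sorted
            .into_iter()
            .map(|(k, id)| {
                // Prefer the renamed form if one was supplied for this token.
                let name = rename_map
                    .and_then(|rmap| rmap.get(k.as_str()).copied())
                    .unwrap_or(k.as_str());
                (name.to_string(), *id)
            })
            .collect()
    }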