11//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
22
3- use std:: iter;
4-
53use parser:: { ParseError , TreeSink } ;
6- use rustc_hash:: FxHashMap ;
4+ use rustc_hash:: { FxHashMap , FxHashSet } ;
75use syntax:: {
86 ast:: { self , make:: tokens:: doc_comment} ,
9- tokenize, AstToken , Parse , SmolStr , SyntaxKind ,
7+ tokenize, AstToken , Parse , PreorderWithTokens , SmolStr , SyntaxElement , SyntaxKind ,
108 SyntaxKind :: * ,
11- SyntaxNode , SyntaxToken , SyntaxTreeBuilder , TextRange , TextSize , Token as RawToken , T ,
9+ SyntaxNode , SyntaxToken , SyntaxTreeBuilder , TextRange , TextSize , Token as RawToken , WalkEvent ,
10+ T ,
1211} ;
1312use tt:: buffer:: { Cursor , TokenBuffer } ;
1413
@@ -19,14 +18,14 @@ use crate::{
1918/// Convert the syntax node to a `TokenTree` (what macro
2019/// will consume).
2120pub fn syntax_node_to_token_tree ( node : & SyntaxNode ) -> ( tt:: Subtree , TokenMap ) {
22- syntax_node_to_token_tree_censored ( node, None )
21+ syntax_node_to_token_tree_censored ( node, & Default :: default ( ) )
2322}
2423
2524/// Convert the syntax node to a `TokenTree` (what macro will consume)
2625/// with the censored range excluded.
2726pub fn syntax_node_to_token_tree_censored (
2827 node : & SyntaxNode ,
29- censor : Option < TextRange > ,
28+ censor : & FxHashSet < SyntaxNode > ,
3029) -> ( tt:: Subtree , TokenMap ) {
3130 let global_offset = node. text_range ( ) . start ( ) ;
3231 let mut c = Convertor :: new ( node, global_offset, censor) ;
@@ -424,8 +423,6 @@ impl<'a> SrcToken for (&'a RawToken, &'a str) {
424423 }
425424}
426425
427- impl RawConvertor < ' _ > { }
428-
429426impl < ' a > TokenConvertor for RawConvertor < ' a > {
430427 type Token = ( & ' a RawToken , & ' a str ) ;
431428
@@ -455,30 +452,51 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
455452 }
456453}
457454
/// Token-by-token converter over a syntax tree, used to build a
/// `tt::Subtree` while excluding (censoring) selected sub-nodes.
struct Convertor<'c> {
    // Allocates `tt::TokenId`s and records them in the `TokenMap`.
    id_alloc: TokenIdAlloc,
    // The next token to emit; `None` once the traversal is exhausted.
    current: Option<SyntaxToken>,
    // Preorder walk (with tokens) over the root node being converted.
    preorder: PreorderWithTokens,
    // Nodes whose entire subtrees are skipped during conversion.
    censor: &'c FxHashSet<SyntaxNode>,
    // Text range of the root node; tokens outside it are not emitted.
    range: TextRange,
    // When a multi-char punct is split, holds the token and the offset
    // of the next char still to be emitted from it.
    punct_offset: Option<(SyntaxToken, TextSize)>,
}
465463
466- impl Convertor {
467- fn new ( node : & SyntaxNode , global_offset : TextSize , censor : Option < TextRange > ) -> Convertor {
468- let first = node. first_token ( ) ;
469- let current = match censor {
470- Some ( censor) => iter:: successors ( first, |token| token. next_token ( ) )
471- . find ( |token| !censor. contains_range ( token. text_range ( ) ) ) ,
472- None => first,
473- } ;
464+ impl < ' c > Convertor < ' c > {
465+ fn new (
466+ node : & SyntaxNode ,
467+ global_offset : TextSize ,
468+ censor : & ' c FxHashSet < SyntaxNode > ,
469+ ) -> Convertor < ' c > {
470+ let range = node. text_range ( ) ;
471+ let mut preorder = node. preorder_with_tokens ( ) ;
472+ let first = Self :: next_token ( & mut preorder, censor) ;
474473 Convertor {
475474 id_alloc : { TokenIdAlloc { map : TokenMap :: default ( ) , global_offset, next_id : 0 } } ,
476- current,
477- range : node. text_range ( ) ,
475+ current : first,
476+ preorder,
477+ range,
478478 censor,
479479 punct_offset : None ,
480480 }
481481 }
482+
483+ fn next_token (
484+ preorder : & mut PreorderWithTokens ,
485+ censor : & FxHashSet < SyntaxNode > ,
486+ ) -> Option < SyntaxToken > {
487+ while let Some ( ev) = preorder. next ( ) {
488+ let ele = match ev {
489+ WalkEvent :: Enter ( ele) => ele,
490+ _ => continue ,
491+ } ;
492+ match ele {
493+ SyntaxElement :: Token ( t) => return Some ( t) ,
494+ SyntaxElement :: Node ( node) if censor. contains ( & node) => preorder. skip_subtree ( ) ,
495+ SyntaxElement :: Node ( _) => ( ) ,
496+ }
497+ }
498+ None
499+ }
482500}
483501
484502#[ derive( Debug ) ]
@@ -511,7 +529,7 @@ impl SrcToken for SynToken {
511529 }
512530}
513531
514- impl TokenConvertor for Convertor {
532+ impl TokenConvertor for Convertor < ' _ > {
515533 type Token = SynToken ;
516534 fn convert_doc_comment ( & self , token : & Self :: Token ) -> Option < Vec < tt:: TokenTree > > {
517535 convert_doc_comment ( token. token ( ) )
@@ -532,11 +550,7 @@ impl TokenConvertor for Convertor {
532550 if !& self . range . contains_range ( curr. text_range ( ) ) {
533551 return None ;
534552 }
535- self . current = match self . censor {
536- Some ( censor) => iter:: successors ( curr. next_token ( ) , |token| token. next_token ( ) )
537- . find ( |token| !censor. contains_range ( token. text_range ( ) ) ) ,
538- None => curr. next_token ( ) ,
539- } ;
553+ self . current = Self :: next_token ( & mut self . preorder , self . censor ) ;
540554 let token = if curr. kind ( ) . is_punct ( ) {
541555 let range = curr. text_range ( ) ;
542556 let range = TextRange :: at ( range. start ( ) , TextSize :: of ( '.' ) ) ;
0 commit comments