22
33mod source_to_def;
44
5- use std:: { cell:: RefCell , fmt, iter :: successors } ;
5+ use std:: { cell:: RefCell , fmt} ;
66
77use base_db:: { FileId , FileRange } ;
88use hir_def:: {
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
1414use hir_ty:: { associated_type_shorthand_candidates, Interner } ;
1515use itertools:: Itertools ;
1616use rustc_hash:: { FxHashMap , FxHashSet } ;
17+ use smallvec:: { smallvec, SmallVec } ;
1718use syntax:: {
1819 algo:: find_node_at_offset,
1920 ast:: { self , GenericParamsOwner , LoopBodyOwner } ,
@@ -165,7 +166,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
165166 self . imp . speculative_expand ( actual_macro_call, speculative_args, token_to_map)
166167 }
167168
169+ // FIXME: Rename to descend_into_macros_single
168170 pub fn descend_into_macros ( & self , token : SyntaxToken ) -> SyntaxToken {
171+ self . imp . descend_into_macros ( token) . pop ( ) . unwrap ( )
172+ }
173+
174+ // FIXME: Rename to descend_into_macros
175+ pub fn descend_into_macros_many ( & self , token : SyntaxToken ) -> SmallVec < [ SyntaxToken ; 1 ] > {
169176 self . imp . descend_into_macros ( token)
170177 }
171178
@@ -174,7 +181,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
174181 node : & SyntaxNode ,
175182 offset : TextSize ,
176183 ) -> Option < N > {
177- self . imp . descend_node_at_offset ( node, offset) . find_map ( N :: cast)
184+ self . imp . descend_node_at_offset ( node, offset) . flatten ( ) . find_map ( N :: cast)
178185 }
179186
180187 pub fn hir_file_for ( & self , syntax_node : & SyntaxNode ) -> HirFileId {
@@ -228,7 +235,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
228235 return Some ( it) ;
229236 }
230237
231- self . imp . descend_node_at_offset ( node, offset) . find_map ( N :: cast)
238+ self . imp . descend_node_at_offset ( node, offset) . flatten ( ) . find_map ( N :: cast)
239+ }
240+
241+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
242+ /// descend it and find again
243+ pub fn find_nodes_at_offset_with_descend < ' slf , N : AstNode + ' slf > (
244+ & ' slf self ,
245+ node : & SyntaxNode ,
246+ offset : TextSize ,
247+ ) -> impl Iterator < Item = N > + ' slf {
248+ self . imp . descend_node_at_offset ( node, offset) . filter_map ( |mut it| it. find_map ( N :: cast) )
232249 }
233250
234251 pub fn resolve_lifetime_param ( & self , lifetime : & ast:: Lifetime ) -> Option < LifetimeParam > {
@@ -440,87 +457,93 @@ impl<'db> SemanticsImpl<'db> {
440457 )
441458 }
442459
/// Descends `token` into all macro expansions it participates in, returning
/// every token it maps down to. A token that is not part of any macro call
/// (or cannot be remapped) is returned as-is, so the result is non-empty.
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
    let _p = profile::span("descend_into_macros");
    let parent = match token.parent() {
        Some(it) => it,
        // A token without a parent cannot be inside a macro call.
        None => return smallvec![token],
    };
    let sa = self.analyze(&parent);
    // Worklist of tokens still to be descended; tokens produced by one
    // expansion are pushed back so nested macro calls are handled too.
    let mut queue = vec![InFile::new(sa.file_id, token)];
    // Borrow the expansion-info cache once for the whole traversal.
    let mut cache = self.expansion_info_cache.borrow_mut();
    let mut res = smallvec![];
    while let Some(token) = queue.pop() {
        self.db.unwind_if_cancelled();

        // The closure returns Some(()) iff the token was remapped into at
        // least one expansion token (which then got enqueued). `?` inside it
        // bails out of the *closure* only, not the surrounding loop.
        let was_not_remapped = (|| {
            for node in token.value.ancestors() {
                match_ast! {
                    match node {
                        ast::MacroCall(macro_call) => {
                            let tt = macro_call.token_tree()?;
                            // Only tokens strictly between the delimiters are
                            // part of the macro input; the delimiters and the
                            // path are not remapped.
                            let l_delim = match tt.left_delimiter_token() {
                                Some(it) => it.text_range().end(),
                                None => tt.syntax().text_range().start()
                            };
                            let r_delim = match tt.right_delimiter_token() {
                                Some(it) => it.text_range().start(),
                                None => tt.syntax().text_range().end()
                            };
                            if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
                                return None;
                            }
                            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
                            let tokens = cache
                                .entry(file_id)
                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                .as_ref()?
                                // One token may map down to several tokens.
                                .map_token_down(self.db.upcast(), None, token.as_ref())?;

                            let len = queue.len();
                            queue.extend(tokens.inspect(|token| {
                                // Cache the expansion's syntax tree so later
                                // analysis of the mapped tokens can find it.
                                if let Some(parent) = token.value.parent() {
                                    self.cache(find_root(&parent), token.file_id);
                                }
                            }));
                            // Remapped iff the iterator actually produced
                            // something (queue grew).
                            return (queue.len() != len).then(|| ());
                        },
                        ast::Item(item) => {
                            // Items can be inputs to attribute/derive-style
                            // macro calls; try to remap through those as well.
                            if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
                                let file_id = call_id.as_file();
                                let tokens = cache
                                    .entry(file_id)
                                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                    .as_ref()?
                                    .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;

                                let len = queue.len();
                                queue.extend(tokens.inspect(|token| {
                                    if let Some(parent) = token.value.parent() {
                                        self.cache(find_root(&parent), token.file_id);
                                    }
                                }));
                                return (queue.len() != len).then(|| ());
                            }
                        },
                        _ => {}
                    }
                }
            }
            None
        })().is_none();
        // Tokens that could not be descended any further are final results.
        if was_not_remapped {
            res.push(token.value)
        }
    }
    res
}
514535
536+ // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
537+ // traversing the inner iterator when it finds a node.
515538 fn descend_node_at_offset (
516539 & self ,
517540 node : & SyntaxNode ,
518541 offset : TextSize ,
519- ) -> impl Iterator < Item = SyntaxNode > + ' _ {
542+ ) -> impl Iterator < Item = impl Iterator < Item = SyntaxNode > + ' _ > + ' _ {
520543 // Handle macro token cases
521544 node. token_at_offset ( offset)
522- . map ( |token| self . descend_into_macros ( token) )
523- . map ( |it| self . token_ancestors_with_macros ( it) )
545+ . map ( move |token| self . descend_into_macros ( token) )
546+ . map ( |it| it . into_iter ( ) . map ( move |it| self . token_ancestors_with_macros ( it) ) )
524547 . flatten ( )
525548 }
526549
0 commit comments