@@ -110,48 +110,69 @@ fn extend_tokens_from_range(
110110 macro_call : ast:: MacroCall ,
111111 original_range : TextRange ,
112112) -> Option < TextRange > {
113- // compute original mapped token range
114- let mut expanded = None ;
115- let range = macro_call
113+ // Find all non-whitespace tokens under MacroCall
114+ let all_tokens: Vec < _ > = macro_call
116115 . syntax ( )
117116 . descendants_with_tokens ( )
118- . filter_map ( |n| match n {
119- NodeOrToken :: Token ( token) if token. text_range ( ) . is_subrange ( & original_range) => {
120- let node = descend_into_macros ( db, file_id, token) ;
121- match node. file_id {
122- it if it == file_id. into ( ) => None ,
123- it if expanded. is_none ( ) || expanded == Some ( it) => {
124- expanded = Some ( it. into ( ) ) ;
125- Some ( node. value . text_range ( ) )
126- }
127- _ => None ,
128- }
117+ . filter_map ( |n| {
118+ let token = n. as_token ( ) ?;
119+ if token. kind ( ) == WHITESPACE {
120+ None
121+ } else {
122+ Some ( token. clone ( ) )
129123 }
130- _ => None ,
131124 } )
132- . fold1 ( |x, y| union_range ( x, y) ) ?;
133-
134- let expanded = expanded?;
135- let src = db. parse_or_expand ( expanded) ?;
136- let parent = shallowest_node ( & find_covering_element ( & src, range) ) ?. parent ( ) ?;
137- // compute parent mapped token range
138- let range = macro_call
139- . syntax ( )
140- . descendants_with_tokens ( )
141- . filter_map ( |n| match n {
142- NodeOrToken :: Token ( token) => {
143- let node = descend_into_macros ( db, file_id, token. clone ( ) ) ;
144- if node. file_id == expanded
145- && node. value . text_range ( ) . is_subrange ( & parent. text_range ( ) )
146- {
147- Some ( token. text_range ( ) )
125+ . sorted_by ( |a, b| Ord :: cmp ( & a. text_range ( ) . start ( ) , & b. text_range ( ) . start ( ) ) )
126+ . collect ( ) ;
127+
128+ // Get all indices which are in original_range
129+ let indices: Vec < _ > =
130+ all_tokens
131+ . iter ( )
132+ . enumerate ( )
133+ . filter_map ( |( i, token) | {
134+ if token. text_range ( ) . is_subrange ( & original_range) {
135+ Some ( i)
148136 } else {
149137 None
150138 }
151- }
152- _ => None ,
153- } )
154- . fold1 ( |x, y| union_range ( x, y) ) ?;
139+ } )
140+ . collect ( ) ;
141+
142+ // Compute the first and last token index in original_range
143+ let first_idx = * indices. iter ( ) . min_by_key ( |& & idx| all_tokens[ idx] . text_range ( ) . start ( ) ) ?;
144+ let last_idx = * indices. iter ( ) . max_by_key ( |& & idx| all_tokens[ idx] . text_range ( ) . end ( ) ) ?;
145+
146+ // compute original mapped token range
147+ let expanded = {
148+ let first_node = descend_into_macros ( db, file_id, all_tokens[ first_idx] . clone ( ) ) ;
149+ let first_node = first_node. map ( |it| it. text_range ( ) ) ;
150+
151+ let last_node = descend_into_macros ( db, file_id, all_tokens[ last_idx] . clone ( ) ) ;
152+ if last_node. file_id == file_id. into ( ) || first_node. file_id != last_node. file_id {
153+ return None ;
154+ }
155+ first_node. map ( |it| union_range ( it, last_node. value . text_range ( ) ) )
156+ } ;
157+
158+ // Compute parent node range
159+ let src = db. parse_or_expand ( expanded. file_id ) ?;
160+ let parent = shallowest_node ( & find_covering_element ( & src, expanded. value ) ) ?. parent ( ) ?;
161+
162+ let validate = |& idx: & usize | {
163+ let token: & SyntaxToken = & all_tokens[ idx] ;
164+ let node = descend_into_macros ( db, file_id, token. clone ( ) ) ;
165+
166+ node. file_id == expanded. file_id
167+ && node. value . text_range ( ) . is_subrange ( & parent. text_range ( ) )
168+ } ;
169+
170+ // Find the first and last text range under expanded parent
171+ let first = ( 0 ..=first_idx) . rev ( ) . take_while ( validate) . last ( ) ?;
172+ let last = ( last_idx..all_tokens. len ( ) ) . take_while ( validate) . last ( ) ?;
173+
174+ let range = union_range ( all_tokens[ first] . text_range ( ) , all_tokens[ last] . text_range ( ) ) ;
175+
155176 if original_range. is_subrange ( & range) && original_range != range {
156177 Some ( range)
157178 } else {