Commit c5059e0

Return all ranges corresponding to a token id in TokenMap

1 parent: 7e31c5e

6 files changed: +100 −72 lines
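
In short (my summary of the diff): a macro expansion can mention the same input token more than once, so a single tt::TokenId may correspond to several text ranges in the expanded code. TokenMap::range_by_token is therefore split into ranges_by_token (all ranges) and first_range_by_token (the old single-range behaviour), ExpansionInfo::map_token_down returns an iterator of tokens, and Semantics gains descend_into_macros_many. A small illustration of the fan-out, not taken from the commit:

// Illustration only: the single identifier token `Counter` handed to
// `twice!` appears at several distinct ranges in the expansion, so a
// token map recording the expansion must return more than one range
// for its token id.
macro_rules! twice {
    ($name:ident) => {
        struct $name;
        impl $name {
            fn describe() -> &'static str { stringify!($name) }
        }
    };
}

twice!(Counter);

fn main() {
    assert_eq!(Counter::describe(), "Counter");
    let _ = Counter; // unit struct defined by the expansion
}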

crates/hir/src/semantics.rs
Lines changed: 71 additions & 59 deletions

@@ -2,7 +2,7 @@
 
 mod source_to_def;
 
-use std::{cell::RefCell, fmt, iter::successors};
+use std::{cell::RefCell, fmt};
 
 use base_db::{FileId, FileRange};
 use hir_def::{
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, GenericParamsOwner, LoopBodyOwner},
@@ -166,6 +167,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     }
 
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token).pop().unwrap()
+    }
+
+    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
 
@@ -440,76 +445,83 @@
         )
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
-            None => return token,
+            None => return smallvec![token],
         };
         let sa = self.analyze(&parent);
-
-        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
+        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut res = smallvec![];
+        while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
 
-            for node in token.value.ancestors() {
-                match_ast! {
-                    match node {
-                        ast::MacroCall(macro_call) => {
-                            let tt = macro_call.token_tree()?;
-                            let l_delim = match tt.left_delimiter_token() {
-                                Some(it) => it.text_range().end(),
-                                None => tt.syntax().text_range().start()
-                            };
-                            let r_delim = match tt.right_delimiter_token() {
-                                Some(it) => it.text_range().start(),
-                                None => tt.syntax().text_range().end()
-                            };
-                            if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
-                                return None;
-                            }
-                            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                            let token = self
-                                .expansion_info_cache
-                                .borrow_mut()
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(self.db.upcast(), None, token.as_ref())?;
-
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
-
-                            return Some(token);
-                        },
-                        ast::Item(item) => {
-                            if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
-                                let file_id = call_id.as_file();
-                                let token = self
-                                    .expansion_info_cache
-                                    .borrow_mut()
+            let mapped = (|| {
+                for node in token.value.ancestors() {
+                    match_ast! {
+                        match node {
+                            ast::MacroCall(macro_call) => {
+                                let tt = macro_call.token_tree()?;
+                                let l_delim = match tt.left_delimiter_token() {
+                                    Some(it) => it.text_range().end(),
+                                    None => tt.syntax().text_range().start()
+                                };
+                                let r_delim = match tt.right_delimiter_token() {
+                                    Some(it) => it.text_range().start(),
+                                    None => tt.syntax().text_range().end()
+                                };
+                                if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                                    return None;
+                                }
+                                let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                                let mut cache = self.expansion_info_cache.borrow_mut();
+                                let tokens = cache
                                     .entry(file_id)
                                     .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                     .as_ref()?
-                                    .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
-
-                                if let Some(parent) = token.value.parent() {
-                                    self.cache(find_root(&parent), token.file_id);
+                                    .map_token_down(self.db.upcast(), None, token.as_ref())?;
+
+                                queue.extend(tokens.inspect(|token| {
+                                    if let Some(parent) = token.value.parent() {
+                                        self.cache(find_root(&parent), token.file_id);
+                                    }
+                                }));
+                                return Some(());
+                            },
+                            ast::Item(item) => {
+                                match self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item))) {
+                                    Some(call_id) => {
+                                        let file_id = call_id.as_file();
+                                        let mut cache = self.expansion_info_cache.borrow_mut();
+                                        let tokens = cache
+                                            .entry(file_id)
+                                            .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                            .as_ref()?
+                                            .map_token_down(self.db.upcast(), None, token.as_ref())?;
+
+                                        queue.extend(tokens.inspect(|token| {
+                                            if let Some(parent) = token.value.parent() {
+                                                self.cache(find_root(&parent), token.file_id);
+                                            }
+                                        }));
+                                        return Some(());
+                                    }
+                                    None => {}
                                 }
-
-                                return Some(token);
-                            }
-                        },
-                        _ => {}
+                            },
+                            _ => {}
+                        }
                     }
                 }
+                None
+            })();
+            match mapped {
+                Some(()) => (),
+                None => res.push(token.value),
             }
-
-            None
-        })
-        .last()
-        .unwrap();
-        token.value
+        }
+        res
     }
 
     fn descend_node_at_offset(
@@ -519,8 +531,8 @@
     ) -> impl Iterator<Item = SyntaxNode> + '_ {
         // Handle macro token cases
        node.token_at_offset(offset)
-            .map(|token| self.descend_into_macros(token))
-            .map(|it| self.token_ancestors_with_macros(it))
+            .flat_map(move |token| self.descend_into_macros(token))
+            .map(move |it| self.token_ancestors_with_macros(it))
             .flatten()
     }
 
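The key control-flow change above: iter::successors could only follow a single chain of token mappings, while map_token_down may now yield several tokens, so the traversal becomes an explicit work queue that collects every token that cannot be descended further. A self-contained sketch of that pattern with toy types (none of these names are rust-analyzer API):

// Toy model of the control-flow change in `descend_into_macros`:
// a work queue handles the fan-out where one token maps to several
// tokens inside an expansion.
fn descend(start: u32, step: impl Fn(u32) -> Vec<u32>) -> Vec<u32> {
    let mut queue = vec![start];
    let mut res = Vec::new();
    while let Some(tok) = queue.pop() {
        let mapped = step(tok);
        if mapped.is_empty() {
            // Cannot be descended further: a final result, mirroring
            // the `None => res.push(token.value)` arm above.
            res.push(tok);
        } else {
            queue.extend(mapped);
        }
    }
    res
}

fn main() {
    // "Token" 0 expands to tokens 1 and 2; neither expands further.
    let out = descend(0, |t| if t == 0 { vec![1, 2] } else { vec![] });
    assert_eq!(out, vec![2, 1]); // LIFO queue order
}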

crates/hir_expand/src/db.rs
Lines changed: 1 addition & 1 deletion

@@ -163,7 +163,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
+    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }

crates/hir_expand/src/hygiene.rs
Lines changed: 1 addition & 1 deletion

@@ -171,7 +171,7 @@ impl HygieneInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
+        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }

crates/hir_expand/src/lib.rs
Lines changed: 7 additions & 6 deletions

@@ -368,7 +368,7 @@ impl ExpansionInfo {
         db: &dyn db::AstDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
+    ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
             let call_id = match self.expanded.file_id.0 {
@@ -411,11 +411,12 @@
             }
         };
 
-        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
+        let tokens = self
+            .exp_map
+            .ranges_by_token(token_id, token.value.kind())
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        let token = self.expanded.value.covering_element(range).into_token()?;
-
-        Some(self.expanded.with_value(token))
+        Some(tokens.map(move |token| self.expanded.with_value(token)))
     }
 
     pub fn map_token_up(
@@ -453,7 +454,7 @@
             },
         };
 
-        let range = token_map.range_by_token(token_id, token.value.kind())?;
+        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
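
map_token_down now returns Option<impl Iterator<Item = InFile<SyntaxToken>> + '_>: the outer Option still signals that the mapping itself can fail (the ? operators above), while the iterator carries the zero-or-more mapped tokens. A toy version of that shape, with made-up types:

// Toy model of the new return shape: Option for "did the lookup work
// at all", an iterator borrowing `self` for the individual hits.
struct Expansion {
    items: Vec<i32>,
}

impl Expansion {
    fn map_down(&self, key: i32) -> Option<impl Iterator<Item = i32> + '_> {
        if self.items.contains(&key) {
            Some(self.items.iter().copied().filter(move |&i| i == key))
        } else {
            None // the whole mapping failed, like `?` in the real code
        }
    }
}

fn main() {
    let exp = Expansion { items: vec![1, 2, 2, 3] };
    // Callers flatten Option + iterator into a plain sequence of hits.
    let hits: Vec<_> = exp.map_down(2).into_iter().flatten().collect();
    assert_eq!(hits, vec![2, 2]);
}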

crates/mbe/src/tests/expand.rs
Lines changed: 3 additions & 2 deletions

@@ -58,8 +58,9 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();
 
-    let get_text =
-        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };
+    let get_text = |id, kind| -> String {
+        content[token_map.first_range_by_token(id, kind).unwrap()].to_string()
+    };
 
     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}

crates/mbe/src/token_map.rs
Lines changed: 17 additions & 3 deletions

@@ -46,9 +46,23 @@ impl TokenMap {
         Some(token_id)
     }
 
-    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        range.by_kind(kind)
+    pub fn ranges_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> impl Iterator<Item = TextRange> + '_ {
+        self.entries
+            .iter()
+            .filter(move |&&(tid, _)| tid == token_id)
+            .filter_map(move |(_, range)| range.by_kind(kind))
+    }
+
+    pub fn first_range_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> Option<TextRange> {
+        self.ranges_by_token(token_id, kind).next()
     }
 
     pub(crate) fn shrink_to_fit(&mut self) {
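A simplified model of the new TokenMap surface (toy Range type; the real map stores (tt::TokenId, TokenTextRange) entries and additionally filters by SyntaxKind through by_kind):

// Sketch only: `ranges_by_token` yields every range recorded for an
// id; `first_range_by_token` keeps the old single-range contract by
// taking the first hit.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Range { start: u32, end: u32 }

struct Map { entries: Vec<(u32, Range)> }

impl Map {
    fn ranges_by_token(&self, id: u32) -> impl Iterator<Item = Range> + '_ {
        self.entries.iter().filter(move |&&(tid, _)| tid == id).map(|&(_, r)| r)
    }
    fn first_range_by_token(&self, id: u32) -> Option<Range> {
        self.ranges_by_token(id).next()
    }
}

fn main() {
    let map = Map {
        entries: vec![(7, Range { start: 0, end: 3 }), (7, Range { start: 10, end: 13 })],
    };
    // Token id 7 occurs twice in the expansion, so two ranges come back.
    assert_eq!(map.ranges_by_token(7).count(), 2);
    assert_eq!(map.first_range_by_token(7), Some(Range { start: 0, end: 3 }));
}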
