Skip to content

Commit 97409e5

Browse files
bors[bot] and Veykril
authored
Merge #9970
9970: feat: Implement attribute input token mapping, fix attribute item token mapping r=Veykril a=Veykril ![image](https://user-images.githubusercontent.com/3757771/130328577-4c1ad72c-51b1-47c3-8d3d-3242ec44a355.png) The token mapping for items with attributes got overwritten partially by the attribute's non-item input, since attributes have two different inputs: the item and the direct input. This PR gives attributes a second TokenMap for their direct input. We now shift all normal input IDs by the item input maximum (we may want to swap this; see below), similar to what we do for macro-rules/def. For mapping down we then have to figure out whether we are inside the direct attribute input or its item input to pick the appropriate mapping, which can be done with some token range comparisons. Fixes #9867 Co-authored-by: Lukas Wirth <[email protected]>
2 parents 3acbf94 + 4933bec commit 97409e5

File tree

9 files changed

+197
-89
lines changed

9 files changed

+197
-89
lines changed

crates/hir/src/semantics.rs

Lines changed: 15 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -474,7 +474,7 @@ impl<'db> SemanticsImpl<'db> {
474474
.entry(file_id)
475475
.or_insert_with(|| file_id.expansion_info(self.db.upcast()))
476476
.as_ref()?
477-
.map_token_down(token.as_ref())?;
477+
.map_token_down(self.db.upcast(), None, token.as_ref())?;
478478

479479
if let Some(parent) = token.value.parent() {
480480
self.cache(find_root(&parent), token.file_id);
@@ -483,24 +483,21 @@ impl<'db> SemanticsImpl<'db> {
483483
return Some(token);
484484
},
485485
ast::Item(item) => {
486-
match self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item))) {
487-
Some(call_id) => {
488-
let file_id = call_id.as_file();
489-
let token = self
490-
.expansion_info_cache
491-
.borrow_mut()
492-
.entry(file_id)
493-
.or_insert_with(|| file_id.expansion_info(self.db.upcast()))
494-
.as_ref()?
495-
.map_token_down(token.as_ref())?;
496-
497-
if let Some(parent) = token.value.parent() {
498-
self.cache(find_root(&parent), token.file_id);
499-
}
500-
501-
return Some(token);
486+
if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
487+
let file_id = call_id.as_file();
488+
let token = self
489+
.expansion_info_cache
490+
.borrow_mut()
491+
.entry(file_id)
492+
.or_insert_with(|| file_id.expansion_info(self.db.upcast()))
493+
.as_ref()?
494+
.map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
495+
496+
if let Some(parent) = token.value.parent() {
497+
self.cache(find_root(&parent), token.file_id);
502498
}
503-
None => {}
499+
500+
return Some(token);
504501
}
505502
},
506503
_ => {}
@@ -512,7 +509,6 @@ impl<'db> SemanticsImpl<'db> {
512509
})
513510
.last()
514511
.unwrap();
515-
516512
token.value
517513
}
518514

crates/hir_def/src/attr.rs

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,9 @@
22
33
use std::{
44
convert::{TryFrom, TryInto},
5-
fmt, ops,
5+
fmt,
6+
hash::Hash,
7+
ops,
68
sync::Arc,
79
};
810

@@ -158,7 +160,7 @@ impl RawAttrs {
158160
}
159161

160162
let subtree = match attr.input.as_deref() {
161-
Some(AttrInput::TokenTree(it)) => it,
163+
Some(AttrInput::TokenTree(it, _)) => it,
162164
_ => return smallvec![attr.clone()],
163165
};
164166

@@ -258,7 +260,7 @@ impl Attrs {
258260
pub fn docs(&self) -> Option<Documentation> {
259261
let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
260262
AttrInput::Literal(s) => Some(s),
261-
AttrInput::TokenTree(_) => None,
263+
AttrInput::TokenTree(..) => None,
262264
});
263265
let indent = docs
264266
.clone()
@@ -463,7 +465,7 @@ impl AttrsWithOwner {
463465
// FIXME: code duplication in `docs` above
464466
let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
465467
AttrInput::Literal(s) => Some((s, attr.id)),
466-
AttrInput::TokenTree(_) => None,
468+
AttrInput::TokenTree(..) => None,
467469
});
468470
let indent = docs
469471
.clone()
@@ -652,14 +654,14 @@ pub enum AttrInput {
652654
/// `#[attr = "string"]`
653655
Literal(SmolStr),
654656
/// `#[attr(subtree)]`
655-
TokenTree(Subtree),
657+
TokenTree(tt::Subtree, mbe::TokenMap),
656658
}
657659

658660
impl fmt::Display for AttrInput {
659661
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
660662
match self {
661663
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
662-
AttrInput::TokenTree(subtree) => subtree.fmt(f),
664+
AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
663665
}
664666
}
665667
}
@@ -679,7 +681,8 @@ impl Attr {
679681
};
680682
Some(Interned::new(AttrInput::Literal(value)))
681683
} else if let Some(tt) = ast.token_tree() {
682-
Some(Interned::new(AttrInput::TokenTree(syntax_node_to_token_tree(tt.syntax()).0)))
684+
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
685+
Some(Interned::new(AttrInput::TokenTree(tree, map)))
683686
} else {
684687
None
685688
};
@@ -709,7 +712,7 @@ impl Attr {
709712
}
710713

711714
match self.input.as_deref() {
712-
Some(AttrInput::TokenTree(args)) => {
715+
Some(AttrInput::TokenTree(args, _)) => {
713716
let mut counter = 0;
714717
let paths = args
715718
.token_trees
@@ -756,7 +759,7 @@ pub struct AttrQuery<'a> {
756759
impl<'a> AttrQuery<'a> {
757760
pub fn tt_values(self) -> impl Iterator<Item = &'a Subtree> {
758761
self.attrs().filter_map(|attr| match attr.input.as_deref()? {
759-
AttrInput::TokenTree(it) => Some(it),
762+
AttrInput::TokenTree(it, _) => Some(it),
760763
_ => None,
761764
})
762765
}

crates/hir_def/src/lib.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -786,13 +786,13 @@ fn attr_macro_as_call_id(
786786
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
787787
let mut arg = match &macro_attr.input {
788788
Some(input) => match &**input {
789-
attr::AttrInput::Literal(_) => tt::Subtree::default(),
790-
attr::AttrInput::TokenTree(tt) => tt.clone(),
789+
attr::AttrInput::Literal(_) => Default::default(),
790+
attr::AttrInput::TokenTree(tt, map) => (tt.clone(), map.clone()),
791791
},
792-
None => tt::Subtree::default(),
792+
None => Default::default(),
793793
};
794794
// The parentheses are always disposed here.
795-
arg.delimiter = None;
795+
arg.0.delimiter = None;
796796

797797
let res = def.as_lazy_macro(
798798
db.upcast(),

crates/hir_def/src/nameres/collector.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -289,7 +289,7 @@ impl DefCollector<'_> {
289289
|| *attr_name == hir_expand::name![register_tool]
290290
{
291291
match attr.input.as_deref() {
292-
Some(AttrInput::TokenTree(subtree)) => match &*subtree.token_trees {
292+
Some(AttrInput::TokenTree(subtree, _)) => match &*subtree.token_trees {
293293
[tt::TokenTree::Leaf(tt::Leaf::Ident(name))] => name.as_name(),
294294
_ => continue,
295295
},

crates/hir_expand/src/db.rs

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ fn parse_macro_expansion(
223223
Ok(it) => it,
224224
Err(err) => {
225225
log::debug!(
226-
"failed to parse expanstion to {:?} = {}",
226+
"failed to parse expansion to {:?} = {}",
227227
fragment_kind,
228228
tt.as_debug_string()
229229
);
@@ -386,11 +386,15 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::
386386
};
387387

388388
let attr_arg = match &loc.kind {
389-
MacroCallKind::Attr { attr_args, .. } => Some(attr_args),
389+
MacroCallKind::Attr { attr_args, .. } => {
390+
let mut attr_args = attr_args.0.clone();
391+
mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
392+
Some(attr_args)
393+
}
390394
_ => None,
391395
};
392396

393-
expander.expand(db, loc.krate, &macro_arg.0, attr_arg)
397+
expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
394398
}
395399

396400
fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {

crates/hir_expand/src/hygiene.rs

Lines changed: 55 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,15 @@ use db::TokenExpander;
99
use either::Either;
1010
use mbe::Origin;
1111
use parser::SyntaxKind;
12-
use syntax::{ast, AstNode, SyntaxNode, TextRange, TextSize};
12+
use syntax::{
13+
ast::{self, AttrsOwner},
14+
AstNode, SyntaxNode, TextRange, TextSize,
15+
};
1316

1417
use crate::{
1518
db::{self, AstDatabase},
1619
name::{AsName, Name},
17-
HirFileId, HirFileIdRepr, InFile, MacroCallLoc, MacroDefKind, MacroFile,
20+
HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
1821
};
1922

2023
#[derive(Clone, Debug)]
@@ -121,11 +124,12 @@ impl HygieneFrames {
121124
#[derive(Debug, Clone, PartialEq, Eq)]
122125
struct HygieneInfo {
123126
file: MacroFile,
124-
/// The `macro_rules!` arguments.
125-
def_start: Option<InFile<TextSize>>,
127+
/// The start offset of the `macro_rules!` arguments or attribute input.
128+
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
126129

127130
macro_def: Arc<TokenExpander>,
128131
macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
132+
macro_arg_shift: mbe::Shift,
129133
exp_map: Arc<mbe::TokenMap>,
130134
}
131135

@@ -136,22 +140,34 @@ impl HygieneInfo {
136140
token: TextRange,
137141
) -> Option<(InFile<TextRange>, Origin)> {
138142
let token_id = self.exp_map.token_by_range(token)?;
143+
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
139144

140-
let (token_id, origin) = self.macro_def.map_id_up(token_id);
141-
let (token_map, tt) = match origin {
142-
mbe::Origin::Call => {
143-
let call_id = self.file.macro_call_id;
144-
let loc: MacroCallLoc = db.lookup_intern_macro(call_id);
145-
let arg_start = loc.kind.arg(db)?.text_range().start();
146-
(&self.macro_arg.1, InFile::new(loc.kind.file_id(), arg_start))
147-
}
148-
mbe::Origin::Def => match (&*self.macro_def, self.def_start) {
149-
(
150-
TokenExpander::MacroDef { def_site_token_map, .. }
151-
| TokenExpander::MacroRules { def_site_token_map, .. },
152-
Some(tt),
153-
) => (def_site_token_map, tt),
154-
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
145+
let loc = db.lookup_intern_macro(self.file.macro_call_id);
146+
147+
let (token_map, tt) = match &loc.kind {
148+
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
149+
Some(unshifted) => {
150+
token_id = unshifted;
151+
(&attr_args.1, self.attr_input_or_mac_def_start?)
152+
}
153+
None => (
154+
&self.macro_arg.1,
155+
InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
156+
),
157+
},
158+
_ => match origin {
159+
mbe::Origin::Call => (
160+
&self.macro_arg.1,
161+
InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
162+
),
163+
mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
164+
(
165+
TokenExpander::MacroDef { def_site_token_map, .. }
166+
| TokenExpander::MacroRules { def_site_token_map, .. },
167+
Some(tt),
168+
) => (def_site_token_map, *tt),
169+
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
170+
},
155171
},
156172
};
157173

@@ -165,19 +181,34 @@ fn make_hygiene_info(
165181
macro_file: MacroFile,
166182
loc: &MacroCallLoc,
167183
) -> Option<HygieneInfo> {
168-
let def_offset = loc.def.ast_id().left().and_then(|id| {
184+
let def = loc.def.ast_id().left().and_then(|id| {
169185
let def_tt = match id.to_node(db) {
170-
ast::Macro::MacroRules(mac) => mac.token_tree()?.syntax().text_range().start(),
171-
ast::Macro::MacroDef(mac) => mac.body()?.syntax().text_range().start(),
186+
ast::Macro::MacroRules(mac) => mac.token_tree()?,
187+
ast::Macro::MacroDef(mac) => mac.body()?,
172188
};
173189
Some(InFile::new(id.file_id, def_tt))
174190
});
191+
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
192+
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
193+
let tt = ast_id.to_node(db).attrs().nth(invoc_attr_index as usize)?.token_tree()?;
194+
Some(InFile::new(ast_id.file_id, tt))
195+
}
196+
_ => None,
197+
});
175198

176199
let macro_def = db.macro_def(loc.def)?;
177200
let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
178201
let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
179202

180-
Some(HygieneInfo { file: macro_file, def_start: def_offset, macro_arg, macro_def, exp_map })
203+
Some(HygieneInfo {
204+
file: macro_file,
205+
attr_input_or_mac_def_start: attr_input_or_mac_def
206+
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
207+
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
208+
macro_arg,
209+
macro_def,
210+
exp_map,
211+
})
181212
}
182213

183214
impl HygieneFrame {
@@ -214,7 +245,7 @@ impl HygieneFrame {
214245
Some(it) => it,
215246
};
216247

217-
let def_site = info.def_start.map(|it| db.hygiene_frame(it.file_id));
248+
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
218249
let call_site = Some(db.hygiene_frame(calling_file));
219250

220251
HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }

0 commit comments

Comments
 (0)