Skip to content

Commit a6dde50

Browse files
committed
Only strip derive attributes when preparing macro input
1 parent e458f66 commit a6dde50

File tree

7 files changed

+88
-62
lines changed

7 files changed

+88
-62
lines changed

Cargo.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/hir_expand/src/db.rs

Lines changed: 22 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,13 @@
33
use std::sync::Arc;
44

55
use base_db::{salsa, SourceDatabase};
6-
use itertools::Itertools;
76
use limit::Limit;
87
use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
8+
use rustc_hash::FxHashSet;
99
use syntax::{
1010
algo::diff,
1111
ast::{self, AttrsOwner, NameOwner},
12-
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange, T,
12+
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
1313
};
1414

1515
use crate::{
@@ -151,7 +151,7 @@ pub fn expand_speculative(
151151
// Build the subtree and token mapping for the speculative args
152152
let censor = censor_for_macro_input(&loc, &speculative_args);
153153
let (mut tt, spec_args_tmap) =
154-
mbe::syntax_node_to_token_tree_censored(&speculative_args, censor);
154+
mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);
155155

156156
let (attr_arg, token_id) = match loc.kind {
157157
MacroCallKind::Attr { invoc_attr_index, .. } => {
@@ -305,7 +305,7 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
305305

306306
let node = SyntaxNode::new_root(arg);
307307
let censor = censor_for_macro_input(&loc, &node);
308-
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
308+
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);
309309

310310
if loc.def.is_proc_macro() {
311311
// proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -315,24 +315,26 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
315315
Some(Arc::new((tt, tmap)))
316316
}
317317

318-
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<TextRange> {
319-
match loc.kind {
320-
MacroCallKind::FnLike { .. } => None,
321-
MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
322-
Some(item) => item
318+
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
319+
(|| {
320+
let censor = match loc.kind {
321+
MacroCallKind::FnLike { .. } => return None,
322+
MacroCallKind::Derive { derive_attr_index, .. } => ast::Item::cast(node.clone())?
323323
.attrs()
324-
.map(|attr| attr.syntax().text_range())
325324
.take(derive_attr_index as usize + 1)
326-
.fold1(TextRange::cover),
327-
None => None,
328-
},
329-
MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
330-
Some(item) => {
331-
item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
332-
}
333-
None => None,
334-
},
335-
}
325+
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
326+
.map(|it| it.syntax().clone())
327+
.collect(),
328+
MacroCallKind::Attr { invoc_attr_index, .. } => ast::Item::cast(node.clone())?
329+
.attrs()
330+
.nth(invoc_attr_index as usize)
331+
.map(|attr| attr.syntax().clone())
332+
.into_iter()
333+
.collect(),
334+
};
335+
Some(censor)
336+
})()
337+
.unwrap_or_default()
336338
}
337339

338340
fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {

crates/mbe/src/syntax_bridge.rs

Lines changed: 41 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
22
3-
use std::iter;
4-
53
use parser::{ParseError, TreeSink};
6-
use rustc_hash::FxHashMap;
4+
use rustc_hash::{FxHashMap, FxHashSet};
75
use syntax::{
86
ast::{self, make::tokens::doc_comment},
9-
tokenize, AstToken, Parse, SmolStr, SyntaxKind,
7+
tokenize, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
108
SyntaxKind::*,
11-
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
9+
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, WalkEvent,
10+
T,
1211
};
1312
use tt::buffer::{Cursor, TokenBuffer};
1413

@@ -19,14 +18,14 @@ use crate::{
1918
/// Convert the syntax node to a `TokenTree` (what macro
2019
/// will consume).
2120
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
22-
syntax_node_to_token_tree_censored(node, None)
21+
syntax_node_to_token_tree_censored(node, &Default::default())
2322
}
2423

2524
/// Convert the syntax node to a `TokenTree` (what macro will consume)
2625
/// with the censored range excluded.
2726
pub fn syntax_node_to_token_tree_censored(
2827
node: &SyntaxNode,
29-
censor: Option<TextRange>,
28+
censor: &FxHashSet<SyntaxNode>,
3029
) -> (tt::Subtree, TokenMap) {
3130
let global_offset = node.text_range().start();
3231
let mut c = Convertor::new(node, global_offset, censor);
@@ -424,8 +423,6 @@ impl<'a> SrcToken for (&'a RawToken, &'a str) {
424423
}
425424
}
426425

427-
impl RawConvertor<'_> {}
428-
429426
impl<'a> TokenConvertor for RawConvertor<'a> {
430427
type Token = (&'a RawToken, &'a str);
431428

@@ -455,30 +452,51 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
455452
}
456453
}
457454

458-
struct Convertor {
455+
struct Convertor<'c> {
459456
id_alloc: TokenIdAlloc,
460457
current: Option<SyntaxToken>,
461-
censor: Option<TextRange>,
458+
preorder: PreorderWithTokens,
459+
censor: &'c FxHashSet<SyntaxNode>,
462460
range: TextRange,
463461
punct_offset: Option<(SyntaxToken, TextSize)>,
464462
}
465463

466-
impl Convertor {
467-
fn new(node: &SyntaxNode, global_offset: TextSize, censor: Option<TextRange>) -> Convertor {
468-
let first = node.first_token();
469-
let current = match censor {
470-
Some(censor) => iter::successors(first, |token| token.next_token())
471-
.find(|token| !censor.contains_range(token.text_range())),
472-
None => first,
473-
};
464+
impl<'c> Convertor<'c> {
465+
fn new(
466+
node: &SyntaxNode,
467+
global_offset: TextSize,
468+
censor: &'c FxHashSet<SyntaxNode>,
469+
) -> Convertor<'c> {
470+
let range = node.text_range();
471+
let mut preorder = node.preorder_with_tokens();
472+
let first = Self::next_token(&mut preorder, censor);
474473
Convertor {
475474
id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
476-
current,
477-
range: node.text_range(),
475+
current: first,
476+
preorder,
477+
range,
478478
censor,
479479
punct_offset: None,
480480
}
481481
}
482+
483+
fn next_token(
484+
preorder: &mut PreorderWithTokens,
485+
censor: &FxHashSet<SyntaxNode>,
486+
) -> Option<SyntaxToken> {
487+
while let Some(ev) = preorder.next() {
488+
let ele = match ev {
489+
WalkEvent::Enter(ele) => ele,
490+
_ => continue,
491+
};
492+
match ele {
493+
SyntaxElement::Token(t) => return Some(t),
494+
SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
495+
SyntaxElement::Node(_) => (),
496+
}
497+
}
498+
None
499+
}
482500
}
483501

484502
#[derive(Debug)]
@@ -511,7 +529,7 @@ impl SrcToken for SynToken {
511529
}
512530
}
513531

514-
impl TokenConvertor for Convertor {
532+
impl TokenConvertor for Convertor<'_> {
515533
type Token = SynToken;
516534
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
517535
convert_doc_comment(token.token())
@@ -532,11 +550,7 @@ impl TokenConvertor for Convertor {
532550
if !&self.range.contains_range(curr.text_range()) {
533551
return None;
534552
}
535-
self.current = match self.censor {
536-
Some(censor) => iter::successors(curr.next_token(), |token| token.next_token())
537-
.find(|token| !censor.contains_range(token.text_range())),
538-
None => curr.next_token(),
539-
};
553+
self.current = Self::next_token(&mut self.preorder, self.censor);
540554
let token = if curr.kind().is_punct() {
541555
let range = curr.text_range();
542556
let range = TextRange::at(range.start(), TextSize::of('.'));

crates/mbe/src/tests.rs

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
mod expand;
22
mod rule;
33

4-
use std::fmt::Write;
4+
use std::{fmt::Write, iter};
55

66
use syntax::{ast, AstNode, NodeOrToken, SyntaxNode, WalkEvent};
77
use test_utils::assert_eq_text;
@@ -252,27 +252,36 @@ struct Struct {
252252
let item = source_file.items().next().unwrap();
253253
let attr = item.attrs().nth(1).unwrap();
254254

255-
let (tt, _) =
256-
syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
255+
let (tt, _) = syntax_node_to_token_tree_censored(
256+
item.syntax(),
257+
&iter::once(attr.syntax().clone()).collect(),
258+
);
257259
expect_test::expect![[r##"# [attr0] # [attr2] struct Struct {field : ()}"##]]
258260
.assert_eq(&tt.to_string());
259261

260262
let source = r##"
263+
#[attr0]
261264
#[derive(Derive0)]
265+
#[attr1]
262266
#[derive(Derive1)]
267+
#[attr2]
263268
#[derive(Derive2)]
269+
#[attr3]
264270
struct Struct {
265271
field: ()
266272
}
267273
"##;
268274
let source_file = ast::SourceFile::parse(source).ok().unwrap();
269275
let item = source_file.items().next().unwrap();
270-
let attr = item.attrs().nth(1).unwrap();
271-
272-
let (tt, _) = syntax_node_to_token_tree_censored(
273-
item.syntax(),
274-
Some(attr.syntax().text_range().cover_offset(0.into())),
275-
);
276-
expect_test::expect![[r##"# [derive (Derive2)] struct Struct {field : ()}"##]]
276+
let derive_attr_index = 3;
277+
let censor = item
278+
.attrs()
279+
.take(derive_attr_index as usize + 1)
280+
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
281+
.map(|it| it.syntax().clone())
282+
.collect();
283+
284+
let (tt, _) = syntax_node_to_token_tree_censored(item.syntax(), &censor);
285+
expect_test::expect![[r##"# [attr0] # [attr1] # [attr2] # [derive (Derive2)] # [attr3] struct Struct {field : ()}"##]]
277286
.assert_eq(&tt.to_string());
278287
}

crates/syntax/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ doctest = false
1212
[dependencies]
1313
cov-mark = "2.0.0-pre.1"
1414
itertools = "0.10.0"
15-
rowan = "0.13.0"
15+
rowan = "0.14.0"
1616
rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
1717
rustc-hash = "1.1.0"
1818
once_cell = "1.3.1"

crates/syntax/src/lib.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,8 @@ pub use crate::{
5252
ptr::{AstPtr, SyntaxNodePtr},
5353
syntax_error::SyntaxError,
5454
syntax_node::{
55-
SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
56-
SyntaxTreeBuilder,
55+
PreorderWithTokens, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren,
56+
SyntaxToken, SyntaxTreeBuilder,
5757
},
5858
token_text::TokenText,
5959
};

crates/syntax/src/syntax_node.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
3131
pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
3232
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
3333
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
34+
pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;
3435

3536
#[derive(Default)]
3637
pub struct SyntaxTreeBuilder {

0 commit comments

Comments (0)