Skip to content

Commit b706ae2

Browse files
bors[bot] and Veykril authored
Merge #11160
11160: internal: Use basic `NonEmptyVec` in mbe::syntax_bridge r=Veykril a=Veykril There are some places in the ide crates where this can be used as well if memory serves right. bors r+ Co-authored-by: Lukas Wirth <[email protected]>
2 parents bc81dca + 65a1538 commit b706ae2

File tree

3 files changed

+98
-51
lines changed

3 files changed

+98
-51
lines changed

crates/mbe/src/syntax_bridge.rs

Lines changed: 52 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
22
33
use rustc_hash::{FxHashMap, FxHashSet};
4+
use stdx::non_empty_vec::NonEmptyVec;
45
use syntax::{
56
ast::{self, make::tokens::doc_comment},
67
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
@@ -141,25 +142,26 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
141142
idx: !0,
142143
open_range: TextRange::empty(TextSize::of('.')),
143144
};
144-
let mut stack = vec![entry];
145+
let mut stack = NonEmptyVec::new(entry);
145146

146147
loop {
147-
let entry = stack.last_mut().unwrap();
148-
let result = &mut entry.subtree.token_trees;
148+
let StackEntry { subtree, .. } = stack.last_mut();
149+
let result = &mut subtree.token_trees;
149150
let (token, range) = match conv.bump() {
150151
Some(it) => it,
151152
None => break,
152153
};
153154

154-
let k: SyntaxKind = token.kind(&conv);
155-
if k == COMMENT {
155+
let kind = token.kind(&conv);
156+
if kind == COMMENT {
156157
if let Some(tokens) = conv.convert_doc_comment(&token) {
157158
// FIXME: There has to be a better way to do this
158159
// Add the comments token id to the converted doc string
159160
let id = conv.id_alloc().alloc(range);
160161
result.extend(tokens.into_iter().map(|mut tt| {
161162
if let tt::TokenTree::Subtree(sub) = &mut tt {
162-
if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = &mut sub.token_trees[2]
163+
if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
164+
sub.token_trees.get_mut(2)
163165
{
164166
lit.id = id
165167
}
@@ -169,26 +171,26 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
169171
}
170172
continue;
171173
}
172-
173-
result.push(if k.is_punct() && k != UNDERSCORE {
174+
let tt = if kind.is_punct() && kind != UNDERSCORE {
174175
assert_eq!(range.len(), TextSize::of('.'));
175176

176-
if let Some(delim) = entry.subtree.delimiter {
177+
if let Some(delim) = subtree.delimiter {
177178
let expected = match delim.kind {
178179
tt::DelimiterKind::Parenthesis => T![')'],
179180
tt::DelimiterKind::Brace => T!['}'],
180181
tt::DelimiterKind::Bracket => T![']'],
181182
};
182183

183-
if k == expected {
184-
let entry = stack.pop().unwrap();
185-
conv.id_alloc().close_delim(entry.idx, Some(range));
186-
stack.last_mut().unwrap().subtree.token_trees.push(entry.subtree.into());
184+
if kind == expected {
185+
if let Some(entry) = stack.pop() {
186+
conv.id_alloc().close_delim(entry.idx, Some(range));
187+
stack.last_mut().subtree.token_trees.push(entry.subtree.into());
188+
}
187189
continue;
188190
}
189191
}
190192

191-
let delim = match k {
193+
let delim = match kind {
192194
T!['('] => Some(tt::DelimiterKind::Parenthesis),
193195
T!['{'] => Some(tt::DelimiterKind::Brace),
194196
T!['['] => Some(tt::DelimiterKind::Bracket),
@@ -201,36 +203,35 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
201203
subtree.delimiter = Some(tt::Delimiter { id, kind });
202204
stack.push(StackEntry { subtree, idx, open_range: range });
203205
continue;
204-
} else {
205-
let spacing = match conv.peek() {
206-
Some(next)
207-
if next.kind(&conv).is_trivia()
208-
|| next.kind(&conv) == T!['[']
209-
|| next.kind(&conv) == T!['{']
210-
|| next.kind(&conv) == T!['('] =>
211-
{
212-
tt::Spacing::Alone
213-
}
214-
Some(next) if next.kind(&conv).is_punct() && next.kind(&conv) != UNDERSCORE => {
215-
tt::Spacing::Joint
216-
}
217-
_ => tt::Spacing::Alone,
218-
};
219-
let char = match token.to_char(&conv) {
220-
Some(c) => c,
221-
None => {
222-
panic!("Token from lexer must be single char: token = {:#?}", token);
223-
}
224-
};
225-
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
226206
}
207+
208+
let spacing = match conv.peek().map(|next| next.kind(&conv)) {
209+
Some(kind)
210+
if !kind.is_trivia()
211+
&& kind.is_punct()
212+
&& kind != T!['[']
213+
&& kind != T!['{']
214+
&& kind != T!['(']
215+
&& kind != UNDERSCORE =>
216+
{
217+
tt::Spacing::Joint
218+
}
219+
_ => tt::Spacing::Alone,
220+
};
221+
let char = match token.to_char(&conv) {
222+
Some(c) => c,
223+
None => {
224+
panic!("Token from lexer must be single char: token = {:#?}", token);
225+
}
226+
};
227+
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
227228
} else {
228229
macro_rules! make_leaf {
229230
($i:ident) => {
230231
tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
231232
};
232233
}
233-
let leaf: tt::Leaf = match k {
234+
let leaf: tt::Leaf = match kind {
234235
T![true] | T![false] => make_leaf!(Ident),
235236
IDENT => make_leaf!(Ident),
236237
UNDERSCORE => make_leaf!(Ident),
@@ -258,15 +259,15 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
258259
};
259260

260261
leaf.into()
261-
});
262+
};
263+
result.push(tt);
262264
}
263265

264266
// If we get here, we've consumed all input tokens.
265267
// We might have more than one subtree in the stack, if the delimiters are improperly balanced.
266268
// Merge them so we're left with one.
267-
while stack.len() > 1 {
268-
let entry = stack.pop().unwrap();
269-
let parent = stack.last_mut().unwrap();
269+
while let Some(entry) = stack.pop() {
270+
let parent = stack.last_mut();
270271

271272
conv.id_alloc().close_delim(entry.idx, None);
272273
let leaf: tt::Leaf = tt::Punct {
@@ -283,13 +284,12 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
283284
parent.subtree.token_trees.extend(entry.subtree.token_trees);
284285
}
285286

286-
let subtree = stack.pop().unwrap().subtree;
287-
if subtree.token_trees.len() == 1 {
288-
if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
289-
return first.clone();
290-
}
287+
let subtree = stack.into_first().subtree;
288+
if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
289+
first.clone()
290+
} else {
291+
subtree
291292
}
292-
subtree
293293
}
294294

295295
/// Returns the textual content of a doc comment block as a quoted string
@@ -320,7 +320,8 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
320320
let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
321321

322322
// Make `#![]`
323-
let mut token_trees = vec![mk_punct('#')];
323+
let mut token_trees = Vec::with_capacity(3);
324+
token_trees.push(mk_punct('#'));
324325
if let ast::CommentPlacement::Inner = doc {
325326
token_trees.push(mk_punct('!'));
326327
}
@@ -439,8 +440,8 @@ impl<'a> SrcToken<RawConvertor<'a>> for usize {
439440
impl<'a> TokenConvertor for RawConvertor<'a> {
440441
type Token = usize;
441442

442-
fn convert_doc_comment(&self, token: &usize) -> Option<Vec<tt::TokenTree>> {
443-
let text = self.lexed.text(*token);
443+
fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
444+
let text = self.lexed.text(token);
444445
convert_doc_comment(&doc_comment(text))
445446
}
446447

@@ -568,9 +569,9 @@ impl TokenConvertor for Convertor<'_> {
568569
}
569570
self.current = Self::next_token(&mut self.preorder, self.censor);
570571
let token = if curr.kind().is_punct() {
572+
self.punct_offset = Some((curr.clone(), 0.into()));
571573
let range = curr.text_range();
572574
let range = TextRange::at(range.start(), TextSize::of('.'));
573-
self.punct_offset = Some((curr.clone(), 0.into()));
574575
(SynToken::Punch(curr, 0.into()), range)
575576
} else {
576577
self.punct_offset = None;

crates/stdx/src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ use std::{cmp::Ordering, ops, time::Instant};
55
mod macros;
66
pub mod process;
77
pub mod panic_context;
8+
pub mod non_empty_vec;
89

910
pub use always_assert::{always, never};
1011

crates/stdx/src/non_empty_vec.rs

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
//! A [`Vec`] that is guaranteed to at least contain one element.

pub struct NonEmptyVec<T>(Vec<T>);

impl<T> NonEmptyVec<T> {
    /// Creates a new `NonEmptyVec` with `initial` as its only element.
    #[inline]
    pub fn new(initial: T) -> Self {
        NonEmptyVec(vec![initial])
    }

    /// Returns a mutable reference to the last element.
    ///
    /// Never panics: the construction and `pop` rules guarantee the inner
    /// `Vec` always holds at least one element.
    #[inline]
    pub fn last_mut(&mut self) -> &mut T {
        match self.0.last_mut() {
            Some(it) => it,
            // Invariant: the inner Vec is never empty.
            None => unreachable!(),
        }
    }

    /// Removes and returns the last element, or `None` if removing it
    /// would leave the vector empty — the final element is never popped,
    /// preserving the non-emptiness invariant.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.0.len() <= 1 {
            None
        } else {
            self.0.pop()
        }
    }

    /// Appends an element to the back of the vector.
    #[inline]
    pub fn push(&mut self, value: T) {
        self.0.push(value)
    }

    /// Returns the number of elements; always at least 1.
    #[inline]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Consumes the vector and returns its *first* element.
    ///
    /// Fix: the previous implementation used `self.0.pop()`, which yields
    /// the *last* element — that only matched this method's name when
    /// `len() == 1`. Taking the first item of `into_iter()` returns the
    /// actual first element regardless of length.
    #[inline]
    pub fn into_first(self) -> T {
        match self.0.into_iter().next() {
            Some(it) => it,
            // Invariant: the inner Vec is never empty.
            None => unreachable!(),
        }
    }
}

0 commit comments

Comments
 (0)