@@ -1,6 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::non_empty_vec::NonEmptyVec;
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
@@ -141,25 +142,26 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
         idx: !0,
         open_range: TextRange::empty(TextSize::of('.')),
     };
-    let mut stack = vec![entry];
+    let mut stack = NonEmptyVec::new(entry);
 
     loop {
-        let entry = stack.last_mut().unwrap();
-        let result = &mut entry.subtree.token_trees;
+        let StackEntry { subtree, .. } = stack.last_mut();
+        let result = &mut subtree.token_trees;
         let (token, range) = match conv.bump() {
             Some(it) => it,
             None => break,
         };
 
-        let k: SyntaxKind = token.kind(&conv);
-        if k == COMMENT {
+        let kind = token.kind(&conv);
+        if kind == COMMENT {
             if let Some(tokens) = conv.convert_doc_comment(&token) {
                 // FIXME: There has to be a better way to do this
                 // Add the comments token id to the converted doc string
                 let id = conv.id_alloc().alloc(range);
                 result.extend(tokens.into_iter().map(|mut tt| {
                     if let tt::TokenTree::Subtree(sub) = &mut tt {
-                        if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = &mut sub.token_trees[2]
+                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
+                            sub.token_trees.get_mut(2)
                         {
                             lit.id = id
                         }
@@ -169,26 +171,26 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
             }
             continue;
         }
-
-        result.push(if k.is_punct() && k != UNDERSCORE {
+        let tt = if kind.is_punct() && kind != UNDERSCORE {
             assert_eq!(range.len(), TextSize::of('.'));
 
-            if let Some(delim) = entry.subtree.delimiter {
+            if let Some(delim) = subtree.delimiter {
                 let expected = match delim.kind {
                     tt::DelimiterKind::Parenthesis => T![')'],
                     tt::DelimiterKind::Brace => T!['}'],
                     tt::DelimiterKind::Bracket => T![']'],
                 };
 
-                if k == expected {
-                    let entry = stack.pop().unwrap();
-                    conv.id_alloc().close_delim(entry.idx, Some(range));
-                    stack.last_mut().unwrap().subtree.token_trees.push(entry.subtree.into());
+                if kind == expected {
+                    if let Some(entry) = stack.pop() {
+                        conv.id_alloc().close_delim(entry.idx, Some(range));
+                        stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+                    }
                     continue;
                 }
             }
 
-            let delim = match k {
+            let delim = match kind {
                 T!['('] => Some(tt::DelimiterKind::Parenthesis),
                 T!['{'] => Some(tt::DelimiterKind::Brace),
                 T!['['] => Some(tt::DelimiterKind::Bracket),
@@ -201,36 +203,35 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                 subtree.delimiter = Some(tt::Delimiter { id, kind });
                 stack.push(StackEntry { subtree, idx, open_range: range });
                 continue;
-            } else {
-                let spacing = match conv.peek() {
-                    Some(next)
-                        if next.kind(&conv).is_trivia()
-                            || next.kind(&conv) == T!['[']
-                            || next.kind(&conv) == T!['{']
-                            || next.kind(&conv) == T!['('] =>
-                    {
-                        tt::Spacing::Alone
-                    }
-                    Some(next) if next.kind(&conv).is_punct() && next.kind(&conv) != UNDERSCORE => {
-                        tt::Spacing::Joint
-                    }
-                    _ => tt::Spacing::Alone,
-                };
-                let char = match token.to_char(&conv) {
-                    Some(c) => c,
-                    None => {
-                        panic!("Token from lexer must be single char: token = {:#?}", token);
-                    }
-                };
-                tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
             }
+
+            let spacing = match conv.peek().map(|next| next.kind(&conv)) {
+                Some(kind)
+                    if !kind.is_trivia()
+                        && kind.is_punct()
+                        && kind != T!['[']
+                        && kind != T!['{']
+                        && kind != T!['(']
+                        && kind != UNDERSCORE =>
+                {
+                    tt::Spacing::Joint
+                }
+                _ => tt::Spacing::Alone,
+            };
+            let char = match token.to_char(&conv) {
+                Some(c) => c,
+                None => {
+                    panic!("Token from lexer must be single char: token = {:#?}", token);
+                }
+            };
+            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
         } else {
             macro_rules! make_leaf {
                 ($i:ident) => {
                     tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
                 };
             }
-            let leaf: tt::Leaf = match k {
+            let leaf: tt::Leaf = match kind {
                 T![true] | T![false] => make_leaf!(Ident),
                 IDENT => make_leaf!(Ident),
                 UNDERSCORE => make_leaf!(Ident),
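
Note on the spacing rewrite in the hunk above: the old match had three arms (trivia and opening delimiters gave Spacing::Alone, other punctuation except UNDERSCORE gave Spacing::Joint, everything else gave Alone), while the new version keeps a single guard that names the Joint case directly, i.e. the old second-arm condition conjoined with the De Morgan expansion of the negated first arm. The sketch below illustrates that equivalence with stand-in types; `Kind`, its helper methods, and `Spacing` here are illustrations only, not rust-analyzer's `SyntaxKind` API.

// Illustration only: stand-in types, not rust-analyzer's SyntaxKind/Spacing.
#[derive(Clone, Copy, PartialEq)]
enum Kind { Trivia, LParen, LBrace, LBracket, Underscore, OtherPunct, Ident }

#[derive(Debug, PartialEq)]
enum Spacing { Alone, Joint }

impl Kind {
    fn is_trivia(self) -> bool { matches!(self, Kind::Trivia) }
    // Underscore counts as punctuation here, hence the explicit exclusion in both versions.
    fn is_punct(self) -> bool { !matches!(self, Kind::Trivia | Kind::Ident) }
}

// Old shape: an Alone arm for trivia/open delimiters, then the Joint arm, then a catch-all.
fn spacing_old(next: Option<Kind>) -> Spacing {
    match next {
        Some(k) if k.is_trivia() || k == Kind::LBracket || k == Kind::LBrace || k == Kind::LParen => {
            Spacing::Alone
        }
        Some(k) if k.is_punct() && k != Kind::Underscore => Spacing::Joint,
        _ => Spacing::Alone,
    }
}

// New shape: one guard spelling out exactly when the result is Joint.
fn spacing_new(next: Option<Kind>) -> Spacing {
    match next {
        Some(k)
            if !k.is_trivia()
                && k.is_punct()
                && k != Kind::LBracket
                && k != Kind::LBrace
                && k != Kind::LParen
                && k != Kind::Underscore =>
        {
            Spacing::Joint
        }
        _ => Spacing::Alone,
    }
}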
@@ -258,15 +259,15 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
             };
 
             leaf.into()
-        });
+        };
+        result.push(tt);
     }
 
     // If we get here, we've consumed all input tokens.
     // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
     // Merge them so we're left with one.
-    while stack.len() > 1 {
-        let entry = stack.pop().unwrap();
-        let parent = stack.last_mut().unwrap();
+    while let Some(entry) = stack.pop() {
+        let parent = stack.last_mut();
 
         conv.id_alloc().close_delim(entry.idx, None);
         let leaf: tt::Leaf = tt::Punct {
@@ -283,13 +284,12 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
         parent.subtree.token_trees.extend(entry.subtree.token_trees);
     }
 
-    let subtree = stack.pop().unwrap().subtree;
-    if subtree.token_trees.len() == 1 {
-        if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
-            return first.clone();
-        }
+    let subtree = stack.into_first().subtree;
+    if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
+        first.clone()
+    } else {
+        subtree
     }
-    subtree
 }
 
 /// Returns the textual content of a doc comment block as a quoted string
@@ -320,7 +320,8 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
     let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
 
     // Make `#![]`
-    let mut token_trees = vec![mk_punct('#')];
+    let mut token_trees = Vec::with_capacity(3);
+    token_trees.push(mk_punct('#'));
     if let ast::CommentPlacement::Inner = doc {
         token_trees.push(mk_punct('!'));
     }
@@ -439,8 +440,8 @@ impl<'a> SrcToken<RawConvertor<'a>> for usize {
 impl<'a> TokenConvertor for RawConvertor<'a> {
     type Token = usize;
 
-    fn convert_doc_comment(&self, token: &usize) -> Option<Vec<tt::TokenTree>> {
-        let text = self.lexed.text(*token);
+    fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+        let text = self.lexed.text(token);
         convert_doc_comment(&doc_comment(text))
     }
 
@@ -568,9 +569,9 @@ impl TokenConvertor for Convertor<'_> {
         }
         self.current = Self::next_token(&mut self.preorder, self.censor);
         let token = if curr.kind().is_punct() {
+            self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
-            self.punct_offset = Some((curr.clone(), 0.into()));
             (SynToken::Punch(curr, 0.into()), range)
         } else {
             self.punct_offset = None;
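
The `stack` changes above rely on `stdx::non_empty_vec::NonEmptyVec`, a stack that always holds at least one element: `last_mut()` can return `&mut T` without an `unwrap()`, `pop()` refuses to remove the last remaining element, and `into_first()` hands back the bottom entry at the end. The sketch below is reconstructed purely from the call sites in this diff; the field layout and method bodies are assumptions, not the actual `stdx` implementation.

/// Sketch of a non-empty stack matching the operations used in the diff;
/// illustration only, not the real `stdx::non_empty_vec::NonEmptyVec`.
pub struct NonEmptyVec<T> {
    first: T,
    rest: Vec<T>,
}

impl<T> NonEmptyVec<T> {
    /// Start with exactly one element (the root `StackEntry` in `convert_tokens`).
    pub fn new(first: T) -> Self {
        NonEmptyVec { first, rest: Vec::new() }
    }

    /// Push a new top element.
    pub fn push(&mut self, value: T) {
        self.rest.push(value)
    }

    /// The top element always exists, so no `Option` and no `unwrap()`.
    pub fn last_mut(&mut self) -> &mut T {
        self.rest.last_mut().unwrap_or(&mut self.first)
    }

    /// Returns `None` once only the bottom element is left, which is what lets
    /// `while let Some(entry) = stack.pop()` replace `while stack.len() > 1`.
    pub fn pop(&mut self) -> Option<T> {
        self.rest.pop()
    }

    /// Consume the stack and return the bottom element; this replaces the old
    /// `stack.pop().unwrap().subtree` at the end of `convert_tokens`.
    pub fn into_first(self) -> T {
        self.first
    }
}

With `pop()` guaranteed to leave one element, the delimiter-close path and the final merge loop no longer need `unwrap()`, and the root subtree is recovered infallibly with `into_first()`.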