@@ -35,7 +35,7 @@ pub fn syntax_node_to_token_tree_with_modifications(
     append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
+    let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
     always!(c.replace.is_empty(), "replace: {:?}", c.replace);
@@ -100,7 +100,7 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
         return None;
     }
 
-    let mut conv = RawConvertor {
+    let mut conv = RawConverter {
         lexed,
         pos: 0,
         id_alloc: TokenIdAlloc {
@@ -148,7 +148,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }
 
-fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
+fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
     struct StackEntry {
         subtree: tt::Subtree,
         idx: usize,
@@ -425,8 +425,8 @@ impl TokenIdAlloc {
     }
 }
 
-/// A raw token (straight from lexer) convertor
-struct RawConvertor<'a> {
+/// A raw token (straight from lexer) converter
+struct RawConverter<'a> {
     lexed: parser::LexedStr<'a>,
     pos: usize,
     id_alloc: TokenIdAlloc,
@@ -442,7 +442,7 @@ trait SrcToken<Ctx>: std::fmt::Debug {
     fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
 }
 
-trait TokenConvertor: Sized {
+trait TokenConverter: Sized {
     type Token: SrcToken<Self>;
 
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
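
The two traits above carry the whole design: `SrcToken<Ctx>` keeps token handles cheap by routing every query through a context argument, and `TokenConverter: Sized` ties the handle back to its source via `type Token: SrcToken<Self>`, so a generic driver like `convert_tokens` can walk any token source. A minimal, self-contained sketch of that pattern follows; everything other than the two trait names (`bump`, `TokenKind`, `RawTokens`, `collect_idents`) is a hypothetical stand-in, not rust-analyzer's API.

```rust
use std::fmt;

/// A cheap token handle that answers queries through its converter context.
trait SrcToken<Ctx>: fmt::Debug {
    fn kind(&self, ctx: &Ctx) -> TokenKind;
    fn to_text(&self, ctx: &Ctx) -> String;
}

/// A token source; `Self` doubles as the context for its token handles.
trait TokenConverter: Sized {
    type Token: SrcToken<Self>;
    fn bump(&mut self) -> Option<Self::Token>;
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenKind {
    Ident,
    Punct,
}

/// Raw source: handles are bare `usize` indices, all data lives here.
struct RawTokens {
    kinds: Vec<TokenKind>,
    texts: Vec<String>,
    pos: usize,
}

impl SrcToken<RawTokens> for usize {
    fn kind(&self, ctx: &RawTokens) -> TokenKind {
        ctx.kinds[*self]
    }
    fn to_text(&self, ctx: &RawTokens) -> String {
        ctx.texts[*self].clone()
    }
}

impl TokenConverter for RawTokens {
    type Token = usize;
    fn bump(&mut self) -> Option<usize> {
        if self.pos < self.kinds.len() {
            self.pos += 1;
            Some(self.pos - 1)
        } else {
            None
        }
    }
}

/// Generic driver in the shape of `convert_tokens`: it only sees the traits.
fn collect_idents<C: TokenConverter>(conv: &mut C) -> Vec<String> {
    let mut out = Vec::new();
    while let Some(token) = conv.bump() {
        if token.kind(conv) == TokenKind::Ident {
            out.push(token.to_text(conv));
        }
    }
    out
}
```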
@@ -454,25 +454,25 @@ trait TokenConvertor: Sized {
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }
 
-impl<'a> SrcToken<RawConvertor<'a>> for usize {
-    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
+impl<'a> SrcToken<RawConverter<'a>> for usize {
+    fn kind(&self, ctx: &RawConverter<'a>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }
 
-    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
+    fn to_char(&self, ctx: &RawConverter<'a>) -> Option<char> {
         ctx.lexed.text(*self).chars().next()
     }
 
-    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
+    fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
         ctx.lexed.text(*self).into()
     }
 
-    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+    fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> {
         None
     }
 }
 
-impl<'a> TokenConvertor for RawConvertor<'a> {
+impl<'a> TokenConverter for RawConverter<'a> {
     type Token = usize;
 
     fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
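
Hypothetical usage of the sketch above, mirroring the shape of this `RawConverter` hunk: the handles stay bare `usize` indices into the source, and the driver only ever speaks through the traits.

```rust
fn main() {
    // Pretend output of a lexer for `foo ! bar`.
    let mut conv = RawTokens {
        kinds: vec![TokenKind::Ident, TokenKind::Punct, TokenKind::Ident],
        texts: vec!["foo".into(), "!".into(), "bar".into()],
        pos: 0,
    };
    // The driver never touches `RawTokens` directly, only the traits.
    assert_eq!(collect_idents(&mut conv), vec!["foo", "bar"]);
}
```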
@@ -504,7 +504,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
     }
 }
 
-struct Convertor {
+struct Converter {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
     current_synthetic: Vec<SyntheticToken>,
@@ -515,19 +515,19 @@ struct Convertor {
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
 
-impl Convertor {
+impl Converter {
     fn new(
         node: &SyntaxNode,
         global_offset: TextSize,
         existing_token_map: TokenMap,
         next_id: u32,
         mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
         mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-    ) -> Convertor {
+    ) -> Converter {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
         let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
-        Convertor {
+        Converter {
             id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
             current: first,
             current_synthetic: synthetic,
@@ -590,39 +590,39 @@ impl SynToken {
     }
 }
 
-impl SrcToken<Convertor> for SynToken {
-    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+impl SrcToken<Converter> for SynToken {
+    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punch(token, _) => token.kind(),
             SynToken::Synthetic(token) => token.kind,
         }
     }
-    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
+    fn to_char(&self, _ctx: &Converter) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
             SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
             SynToken::Synthetic(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter) -> SmolStr {
         match self {
             SynToken::Ordinary(token) => token.text().into(),
             SynToken::Punch(token, _) => token.text().into(),
             SynToken::Synthetic(token) => token.text.clone(),
         }
     }
 
-    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+    fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
         match self {
             SynToken::Synthetic(token) => Some(token.id),
             _ => None,
         }
     }
 }
 
-impl TokenConvertor for Convertor {
+impl TokenConverter for Converter {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
         convert_doc_comment(token.token()?)
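
The `SynToken` impls above sit at the other end of the spectrum from `usize`: the handle owns part of its data (the synthetic text), so some variants answer from themselves and ignore the context. A hedged continuation of the earlier sketch under the same assumptions; `EnumToken` is a made-up stand-in for `SynToken`, and it reuses `RawTokens` as its context for brevity.

```rust
/// A richer handle in the spirit of `SynToken`: synthetic variants carry
/// their own text, borrowed ones defer to the converter context.
#[derive(Debug)]
enum EnumToken {
    Borrowed(usize),
    Synthetic(String),
}

impl SrcToken<RawTokens> for EnumToken {
    fn kind(&self, ctx: &RawTokens) -> TokenKind {
        match self {
            EnumToken::Borrowed(idx) => idx.kind(ctx),
            EnumToken::Synthetic(_) => TokenKind::Ident,
        }
    }
    fn to_text(&self, ctx: &RawTokens) -> String {
        match self {
            EnumToken::Borrowed(idx) => idx.to_text(ctx),
            EnumToken::Synthetic(text) => text.clone(),
        }
    }
}
```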