Commit 74de79b — "internal: rename" (parent: d0d0507)

File tree: 13 files changed (+106 / −89 lines)

crates/mbe/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ mod parser;
1010
mod expander;
1111
mod syntax_bridge;
1212
mod tt_iter;
13-
mod to_parser_tokens;
13+
mod to_parser_input;
1414

1515
#[cfg(test)]
1616
mod benchmark;

crates/mbe/src/syntax_bridge.rs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ use syntax::{
1010
use tt::buffer::{Cursor, TokenBuffer};
1111

1212
use crate::{
13-
to_parser_tokens::to_parser_tokens, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
13+
to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
1414
};
1515

1616
/// Convert the syntax node to a `TokenTree` (what macro
@@ -54,17 +54,17 @@ pub fn token_tree_to_syntax_node(
5454
}
5555
_ => TokenBuffer::from_subtree(tt),
5656
};
57-
let parser_tokens = to_parser_tokens(&buffer);
58-
let tree_traversal = parser::parse(&parser_tokens, entry_point);
57+
let parser_input = to_parser_input(&buffer);
58+
let parser_output = parser::parse(&parser_input, entry_point);
5959
let mut tree_sink = TtTreeSink::new(buffer.begin());
60-
for event in tree_traversal.iter() {
60+
for event in parser_output.iter() {
6161
match event {
62-
parser::TraversalStep::Token { kind, n_raw_tokens } => {
62+
parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
6363
tree_sink.token(kind, n_raw_tokens)
6464
}
65-
parser::TraversalStep::EnterNode { kind } => tree_sink.start_node(kind),
66-
parser::TraversalStep::LeaveNode => tree_sink.finish_node(),
67-
parser::TraversalStep::Error { msg } => tree_sink.error(msg.to_string()),
65+
parser::Step::Enter { kind } => tree_sink.start_node(kind),
66+
parser::Step::Exit => tree_sink.finish_node(),
67+
parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
6868
}
6969
}
7070
if tree_sink.roots.len() != 1 {

crates/mbe/src/to_parser_tokens.rs renamed to crates/mbe/src/to_parser_input.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@
44
use syntax::{SyntaxKind, SyntaxKind::*, T};
55
use tt::buffer::TokenBuffer;
66

7-
pub(crate) fn to_parser_tokens(buffer: &TokenBuffer) -> parser::Tokens {
8-
let mut res = parser::Tokens::default();
7+
pub(crate) fn to_parser_input(buffer: &TokenBuffer) -> parser::Input {
8+
let mut res = parser::Input::default();
99

1010
let mut current = buffer.begin();
1111

crates/mbe/src/tt_iter.rs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
//! A "Parser" structure for token trees. We use this when parsing a declarative
22
//! macro definition into a list of patterns and templates.
33
4-
use crate::{to_parser_tokens::to_parser_tokens, ExpandError, ExpandResult, ParserEntryPoint};
4+
use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult, ParserEntryPoint};
55

66
use syntax::SyntaxKind;
77
use tt::buffer::TokenBuffer;
@@ -94,23 +94,23 @@ impl<'a> TtIter<'a> {
9494
entry_point: ParserEntryPoint,
9595
) -> ExpandResult<Option<tt::TokenTree>> {
9696
let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
97-
let parser_tokens = to_parser_tokens(&buffer);
98-
let tree_traversal = parser::parse(&parser_tokens, entry_point);
97+
let parser_input = to_parser_input(&buffer);
98+
let tree_traversal = parser::parse(&parser_input, entry_point);
9999

100100
let mut cursor = buffer.begin();
101101
let mut error = false;
102102
for step in tree_traversal.iter() {
103103
match step {
104-
parser::TraversalStep::Token { kind, mut n_raw_tokens } => {
104+
parser::Step::Token { kind, mut n_input_tokens } => {
105105
if kind == SyntaxKind::LIFETIME_IDENT {
106-
n_raw_tokens = 2;
106+
n_input_tokens = 2;
107107
}
108-
for _ in 0..n_raw_tokens {
108+
for _ in 0..n_input_tokens {
109109
cursor = cursor.bump_subtree();
110110
}
111111
}
112-
parser::TraversalStep::EnterNode { .. } | parser::TraversalStep::LeaveNode => (),
113-
parser::TraversalStep::Error { .. } => error = true,
112+
parser::Step::Enter { .. } | parser::Step::Exit => (),
113+
parser::Step::Error { .. } => error = true,
114114
}
115115
}
116116

crates/parser/src/event.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
use std::mem;
1111

1212
use crate::{
13-
tree_traversal::TreeTraversal,
13+
output::Output,
1414
SyntaxKind::{self, *},
1515
};
1616

@@ -87,8 +87,8 @@ impl Event {
8787
}
8888

8989
/// Generate the syntax tree with the control of events.
90-
pub(super) fn process(mut events: Vec<Event>) -> TreeTraversal {
91-
let mut res = TreeTraversal::default();
90+
pub(super) fn process(mut events: Vec<Event>) -> Output {
91+
let mut res = Output::default();
9292
let mut forward_parents = Vec::new();
9393

9494
for i in 0..events.len() {

crates/parser/src/tokens.rs renamed to crates/parser/src/input.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,26 @@
1-
//! Input for the parser -- a sequence of tokens.
2-
//!
3-
//! As of now, parser doesn't have access to the *text* of the tokens, and makes
4-
//! decisions based solely on their classification. Unlike `LexerToken`, the
5-
//! `Tokens` doesn't include whitespace and comments.
1+
//! See [`Input`].
62
73
use crate::SyntaxKind;
84

95
#[allow(non_camel_case_types)]
106
type bits = u64;
117

12-
/// Main input to the parser.
8+
/// Input for the parser -- a sequence of tokens.
139
///
14-
/// A sequence of tokens represented internally as a struct of arrays.
10+
/// As of now, parser doesn't have access to the *text* of the tokens, and makes
11+
/// decisions based solely on their classification. Unlike `LexerToken`, the
12+
/// `Tokens` doesn't include whitespace and comments. Main input to the parser.
13+
///
14+
/// Struct of arrays internally, but this shouldn't really matter.
1515
#[derive(Default)]
16-
pub struct Tokens {
16+
pub struct Input {
1717
kind: Vec<SyntaxKind>,
1818
joint: Vec<bits>,
1919
contextual_kind: Vec<SyntaxKind>,
2020
}
2121

2222
/// `pub` impl used by callers to create `Tokens`.
23-
impl Tokens {
23+
impl Input {
2424
#[inline]
2525
pub fn push(&mut self, kind: SyntaxKind) {
2626
self.push_impl(kind, SyntaxKind::EOF)
@@ -63,7 +63,7 @@ impl Tokens {
6363
}
6464

6565
/// pub(crate) impl used by the parser to consume `Tokens`.
66-
impl Tokens {
66+
impl Input {
6767
pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
6868
self.kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
6969
}
@@ -76,7 +76,7 @@ impl Tokens {
7676
}
7777
}
7878

79-
impl Tokens {
79+
impl Input {
8080
fn bit_index(&self, n: usize) -> (usize, usize) {
8181
let idx = n / (bits::BITS as usize);
8282
let b_idx = n % (bits::BITS as usize);

crates/parser/src/lexed_str.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,8 +122,8 @@ impl<'a> LexedStr<'a> {
122122
self.error.iter().map(|it| (it.token as usize, it.msg.as_str()))
123123
}
124124

125-
pub fn to_tokens(&self) -> crate::Tokens {
126-
let mut res = crate::Tokens::default();
125+
pub fn to_input(&self) -> crate::Input {
126+
let mut res = crate::Input::default();
127127
let mut was_joint = false;
128128
for i in 0..self.len() {
129129
let kind = self.kind(i);

crates/parser/src/lib.rs

Lines changed: 17 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -24,19 +24,19 @@ mod syntax_kind;
2424
mod event;
2525
mod parser;
2626
mod grammar;
27-
mod tokens;
28-
mod tree_traversal;
27+
mod input;
28+
mod output;
2929

3030
#[cfg(test)]
3131
mod tests;
3232

3333
pub(crate) use token_set::TokenSet;
3434

3535
pub use crate::{
36+
input::Input,
3637
lexed_str::LexedStr,
38+
output::{Output, Step},
3739
syntax_kind::SyntaxKind,
38-
tokens::Tokens,
39-
tree_traversal::{TraversalStep, TreeTraversal},
4040
};
4141

4242
/// rust-analyzer parser allows you to choose one of the possible entry points.
@@ -62,11 +62,19 @@ pub enum ParserEntryPoint {
6262
}
6363

6464
/// Parse given tokens into the given sink as a rust file.
65-
pub fn parse_source_file(tokens: &Tokens) -> TreeTraversal {
66-
parse(tokens, ParserEntryPoint::SourceFile)
65+
pub fn parse_source_file(inp: &Input) -> Output {
66+
parse(inp, ParserEntryPoint::SourceFile)
6767
}
6868

69-
pub fn parse(tokens: &Tokens, entry_point: ParserEntryPoint) -> TreeTraversal {
69+
/// Parses the given [`Input`] into [`Output`] assuming that the top-level
70+
/// syntactic construct is the given [`ParserEntryPoint`].
71+
///
72+
/// Both input and output here are fairly abstract. The overall flow is that the
73+
/// caller has some "real" tokens, converts them to [`Input`], parses them to
74+
/// [`Output`], and then converts that into a "real" tree. The "real" tree is
75+
/// made of "real" tokens, so this all hinges on rather tight coordination of
76+
/// indices between the four stages.
77+
pub fn parse(inp: &Input, entry_point: ParserEntryPoint) -> Output {
7078
let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
7179
ParserEntryPoint::SourceFile => grammar::entry_points::source_file,
7280
ParserEntryPoint::Path => grammar::entry_points::path,
@@ -84,7 +92,7 @@ pub fn parse(tokens: &Tokens, entry_point: ParserEntryPoint) -> TreeTraversal {
8492
ParserEntryPoint::Attr => grammar::entry_points::attr,
8593
};
8694

87-
let mut p = parser::Parser::new(tokens);
95+
let mut p = parser::Parser::new(inp);
8896
entry_point(&mut p);
8997
let events = p.finish();
9098
event::process(events)
@@ -107,7 +115,7 @@ impl Reparser {
107115
///
108116
/// Tokens must start with `{`, end with `}` and form a valid brace
109117
/// sequence.
110-
pub fn parse(self, tokens: &Tokens) -> TreeTraversal {
118+
pub fn parse(self, tokens: &Input) -> Output {
111119
let Reparser(r) = self;
112120
let mut p = parser::Parser::new(tokens);
113121
r(&mut p);

crates/parser/src/tree_traversal.rs renamed to crates/parser/src/output.rs

Lines changed: 24 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,43 +1,52 @@
1-
//! TODO
1+
//! See [`Output`]
2+
23
use crate::SyntaxKind;
34

4-
/// Output of the parser.
5+
/// Output of the parser -- a DFS traversal of a concrete syntax tree.
6+
///
7+
/// Use the [`Output::iter`] method to iterate over traversal steps and consume
8+
/// a syntax tree.
9+
///
10+
/// In a sense, this is just a sequence of [`SyntaxKind`]-colored parenthesis
11+
/// interspersed into the original [`crate::Input`]. The output is fundamentally
12+
/// coordinated with the input and `n_input_tokens` refers to the number of
13+
/// times [`crate::Input::push`] was called.
514
#[derive(Default)]
6-
pub struct TreeTraversal {
15+
pub struct Output {
716
/// 32-bit encoding of events. If LSB is zero, then that's an index into the
817
/// error vector. Otherwise, it's one of the thee other variants, with data encoded as
918
///
10-
/// |16 bit kind|8 bit n_raw_tokens|4 bit tag|4 bit leftover|
19+
/// |16 bit kind|8 bit n_input_tokens|4 bit tag|4 bit leftover|
1120
///
1221
event: Vec<u32>,
1322
error: Vec<String>,
1423
}
1524

16-
pub enum TraversalStep<'a> {
17-
Token { kind: SyntaxKind, n_raw_tokens: u8 },
18-
EnterNode { kind: SyntaxKind },
19-
LeaveNode,
25+
pub enum Step<'a> {
26+
Token { kind: SyntaxKind, n_input_tokens: u8 },
27+
Enter { kind: SyntaxKind },
28+
Exit,
2029
Error { msg: &'a str },
2130
}
2231

23-
impl TreeTraversal {
24-
pub fn iter(&self) -> impl Iterator<Item = TraversalStep<'_>> {
32+
impl Output {
33+
pub fn iter(&self) -> impl Iterator<Item = Step<'_>> {
2534
self.event.iter().map(|&event| {
2635
if event & 0b1 == 0 {
27-
return TraversalStep::Error { msg: self.error[(event as usize) >> 1].as_str() };
36+
return Step::Error { msg: self.error[(event as usize) >> 1].as_str() };
2837
}
2938
let tag = ((event & 0x0000_00F0) >> 4) as u8;
3039
match tag {
3140
0 => {
3241
let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
33-
let n_raw_tokens = ((event & 0x0000_FF00) >> 8) as u8;
34-
TraversalStep::Token { kind, n_raw_tokens }
42+
let n_input_tokens = ((event & 0x0000_FF00) >> 8) as u8;
43+
Step::Token { kind, n_input_tokens }
3544
}
3645
1 => {
3746
let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
38-
TraversalStep::EnterNode { kind }
47+
Step::Enter { kind }
3948
}
40-
2 => TraversalStep::LeaveNode,
49+
2 => Step::Exit,
4150
_ => unreachable!(),
4251
}
4352
})

crates/parser/src/parser.rs

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ use limit::Limit;
77

88
use crate::{
99
event::Event,
10-
tokens::Tokens,
10+
input::Input,
1111
SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
1212
TokenSet, T,
1313
};
@@ -22,7 +22,7 @@ use crate::{
2222
/// "start expression, consume number literal,
2323
/// finish expression". See `Event` docs for more.
2424
pub(crate) struct Parser<'t> {
25-
tokens: &'t Tokens,
25+
inp: &'t Input,
2626
pos: usize,
2727
events: Vec<Event>,
2828
steps: Cell<u32>,
@@ -31,8 +31,8 @@ pub(crate) struct Parser<'t> {
3131
static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
3232

3333
impl<'t> Parser<'t> {
34-
pub(super) fn new(tokens: &'t Tokens) -> Parser<'t> {
35-
Parser { tokens, pos: 0, events: Vec::new(), steps: Cell::new(0) }
34+
pub(super) fn new(inp: &'t Input) -> Parser<'t> {
35+
Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
3636
}
3737

3838
pub(crate) fn finish(self) -> Vec<Event> {
@@ -55,7 +55,7 @@ impl<'t> Parser<'t> {
5555
assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
5656
self.steps.set(steps + 1);
5757

58-
self.tokens.kind(self.pos + n)
58+
self.inp.kind(self.pos + n)
5959
}
6060

6161
/// Checks if the current token is `kind`.
@@ -91,7 +91,7 @@ impl<'t> Parser<'t> {
9191
T![<<=] => self.at_composite3(n, T![<], T![<], T![=]),
9292
T![>>=] => self.at_composite3(n, T![>], T![>], T![=]),
9393

94-
_ => self.tokens.kind(self.pos + n) == kind,
94+
_ => self.inp.kind(self.pos + n) == kind,
9595
}
9696
}
9797

@@ -130,17 +130,17 @@ impl<'t> Parser<'t> {
130130
}
131131

132132
fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
133-
self.tokens.kind(self.pos + n) == k1
134-
&& self.tokens.kind(self.pos + n + 1) == k2
135-
&& self.tokens.is_joint(self.pos + n)
133+
self.inp.kind(self.pos + n) == k1
134+
&& self.inp.kind(self.pos + n + 1) == k2
135+
&& self.inp.is_joint(self.pos + n)
136136
}
137137

138138
fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
139-
self.tokens.kind(self.pos + n) == k1
140-
&& self.tokens.kind(self.pos + n + 1) == k2
141-
&& self.tokens.kind(self.pos + n + 2) == k3
142-
&& self.tokens.is_joint(self.pos + n)
143-
&& self.tokens.is_joint(self.pos + n + 1)
139+
self.inp.kind(self.pos + n) == k1
140+
&& self.inp.kind(self.pos + n + 1) == k2
141+
&& self.inp.kind(self.pos + n + 2) == k3
142+
&& self.inp.is_joint(self.pos + n)
143+
&& self.inp.is_joint(self.pos + n + 1)
144144
}
145145

146146
/// Checks if the current token is in `kinds`.
@@ -150,7 +150,7 @@ impl<'t> Parser<'t> {
150150

151151
/// Checks if the current token is contextual keyword with text `t`.
152152
pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool {
153-
self.tokens.contextual_kind(self.pos) == kw
153+
self.inp.contextual_kind(self.pos) == kw
154154
}
155155

156156
/// Starts a new node in the syntax tree. All nodes and tokens

Comments: 0