Skip to content

Commit c456b21

Browse files
bors[bot] and matklad
authored
Merge #11117
11117: internal: replace TreeSink with a data structure r=matklad a=matklad The general theme of this is to make parser a better independent library. The specific thing we do here is replacing callback based TreeSink with a data structure. That is, rather than calling user-provided tree construction methods, the parser now spits out a very bare-bones tree, effectively a log of a DFS traversal. This makes the parser usable without any *specific* tree sink, and allows us to, eg, move tests into this crate. Now, it's also true that this is a distinction without a difference, as the old and the new interface are equivalent in expressiveness. Still, this new thing seems somewhat simpler. But yeah, I admit I don't have a suuper strong motivation here, just a hunch that this is better. cc #10765 Co-authored-by: Aleksey Kladov <[email protected]>
2 parents 2f63558 + f692faf commit c456b21

File tree

14 files changed

+259
-185
lines changed

14 files changed

+259
-185
lines changed

crates/mbe/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ mod parser;
1010
mod expander;
1111
mod syntax_bridge;
1212
mod tt_iter;
13-
mod to_parser_tokens;
13+
mod to_parser_input;
1414

1515
#[cfg(test)]
1616
mod benchmark;

crates/mbe/src/syntax_bridge.rs

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
22
3-
use parser::{ParseError, TreeSink};
43
use rustc_hash::{FxHashMap, FxHashSet};
54
use syntax::{
65
ast::{self, make::tokens::doc_comment},
@@ -11,7 +10,7 @@ use syntax::{
1110
use tt::buffer::{Cursor, TokenBuffer};
1211

1312
use crate::{
14-
to_parser_tokens::to_parser_tokens, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
13+
to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
1514
};
1615

1716
/// Convert the syntax node to a `TokenTree` (what macro
@@ -55,9 +54,19 @@ pub fn token_tree_to_syntax_node(
5554
}
5655
_ => TokenBuffer::from_subtree(tt),
5756
};
58-
let parser_tokens = to_parser_tokens(&buffer);
57+
let parser_input = to_parser_input(&buffer);
58+
let parser_output = parser::parse(&parser_input, entry_point);
5959
let mut tree_sink = TtTreeSink::new(buffer.begin());
60-
parser::parse(&parser_tokens, &mut tree_sink, entry_point);
60+
for event in parser_output.iter() {
61+
match event {
62+
parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
63+
tree_sink.token(kind, n_raw_tokens)
64+
}
65+
parser::Step::Enter { kind } => tree_sink.start_node(kind),
66+
parser::Step::Exit => tree_sink.finish_node(),
67+
parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
68+
}
69+
}
6170
if tree_sink.roots.len() != 1 {
6271
return Err(ExpandError::ConversionError);
6372
}
@@ -643,7 +652,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
643652
&texts[idx..texts.len() - (1 - idx)]
644653
}
645654

646-
impl<'a> TreeSink for TtTreeSink<'a> {
655+
impl<'a> TtTreeSink<'a> {
647656
fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
648657
if kind == LIFETIME_IDENT {
649658
n_tokens = 2;
@@ -741,7 +750,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
741750
*self.roots.last_mut().unwrap() -= 1;
742751
}
743752

744-
fn error(&mut self, error: ParseError) {
753+
fn error(&mut self, error: String) {
745754
self.inner.error(error, self.text_pos)
746755
}
747756
}

crates/mbe/src/to_parser_tokens.rs renamed to crates/mbe/src/to_parser_input.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@
44
use syntax::{SyntaxKind, SyntaxKind::*, T};
55
use tt::buffer::TokenBuffer;
66

7-
pub(crate) fn to_parser_tokens(buffer: &TokenBuffer) -> parser::Tokens {
8-
let mut res = parser::Tokens::default();
7+
pub(crate) fn to_parser_input(buffer: &TokenBuffer) -> parser::Input {
8+
let mut res = parser::Input::default();
99

1010
let mut current = buffer.begin();
1111

crates/mbe/src/tt_iter.rs

Lines changed: 22 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,10 @@
11
//! A "Parser" structure for token trees. We use this when parsing a declarative
22
//! macro definition into a list of patterns and templates.
33
4-
use crate::{to_parser_tokens::to_parser_tokens, ExpandError, ExpandResult, ParserEntryPoint};
4+
use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult, ParserEntryPoint};
55

6-
use parser::TreeSink;
76
use syntax::SyntaxKind;
8-
use tt::buffer::{Cursor, TokenBuffer};
7+
use tt::buffer::TokenBuffer;
98

109
macro_rules! err {
1110
() => {
@@ -94,34 +93,28 @@ impl<'a> TtIter<'a> {
9493
&mut self,
9594
entry_point: ParserEntryPoint,
9695
) -> ExpandResult<Option<tt::TokenTree>> {
97-
struct OffsetTokenSink<'a> {
98-
cursor: Cursor<'a>,
99-
error: bool,
100-
}
101-
102-
impl<'a> TreeSink for OffsetTokenSink<'a> {
103-
fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
104-
if kind == SyntaxKind::LIFETIME_IDENT {
105-
n_tokens = 2;
106-
}
107-
for _ in 0..n_tokens {
108-
self.cursor = self.cursor.bump_subtree();
96+
let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
97+
let parser_input = to_parser_input(&buffer);
98+
let tree_traversal = parser::parse(&parser_input, entry_point);
99+
100+
let mut cursor = buffer.begin();
101+
let mut error = false;
102+
for step in tree_traversal.iter() {
103+
match step {
104+
parser::Step::Token { kind, mut n_input_tokens } => {
105+
if kind == SyntaxKind::LIFETIME_IDENT {
106+
n_input_tokens = 2;
107+
}
108+
for _ in 0..n_input_tokens {
109+
cursor = cursor.bump_subtree();
110+
}
109111
}
110-
}
111-
fn start_node(&mut self, _kind: SyntaxKind) {}
112-
fn finish_node(&mut self) {}
113-
fn error(&mut self, _error: parser::ParseError) {
114-
self.error = true;
112+
parser::Step::Enter { .. } | parser::Step::Exit => (),
113+
parser::Step::Error { .. } => error = true,
115114
}
116115
}
117116

118-
let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
119-
let parser_tokens = to_parser_tokens(&buffer);
120-
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
121-
122-
parser::parse(&parser_tokens, &mut sink, entry_point);
123-
124-
let mut err = if !sink.cursor.is_root() || sink.error {
117+
let mut err = if !cursor.is_root() || error {
125118
Some(err!("expected {:?}", entry_point))
126119
} else {
127120
None
@@ -130,8 +123,8 @@ impl<'a> TtIter<'a> {
130123
let mut curr = buffer.begin();
131124
let mut res = vec![];
132125

133-
if sink.cursor.is_root() {
134-
while curr != sink.cursor {
126+
if cursor.is_root() {
127+
while curr != cursor {
135128
if let Some(token) = curr.token_tree() {
136129
res.push(token);
137130
}

crates/parser/src/event.rs

Lines changed: 10 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,8 @@
1010
use std::mem;
1111

1212
use crate::{
13-
ParseError,
13+
output::Output,
1414
SyntaxKind::{self, *},
15-
TreeSink,
1615
};
1716

1817
/// `Parser` produces a flat list of `Event`s.
@@ -77,7 +76,7 @@ pub(crate) enum Event {
7776
},
7877

7978
Error {
80-
msg: ParseError,
79+
msg: String,
8180
},
8281
}
8382

@@ -88,7 +87,8 @@ impl Event {
8887
}
8988

9089
/// Generate the syntax tree with the control of events.
91-
pub(super) fn process(sink: &mut dyn TreeSink, mut events: Vec<Event>) {
90+
pub(super) fn process(mut events: Vec<Event>) -> Output {
91+
let mut res = Output::default();
9292
let mut forward_parents = Vec::new();
9393

9494
for i in 0..events.len() {
@@ -117,15 +117,17 @@ pub(super) fn process(sink: &mut dyn TreeSink, mut events: Vec<Event>) {
117117

118118
for kind in forward_parents.drain(..).rev() {
119119
if kind != TOMBSTONE {
120-
sink.start_node(kind);
120+
res.enter_node(kind);
121121
}
122122
}
123123
}
124-
Event::Finish => sink.finish_node(),
124+
Event::Finish => res.leave_node(),
125125
Event::Token { kind, n_raw_tokens } => {
126-
sink.token(kind, n_raw_tokens);
126+
res.token(kind, n_raw_tokens);
127127
}
128-
Event::Error { msg } => sink.error(msg),
128+
Event::Error { msg } => res.error(msg),
129129
}
130130
}
131+
132+
res
131133
}

crates/parser/src/tokens.rs renamed to crates/parser/src/input.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,26 @@
1-
//! Input for the parser -- a sequence of tokens.
2-
//!
3-
//! As of now, parser doesn't have access to the *text* of the tokens, and makes
4-
//! decisions based solely on their classification. Unlike `LexerToken`, the
5-
//! `Tokens` doesn't include whitespace and comments.
1+
//! See [`Input`].
62
73
use crate::SyntaxKind;
84

95
#[allow(non_camel_case_types)]
106
type bits = u64;
117

12-
/// Main input to the parser.
8+
/// Input for the parser -- a sequence of tokens.
139
///
14-
/// A sequence of tokens represented internally as a struct of arrays.
10+
/// As of now, parser doesn't have access to the *text* of the tokens, and makes
11+
/// decisions based solely on their classification. Unlike `LexerToken`, the
12+
/// `Tokens` doesn't include whitespace and comments. Main input to the parser.
13+
///
14+
/// Struct of arrays internally, but this shouldn't really matter.
1515
#[derive(Default)]
16-
pub struct Tokens {
16+
pub struct Input {
1717
kind: Vec<SyntaxKind>,
1818
joint: Vec<bits>,
1919
contextual_kind: Vec<SyntaxKind>,
2020
}
2121

2222
/// `pub` impl used by callers to create `Tokens`.
23-
impl Tokens {
23+
impl Input {
2424
#[inline]
2525
pub fn push(&mut self, kind: SyntaxKind) {
2626
self.push_impl(kind, SyntaxKind::EOF)
@@ -63,7 +63,7 @@ impl Tokens {
6363
}
6464

6565
/// pub(crate) impl used by the parser to consume `Tokens`.
66-
impl Tokens {
66+
impl Input {
6767
pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
6868
self.kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
6969
}
@@ -76,7 +76,7 @@ impl Tokens {
7676
}
7777
}
7878

79-
impl Tokens {
79+
impl Input {
8080
fn bit_index(&self, n: usize) -> (usize, usize) {
8181
let idx = n / (bits::BITS as usize);
8282
let b_idx = n % (bits::BITS as usize);

crates/parser/src/lexed_str.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,8 +122,8 @@ impl<'a> LexedStr<'a> {
122122
self.error.iter().map(|it| (it.token as usize, it.msg.as_str()))
123123
}
124124

125-
pub fn to_tokens(&self) -> crate::Tokens {
126-
let mut res = crate::Tokens::default();
125+
pub fn to_input(&self) -> crate::Input {
126+
let mut res = crate::Input::default();
127127
let mut was_joint = false;
128128
for i in 0..self.len() {
129129
let kind = self.kind(i);

crates/parser/src/lib.rs

Lines changed: 23 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -24,32 +24,20 @@ mod syntax_kind;
2424
mod event;
2525
mod parser;
2626
mod grammar;
27-
mod tokens;
27+
mod input;
28+
mod output;
2829

2930
#[cfg(test)]
3031
mod tests;
3132

3233
pub(crate) use token_set::TokenSet;
3334

34-
pub use crate::{lexed_str::LexedStr, syntax_kind::SyntaxKind, tokens::Tokens};
35-
36-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
37-
pub struct ParseError(pub Box<String>);
38-
39-
/// `TreeSink` abstracts details of a particular syntax tree implementation.
40-
pub trait TreeSink {
41-
/// Adds new token to the current branch.
42-
fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
43-
44-
/// Start new branch and make it current.
45-
fn start_node(&mut self, kind: SyntaxKind);
46-
47-
/// Finish current branch and restore previous
48-
/// branch as current.
49-
fn finish_node(&mut self);
50-
51-
fn error(&mut self, error: ParseError);
52-
}
35+
pub use crate::{
36+
input::Input,
37+
lexed_str::LexedStr,
38+
output::{Output, Step},
39+
syntax_kind::SyntaxKind,
40+
};
5341

5442
/// rust-analyzer parser allows you to choose one of the possible entry points.
5543
///
@@ -74,11 +62,19 @@ pub enum ParserEntryPoint {
7462
}
7563

7664
/// Parse given tokens into the given sink as a rust file.
77-
pub fn parse_source_file(tokens: &Tokens, tree_sink: &mut dyn TreeSink) {
78-
parse(tokens, tree_sink, ParserEntryPoint::SourceFile);
65+
pub fn parse_source_file(inp: &Input) -> Output {
66+
parse(inp, ParserEntryPoint::SourceFile)
7967
}
8068

81-
pub fn parse(tokens: &Tokens, tree_sink: &mut dyn TreeSink, entry_point: ParserEntryPoint) {
69+
/// Parses the given [`Input`] into [`Output`] assuming that the top-level
70+
/// syntactic construct is the given [`ParserEntryPoint`].
71+
///
72+
/// Both input and output here are fairly abstract. The overall flow is that the
73+
/// caller has some "real" tokens, converts them to [`Input`], parses them to
74+
/// [`Output`], and then converts that into a "real" tree. The "real" tree is
75+
/// made of "real" tokens, so this all hinges on rather tight coordination of
76+
/// indices between the four stages.
77+
pub fn parse(inp: &Input, entry_point: ParserEntryPoint) -> Output {
8278
let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
8379
ParserEntryPoint::SourceFile => grammar::entry_points::source_file,
8480
ParserEntryPoint::Path => grammar::entry_points::path,
@@ -96,10 +92,10 @@ pub fn parse(tokens: &Tokens, tree_sink: &mut dyn TreeSink, entry_point: ParserE
9692
ParserEntryPoint::Attr => grammar::entry_points::attr,
9793
};
9894

99-
let mut p = parser::Parser::new(tokens);
95+
let mut p = parser::Parser::new(inp);
10096
entry_point(&mut p);
10197
let events = p.finish();
102-
event::process(tree_sink, events);
98+
event::process(events)
10399
}
104100

105101
/// A parsing function for a specific braced-block.
@@ -119,11 +115,11 @@ impl Reparser {
119115
///
120116
/// Tokens must start with `{`, end with `}` and form a valid brace
121117
/// sequence.
122-
pub fn parse(self, tokens: &Tokens, tree_sink: &mut dyn TreeSink) {
118+
pub fn parse(self, tokens: &Input) -> Output {
123119
let Reparser(r) = self;
124120
let mut p = parser::Parser::new(tokens);
125121
r(&mut p);
126122
let events = p.finish();
127-
event::process(tree_sink, events);
123+
event::process(events)
128124
}
129125
}

0 commit comments

Comments
 (0)