Commit abdc689

Clean up ext::tt::transcribe::TtFrame, rename to Frame.
1 parent d09e512

3 files changed (+88, -61 lines)

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 84 additions & 57 deletions
@@ -14,27 +14,71 @@ use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{self, TokenTree};
+use tokenstream::{self, TokenTree, Delimited, SequenceRepetition};
 use util::small_vector::SmallVector;

 use std::rc::Rc;
 use std::ops::Add;
 use std::collections::HashMap;

-///an unzipping of `TokenTree`s
-#[derive(Clone)]
-struct TtFrame {
-    forest: TokenTree,
-    idx: usize,
-    dotdotdoted: bool,
-    sep: Option<Token>,
+// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
+enum Frame {
+    Delimited {
+        forest: Rc<Delimited>,
+        idx: usize,
+        span: Span,
+    },
+    MatchNt {
+        name: Ident,
+        kind: Ident,
+        idx: usize,
+        span: Span,
+    },
+    Sequence {
+        forest: Rc<SequenceRepetition>,
+        idx: usize,
+        sep: Option<Token>,
+    },
+}
+
+impl Iterator for Frame {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        match *self {
+            Frame::Delimited { ref forest, ref mut idx, span } => {
+                *idx += 1;
+                if *idx == forest.delim.len() {
+                    Some(forest.open_tt(span))
+                } else if let Some(tree) = forest.tts.get(*idx - forest.delim.len() - 1) {
+                    Some(tree.clone())
+                } else if *idx == forest.tts.len() + 2 * forest.delim.len() {
+                    Some(forest.close_tt(span))
+                } else {
+                    None
+                }
+            }
+            Frame::Sequence { ref forest, ref mut idx, .. } => {
+                *idx += 1;
+                forest.tts.get(*idx - 1).cloned()
+            }
+            Frame::MatchNt { ref mut idx, name, kind, span } => {
+                *idx += 1;
+                match *idx {
+                    1 => Some(TokenTree::Token(span, token::SubstNt(name))),
+                    2 => Some(TokenTree::Token(span, token::Colon)),
+                    3 => Some(TokenTree::Token(span, token::Ident(kind))),
+                    _ => None,
+                }
+            }
+        }
+    }
 }

-#[derive(Clone)]
 struct TtReader<'a> {
     sp_diag: &'a Handler,
     /// the unzipped tree:
-    stack: SmallVector<TtFrame>,
+    stack: SmallVector<Frame>,
     /* for MBE-style macro transcription */
     interpolations: HashMap<Ident, Rc<NamedMatch>>,

@@ -51,15 +95,10 @@ pub fn transcribe(sp_diag: &Handler,
                   -> Vec<TokenTree> {
     let mut r = TtReader {
         sp_diag: sp_diag,
-        stack: SmallVector::one(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
-                tts: src,
-                // doesn't matter. This merely holds the root unzipping.
-                separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            })),
+        stack: SmallVector::one(Frame::Delimited {
+            forest: Rc::new(tokenstream::Delimited { delim: token::NoDelim, tts: src }),
             idx: 0,
-            dotdotdoted: false,
-            sep: None,
+            span: DUMMY_SP,
         }),
         interpolations: match interp { /* just a convenience */
             None => HashMap::new(),
@@ -151,34 +190,33 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
 /// EFFECT: advances the reader's token field
 fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
     loop {
-        let frame = match r.stack.last() {
-            Some(frame) => frame.clone(),
+        let tree = match r.stack.last_mut() {
+            Some(frame) => frame.next(),
             None => return None,
         };

-        if frame.idx == frame.forest.len() {
-            if frame.dotdotdoted &&
-                *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1 {
-                *r.repeat_idx.last_mut().unwrap() += 1;
-                r.stack.last_mut().unwrap().idx = 0;
-                if let Some(tk) = r.stack.last().unwrap().sep.clone() {
-                    return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess
-                }
-            } else {
-                r.stack.pop();
-                match r.stack.last_mut() {
-                    Some(frame) => frame.idx += 1,
-                    None => return None,
-                }
-                if frame.dotdotdoted {
-                    r.repeat_idx.pop();
-                    r.repeat_len.pop();
+        let tree = if let Some(tree) = tree {
+            tree
+        } else {
+            if let Frame::Sequence { ref mut idx, ref sep, .. } = *r.stack.last_mut().unwrap() {
+                if *r.repeat_idx.last().unwrap() < *r.repeat_len.last().unwrap() - 1 {
+                    *r.repeat_idx.last_mut().unwrap() += 1;
+                    *idx = 0;
+                    if let Some(sep) = sep.clone() {
+                        return Some(TokenTree::Token(prev_span, sep)); // repeat same span, I guess
+                    }
+                    continue
                 }
             }
+
+            if let Frame::Sequence { .. } = r.stack.pop().unwrap() {
+                r.repeat_idx.pop();
+                r.repeat_len.pop();
+            }
             continue
-        }
+        };

-        match frame.forest.get_tt(frame.idx) {
+        match tree {
             TokenTree::Sequence(sp, seq) => {
                 // FIXME(pcwalton): Bad copy.
                 match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
@@ -202,23 +240,20 @@ fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
                                                             "this must repeat at least once"));
                             }

-                            r.stack.last_mut().unwrap().idx += 1;
                             return tt_next_token(r, prev_span);
                         }
                         r.repeat_len.push(len);
                         r.repeat_idx.push(0);
-                        r.stack.push(TtFrame {
+                        r.stack.push(Frame::Sequence {
                             idx: 0,
-                            dotdotdoted: true,
                             sep: seq.separator.clone(),
-                            forest: TokenTree::Sequence(sp, seq),
+                            forest: seq,
                         });
                     }
                 }
             }
             // FIXME #2887: think about span stuff here
             TokenTree::Token(sp, SubstNt(ident)) => {
-                r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
                         return Some(TokenTree::Token(sp, SubstNt(ident)));
@@ -245,21 +280,13 @@ fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
                     }
                 }
             }
-            // TokenTree::Delimited or any token that can be unzipped
-            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
-                // do not advance the idx yet
-                r.stack.push(TtFrame {
-                    forest: seq,
-                    idx: 0,
-                    dotdotdoted: false,
-                    sep: None
-                });
-                // if this could be 0-length, we'd need to potentially recur here
+            TokenTree::Delimited(span, delimited) => {
+                r.stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
             }
-            tt @ TokenTree::Token(..) => {
-                r.stack.last_mut().unwrap().idx += 1;
-                return Some(tt);
+            TokenTree::Token(span, MatchNt(name, kind)) => {
+                r.stack.push(Frame::MatchNt { name: name, kind: kind, idx: 0, span: span });
             }
+            tt @ TokenTree::Token(..) => return Some(tt),
         }
     }
 }
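
The Frame::Delimited arm above walks a delimited group by position: after the initial increment, index delim.len() yields the opening delimiter, the next tts.len() positions yield the interior trees, and index tts.len() + 2 * delim.len() yields the closing delimiter; with token::NoDelim the delimiter length is 0 and neither delimiter token is produced. The following is a minimal, self-contained sketch of that iterator pattern. The names Tok, Delimited and Frame below are toy stand-ins written for illustration, not the libsyntax types; only the indexing scheme mirrors the Frame::Delimited code in the diff.

// A toy, self-contained model of the Frame::Delimited iterator above.
// `Tok`, `Delimited` and `Frame` are stand-ins, not the libsyntax types.
use std::rc::Rc;

#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Open,          // stands in for the opening delimiter token
    Close,         // stands in for the closing delimiter token
    Ident(String), // stands in for any interior token tree
}

struct Delimited {
    has_delim: bool, // false models token::NoDelim (delimiter "length" 0)
    tts: Vec<Tok>,
}

// One unzipping frame: yields the open delimiter, the interior trees, then the close delimiter.
struct Frame {
    forest: Rc<Delimited>,
    idx: usize,
}

impl Iterator for Frame {
    type Item = Tok;

    fn next(&mut self) -> Option<Tok> {
        self.idx += 1;
        let delim_len = if self.forest.has_delim { 1 } else { 0 };
        if self.idx == delim_len {
            Some(Tok::Open)
        } else if let Some(tree) = self.forest.tts.get(self.idx - delim_len - 1) {
            Some(tree.clone())
        } else if self.idx == self.forest.tts.len() + 2 * delim_len {
            Some(Tok::Close)
        } else {
            None
        }
    }
}

fn main() {
    let forest = Rc::new(Delimited {
        has_delim: true,
        tts: vec![Tok::Ident("a".into()), Tok::Ident("b".into())],
    });
    let out: Vec<Tok> = Frame { forest: forest, idx: 0 }.collect();
    // Yields Open, a, b, Close, then stops.
    assert_eq!(out, vec![Tok::Open,
                         Tok::Ident("a".into()),
                         Tok::Ident("b".into()),
                         Tok::Close]);
}

Driving each stack frame through Iterator::next is what lets tt_next_token drop the per-arm `r.stack.last_mut().unwrap().idx += 1;` bookkeeping that several of the deleted lines were doing.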

src/libsyntax/parse/token.rs

Lines changed: 2 additions & 2 deletions
@@ -50,8 +50,8 @@ pub enum DelimToken {
 }

 impl DelimToken {
-    pub fn len(&self) -> u32 {
-        if *self == NoDelim { 0 } else { 1 }
+    pub fn len(self) -> usize {
+        if self == NoDelim { 0 } else { 1 }
     }
 }

src/libsyntax/tokenstream.rs

Lines changed: 2 additions & 2 deletions
@@ -64,7 +64,7 @@ impl Delimited {
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = match span {
             DUMMY_SP => DUMMY_SP,
-            _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span },
+            _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
         };
         TokenTree::Token(open_span, self.open_token())
     }
@@ -73,7 +73,7 @@ impl Delimited {
     pub fn close_tt(&self, span: Span) -> TokenTree {
         let close_span = match span {
             DUMMY_SP => DUMMY_SP,
-            _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span },
+            _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
         };
         TokenTree::Token(close_span, self.close_token())
     }
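
The two casts above follow from the token.rs change: BytePos wraps a u32 while DelimToken::len now returns usize, so the length is cast back to u32 where it feeds span arithmetic. Below is a minimal stand-alone sketch of that arithmetic; Span, BytePos and DelimToken here are simplified stand-ins defined only for the example, not the libsyntax definitions (in particular, the real open_tt/close_tt also special-case DUMMY_SP, which is omitted here).

// Toy stand-ins for Span, BytePos and DelimToken; not the libsyntax types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct BytePos(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: BytePos,
    hi: BytePos,
}

#[derive(Clone, Copy, PartialEq)]
enum DelimToken {
    Paren,
    NoDelim,
}

impl DelimToken {
    // Width of one delimiter token in bytes: 0 for NoDelim, 1 otherwise.
    fn len(self) -> usize {
        if self == DelimToken::NoDelim { 0 } else { 1 }
    }
}

// Span of the opening delimiter: the first `len()` bytes of the whole span.
fn open_span(delim: DelimToken, span: Span) -> Span {
    Span { hi: BytePos(span.lo.0 + delim.len() as u32), ..span }
}

// Span of the closing delimiter: the last `len()` bytes of the whole span.
fn close_span(delim: DelimToken, span: Span) -> Span {
    Span { lo: BytePos(span.hi.0 - delim.len() as u32), ..span }
}

fn main() {
    // A parenthesized group occupying bytes 10..20 of the source.
    let whole = Span { lo: BytePos(10), hi: BytePos(20) };
    assert_eq!(open_span(DelimToken::Paren, whole),
               Span { lo: BytePos(10), hi: BytePos(11) });
    assert_eq!(close_span(DelimToken::Paren, whole),
               Span { lo: BytePos(19), hi: BytePos(20) });
    // With NoDelim the "delimiter" spans are zero-width.
    assert_eq!(open_span(DelimToken::NoDelim, whole),
               Span { lo: BytePos(10), hi: BytePos(10) });
}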
