Skip to content

Commit d3d980e

Browse files
authored
Merge pull request #232 from dtolnay/drop
Implement Drop nonrecursively for TokenStream
2 parents d0415e3 + 0bbae26 commit d3d980e

File tree

1 file changed

+28
-4
lines changed

1 file changed

+28
-4
lines changed

src/fallback.rs

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ use std::cell::RefCell;
66
use std::cmp;
77
use std::fmt;
88
use std::iter;
9+
use std::mem;
910
use std::ops::RangeBounds;
1011
#[cfg(procmacro2_semver_exempt)]
1112
use std::path::Path;
@@ -44,6 +45,29 @@ impl TokenStream {
4445
pub fn is_empty(&self) -> bool {
4546
self.inner.len() == 0
4647
}
48+
49+
fn take_inner(&mut self) -> Vec<TokenTree> {
50+
mem::replace(&mut self.inner, Vec::new())
51+
}
52+
}
53+
54+
// Nonrecursive to prevent stack overflow.
//
// A naive recursive drop of deeply nested Group tokens would use one stack
// frame per nesting level and can overflow the stack. Instead, drop works
// through an explicit worklist: pop a token, and if it is a Group, splice the
// group's child tokens back onto `self.inner` so they are processed on a
// later iteration of the same loop. By the time each Group value itself is
// dropped (at the end of the loop body), its stream has already been emptied
// via `take_inner`, so its own drop glue has nothing to recurse into.
impl Drop for TokenStream {
    fn drop(&mut self) {
        while let Some(token) = self.inner.pop() {
            // Only Group tokens contain a nested TokenStream; everything else
            // can be dropped normally.
            let group = match token {
                TokenTree::Group(group) => group.inner,
                _ => continue,
            };
            // When wrapping the compiler's proc_macro, only the fallback
            // variant holds one of *our* streams; a compiler-backed group is
            // dropped as-is (its recursion, if any, is not ours to flatten).
            #[cfg(wrap_proc_macro)]
            let group = match group {
                crate::imp::Group::Fallback(group) => group,
                _ => continue,
            };
            // Rebound separately rather than marking the match binding `mut`:
            // under cfg(wrap_proc_macro) the first `group` binding is moved,
            // not mutated, so `mut` there would trigger an unused_mut warning
            // in one of the two cfg configurations.
            let mut group = group;
            // Flatten the group's children into our own worklist; the group
            // then drops with an empty stream.
            self.inner.extend(group.stream.take_inner());
        }
    }
}
4872

4973
#[cfg(span_locations)]
@@ -168,8 +192,8 @@ impl iter::FromIterator<TokenStream> for TokenStream {
168192
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
169193
let mut v = Vec::new();
170194

171-
for stream in streams.into_iter() {
172-
v.extend(stream.inner);
195+
for mut stream in streams.into_iter() {
196+
v.extend(stream.take_inner());
173197
}
174198

175199
TokenStream { inner: v }
@@ -195,8 +219,8 @@ impl IntoIterator for TokenStream {
195219
type Item = TokenTree;
196220
type IntoIter = TokenTreeIter;
197221

198-
fn into_iter(self) -> TokenTreeIter {
199-
self.inner.into_iter()
222+
/// Consumes the stream, yielding its tokens.
///
/// Goes through `take_inner` rather than moving `self.inner` directly,
/// because `TokenStream` implements `Drop` and Rust forbids moving fields
/// out of a type with drop glue.
fn into_iter(mut self) -> TokenTreeIter {
    let tokens = self.take_inner();
    tokens.into_iter()
}
201225
}
202226

0 commit comments

Comments (0)