Commit 197cb26

Rollup merge of #145984 - nnethercote:TokenStream-cleanups, r=chenyukang

`TokenStream` cleanups

r? `@chenyukang`

Parents: 25163e8, 364a3be

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 65 additions & 76 deletions
@@ -3,15 +3,6 @@
 //! `TokenStream`s represent syntactic objects before they are converted into ASTs.
 //! A `TokenStream` is, roughly speaking, a sequence of [`TokenTree`]s,
 //! which are themselves a single [`Token`] or a `Delimited` subsequence of tokens.
-//!
-//! ## Ownership
-//!
-//! `TokenStream`s are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a `TokenStream`
-//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
-//! the original. This essentially coerces `TokenStream`s into "views" of their subparts,
-//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
-//! ownership of the original.
 
 use std::borrow::Cow;
 use std::ops::Range;
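The deleted doc comment above described `TokenStream` as a rope with reference-counted children, which appears stale relative to the definition shown later in this diff: a `TokenStream` today is a flat `Arc<Vec<TokenTree>>`, so cloning one is a reference-count bump rather than a deep copy. A minimal sketch of that property (crate-internal, since the `0` field is `pub(crate)`; the helper function and the assumption that `Arc` here is `std::sync::Arc` are illustrative, not part of the commit):

use std::sync::Arc;

use crate::tokenstream::TokenStream;

// Hypothetical helper, for illustration only.
fn clone_is_shallow(ts: &TokenStream) {
    // `#[derive(Clone)]` on `TokenStream(pub(crate) Arc<Vec<TokenTree>>)` clones the
    // `Arc`, so both handles share the same backing `Vec<TokenTree>`.
    let copy = ts.clone();
    debug_assert!(Arc::ptr_eq(&ts.0, &copy.0));
}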
@@ -99,17 +90,6 @@ impl TokenTree {
     }
 }
 
-impl<CTX> HashStable<CTX> for TokenStream
-where
-    CTX: crate::HashStableContext,
-{
-    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
-        for sub_tt in self.iter() {
-            sub_tt.hash_stable(hcx, hasher);
-        }
-    }
-}
-
 /// A lazy version of [`AttrTokenStream`], which defers creation of an actual
 /// `AttrTokenStream` until it is needed.
 #[derive(Clone)]
@@ -556,10 +536,6 @@ pub struct AttrsTarget {
     pub tokens: LazyAttrTokenStream,
 }
 
-/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
-#[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
-
 /// Indicates whether a token can join with the following token to form a
 /// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
 /// guide pretty-printing, which is where the `JointHidden` value (which isn't
@@ -620,58 +596,9 @@ pub enum Spacing {
     JointHidden,
 }
 
-impl TokenStream {
-    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
-    /// separating the two arguments with a comma for diagnostic suggestions.
-    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
-        // Used to suggest if a user writes `foo!(a b);`
-        let mut suggestion = None;
-        let mut iter = self.0.iter().enumerate().peekable();
-        while let Some((pos, ts)) = iter.next() {
-            if let Some((_, next)) = iter.peek() {
-                let sp = match (&ts, &next) {
-                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
-                    (
-                        TokenTree::Token(token_left, Spacing::Alone),
-                        TokenTree::Token(token_right, _),
-                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
-                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
-                    {
-                        token_left.span
-                    }
-                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
-                    _ => continue,
-                };
-                let sp = sp.shrink_to_hi();
-                let comma = TokenTree::token_alone(token::Comma, sp);
-                suggestion = Some((pos, comma, sp));
-            }
-        }
-        if let Some((pos, comma, sp)) = suggestion {
-            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
-            let parts = self.0.split_at(pos + 1);
-            new_stream.extend_from_slice(parts.0);
-            new_stream.push(comma);
-            new_stream.extend_from_slice(parts.1);
-            return Some((TokenStream::new(new_stream), sp));
-        }
-        None
-    }
-}
-
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
-        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
-    }
-}
-
-impl Eq for TokenStream {}
-
-impl PartialEq<TokenStream> for TokenStream {
-    fn eq(&self, other: &TokenStream) -> bool {
-        self.iter().eq(other.iter())
-    }
-}
+/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
+#[derive(Clone, Debug, Default, Encodable, Decodable)]
+pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
 
 impl TokenStream {
     pub fn new(tts: Vec<TokenTree>) -> TokenStream {
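For orientation, the trait impls being shuffled in this hunk (and re-added further down) are small: `FromIterator<TokenTree>` just collects into `TokenStream::new`, and `PartialEq` compares the two streams tree-by-tree via `iter()`. A hedged sketch of how they compose, using only constructors visible in this file; `DUMMY_SP`, the `use` paths, and the test-style function are assumptions for illustration:

use rustc_ast::token;
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_span::DUMMY_SP;

// Illustrative only; not part of the commit.
fn streams_compare_by_contents() {
    // `TokenTree::token_alone` is the same constructor `add_comma` uses.
    let comma = || TokenTree::token_alone(token::Comma, DUMMY_SP);

    // `FromIterator<TokenTree>` collects the trees into `TokenStream::new(...)`.
    let collected: TokenStream = (0..2).map(|_| comma()).collect();
    let explicit = TokenStream::new(vec![comma(), comma()]);

    // `PartialEq` walks both streams with `iter()` and compares tree-by-tree.
    assert_eq!(collected, explicit);
}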
@@ -847,6 +774,68 @@ impl TokenStream {
             }
         }
     }
+
+    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
+    /// separating the two arguments with a comma for diagnostic suggestions.
+    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
+        // Used to suggest if a user writes `foo!(a b);`
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
+                    (
+                        TokenTree::Token(token_left, Spacing::Alone),
+                        TokenTree::Token(token_right, _),
+                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
+                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
+                    {
+                        token_left.span
+                    }
+                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = TokenTree::token_alone(token::Comma, sp);
+                suggestion = Some((pos, comma, sp));
+            }
+        }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
+        None
+    }
+}
+
+impl PartialEq<TokenStream> for TokenStream {
+    fn eq(&self, other: &TokenStream) -> bool {
+        self.iter().eq(other.iter())
+    }
+}
+
+impl Eq for TokenStream {}
+
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
+        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
+    }
+}
+
+impl<CTX> HashStable<CTX> for TokenStream
+where
+    CTX: crate::HashStableContext,
+{
+    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+        for sub_tt in self.iter() {
+            sub_tt.hash_stable(hcx, hasher);
+        }
+    }
 }
 
 #[derive(Clone)]
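The relocated `add_comma` backs the parser's `foo!(a b);` diagnostic: when two adjacent trees are non-reserved identifiers or literals, it returns a copy of the stream with a `,` token spliced between them, plus the span where the comma belongs. A rough usage sketch follows; the identifier construction assumes the current `token::Ident(Symbol, IdentIsRaw)` shape and `DUMMY_SP`, and running it would need rustc's session globals, so treat those details as illustrative rather than part of this change:

use rustc_ast::token::{self, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_span::symbol::Symbol;
use rustc_span::DUMMY_SP;

// Illustrative only; not part of the commit.
fn suggest_missing_comma() {
    // `token_alone` yields `Spacing::Alone`, which the
    // `(TokenTree::Token(token_left, Spacing::Alone), ..)` arm of `add_comma` requires.
    let ident = |s: &str| {
        TokenTree::token_alone(token::Ident(Symbol::intern(s), IdentIsRaw::No), DUMMY_SP)
    };

    // Models the argument tokens of `foo!(a b);`.
    let args = TokenStream::new(vec![ident("a"), ident("b")]);

    if let Some((with_comma, comma_span)) = args.add_comma() {
        // The suggested stream is `a , b`; `comma_span` sits at the end of `a`,
        // which is where the "missing comma" suggestion points.
        assert_eq!(with_comma.iter().count(), 3);
        let _ = comma_span;
    }
}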
