@@ -10,7 +10,7 @@ use std::default::Default;
 
 use rustc_ast::{
     token::{Delimiter, Token, TokenKind},
-    tokenstream::{CursorRef, DelimSpan, TokenStream, TokenTree, TreeAndSpacing},
+    tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing},
     AttrKind, Attribute, MacArgs,
 };
 use rustc_hir::{def::Res, FieldDef, GenericArg, QPath, TyKind, VariantData};
@@ -84,9 +84,10 @@ impl<'tcx> LateLintPass<'tcx> for DuplicateMutableAccounts {
             if let MacArgs::Delimited(_, _, token_stream) = &attr_item.args;
             then {
                 // Parse each constraint as a separate TokenStream
-                for delimited_stream in split(token_stream.trees(), TokenKind::Comma) {
-                    self.streams.0.push(delimited_stream);
-                }
+                // for delimited_stream in split(token_stream.trees(), TokenKind::Comma) {
+                //     self.streams.0.push(delimited_stream);
+                // }
+                self.streams.0.push(token_stream.clone());
             }
         }
     }
@@ -107,6 +108,7 @@ impl<'tcx> LateLintPass<'tcx> for DuplicateMutableAccounts {
                 || self.streams.contains(&symmetric_stream))
             {
                 // NOTE: for some reason, will only print out 2 messages, not 3
+                // println!("spanning lint");
                 span_lint_and_help(
                     cx,
                     DUPLICATE_MUTABLE_ACCOUNTS,
@@ -155,31 +157,11 @@ fn get_def_id(ty: &rustc_hir::Ty) -> Option<DefId> {
     }
 }
 
-/// Splits `stream` into a vector of substreams, separated by `delimiter`.
-fn split(stream: CursorRef, delimiter: TokenKind) -> Vec<TokenStream> {
-    let mut split_streams: Vec<TokenStream> = Vec::new();
-    let mut temp: Vec<TreeAndSpacing> = Vec::new();
-    let delim = TokenTree::Token(Token::new(delimiter, DUMMY_SP));
-
-    stream.for_each(|t| {
-        if t.eq_unspanned(&delim) {
-            split_streams.push(TokenStream::new(temp.clone()));
-            temp.clear();
-        } else {
-            temp.push(TreeAndSpacing::from(t.clone()));
-        }
-    });
-    split_streams.push(TokenStream::new(temp));
-    split_streams
-}
-
-/// Returns a `TokenStream` of form: constraint = `a`.key() != `b`.key().
+/// Returns a `TokenStream` of form: `a`.key() != `b`.key().
 fn create_key_check_constraint_tokenstream(a: Symbol, b: Symbol) -> TokenStream {
     // TODO: may be more efficient way to do this, since the stream is effectively fixed
     // and determined. Only two tokens are variable.
     let constraint = vec![
-        TreeAndSpacing::from(create_token_from_ident("constraint")),
-        TreeAndSpacing::from(TokenTree::Token(Token::new(TokenKind::Eq, DUMMY_SP))),
         TreeAndSpacing::from(create_token_from_ident(a.as_str())),
         TreeAndSpacing::from(TokenTree::Token(Token::new(TokenKind::Dot, DUMMY_SP))),
         TreeAndSpacing::from(create_token_from_ident("key")),
@@ -212,13 +194,59 @@ fn create_token_from_ident(s: &str) -> TokenTree {
 pub struct Streams(Vec<TokenStream>);
 
 impl Streams {
-    /// Returns true if `self` contains `other`, by comparing if there is an
-    /// identical `TokenStream` in `self` regardless of span.
+    /// Returns true if `self` has a `TokenStream` that `other` is a substream of.
     fn contains(&self, other: &TokenStream) -> bool {
-        self.0.iter().any(|stream| stream.eq_unspanned(other))
+        self.0
+            .iter()
+            .any(|token_stream| Self::is_substream(token_stream, other))
+    }
+
+    /// Returns true if `other` is a substream of `stream`. By substream we mean in the
+    /// sense of a substring.
+    // NOTE: a possible optimization is, when a match is found, to remove the matched
+    // TokenTrees from the TokenStream, since the constraint has been "checked" so it never
+    // needs to be validated again. This cuts down the number of comparisons.
+    fn is_substream(stream: &TokenStream, other: &TokenStream) -> bool {
+        let other_len = other.len();
+        for i in 0..stream.len() {
+            for (j, other_token) in other.trees().enumerate() {
+                match stream.trees().nth(i + j) {
+                    Some(token_tree) => {
+                        // println!("Comparing {:#?} with {:#?}", token_tree, other_token);
+                        if !token_tree.eq_unspanned(other_token) {
+                            break;
+                        }
+                        // reached the last index, so we have a match
+                        if j == other_len - 1 {
+                            return true;
+                        }
+                    }
+                    None => return false, // reached end of stream
+                }
+            }
+        }
+        false
     }
 }
 
+// /// Splits `stream` into a vector of substreams, separated by `delimiter`.
+// fn split(stream: CursorRef, delimiter: TokenKind) -> Vec<TokenStream> {
+//     let mut split_streams: Vec<TokenStream> = Vec::new();
+//     let mut temp: Vec<TreeAndSpacing> = Vec::new();
+//     let delim = TokenTree::Token(Token::new(delimiter, DUMMY_SP));
+
+//     stream.for_each(|t| {
+//         if t.eq_unspanned(&delim) {
+//             split_streams.push(TokenStream::new(temp.clone()));
+//             temp.clear();
+//         } else {
+//             temp.push(TreeAndSpacing::from(t.clone()));
+//         }
+//     });
+//     split_streams.push(TokenStream::new(temp));
+//     split_streams
+// }
+
 #[test]
 fn insecure() {
     dylint_testing::ui_test_example(env!("CARGO_PKG_NAME"), "insecure");
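
The new `Streams::is_substream` is essentially a naive substring search over token trees: for each starting offset in a stored stream, it compares tree by tree against `other`. A minimal standalone sketch of the same idea over plain string slices (hypothetical `is_subslice` helper, no rustc_ast types), which also illustrates why the generated check presumably no longer needs the leading `constraint =` tokens once whole attribute streams are stored:

// Naive "substring" search, mirroring the shape of `Streams::is_substream`:
// returns true if `needle` occurs as a contiguous run inside `haystack`.
fn is_subslice<T: PartialEq>(haystack: &[T], needle: &[T]) -> bool {
    // An empty needle trivially matches (and `windows(0)` would panic).
    if needle.is_empty() {
        return true;
    }
    haystack
        .windows(needle.len())
        .any(|window| window == needle)
}

fn main() {
    // A whole constraint list from an attribute, rendered token by token.
    let attr_tokens = [
        "mut", ",", "constraint", "=", "a", ".", "key", "(", ")", "!=", "b", ".", "key", "(", ")",
    ];
    // The generated check `a.key() != b.key()` occurs inside it as a contiguous run.
    let check = ["a", ".", "key", "(", ")", "!=", "b", ".", "key", "(", ")"];
    assert!(is_subslice(&attr_tokens, &check));
    // An unrelated check does not match.
    assert!(!is_subslice(&attr_tokens, &["c", ".", "key", "(", ")"]));
}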