@@ -5,22 +5,24 @@ extern crate rustc_ast;
 extern crate rustc_hir;
 extern crate rustc_span;
 
-use proc_macro2::*;
-use quote::quote;
-use std::str::FromStr;
-
 use rustc_ast::{
-    token::TokenKind,
-    tokenstream::{TokenStream, TokenTree},
+    token::{Delimiter, Token, TokenKind},
+    tokenstream::{CursorRef, DelimSpan, TokenStream, TokenTree, TreeAndSpacing},
     AttrKind, Attribute, MacArgs,
 };
 use rustc_hir::def::Res;
 use rustc_hir::*;
 use rustc_lint::{LateContext, LateLintPass};
-use rustc_span::{def_id::DefId, symbol::Symbol, Span};
-use std::collections::{HashMap, HashSet, VecDeque};
+use rustc_span::{
+    def_id::DefId,
+    symbol::{Ident, Symbol},
+    Span, DUMMY_SP,
+};
+use std::collections::{HashMap, VecDeque};
 use std::default::Default;
 
+
+
 use clippy_utils::{diagnostics::span_lint_and_help, ty::match_type};
 use if_chain::if_chain;
 use solana_lints::paths;
@@ -54,6 +56,27 @@ struct DuplicateMutableAccounts {
 }
 
 impl<'tcx> LateLintPass<'tcx> for DuplicateMutableAccounts {
+    // fn check_mod(
+    //     &mut self,
+    //     cx: &LateContext<'tcx>,
+    //     _: &'tcx Mod<'tcx>,
+    //     span: Span,
+    //     _: HirId
+    // ) {
+    //     println!("new");
+    //     for _ in 0..3 {
+    //         println!("linting");
+    //         span_lint_and_help(
+    //             cx,
+    //             DUPLICATE_MUTABLE_ACCOUNTS,
+    //             span,
+    //             "dummy",
+    //             None,
+    //             ""
+    //         );
+    //     }
+    // }
+
     fn check_struct_def(&mut self, cx: &LateContext<'tcx>, variant_data: &'tcx VariantData<'tcx>) {
         if let VariantData::Struct(fields, _) = variant_data {
             fields.iter().for_each(|field| {
@@ -91,11 +114,11 @@ impl<'tcx> LateLintPass<'tcx> for DuplicateMutableAccounts {
             if name.as_str() == "account";
             if let MacArgs::Delimited(_, _, token_stream) = &attr_item.args;
             then {
-                // TODO: figure out stream representation. At this point, may parse?
-                // TODO: filter mechanism: only insert constraints that match form "constraint = _.key() != _.key()"
-                // TODO: may need to parse each constraint as a separate stream, as comma-delimited
-                self.streams.0.push(token_stream.clone());
-                // println!("{:#?}", attribute);
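+                // each comma-separated constraint in #[account(..)] is stored as
+                // its own TokenStream so it can be compared individually later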
+                for split in split(token_stream.trees(), TokenKind::Comma) {
+                    // println!("{:#?}", split);
+                    self.streams.0.push(split);
+                }
+
             }
         }
     }
@@ -107,17 +130,29 @@ impl<'tcx> LateLintPass<'tcx> for DuplicateMutableAccounts {
         // generate static set of possible constraints
         let gen_constraints = generate_possible_expected_constraints(v);
 
-        // assert the following checks:
-        for (one, reflexive) in gen_constraints {
-            if !(self.streams.contains(one) || self.streams.contains(reflexive)) {
-                // span_lint_and_help(
-                //     cx,
-                //     DUPLICATE_MUTABLE_ACCOUNTS,
-                //     v[0].1,
-                //     "identical account types",
-                //     Some(v[1].1),
-                //     &format!("add an anchor key check constraint: #[account(constraint = {}.key() != {}.key())]", v[0].0, v[1].0)
-                // );
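+        // lint only if neither direction of the expected key check constraint
+        // appears among the streams collected from the #[account] attributes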
+        for ((one, symmetric), symbols) in gen_constraints {
+            // println!("{:#?}\n {:#?}", one, symmetric);
+            if !(self.streams.contains(one) || self.streams.contains(symmetric)) {
+                println!("lint for {} {}", symbols.0, symbols.1);
+
+                // stupid way to get spans for offending types
+                let mut spans: Vec<Span> = Vec::new();
+                for (sym, span) in v {
+                    if &symbols.0 == sym || &symbols.1 == sym {
+                        spans.push(span.clone());
+                    }
+                }
+
+                // TODO: for some reason, will only print out 2 messages, not 3
+                // println!("{:?}", spans);
+                span_lint_and_help(
+                    cx,
+                    DUPLICATE_MUTABLE_ACCOUNTS,
+                    spans[0],
+                    "identical account types without a key check constraint",
+                    Some(spans[1]),
+                    &format!("add an anchor key check constraint: #[account(constraint = {}.key() != {}.key())]", symbols.0, symbols.1)
+                );
             }
         }
     }
@@ -141,8 +176,29 @@ fn get_anchor_account_type(segment: &PathSegment<'_>) -> Option<DefId> {
     }
 }
 
+// Collect trees into a TokenStream until the delimiter is encountered, then push
+// that stream and start a new one; continue until the end of the cursor is reached.
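+// e.g. splitting the trees of `constraint = a.key() != b.key(), mut` on
+// `TokenKind::Comma` yields `constraint = a.key() != b.key()` and `mut`.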
+fn split(stream: CursorRef, delimiter: TokenKind) -> Vec<TokenStream> {
+    let mut split_streams: Vec<TokenStream> = Vec::new();
+    let mut temp: Vec<TreeAndSpacing> = Vec::new();
+    let delim = TokenTree::Token(Token::new(delimiter, DUMMY_SP));
+
+    stream.for_each(|t| {
+        if t.eq_unspanned(&delim) {
+            split_streams.push(TokenStream::new(temp.clone()));
+            temp.clear();
+        } else {
+            temp.push(TreeAndSpacing::from(t.to_owned()));
+        }
+    });
+    split_streams.push(TokenStream::new(temp));
+    split_streams
+}
+
 /// Generates a static set of possible expected key check constraints necessary for `values`.
-fn generate_possible_expected_constraints(values: &Vec<(Symbol, Span)>) -> Vec<(proc_macro2::TokenStream, proc_macro2::TokenStream)> {
+fn generate_possible_expected_constraints(
+    values: &Vec<(Symbol, Span)>,
+) -> Vec<((TokenStream, TokenStream), (Symbol, Symbol))> {
     // TODO: may start with a VecDeque in the first place?
     let mut deq = VecDeque::from(values.clone());
     let mut gen_set = Vec::new();
@@ -151,28 +207,55 @@ fn generate_possible_expected_constraints(values: &Vec<(Symbol, Span)>) -> Vec<(
         let first = deq.pop_front().unwrap().0;
         // generate stream for all other values in vec
         for (other, _) in &deq {
-            let constraint = format!("constraint = {}.key() != {}.key()", first.as_str(), other.as_str());
-            let reflexive = format!("constraint = {}.key() != {}.key()", other.as_str(), first.as_str());
-
-            // using quote
-            // let stream = quote!(constraint = first.as_str().key() != other.as_str().key());
-
-            let stream: proc_macro2::TokenStream = constraint.parse().unwrap();
-            let reflex_stream: proc_macro2::TokenStream = reflexive.parse().unwrap();
+            let stream = create_key_check_constraint_tokenstream(&first, other);
+            let symmetric_stream = create_key_check_constraint_tokenstream(other, &first);
             // println!("{:#?}", stream);
 
-            gen_set.push((stream, reflex_stream));
+            gen_set.push(((stream, symmetric_stream), (first, other.clone())));
         }
     }
     gen_set
 }
 
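+// Builds the token stream for `constraint = a.key() != b.key()` using dummy spans,
+// i.e. the same token sequence the lint expects to find in an #[account] attribute.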
+// TODO: figure out more efficient way to do this
+fn create_key_check_constraint_tokenstream(a: &Symbol, b: &Symbol) -> TokenStream {
+    let constraint = vec![
+        // TODO: test string matching by changing some string
+        TreeAndSpacing::from(create_token("constraint")),
+        TreeAndSpacing::from(TokenTree::Token(Token::new(TokenKind::Eq, DUMMY_SP))),
+        TreeAndSpacing::from(create_token(a.as_str())),
+        TreeAndSpacing::from(TokenTree::Token(Token::new(TokenKind::Dot, DUMMY_SP))),
+        TreeAndSpacing::from(create_token("key")),
+        TreeAndSpacing::from(TokenTree::Delimited(
+            DelimSpan::dummy(),
+            Delimiter::Parenthesis,
+            TokenStream::new(vec![]),
+        )),
+        TreeAndSpacing::from(TokenTree::Token(Token::new(TokenKind::Ne, DUMMY_SP))),
+        TreeAndSpacing::from(create_token(b.as_str())),
+        TreeAndSpacing::from(TokenTree::Token(Token::new(TokenKind::Dot, DUMMY_SP))),
+        TreeAndSpacing::from(create_token("key")),
+        TreeAndSpacing::from(TokenTree::Delimited(
+            DelimSpan::dummy(),
+            Delimiter::Parenthesis,
+            TokenStream::new(vec![]),
+        )),
+    ];
+
+    TokenStream::new(constraint)
+}
+
+fn create_token(s: &str) -> TokenTree {
+    let ident = Ident::from_str(s);
+    TokenTree::Token(Token::from_ast_ident(ident))
+}
+
 #[derive(Debug, Default)]
 pub struct Streams(Vec<TokenStream>);
 
 impl Streams {
     fn contains(&self, other: TokenStream) -> bool {
-        self.0.iter().any(|stream| stream == &other)
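+        // eq_unspanned compares the streams without regard to span information, so a
+        // generated constraint with dummy spans can match one parsed from source code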
+        self.0.iter().any(|stream| stream.eq_unspanned(&other))
     }
 }
 
@@ -190,27 +273,3 @@ fn insecure_2() {
 fn secure() {
     dylint_testing::ui_test_example(env!("CARGO_PKG_NAME"), "secure");
 }
-
-// fn has_satisfying_stream(streams: &Vec<Stream>, field_names: &Vec<(Symbol, Span)>) -> bool {
-//     for stream in streams {
-//         if stream.contains(TokenKind::Ne)
-//             && field_names
-//                 .iter()
-//                 // TODO: if true, will not match. figure out what the bool signifies
-//                 .all(|(sym, _)| stream.contains(TokenKind::Ident(*sym, false)))
-//         {
-//             return true;
-//         }
-//     }
-//     return false;
-// }
-
-// Generates a TokenStream that matches `constraint = a.key() != b.key()` and its reflexive
-// fn generate_key_check_constraint(a: Symbol, b: Symbol) -> (TokenStream, TokenStream) {
-//     let mut tree_and_spacing = vec![];
-//     // create token
-//     let tree = TokenTree::token(TokenKind::Ident(Symbol::intern("constraint"), false), span); // TODO: generate span somehow
-//     tree_and_spacing.push(TreeAndSpacing::from(tree));
-
-//     TokenStream::new(tree_and_spacing)
-// }