Skip to content

Commit 31a7a04

Browse files
authored
refactor(cubesql): Fix clippy warnings in cubesql, part 2 (#9262)
* refactor(cubesql): Fix if_same_then_else warning
* refactor(cubesql): Fix into_iter_on_ref warning
* refactor(cubesql): Fix iter_cloned_collect warning
* refactor(cubesql): Fix iter_next_slice warning
* refactor(cubesql): Fix manual_flatten warning
* refactor(cubesql): Fix manual_range_contains warning
* refactor(cubesql): Fix map_clone warning
* refactor(cubesql): Fix map_flatten warning
* refactor(cubesql): Fix map_identity warning
* refactor(cubesql): Fix useless_vec warning
* refactor(cubesql): Fix useless_conversion warning
* refactor(cubesql): Fix unwrap_or_default warning
* refactor(cubesql): Fix clone_on_copy warning
* refactor(cubesql): Fix op_ref warning
1 parent e12dc44 commit 31a7a04

33 files changed

+531
-620
lines changed

rust/cubesql/cubesql/Cargo.toml

Lines changed: 0 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -93,25 +93,15 @@ harness = false
9393
# Feel free to remove any rule from here and fix all warnings with it
9494
# Or to write a comment why rule should stay disabled
9595
[lints.clippy]
96-
clone_on_copy = "allow"
9796
collapsible_if = "allow"
9897
collapsible_match = "allow"
9998
collapsible_else_if = "allow"
10099
comparison_chain = "allow"
101100
derive_ord_xor_partial_ord = "allow"
102101
field_reassign_with_default = "allow"
103-
if_same_then_else = "allow"
104-
into_iter_on_ref = "allow"
105-
iter_cloned_collect = "allow"
106-
iter_next_slice = "allow"
107102
len_without_is_empty = "allow"
108103
len_zero = "allow"
109104
let_and_return = "allow"
110-
manual_flatten = "allow"
111-
manual_range_contains = "allow"
112-
map_clone = "allow"
113-
map_flatten = "allow"
114-
map_identity = "allow"
115105
match_like_matches_macro = "allow"
116106
match_ref_pats = "allow"
117107
match_single_binding = "allow"
@@ -129,7 +119,6 @@ new_without_default = "allow"
129119
non_canonical_partial_ord_impl = "allow"
130120
nonminimal_bool = "allow"
131121
only_used_in_recursion = "allow"
132-
op_ref = "allow"
133122
option_as_ref_deref = "allow"
134123
partialeq_ne_impl = "allow"
135124
ptr_arg = "allow"
@@ -149,8 +138,5 @@ unnecessary_mut_passed = "allow"
149138
unnecessary_to_owned = "allow"
150139
unnecessary_unwrap = "allow"
151140
unused_unit = "allow"
152-
unwrap_or_default = "allow"
153-
useless_conversion = "allow"
154141
useless_format = "allow"
155-
useless_vec = "allow"
156142
wrong_self_convention = "allow"

rust/cubesql/cubesql/e2e/tests/postgres.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ impl PostgresIntegrationTestSuite {
134134
for row in res.into_iter() {
135135
let mut values: Vec<String> = Vec::new();
136136

137-
for (idx, column) in row.columns().into_iter().enumerate() {
137+
for (idx, column) in row.columns().iter().enumerate() {
138138
if !description_done {
139139
description.push(format!(
140140
"{} type: {} ({})",
@@ -1272,7 +1272,7 @@ impl AsyncTestSuite for PostgresIntegrationTestSuite {
12721272
let columns = rows.first().unwrap().columns();
12731273
assert_eq!(
12741274
columns
1275-
.into_iter()
1275+
.iter()
12761276
.map(|col| col.type_().oid())
12771277
.collect::<Vec<u32>>(),
12781278
vec![1184, 1114]

rust/cubesql/cubesql/e2e/tests/utils.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ pub fn escape_snapshot_name(name: String) -> String {
2121

2222
// Windows limit
2323
if name.len() > 200 {
24-
name.chars().into_iter().take(200).collect()
24+
name.chars().take(200).collect()
2525
} else {
2626
name
2727
}

rust/cubesql/cubesql/src/compile/engine/df/optimizers/filter_push_down.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ fn filter_push_down(
120120
// let predicates = split_predicates(predicate)
121121
let predicates = vec![predicate.clone()]
122122
.into_iter()
123-
.chain(predicates.into_iter())
123+
.chain(predicates)
124124
.collect::<Vec<_>>();
125125
let mut pushable_predicates = vec![];
126126
let mut non_pushable_predicates = vec![];
@@ -326,10 +326,10 @@ fn filter_push_down(
326326
optimizer_config,
327327
)?),
328328
on: on.clone(),
329-
join_type: join_type.clone(),
330-
join_constraint: join_constraint.clone(),
329+
join_type: *join_type,
330+
join_constraint: *join_constraint,
331331
schema: schema.clone(),
332-
null_equals_null: null_equals_null.clone(),
332+
null_equals_null: *null_equals_null,
333333
}),
334334
)
335335
}
@@ -483,8 +483,8 @@ fn filter_push_down(
483483
issue_filter(
484484
predicates,
485485
LogicalPlan::Limit(Limit {
486-
skip: skip.clone(),
487-
fetch: fetch.clone(),
486+
skip: *skip,
487+
fetch: *fetch,
488488
input: Arc::new(filter_push_down(
489489
optimizer,
490490
input,

rust/cubesql/cubesql/src/compile/engine/df/optimizers/limit_push_down.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -203,10 +203,10 @@ fn limit_push_down(
203203
optimizer_config,
204204
)?),
205205
on: on.clone(),
206-
join_type: join_type.clone(),
207-
join_constraint: join_constraint.clone(),
206+
join_type: *join_type,
207+
join_constraint: *join_constraint,
208208
schema: schema.clone(),
209-
null_equals_null: null_equals_null.clone(),
209+
null_equals_null: *null_equals_null,
210210
}),
211211
)
212212
}

rust/cubesql/cubesql/src/compile/engine/df/optimizers/sort_push_down.rs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -72,8 +72,8 @@ fn sort_push_down(
7272
} => Ok(if is_column_expr(expr) {
7373
rewrite(expr, &rewrite_map)?.map(|expr| Expr::Sort {
7474
expr: Box::new(expr),
75-
asc: asc.clone(),
76-
nulls_first: nulls_first.clone(),
75+
asc: *asc,
76+
nulls_first: *nulls_first,
7777
})
7878
} else {
7979
None
@@ -199,10 +199,10 @@ fn sort_push_down(
199199
)?),
200200
right: Arc::new(sort_push_down(optimizer, right, None, optimizer_config)?),
201201
on: on.clone(),
202-
join_type: join_type.clone(),
203-
join_constraint: join_constraint.clone(),
202+
join_type: *join_type,
203+
join_constraint: *join_constraint,
204204
schema: schema.clone(),
205-
null_equals_null: null_equals_null.clone(),
205+
null_equals_null: *null_equals_null,
206206
}));
207207
}
208208
}
@@ -213,10 +213,10 @@ fn sort_push_down(
213213
left: Arc::new(sort_push_down(optimizer, left, None, optimizer_config)?),
214214
right: Arc::new(sort_push_down(optimizer, right, None, optimizer_config)?),
215215
on: on.clone(),
216-
join_type: join_type.clone(),
217-
join_constraint: join_constraint.clone(),
216+
join_type: *join_type,
217+
join_constraint: *join_constraint,
218218
schema: schema.clone(),
219-
null_equals_null: null_equals_null.clone(),
219+
null_equals_null: *null_equals_null,
220220
}),
221221
)
222222
}
@@ -285,8 +285,8 @@ fn sort_push_down(
285285
issue_sort(
286286
sort_expr,
287287
LogicalPlan::Limit(Limit {
288-
skip: skip.clone(),
289-
fetch: fetch.clone(),
288+
skip: *skip,
289+
fetch: *fetch,
290290
input: Arc::new(sort_push_down(optimizer, input, None, optimizer_config)?),
291291
}),
292292
)

rust/cubesql/cubesql/src/compile/engine/df/optimizers/utils.rs

Lines changed: 23 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
4444
};
4545
rewrites.map(|(left, right)| Expr::BinaryExpr {
4646
left: Box::new(left),
47-
op: op.clone(),
47+
op: *op,
4848
right: Box::new(right),
4949
})
5050
}
@@ -60,9 +60,9 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
6060
};
6161
rewrites.map(|(left, right)| Expr::AnyExpr {
6262
left: Box::new(left),
63-
op: op.clone(),
63+
op: *op,
6464
right: Box::new(right),
65-
all: all.clone(),
65+
all: *all,
6666
})
6767
}
6868
Expr::Like(Like {
@@ -77,10 +77,10 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
7777
};
7878
rewrites.map(|(expr, pattern)| {
7979
Expr::Like(Like {
80-
negated: negated.clone(),
80+
negated: *negated,
8181
expr: Box::new(expr),
8282
pattern: Box::new(pattern),
83-
escape_char: escape_char.clone(),
83+
escape_char: *escape_char,
8484
})
8585
})
8686
}
@@ -96,10 +96,10 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
9696
};
9797
rewrites.map(|(expr, pattern)| {
9898
Expr::ILike(Like {
99-
negated: negated.clone(),
99+
negated: *negated,
100100
expr: Box::new(expr),
101101
pattern: Box::new(pattern),
102-
escape_char: escape_char.clone(),
102+
escape_char: *escape_char,
103103
})
104104
})
105105
}
@@ -115,10 +115,10 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
115115
};
116116
rewrites.map(|(expr, pattern)| {
117117
Expr::SimilarTo(Like {
118-
negated: negated.clone(),
118+
negated: *negated,
119119
expr: Box::new(expr),
120120
pattern: Box::new(pattern),
121-
escape_char: escape_char.clone(),
121+
escape_char: *escape_char,
122122
})
123123
})
124124
}
@@ -148,7 +148,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
148148
};
149149
rewrites.map(|(expr, low, high)| Expr::Between {
150150
expr: Box::new(expr),
151-
negated: negated.clone(),
151+
negated: *negated,
152152
low: Box::new(low),
153153
high: Box::new(high),
154154
})
@@ -211,8 +211,8 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
211211
nulls_first,
212212
} => rewrite(expr, map)?.map(|expr| Expr::Sort {
213213
expr: Box::new(expr),
214-
asc: asc.clone(),
215-
nulls_first: nulls_first.clone(),
214+
asc: *asc,
215+
nulls_first: *nulls_first,
216216
}),
217217
Expr::ScalarFunction { fun, args } => args
218218
.iter()
@@ -249,7 +249,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
249249
.map(|args| Expr::AggregateFunction {
250250
fun: fun.clone(),
251251
args,
252-
distinct: distinct.clone(),
252+
distinct: *distinct,
253253
}),
254254
Expr::WindowFunction {
255255
fun,
@@ -283,7 +283,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
283283
args,
284284
partition_by,
285285
order_by,
286-
window_frame: window_frame.clone(),
286+
window_frame: *window_frame,
287287
})
288288
}
289289
Expr::AggregateUDF { fun, args } => args
@@ -310,7 +310,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
310310
.map(|list| Expr::InList {
311311
expr: Box::new(expr),
312312
list,
313-
negated: negated.clone(),
313+
negated: *negated,
314314
})
315315
}
316316
// As rewrites are used to push things down or up the plan, wildcards
@@ -329,7 +329,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
329329
rewrites.map(|(expr, subquery)| Expr::InSubquery {
330330
expr: Box::new(expr),
331331
subquery: Box::new(subquery),
332-
negated: negated.clone(),
332+
negated: *negated,
333333
})
334334
}
335335
})
@@ -415,25 +415,25 @@ pub fn get_expr_columns(expr: &Expr) -> Vec<Column> {
415415
Expr::BinaryExpr { left, right, .. } | Expr::AnyExpr { left, right, .. } => {
416416
get_expr_columns(left)
417417
.into_iter()
418-
.chain(get_expr_columns(right).into_iter())
418+
.chain(get_expr_columns(right))
419419
.collect()
420420
}
421421
Expr::Like(Like { expr, pattern, .. })
422422
| Expr::ILike(Like { expr, pattern, .. })
423423
| Expr::SimilarTo(Like { expr, pattern, .. }) => get_expr_columns(expr)
424424
.into_iter()
425-
.chain(get_expr_columns(pattern).into_iter())
425+
.chain(get_expr_columns(pattern))
426426
.collect(),
427427
Expr::GetIndexedField { expr, key } => get_expr_columns(expr)
428428
.into_iter()
429-
.chain(get_expr_columns(key).into_iter())
429+
.chain(get_expr_columns(key))
430430
.collect(),
431431
Expr::Between {
432432
expr, low, high, ..
433433
} => get_expr_columns(expr)
434434
.into_iter()
435-
.chain(get_expr_columns(low).into_iter())
436-
.chain(get_expr_columns(high).into_iter())
435+
.chain(get_expr_columns(low))
436+
.chain(get_expr_columns(high))
437437
.collect(),
438438
Expr::Case {
439439
expr,
@@ -447,15 +447,14 @@ pub fn get_expr_columns(expr: &Expr) -> Vec<Column> {
447447
.chain(when_then_expr.iter().flat_map(|(when, then)| {
448448
get_expr_columns(when)
449449
.into_iter()
450-
.chain(get_expr_columns(then).into_iter())
450+
.chain(get_expr_columns(then))
451451
.collect::<Vec<_>>()
452452
}))
453453
.chain(
454454
else_expr
455455
.as_ref()
456456
.map(|else_expr| get_expr_columns(else_expr))
457-
.unwrap_or(vec![])
458-
.into_iter(),
457+
.unwrap_or(vec![]),
459458
)
460459
.collect(),
461460
Expr::ScalarFunction { args, .. }

0 commit comments

Comments (0)