
Commit 1798068

Use workspace to set clippy linters
Use them in yjit and zjit, and fix clippy issues.
1 parent fccd96c commit 1798068

File tree

17 files changed (+133, -116 lines)


Cargo.toml

Lines changed: 16 additions & 0 deletions
@@ -49,3 +49,19 @@ overflow-checks = true
 debug = true
 # Use ThinLTO. Much smaller output for a small amount of build time increase.
 lto = "thin"
+
+[workspace.lints.clippy]
+correctness = { level = "forbid", priority = 1 }
+suspicious = { level = "forbid", priority = 1 }
+perf = { level = "forbid", priority = 1 }
+
+complexity = { level = "deny", priority = -1 }
+
+style = { level = "allow", priority = -1 }
+pedantic = { level = "allow", priority = -1 }
+
+too_many_arguments = "allow"
+identity_op = "allow"
+
+unwrap_or_default = "forbid"
+ptr_arg = "deny"
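
The yjit and zjit crates opt into this shared table with a two-line [lints] workspace = true stanza (next two hunks), which in turn lets yjit/src/lib.rs drop its crate-level allow attributes further down. Groups pinned at priority -1 can still be overridden by the individual entries, which use the default priority: that is how the style group stays allowed while ptr_arg is denied and the unwrap_or_default lint remains enforced. As a rough illustration of the ptr_arg fix repeated throughout this commit, here is a minimal sketch; sum_indexes is a hypothetical helper, not repository code.

    // Hypothetical helper, only to illustrate the clippy::ptr_arg fix pattern.
    // A &Vec<usize> parameter would trip ptr_arg (denied above); a slice does not.
    fn sum_indexes(idxs: &[usize]) -> usize {
        idxs.iter().sum()
    }

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(sum_indexes(&v), 6);      // a &Vec<usize> coerces to &[usize]
        assert_eq!(sum_indexes(&[4, 5]), 9); // arrays and slices work unchanged
    }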

yjit/Cargo.toml

Lines changed: 3 additions & 0 deletions
@@ -24,3 +24,6 @@ disasm = ["capstone"]
 # from cfg!(debug_assertions) so that we can see disasm of the code
 # that would run in the release mode.
 runtime_checks = []
+
+[lints]
+workspace = true

yjit/src/asm/mod.rs

Lines changed: 8 additions & 8 deletions
@@ -224,16 +224,16 @@ impl CodeBlock {
     }

     /// Free the memory pages of given code page indexes
-    fn free_pages(&mut self, page_idxs: &Vec<usize>) {
-        let mut page_idxs = page_idxs.clone();
-        page_idxs.reverse(); // to loop with pop()
+    /// In Rust >= 1.77 this could probably be simplified with chunk_by.
+    fn free_pages(&mut self, page_idxs: &[usize]) {
+        let mut page_idxs = page_idxs.iter().rev().collect::<Vec<_>>();

         // Group adjacent page indexes and free them in batches to reduce the # of syscalls.
-        while let Some(page_idx) = page_idxs.pop() {
+        while let Some(&page_idx) = page_idxs.pop() {
             // Group first adjacent page indexes
             let mut batch_idxs = vec![page_idx];
-            while page_idxs.last() == Some(&(batch_idxs.last().unwrap() + 1)) {
-                batch_idxs.push(page_idxs.pop().unwrap());
+            while page_idxs.last() == Some(&&(*batch_idxs.last().unwrap() + 1)) {
+                batch_idxs.push(*page_idxs.pop().unwrap());
             }

             // Free the grouped pages at once
@@ -441,12 +441,12 @@

         // Ignore empty code ranges
         if start_addr == end_addr {
-            return (0..0).into_iter();
+            return 0..0;
         }

         let start_page = (start_addr.raw_addr(self) - mem_start) / self.page_size;
         let end_page = (end_addr.raw_addr(self) - mem_start - 1) / self.page_size;
-        (start_page..end_page + 1).into_iter()
+        start_page..end_page + 1
     }

     /// Get a (possibly dangling) direct pointer to the current write position
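
Since free_pages now borrows a slice, the reverse-and-pop batching above can be read on its own. Below is a minimal standalone sketch of the same grouping loop, assuming a hypothetical free_batch callback in place of the real page-freeing call; for the input [1, 2, 3, 7, 8] it produces the batches [1, 2, 3] and [7, 8].

    // Standalone sketch of the adjacent-index batching loop shown in the diff.
    // free_batch is a hypothetical stand-in for the actual page-freeing syscall.
    // Assumes page_idxs is sorted in ascending order, as in the original caller.
    fn free_pages_sketch(page_idxs: &[usize], mut free_batch: impl FnMut(&[usize])) {
        let mut page_idxs = page_idxs.iter().rev().collect::<Vec<_>>();
        while let Some(&page_idx) = page_idxs.pop() {
            // Collect a run of consecutive page indexes into one batch
            let mut batch_idxs = vec![page_idx];
            while page_idxs.last() == Some(&&(*batch_idxs.last().unwrap() + 1)) {
                batch_idxs.push(*page_idxs.pop().unwrap());
            }
            free_batch(&batch_idxs);
        }
    }

    fn main() {
        free_pages_sketch(&[1, 2, 3, 7, 8], |batch| println!("{batch:?}"));
    }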

yjit/src/backend/ir.rs

Lines changed: 9 additions & 7 deletions
@@ -186,7 +186,7 @@ impl Opnd

     /// Maps the indices from a previous list of instructions to a new list of
     /// instructions.
-    pub fn map_index(self, indices: &Vec<usize>) -> Opnd {
+    pub fn map_index(self, indices: &[usize]) -> Opnd {
         match self {
             Opnd::InsnOut { idx, num_bits } => {
                 Opnd::InsnOut { idx: indices[idx], num_bits }
@@ -1266,11 +1266,11 @@ impl Assembler

     /// Spill all live registers to the stack
     pub fn spill_regs(&mut self) {
-        self.spill_regs_except(&vec![]);
+        self.spill_regs_except(&[]);
     }

     /// Spill all live registers except `ignored_temps` to the stack
-    pub fn spill_regs_except(&mut self, ignored_temps: &Vec<RegOpnd>) {
+    pub fn spill_regs_except(&mut self, ignored_temps: &[RegOpnd]) {
         // Forget registers above the stack top
         let mut reg_mapping = self.ctx.get_reg_mapping();
         for stack_idx in self.ctx.get_stack_size()..MAX_CTX_TEMPS as u8 {
@@ -1348,6 +1348,7 @@ impl Assembler

     // Shuffle register moves, sometimes adding extra moves using SCRATCH_REG,
     // so that they will not rewrite each other before they are used.
+    #[allow(clippy::ptr_arg)]
     pub fn reorder_reg_moves(old_moves: &Vec<(Reg, Opnd)>) -> Vec<(Reg, Opnd)> {
         // Return the index of a move whose destination is not used as a source if any.
         fn find_safe_move(moves: &Vec<(Reg, Opnd)>) -> Option<usize> {
@@ -1385,6 +1386,7 @@ impl Assembler
     /// Sets the out field on the various instructions that require allocated
     /// registers because their output is used as the operand on a subsequent
     /// instruction. This is our implementation of the linear scan algorithm.
+    #[allow(clippy::useless_conversion)]
     pub(super) fn alloc_regs(mut self, regs: Vec<Reg>) -> Assembler
     {
         //dbg!(&self);
@@ -1394,7 +1396,7 @@

         // Mutate the pool bitmap to indicate that the register at that index
         // has been allocated and is live.
-        fn alloc_reg(pool: &mut u32, regs: &Vec<Reg>) -> Option<Reg> {
+        fn alloc_reg(pool: &mut u32, regs: &[Reg]) -> Option<Reg> {
            for (index, reg) in regs.iter().enumerate() {
                if (*pool & (1 << index)) == 0 {
                    *pool |= 1 << index;
@@ -1405,7 +1407,7 @@
        }

        // Allocate a specific register
-       fn take_reg(pool: &mut u32, regs: &Vec<Reg>, reg: &Reg) -> Reg {
+       fn take_reg(pool: &mut u32, regs: &[Reg], reg: &Reg) -> Reg {
            let reg_index = regs.iter().position(|elem| elem.reg_no == reg.reg_no);

            if let Some(reg_index) = reg_index {
@@ -1419,7 +1421,7 @@
        // Mutate the pool bitmap to indicate that the given register is being
        // returned as it is no longer used by the instruction that previously
        // held it.
-       fn dealloc_reg(pool: &mut u32, regs: &Vec<Reg>, reg: &Reg) {
+       fn dealloc_reg(pool: &mut u32, regs: &[Reg], reg: &Reg) {
           let reg_index = regs.iter().position(|elem| elem.reg_no == reg.reg_no);

           if let Some(reg_index) = reg_index {
@@ -1791,7 +1793,7 @@ impl Assembler {
     }

     /// Let vm_check_canary() assert the leafness of this ccall if leaf_ccall is set
-    fn set_stack_canary(&mut self, opnds: &Vec<Opnd>) -> Option<Opnd> {
+    fn set_stack_canary(&mut self, opnds: &[Opnd]) -> Option<Opnd> {
         // Use the slot right above the stack top for verifying leafness.
         let canary_opnd = self.stack_opnd(-1);

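
Note the #[allow(clippy::ptr_arg)] added to reorder_reg_moves: because ptr_arg is set to deny rather than forbid in the workspace table, a function that keeps a &Vec parameter on purpose can still opt out locally. A minimal sketch of that escape hatch, with a hypothetical shuffle function standing in for reorder_reg_moves:

    // Sketch of locally opting out of a lint the workspace denies.
    // shuffle is a hypothetical stand-in, not code from the repository.
    #[allow(clippy::ptr_arg)]
    fn shuffle(moves: &Vec<(u8, u8)>) -> Vec<(u8, u8)> {
        // Keep &Vec here on purpose, e.g. because internal helpers also want it.
        moves.iter().rev().copied().collect()
    }

    fn main() {
        assert_eq!(shuffle(&vec![(0, 1), (2, 3)]), vec![(2, 3), (0, 1)]);
    }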

yjit/src/codegen.rs

Lines changed: 4 additions & 3 deletions
@@ -43,6 +43,7 @@ type InsnGenFn = fn(

 /// Ephemeral code generation state.
 /// Represents a [crate::core::Block] while we build it.
+#[allow(clippy::type_complexity)]
 pub struct JITState<'a> {
     /// Instruction sequence for the compiling block
     pub iseq: IseqPtr,
@@ -1178,14 +1179,14 @@ pub fn gen_entry_reg_mapping(asm: &mut Assembler, blockid: BlockId, stack_size:
     // Find an existing callee block. If it's not found or uses no register, skip loading registers.
     let mut ctx = Context::default();
     ctx.set_stack_size(stack_size);
-    let reg_mapping = find_most_compatible_reg_mapping(blockid, &ctx).unwrap_or(RegMapping::default());
+    let reg_mapping = find_most_compatible_reg_mapping(blockid, &ctx).unwrap_or_default();
     if reg_mapping == RegMapping::default() {
         return reg_mapping;
     }

     // If found, load the same registers to reuse the block.
     asm_comment!(asm, "reuse maps: {:?}", reg_mapping);
-    let local_table_size: u32 = unsafe { get_iseq_body_local_table_size(blockid.iseq) }.try_into().unwrap();
+    let local_table_size: u32 = unsafe { get_iseq_body_local_table_size(blockid.iseq) };
     for &reg_opnd in reg_mapping.get_reg_opnds().iter() {
         match reg_opnd {
             RegOpnd::Local(local_idx) => {
@@ -7708,7 +7709,7 @@ fn gen_send_iseq(
         // runtime guards later in copy_splat_args_for_rest_callee()
         if !iseq_has_rest {
             let supplying = argc - 1 - i32::from(kw_splat) + array_length as i32;
-            if (required_num..=required_num + opt_num).contains(&supplying) == false {
+            if !(required_num..=required_num + opt_num).contains(&supplying) {
                 gen_counter_incr(jit, asm, Counter::send_iseq_splat_arity_error);
                 return None;
             }
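
Two of the smaller fixes above are pure lint-driven cleanups: the manual unwrap_or(RegMapping::default()) becomes unwrap_or_default(), and the comparison against false becomes a plain negation. A tiny sketch of both idioms, with a hypothetical Mapping type standing in for RegMapping:

    // Mapping is a hypothetical stand-in for RegMapping, only for illustration.
    #[derive(Default, Debug, PartialEq)]
    struct Mapping(u32);

    fn main() {
        // unwrap_or(Mapping::default()) would trip clippy's unwrap_or_default lint.
        let found: Option<Mapping> = None;
        assert_eq!(found.unwrap_or_default(), Mapping(0));

        // `contains(..) == false` reads better as a negation.
        let required = 1..=3;
        let supplying = 5;
        assert!(!required.contains(&supplying));
    }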

yjit/src/core.rs

Lines changed: 7 additions & 7 deletions
@@ -1099,7 +1099,7 @@ impl Context {
             MapToLocal(local_idx) => {
                 bits.push_op(CtxOp::MapTempLocal);
                 bits.push_u3(stack_idx as u8);
-                bits.push_u3(local_idx as u8);
+                bits.push_u3(local_idx);
             }

             MapToSelf => {
@@ -1170,19 +1170,19 @@

         match op {
             CtxOp::SetSelfType => {
-                ctx.self_type = unsafe { transmute(bits.read_u4(&mut idx)) };
+                ctx.self_type = unsafe { transmute::<u8, Type>(bits.read_u4(&mut idx)) };
             }

             CtxOp::SetLocalType => {
                 let local_idx = bits.read_u3(&mut idx) as usize;
-                let t = unsafe { transmute(bits.read_u4(&mut idx)) };
+                let t = unsafe { transmute::<u8, Type>(bits.read_u4(&mut idx)) };
                 ctx.set_local_type(local_idx, t);
             }

             // Map temp to stack (known type)
             CtxOp::SetTempType => {
                 let temp_idx = bits.read_u3(&mut idx) as usize;
-                let temp_type = unsafe { transmute(bits.read_u4(&mut idx)) };
+                let temp_type = unsafe { transmute::<u8, Type>(bits.read_u4(&mut idx)) };
                 ctx.set_temp_mapping(temp_idx, TempMapping::MapToStack(temp_type));
             }

@@ -2294,7 +2294,7 @@ pub fn limit_block_versions(blockid: BlockId, ctx: &Context) -> Context {
     let generic_ctx = ctx.get_generic_ctx();

     if cfg!(debug_assertions) {
-        let mut ctx = ctx.clone();
+        let mut ctx = *ctx;
         if ctx.inline() {
             // Suppress TypeDiff::Incompatible from ctx.diff(). We return TypeDiff::Incompatible
             // to keep inlining blocks until we hit the limit, but it's safe to give up inlining.
@@ -2920,7 +2920,7 @@ impl Context {
         }

         // Prepare a Context with the same registers
-        let mut dst_with_same_regs = dst.clone();
+        let mut dst_with_same_regs = *dst;
         dst_with_same_regs.set_reg_mapping(self.get_reg_mapping());

         // Diff registers and other stuff separately, and merge them
@@ -3823,7 +3823,7 @@ pub fn gen_branch_stub_hit_trampoline(ocb: &mut OutlinedCb) -> Option<CodePtr> {
 }

 /// Return registers to be pushed and popped on branch_stub_hit.
-pub fn caller_saved_temp_regs() -> impl Iterator<Item = &'static Reg> + DoubleEndedIterator {
+pub fn caller_saved_temp_regs() -> impl DoubleEndedIterator<Item = &'static Reg> {
     let temp_regs = Assembler::get_temp_regs().iter();
     let len = temp_regs.len();
     // The return value gen_leave() leaves in C_RET_REG
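
The core.rs changes follow the same theme: transmute calls now spell out the source and destination types, the Copy Context values are copied with * instead of clone(), and the iterator return type names DoubleEndedIterator directly. Here is a minimal sketch of the explicit transmute style, using a hypothetical repr(u8) enum Kind in place of Type:

    // Kind is a hypothetical repr(u8) enum standing in for yjit's Type.
    // Writing transmute::<u8, Kind> keeps both types visible at the call site.
    #[repr(u8)]
    #[derive(Debug, PartialEq)]
    enum Kind {
        Nil = 0,
        Fixnum = 1,
    }

    fn main() {
        // SAFETY: both values are valid discriminants of Kind.
        assert_eq!(unsafe { std::mem::transmute::<u8, Kind>(0) }, Kind::Nil);
        assert_eq!(unsafe { std::mem::transmute::<u8, Kind>(1) }, Kind::Fixnum);
    }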

yjit/src/cruby.rs

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ pub type size_t = u64;
 pub type RedefinitionFlag = u32;

 #[allow(dead_code)]
-#[allow(clippy::all)]
+#[allow(clippy::complexity)]
 mod autogened {
     use super::*;
     // Textually include output from rust-bindgen as suggested by its user guide.

yjit/src/lib.rs

Lines changed: 0 additions & 5 deletions
@@ -1,8 +1,3 @@
-// Clippy disagreements
-#![allow(clippy::style)] // We are laid back about style
-#![allow(clippy::too_many_arguments)] // :shrug:
-#![allow(clippy::identity_op)] // Sometimes we do it for style
-
 // TODO(alan): This lint is right -- the way we use `static mut` is UB happy. We have many globals
 // and take `&mut` frequently, sometimes with a method that easily allows calling it twice.
 //

yjit/src/utils.rs

Lines changed: 1 addition & 4 deletions
@@ -92,10 +92,7 @@ pub fn ruby_str_to_rust(v: VALUE) -> String {
     let str_ptr = unsafe { rb_RSTRING_PTR(v) } as *mut u8;
     let str_len: usize = unsafe { rb_RSTRING_LEN(v) }.try_into().unwrap();
     let str_slice: &[u8] = unsafe { slice::from_raw_parts(str_ptr, str_len) };
-    match String::from_utf8(str_slice.to_vec()) {
-        Ok(utf8) => utf8,
-        Err(_) => String::new(),
-    }
+    String::from_utf8(str_slice.to_vec()).unwrap_or_default()
 }

 // Location is the file defining the method, colon, method name.
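
The rewritten ruby_str_to_rust keeps its old behavior: invalid UTF-8 still falls back to an empty String, now via unwrap_or_default(). A self-contained sketch of that fallback:

    // from_utf8 returns Result<String, FromUtf8Error>; unwrap_or_default()
    // yields the empty String on invalid UTF-8, matching the removed match arm.
    fn main() {
        let good = String::from_utf8(b"yjit".to_vec()).unwrap_or_default();
        let bad = String::from_utf8(vec![0xff, 0xfe]).unwrap_or_default();
        assert_eq!(good, "yjit");
        assert_eq!(bad, "");
    }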

zjit/Cargo.toml

Lines changed: 3 additions & 0 deletions
@@ -19,3 +19,6 @@ expect-test = "1.5.1"
 # Support --yjit-dump-disasm and RubyVM::YJIT.disasm using libcapstone.
 disasm = ["capstone"]
 runtime_checks = []
+
+[lints]
+workspace = true
