YJIT: Adopt Clippy suggestions we like

This adopts most suggestions that rust-clippy is confident enough to
auto-apply. The manual changes mostly fix manual if-lets and take
opportunities to use the `Default` trait on standard collections.

Co-authored-by: Kevin Newton <kddnewton@gmail.com>
Co-authored-by: Maxime Chevalier-Boisvert <maxime.chevalierboisvert@shopify.com>
This commit is contained in:
Alan Wu 2022-04-27 14:08:52 -04:00
parent f8e4488e5e
commit fead7107ab
Notes: git 2022-04-30 04:04:18 +09:00
8 changed files with 39 additions and 39 deletions

View File

@ -225,7 +225,7 @@ impl CodeBlock {
// Get a direct pointer into the executable memory block // Get a direct pointer into the executable memory block
pub fn get_ptr(&self, offset: usize) -> CodePtr { pub fn get_ptr(&self, offset: usize) -> CodePtr {
unsafe { unsafe {
let ptr = self.mem_block.offset(offset as isize); let ptr = self.mem_block.add(offset);
CodePtr(ptr) CodePtr(ptr)
} }
} }

View File

@ -1295,12 +1295,12 @@ pub fn sub(cb: &mut CodeBlock, opnd0: X86Opnd, opnd1: X86Opnd) {
fn resize_opnd(opnd: X86Opnd, num_bits: u8) -> X86Opnd { fn resize_opnd(opnd: X86Opnd, num_bits: u8) -> X86Opnd {
match opnd { match opnd {
X86Opnd::Reg(reg) => { X86Opnd::Reg(reg) => {
let mut cloned = reg.clone(); let mut cloned = reg;
cloned.num_bits = num_bits; cloned.num_bits = num_bits;
X86Opnd::Reg(cloned) X86Opnd::Reg(cloned)
}, },
X86Opnd::Mem(mem) => { X86Opnd::Mem(mem) => {
let mut cloned = mem.clone(); let mut cloned = mem;
cloned.num_bits = num_bits; cloned.num_bits = num_bits;
X86Opnd::Mem(cloned) X86Opnd::Mem(cloned)
}, },

View File

@ -222,7 +222,7 @@ fn test_mov_unsigned() {
// MOV RAX, imm64, will not move down into EAX since it does not fit into 32 bits // MOV RAX, imm64, will not move down into EAX since it does not fit into 32 bits
check_bytes("48b80000000001000000", |cb| mov(cb, RAX, uimm_opnd(u32::MAX as u64 + 1))); check_bytes("48b80000000001000000", |cb| mov(cb, RAX, uimm_opnd(u32::MAX as u64 + 1)));
check_bytes("48b8ffffffffffffffff", |cb| mov(cb, RAX, uimm_opnd(u64::MAX.into()))); check_bytes("48b8ffffffffffffffff", |cb| mov(cb, RAX, uimm_opnd(u64::MAX)));
check_bytes("49b8ffffffffffffffff", |cb| mov(cb, R8, uimm_opnd(u64::MAX))); check_bytes("49b8ffffffffffffffff", |cb| mov(cb, R8, uimm_opnd(u64::MAX)));
// MOV r8, imm8 // MOV r8, imm8

View File

@ -663,7 +663,7 @@ fn jump_to_next_insn(
) { ) {
// Reset the depth since in current usages we only ever jump to to // Reset the depth since in current usages we only ever jump to to
// chain_depth > 0 from the same instruction. // chain_depth > 0 from the same instruction.
let mut reset_depth = current_context.clone(); let mut reset_depth = *current_context;
reset_depth.reset_chain_depth(); reset_depth.reset_chain_depth();
let jump_block = BlockId { let jump_block = BlockId {
@ -1808,7 +1808,7 @@ fn jit_chain_guard(
}; };
if (ctx.get_chain_depth() as i32) < depth_limit { if (ctx.get_chain_depth() as i32) < depth_limit {
let mut deeper = ctx.clone(); let mut deeper = *ctx;
deeper.increment_chain_depth(); deeper.increment_chain_depth();
let bid = BlockId { let bid = BlockId {
iseq: jit.iseq, iseq: jit.iseq,
@ -1881,7 +1881,7 @@ fn gen_get_ivar(
side_exit: CodePtr, side_exit: CodePtr,
) -> CodegenStatus { ) -> CodegenStatus {
let comptime_val_klass = comptime_receiver.class_of(); let comptime_val_klass = comptime_receiver.class_of();
let starting_context = ctx.clone(); // make a copy for use with jit_chain_guard let starting_context = *ctx; // make a copy for use with jit_chain_guard
// Check if the comptime class uses a custom allocator // Check if the comptime class uses a custom allocator
let custom_allocator = unsafe { rb_get_alloc_func(comptime_val_klass) }; let custom_allocator = unsafe { rb_get_alloc_func(comptime_val_klass) };
@ -2008,7 +2008,7 @@ fn gen_get_ivar(
); );
// Check that the extended table is big enough // Check that the extended table is big enough
if ivar_index >= ROBJECT_EMBED_LEN_MAX + 1 { if ivar_index > ROBJECT_EMBED_LEN_MAX {
// Check that the slot is inside the extended table (num_slots > index) // Check that the slot is inside the extended table (num_slots > index)
let num_slots = mem_opnd(32, REG0, RUBY_OFFSET_ROBJECT_AS_HEAP_NUMIV); let num_slots = mem_opnd(32, REG0, RUBY_OFFSET_ROBJECT_AS_HEAP_NUMIV);
@ -2552,7 +2552,7 @@ fn gen_opt_aref(
} }
// Remember the context on entry for adding guard chains // Remember the context on entry for adding guard chains
let starting_context = ctx.clone(); let starting_context = *ctx;
// Specialize base on compile time values // Specialize base on compile time values
let comptime_idx = jit_peek_at_stack(jit, ctx, 0); let comptime_idx = jit_peek_at_stack(jit, ctx, 0);
@ -3747,8 +3747,7 @@ fn gen_send_cfunc(
// Delegate to codegen for C methods if we have it. // Delegate to codegen for C methods if we have it.
if kw_arg.is_null() { if kw_arg.is_null() {
let codegen_p = lookup_cfunc_codegen(unsafe { (*cme).def }); let codegen_p = lookup_cfunc_codegen(unsafe { (*cme).def });
if codegen_p.is_some() { if let Some(known_cfunc_codegen) = codegen_p {
let known_cfunc_codegen = codegen_p.unwrap();
if known_cfunc_codegen(jit, ctx, cb, ocb, ci, cme, block, argc, recv_known_klass) { if known_cfunc_codegen(jit, ctx, cb, ocb, ci, cme, block, argc, recv_known_klass) {
// cfunc codegen generated code. Terminate the block so // cfunc codegen generated code. Terminate the block so
// there isn't multiple calls in the same block. // there isn't multiple calls in the same block.
@ -4323,9 +4322,7 @@ fn gen_send_iseq(
// Next we're going to do some bookkeeping on our end so // Next we're going to do some bookkeeping on our end so
// that we know the order that the arguments are // that we know the order that the arguments are
// actually in now. // actually in now.
let tmp = caller_kwargs[kwarg_idx]; caller_kwargs.swap(kwarg_idx, swap_idx);
caller_kwargs[kwarg_idx] = caller_kwargs[swap_idx];
caller_kwargs[swap_idx] = tmp;
break; break;
} }
@ -4465,7 +4462,7 @@ fn gen_send_iseq(
// Pop arguments and receiver in return context, push the return value // Pop arguments and receiver in return context, push the return value
// After the return, sp_offset will be 1. The codegen for leave writes // After the return, sp_offset will be 1. The codegen for leave writes
// the return value in case of JIT-to-JIT return. // the return value in case of JIT-to-JIT return.
let mut return_ctx = ctx.clone(); let mut return_ctx = *ctx;
return_ctx.stack_pop((argc + 1).try_into().unwrap()); return_ctx.stack_pop((argc + 1).try_into().unwrap());
return_ctx.stack_push(Type::Unknown); return_ctx.stack_push(Type::Unknown);
return_ctx.set_sp_offset(1); return_ctx.set_sp_offset(1);

View File

@ -402,7 +402,7 @@ impl IseqPayload {
let version_map = mem::take(&mut self.version_map); let version_map = mem::take(&mut self.version_map);
// Turn it into an iterator that owns the blocks and return // Turn it into an iterator that owns the blocks and return
version_map.into_iter().flat_map(|versions| versions) version_map.into_iter().flatten()
} }
} }
@ -1718,8 +1718,8 @@ pub fn gen_branch(
// Get the branch targets or stubs // Get the branch targets or stubs
let dst_addr0 = get_branch_target(target0, ctx0, &branchref, 0, ocb); let dst_addr0 = get_branch_target(target0, ctx0, &branchref, 0, ocb);
let dst_addr1 = if ctx1.is_some() { let dst_addr1 = if let Some(ctx) = ctx1 {
get_branch_target(target1.unwrap(), ctx1.unwrap(), &branchref, 1, ocb) get_branch_target(target1.unwrap(), ctx, &branchref, 1, ocb)
} else { } else {
None None
}; };
@ -1733,8 +1733,8 @@ pub fn gen_branch(
branch.targets[0] = Some(target0); branch.targets[0] = Some(target0);
branch.targets[1] = target1; branch.targets[1] = target1;
branch.target_ctxs[0] = *ctx0; branch.target_ctxs[0] = *ctx0;
branch.target_ctxs[1] = if ctx1.is_some() { branch.target_ctxs[1] = if let Some(&ctx) = ctx1 {
*ctx1.unwrap() ctx
} else { } else {
Context::default() Context::default()
}; };
@ -1803,12 +1803,11 @@ pub fn defer_compilation(
panic!("Double defer!"); panic!("Double defer!");
} }
let mut next_ctx = cur_ctx.clone(); let mut next_ctx = *cur_ctx;
if next_ctx.chain_depth >= u8::MAX { if next_ctx.chain_depth == u8::MAX {
panic!("max block version chain depth reached!"); panic!("max block version chain depth reached!");
} }
next_ctx.chain_depth += 1; next_ctx.chain_depth += 1;
let block_rc = jit.get_block(); let block_rc = jit.get_block();

View File

@ -101,12 +101,12 @@ pub fn assume_bop_not_redefined(
invariants invariants
.basic_operator_blocks .basic_operator_blocks
.entry((klass, bop)) .entry((klass, bop))
.or_insert(HashSet::new()) .or_default()
.insert(jit.get_block()); .insert(jit.get_block());
invariants invariants
.block_basic_operators .block_basic_operators
.entry(jit.get_block()) .entry(jit.get_block())
.or_insert(HashSet::new()) .or_default()
.insert((klass, bop)); .insert((klass, bop));
return true; return true;
@ -142,17 +142,17 @@ pub fn assume_method_lookup_stable(
Invariants::get_instance() Invariants::get_instance()
.cme_validity .cme_validity
.entry(callee_cme) .entry(callee_cme)
.or_insert(HashSet::new()) .or_default()
.insert(block.clone()); .insert(block.clone());
let mid = unsafe { (*callee_cme).called_id }; let mid = unsafe { (*callee_cme).called_id };
Invariants::get_instance() Invariants::get_instance()
.method_lookup .method_lookup
.entry(receiver_klass) .entry(receiver_klass)
.or_insert(HashMap::new()) .or_default()
.entry(mid) .entry(mid)
.or_insert(HashSet::new()) .or_default()
.insert(block.clone()); .insert(block);
} }
/// Tracks that a block is assuming it is operating in single-ractor mode. /// Tracks that a block is assuming it is operating in single-ractor mode.
@ -198,12 +198,12 @@ pub fn assume_stable_constant_names(jit: &mut JITState, ocb: &mut OutlinedCb) {
invariants invariants
.constant_state_blocks .constant_state_blocks
.entry(id) .entry(id)
.or_insert(HashSet::new()) .or_default()
.insert(jit.get_block()); .insert(jit.get_block());
invariants invariants
.block_constant_states .block_constant_states
.entry(jit.get_block()) .entry(jit.get_block())
.or_insert(HashSet::new()) .or_default()
.insert(id); .insert(id);
} }
@ -239,15 +239,15 @@ pub extern "C" fn rb_yjit_bop_redefined(klass: RedefinitionFlag, bop: ruby_basic
with_vm_lock(src_loc!(), || { with_vm_lock(src_loc!(), || {
// Loop through the blocks that are associated with this class and basic // Loop through the blocks that are associated with this class and basic
// operator and invalidate them. // operator and invalidate them.
Invariants::get_instance() if let Some(blocks) = Invariants::get_instance()
.basic_operator_blocks .basic_operator_blocks
.remove(&(klass, bop)) .remove(&(klass, bop))
.map(|blocks| { {
for block in blocks.iter() { for block in blocks.iter() {
invalidate_block_version(block); invalidate_block_version(block);
incr_counter!(invalidate_bop_redefined); incr_counter!(invalidate_bop_redefined);
} }
}); }
}); });
} }

View File

@ -3,7 +3,11 @@
#![allow(dead_code)] #![allow(dead_code)]
#![allow(unused_assignments)] #![allow(unused_assignments)]
#![allow(unused_macros)] #![allow(unused_macros)]
// Clippy disagreements
#![allow(clippy::style)] // We are laid back about style #![allow(clippy::style)] // We are laid back about style
#![allow(clippy::too_many_arguments)] // :shrug:
#![allow(clippy::identity_op)] // Sometimes we do it for style
mod asm; mod asm;
mod codegen; mod codegen;

View File

@ -70,7 +70,7 @@ pub fn parse_option(str_ptr: *const std::os::raw::c_char) -> Option<()> {
// Split the option name and value strings // Split the option name and value strings
// Note that some options do not contain an assignment // Note that some options do not contain an assignment
let parts = opt_str.split_once("="); let parts = opt_str.split_once('=');
let (opt_name, opt_val) = match parts { let (opt_name, opt_val) = match parts {
Some((before_eq, after_eq)) => (before_eq, after_eq), Some((before_eq, after_eq)) => (before_eq, after_eq),
None => (opt_str, ""), None => (opt_str, ""),