YJIT: Adopt Clippy suggestions we like
This adopts most suggestions that rust-clippy is confident enough to auto apply. The manual changes mostly fix manual if-lets and take opportunities to use the `Default` trait on standard collections.

Co-authored-by: Kevin Newton <kddnewton@gmail.com>
Co-authored-by: Maxime Chevalier-Boisvert <maxime.chevalierboisvert@shopify.com>
This commit is contained in:
parent f8e4488e5e
commit fead7107ab
Notes (git): 2022-04-30 04:04:18 +09:00
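As a rough illustration of the two manual patterns the commit message mentions, the before/after shapes look roughly like this (a minimal standalone sketch with assumed toy types and a hypothetical `lookup` helper, not YJIT's own code):

    use std::collections::{HashMap, HashSet};

    // Hypothetical helper used only for this illustration.
    fn lookup(key: u32) -> Option<u32> {
        Some(key + 1)
    }

    fn record(map: &mut HashMap<u32, HashSet<u32>>, key: u32) {
        // Before: a manual if-let spelled with is_some()/unwrap(),
        // and a default value built by hand for the map entry.
        let found = lookup(key);
        if found.is_some() {
            let value = found.unwrap();
            map.entry(key).or_insert(HashSet::new()).insert(value);
        }

        // After: if-let plus Entry::or_default(), the shapes clippy prefers.
        if let Some(value) = lookup(key) {
            map.entry(key).or_default().insert(value);
        }
    }

    fn main() {
        let mut map = HashMap::new();
        record(&mut map, 7);
        assert!(map[&7].contains(&8));
    }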
@@ -225,7 +225,7 @@ impl CodeBlock {
     // Get a direct pointer into the executable memory block
     pub fn get_ptr(&self, offset: usize) -> CodePtr {
         unsafe {
-            let ptr = self.mem_block.offset(offset as isize);
+            let ptr = self.mem_block.add(offset);
             CodePtr(ptr)
         }
     }
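For context on the hunk above, `add` is the unsigned-offset counterpart of `offset` on raw pointers, so the `as isize` cast disappears (presumably clippy's ptr_offset_with_cast lint). A minimal standalone sketch, not YJIT code:

    fn main() {
        let bytes = [10u8, 20, 30, 40];
        let base: *const u8 = bytes.as_ptr();
        let n: usize = 2;

        // add(n) is defined as offset(n as isize) for raw pointers,
        // so both expressions point at the same element.
        unsafe {
            assert_eq!(*base.offset(n as isize), *base.add(n));
            assert_eq!(*base.add(n), 30);
        }
    }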
@@ -1295,12 +1295,12 @@ pub fn sub(cb: &mut CodeBlock, opnd0: X86Opnd, opnd1: X86Opnd) {
 fn resize_opnd(opnd: X86Opnd, num_bits: u8) -> X86Opnd {
     match opnd {
         X86Opnd::Reg(reg) => {
-            let mut cloned = reg.clone();
+            let mut cloned = reg;
             cloned.num_bits = num_bits;
             X86Opnd::Reg(cloned)
         },
         X86Opnd::Mem(mem) => {
-            let mut cloned = mem.clone();
+            let mut cloned = mem;
             cloned.num_bits = num_bits;
             X86Opnd::Mem(cloned)
         },
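The `clone()`-to-plain-copy changes here (and the later `*ctx` ones) rely on the types involved being plain-data `Copy` types. A minimal sketch of that lint (clippy::clone_on_copy) with an assumed toy operand type, not YJIT's:

    // Toy stand-in for an operand type; assumed to be Copy like the real ones.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Operand {
        num_bits: u8,
    }

    fn resize(opnd: Operand, num_bits: u8) -> Operand {
        // Before: `let mut resized = opnd.clone();` — clippy flags .clone()
        // on a Copy type; a plain copy is identical.
        let mut resized = opnd;
        resized.num_bits = num_bits;
        resized
    }

    fn main() {
        let op = Operand { num_bits: 64 };
        assert_eq!(resize(op, 32), Operand { num_bits: 32 });
        assert_eq!(op.num_bits, 64); // still usable: copied, not moved
    }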
@@ -222,7 +222,7 @@ fn test_mov_unsigned() {
 
     // MOV RAX, imm64, will not move down into EAX since it does not fit into 32 bits
     check_bytes("48b80000000001000000", |cb| mov(cb, RAX, uimm_opnd(u32::MAX as u64 + 1)));
-    check_bytes("48b8ffffffffffffffff", |cb| mov(cb, RAX, uimm_opnd(u64::MAX.into())));
+    check_bytes("48b8ffffffffffffffff", |cb| mov(cb, RAX, uimm_opnd(u64::MAX)));
     check_bytes("49b8ffffffffffffffff", |cb| mov(cb, R8, uimm_opnd(u64::MAX)));
 
     // MOV r8, imm8
@@ -663,7 +663,7 @@ fn jump_to_next_insn(
 ) {
     // Reset the depth since in current usages we only ever jump to to
     // chain_depth > 0 from the same instruction.
-    let mut reset_depth = current_context.clone();
+    let mut reset_depth = *current_context;
     reset_depth.reset_chain_depth();
 
     let jump_block = BlockId {
@@ -1808,7 +1808,7 @@ fn jit_chain_guard(
     };
 
     if (ctx.get_chain_depth() as i32) < depth_limit {
-        let mut deeper = ctx.clone();
+        let mut deeper = *ctx;
         deeper.increment_chain_depth();
         let bid = BlockId {
             iseq: jit.iseq,
@@ -1881,7 +1881,7 @@ fn gen_get_ivar(
     side_exit: CodePtr,
 ) -> CodegenStatus {
     let comptime_val_klass = comptime_receiver.class_of();
-    let starting_context = ctx.clone(); // make a copy for use with jit_chain_guard
+    let starting_context = *ctx; // make a copy for use with jit_chain_guard
 
     // Check if the comptime class uses a custom allocator
     let custom_allocator = unsafe { rb_get_alloc_func(comptime_val_klass) };
@@ -2008,7 +2008,7 @@ fn gen_get_ivar(
     );
 
     // Check that the extended table is big enough
-    if ivar_index >= ROBJECT_EMBED_LEN_MAX + 1 {
+    if ivar_index > ROBJECT_EMBED_LEN_MAX {
         // Check that the slot is inside the extended table (num_slots > index)
         let num_slots = mem_opnd(32, REG0, RUBY_OFFSET_ROBJECT_AS_HEAP_NUMIV);
 
@@ -2552,7 +2552,7 @@ fn gen_opt_aref(
     }
 
     // Remember the context on entry for adding guard chains
-    let starting_context = ctx.clone();
+    let starting_context = *ctx;
 
     // Specialize base on compile time values
     let comptime_idx = jit_peek_at_stack(jit, ctx, 0);
@@ -3747,8 +3747,7 @@ fn gen_send_cfunc(
     // Delegate to codegen for C methods if we have it.
     if kw_arg.is_null() {
         let codegen_p = lookup_cfunc_codegen(unsafe { (*cme).def });
-        if codegen_p.is_some() {
-            let known_cfunc_codegen = codegen_p.unwrap();
+        if let Some(known_cfunc_codegen) = codegen_p {
             if known_cfunc_codegen(jit, ctx, cb, ocb, ci, cme, block, argc, recv_known_klass) {
                 // cfunc codegen generated code. Terminate the block so
                 // there isn't multiple calls in the same block.
@@ -4323,9 +4322,7 @@ fn gen_send_iseq(
                 // Next we're going to do some bookkeeping on our end so
                 // that we know the order that the arguments are
                 // actually in now.
-                let tmp = caller_kwargs[kwarg_idx];
-                caller_kwargs[kwarg_idx] = caller_kwargs[swap_idx];
-                caller_kwargs[swap_idx] = tmp;
+                caller_kwargs.swap(kwarg_idx, swap_idx);
 
                 break;
             }
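The three-statement temporary swap above collapses into `<[T]>::swap`, which exchanges the two elements in place. A standalone sketch with made-up values:

    fn main() {
        let mut caller_kwargs = [10, 20, 30, 40];
        let (kwarg_idx, swap_idx) = (1, 3);

        // Equivalent to: let tmp = a[i]; a[i] = a[j]; a[j] = tmp;
        caller_kwargs.swap(kwarg_idx, swap_idx);

        assert_eq!(caller_kwargs, [10, 40, 30, 20]);
    }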
@@ -4465,7 +4462,7 @@ fn gen_send_iseq(
     // Pop arguments and receiver in return context, push the return value
     // After the return, sp_offset will be 1. The codegen for leave writes
     // the return value in case of JIT-to-JIT return.
-    let mut return_ctx = ctx.clone();
+    let mut return_ctx = *ctx;
     return_ctx.stack_pop((argc + 1).try_into().unwrap());
     return_ctx.stack_push(Type::Unknown);
     return_ctx.set_sp_offset(1);
@@ -402,7 +402,7 @@ impl IseqPayload {
         let version_map = mem::take(&mut self.version_map);
 
         // Turn it into an iterator that owns the blocks and return
-        version_map.into_iter().flat_map(|versions| versions)
+        version_map.into_iter().flatten()
     }
 }
 
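`flat_map(|versions| versions)` is an identity flat_map, so clippy suggests `flatten()`; both produce the same owning iterator. A standalone sketch with made-up data:

    fn main() {
        let version_map: Vec<Vec<u32>> = vec![vec![1, 2], vec![], vec![3]];

        // The identity closure and flatten() yield identical sequences.
        let a: Vec<u32> = version_map.clone().into_iter().flat_map(|versions| versions).collect();
        let b: Vec<u32> = version_map.into_iter().flatten().collect();

        assert_eq!(a, b);
        assert_eq!(b, vec![1, 2, 3]);
    }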
@@ -1718,8 +1718,8 @@ pub fn gen_branch(
 
     // Get the branch targets or stubs
     let dst_addr0 = get_branch_target(target0, ctx0, &branchref, 0, ocb);
-    let dst_addr1 = if ctx1.is_some() {
-        get_branch_target(target1.unwrap(), ctx1.unwrap(), &branchref, 1, ocb)
+    let dst_addr1 = if let Some(ctx) = ctx1 {
+        get_branch_target(target1.unwrap(), ctx, &branchref, 1, ocb)
     } else {
         None
     };
@@ -1733,8 +1733,8 @@ pub fn gen_branch(
     branch.targets[0] = Some(target0);
     branch.targets[1] = target1;
     branch.target_ctxs[0] = *ctx0;
-    branch.target_ctxs[1] = if ctx1.is_some() {
-        *ctx1.unwrap()
+    branch.target_ctxs[1] = if let Some(&ctx) = ctx1 {
+        ctx
     } else {
         Context::default()
     };
@@ -1803,12 +1803,11 @@ pub fn defer_compilation(
         panic!("Double defer!");
     }
 
-    let mut next_ctx = cur_ctx.clone();
-
-    if next_ctx.chain_depth >= u8::MAX {
+    let mut next_ctx = *cur_ctx;
+
+    if next_ctx.chain_depth == u8::MAX {
         panic!("max block version chain depth reached!");
     }
 
     next_ctx.chain_depth += 1;
 
     let block_rc = jit.get_block();
@@ -101,12 +101,12 @@ pub fn assume_bop_not_redefined(
     invariants
         .basic_operator_blocks
         .entry((klass, bop))
-        .or_insert(HashSet::new())
+        .or_default()
         .insert(jit.get_block());
     invariants
         .block_basic_operators
         .entry(jit.get_block())
-        .or_insert(HashSet::new())
+        .or_default()
         .insert((klass, bop));
 
     return true;
@@ -142,17 +142,17 @@ pub fn assume_method_lookup_stable(
     Invariants::get_instance()
         .cme_validity
         .entry(callee_cme)
-        .or_insert(HashSet::new())
+        .or_default()
         .insert(block.clone());
 
     let mid = unsafe { (*callee_cme).called_id };
     Invariants::get_instance()
         .method_lookup
         .entry(receiver_klass)
-        .or_insert(HashMap::new())
+        .or_default()
         .entry(mid)
-        .or_insert(HashSet::new())
-        .insert(block.clone());
+        .or_default()
+        .insert(block);
 }
 
 /// Tracks that a block is assuming it is operating in single-ractor mode.
@@ -198,12 +198,12 @@ pub fn assume_stable_constant_names(jit: &mut JITState, ocb: &mut OutlinedCb) {
     invariants
         .constant_state_blocks
         .entry(id)
-        .or_insert(HashSet::new())
+        .or_default()
         .insert(jit.get_block());
     invariants
         .block_constant_states
         .entry(jit.get_block())
-        .or_insert(HashSet::new())
+        .or_default()
         .insert(id);
 }
 
@@ -239,15 +239,15 @@ pub extern "C" fn rb_yjit_bop_redefined(klass: RedefinitionFlag, bop: ruby_basic
     with_vm_lock(src_loc!(), || {
         // Loop through the blocks that are associated with this class and basic
        // operator and invalidate them.
-        Invariants::get_instance()
+        if let Some(blocks) = Invariants::get_instance()
             .basic_operator_blocks
             .remove(&(klass, bop))
-            .map(|blocks| {
-                for block in blocks.iter() {
-                    invalidate_block_version(block);
-                    incr_counter!(invalidate_bop_redefined);
-                }
-            });
+        {
+            for block in blocks.iter() {
+                invalidate_block_version(block);
+                incr_counter!(invalidate_bop_redefined);
+            }
+        }
     });
 }
 
@@ -3,7 +3,11 @@
 #![allow(dead_code)]
 #![allow(unused_assignments)]
 #![allow(unused_macros)]
 
+// Clippy disagreements
+#![allow(clippy::style)] // We are laid back about style
+#![allow(clippy::too_many_arguments)] // :shrug:
+#![allow(clippy::identity_op)] // Sometimes we do it for style
+
 mod asm;
 mod codegen;
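These inner attributes silence the named lints crate-wide; the same thing can also be done per item where the style is intentional. A small sketch of the `identity_op` case with a toy encoding function (assumed for illustration, not YJIT's):

    // Item-level counterpart of the crate-wide #![allow(clippy::identity_op)] above.
    #[allow(clippy::identity_op)]
    fn encoded() -> u8 {
        // The `* 1` and `+ 0` terms keep the fields visually aligned;
        // clippy::identity_op would flag them without the allow.
        (0x40 * 1) + (1 << 3) + 0
    }

    fn main() {
        assert_eq!(encoded(), 0x48);
    }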
@@ -70,7 +70,7 @@ pub fn parse_option(str_ptr: *const std::os::raw::c_char) -> Option<()> {
 
     // Split the option name and value strings
     // Note that some options do not contain an assignment
-    let parts = opt_str.split_once("=");
+    let parts = opt_str.split_once('=');
     let (opt_name, opt_val) = match parts {
         Some((before_eq, after_eq)) => (before_eq, after_eq),
         None => (opt_str, ""),