YJIT: Allow tracing fallback counters (#11347)
* YJIT: Allow tracing fallback counters
* Update yjit.md about --yjit-trace-exits=counter

Parent: ec5436bc3a
Commit: 77ffdfe79f
Date: 2024-08-08 23:13:33 +00:00
Notes (git): Merged-By: k0kubun <takashikkbn@gmail.com>
yjit.md
@@ -179,7 +179,7 @@ YJIT supports all command-line options supported by upstream CRuby, but also add
 This can allow you to use a lower executable memory size limit, but may cause a slight drop in performance when the limit is hit.
 - `--yjit-perf`: enable frame pointers and profiling with the `perf` tool
 - `--yjit-trace-exits`: produce a Marshal dump of backtraces from all exits. Automatically enables `--yjit-stats`
-- `--yjit-trace-exits=COUNTER`: produce a Marshal dump of backtraces from specified exits. Automatically enables `--yjit-stats`
+- `--yjit-trace-exits=COUNTER`: produce a Marshal dump of backtraces from a counted exit or a fallback. Automatically enables `--yjit-stats`
 - `--yjit-trace-exits-sample-rate=N`: trace exit locations only every Nth occurrence. Automatically enables `--yjit-trace-exits`
 
 Note that there is also an environment variable `RUBY_YJIT_ENABLE` which can be used to enable YJIT.
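
For orientation, the option value this flag selects can be modeled as a small Rust enum. This is a hedged sketch inferred from the hunks below (which match against `TraceExits::All` and `TraceExits::Counter(counter)`); the variant and counter names here are stand-ins, not the actual yjit definitions.

```rust
// Illustrative only: `Counter` stands in for YJIT's generated counter enum,
// and the parsing/plumbing around it is assumed, not taken from the commit.
#[derive(PartialEq)]
enum Counter {
    SendCfuncTracing,   // a fallback counter
    ExitFromBranchStub, // a counted-exit counter
}

#[derive(PartialEq)]
enum TraceExits {
    All,              // --yjit-trace-exits
    Counter(Counter), // --yjit-trace-exits=COUNTER
}

fn main() {
    // e.g. `--yjit-trace-exits=send_cfunc_tracing` would select:
    let opt = Some(TraceExits::Counter(Counter::SendCfuncTracing));
    assert!(opt != Some(TraceExits::All));
}
```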
yjit/src/codegen.rs
@@ -460,8 +460,31 @@ pub enum JCCKinds {
     JCC_JO_MUL,
 }
 
+/// Generate code to increment a given counter. With --yjit-trace-exits=counter,
+/// the counter is traced when it's incremented by this function.
 #[inline(always)]
-fn gen_counter_incr(asm: &mut Assembler, counter: Counter) {
+fn gen_counter_incr(jit: &JITState, asm: &mut Assembler, counter: Counter) {
+    gen_counter_incr_with_pc(asm, counter, jit.pc);
+}
+
+/// Same as gen_counter_incr(), but takes PC isntead of JITState.
+#[inline(always)]
+fn gen_counter_incr_with_pc(asm: &mut Assembler, counter: Counter, pc: *mut VALUE) {
+    gen_counter_incr_without_pc(asm, counter);
+
+    // Trace a counter if --yjit-trace-exits=counter is given.
+    // TraceExits::All is handled by gen_exit().
+    if get_option!(trace_exits) == Some(TraceExits::Counter(counter)) {
+        with_caller_saved_temp_regs(asm, |asm| {
+            asm.ccall(rb_yjit_record_exit_stack as *const u8, vec![Opnd::const_ptr(pc as *const u8)]);
+        });
+    }
+}
+
+/// Generate code to increment a given counter. Not traced by --yjit-trace-exits=counter
+/// unlike gen_counter_incr() or gen_counter_incr_with_pc().
+#[inline(always)]
+fn gen_counter_incr_without_pc(asm: &mut Assembler, counter: Counter) {
     // Assert that default counters are not incremented by generated code as this would impact performance
     assert!(!DEFAULT_COUNTERS.contains(&counter), "gen_counter_incr incremented {:?}", counter);
 
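
The hunk above splits counter incrementing into three layers. Below is a self-contained model of the resulting control flow with the YJIT types mocked out; only the function names and the `TraceExits::Counter` check come from the diff, everything else is illustrative.

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Counter { ExpandarraySplat }
#[derive(PartialEq)]
enum TraceExits { All, Counter(Counter) }

struct JITState { pc: usize } // real YJIT uses *mut VALUE for pc

fn trace_exits_option() -> Option<TraceExits> {
    // stand-in for get_option!(trace_exits)
    Some(TraceExits::Counter(Counter::ExpandarraySplat))
}

fn gen_counter_incr(jit: &JITState, counter: Counter) {
    // fallbacks go through here, so they pick up jit.pc for tracing
    gen_counter_incr_with_pc(counter, jit.pc);
}

fn gen_counter_incr_with_pc(counter: Counter, pc: usize) {
    gen_counter_incr_without_pc(counter);
    // the one place the --yjit-trace-exits=counter check happens
    if trace_exits_option() == Some(TraceExits::Counter(counter)) {
        println!("would record exit stack at pc={pc:#x}");
    }
}

fn gen_counter_incr_without_pc(counter: Counter) {
    // plain increment: used by exits that have no meaningful PC,
    // e.g. exit_from_branch_stub or leave_interp_return below
    println!("increment {counter:?}");
}

fn main() {
    gen_counter_incr(&JITState { pc: 0x1234 }, Counter::ExpandarraySplat);
}
```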
@@ -723,7 +746,7 @@ fn gen_stub_exit(ocb: &mut OutlinedCb) -> Option<CodePtr> {
     let ocb = ocb.unwrap();
     let mut asm = Assembler::new_without_iseq();
 
-    gen_counter_incr(&mut asm, Counter::exit_from_branch_stub);
+    gen_counter_incr_without_pc(&mut asm, Counter::exit_from_branch_stub);
 
     asm_comment!(asm, "exit from branch stub");
     asm.cpop_into(SP);
@@ -833,15 +856,7 @@ pub fn gen_counted_exit(exit_pc: *mut VALUE, side_exit: CodePtr, ocb: &mut Outli
     let mut asm = Assembler::new_without_iseq();
 
     // Increment a counter
-    gen_counter_incr(&mut asm, counter);
-
-    // Trace a counted exit if --yjit-trace-exits=counter is given.
-    // TraceExits::All is handled by gen_exit().
-    if get_option!(trace_exits) == Some(TraceExits::CountedExit(counter)) {
-        with_caller_saved_temp_regs(&mut asm, |asm| {
-            asm.ccall(rb_yjit_record_exit_stack as *const u8, vec![Opnd::const_ptr(exit_pc as *const u8)]);
-        });
-    }
+    gen_counter_incr_with_pc(&mut asm, counter, exit_pc);
 
     // Jump to the existing side exit
     asm.jmp(Target::CodePtr(side_exit));
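
The deleted block above is the point of the refactor: the trace check that each counted exit used to open-code now lives inside `gen_counter_incr_with_pc`, so fallback counters get it too. A minimal stand-alone analog of hoisting such a check into the shared helper (all names invented for the sketch):

```rust
fn record(counter: u32, pc: usize, traced: Option<u32>) {
    // increment + optional trace in one place
    println!("counter {counter} += 1");
    if traced == Some(counter) {
        println!("trace stack at pc={pc:#x}");
    }
}

fn counted_exit(counter: u32, exit_pc: usize, traced: Option<u32>) {
    // after the refactor, callers make exactly one call:
    record(counter, exit_pc, traced);
    // ...then jump to the side exit
}

fn main() {
    counted_exit(7, 0x40, Some(7));
}
```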
@@ -901,7 +916,7 @@ fn gen_full_cfunc_return(ocb: &mut OutlinedCb) -> Option<CodePtr> {
     );
 
     // Count the exit
-    gen_counter_incr(&mut asm, Counter::traced_cfunc_return);
+    gen_counter_incr_without_pc(&mut asm, Counter::traced_cfunc_return);
 
     // Return to the interpreter
     asm.cpop_into(SP);
@@ -926,7 +941,7 @@ fn gen_leave_exit(ocb: &mut OutlinedCb) -> Option<CodePtr> {
     let ret_opnd = asm.live_reg_opnd(C_RET_OPND);
 
     // Every exit to the interpreter should be counted
-    gen_counter_incr(&mut asm, Counter::leave_interp_return);
+    gen_counter_incr_without_pc(&mut asm, Counter::leave_interp_return);
 
     asm_comment!(asm, "exit from leave");
     asm.cpop_into(SP);
@@ -952,7 +967,7 @@ fn gen_leave_exception(ocb: &mut OutlinedCb) -> Option<CodePtr> {
     let ruby_ret_val = asm.live_reg_opnd(C_RET_OPND);
 
     // Every exit to the interpreter should be counted
-    gen_counter_incr(&mut asm, Counter::leave_interp_return);
+    gen_counter_incr_without_pc(&mut asm, Counter::leave_interp_return);
 
     asm_comment!(asm, "push return value through cfp->sp");
     let cfp_sp = Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SP);
@@ -1257,7 +1272,7 @@ pub fn gen_single_block(
         // :count-placement:
         // Count bytecode instructions that execute in generated code.
         // Note that the increment happens even when the output takes side exit.
-        gen_counter_incr(&mut asm, Counter::yjit_insns_count);
+        gen_counter_incr(&jit, &mut asm, Counter::yjit_insns_count);
 
         // Lookup the codegen function for this instruction
         let mut status = None;
@@ -2118,13 +2133,13 @@ fn gen_expandarray(
 
     // If this instruction has the splat flag, then bail out.
     if flag & 0x01 != 0 {
-        gen_counter_incr(asm, Counter::expandarray_splat);
+        gen_counter_incr(jit, asm, Counter::expandarray_splat);
         return None;
     }
 
     // If this instruction has the postarg flag, then bail out.
     if flag & 0x02 != 0 {
-        gen_counter_incr(asm, Counter::expandarray_postarg);
+        gen_counter_incr(jit, asm, Counter::expandarray_postarg);
         return None;
     }
 
@@ -2146,7 +2161,7 @@ fn gen_expandarray(
 
     // if to_ary is defined, return can't compile so to_ary can be called
     if cme_def_type != VM_METHOD_TYPE_UNDEF {
-        gen_counter_incr(asm, Counter::expandarray_to_ary);
+        gen_counter_incr(jit, asm, Counter::expandarray_to_ary);
         return None;
     }
 
@@ -2720,7 +2735,7 @@ fn gen_get_ivar(
     // Use a general C call at the last chain to avoid exits on megamorphic shapes
     let megamorphic = asm.ctx.get_chain_depth() >= max_chain_depth;
     if megamorphic {
-        gen_counter_incr(asm, Counter::num_getivar_megamorphic);
+        gen_counter_incr(jit, asm, Counter::num_getivar_megamorphic);
     }
 
     // If the class uses the default allocator, instances should all be T_OBJECT
@@ -2930,7 +2945,7 @@ fn gen_set_ivar(
     // If the comptime receiver is frozen, writing an IV will raise an exception
     // and we don't want to JIT code to deal with that situation.
     if comptime_receiver.is_frozen() {
-        gen_counter_incr(asm, Counter::setivar_frozen);
+        gen_counter_incr(jit, asm, Counter::setivar_frozen);
         return None;
     }
 
@@ -2951,7 +2966,7 @@ fn gen_set_ivar(
     // Use a general C call at the last chain to avoid exits on megamorphic shapes
     let megamorphic = asm.ctx.get_chain_depth() >= SET_IVAR_MAX_DEPTH;
     if megamorphic {
-        gen_counter_incr(asm, Counter::num_setivar_megamorphic);
+        gen_counter_incr(jit, asm, Counter::num_setivar_megamorphic);
     }
 
     // Get the iv index
@@ -3649,7 +3664,7 @@ fn gen_opt_aref(
 
     // Only JIT one arg calls like `ary[6]`
     if argc != 1 {
-        gen_counter_incr(asm, Counter::opt_aref_argc_not_one);
+        gen_counter_incr(jit, asm, Counter::opt_aref_argc_not_one);
         return None;
     }
 
@@ -4396,7 +4411,7 @@ fn gen_opt_case_dispatch(
     // If megamorphic, fallback to compiling branch instructions after opt_case_dispatch
     let megamorphic = asm.ctx.get_chain_depth() >= CASE_WHEN_MAX_DEPTH;
     if megamorphic {
-        gen_counter_incr(asm, Counter::num_opt_case_dispatch_megamorphic);
+        gen_counter_incr(jit, asm, Counter::num_opt_case_dispatch_megamorphic);
     }
 
     if comptime_key.fixnum_p() && comptime_key.0 <= u32::MAX.as_usize() && case_hash_all_fixnum_p(case_hash) && !megamorphic {
@@ -4603,11 +4618,11 @@ fn gen_throw(
     let throwobj = asm.load(throwobj);
 
     // Gather some statistics about throw
-    gen_counter_incr(asm, Counter::num_throw);
+    gen_counter_incr(jit, asm, Counter::num_throw);
     match (throw_state & VM_THROW_STATE_MASK as u64) as u32 {
-        RUBY_TAG_BREAK => gen_counter_incr(asm, Counter::num_throw_break),
-        RUBY_TAG_RETRY => gen_counter_incr(asm, Counter::num_throw_retry),
-        RUBY_TAG_RETURN => gen_counter_incr(asm, Counter::num_throw_return),
+        RUBY_TAG_BREAK => gen_counter_incr(jit, asm, Counter::num_throw_break),
+        RUBY_TAG_RETRY => gen_counter_incr(jit, asm, Counter::num_throw_retry),
+        RUBY_TAG_RETURN => gen_counter_incr(jit, asm, Counter::num_throw_return),
         _ => {},
     }
 
@@ -6450,11 +6465,11 @@ fn gen_send_cfunc(
 
     // If it's a splat and the method expects a Ruby array of arguments
    if cfunc_argc == -2 && flags & VM_CALL_ARGS_SPLAT != 0 {
-        gen_counter_incr(asm, Counter::send_cfunc_splat_neg2);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_splat_neg2);
         return None;
     }
 
-    exit_if_kwsplat_non_nil(asm, flags, Counter::send_cfunc_kw_splat_non_nil)?;
+    exit_if_kwsplat_non_nil(jit, asm, flags, Counter::send_cfunc_kw_splat_non_nil)?;
     let kw_splat = flags & VM_CALL_KW_SPLAT != 0;
 
     let kw_arg = unsafe { vm_ci_kwarg(ci) };
@@ -6465,18 +6480,18 @@ fn gen_send_cfunc(
     };
 
     if kw_arg_num != 0 && flags & VM_CALL_ARGS_SPLAT != 0 {
-        gen_counter_incr(asm, Counter::send_cfunc_splat_with_kw);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_splat_with_kw);
         return None;
     }
 
     if c_method_tracing_currently_enabled(jit) {
         // Don't JIT if tracing c_call or c_return
-        gen_counter_incr(asm, Counter::send_cfunc_tracing);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_tracing);
         return None;
     }
 
     // Increment total cfunc send count
-    gen_counter_incr(asm, Counter::num_send_cfunc);
+    gen_counter_incr(jit, asm, Counter::num_send_cfunc);
 
     // Delegate to codegen for C methods if we have it.
     if kw_arg.is_null() &&
@@ -6499,7 +6514,7 @@ fn gen_send_cfunc(
 
         if cfunc_codegen {
             assert_eq!(expected_stack_after, asm.ctx.get_stack_size() as i32);
-            gen_counter_incr(asm, Counter::num_send_cfunc_inline);
+            gen_counter_incr(jit, asm, Counter::num_send_cfunc_inline);
             // cfunc codegen generated code. Terminate the block so
             // there isn't multiple calls in the same block.
             jump_to_next_insn(jit, asm);
@@ -6525,7 +6540,7 @@ fn gen_send_cfunc(
         let splat_array_idx = i32::from(kw_splat) + i32::from(block_arg);
         let comptime_splat_array = jit.peek_at_stack(&asm.ctx, splat_array_idx as isize);
         if unsafe { rb_yjit_ruby2_keywords_splat_p(comptime_splat_array) } != 0 {
-            gen_counter_incr(asm, Counter::send_cfunc_splat_varg_ruby2_keywords);
+            gen_counter_incr(jit, asm, Counter::send_cfunc_splat_varg_ruby2_keywords);
             return None;
         }
 
@@ -6554,13 +6569,13 @@ fn gen_send_cfunc(
 
     // If the argument count doesn't match
     if cfunc_argc >= 0 && cfunc_argc != passed_argc && flags & VM_CALL_ARGS_SPLAT == 0 {
-        gen_counter_incr(asm, Counter::send_cfunc_argc_mismatch);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_argc_mismatch);
         return None;
     }
 
     // Don't JIT functions that need C stack arguments for now
     if cfunc_argc >= 0 && passed_argc + 1 > (C_ARG_OPNDS.len() as i32) {
-        gen_counter_incr(asm, Counter::send_cfunc_toomany_args);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_toomany_args);
         return None;
     }
 
@@ -6578,7 +6593,7 @@ fn gen_send_cfunc(
             // Nothing to do
         }
         _ => {
-            gen_counter_incr(asm, Counter::send_cfunc_block_arg);
+            gen_counter_incr(jit, asm, Counter::send_cfunc_block_arg);
             return None;
         }
     }
@@ -6614,7 +6629,7 @@ fn gen_send_cfunc(
         let required_args : u32 = (cfunc_argc as u32).saturating_sub(argc as u32 - 1);
         // + 1 because we pass self
         if required_args + 1 >= C_ARG_OPNDS.len() as u32 {
-            gen_counter_incr(asm, Counter::send_cfunc_toomany_args);
+            gen_counter_incr(jit, asm, Counter::send_cfunc_toomany_args);
             return None;
         }
 
@@ -6964,14 +6979,14 @@ fn gen_send_bmethod(
     // Optimize for single ractor mode and avoid runtime check for
     // "defined with an un-shareable Proc in a different Ractor"
     if !assume_single_ractor_mode(jit, asm) {
-        gen_counter_incr(asm, Counter::send_bmethod_ractor);
+        gen_counter_incr(jit, asm, Counter::send_bmethod_ractor);
         return None;
     }
 
     // Passing a block to a block needs logic different from passing
     // a block to a method and sometimes requires allocation. Bail for now.
     if block.is_some() {
-        gen_counter_incr(asm, Counter::send_bmethod_block_arg);
+        gen_counter_incr(jit, asm, Counter::send_bmethod_block_arg);
         return None;
     }
 
@@ -7134,29 +7149,29 @@ fn gen_send_iseq(
     let splat_pos = i32::from(block_arg) + i32::from(kw_splat) + kw_arg_num;
 
     exit_if_stack_too_large(iseq)?;
-    exit_if_tail_call(asm, ci)?;
-    exit_if_has_post(asm, iseq)?;
-    exit_if_kwsplat_non_nil(asm, flags, Counter::send_iseq_kw_splat_non_nil)?;
-    exit_if_has_rest_and_captured(asm, iseq_has_rest, captured_opnd)?;
-    exit_if_has_kwrest_and_captured(asm, has_kwrest, captured_opnd)?;
-    exit_if_has_rest_and_supplying_kws(asm, iseq_has_rest, supplying_kws)?;
-    exit_if_supplying_kw_and_has_no_kw(asm, supplying_kws, doing_kw_call)?;
-    exit_if_supplying_kws_and_accept_no_kwargs(asm, supplying_kws, iseq)?;
-    exit_if_doing_kw_and_splat(asm, doing_kw_call, flags)?;
+    exit_if_tail_call(jit, asm, ci)?;
+    exit_if_has_post(jit, asm, iseq)?;
+    exit_if_kwsplat_non_nil(jit, asm, flags, Counter::send_iseq_kw_splat_non_nil)?;
+    exit_if_has_rest_and_captured(jit, asm, iseq_has_rest, captured_opnd)?;
+    exit_if_has_kwrest_and_captured(jit, asm, has_kwrest, captured_opnd)?;
+    exit_if_has_rest_and_supplying_kws(jit, asm, iseq_has_rest, supplying_kws)?;
+    exit_if_supplying_kw_and_has_no_kw(jit, asm, supplying_kws, doing_kw_call)?;
+    exit_if_supplying_kws_and_accept_no_kwargs(jit, asm, supplying_kws, iseq)?;
+    exit_if_doing_kw_and_splat(jit, asm, doing_kw_call, flags)?;
     if !forwarding {
-        exit_if_wrong_number_arguments(asm, arg_setup_block, opts_filled, flags, opt_num, iseq_has_rest)?;
+        exit_if_wrong_number_arguments(jit, asm, arg_setup_block, opts_filled, flags, opt_num, iseq_has_rest)?;
     }
-    exit_if_doing_kw_and_opts_missing(asm, doing_kw_call, opts_missing)?;
-    exit_if_has_rest_and_optional_and_block(asm, iseq_has_rest, opt_num, iseq, block_arg)?;
+    exit_if_doing_kw_and_opts_missing(jit, asm, doing_kw_call, opts_missing)?;
+    exit_if_has_rest_and_optional_and_block(jit, asm, iseq_has_rest, opt_num, iseq, block_arg)?;
     if forwarding && flags & VM_CALL_OPT_SEND != 0 {
-        gen_counter_incr(asm, Counter::send_iseq_send_forwarding);
+        gen_counter_incr(jit, asm, Counter::send_iseq_send_forwarding);
         return None;
     }
     let block_arg_type = exit_if_unsupported_block_arg_type(jit, asm, block_arg)?;
 
     // Bail if we can't drop extra arguments for a yield by just popping them
     if supplying_kws && arg_setup_block && argc > (kw_arg_num + required_num + opt_num) {
-        gen_counter_incr(asm, Counter::send_iseq_complex_discard_extras);
+        gen_counter_incr(jit, asm, Counter::send_iseq_complex_discard_extras);
         return None;
     }
 
@@ -7168,7 +7183,7 @@ fn gen_send_iseq(
             // In this case (param.flags.has_block && local_iseq != iseq),
             // the block argument is setup as a local variable and requires
             // materialization (allocation). Bail.
-            gen_counter_incr(asm, Counter::send_iseq_materialized_block);
+            gen_counter_incr(jit, asm, Counter::send_iseq_materialized_block);
             return None;
         }
     }
@@ -7176,7 +7191,7 @@ fn gen_send_iseq(
     // Check that required keyword arguments are supplied and find any extras
     // that should go into the keyword rest parameter (**kw_rest).
     if doing_kw_call {
-        gen_iseq_kw_call_checks(asm, iseq, kw_arg, has_kwrest, kw_arg_num)?;
+        gen_iseq_kw_call_checks(jit, asm, iseq, kw_arg, has_kwrest, kw_arg_num)?;
     }
 
     let splat_array_length = if splat_call {
@@ -7184,7 +7199,7 @@ fn gen_send_iseq(
         let array_length = if array == Qnil {
             0
         } else if unsafe { !RB_TYPE_P(array, RUBY_T_ARRAY) } {
-            gen_counter_incr(asm, Counter::send_iseq_splat_not_array);
+            gen_counter_incr(jit, asm, Counter::send_iseq_splat_not_array);
             return None;
         } else {
             unsafe { rb_yjit_array_len(array) as u32}
@@ -7195,7 +7210,7 @@ fn gen_send_iseq(
         if !iseq_has_rest {
             let supplying = argc - 1 - i32::from(kw_splat) + array_length as i32;
             if (required_num..=required_num + opt_num).contains(&supplying) == false {
-                gen_counter_incr(asm, Counter::send_iseq_splat_arity_error);
+                gen_counter_incr(jit, asm, Counter::send_iseq_splat_arity_error);
                 return None;
             }
         }
@@ -7234,14 +7249,14 @@ fn gen_send_iseq(
         // If block_arg0_splat, we still need side exits after splat, but
         // the splat modifies the stack which breaks side exits. So bail out.
         if splat_call {
-            gen_counter_incr(asm, Counter::invokeblock_iseq_arg0_args_splat);
+            gen_counter_incr(jit, asm, Counter::invokeblock_iseq_arg0_args_splat);
             return None;
         }
         // The block_arg0_splat implementation cannot deal with optional parameters.
         // This is a setup_parameters_complex() situation and interacts with the
         // starting position of the callee.
         if opt_num > 1 {
-            gen_counter_incr(asm, Counter::invokeblock_iseq_arg0_optional);
+            gen_counter_incr(jit, asm, Counter::invokeblock_iseq_arg0_optional);
             return None;
         }
     }
@@ -7275,7 +7290,7 @@ fn gen_send_iseq(
     }
 
     // Increment total ISEQ send count
-    gen_counter_incr(asm, Counter::num_send_iseq);
+    gen_counter_incr(jit, asm, Counter::num_send_iseq);
 
     // Shortcut for special `Primitive.attr! :leaf` builtins
     let builtin_attrs = unsafe { rb_yjit_iseq_builtin_attrs(iseq) };
@@ -7292,7 +7307,7 @@ fn gen_send_iseq(
         // adding one requires interpreter changes to support.
         if block_arg_type.is_some() {
             if iseq_has_block_param {
-                gen_counter_incr(asm, Counter::send_iseq_leaf_builtin_block_arg_block_param);
+                gen_counter_incr(jit, asm, Counter::send_iseq_leaf_builtin_block_arg_block_param);
                 return None;
             }
             asm.stack_pop(1);
@@ -7309,7 +7324,7 @@ fn gen_send_iseq(
         }
 
         asm_comment!(asm, "inlined leaf builtin");
-        gen_counter_incr(asm, Counter::num_send_iseq_leaf);
+        gen_counter_incr(jit, asm, Counter::num_send_iseq_leaf);
 
         // The callee may allocate, e.g. Integer#abs on a Bignum.
         // Save SP for GC, save PC for allocation tracing, and prepare
@@ -7343,7 +7358,7 @@ fn gen_send_iseq(
     // Inline simple ISEQs whose return value is known at compile time
     if let (Some(value), None, false) = (iseq_get_return_value(iseq, captured_opnd, block, flags), block_arg_type, opt_send_call) {
         asm_comment!(asm, "inlined simple ISEQ");
-        gen_counter_incr(asm, Counter::num_send_iseq_inline);
+        gen_counter_incr(jit, asm, Counter::num_send_iseq_inline);
 
         match value {
             IseqReturn::LocalVariable(local_idx) => {
@@ -7454,7 +7469,7 @@ fn gen_send_iseq(
         let callee_specval = callee_ep + VM_ENV_DATA_INDEX_SPECVAL;
         if callee_specval < 0 {
             // Can't write to sp[-n] since that's where the arguments are
-            gen_counter_incr(asm, Counter::send_iseq_clobbering_block_arg);
+            gen_counter_incr(jit, asm, Counter::send_iseq_clobbering_block_arg);
             return None;
         }
         let proc = asm.stack_pop(1); // Pop first, as argc doesn't account for the block arg
@@ -7480,7 +7495,7 @@ fn gen_send_iseq(
         // an array that has the same length. We will insert guards.
         argc = argc - 1 + array_length as i32;
         if argc + asm.ctx.get_stack_size() as i32 > MAX_SPLAT_LENGTH {
-            gen_counter_incr(asm, Counter::send_splat_too_long);
+            gen_counter_incr(jit, asm, Counter::send_splat_too_long);
             return None;
         }
         push_splat_args(array_length, asm);
@@ -7853,6 +7868,7 @@ fn gen_send_iseq(
 
 // Check if we can handle a keyword call
 fn gen_iseq_kw_call_checks(
+    jit: &JITState,
     asm: &mut Assembler,
     iseq: *const rb_iseq_t,
     kw_arg: *const rb_callinfo_kwarg,
@@ -7871,7 +7887,7 @@ fn gen_iseq_kw_call_checks(
         // We have so many keywords that (1 << num) encoded as a FIXNUM
         // (which shifts it left one more) no longer fits inside a 32-bit
         // immediate. Similarly, we use a u64 in case of keyword rest parameter.
-        gen_counter_incr(asm, Counter::send_iseq_too_many_kwargs);
+        gen_counter_incr(jit, asm, Counter::send_iseq_too_many_kwargs);
         return None;
     }
 
@@ -7912,7 +7928,7 @@ fn gen_iseq_kw_call_checks(
                 // If the keyword was never found, then we know we have a
                 // mismatch in the names of the keyword arguments, so we need to
                 // bail.
-                gen_counter_incr(asm, Counter::send_iseq_kwargs_mismatch);
+                gen_counter_incr(jit, asm, Counter::send_iseq_kwargs_mismatch);
                 return None;
             }
             Some((callee_idx, _)) if callee_idx < keyword_required_num => {
@@ -7925,7 +7941,7 @@ fn gen_iseq_kw_call_checks(
     }
     assert!(required_kwargs_filled <= keyword_required_num);
     if required_kwargs_filled != keyword_required_num {
-        gen_counter_incr(asm, Counter::send_iseq_kwargs_mismatch);
+        gen_counter_incr(jit, asm, Counter::send_iseq_kwargs_mismatch);
         return None;
     }
 
@@ -8177,49 +8193,50 @@ fn gen_iseq_kw_call(
 /// short-circuit using the ? operator if we return None.
 /// It would be great if rust let you implement ? for your
 /// own types, but as of right now they don't.
-fn exit_if(asm: &mut Assembler, pred: bool, counter: Counter) -> Option<()> {
+fn exit_if(jit: &JITState, asm: &mut Assembler, pred: bool, counter: Counter) -> Option<()> {
     if pred {
-        gen_counter_incr(asm, counter);
+        gen_counter_incr(jit, asm, counter);
         return None
     }
     Some(())
 }
 
 #[must_use]
-fn exit_if_tail_call(asm: &mut Assembler, ci: *const rb_callinfo) -> Option<()> {
-    exit_if(asm, unsafe { vm_ci_flag(ci) } & VM_CALL_TAILCALL != 0, Counter::send_iseq_tailcall)
+fn exit_if_tail_call(jit: &JITState, asm: &mut Assembler, ci: *const rb_callinfo) -> Option<()> {
+    exit_if(jit, asm, unsafe { vm_ci_flag(ci) } & VM_CALL_TAILCALL != 0, Counter::send_iseq_tailcall)
 }
 
 #[must_use]
-fn exit_if_has_post(asm: &mut Assembler, iseq: *const rb_iseq_t) -> Option<()> {
-    exit_if(asm, unsafe { get_iseq_flags_has_post(iseq) }, Counter::send_iseq_has_post)
+fn exit_if_has_post(jit: &JITState, asm: &mut Assembler, iseq: *const rb_iseq_t) -> Option<()> {
+    exit_if(jit, asm, unsafe { get_iseq_flags_has_post(iseq) }, Counter::send_iseq_has_post)
 }
 
 #[must_use]
-fn exit_if_kwsplat_non_nil(asm: &mut Assembler, flags: u32, counter: Counter) -> Option<()> {
+fn exit_if_kwsplat_non_nil(jit: &JITState, asm: &mut Assembler, flags: u32, counter: Counter) -> Option<()> {
     let kw_splat = flags & VM_CALL_KW_SPLAT != 0;
     let kw_splat_stack = StackOpnd((flags & VM_CALL_ARGS_BLOCKARG != 0).into());
-    exit_if(asm, kw_splat && asm.ctx.get_opnd_type(kw_splat_stack) != Type::Nil, counter)
+    exit_if(jit, asm, kw_splat && asm.ctx.get_opnd_type(kw_splat_stack) != Type::Nil, counter)
 }
 
 #[must_use]
-fn exit_if_has_rest_and_captured(asm: &mut Assembler, iseq_has_rest: bool, captured_opnd: Option<Opnd>) -> Option<()> {
-    exit_if(asm, iseq_has_rest && captured_opnd.is_some(), Counter::send_iseq_has_rest_and_captured)
+fn exit_if_has_rest_and_captured(jit: &JITState, asm: &mut Assembler, iseq_has_rest: bool, captured_opnd: Option<Opnd>) -> Option<()> {
+    exit_if(jit, asm, iseq_has_rest && captured_opnd.is_some(), Counter::send_iseq_has_rest_and_captured)
 }
 
 #[must_use]
-fn exit_if_has_kwrest_and_captured(asm: &mut Assembler, iseq_has_kwrest: bool, captured_opnd: Option<Opnd>) -> Option<()> {
+fn exit_if_has_kwrest_and_captured(jit: &JITState, asm: &mut Assembler, iseq_has_kwrest: bool, captured_opnd: Option<Opnd>) -> Option<()> {
     // We need to call a C function to allocate the kwrest hash, but also need to hold the captred
     // block across the call, which we can't do.
-    exit_if(asm, iseq_has_kwrest && captured_opnd.is_some(), Counter::send_iseq_has_kwrest_and_captured)
+    exit_if(jit, asm, iseq_has_kwrest && captured_opnd.is_some(), Counter::send_iseq_has_kwrest_and_captured)
 }
 
 #[must_use]
-fn exit_if_has_rest_and_supplying_kws(asm: &mut Assembler, iseq_has_rest: bool, supplying_kws: bool) -> Option<()> {
+fn exit_if_has_rest_and_supplying_kws(jit: &JITState, asm: &mut Assembler, iseq_has_rest: bool, supplying_kws: bool) -> Option<()> {
     // There can be a gap between the rest parameter array and the supplied keywords, or
     // no space to put the rest array (e.g. `def foo(*arr, k:) = arr; foo(k: 1)` 1 is
    // sitting where the rest array should be).
     exit_if(
+        jit,
         asm,
         iseq_has_rest && supplying_kws,
         Counter::send_iseq_has_rest_and_kw_supplied,
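
All of the `exit_if_*` guards gain a `jit` parameter for the same reason: `exit_if` calls `gen_counter_incr`, which now wants `jit.pc` so a traced fallback records where compilation bailed out. A condensed, self-contained model of that plumbing (assumed simplifications throughout):

```rust
struct JITState { pc: usize }

fn gen_counter_incr(jit: &JITState, counter: &str) {
    println!("bump {counter} (pc={:#x} available for tracing)", jit.pc);
}

/// Mirrors the diff's exit_if: short-circuits codegen via `?` when pred holds.
#[must_use]
fn exit_if(jit: &JITState, pred: bool, counter: &str) -> Option<()> {
    if pred {
        gen_counter_incr(jit, counter);
        return None;
    }
    Some(())
}

fn compile(jit: &JITState, has_tail_call: bool) -> Option<()> {
    exit_if(jit, has_tail_call, "send_iseq_tailcall")?;
    Some(()) // keep compiling
}

fn main() {
    assert!(compile(&JITState { pc: 0x10 }, true).is_none());
}
```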
@@ -8227,10 +8244,11 @@ fn exit_if_has_rest_and_supplying_kws(asm: &mut Assembler, iseq_has_rest: bool,
 }
 
 #[must_use]
-fn exit_if_supplying_kw_and_has_no_kw(asm: &mut Assembler, supplying_kws: bool, callee_kws: bool) -> Option<()> {
+fn exit_if_supplying_kw_and_has_no_kw(jit: &JITState, asm: &mut Assembler, supplying_kws: bool, callee_kws: bool) -> Option<()> {
     // Passing keyword arguments to a callee means allocating a hash and treating
     // that as a positional argument. Bail for now.
     exit_if(
+        jit,
         asm,
         supplying_kws && !callee_kws,
         Counter::send_iseq_has_no_kw,
@@ -8238,10 +8256,11 @@ fn exit_if_supplying_kw_and_has_no_kw(asm: &mut Assembler, supplying_kws: bool,
 }
 
 #[must_use]
-fn exit_if_supplying_kws_and_accept_no_kwargs(asm: &mut Assembler, supplying_kws: bool, iseq: *const rb_iseq_t) -> Option<()> {
+fn exit_if_supplying_kws_and_accept_no_kwargs(jit: &JITState, asm: &mut Assembler, supplying_kws: bool, iseq: *const rb_iseq_t) -> Option<()> {
     // If we have a method accepting no kwargs (**nil), exit if we have passed
     // it any kwargs.
     exit_if(
+        jit,
         asm,
         supplying_kws && unsafe { get_iseq_flags_accepts_no_kwarg(iseq) },
         Counter::send_iseq_accepts_no_kwarg
@@ -8249,12 +8268,13 @@ fn exit_if_supplying_kws_and_accept_no_kwargs(asm: &mut Assembler, supplying_kws
 }
 
 #[must_use]
-fn exit_if_doing_kw_and_splat(asm: &mut Assembler, doing_kw_call: bool, flags: u32) -> Option<()> {
-    exit_if(asm, doing_kw_call && flags & VM_CALL_ARGS_SPLAT != 0, Counter::send_iseq_splat_with_kw)
+fn exit_if_doing_kw_and_splat(jit: &JITState, asm: &mut Assembler, doing_kw_call: bool, flags: u32) -> Option<()> {
+    exit_if(jit, asm, doing_kw_call && flags & VM_CALL_ARGS_SPLAT != 0, Counter::send_iseq_splat_with_kw)
 }
 
 #[must_use]
 fn exit_if_wrong_number_arguments(
+    jit: &JITState,
     asm: &mut Assembler,
     args_setup_block: bool,
     opts_filled: i32,
@@ -8267,20 +8287,21 @@ fn exit_if_wrong_number_arguments(
     // Too many arguments and no sink that take them
     let too_many = opts_filled > opt_num && !(iseq_has_rest || args_setup_block);
 
-    exit_if(asm, too_few || too_many, Counter::send_iseq_arity_error)
+    exit_if(jit, asm, too_few || too_many, Counter::send_iseq_arity_error)
 }
 
 #[must_use]
-fn exit_if_doing_kw_and_opts_missing(asm: &mut Assembler, doing_kw_call: bool, opts_missing: i32) -> Option<()> {
+fn exit_if_doing_kw_and_opts_missing(jit: &JITState, asm: &mut Assembler, doing_kw_call: bool, opts_missing: i32) -> Option<()> {
     // If we have unfilled optional arguments and keyword arguments then we
     // would need to adjust the arguments location to account for that.
     // For now we aren't handling this case.
-    exit_if(asm, doing_kw_call && opts_missing > 0, Counter::send_iseq_missing_optional_kw)
+    exit_if(jit, asm, doing_kw_call && opts_missing > 0, Counter::send_iseq_missing_optional_kw)
 }
 
 #[must_use]
-fn exit_if_has_rest_and_optional_and_block(asm: &mut Assembler, iseq_has_rest: bool, opt_num: i32, iseq: *const rb_iseq_t, block_arg: bool) -> Option<()> {
+fn exit_if_has_rest_and_optional_and_block(jit: &JITState, asm: &mut Assembler, iseq_has_rest: bool, opt_num: i32, iseq: *const rb_iseq_t, block_arg: bool) -> Option<()> {
     exit_if(
+        jit,
         asm,
         iseq_has_rest && opt_num != 0 && (unsafe { get_iseq_flags_has_block(iseq) } || block_arg),
         Counter::send_iseq_has_rest_opt_and_block
@@ -8322,7 +8343,7 @@ fn exit_if_unsupported_block_arg_type(
             Some(Some(BlockArg::TProc))
         }
         _ => {
-            gen_counter_incr(asm, Counter::send_iseq_block_arg_type);
+            gen_counter_incr(jit, asm, Counter::send_iseq_block_arg_type);
             None
         }
     }
@@ -8374,7 +8395,7 @@ fn gen_struct_aref(
     if c_method_tracing_currently_enabled(jit) {
         // Struct accesses need fire c_call and c_return events, which we can't support
         // See :attr-tracing:
-        gen_counter_incr(asm, Counter::send_cfunc_tracing);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_tracing);
         return None;
     }
 
@@ -8424,7 +8445,7 @@ fn gen_struct_aset(
     if c_method_tracing_currently_enabled(jit) {
         // Struct accesses need fire c_call and c_return events, which we can't support
         // See :attr-tracing:
-        gen_counter_incr(asm, Counter::send_cfunc_tracing);
+        gen_counter_incr(jit, asm, Counter::send_cfunc_tracing);
         return None;
     }
 
@@ -8489,7 +8510,7 @@ fn gen_send_dynamic<F: Fn(&mut Assembler) -> Opnd>(
     // Fix the interpreter SP deviated by vm_sendish
     asm.mov(Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SP), SP);
 
-    gen_counter_incr(asm, Counter::num_send_dynamic);
+    gen_counter_incr(jit, asm, Counter::num_send_dynamic);
 
     jit_perf_symbol_pop!(jit, asm, PerfMap::Codegen);
 
@@ -8529,7 +8550,7 @@ fn gen_send_general(
 
     // Dynamic stack layout. No good way to support without inlining.
     if ci_flags & VM_CALL_FORWARDING != 0 {
-        gen_counter_incr(asm, Counter::send_iseq_forwarding);
+        gen_counter_incr(jit, asm, Counter::send_iseq_forwarding);
         return None;
     }
 
@@ -8546,7 +8567,7 @@ fn gen_send_general(
         && comptime_recv != unsafe { rb_vm_top_self() }
         && !unsafe { RB_TYPE_P(comptime_recv, RUBY_T_CLASS) }
         && !unsafe { RB_TYPE_P(comptime_recv, RUBY_T_MODULE) } {
-        gen_counter_incr(asm, Counter::send_singleton_class);
+        gen_counter_incr(jit, asm, Counter::send_singleton_class);
         return None;
     }
 
@@ -8559,16 +8580,16 @@ fn gen_send_general(
     asm_comment!(asm, "call to {}", get_method_name(Some(comptime_recv_klass), mid));
 
     // Gather some statistics about sends
-    gen_counter_incr(asm, Counter::num_send);
+    gen_counter_incr(jit, asm, Counter::num_send);
     if let Some(_known_klass) = asm.ctx.get_opnd_type(recv_opnd).known_class() {
-        gen_counter_incr(asm, Counter::num_send_known_class);
+        gen_counter_incr(jit, asm, Counter::num_send_known_class);
     }
     if asm.ctx.get_chain_depth() > 1 {
-        gen_counter_incr(asm, Counter::num_send_polymorphic);
+        gen_counter_incr(jit, asm, Counter::num_send_polymorphic);
     }
     // If megamorphic, let the caller fallback to dynamic dispatch
     if asm.ctx.get_chain_depth() >= SEND_MAX_DEPTH {
-        gen_counter_incr(asm, Counter::send_megamorphic);
+        gen_counter_incr(jit, asm, Counter::send_megamorphic);
         return None;
     }
 
@@ -8586,7 +8607,7 @@ fn gen_send_general(
     // Do method lookup
     let mut cme = unsafe { rb_callable_method_entry(comptime_recv_klass, mid) };
     if cme.is_null() {
-        gen_counter_incr(asm, Counter::send_cme_not_found);
+        gen_counter_incr(jit, asm, Counter::send_cme_not_found);
         return None;
     }
 
@@ -8603,7 +8624,7 @@ fn gen_send_general(
         if flags & VM_CALL_FCALL == 0 {
             // Can only call private methods with FCALL callsites.
             // (at the moment they are callsites without a receiver or an explicit `self` receiver)
-            gen_counter_incr(asm, Counter::send_private_not_fcall);
+            gen_counter_incr(jit, asm, Counter::send_private_not_fcall);
             return None;
         }
     }
@@ -8649,7 +8670,7 @@ fn gen_send_general(
         VM_METHOD_TYPE_IVAR => {
             // This is a .send call not supported right now for attr_reader
             if flags & VM_CALL_OPT_SEND != 0 {
-                gen_counter_incr(asm, Counter::send_send_attr_reader);
+                gen_counter_incr(jit, asm, Counter::send_send_attr_reader);
                 return None;
             }
 
@@ -8661,7 +8682,7 @@ fn gen_send_general(
                     asm.stack_pop(1);
                 }
                 _ => {
-                    gen_counter_incr(asm, Counter::send_getter_block_arg);
+                    gen_counter_incr(jit, asm, Counter::send_getter_block_arg);
                     return None;
                 }
             }
@@ -8681,7 +8702,7 @@ fn gen_send_general(
                 asm.stack_pop(1);
             } else {
                 // Argument count mismatch. Getters take no arguments.
-                gen_counter_incr(asm, Counter::send_getter_arity);
+                gen_counter_incr(jit, asm, Counter::send_getter_arity);
                 return None;
             }
         }
@@ -8694,7 +8715,7 @@ fn gen_send_general(
             // "method" we are calling into never enables those tracing
             // events. We are never inside the code that needs to be
             // invalidated when invalidation happens.
-            gen_counter_incr(asm, Counter::send_cfunc_tracing);
+            gen_counter_incr(jit, asm, Counter::send_cfunc_tracing);
             return None;
         }
 
@@ -8714,26 +8735,26 @@ fn gen_send_general(
         VM_METHOD_TYPE_ATTRSET => {
             // This is a .send call not supported right now for attr_writer
             if flags & VM_CALL_OPT_SEND != 0 {
-                gen_counter_incr(asm, Counter::send_send_attr_writer);
+                gen_counter_incr(jit, asm, Counter::send_send_attr_writer);
                 return None;
             }
             if flags & VM_CALL_ARGS_SPLAT != 0 {
-                gen_counter_incr(asm, Counter::send_args_splat_attrset);
+                gen_counter_incr(jit, asm, Counter::send_args_splat_attrset);
                 return None;
             }
             if flags & VM_CALL_KWARG != 0 {
-                gen_counter_incr(asm, Counter::send_attrset_kwargs);
+                gen_counter_incr(jit, asm, Counter::send_attrset_kwargs);
                 return None;
             } else if argc != 1 || unsafe { !RB_TYPE_P(comptime_recv, RUBY_T_OBJECT) } {
-                gen_counter_incr(asm, Counter::send_ivar_set_method);
+                gen_counter_incr(jit, asm, Counter::send_ivar_set_method);
                 return None;
             } else if c_method_tracing_currently_enabled(jit) {
                 // Can't generate code for firing c_call and c_return events
                 // See :attr-tracing:
-                gen_counter_incr(asm, Counter::send_cfunc_tracing);
+                gen_counter_incr(jit, asm, Counter::send_cfunc_tracing);
                 return None;
             } else if flags & VM_CALL_ARGS_BLOCKARG != 0 {
-                gen_counter_incr(asm, Counter::send_attrset_block_arg);
+                gen_counter_incr(jit, asm, Counter::send_attrset_block_arg);
                 return None;
             } else {
                 let ivar_name = unsafe { get_cme_def_body_attr_id(cme) };
@@ -8743,7 +8764,7 @@ fn gen_send_general(
         // Block method, e.g. define_method(:foo) { :my_block }
         VM_METHOD_TYPE_BMETHOD => {
             if flags & VM_CALL_ARGS_SPLAT != 0 {
-                gen_counter_incr(asm, Counter::send_args_splat_bmethod);
+                gen_counter_incr(jit, asm, Counter::send_args_splat_bmethod);
                 return None;
             }
             return gen_send_bmethod(jit, asm, ci, cme, block, flags, argc);
@@ -8756,7 +8777,7 @@ fn gen_send_general(
         // Send family of methods, e.g. call/apply
         VM_METHOD_TYPE_OPTIMIZED => {
            if flags & VM_CALL_ARGS_BLOCKARG != 0 {
-                gen_counter_incr(asm, Counter::send_optimized_block_arg);
+                gen_counter_incr(jit, asm, Counter::send_optimized_block_arg);
                 return None;
             }
 
@@ -8774,12 +8795,12 @@ fn gen_send_general(
                     // currently work, we can't do stack manipulation until we will no longer
                     // side exit.
                     if flags & VM_CALL_OPT_SEND != 0 {
-                        gen_counter_incr(asm, Counter::send_send_nested);
+                        gen_counter_incr(jit, asm, Counter::send_send_nested);
                         return None;
                     }
 
                     if argc == 0 {
-                        gen_counter_incr(asm, Counter::send_send_wrong_args);
+                        gen_counter_incr(jit, asm, Counter::send_send_wrong_args);
                         return None;
                     }
 
@@ -8790,13 +8811,13 @@ fn gen_send_general(
                     mid = unsafe { rb_get_symbol_id(compile_time_name) };
                     if mid == 0 {
                         // This also rejects method names that need conversion
-                        gen_counter_incr(asm, Counter::send_send_null_mid);
+                        gen_counter_incr(jit, asm, Counter::send_send_null_mid);
                         return None;
                     }
 
                     cme = unsafe { rb_callable_method_entry(comptime_recv_klass, mid) };
                     if cme.is_null() {
-                        gen_counter_incr(asm, Counter::send_send_null_cme);
+                        gen_counter_incr(jit, asm, Counter::send_send_null_cme);
                         return None;
                     }
 
@@ -8830,24 +8851,24 @@ fn gen_send_general(
                 OPTIMIZED_METHOD_TYPE_CALL => {
 
                     if block.is_some() {
-                        gen_counter_incr(asm, Counter::send_call_block);
+                        gen_counter_incr(jit, asm, Counter::send_call_block);
                         return None;
                    }
 
                     if flags & VM_CALL_KWARG != 0 {
-                        gen_counter_incr(asm, Counter::send_call_kwarg);
+                        gen_counter_incr(jit, asm, Counter::send_call_kwarg);
                         return None;
                     }
 
                     if flags & VM_CALL_ARGS_SPLAT != 0 {
-                        gen_counter_incr(asm, Counter::send_args_splat_opt_call);
+                        gen_counter_incr(jit, asm, Counter::send_args_splat_opt_call);
                         return None;
                     }
 
                     // Optimize for single ractor mode and avoid runtime check for
                     // "defined with an un-shareable Proc in a different Ractor"
                     if !assume_single_ractor_mode(jit, asm) {
-                        gen_counter_incr(asm, Counter::send_call_multi_ractor);
+                        gen_counter_incr(jit, asm, Counter::send_call_multi_ractor);
                         return None;
                     }
 
@@ -8884,12 +8905,12 @@ fn gen_send_general(
 
                 }
                 OPTIMIZED_METHOD_TYPE_BLOCK_CALL => {
-                    gen_counter_incr(asm, Counter::send_optimized_method_block_call);
+                    gen_counter_incr(jit, asm, Counter::send_optimized_method_block_call);
                     return None;
                 }
                 OPTIMIZED_METHOD_TYPE_STRUCT_AREF => {
                     if flags & VM_CALL_ARGS_SPLAT != 0 {
-                        gen_counter_incr(asm, Counter::send_args_splat_aref);
+                        gen_counter_incr(jit, asm, Counter::send_args_splat_aref);
                         return None;
                     }
                     return gen_struct_aref(
@ -8904,7 +8925,7 @@ fn gen_send_general(
|
|||||||
}
|
}
|
||||||
OPTIMIZED_METHOD_TYPE_STRUCT_ASET => {
|
OPTIMIZED_METHOD_TYPE_STRUCT_ASET => {
|
||||||
if flags & VM_CALL_ARGS_SPLAT != 0 {
|
if flags & VM_CALL_ARGS_SPLAT != 0 {
|
||||||
gen_counter_incr(asm, Counter::send_args_splat_aset);
|
gen_counter_incr(jit, asm, Counter::send_args_splat_aset);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
return gen_struct_aset(
|
return gen_struct_aset(
|
||||||
@ -8923,23 +8944,23 @@ fn gen_send_general(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
VM_METHOD_TYPE_ZSUPER => {
|
VM_METHOD_TYPE_ZSUPER => {
|
||||||
gen_counter_incr(asm, Counter::send_zsuper_method);
|
gen_counter_incr(jit, asm, Counter::send_zsuper_method);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
VM_METHOD_TYPE_UNDEF => {
|
VM_METHOD_TYPE_UNDEF => {
|
||||||
gen_counter_incr(asm, Counter::send_undef_method);
|
gen_counter_incr(jit, asm, Counter::send_undef_method);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
VM_METHOD_TYPE_NOTIMPLEMENTED => {
|
VM_METHOD_TYPE_NOTIMPLEMENTED => {
|
||||||
gen_counter_incr(asm, Counter::send_not_implemented_method);
|
gen_counter_incr(jit, asm, Counter::send_not_implemented_method);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
VM_METHOD_TYPE_MISSING => {
|
VM_METHOD_TYPE_MISSING => {
|
||||||
gen_counter_incr(asm, Counter::send_missing_method);
|
gen_counter_incr(jit, asm, Counter::send_missing_method);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
VM_METHOD_TYPE_REFINED => {
|
VM_METHOD_TYPE_REFINED => {
|
||||||
gen_counter_incr(asm, Counter::send_refined_method);
|
gen_counter_incr(jit, asm, Counter::send_refined_method);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
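Every call site touched above follows the same fallback shape: bump a named diagnostic counter, then return None so compilation falls back to dynamic dispatch. Below is a minimal, self-contained sketch of that shape; all names in it are stand-ins, not the real YJIT types.

```rust
use std::collections::HashMap;

// Stand-in for YJIT's counters; the real ones are static integers bumped
// by generated machine code, not a HashMap behind a mutable reference.
struct Stats {
    counters: HashMap<&'static str, u64>,
}

impl Stats {
    fn incr(&mut self, name: &'static str) {
        *self.counters.entry(name).or_insert(0) += 1;
    }
}

// On an unsupported case, record why and bail out; returning None tells
// the caller to fall back to dynamic dispatch.
fn try_compile_send(stats: &mut Stats, has_block_arg: bool) -> Option<&'static str> {
    if has_block_arg {
        stats.incr("send_optimized_block_arg");
        return None;
    }
    Some("compiled")
}

fn main() {
    let mut stats = Stats { counters: HashMap::new() };
    assert_eq!(try_compile_send(&mut stats, true), None);
    assert_eq!(stats.counters["send_optimized_block_arg"], 1);
}
```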
@@ -9088,7 +9109,7 @@ fn gen_invokeblock_specialized(

     // Fallback to dynamic dispatch if this callsite is megamorphic
     if asm.ctx.get_chain_depth() >= SEND_MAX_DEPTH {
-        gen_counter_incr(asm, Counter::invokeblock_megamorphic);
+        gen_counter_incr(jit, asm, Counter::invokeblock_megamorphic);
         return None;
     }

@@ -9104,7 +9125,7 @@ fn gen_invokeblock_specialized(

     // Handle each block_handler type
     if comptime_handler.0 == VM_BLOCK_HANDLER_NONE as usize { // no block given
-        gen_counter_incr(asm, Counter::invokeblock_none);
+        gen_counter_incr(jit, asm, Counter::invokeblock_none);
         None
     } else if comptime_handler.0 & 0x3 == 0x1 { // VM_BH_ISEQ_BLOCK_P
         asm_comment!(asm, "get local EP");
@@ -9127,7 +9148,7 @@ fn gen_invokeblock_specialized(
         // If the current ISEQ is annotated to be inlined but it's not being inlined here,
         // generate a dynamic dispatch to avoid making this yield megamorphic.
         if unsafe { rb_yjit_iseq_builtin_attrs(jit.iseq) } & BUILTIN_ATTR_INLINE_BLOCK != 0 && !asm.ctx.inline() {
-            gen_counter_incr(asm, Counter::invokeblock_iseq_not_inlined);
+            gen_counter_incr(jit, asm, Counter::invokeblock_iseq_not_inlined);
             return None;
         }

@@ -9150,11 +9171,11 @@ fn gen_invokeblock_specialized(
     } else if comptime_handler.0 & 0x3 == 0x3 { // VM_BH_IFUNC_P
         // We aren't handling CALLER_SETUP_ARG and CALLER_REMOVE_EMPTY_KW_SPLAT yet.
         if flags & VM_CALL_ARGS_SPLAT != 0 {
-            gen_counter_incr(asm, Counter::invokeblock_ifunc_args_splat);
+            gen_counter_incr(jit, asm, Counter::invokeblock_ifunc_args_splat);
             return None;
         }
         if flags & VM_CALL_KW_SPLAT != 0 {
-            gen_counter_incr(asm, Counter::invokeblock_ifunc_kw_splat);
+            gen_counter_incr(jit, asm, Counter::invokeblock_ifunc_kw_splat);
             return None;
         }

@@ -9200,10 +9221,10 @@ fn gen_invokeblock_specialized(
         jump_to_next_insn(jit, asm);
         Some(EndBlock)
     } else if comptime_handler.symbol_p() {
-        gen_counter_incr(asm, Counter::invokeblock_symbol);
+        gen_counter_incr(jit, asm, Counter::invokeblock_symbol);
         None
     } else { // Proc
-        gen_counter_incr(asm, Counter::invokeblock_proc);
+        gen_counter_incr(jit, asm, Counter::invokeblock_proc);
         None
     }
 }
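The else-if chain above discriminates block handlers by the low tag bits of the handler word, per the VM_BH_* comments. A hedged sketch of that classification follows; the helper function and string labels are illustrative, not VM code.

```rust
// Classify a block-handler word the way the chain above does: 0 means no
// block was given, low bits 01 tag an ISEQ block, 11 tag an ifunc, and
// anything else is a reference to a real object (Symbol or Proc).
fn classify_block_handler(word: usize) -> &'static str {
    if word == 0 {
        return "none"; // VM_BLOCK_HANDLER_NONE
    }
    match word & 0x3 {
        0x1 => "iseq block",  // VM_BH_ISEQ_BLOCK_P
        0x3 => "ifunc block", // VM_BH_IFUNC_P
        _ => "object (Symbol or Proc)",
    }
}

fn main() {
    assert_eq!(classify_block_handler(0), "none");
    assert_eq!(classify_block_handler(0x1001), "iseq block");
    assert_eq!(classify_block_handler(0x1003), "ifunc block");
    assert_eq!(classify_block_handler(0x1000), "object (Symbol or Proc)");
}
```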
@@ -9258,13 +9279,13 @@ fn gen_invokesuper_specialized(

     // Fallback to dynamic dispatch if this callsite is megamorphic
     if asm.ctx.get_chain_depth() >= SEND_MAX_DEPTH {
-        gen_counter_incr(asm, Counter::invokesuper_megamorphic);
+        gen_counter_incr(jit, asm, Counter::invokesuper_megamorphic);
         return None;
     }

     let me = unsafe { rb_vm_frame_method_entry(jit.get_cfp()) };
     if me.is_null() {
-        gen_counter_incr(asm, Counter::invokesuper_no_me);
+        gen_counter_incr(jit, asm, Counter::invokesuper_no_me);
         return None;
     }

@@ -9277,7 +9298,7 @@ fn gen_invokesuper_specialized(
     if current_defined_class.builtin_type() == RUBY_T_ICLASS
         && unsafe { RB_TYPE_P((*rbasic_ptr).klass, RUBY_T_MODULE) && FL_TEST_RAW((*rbasic_ptr).klass, VALUE(RMODULE_IS_REFINEMENT.as_usize())) != VALUE(0) }
     {
-        gen_counter_incr(asm, Counter::invokesuper_refinement);
+        gen_counter_incr(jit, asm, Counter::invokesuper_refinement);
         return None;
     }
     let comptime_superclass =
@@ -9292,15 +9313,15 @@ fn gen_invokesuper_specialized(
     // Note, not using VM_CALL_ARGS_SIMPLE because sometimes we pass a block.

     if ci_flags & VM_CALL_KWARG != 0 {
-        gen_counter_incr(asm, Counter::invokesuper_kwarg);
+        gen_counter_incr(jit, asm, Counter::invokesuper_kwarg);
         return None;
     }
     if ci_flags & VM_CALL_KW_SPLAT != 0 {
-        gen_counter_incr(asm, Counter::invokesuper_kw_splat);
+        gen_counter_incr(jit, asm, Counter::invokesuper_kw_splat);
         return None;
     }
     if ci_flags & VM_CALL_FORWARDING != 0 {
-        gen_counter_incr(asm, Counter::invokesuper_forwarding);
+        gen_counter_incr(jit, asm, Counter::invokesuper_forwarding);
         return None;
     }

@@ -9311,20 +9332,20 @@ fn gen_invokesuper_specialized(
     // check and side exit.
     let comptime_recv = jit.peek_at_stack(&asm.ctx, argc as isize);
     if unsafe { rb_obj_is_kind_of(comptime_recv, current_defined_class) } == VALUE(0) {
-        gen_counter_incr(asm, Counter::invokesuper_defined_class_mismatch);
+        gen_counter_incr(jit, asm, Counter::invokesuper_defined_class_mismatch);
         return None;
     }

     // Don't compile `super` on objects with singleton class to avoid retaining the receiver.
     if VALUE(0) != unsafe { FL_TEST(comptime_recv.class_of(), VALUE(RUBY_FL_SINGLETON as usize)) } {
-        gen_counter_incr(asm, Counter::invokesuper_singleton_class);
+        gen_counter_incr(jit, asm, Counter::invokesuper_singleton_class);
         return None;
     }

     // Do method lookup
     let cme = unsafe { rb_callable_method_entry(comptime_superclass, mid) };
     if cme.is_null() {
-        gen_counter_incr(asm, Counter::invokesuper_no_cme);
+        gen_counter_incr(jit, asm, Counter::invokesuper_no_cme);
         return None;
     }

@@ -9332,7 +9353,7 @@ fn gen_invokesuper_specialized(
     let cme_def_type = unsafe { get_cme_def_type(cme) };
     if cme_def_type != VM_METHOD_TYPE_ISEQ && cme_def_type != VM_METHOD_TYPE_CFUNC {
         // others unimplemented
-        gen_counter_incr(asm, Counter::invokesuper_not_iseq_or_cfunc);
+        gen_counter_incr(jit, asm, Counter::invokesuper_not_iseq_or_cfunc);
         return None;
     }

@@ -9799,7 +9820,7 @@ fn gen_opt_getconstant_path(
     } else {
         // Optimize for single ractor mode.
         if !assume_single_ractor_mode(jit, asm) {
-            gen_counter_incr(asm, Counter::opt_getconstant_path_multi_ractor);
+            gen_counter_incr(jit, asm, Counter::opt_getconstant_path_multi_ractor);
             return None;
         }

@@ -9839,7 +9860,7 @@ fn gen_getblockparamproxy(
         unsafe { rb_obj_is_proc(comptime_handler) }.test() // block is a Proc
     ) {
         // Missing the symbol case, where we basically need to call Symbol#to_proc at runtime
-        gen_counter_incr(asm, Counter::gbpp_unsupported_type);
+        gen_counter_incr(jit, asm, Counter::gbpp_unsupported_type);
         return None;
     }

@@ -116,8 +116,8 @@ static YJIT_OPTIONS: [(&str, &str); 9] = [
 pub enum TraceExits {
     // Trace all exits
     All,
-    // Trace a specific counted exit
-    CountedExit(Counter),
+    // Trace a specific counter
+    Counter(Counter),
 }

 #[derive(Debug)]
@@ -293,7 +293,7 @@ pub fn parse_option(str_ptr: *const std::os::raw::c_char) -> Option<()> {
     OPTIONS.trace_exits = match opt_val {
         "" => Some(TraceExits::All),
         name => match Counter::get(name) {
-            Some(counter) => Some(TraceExits::CountedExit(counter)),
+            Some(counter) => Some(TraceExits::Counter(counter)),
             None => return None,
         },
     };
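A standalone sketch of the mapping this hunk implements, with `Counter` reduced to a two-variant stand-in (only the match shape mirrors the real `parse_option`; the counter names come from the call sites above):

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Counter {
    SendSendNested,
    InvokesuperMegamorphic,
}

impl Counter {
    // Stand-in for the real Counter::get, which resolves a counter name
    // passed on the command line, e.g. --yjit-trace-exits=send_send_nested.
    fn get(name: &str) -> Option<Counter> {
        match name {
            "send_send_nested" => Some(Counter::SendSendNested),
            "invokesuper_megamorphic" => Some(Counter::InvokesuperMegamorphic),
            _ => None,
        }
    }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TraceExits {
    All,
    Counter(Counter),
}

// An empty value traces all exits; a known counter name traces only that
// counter; an unknown name rejects the option.
fn parse_trace_exits(opt_val: &str) -> Option<TraceExits> {
    match opt_val {
        "" => Some(TraceExits::All),
        name => Counter::get(name).map(TraceExits::Counter),
    }
}

fn main() {
    assert_eq!(parse_trace_exits(""), Some(TraceExits::All));
    assert_eq!(
        parse_trace_exits("send_send_nested"),
        Some(TraceExits::Counter(Counter::SendSendNested))
    );
    assert_eq!(parse_trace_exits("no_such_counter"), None);
}
```

Passing a fallback counter name such as `send_send_nested` thus selects `TraceExits::Counter`, so only exit stacks attributed to that fallback are recorded.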