YJIT: Allow non-leaf calls on opt_* insns (#10033)

* YJIT: Allow non-leaf calls on opt_* insns

* s/on_send_insn/is_sendish/

* Repeat known_cfunc_codegen
Takashi Kokubun 2024-02-21 12:24:18 -08:00 committed by GitHub
parent 5c02d97780
commit 577d07cfc6
2 changed files with 21 additions and 9 deletions

@@ -1580,11 +1580,6 @@ impl Assembler
     pub fn expect_leaf_ccall(&mut self) {
         self.leaf_ccall = true;
     }
-
-    /// Undo expect_leaf_ccall() as an exception.
-    pub fn allow_non_leaf_ccall(&mut self) {
-        self.leaf_ccall = false;
-    }
 }
 
 /// A struct that allows iterating through an assembler's instructions and
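The deleted allow_non_leaf_ccall() was an escape hatch that turned the leaf_ccall assertion back off after expect_leaf_ccall() had armed it; it can go away because the assertion is now only armed for send-ish instructions in the first place. As a rough illustration (a minimal sketch, not the actual backend code), a scoped guard over the leaf_ccall flag, along the lines of the with_leaf_ccall() helper used later in this diff, could look like this:

// Minimal sketch, assuming with_leaf_ccall() saves and restores the flag
// around a closure; this is not the real yjit Assembler.
struct Assembler {
    leaf_ccall: bool,
}

impl Assembler {
    /// Run `body` with the leaf-ccall assertion armed, then restore the old state.
    fn with_leaf_ccall<R>(&mut self, body: impl FnOnce(&mut Self) -> R) -> R {
        let old = self.leaf_ccall;
        self.leaf_ccall = true;
        let ret = body(self);
        self.leaf_ccall = old;
        ret
    }
}

fn main() {
    let mut asm = Assembler { leaf_ccall: false };
    let armed = asm.with_leaf_ccall(|asm| asm.leaf_ccall);
    assert!(armed);            // the flag is set inside the guard
    assert!(!asm.leaf_ccall);  // and restored once the closure returns
}

Because the flag is restored when the closure returns, code that may legitimately make non-leaf calls simply runs outside the guard instead of undoing it.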

@@ -298,6 +298,16 @@ impl JITState
             }
         }
     }
+
+    /// Return true if we're compiling a send-like instruction, not an opt_* instruction.
+    pub fn is_sendish(&self) -> bool {
+        match unsafe { rb_iseq_opcode_at_pc(self.iseq, self.pc) } as u32 {
+            YARVINSN_send |
+            YARVINSN_opt_send_without_block |
+            YARVINSN_invokesuper => true,
+            _ => false,
+        }
+    }
 }
 
 /// Macro to call jit.perf_symbol_push() without evaluating arguments when
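is_sendish() classifies the opcode at the PC being compiled: send, opt_send_without_block, and invokesuper push a control frame in the interpreter, while opt_* specializations such as opt_ltlt do not. Here is a self-contained sketch of the same check, with made-up opcode values standing in for the generated YARVINSN_* bindings:

// Stand-in opcode values; in YJIT these constants come from the generated
// cruby bindings and the opcode is read with rb_iseq_opcode_at_pc().
const SEND: u32 = 0;
const OPT_SEND_WITHOUT_BLOCK: u32 = 1;
const INVOKESUPER: u32 = 2;
const OPT_LTLT: u32 = 3;

/// True for instructions that push a frame in the interpreter ("send-ish").
fn is_sendish(opcode: u32) -> bool {
    matches!(opcode, SEND | OPT_SEND_WITHOUT_BLOCK | INVOKESUPER)
}

fn main() {
    assert!(is_sendish(SEND));
    assert!(!is_sendish(OPT_LTLT)); // opt_* insns are not send-ish
}

The matches! form is just a compact equivalent of the match expression in the diff; the behavior is the same.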
@@ -5525,10 +5535,9 @@ fn jit_rb_str_concat(
     guard_object_is_string(asm, asm.stack_opnd(0), StackOpnd(0), Counter::guard_send_not_string);
 
     // Guard buffers from GC since rb_str_buf_append may allocate.
-    jit_prepare_non_leaf_call(jit, asm);
     // rb_str_buf_append may raise Encoding::CompatibilityError, but we accept compromised
     // backtraces on this method since the interpreter does the same thing on opt_ltlt.
-    asm.allow_non_leaf_ccall();
+    jit_prepare_non_leaf_call(jit, asm);
 
     asm.spill_temps(); // For ccall. Unconditionally spill them for RegTemps consistency.
     let concat_arg = asm.stack_pop(1);
@@ -6088,9 +6097,17 @@ fn gen_send_cfunc(
     let expected_stack_after = asm.ctx.get_stack_size() as i32 - argc;
     if let Some(known_cfunc_codegen) = lookup_cfunc_codegen(unsafe { (*cme).def }) {
         // We don't push a frame for specialized cfunc codegen, so the generated code must be leaf.
-        if asm.with_leaf_ccall(|asm|
+        // However, the interpreter doesn't push a frame on opt_* instruction either, so we allow
+        // non-sendish instructions to break this rule as an exception.
+        let cfunc_codegen = if jit.is_sendish() {
+            asm.with_leaf_ccall(|asm|
+                perf_call!("gen_send_cfunc: ", known_cfunc_codegen(jit, asm, ocb, ci, cme, block, argc, recv_known_class))
+            )
+        } else {
             perf_call!("gen_send_cfunc: ", known_cfunc_codegen(jit, asm, ocb, ci, cme, block, argc, recv_known_class))
-        ) {
+        };
+
+        if cfunc_codegen {
             assert_eq!(expected_stack_after, asm.ctx.get_stack_size() as i32);
             gen_counter_incr(asm, Counter::num_send_cfunc_inline);
             // cfunc codegen generated code. Terminate the block so
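Note that the known_cfunc_codegen call is spelled out once per branch (the "Repeat known_cfunc_codegen" item in the commit message) rather than factored into a shared helper, presumably to keep the borrows of jit and asm inside the perf_call! expansion simple. Only the send-ish branch wraps the call in with_leaf_ccall, so opt_* callers reach specialized codegen such as jit_rb_str_concat with the leaf assertion disarmed, matching the interpreter, which does not push a frame for those instructions either.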