YJIT: Let Assembler own Context (#7691)

* YJIT: Let Assembler own Context

* Update a comment

Co-authored-by: Maxime Chevalier-Boisvert <maximechevalierb@gmail.com>

---------

Co-authored-by: Maxime Chevalier-Boisvert <maximechevalierb@gmail.com>
Author: Takashi Kokubun
Date:   2023-04-12 12:11:44 -07:00 (committed by GitHub)
parent 0ac3f2c20e
commit 00bbd31edb
Notes (git, 2023-04-12 19:12:07 +00:00):
    Merged-By: k0kubun <takashikkbn@gmail.com>
5 changed files with 809 additions and 951 deletions
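
The gist of the change, as a rough before/after sketch (illustrative only: the variable names and the surrounding codegen context are placeholders, not lines from this commit):

    // Before: codegen threaded a separate Context alongside the Assembler
    let arg = ctx.stack_pop(1);
    let out = ctx.stack_push(asm, Type::Fixnum);
    asm.spill_temps(ctx);
    defer_compilation(jit, ctx, asm, ocb);

    // After: the Assembler owns its Context, so stack bookkeeping goes
    // through asm and reads/writes asm.ctx
    let arg = asm.stack_pop(1);
    let out = asm.stack_push(Type::Fixnum);
    asm.spill_temps();
    defer_compilation(jit, asm, ocb);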


@@ -1210,7 +1210,7 @@ mod tests {
     fn test_emit_cpop_all() {
         let (mut asm, mut cb) = setup_asm();

-        asm.cpop_all(&Context::default());
+        asm.cpop_all();
         asm.compile_with_num_regs(&mut cb, 0);
     }


@@ -896,6 +896,9 @@ pub struct Assembler
     /// Names of labels
     pub(super) label_names: Vec<String>,
+
+    /// Context for generating the current insn
+    pub ctx: Context,
 }

 impl Assembler
@@ -909,7 +912,8 @@ impl Assembler
             insns: Vec::default(),
             live_ranges: Vec::default(),
             reg_temps: Vec::default(),
-            label_names
+            label_names,
+            ctx: Context::default(),
         }
     }
@@ -1054,12 +1058,12 @@ impl Assembler
     }

     /// Allocate a register to a stack temp if available.
-    pub fn alloc_temp_reg(&mut self, ctx: &mut Context, stack_idx: u8) {
+    pub fn alloc_temp_reg(&mut self, stack_idx: u8) {
         if get_option!(num_temp_regs) == 0 {
             return;
         }

-        assert_eq!(self.get_reg_temps(), ctx.get_reg_temps());
+        assert_eq!(self.get_reg_temps(), self.ctx.get_reg_temps());
         let mut reg_temps = self.get_reg_temps();

         // Allocate a register if there's no conflict.
@@ -1068,17 +1072,17 @@ impl Assembler
         } else {
             reg_temps.set(stack_idx, true);
             self.set_reg_temps(reg_temps);
-            ctx.set_reg_temps(reg_temps);
+            self.ctx.set_reg_temps(reg_temps);
         }
     }

     /// Spill all live stack temps from registers to the stack
-    pub fn spill_temps(&mut self, ctx: &mut Context) {
-        assert_eq!(self.get_reg_temps(), ctx.get_reg_temps());
+    pub fn spill_temps(&mut self) {
+        assert_eq!(self.get_reg_temps(), self.ctx.get_reg_temps());

         // Forget registers above the stack top
         let mut reg_temps = self.get_reg_temps();
-        for stack_idx in ctx.get_stack_size()..MAX_REG_TEMPS {
+        for stack_idx in self.ctx.get_stack_size()..MAX_REG_TEMPS {
             reg_temps.set(stack_idx, false);
         }
         self.set_reg_temps(reg_temps);
@@ -1086,17 +1090,17 @@ impl Assembler
         // Spill live stack temps
         if self.get_reg_temps() != RegTemps::default() {
             self.comment(&format!("spill_temps: {:08b} -> {:08b}", self.get_reg_temps().as_u8(), RegTemps::default().as_u8()));
-            for stack_idx in 0..u8::min(MAX_REG_TEMPS, ctx.get_stack_size()) {
+            for stack_idx in 0..u8::min(MAX_REG_TEMPS, self.ctx.get_stack_size()) {
                 if self.get_reg_temps().get(stack_idx) {
-                    let idx = ctx.get_stack_size() - 1 - stack_idx;
-                    self.spill_temp(ctx.stack_opnd(idx.into()));
+                    let idx = self.ctx.get_stack_size() - 1 - stack_idx;
+                    self.spill_temp(self.ctx.stack_opnd(idx.into()));
                 }
             }
         }

         // Every stack temp should have been spilled
         assert_eq!(self.get_reg_temps(), RegTemps::default());
-        ctx.set_reg_temps(self.get_reg_temps());
+        self.ctx.set_reg_temps(self.get_reg_temps());
     }

     /// Sets the out field on the various instructions that require allocated
@@ -1500,12 +1504,12 @@ impl Assembler {
         out
     }

-    pub fn cpop_all(&mut self, ctx: &Context) {
+    pub fn cpop_all(&mut self) {
         self.push_insn(Insn::CPopAll);

         // Re-enable ccall's RegTemps assertion disabled by cpush_all.
         // cpush_all + cpop_all preserve all stack temp registers, so it's safe.
-        self.set_reg_temps(ctx.get_reg_temps());
+        self.set_reg_temps(self.ctx.get_reg_temps());
     }

     pub fn cpop_into(&mut self, opnd: Opnd) {
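
For reference, cpush_all/cpop_all bracket C calls that clobber the stack temp registers; after this change the restore side re-reads the register-temp state from the Assembler's own Context instead of a caller-supplied one. A minimal sketch of the calling pattern, mirroring the print helpers in utils.rs (the callee name is a placeholder):

    asm.cpush_all();                                      // save scratch state, disable ccall's RegTemps assertion
    asm.ccall(some_c_function as *const u8, vec![opnd]);  // call into C
    asm.cpop_all();                                       // restore; reg temps are re-read from asm.ctx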

File diff suppressed because it is too large.


@@ -1622,61 +1622,9 @@ impl Context {
         self.set_reg_temps(reg_temps);
     }

-    /// Push one new value on the temp stack with an explicit mapping
-    /// Return a pointer to the new stack top
-    pub fn stack_push_mapping(&mut self, asm: &mut Assembler, (mapping, temp_type): (TempMapping, Type)) -> Opnd {
-        // If type propagation is disabled, store no types
-        if get_option!(no_type_prop) {
-            return self.stack_push_mapping(asm, (mapping, Type::Unknown));
-        }
-
-        let stack_size: usize = self.stack_size.into();
-
-        // Keep track of the type and mapping of the value
-        if stack_size < MAX_TEMP_TYPES {
-            self.temp_mapping[stack_size] = mapping;
-            self.temp_types[stack_size] = temp_type;
-
-            if let MapToLocal(idx) = mapping {
-                assert!((idx as usize) < MAX_LOCAL_TYPES);
-            }
-        }
-
-        // Allocate a register to the stack operand
-        assert_eq!(self.reg_temps, asm.get_reg_temps());
-        if self.stack_size < MAX_REG_TEMPS {
-            asm.alloc_temp_reg(self, self.stack_size);
-        }
-
-        self.stack_size += 1;
-        self.sp_offset += 1;
-
-        return self.stack_opnd(0);
-    }
-
-    /// Push one new value on the temp stack
-    /// Return a pointer to the new stack top
-    pub fn stack_push(&mut self, asm: &mut Assembler, val_type: Type) -> Opnd {
-        return self.stack_push_mapping(asm, (MapToStack, val_type));
-    }
-
-    /// Push the self value on the stack
-    pub fn stack_push_self(&mut self, asm: &mut Assembler) -> Opnd {
-        return self.stack_push_mapping(asm, (MapToSelf, Type::Unknown));
-    }
-
-    /// Push a local variable on the stack
-    pub fn stack_push_local(&mut self, asm: &mut Assembler, local_idx: usize) -> Opnd {
-        if local_idx >= MAX_LOCAL_TYPES {
-            return self.stack_push(asm, Type::Unknown);
-        }
-
-        return self.stack_push_mapping(asm, (MapToLocal((local_idx as u8).into()), Type::Unknown));
-    }
-
     // Pop N values off the stack
     // Return a pointer to the stack top before the pop operation
-    pub fn stack_pop(&mut self, n: usize) -> Opnd {
+    fn stack_pop(&mut self, n: usize) -> Opnd {
         assert!(n <= self.stack_size.into());

         let top = self.stack_opnd(0);
@@ -1978,6 +1926,66 @@ impl Context {
     }
 }

+impl Assembler {
+    /// Push one new value on the temp stack with an explicit mapping
+    /// Return a pointer to the new stack top
+    pub fn stack_push_mapping(&mut self, (mapping, temp_type): (TempMapping, Type)) -> Opnd {
+        // If type propagation is disabled, store no types
+        if get_option!(no_type_prop) {
+            return self.stack_push_mapping((mapping, Type::Unknown));
+        }
+
+        let stack_size: usize = self.ctx.stack_size.into();
+
+        // Keep track of the type and mapping of the value
+        if stack_size < MAX_TEMP_TYPES {
+            self.ctx.temp_mapping[stack_size] = mapping;
+            self.ctx.temp_types[stack_size] = temp_type;
+
+            if let MapToLocal(idx) = mapping {
+                assert!((idx as usize) < MAX_LOCAL_TYPES);
+            }
+        }
+
+        // Allocate a register to the stack operand
+        assert_eq!(self.ctx.reg_temps, self.get_reg_temps());
+        if self.ctx.stack_size < MAX_REG_TEMPS {
+            self.alloc_temp_reg(self.ctx.stack_size);
+        }
+
+        self.ctx.stack_size += 1;
+        self.ctx.sp_offset += 1;
+
+        return self.ctx.stack_opnd(0);
+    }
+
+    /// Push one new value on the temp stack
+    /// Return a pointer to the new stack top
+    pub fn stack_push(&mut self, val_type: Type) -> Opnd {
+        return self.stack_push_mapping((MapToStack, val_type));
+    }
+
+    /// Push the self value on the stack
+    pub fn stack_push_self(&mut self) -> Opnd {
+        return self.stack_push_mapping((MapToSelf, Type::Unknown));
+    }
+
+    /// Push a local variable on the stack
+    pub fn stack_push_local(&mut self, local_idx: usize) -> Opnd {
+        if local_idx >= MAX_LOCAL_TYPES {
+            return self.stack_push(Type::Unknown);
+        }
+
+        return self.stack_push_mapping((MapToLocal((local_idx as u8).into()), Type::Unknown));
+    }
+
+    // Pop N values off the stack
+    // Return a pointer to the stack top before the pop operation
+    pub fn stack_pop(&mut self, n: usize) -> Opnd {
+        self.ctx.stack_pop(n)
+    }
+}
+
 impl BlockId {
     /// Print Ruby source location for debugging
     #[cfg(debug_assertions)]
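
In other words, code that used to mutate a Context directly now calls the same methods on the Assembler, which forwards to its owned ctx. A small sketch of the new call pattern, modeled on the updated test further below (the comments are added here for illustration):

    let mut asm = Assembler::new();
    asm.stack_push(Type::Fixnum);                        // updates asm.ctx (stack size, temp types, reg temps)
    let top_type = asm.ctx.get_opnd_type(StackOpnd(0));  // type info is read from asm.ctx
    let top = asm.stack_pop(1);                          // delegates to Context::stack_pop via asm.ctx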
@@ -2556,6 +2564,7 @@ fn gen_branch_stub(
     let stub_addr = ocb.get_write_ptr();

     let mut asm = Assembler::new();
+    asm.ctx = ctx.clone();
     asm.set_reg_temps(ctx.reg_temps);
     asm.comment("branch stub hit");
@@ -2566,7 +2575,7 @@
     }

     // Spill temps to the VM stack as well for jit.peek_at_stack()
-    asm.spill_temps(&mut ctx.clone());
+    asm.spill_temps();

     // Set up the arguments unique to this stub for:
     //
@@ -2756,15 +2765,14 @@ pub fn gen_direct_jump(jit: &mut JITState, ctx: &Context, target0: BlockId, asm:
 /// Create a stub to force the code up to this point to be executed
 pub fn defer_compilation(
     jit: &mut JITState,
-    cur_ctx: &Context,
     asm: &mut Assembler,
     ocb: &mut OutlinedCb,
 ) {
-    if cur_ctx.chain_depth != 0 {
+    if asm.ctx.chain_depth != 0 {
         panic!("Double defer!");
     }

-    let mut next_ctx = cur_ctx.clone();
+    let mut next_ctx = asm.ctx.clone();

     if next_ctx.chain_depth == u8::MAX {
         panic!("max block version chain depth reached!");
@@ -3169,9 +3177,9 @@ mod tests {
         assert_eq!(Context::default().diff(&Context::default()), TypeDiff::Compatible(0));

         // Try pushing an operand and getting its type
-        let mut ctx = Context::default();
-        ctx.stack_push(&mut Assembler::new(), Type::Fixnum);
-        let top_type = ctx.get_opnd_type(StackOpnd(0));
+        let mut asm = Assembler::new();
+        asm.stack_push(Type::Fixnum);
+        let top_type = asm.ctx.get_opnd_type(StackOpnd(0));
         assert!(top_type == Type::Fixnum);

         // TODO: write more tests for Context type diff


@@ -1,7 +1,6 @@
 #![allow(dead_code)] // Some functions for print debugging in here

 use crate::backend::ir::*;
-use crate::core::Context;
 use crate::cruby::*;
 use std::slice;
@@ -142,7 +141,7 @@ macro_rules! c_callable {
 }
 pub(crate) use c_callable;

-pub fn print_int(asm: &mut Assembler, ctx: &Context, opnd: Opnd) {
+pub fn print_int(asm: &mut Assembler, opnd: Opnd) {
     c_callable!{
         fn print_int_fn(val: i64) {
             println!("{}", val);
@@ -165,11 +164,11 @@ pub fn print_int(asm: &mut Assembler, ctx: &Context, opnd: Opnd) {
     };

     asm.ccall(print_int_fn as *const u8, vec![argument]);
-    asm.cpop_all(ctx);
+    asm.cpop_all();
 }

 /// Generate code to print a pointer
-pub fn print_ptr(asm: &mut Assembler, ctx: &Context, opnd: Opnd) {
+pub fn print_ptr(asm: &mut Assembler, opnd: Opnd) {
     c_callable!{
         fn print_ptr_fn(ptr: *const u8) {
             println!("{:p}", ptr);
@@ -180,11 +179,11 @@ pub fn print_ptr(asm: &mut Assembler, ctx: &Context, opnd: Opnd) {
     asm.cpush_all();
     asm.ccall(print_ptr_fn as *const u8, vec![opnd]);
-    asm.cpop_all(ctx);
+    asm.cpop_all();
 }

 /// Generate code to print a value
-pub fn print_value(asm: &mut Assembler, ctx: &Context, opnd: Opnd) {
+pub fn print_value(asm: &mut Assembler, opnd: Opnd) {
     c_callable!{
         fn print_value_fn(val: VALUE) {
             unsafe { rb_obj_info_dump(val) }
@@ -195,11 +194,11 @@ pub fn print_value(asm: &mut Assembler, ctx: &Context, opnd: Opnd) {
     asm.cpush_all();
     asm.ccall(print_value_fn as *const u8, vec![opnd]);
-    asm.cpop_all(ctx);
+    asm.cpop_all();
 }

 /// Generate code to print constant string to stdout
-pub fn print_str(asm: &mut Assembler, ctx: &Context, str: &str) {
+pub fn print_str(asm: &mut Assembler, str: &str) {
     c_callable!{
         fn print_str_cfun(ptr: *const u8, num_bytes: usize) {
             unsafe {
@@ -223,7 +222,7 @@ pub fn print_str(asm: &mut Assembler, ctx: &Context, str: &str) {
     let opnd = asm.lea_label(string_data);
     asm.ccall(print_str_cfun as *const u8, vec![opnd, Opnd::UImm(str.len() as u64)]);
-    asm.cpop_all(ctx);
+    asm.cpop_all();
 }

 #[cfg(test)]
@@ -263,7 +262,7 @@ mod tests {
         let mut asm = Assembler::new();
        let mut cb = CodeBlock::new_dummy(1024);

-        print_int(&mut asm, &Context::default(), Opnd::Imm(42));
+        print_int(&mut asm, Opnd::Imm(42));
         asm.compile(&mut cb);
     }
@@ -272,7 +271,7 @@ mod tests {
         let mut asm = Assembler::new();
         let mut cb = CodeBlock::new_dummy(1024);

-        print_str(&mut asm, &Context::default(), "Hello, world!");
+        print_str(&mut asm, "Hello, world!");
         asm.compile(&mut cb);
     }
 }
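
With the Context parameter dropped from the debug-print helpers, call sites only pass the Assembler and the value to print; a hedged usage sketch (the operand and message here are made up for illustration):

    print_value(&mut asm, recv_opnd);        // dump a VALUE via rb_obj_info_dump
    print_str(&mut asm, "hit this branch");  // print a constant string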