Rename RB_OBJ_SHAPE -> rb_obj_shape

`RB_OBJ_SHAPE_ID` is renamed to `rb_obj_shape_id` as well,
and `RSHAPE` is now a simple alias for `rb_shape_lookup`.
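
For reference, the renamed entry points end up looking roughly like this in
shape.h (a condensed sketch assembled from the hunks below, not the full
header):

    /* shape.h after this change, abridged */
    #define RSHAPE rb_shape_lookup

    RUBY_FUNC_EXPORTED rb_shape_t *rb_shape_lookup(shape_id_t shape_id);
    RUBY_FUNC_EXPORTED shape_id_t rb_obj_shape_id(VALUE obj);

    static inline rb_shape_t *
    rb_obj_shape(VALUE obj)
    {
        return RSHAPE(rb_obj_shape_id(obj));
    }

Callers that used `RB_OBJ_SHAPE(obj)` now call `rb_obj_shape(obj)`, and the
YJIT/ZJIT bindings list `rb_shape_lookup` and `rb_obj_shape_id` instead of the
old macro-style names.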

I tried to turn all of these into `static inline` functions, but I'm having
trouble with `RUBY_EXTERN rb_shape_tree_t *rb_shape_tree_ptr;` not being
exposed as I'd expect.
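
The fully-inline direction would look something like the sketch below; it is
NOT part of this commit, and the `shape_list` field name is an assumption based
on shape.c's internal layout. It only works if the tree pointer is actually
visible to every includer, which is the exposure problem mentioned above:

    /* Hypothetical only: rb_shape_lookup as a static inline in shape.h. */
    RUBY_EXTERN rb_shape_tree_t *rb_shape_tree_ptr;

    static inline rb_shape_t *
    rb_shape_lookup_inline(shape_id_t shape_id)
    {
        RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
        /* Assumes the shape tree stores shapes in a flat shape_list array. */
        return &rb_shape_tree_ptr->shape_list[shape_id];
    }
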
Author: Jean Boussier
Date:   2025-05-09 08:58:07 +02:00
Parent: 0b81359b3f
Commit: ea77250847
Notes (git): 2025-05-09 08:23:05 +00:00

14 changed files with 68 additions and 67 deletions


@@ -414,7 +414,7 @@ dump_object(VALUE obj, struct dump_config *dc)
dump_append(dc, obj_type(obj));
dump_append(dc, "\"");
size_t shape_id = RB_OBJ_SHAPE_ID(obj);
size_t shape_id = rb_obj_shape_id(obj);
dump_append(dc, ", \"shape_id\":");
dump_append_sizet(dc, shape_id);

gc.c

@@ -367,7 +367,7 @@ rb_gc_shutdown_call_finalizer_p(VALUE obj)
uint32_t
rb_gc_get_shape(VALUE obj)
{
return (uint32_t)RB_OBJ_SHAPE_ID(obj);
return (uint32_t)rb_obj_shape_id(obj);
}
void
@@ -379,7 +379,7 @@ rb_gc_set_shape(VALUE obj, uint32_t shape_id)
uint32_t
rb_gc_rebuild_shape(VALUE obj, size_t heap_id)
{
shape_id_t orig_shape_id = RB_OBJ_SHAPE_ID(obj);
shape_id_t orig_shape_id = rb_obj_shape_id(obj);
if (rb_shape_id_too_complex_p(orig_shape_id)) {
return (uint32_t)orig_shape_id;
}
@@ -1815,7 +1815,7 @@ static VALUE
object_id(VALUE obj)
{
VALUE id = Qfalse;
rb_shape_t *shape = RB_OBJ_SHAPE(obj);
rb_shape_t *shape = rb_obj_shape(obj);
unsigned int lock_lev;
// We could avoid locking if the object isn't shareable


@@ -713,18 +713,18 @@ has_ivars(VALUE obj, VALUE encname, VALUE *ivobj)
static void
w_ivar_each(VALUE obj, st_index_t num, struct dump_call_arg *arg)
{
shape_id_t shape_id = RB_OBJ_SHAPE_ID(arg->obj);
shape_id_t shape_id = rb_obj_shape_id(arg->obj);
struct w_ivar_arg ivarg = {arg, num};
if (!num) return;
rb_ivar_foreach(obj, w_obj_each, (st_data_t)&ivarg);
shape_id_t actual_shape_id = RB_OBJ_SHAPE_ID(arg->obj);
shape_id_t actual_shape_id = rb_obj_shape_id(arg->obj);
if (shape_id != actual_shape_id) {
// If the shape tree got _shorter_ then we probably removed an IV
// If the shape tree got longer, then we probably added an IV.
// The exception message might not be accurate when someone adds and
// removes the same number of IVs, but they will still get an exception
if (rb_shape_depth(shape_id) > rb_shape_depth(RB_OBJ_SHAPE_ID(arg->obj))) {
if (rb_shape_depth(shape_id) > rb_shape_depth(rb_obj_shape_id(arg->obj))) {
rb_raise(rb_eRuntimeError, "instance variable removed from %"PRIsVALUE" instance",
CLASS_OF(arg->obj));
}


@@ -132,7 +132,7 @@ rb_class_allocate_instance(VALUE klass)
T_OBJECT | ROBJECT_EMBED | (RGENGC_WB_PROTECTED_OBJECT ? FL_WB_PROTECTED : 0), size, 0);
VALUE obj = (VALUE)o;
RUBY_ASSERT(RB_OBJ_SHAPE(obj)->type == SHAPE_ROOT);
RUBY_ASSERT(rb_obj_shape(obj)->type == SHAPE_ROOT);
// Set the shape to the specific T_OBJECT shape.
ROBJECT_SET_SHAPE_ID(obj, (shape_id_t)(rb_gc_heap_id_for_size(size) + FIRST_T_OBJECT_SHAPE_ID));
@@ -335,7 +335,7 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
return;
}
rb_shape_t *src_shape = RB_OBJ_SHAPE(obj);
rb_shape_t *src_shape = rb_obj_shape(obj);
if (rb_shape_too_complex_p(src_shape)) {
// obj is TOO_COMPLEX so we can copy its iv_hash
@@ -350,7 +350,7 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
}
rb_shape_t *shape_to_set_on_dest = src_shape;
rb_shape_t *initial_shape = RB_OBJ_SHAPE(dest);
rb_shape_t *initial_shape = rb_obj_shape(dest);
if (initial_shape->heap_index != src_shape->heap_index || !rb_shape_canonical_p(src_shape)) {
RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);

shape.c

@@ -340,7 +340,7 @@ rb_shape_each_shape(each_shape_callback callback, void *data)
}
RUBY_FUNC_EXPORTED rb_shape_t *
RSHAPE(shape_id_t shape_id)
rb_shape_lookup(shape_id_t shape_id)
{
RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
@@ -352,7 +352,7 @@ shape_id_t rb_generic_shape_id(VALUE obj);
#endif
RUBY_FUNC_EXPORTED shape_id_t
RB_OBJ_SHAPE_ID(VALUE obj)
rb_obj_shape_id(VALUE obj)
{
if (RB_SPECIAL_CONST_P(obj)) {
return SPECIAL_CONST_SHAPE_ID;
@@ -388,12 +388,6 @@ rb_shape_depth(shape_id_t shape_id)
return depth;
}
rb_shape_t *
RB_OBJ_SHAPE(VALUE obj)
{
return RSHAPE(RB_OBJ_SHAPE_ID(obj));
}
static rb_shape_t *
shape_alloc(void)
{
@@ -641,7 +635,7 @@ remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
bool
rb_shape_transition_remove_ivar(VALUE obj, ID id, VALUE *removed)
{
rb_shape_t *shape = RB_OBJ_SHAPE(obj);
rb_shape_t *shape = rb_obj_shape(obj);
if (UNLIKELY(rb_shape_too_complex_p(shape))) {
return false;
@@ -701,7 +695,7 @@ rb_shape_transition_frozen(VALUE obj)
{
RUBY_ASSERT(RB_OBJ_FROZEN(obj));
shape_id_t shape_id = RB_OBJ_SHAPE_ID(obj);
shape_id_t shape_id = rb_obj_shape_id(obj);
if (shape_id == ROOT_SHAPE_ID) {
return SPECIAL_CONST_SHAPE_ID;
}
@@ -741,7 +735,7 @@ shape_transition_too_complex(rb_shape_t *original_shape)
shape_id_t
rb_shape_transition_complex(VALUE obj)
{
rb_shape_t *original_shape = RB_OBJ_SHAPE(obj);
rb_shape_t *original_shape = rb_obj_shape(obj);
return rb_shape_id(shape_transition_too_complex(original_shape));
}
@@ -754,7 +748,7 @@ rb_shape_has_object_id(rb_shape_t *shape)
rb_shape_t *
rb_shape_object_id_shape(VALUE obj)
{
rb_shape_t* shape = RB_OBJ_SHAPE(obj);
rb_shape_t* shape = rb_obj_shape(obj);
RUBY_ASSERT(shape);
if (shape->flags & SHAPE_FL_HAS_OBJECT_ID) {
@@ -844,13 +838,13 @@ shape_get_next(rb_shape_t *shape, VALUE obj, ID id, bool emit_warnings)
shape_id_t
rb_shape_transition_add_ivar(VALUE obj, ID id)
{
return rb_shape_id(shape_get_next(RB_OBJ_SHAPE(obj), obj, id, true));
return rb_shape_id(shape_get_next(rb_obj_shape(obj), obj, id, true));
}
shape_id_t
rb_shape_transition_add_ivar_no_warnings(VALUE obj, ID id)
{
return rb_shape_id(shape_get_next(RB_OBJ_SHAPE(obj), obj, id, false));
return rb_shape_id(shape_get_next(rb_obj_shape(obj), obj, id, false));
}
// Same as rb_shape_get_iv_index, but uses a provided valid shape id and index
@@ -1098,7 +1092,7 @@ rb_shape_rebuild_shape(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
RUBY_FUNC_EXPORTED bool
rb_shape_obj_too_complex_p(VALUE obj)
{
return rb_shape_too_complex_p(RB_OBJ_SHAPE(obj));
return rb_shape_too_complex_p(rb_obj_shape(obj));
}
bool
@@ -1261,7 +1255,7 @@ rb_shape_parent(VALUE self)
static VALUE
rb_shape_debug_shape(VALUE self, VALUE obj)
{
return rb_shape_t_to_rb_cShape(RB_OBJ_SHAPE(obj));
return rb_shape_t_to_rb_cShape(rb_obj_shape(obj));
}
static VALUE
@@ -1285,11 +1279,11 @@ rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
return Qnil;
}
VALUE rb_obj_shape(rb_shape_t *shape);
static VALUE shape_to_h(rb_shape_t *shape);
static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
{
rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_obj_shape((rb_shape_t *)value));
rb_hash_aset(*(VALUE *)ref, parse_key(key), shape_to_h((rb_shape_t *)value));
return ID_TABLE_CONTINUE;
}
@@ -1306,8 +1300,8 @@ static VALUE edges(struct rb_id_table* edges)
return hash;
}
VALUE
rb_obj_shape(rb_shape_t *shape)
static VALUE
shape_to_h(rb_shape_t *shape)
{
VALUE rb_shape = rb_hash_new();
@@ -1328,7 +1322,7 @@ rb_obj_shape(rb_shape_t *shape)
static VALUE
shape_transition_tree(VALUE self)
{
return rb_obj_shape(rb_shape_get_root_shape());
return shape_to_h(rb_shape_get_root_shape());
}
static VALUE

shape.h

@@ -149,10 +149,12 @@ RCLASS_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
set_shape_id_in_flags(obj, shape_id);
}
#define RSHAPE rb_shape_lookup
int32_t rb_shape_id_offset(void);
RUBY_FUNC_EXPORTED rb_shape_t *RSHAPE(shape_id_t shape_id);
RUBY_FUNC_EXPORTED shape_id_t RB_OBJ_SHAPE_ID(VALUE obj);
RUBY_FUNC_EXPORTED rb_shape_t *rb_shape_lookup(shape_id_t shape_id);
RUBY_FUNC_EXPORTED shape_id_t rb_obj_shape_id(VALUE obj);
shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
bool rb_shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value);
bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint);
@@ -161,7 +163,6 @@ bool rb_shape_too_complex_p(rb_shape_t *shape);
bool rb_shape_id_too_complex_p(shape_id_t shape_id);
void rb_shape_set_shape(VALUE obj, rb_shape_t *shape);
rb_shape_t *RB_OBJ_SHAPE(VALUE obj);
shape_id_t rb_shape_transition_frozen(VALUE obj);
shape_id_t rb_shape_transition_complex(VALUE obj);
bool rb_shape_transition_remove_ivar(VALUE obj, ID id, VALUE *removed);
@@ -174,6 +175,12 @@ void rb_shape_free_all(void);
rb_shape_t *rb_shape_rebuild_shape(rb_shape_t *initial_shape, rb_shape_t *dest_shape);
static inline rb_shape_t *
rb_obj_shape(VALUE obj)
{
return RSHAPE(rb_obj_shape_id(obj));
}
static inline bool
rb_shape_canonical_p(rb_shape_t *shape)
{
@@ -224,7 +231,7 @@ ROBJECT_FIELDS_COUNT(VALUE obj)
static inline uint32_t
RBASIC_FIELDS_COUNT(VALUE obj)
{
return RSHAPE(RB_OBJ_SHAPE_ID(obj))->next_field_index;
return RSHAPE(rb_obj_shape_id(obj))->next_field_index;
}
shape_id_t rb_shape_traverse_from_new_root(shape_id_t initial_shape_id, shape_id_t orig_shape_id);
@@ -234,7 +241,7 @@ bool rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id);
static inline bool
rb_shape_obj_has_id(VALUE obj)
{
return rb_shape_has_object_id(RB_OBJ_SHAPE(obj));
return rb_shape_has_object_id(rb_obj_shape(obj));
}
// For ext/objspace


@@ -1570,8 +1570,8 @@ rb_obj_init_too_complex(VALUE obj, st_table *table)
{
// This method is meant to be called on newly allocated object.
RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
RUBY_ASSERT(rb_shape_canonical_p(RB_OBJ_SHAPE(obj)));
RUBY_ASSERT(RB_OBJ_SHAPE(obj)->next_field_index == 0);
RUBY_ASSERT(rb_shape_canonical_p(rb_obj_shape(obj)));
RUBY_ASSERT(rb_obj_shape(obj)->next_field_index == 0);
obj_transition_too_complex(obj, table);
}
@@ -1584,7 +1584,7 @@ rb_evict_fields_to_hash(VALUE obj)
RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
rb_shape_t *shape = RB_OBJ_SHAPE(obj);
rb_shape_t *shape = rb_obj_shape(obj);
st_table *table = st_init_numtable_with_size(shape->next_field_index);
rb_obj_copy_fields_to_hash_table(obj, table);
obj_transition_too_complex(obj, table);
@@ -1624,7 +1624,7 @@ general_ivar_set(VALUE obj, ID id, VALUE val, void *data,
.existing = true
};
rb_shape_t *current_shape = RB_OBJ_SHAPE(obj);
rb_shape_t *current_shape = rb_obj_shape(obj);
if (UNLIKELY(rb_shape_too_complex_p(current_shape))) {
goto too_complex;
@@ -1681,7 +1681,7 @@ general_field_set(VALUE obj, rb_shape_t *target_shape, VALUE val, void *data,
void (*transition_too_complex_func)(VALUE, void *),
st_table *(*too_complex_table_func)(VALUE, void *))
{
rb_shape_t *current_shape = RB_OBJ_SHAPE(obj);
rb_shape_t *current_shape = rb_obj_shape(obj);
if (UNLIKELY(rb_shape_too_complex_p(target_shape))) {
if (UNLIKELY(!rb_shape_too_complex_p(current_shape))) {
@@ -1964,7 +1964,7 @@ rb_vm_set_ivar_id(VALUE obj, ID id, VALUE val)
bool
rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id)
{
if (RB_OBJ_SHAPE_ID(obj) == shape_id) {
if (rb_obj_shape_id(obj) == shape_id) {
return false;
}
@@ -2119,7 +2119,7 @@ rb_ivar_defined(VALUE obj, ID id)
return Qtrue;
}
else {
return RBOOL(rb_shape_get_iv_index(RB_OBJ_SHAPE(obj), id, &index));
return RBOOL(rb_shape_get_iv_index(rb_obj_shape(obj), id, &index));
}
}
@@ -2206,7 +2206,7 @@ obj_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, b
.ivar_only = ivar_only,
};
rb_shape_t *shape = RB_OBJ_SHAPE(obj);
rb_shape_t *shape = rb_obj_shape(obj);
if (rb_shape_too_complex_p(shape)) {
rb_st_foreach(ROBJECT_FIELDS_HASH(obj), each_hash_iv, (st_data_t)&itr_data);
}
@@ -2218,7 +2218,7 @@ obj_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, b
static void
gen_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, bool ivar_only)
{
rb_shape_t *shape = RB_OBJ_SHAPE(obj);
rb_shape_t *shape = rb_obj_shape(obj);
struct gen_fields_tbl *fields_tbl;
if (!rb_gen_fields_tbl_get(obj, 0, &fields_tbl)) return;
@@ -2243,7 +2243,7 @@ class_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg,
{
RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
rb_shape_t *shape = RB_OBJ_SHAPE(obj);
rb_shape_t *shape = rb_obj_shape(obj);
struct iv_itr_data itr_data = {
.obj = obj,
.arg = arg,
@@ -2276,7 +2276,7 @@ rb_copy_generic_ivar(VALUE dest, VALUE obj)
goto clear;
}
rb_shape_t *src_shape = RB_OBJ_SHAPE(obj);
rb_shape_t *src_shape = rb_obj_shape(obj);
if (rb_gen_fields_tbl_get(obj, 0, &obj_fields_tbl)) {
if (gen_fields_tbl_count(obj, obj_fields_tbl) == 0)
@@ -2297,7 +2297,7 @@ rb_copy_generic_ivar(VALUE dest, VALUE obj)
}
rb_shape_t *shape_to_set_on_dest = src_shape;
rb_shape_t *initial_shape = RB_OBJ_SHAPE(dest);
rb_shape_t *initial_shape = rb_obj_shape(dest);
if (!rb_shape_canonical_p(src_shape)) {
RUBY_ASSERT(initial_shape->type == SHAPE_ROOT);
@@ -4586,7 +4586,7 @@ rb_fields_tbl_copy(VALUE dst, VALUE src)
RUBY_ASSERT(rb_type(dst) == rb_type(src));
RUBY_ASSERT(RB_TYPE_P(dst, T_CLASS) || RB_TYPE_P(dst, T_MODULE));
RUBY_ASSERT(RB_OBJ_SHAPE(dst)->type == SHAPE_ROOT);
RUBY_ASSERT(rb_obj_shape(dst)->type == SHAPE_ROOT);
RUBY_ASSERT(!RCLASS_FIELDS(dst));
rb_ivar_foreach(src, tbl_copy_i, dst);


@@ -94,8 +94,8 @@ fn main() {
.allowlist_function("rb_bug")
// From shape.h
.allowlist_function("RB_OBJ_SHAPE_ID")
.allowlist_function("RSHAPE")
.allowlist_function("rb_obj_shape_id")
.allowlist_function("rb_shape_lookup")
.allowlist_function("rb_shape_id_offset")
.allowlist_function("rb_shape_get_iv_index")
.allowlist_function("rb_shape_transition_add_ivar_no_warnings")


@@ -2894,7 +2894,7 @@ fn gen_get_ivar(
let ivar_index = unsafe {
let shape_id = comptime_receiver.shape_id_of();
let shape = RSHAPE(shape_id);
let shape = rb_shape_lookup(shape_id);
let mut ivar_index: u32 = 0;
if rb_shape_get_iv_index(shape, ivar_name, &mut ivar_index) {
Some(ivar_index as usize)
@@ -2909,7 +2909,7 @@ fn gen_get_ivar(
// Compile time self is embedded and the ivar index lands within the object
let embed_test_result = unsafe { FL_TEST_RAW(comptime_receiver, VALUE(ROBJECT_EMBED.as_usize())) != VALUE(0) };
let expected_shape = unsafe { RB_OBJ_SHAPE_ID(comptime_receiver) };
let expected_shape = unsafe { rb_obj_shape_id(comptime_receiver) };
let shape_id_offset = unsafe { rb_shape_id_offset() };
let shape_opnd = Opnd::mem(SHAPE_ID_NUM_BITS as u8, recv, shape_id_offset);
@@ -3097,7 +3097,7 @@ fn gen_set_ivar(
let shape_too_complex = comptime_receiver.shape_too_complex();
let ivar_index = if !shape_too_complex {
let shape_id = comptime_receiver.shape_id_of();
let shape = unsafe { RSHAPE(shape_id) };
let shape = unsafe { rb_shape_lookup(shape_id) };
let mut ivar_index: u32 = 0;
if unsafe { rb_shape_get_iv_index(shape, ivar_name, &mut ivar_index) } {
Some(ivar_index as usize)
@@ -3113,7 +3113,7 @@ fn gen_set_ivar(
let new_shape = if !shape_too_complex && receiver_t_object && ivar_index.is_none() {
let current_shape = comptime_receiver.shape_of();
let next_shape_id = unsafe { rb_shape_transition_add_ivar_no_warnings(comptime_receiver, ivar_name) };
let next_shape = unsafe { RSHAPE(next_shape_id) };
let next_shape = unsafe { rb_shape_lookup(next_shape_id) };
// If the VM ran out of shapes, or this class generated too many leaf,
// it may be de-optimized into OBJ_TOO_COMPLEX_SHAPE (hash-table).
@@ -3187,7 +3187,7 @@ fn gen_set_ivar(
// Upgrade type
guard_object_is_heap(asm, recv, recv_opnd, Counter::setivar_not_heap);
let expected_shape = unsafe { RB_OBJ_SHAPE_ID(comptime_receiver) };
let expected_shape = unsafe { rb_obj_shape_id(comptime_receiver) };
let shape_id_offset = unsafe { rb_shape_id_offset() };
let shape_opnd = Opnd::mem(SHAPE_ID_NUM_BITS as u8, recv, shape_id_offset);
@@ -3387,7 +3387,7 @@ fn gen_definedivar(
let shape_id = comptime_receiver.shape_id_of();
let ivar_exists = unsafe {
let shape = RSHAPE(shape_id);
let shape = rb_shape_lookup(shape_id);
let mut ivar_index: u32 = 0;
rb_shape_get_iv_index(shape, ivar_name, &mut ivar_index)
};


@@ -445,12 +445,12 @@ impl VALUE {
}
pub fn shape_id_of(self) -> u32 {
unsafe { RB_OBJ_SHAPE_ID(self) }
unsafe { rb_obj_shape_id(self) }
}
pub fn shape_of(self) -> *mut rb_shape {
unsafe {
let shape = RSHAPE(self.shape_id_of());
let shape = rb_shape_lookup(self.shape_id_of());
if shape.is_null() {
panic!("Shape should not be null");


@@ -1088,8 +1088,8 @@ extern "C" {
pub fn rb_obj_info(obj: VALUE) -> *const ::std::os::raw::c_char;
pub fn rb_ec_stack_check(ec: *mut rb_execution_context_struct) -> ::std::os::raw::c_int;
pub fn rb_shape_id_offset() -> i32;
pub fn RSHAPE(shape_id: shape_id_t) -> *mut rb_shape_t;
pub fn RB_OBJ_SHAPE_ID(obj: VALUE) -> shape_id_t;
pub fn rb_shape_lookup(shape_id: shape_id_t) -> *mut rb_shape_t;
pub fn rb_obj_shape_id(obj: VALUE) -> shape_id_t;
pub fn rb_shape_get_iv_index(shape: *mut rb_shape_t, id: ID, value: *mut attr_index_t) -> bool;
pub fn rb_shape_obj_too_complex_p(obj: VALUE) -> bool;
pub fn rb_shape_too_complex_p(shape: *mut rb_shape_t) -> bool;


@@ -107,8 +107,8 @@ fn main() {
.allowlist_function("rb_bug")
// From shape.h
.allowlist_function("RB_OBJ_SHAPE_ID")
.allowlist_function("RSHAPE")
.allowlist_function("rb_obj_shape_id")
.allowlist_function("rb_shape_lookup")
.allowlist_function("rb_shape_id_offset")
.allowlist_function("rb_shape_get_iv_index")
.allowlist_function("rb_shape_transition_add_ivar_no_warnings")


@@ -482,12 +482,12 @@ impl VALUE {
}
pub fn shape_id_of(self) -> u32 {
unsafe { RB_OBJ_SHAPE_ID(self) }
unsafe { rb_obj_shape_id(self) }
}
pub fn shape_of(self) -> *mut rb_shape {
unsafe {
let shape = RSHAPE(self.shape_id_of());
let shape = rb_shape_lookup(self.shape_id_of());
if shape.is_null() {
panic!("Shape should not be null");


@@ -868,8 +868,8 @@ unsafe extern "C" {
pub fn rb_obj_info(obj: VALUE) -> *const ::std::os::raw::c_char;
pub fn rb_ec_stack_check(ec: *mut rb_execution_context_struct) -> ::std::os::raw::c_int;
pub fn rb_shape_id_offset() -> i32;
pub fn RSHAPE(shape_id: shape_id_t) -> *mut rb_shape_t;
pub fn RB_OBJ_SHAPE_ID(obj: VALUE) -> shape_id_t;
pub fn rb_shape_lookup(shape_id: shape_id_t) -> *mut rb_shape_t;
pub fn rb_obj_shape_id(obj: VALUE) -> shape_id_t;
pub fn rb_shape_get_iv_index(shape: *mut rb_shape_t, id: ID, value: *mut attr_index_t) -> bool;
pub fn rb_shape_obj_too_complex_p(obj: VALUE) -> bool;
pub fn rb_shape_transition_add_ivar_no_warnings(obj: VALUE, id: ID) -> shape_id_t;