Add shape_id to RBasic under 32 bit

This makes `RBasic` 4B larger on 32-bit systems
but simplifies the implementation a lot.

[Feature #21353]

Co-authored-by: Jean Boussier <byroot@ruby-lang.org>
Authored by John Hawthorn on 2025-05-14 11:06:46 -07:00; committed by Jean Boussier
parent 2295384a5a
commit f483befd90
Notes: git 2025-05-26 08:32:06 +00:00
16 changed files with 61 additions and 210 deletions
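
For context, here is a minimal sketch (illustrative names and sizes, not the real ruby/ruby declarations) of where the shape ID lives before and after this change: on 64-bit builds the 32-bit shape ID stays packed in the upper half of the `VALUE`-sized `flags` word, while 32-bit builds get a dedicated word, which is where the 4-byte growth in the commit message comes from.

```c
#include <stdint.h>

#define SHAPE_FLAG_SHIFT 32  /* assumption: the ID occupies the top 32 bits */

/* 64-bit build: no new field; the ID is recovered from flags. */
static inline uint32_t
shape_id_from_flags64(uint64_t flags)
{
    return (uint32_t)(flags >> SHAPE_FLAG_SHIFT);
}

/* 32-bit build after this commit: flags has no spare bits, so the
 * ID gets its own word and the header grows from 8 to 12 bytes. */
struct rbasic32_sketch {
    uint32_t flags;     /* type + GC flags only */
    uint32_t klass;
    uint32_t shape_id;  /* the new 4B field */
};
```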

class.c (12 changes)

@@ -42,10 +42,6 @@
  * 2: RCLASS_PRIME_CLASSEXT_PRIME_WRITABLE
  *    This class's prime classext is the only classext and writable from any namespaces.
  *    If unset, the prime classext is writable only from the root namespace.
- * if !SHAPE_IN_BASIC_FLAGS
- * 4-19: SHAPE_FLAG_MASK
- *       Shape ID for the class.
- * endif
  */
 /* Flags of T_ICLASS
@@ -53,10 +49,6 @@
  * 2: RCLASS_PRIME_CLASSEXT_PRIME_WRITABLE
  *    This module's prime classext is the only classext and writable from any namespaces.
  *    If unset, the prime classext is writable only from the root namespace.
- * if !SHAPE_IN_BASIC_FLAGS
- * 4-19: SHAPE_FLAG_MASK
- *       Shape ID. This is set but not used.
- * endif
  */
 /* Flags of T_MODULE
@@ -71,10 +63,6 @@
  *    If unset, the prime classext is writable only from the root namespace.
  * 3: RMODULE_IS_REFINEMENT
  *    Module is used for refinements.
- * if !SHAPE_IN_BASIC_FLAGS
- * 4-19: SHAPE_FLAG_MASK
- *       Shape ID for the module.
- * endif
  */
 #define METACLASS_OF(k) RBASIC(k)->klass

gc.c (1 change)

@@ -1965,6 +1965,7 @@ build_id2ref_i(VALUE obj, void *data)
         }
         break;
       case T_IMEMO:
+      case T_NONE:
         break;
       default:
        if (rb_shape_obj_has_id(obj)) {

gc/default/default.c

@@ -28,6 +28,7 @@
 #include "ruby/util.h"
 #include "ruby/vm.h"
 #include "ruby/internal/encoding/string.h"
+#include "internal/object.h"
 #include "ccan/list/list.h"
 #include "darray.h"
 #include "gc/gc.h"
@@ -2123,6 +2124,9 @@ newobj_init(VALUE klass, VALUE flags, int wb_protected, rb_objspace_t *objspace,
     GC_ASSERT((flags & FL_WB_PROTECTED) == 0);
     RBASIC(obj)->flags = flags;
     *((VALUE *)&RBASIC(obj)->klass) = klass;
+#if RBASIC_SHAPE_ID_FIELD
+    RBASIC(obj)->shape_id = 0;
+#endif
 
     int t = flags & RUBY_T_MASK;
     if (t == T_CLASS || t == T_MODULE || t == T_ICLASS) {
@@ -2968,7 +2972,7 @@ rb_gc_impl_shutdown_free_objects(void *objspace_ptr)
             if (RB_BUILTIN_TYPE(vp) != T_NONE) {
                 rb_gc_obj_free_vm_weak_references(vp);
                 if (rb_gc_obj_free(objspace, vp)) {
-                    RBASIC(vp)->flags = 0;
+                    RBASIC_RESET_FLAGS(vp);
                 }
             }
         }
@@ -3042,7 +3046,7 @@ rb_gc_impl_shutdown_call_finalizer(void *objspace_ptr)
             if (rb_gc_shutdown_call_finalizer_p(vp)) {
                 rb_gc_obj_free_vm_weak_references(vp);
                 if (rb_gc_obj_free(objspace, vp)) {
-                    RBASIC(vp)->flags = 0;
+                    RBASIC_RESET_FLAGS(vp);
                 }
             }
         }
@@ -9361,6 +9365,7 @@ rb_gc_impl_init(void)
     VALUE gc_constants = rb_hash_new();
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("DEBUG")), GC_DEBUG ? Qtrue : Qfalse);
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("BASE_SLOT_SIZE")), SIZET2NUM(BASE_SLOT_SIZE - RVALUE_OVERHEAD));
+    rb_hash_aset(gc_constants, ID2SYM(rb_intern("RBASIC_SIZE")), SIZET2NUM(sizeof(struct RBasic)));
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("RVALUE_OVERHEAD")), SIZET2NUM(RVALUE_OVERHEAD));
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("HEAP_PAGE_OBJ_LIMIT")), SIZET2NUM(HEAP_PAGE_OBJ_LIMIT));
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("HEAP_PAGE_BITMAP_SIZE")), SIZET2NUM(HEAP_PAGE_BITMAP_SIZE));

gc/mmtk/mmtk.c

@@ -4,6 +4,7 @@
 #include "ruby/assert.h"
 #include "ruby/atomic.h"
 #include "ruby/debug.h"
+#include "internal/object.h"
 #include "gc/gc.h"
 #include "gc/gc_impl.h"
@@ -453,6 +454,7 @@ rb_gc_impl_init(void)
 {
     VALUE gc_constants = rb_hash_new();
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("BASE_SLOT_SIZE")), SIZET2NUM(sizeof(VALUE) * 5));
+    rb_hash_aset(gc_constants, ID2SYM(rb_intern("RBASIC_SIZE")), SIZET2NUM(sizeof(struct RBasic)));
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("RVALUE_OVERHEAD")), INT2NUM(0));
     rb_hash_aset(gc_constants, ID2SYM(rb_intern("RVARGC_MAX_ALLOCATE_SIZE")), LONG2FIX(640));
     // Pretend we have 5 size pools
@@ -1019,7 +1021,7 @@ rb_gc_impl_shutdown_call_finalizer(void *objspace_ptr)
         if (rb_gc_shutdown_call_finalizer_p(obj)) {
             rb_gc_obj_free(objspace_ptr, obj);
-            RBASIC(obj)->flags = 0;
+            RBASIC_RESET_FLAGS(obj);
         }
     }
     mmtk_free_raw_vec_of_obj_ref(registered_candidates);

include/ruby/internal/core/rbasic.h

@@ -55,6 +55,12 @@ enum ruby_rvalue_flags {
     RVALUE_EMBED_LEN_MAX = RBIMPL_RVALUE_EMBED_LEN_MAX
 };
 
+#if (SIZEOF_VALUE < SIZEOF_UINT64_T)
+#define RBASIC_SHAPE_ID_FIELD 1
+#else
+#define RBASIC_SHAPE_ID_FIELD 0
+#endif
+
 /**
  * Ruby object's base components. All Ruby objects have them in common.
  */
@@ -85,6 +91,10 @@ RBasic {
      */
     const VALUE klass;
 
+#if RBASIC_SHAPE_ID_FIELD
+    VALUE shape_id;
+#endif
+
 #ifdef __cplusplus
   public:
     RBIMPL_ATTR_CONSTEXPR(CXX11)
@@ -100,6 +110,9 @@ RBasic {
     RBasic() :
         flags(RBIMPL_VALUE_NULL),
         klass(RBIMPL_VALUE_NULL)
+#if RBASIC_SHAPE_ID_FIELD
+        , shape_id(RBIMPL_VALUE_NULL)
+#endif
     {
     }
 #endif

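The `SIZEOF_VALUE < SIZEOF_UINT64_T` test is what distinguishes a 32-bit `VALUE` from a 64-bit one. A standalone sketch of the size arithmetic (illustrative types, not the real header):

```c
#include <stdint.h>

typedef uint32_t VALUE32; /* stand-in for VALUE on an ILP32 build */

struct rbasic_old { VALUE32 flags, klass; };           /* 8 bytes  */
struct rbasic_new { VALUE32 flags, klass, shape_id; }; /* 12 bytes */

/* the "4B larger" claim from the commit message */
_Static_assert(sizeof(struct rbasic_new) - sizeof(struct rbasic_old) == 4,
               "RBasic grows by one 4-byte word on 32-bit builds");
```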
internal/class.h

@@ -577,7 +577,7 @@ RCLASS_FIELDS_COUNT(VALUE obj)
         return count;
     }
     else {
-        return RSHAPE(RCLASS_SHAPE_ID(obj))->next_field_index;
+        return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
     }
 }

internal/object.h

@@ -60,4 +60,13 @@ RBASIC_SET_CLASS(VALUE obj, VALUE klass)
     RBASIC_SET_CLASS_RAW(obj, klass);
     RB_OBJ_WRITTEN(obj, oldv, klass);
 }
+
+static inline void
+RBASIC_RESET_FLAGS(VALUE obj)
+{
+    RBASIC(obj)->flags = 0;
+#if RBASIC_SHAPE_ID_FIELD
+    RBASIC(obj)->shape_id = 0;
+#endif
+}
 #endif /* INTERNAL_OBJECT_H */

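`RBASIC_RESET_FLAGS` exists because zeroing `flags` alone no longer returns a slot to a pristine state: on builds with the dedicated field, a recycled slot would otherwise inherit the previous object's shape ID. A sketch of the invariant (hypothetical type, mirroring the helper above):

```c
#include <stdint.h>

struct basic_sketch { uint32_t flags, klass, shape_id; };

static inline void
reset_sketch(struct basic_sketch *b)
{
    b->flags = 0;    /* all the old code did: RBASIC(obj)->flags = 0 */
    b->shape_id = 0; /* the extra clear RBASIC_RESET_FLAGS adds      */
}
```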
object.c

@@ -50,10 +50,6 @@
  *    The object has its instance variables embedded (the array of
  *    instance variables directly follow the object, rather than being
  *    on a separately allocated buffer).
- * if !SHAPE_IN_BASIC_FLAGS
- * 4-19: SHAPE_FLAG_MASK
- *       Shape ID for the object.
- * endif
  */
 /*!
@@ -134,8 +130,7 @@ rb_class_allocate_instance(VALUE klass)
     RUBY_ASSERT(rb_obj_shape(obj)->type == SHAPE_ROOT);
     // Set the shape to the specific T_OBJECT shape.
-    ROBJECT_SET_SHAPE_ID(obj, rb_shape_root(rb_gc_heap_id_for_size(size)));
+    RBASIC_SET_SHAPE_ID(obj, rb_shape_root(rb_gc_heap_id_for_size(size)));
 
 #if RUBY_DEBUG
     RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));

ractor.c

@@ -3667,10 +3667,10 @@ move_leave(VALUE obj, struct obj_traverse_replace_data *data)
         rb_replace_generic_ivar(data->replacement, obj);
     }
 
-    VALUE flags = T_OBJECT | FL_FREEZE | (RBASIC(obj)->flags & FL_PROMOTED);
+    VALUE flags = T_OBJECT | FL_FREEZE | ROBJECT_EMBED | (RBASIC(obj)->flags & FL_PROMOTED);
 
     // Avoid mutations using bind_call, etc.
-    MEMZERO((char *)obj + sizeof(struct RBasic), char, size - sizeof(struct RBasic));
+    MEMZERO((char *)obj, char, sizeof(struct RBasic));
     RBASIC(obj)->flags = flags;
     RBASIC_SET_CLASS_RAW(obj, rb_cRactorMovedObject);
     return traverse_cont;

shape.c (17 changes)

@@ -347,10 +347,6 @@ rb_shape_lookup(shape_id_t shape_id)
     return &GET_SHAPE_TREE()->shape_list[shape_id];
 }
 
-#if !SHAPE_IN_BASIC_FLAGS
-shape_id_t rb_generic_shape_id(VALUE obj);
-#endif
-
 RUBY_FUNC_EXPORTED shape_id_t
 rb_obj_shape_id(VALUE obj)
 {
@@ -358,20 +354,7 @@ rb_obj_shape_id(VALUE obj)
         return SPECIAL_CONST_SHAPE_ID;
     }
 
-#if SHAPE_IN_BASIC_FLAGS
     return RBASIC_SHAPE_ID(obj);
-#else
-    switch (BUILTIN_TYPE(obj)) {
-      case T_OBJECT:
-        return ROBJECT_SHAPE_ID(obj);
-        break;
-      case T_CLASS:
-      case T_MODULE:
-        return RCLASS_SHAPE_ID(obj);
-      default:
-        return rb_generic_shape_id(obj);
-    }
-#endif
 }
 
 size_t

shape.h (75 changes)

@@ -6,7 +6,6 @@
 #if (SIZEOF_UINT64_T <= SIZEOF_VALUE)
 #define SIZEOF_SHAPE_T 4
-#define SHAPE_IN_BASIC_FLAGS 1
 typedef uint32_t attr_index_t;
 typedef uint32_t shape_id_t;
 # define SHAPE_ID_NUM_BITS 32
@@ -14,7 +13,6 @@ typedef uint32_t shape_id_t;
 #else
 #define SIZEOF_SHAPE_T 2
-#define SHAPE_IN_BASIC_FLAGS 0
 typedef uint16_t attr_index_t;
 typedef uint16_t shape_id_t;
 # define SHAPE_ID_NUM_BITS 16
@@ -90,66 +88,31 @@ rb_current_shape_tree(void)
 }
 #define GET_SHAPE_TREE() rb_current_shape_tree()
 
 static inline shape_id_t
-get_shape_id_from_flags(VALUE obj)
-{
-    RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
-    RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO));
-    return (shape_id_t)((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT);
-}
-
-static inline void
-set_shape_id_in_flags(VALUE obj, shape_id_t shape_id)
-{
-    RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
-    RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO));
-    // Ractors are occupying the upper 32 bits of flags, but only in debug mode
-    // Object shapes are occupying top bits
-    RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
-    RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
-}
-
-#if SHAPE_IN_BASIC_FLAGS
-static inline shape_id_t
 RBASIC_SHAPE_ID(VALUE obj)
 {
-    return get_shape_id_from_flags(obj);
+    RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO));
+#if RBASIC_SHAPE_ID_FIELD
+    return (shape_id_t)((RBASIC(obj)->shape_id));
+#else
+    return (shape_id_t)((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT);
+#endif
 }
 
 static inline void
 RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
 {
-    set_shape_id_in_flags(obj, shape_id);
+    RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO));
+#if RBASIC_SHAPE_ID_FIELD
+    RBASIC(obj)->shape_id = (VALUE)shape_id;
+#else
+    // Ractors are occupying the upper 32 bits of flags, but only in debug mode
+    // Object shapes are occupying top bits
+    RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
+    RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+#endif
 }
-#endif
-
-static inline shape_id_t
-ROBJECT_SHAPE_ID(VALUE obj)
-{
-    RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-    return get_shape_id_from_flags(obj);
-}
-
-static inline void
-ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
-{
-    RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-    set_shape_id_in_flags(obj, shape_id);
-}
-
-static inline shape_id_t
-RCLASS_SHAPE_ID(VALUE obj)
-{
-    RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
-    return get_shape_id_from_flags(obj);
-}
-
-static inline void
-RCLASS_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
-{
-    RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
-    set_shape_id_in_flags(obj, shape_id);
-}
 
 #define RSHAPE rb_shape_lookup
@@ -203,7 +166,7 @@ ROBJECT_FIELDS_CAPACITY(VALUE obj)
     // Asking for capacity doesn't make sense when the object is using
     // a hash table for storing instance variables
     RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
-    return RSHAPE(ROBJECT_SHAPE_ID(obj))->capacity;
+    return RSHAPE(RBASIC_SHAPE_ID(obj))->capacity;
 }
 
 static inline st_table *
@@ -222,8 +185,6 @@ ROBJECT_SET_FIELDS_HASH(VALUE obj, const st_table *tbl)
     ROBJECT(obj)->as.heap.fields = (VALUE *)tbl;
 }
 
-size_t rb_id_table_size(const struct rb_id_table *tbl);
-
 static inline uint32_t
 ROBJECT_FIELDS_COUNT(VALUE obj)
 {
@@ -233,7 +194,7 @@ ROBJECT_FIELDS_COUNT(VALUE obj)
     else {
         RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
         RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
-        return RSHAPE(ROBJECT_SHAPE_ID(obj))->next_field_index;
+        return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
    }
 }

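With `SHAPE_IN_BASIC_FLAGS` gone, the four type-specific accessors collapse into a single `RBASIC_SHAPE_ID`/`RBASIC_SET_SHAPE_ID` pair. A self-contained sketch of the same two code paths (local stand-in constants; the real ones come from configure):

```c
#include <stdint.h>

#define SHAPE_ID_FIELD   0                      /* pretend: 64-bit build */
#define SHAPE_FLAG_SHIFT 32
#define SHAPE_FLAG_MASK  ((uint64_t)UINT32_MAX) /* bits to keep in flags */

struct basic_sketch {
    uint64_t flags;
#if SHAPE_ID_FIELD
    uint64_t shape_id;
#endif
};

static inline uint32_t
sketch_shape_id(const struct basic_sketch *b)
{
#if SHAPE_ID_FIELD
    return (uint32_t)b->shape_id;                    /* 32-bit: plain load */
#else
    return (uint32_t)(b->flags >> SHAPE_FLAG_SHIFT); /* 64-bit: unpack     */
#endif
}

static inline void
sketch_set_shape_id(struct basic_sketch *b, uint32_t shape_id)
{
#if SHAPE_ID_FIELD
    b->shape_id = shape_id;
#else
    b->flags &= SHAPE_FLAG_MASK;                /* drop the old ID bits */
    b->flags |= ((uint64_t)shape_id << SHAPE_FLAG_SHIFT);
#endif
}
```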
symbol.c

@@ -960,6 +960,8 @@ setup_fake_str(struct RString *fake_str, const char *name, long len, int encidx)
 {
     fake_str->basic.flags = T_STRING|RSTRING_NOEMBED|STR_NOFREE|STR_FAKESTR;
+    rb_shape_set_shape_id((VALUE)fake_str, 0);
+
     if (!name) {
         RUBY_ASSERT_ALWAYS(len == 0);
         name = "";

test/-ext-/string/test_capacity.rb

@@ -66,7 +66,7 @@ class Test_StringCapacity < Test::Unit::TestCase
   end
 
   def embed_header_size
-    3 * RbConfig::SIZEOF['void*']
+    GC::INTERNAL_CONSTANTS[:RBASIC_SIZE] + RbConfig::SIZEOF['void*']
   end
 
   def max_embed_len

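The test's hard-coded `3 * sizeof(void*)` assumed an embedded string header of exactly three pointer words (`flags`, `klass`, `len`). With `RBasic` now able to be three words by itself on 32-bit, the header is instead derived as `RBASIC_SIZE` plus the one `len` word. A sketch of the 32-bit arithmetic (illustrative layout, assuming `len` immediately follows `RBasic` as in `struct RString`):

```c
#include <stdint.h>

struct rbasic32  { uint32_t flags, klass, shape_id; }; /* 12 bytes */
struct rstring32 { struct rbasic32 basic; int32_t len; };

/* old formula: 3 * sizeof(void*) == 12; the actual header is now 16 */
_Static_assert(sizeof(struct rstring32) == sizeof(struct rbasic32) + 4,
               "embed header = RBASIC_SIZE + one word for len");
```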
variable.c

@@ -1296,28 +1296,6 @@ rb_generic_ivar_memsize(VALUE obj)
     return 0;
 }
 
-#if !SHAPE_IN_BASIC_FLAGS
-shape_id_t
-rb_generic_shape_id(VALUE obj)
-{
-    struct gen_fields_tbl *fields_tbl = 0;
-    shape_id_t shape_id = 0;
-
-    RB_VM_LOCKING() {
-        st_table* global_iv_table = generic_fields_tbl(obj, 0, false);
-
-        if (global_iv_table && st_lookup(global_iv_table, obj, (st_data_t *)&fields_tbl)) {
-            shape_id = fields_tbl->shape_id;
-        }
-        else if (OBJ_FROZEN(obj)) {
-            shape_id = SPECIAL_CONST_SHAPE_ID;
-        }
-    }
-
-    return shape_id;
-}
-#endif
-
 static size_t
 gen_fields_tbl_count(VALUE obj, const struct gen_fields_tbl *fields_tbl)
 {
@@ -1400,9 +1378,7 @@ rb_ivar_lookup(VALUE obj, ID id, VALUE undef)
     VALUE * ivar_list;
     rb_shape_t * shape;
 
-#if SHAPE_IN_BASIC_FLAGS
     shape_id = RBASIC_SHAPE_ID(obj);
-#endif
 
     switch (BUILTIN_TYPE(obj)) {
       case T_CLASS:
@@ -1412,10 +1388,6 @@ rb_ivar_lookup(VALUE obj, ID id, VALUE undef)
         VALUE val;
 
         RB_VM_LOCKING() {
-#if !SHAPE_IN_BASIC_FLAGS
-            shape_id = RCLASS_SHAPE_ID(obj);
-#endif
-
             if (rb_shape_id_too_complex_p(shape_id)) {
                 st_table * iv_table = RCLASS_FIELDS_HASH(obj);
                 if (rb_st_lookup(iv_table, (st_data_t)id, (st_data_t *)&val)) {
@@ -1453,9 +1425,6 @@ rb_ivar_lookup(VALUE obj, ID id, VALUE undef)
      }
      case T_OBJECT:
        {
-#if !SHAPE_IN_BASIC_FLAGS
-        shape_id = ROBJECT_SHAPE_ID(obj);
-#endif
         if (rb_shape_id_too_complex_p(shape_id)) {
             st_table * iv_table = ROBJECT_FIELDS_HASH(obj);
             VALUE val;
@@ -1485,10 +1454,6 @@ rb_ivar_lookup(VALUE obj, ID id, VALUE undef)
                 return undef;
             }
         }
-
-#if !SHAPE_IN_BASIC_FLAGS
-        shape_id = fields_tbl->shape_id;
-#endif
         ivar_list = fields_tbl->as.shape.fields;
     }
     else {
@@ -1677,11 +1642,7 @@ obj_transition_too_complex(VALUE obj, st_table *table)
          * and hold the table because the xmalloc could trigger a GC
          * compaction. We want the table to be updated rather than
          * the original fields. */
-#if SHAPE_IN_BASIC_FLAGS
        rb_shape_set_shape_id(obj, shape_id);
-#else
-        old_fields_tbl->shape_id = shape_id;
-#endif
        old_fields_tbl->as.complex.table = table;
        old_fields = (VALUE *)old_fields_tbl;
    }
@@ -1690,11 +1651,7 @@ obj_transition_too_complex(VALUE obj, st_table *table)
        fields_tbl->as.complex.table = table;
        st_insert(gen_ivs, (st_data_t)obj, (st_data_t)fields_tbl);
 
-#if SHAPE_IN_BASIC_FLAGS
        rb_shape_set_shape_id(obj, shape_id);
-#else
-        fields_tbl->shape_id = shape_id;
-#endif
    }
 }
@@ -1880,11 +1837,7 @@ generic_fields_lookup_ensure_size(st_data_t *k, st_data_t *v, st_data_t u, int e
     fields_lookup->fields_tbl = fields_tbl;
 
     if (fields_lookup->shape_id) {
-#if SHAPE_IN_BASIC_FLAGS
         rb_shape_set_shape_id(fields_lookup->obj, fields_lookup->shape_id);
-#else
-        fields_tbl->shape_id = fields_lookup->shape_id;
-#endif
     }
 
     return ST_CONTINUE;
@@ -1937,9 +1890,6 @@ generic_ivar_set_too_complex_table(VALUE obj, void *data)
     struct gen_fields_tbl *fields_tbl;
     if (!rb_gen_fields_tbl_get(obj, 0, &fields_tbl)) {
         fields_tbl = xmalloc(sizeof(struct gen_fields_tbl));
-#if !SHAPE_IN_BASIC_FLAGS
-        fields_tbl->shape_id = rb_shape_transition_complex(obj);
-#endif
         fields_tbl->as.complex.table = st_init_numtable_with_size(1);
 
         RB_VM_LOCKING() {
@@ -2100,34 +2050,7 @@ rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id)
         return false;
     }
 
-#if SHAPE_IN_BASIC_FLAGS
     RBASIC_SET_SHAPE_ID(obj, shape_id);
-#else
-    switch (BUILTIN_TYPE(obj)) {
-      case T_OBJECT:
-        ROBJECT_SET_SHAPE_ID(obj, shape_id);
-        break;
-      case T_CLASS:
-      case T_MODULE:
-        RCLASS_SET_SHAPE_ID(obj, shape_id);
-        break;
-      default:
-        if (shape_id != SPECIAL_CONST_SHAPE_ID) {
-            struct gen_fields_tbl *fields_tbl = 0;
-            RB_VM_LOCKING() {
-                st_table* global_iv_table = generic_fields_tbl(obj, 0, false);
-
-                if (st_lookup(global_iv_table, obj, (st_data_t *)&fields_tbl)) {
-                    fields_tbl->shape_id = shape_id;
-                }
-                else {
-                    rb_bug("Expected shape_id entry in global iv table");
-                }
-            }
-        }
-    }
-#endif
 
     return true;
 }
@@ -2492,9 +2415,7 @@ rb_copy_generic_ivar(VALUE dest, VALUE obj)
   clear:
     if (FL_TEST(dest, FL_EXIVAR)) {
-#if SHAPE_IN_BASIC_FLAGS
         RBASIC_SET_SHAPE_ID(dest, ROOT_SHAPE_ID);
-#endif
         rb_free_generic_ivar(dest);
         FL_UNSET(dest, FL_EXIVAR);
     }

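The deleted `rb_generic_shape_id` is the most telling simplification: reading the shape of a generic (non-`T_OBJECT`) object used to require taking the VM lock and probing the global ivar table, and now every heap object answers through the same `RBASIC_SHAPE_ID` read. A sketch of the resulting fast path (hypothetical names and types, not the real API):

```c
#include <stdint.h>

struct basic { uint32_t flags, klass, shape_id; };

/* After this commit: one field read, identical for every object type,
 * replacing the removed RB_VM_LOCKING() + st_lookup() path above. */
static inline uint32_t
generic_shape_id_after(const struct basic *obj)
{
    return obj->shape_id;
}
```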
internal/variable.h

@@ -13,9 +13,6 @@
 #include "shape.h"
 
 struct gen_fields_tbl {
-#if !SHAPE_IN_BASIC_FLAGS
-    uint16_t shape_id;
-#endif
     union {
         struct {
             uint32_t fields_count;
@@ -29,10 +26,6 @@ struct gen_fields_tbl {
 int rb_ivar_generic_fields_tbl_lookup(VALUE obj, struct gen_fields_tbl **);
 
-#if !SHAPE_IN_BASIC_FLAGS
-shape_id_t rb_generic_shape_id(VALUE obj);
-#endif
-
 void rb_free_rb_global_tbl(void);
 void rb_free_generic_fields_tbl_(void);

vm_insnhelper.c

@@ -1224,18 +1224,12 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
         return default_value;
     }
 
-#if SHAPE_IN_BASIC_FLAGS
     shape_id = RBASIC_SHAPE_ID(obj);
-#endif
 
     switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        ivar_list = ROBJECT_FIELDS(obj);
        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);
-
-#if !SHAPE_IN_BASIC_FLAGS
-        shape_id = ROBJECT_SHAPE_ID(obj);
-#endif
        break;
      case T_CLASS:
      case T_MODULE:
@@ -1257,20 +1251,12 @@ vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_call
        }
 
        ivar_list = RCLASS_PRIME_FIELDS(obj);
-
-#if !SHAPE_IN_BASIC_FLAGS
-        shape_id = RCLASS_SHAPE_ID(obj);
-#endif
        break;
      }
      default:
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct gen_fields_tbl *fields_tbl;
            rb_gen_fields_tbl_get(obj, id, &fields_tbl);
-#if !SHAPE_IN_BASIC_FLAGS
-            shape_id = fields_tbl->shape_id;
-#endif
            ivar_list = fields_tbl->as.shape.fields;
        }
        else {
@@ -1434,7 +1420,7 @@ vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic,
        attr_index_t index = rb_obj_ivar_set(obj, id, val);
 
-        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
+        shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
 
        if (!rb_shape_id_too_complex_p(next_shape_id)) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
@@ -1463,11 +1449,7 @@ NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t
 static VALUE
 vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
 {
-#if SHAPE_IN_BASIC_FLAGS
     shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
-#else
-    shape_id_t shape_id = rb_generic_shape_id(obj);
-#endif
 
     struct gen_fields_tbl *fields_tbl = 0;
@@ -1493,11 +1475,7 @@ vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_i
        rb_gen_fields_tbl_get(obj, 0, &fields_tbl);
 
        if (shape_id != dest_shape_id) {
-#if SHAPE_IN_BASIC_FLAGS
            RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
-#else
-            fields_tbl->shape_id = dest_shape_id;
-#endif
        }
 
        RB_OBJ_WRITE(obj, &fields_tbl->as.shape.fields[index], val);
@@ -1516,7 +1494,7 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
    {
        VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
 
-        shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
+        shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_id_too_complex_p(dest_shape_id));
 
        if (LIKELY(shape_id == dest_shape_id)) {
@@ -1531,7 +1509,7 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
            if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
 
-                ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
+                RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
 
                RUBY_ASSERT(rb_shape_get_next_iv_shape(source_shape_id, id) == dest_shape_id);
                RUBY_ASSERT(index < dest_shape->capacity);