Transition shape when object's capacity changes

This commit adds a `capacity` field to shapes, and adds shape
transitions whenever an object's capacity changes. Objects that are
allocated out of a larger size pool also transition from the root shape
to the shape with the correct capacity for their size pool when they
are allocated.

This commit will allow us to remove numiv from objects entirely, and it
also means we can guarantee that if two objects share a shape, their
IVs are at the same positions (an embedded object and an extended
object can never share a shape). This will enable us to implement ivar
sets in YJIT using object shapes.

Co-Authored-By: Aaron Patterson <tenderlove@ruby-lang.org>
Author:    Jemma Issroff
Date:      2022-11-08 15:35:31 -05:00
Committer: Peter Zhu
Parent:    9986697b62
Commit:    5246f4027e
Notes:     git 2022-11-10 15:12:05 +00:00

28 changed files with 483 additions and 203 deletions
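For orientation before the diffs: the following is a small, self-contained C sketch of the mechanism the message above describes. It is an illustrative model only, not Ruby's actual code; the toy_shape struct and the transition_* functions are invented stand-ins for rb_shape_t, rb_shape_transition_shape_capa and rb_shape_get_next in the diff below. The point it demonstrates is that capacity travels with the shape, so an object that needs more IV slots than its shape's capacity first takes a capacity-change transition and only then the normal ivar transition.

#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>

/* Toy model of a shape: capacity travels with the shape, so two objects
 * that share a shape are guaranteed to have identical IV layouts. */
typedef struct toy_shape {
    uint32_t next_iv_index;     /* how many IVs are set under this shape      */
    uint32_t capacity;          /* how many IV slots the object has allocated */
    struct toy_shape *parent;   /* transition edge back to the parent shape   */
} toy_shape;

/* Capacity-change transition: child shape identical to its parent except
 * for capacity (models SHAPE_CAPACITY_CHANGE). */
static toy_shape *transition_capa(toy_shape *shape, uint32_t new_capacity) {
    toy_shape *child = malloc(sizeof(toy_shape));
    child->next_iv_index = shape->next_iv_index;
    child->capacity = new_capacity;
    child->parent = shape;
    return child;
}

/* IV transition: adds one IV, interposing a capacity change first if the
 * current capacity is exhausted (models the check before an ivar shape). */
static toy_shape *transition_ivar(toy_shape *shape) {
    if (shape->next_iv_index >= shape->capacity) {
        shape = transition_capa(shape, shape->capacity * 2);
    }
    toy_shape *child = malloc(sizeof(toy_shape));
    child->next_iv_index = shape->next_iv_index + 1;
    child->capacity = shape->capacity;
    child->parent = shape;
    return child;
}

int main(void) {
    toy_shape root = { 0, 2, NULL };   /* root shape for a pool with 2 embedded slots */
    toy_shape *s = &root;
    for (int i = 0; i < 3; i++) {
        s = transition_ivar(s);
        printf("ivar %d -> next_iv_index=%u capacity=%u\n",
               i, (unsigned)s->next_iv_index, (unsigned)s->capacity);
    }
    return 0;
}

Running this, the third insertion overflows the initial capacity of 2, so a capacity-change transition to capacity 4 is interposed before the ivar transition; rb_shape_rebuild_shape in shape.c below replays a chain of such transitions on top of a size pool's initial shape in the same way.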


@@ -197,7 +197,7 @@ class_alloc(VALUE flags, VALUE klass)
 {
     size_t alloc_size = sizeof(struct RClass);

-#if USE_RVARGC
+#if RCLASS_EXT_EMBEDDED
     alloc_size += sizeof(rb_classext_t);
 #endif
@@ -206,7 +206,7 @@ class_alloc(VALUE flags, VALUE klass)
     if (RGENGC_WB_PROTECTED_CLASS) flags |= FL_WB_PROTECTED;

     RVARGC_NEWOBJ_OF(obj, struct RClass, klass, flags, alloc_size);
-#if USE_RVARGC
+#if RCLASS_EXT_EMBEDDED
     memset(RCLASS_EXT(obj), 0, sizeof(rb_classext_t));
 #else
     obj->ptr = ZALLOC(rb_classext_t);


@@ -6038,6 +6038,7 @@ enumerator.$(OBJEXT): {$(VPATH)}missing.h
 enumerator.$(OBJEXT): {$(VPATH)}onigmo.h
 enumerator.$(OBJEXT): {$(VPATH)}oniguruma.h
 enumerator.$(OBJEXT): {$(VPATH)}ruby_assert.h
+enumerator.$(OBJEXT): {$(VPATH)}shape.h
 enumerator.$(OBJEXT): {$(VPATH)}st.h
 enumerator.$(OBJEXT): {$(VPATH)}subst.h
 error.$(OBJEXT): $(CCAN_DIR)/check_type/check_type.h
@@ -9376,6 +9377,7 @@ memory_view.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
 memory_view.$(OBJEXT): {$(VPATH)}memory_view.c
 memory_view.$(OBJEXT): {$(VPATH)}memory_view.h
 memory_view.$(OBJEXT): {$(VPATH)}missing.h
+memory_view.$(OBJEXT): {$(VPATH)}shape.h
 memory_view.$(OBJEXT): {$(VPATH)}st.h
 memory_view.$(OBJEXT): {$(VPATH)}subst.h
 memory_view.$(OBJEXT): {$(VPATH)}util.h
@@ -10629,6 +10631,7 @@ object.$(OBJEXT): {$(VPATH)}shape.h
 object.$(OBJEXT): {$(VPATH)}st.h
 object.$(OBJEXT): {$(VPATH)}subst.h
 object.$(OBJEXT): {$(VPATH)}util.h
+object.$(OBJEXT): {$(VPATH)}variable.h
 pack.$(OBJEXT): $(hdrdir)/ruby/ruby.h
 pack.$(OBJEXT): $(top_srcdir)/internal/array.h
 pack.$(OBJEXT): $(top_srcdir)/internal/bits.h
@@ -10810,6 +10813,7 @@ pack.$(OBJEXT): {$(VPATH)}onigmo.h
 pack.$(OBJEXT): {$(VPATH)}oniguruma.h
 pack.$(OBJEXT): {$(VPATH)}pack.c
 pack.$(OBJEXT): {$(VPATH)}pack.rbinc
+pack.$(OBJEXT): {$(VPATH)}shape.h
 pack.$(OBJEXT): {$(VPATH)}st.h
 pack.$(OBJEXT): {$(VPATH)}subst.h
 pack.$(OBJEXT): {$(VPATH)}util.h
@@ -11022,6 +11026,7 @@ parse.$(OBJEXT): {$(VPATH)}ractor.h
 parse.$(OBJEXT): {$(VPATH)}regenc.h
 parse.$(OBJEXT): {$(VPATH)}regex.h
 parse.$(OBJEXT): {$(VPATH)}ruby_assert.h
+parse.$(OBJEXT): {$(VPATH)}shape.h
 parse.$(OBJEXT): {$(VPATH)}st.h
 parse.$(OBJEXT): {$(VPATH)}subst.h
 parse.$(OBJEXT): {$(VPATH)}symbol.h
@@ -11853,6 +11858,7 @@ random.$(OBJEXT): {$(VPATH)}ractor.h
 random.$(OBJEXT): {$(VPATH)}random.c
 random.$(OBJEXT): {$(VPATH)}random.h
 random.$(OBJEXT): {$(VPATH)}ruby_atomic.h
+random.$(OBJEXT): {$(VPATH)}shape.h
 random.$(OBJEXT): {$(VPATH)}siphash.c
 random.$(OBJEXT): {$(VPATH)}siphash.h
 random.$(OBJEXT): {$(VPATH)}st.h
@@ -12045,6 +12051,7 @@ range.$(OBJEXT): {$(VPATH)}missing.h
 range.$(OBJEXT): {$(VPATH)}onigmo.h
 range.$(OBJEXT): {$(VPATH)}oniguruma.h
 range.$(OBJEXT): {$(VPATH)}range.c
+range.$(OBJEXT): {$(VPATH)}shape.h
 range.$(OBJEXT): {$(VPATH)}st.h
 range.$(OBJEXT): {$(VPATH)}subst.h
 rational.$(OBJEXT): $(hdrdir)/ruby/ruby.h
@@ -14024,6 +14031,7 @@ shape.$(OBJEXT): {$(VPATH)}constant.h
 shape.$(OBJEXT): {$(VPATH)}debug_counter.h
 shape.$(OBJEXT): {$(VPATH)}defines.h
 shape.$(OBJEXT): {$(VPATH)}encoding.h
+shape.$(OBJEXT): {$(VPATH)}gc.h
 shape.$(OBJEXT): {$(VPATH)}id.h
 shape.$(OBJEXT): {$(VPATH)}id_table.h
 shape.$(OBJEXT): {$(VPATH)}intern.h
@@ -16007,6 +16015,7 @@ time.$(OBJEXT): {$(VPATH)}missing.h
 time.$(OBJEXT): {$(VPATH)}onigmo.h
 time.$(OBJEXT): {$(VPATH)}oniguruma.h
 time.$(OBJEXT): {$(VPATH)}ruby_assert.h
+time.$(OBJEXT): {$(VPATH)}shape.h
 time.$(OBJEXT): {$(VPATH)}st.h
 time.$(OBJEXT): {$(VPATH)}subst.h
 time.$(OBJEXT): {$(VPATH)}time.c
@@ -16371,6 +16380,7 @@ transient_heap.$(OBJEXT): {$(VPATH)}internal/warning_push.h
 transient_heap.$(OBJEXT): {$(VPATH)}internal/xmalloc.h
 transient_heap.$(OBJEXT): {$(VPATH)}missing.h
 transient_heap.$(OBJEXT): {$(VPATH)}ruby_assert.h
+transient_heap.$(OBJEXT): {$(VPATH)}shape.h
 transient_heap.$(OBJEXT): {$(VPATH)}st.h
 transient_heap.$(OBJEXT): {$(VPATH)}subst.h
 transient_heap.$(OBJEXT): {$(VPATH)}transient_heap.c


@@ -174,5 +174,6 @@ rat.o: $(top_srcdir)/internal/static_assert.h
 rat.o: $(top_srcdir)/internal/vm.h
 rat.o: $(top_srcdir)/internal/warnings.h
 rat.o: $(top_srcdir)/ruby_assert.h
+rat.o: $(top_srcdir)/shape.h
 rat.o: rat.c
 # AUTOGENERATED DEPENDENCIES END


@@ -181,5 +181,6 @@ pty.o: $(top_srcdir)/internal/process.h
 pty.o: $(top_srcdir)/internal/signal.h
 pty.o: $(top_srcdir)/internal/static_assert.h
 pty.o: $(top_srcdir)/internal/warnings.h
+pty.o: $(top_srcdir)/shape.h
 pty.o: pty.c
 # AUTOGENERATED DEPENDENCIES END


@@ -252,6 +252,7 @@ ripper.o: $(top_srcdir)/internal/warnings.h
 ripper.o: $(top_srcdir)/node.h
 ripper.o: $(top_srcdir)/regenc.h
 ripper.o: $(top_srcdir)/ruby_assert.h
+ripper.o: $(top_srcdir)/shape.h
 ripper.o: $(top_srcdir)/symbol.h
 ripper.o: ../../probes.h
 ripper.o: eventids2.c


@@ -197,6 +197,7 @@ ancdata.o: $(top_srcdir)/internal/string.h
 ancdata.o: $(top_srcdir)/internal/thread.h
 ancdata.o: $(top_srcdir)/internal/vm.h
 ancdata.o: $(top_srcdir)/internal/warnings.h
+ancdata.o: $(top_srcdir)/shape.h
 ancdata.o: ancdata.c
 ancdata.o: constdefs.h
 ancdata.o: rubysocket.h
@@ -388,6 +389,7 @@ basicsocket.o: $(top_srcdir)/internal/string.h
 basicsocket.o: $(top_srcdir)/internal/thread.h
 basicsocket.o: $(top_srcdir)/internal/vm.h
 basicsocket.o: $(top_srcdir)/internal/warnings.h
+basicsocket.o: $(top_srcdir)/shape.h
 basicsocket.o: basicsocket.c
 basicsocket.o: constdefs.h
 basicsocket.o: rubysocket.h
@@ -579,6 +581,7 @@ constants.o: $(top_srcdir)/internal/string.h
 constants.o: $(top_srcdir)/internal/thread.h
 constants.o: $(top_srcdir)/internal/vm.h
 constants.o: $(top_srcdir)/internal/warnings.h
+constants.o: $(top_srcdir)/shape.h
 constants.o: constants.c
 constants.o: constdefs.c
 constants.o: constdefs.h
@@ -771,6 +774,7 @@ ifaddr.o: $(top_srcdir)/internal/string.h
 ifaddr.o: $(top_srcdir)/internal/thread.h
 ifaddr.o: $(top_srcdir)/internal/vm.h
 ifaddr.o: $(top_srcdir)/internal/warnings.h
+ifaddr.o: $(top_srcdir)/shape.h
 ifaddr.o: constdefs.h
 ifaddr.o: ifaddr.c
 ifaddr.o: rubysocket.h
@@ -962,6 +966,7 @@ init.o: $(top_srcdir)/internal/string.h
 init.o: $(top_srcdir)/internal/thread.h
 init.o: $(top_srcdir)/internal/vm.h
 init.o: $(top_srcdir)/internal/warnings.h
+init.o: $(top_srcdir)/shape.h
 init.o: constdefs.h
 init.o: init.c
 init.o: rubysocket.h
@@ -1153,6 +1158,7 @@ ipsocket.o: $(top_srcdir)/internal/string.h
 ipsocket.o: $(top_srcdir)/internal/thread.h
 ipsocket.o: $(top_srcdir)/internal/vm.h
 ipsocket.o: $(top_srcdir)/internal/warnings.h
+ipsocket.o: $(top_srcdir)/shape.h
 ipsocket.o: constdefs.h
 ipsocket.o: ipsocket.c
 ipsocket.o: rubysocket.h
@@ -1344,6 +1350,7 @@ option.o: $(top_srcdir)/internal/string.h
 option.o: $(top_srcdir)/internal/thread.h
 option.o: $(top_srcdir)/internal/vm.h
 option.o: $(top_srcdir)/internal/warnings.h
+option.o: $(top_srcdir)/shape.h
 option.o: constdefs.h
 option.o: option.c
 option.o: rubysocket.h
@@ -1535,6 +1542,7 @@ raddrinfo.o: $(top_srcdir)/internal/string.h
 raddrinfo.o: $(top_srcdir)/internal/thread.h
 raddrinfo.o: $(top_srcdir)/internal/vm.h
 raddrinfo.o: $(top_srcdir)/internal/warnings.h
+raddrinfo.o: $(top_srcdir)/shape.h
 raddrinfo.o: constdefs.h
 raddrinfo.o: raddrinfo.c
 raddrinfo.o: rubysocket.h
@@ -1726,6 +1734,7 @@ socket.o: $(top_srcdir)/internal/string.h
 socket.o: $(top_srcdir)/internal/thread.h
 socket.o: $(top_srcdir)/internal/vm.h
 socket.o: $(top_srcdir)/internal/warnings.h
+socket.o: $(top_srcdir)/shape.h
 socket.o: constdefs.h
 socket.o: rubysocket.h
 socket.o: socket.c
@@ -1917,6 +1926,7 @@ sockssocket.o: $(top_srcdir)/internal/string.h
 sockssocket.o: $(top_srcdir)/internal/thread.h
 sockssocket.o: $(top_srcdir)/internal/vm.h
 sockssocket.o: $(top_srcdir)/internal/warnings.h
+sockssocket.o: $(top_srcdir)/shape.h
 sockssocket.o: constdefs.h
 sockssocket.o: rubysocket.h
 sockssocket.o: sockport.h
@@ -2108,6 +2118,7 @@ tcpserver.o: $(top_srcdir)/internal/string.h
 tcpserver.o: $(top_srcdir)/internal/thread.h
 tcpserver.o: $(top_srcdir)/internal/vm.h
 tcpserver.o: $(top_srcdir)/internal/warnings.h
+tcpserver.o: $(top_srcdir)/shape.h
 tcpserver.o: constdefs.h
 tcpserver.o: rubysocket.h
 tcpserver.o: sockport.h
@@ -2299,6 +2310,7 @@ tcpsocket.o: $(top_srcdir)/internal/string.h
 tcpsocket.o: $(top_srcdir)/internal/thread.h
 tcpsocket.o: $(top_srcdir)/internal/vm.h
 tcpsocket.o: $(top_srcdir)/internal/warnings.h
+tcpsocket.o: $(top_srcdir)/shape.h
 tcpsocket.o: constdefs.h
 tcpsocket.o: rubysocket.h
 tcpsocket.o: sockport.h
@@ -2490,6 +2502,7 @@ udpsocket.o: $(top_srcdir)/internal/string.h
 udpsocket.o: $(top_srcdir)/internal/thread.h
 udpsocket.o: $(top_srcdir)/internal/vm.h
 udpsocket.o: $(top_srcdir)/internal/warnings.h
+udpsocket.o: $(top_srcdir)/shape.h
 udpsocket.o: constdefs.h
 udpsocket.o: rubysocket.h
 udpsocket.o: sockport.h
@@ -2681,6 +2694,7 @@ unixserver.o: $(top_srcdir)/internal/string.h
 unixserver.o: $(top_srcdir)/internal/thread.h
 unixserver.o: $(top_srcdir)/internal/vm.h
 unixserver.o: $(top_srcdir)/internal/warnings.h
+unixserver.o: $(top_srcdir)/shape.h
 unixserver.o: constdefs.h
 unixserver.o: rubysocket.h
 unixserver.o: sockport.h
@@ -2872,6 +2886,7 @@ unixsocket.o: $(top_srcdir)/internal/string.h
 unixsocket.o: $(top_srcdir)/internal/thread.h
 unixsocket.o: $(top_srcdir)/internal/vm.h
 unixsocket.o: $(top_srcdir)/internal/warnings.h
+unixsocket.o: $(top_srcdir)/shape.h
 unixsocket.o: constdefs.h
 unixsocket.o: rubysocket.h
 unixsocket.o: sockport.h

gc.c | 67

@@ -138,6 +138,7 @@
 #include "ractor_core.h"
 #include "builtin.h"
+#include "shape.h"

 #define rb_setjmp(env) RUBY_SETJMP(env)
 #define rb_jmp_buf rb_jmpbuf_t
@@ -2593,6 +2594,12 @@ size_pool_slot_size(unsigned char pool_id)
     return slot_size;
 }

+size_t
+rb_size_pool_slot_size(unsigned char pool_id)
+{
+    return size_pool_slot_size(pool_id);
+}
+
 bool
 rb_gc_size_allocatable_p(size_t size)
 {
@@ -2797,6 +2804,9 @@ newobj_slowpath(VALUE klass, VALUE flags, rb_objspace_t *objspace, rb_ractor_t *
     }

     obj = newobj_alloc(objspace, cr, size_pool_idx, true);
+#if SHAPE_IN_BASIC_FLAGS
+    flags |= (VALUE)(size_pool_idx) << SHAPE_FLAG_SHIFT;
+#endif
     newobj_init(klass, flags, wb_protected, objspace, obj);

     gc_event_hook_prep(objspace, RUBY_INTERNAL_EVENT_NEWOBJ, obj, newobj_fill(obj, 0, 0, 0));
@@ -2848,6 +2858,9 @@ newobj_of0(VALUE klass, VALUE flags, int wb_protected, rb_ractor_t *cr, size_t a
                    gc_event_hook_available_p(objspace)) &&
             wb_protected) {
         obj = newobj_alloc(objspace, cr, size_pool_idx, false);
+#if SHAPE_IN_BASIC_FLAGS
+        flags |= (VALUE)size_pool_idx << SHAPE_FLAG_SHIFT;
+#endif
         newobj_init(klass, flags, wb_protected, objspace, obj);
     }
     else {
@@ -2916,10 +2929,10 @@ rb_class_instance_allocate_internal(VALUE klass, VALUE flags, bool wb_protected)
     GC_ASSERT((flags & RUBY_T_MASK) == T_OBJECT);
     GC_ASSERT(flags & ROBJECT_EMBED);

-    uint32_t index_tbl_num_entries = RCLASS_EXT(klass)->max_iv_count;
-
     size_t size;
 #if USE_RVARGC
+    uint32_t index_tbl_num_entries = RCLASS_EXT(klass)->max_iv_count;
+
     size = rb_obj_embedded_size(index_tbl_num_entries);
     if (!rb_gc_size_allocatable_p(size)) {
         size = sizeof(struct RObject);
@@ -2932,7 +2945,7 @@ rb_class_instance_allocate_internal(VALUE klass, VALUE flags, bool wb_protected)
 #if USE_RVARGC
     uint32_t capa = (uint32_t)((rb_gc_obj_slot_size(obj) - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
-    ROBJECT(obj)->numiv = capa;
+    ROBJECT_SET_NUMIV(obj, capa);
 #endif

 #if RUBY_DEBUG
@@ -3454,7 +3467,7 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
             xfree(RCLASS_SUPERCLASSES(obj));
         }

-#if !USE_RVARGC
+#if SIZE_POOL_COUNT == 1
         if (RCLASS_EXT(obj))
             xfree(RCLASS_EXT(obj));
 #endif
@@ -4869,7 +4882,7 @@ obj_memsize_of(VALUE obj, int use_all_types)
         if (FL_TEST_RAW(obj, RCLASS_SUPERCLASSES_INCLUDE_SELF)) {
             size += (RCLASS_SUPERCLASS_DEPTH(obj) + 1) * sizeof(VALUE);
         }
-#if !USE_RVARGC
+#if SIZE_POOL_COUNT == 1
         size += sizeof(rb_classext_t);
 #endif
     }
@@ -6054,6 +6067,7 @@ invalidate_moved_plane(rb_objspace_t *objspace, struct heap_page *page, uintptr_
             gc_move(objspace, object, forwarding_object, GET_HEAP_PAGE(object)->slot_size, page->slot_size);
             /* forwarding_object is now our actual object, and "object"
              * is the free slot for the original page */
+
             struct heap_page *orig_page = GET_HEAP_PAGE(object);
             orig_page->free_slots++;
             heap_page_add_freeobj(objspace, orig_page, object);
@@ -8387,6 +8401,7 @@ static rb_size_pool_t *
 gc_compact_destination_pool(rb_objspace_t *objspace, rb_size_pool_t *src_pool, VALUE src)
 {
     size_t obj_size;
+    size_t idx = 0;

     switch (BUILTIN_TYPE(src)) {
       case T_ARRAY:
@@ -8406,17 +8421,16 @@ gc_compact_destination_pool(rb_objspace_t *objspace, rb_size_pool_t *src_pool, V
     }

     if (rb_gc_size_allocatable_p(obj_size)){
-        return &size_pools[size_pool_idx_for_size(obj_size)];
-    }
-    else {
-        return &size_pools[0];
+        idx = size_pool_idx_for_size(obj_size);
     }
+
+    return &size_pools[idx];
 }

 static bool
 gc_compact_move(rb_objspace_t *objspace, rb_heap_t *heap, rb_size_pool_t *size_pool, VALUE src)
 {
     GC_ASSERT(BUILTIN_TYPE(src) != T_MOVED);

     rb_heap_t *dheap = SIZE_POOL_EDEN_HEAP(gc_compact_destination_pool(objspace, size_pool, src));

     if (gc_compact_heap_cursors_met_p(dheap)) {
@@ -10003,9 +10017,10 @@ static void
 gc_ref_update_object(rb_objspace_t *objspace, VALUE v)
 {
     VALUE *ptr = ROBJECT_IVPTR(v);
-    uint32_t numiv = ROBJECT_NUMIV(v);

 #if USE_RVARGC
+    uint32_t numiv = ROBJECT_NUMIV(v);
     size_t slot_size = rb_gc_obj_slot_size(v);
     size_t embed_size = rb_obj_embedded_size(numiv);
     if (slot_size >= embed_size && !RB_FL_TEST_RAW(v, ROBJECT_EMBED)) {
@@ -10019,9 +10034,17 @@ gc_ref_update_object(rb_objspace_t *objspace, VALUE v)
             xfree(ptr);
         }
         ptr = ROBJECT(v)->as.ary;
-        uint32_t capa = (uint32_t)((slot_size - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
-        ROBJECT(v)->numiv = capa;
+        size_t size_pool_shape_id = size_pool_idx_for_size(embed_size);
+        rb_shape_t * initial_shape = rb_shape_get_shape_by_id((shape_id_t)size_pool_shape_id);
+        rb_shape_t * new_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_shape(v));
+        rb_shape_set_shape(v, new_shape);
+        ROBJECT_SET_NUMIV(v, new_shape->capacity);
+#if RUBY_DEBUG
+        if (RB_TYPE_P(v, T_OBJECT) && ROBJECT_IV_CAPACITY(v) != ROBJECT_NUMIV(v)) {
+            fprintf(stderr, "shape capa: %d, v capa: %d\n", ROBJECT_IV_CAPACITY(v), ROBJECT_NUMIV(v));
+        }
+#endif
+        RUBY_ASSERT(!RB_TYPE_P(v, T_OBJECT) || ROBJECT_IV_CAPACITY(v) == ROBJECT_NUMIV(v));
     }
 #endif
@@ -14293,6 +14316,22 @@ rb_gcdebug_remove_stress_to_class(int argc, VALUE *argv, VALUE self)
  */
 #include "gc.rbinc"

+/*
+ *  call-seq:
+ *     GC.using_rvargc? -> true or false
+ *
+ *  Returns true if using experimental feature Variable Width Allocation, false
+ *  otherwise.
+ */
+static VALUE
+gc_using_rvargc_p(VALUE mod)
+{
+#if USE_RVARGC
+    return Qtrue;
+#else
+    return Qfalse;
+#endif
+}

 void
 Init_GC(void)
@@ -14371,6 +14410,8 @@ Init_GC(void)
     rb_define_singleton_method(rb_mGC, "malloc_allocations", gc_malloc_allocations, 0);
 #endif

+    rb_define_singleton_method(rb_mGC, "using_rvargc?", gc_using_rvargc_p, 0);
+
     if (GC_COMPACTION_SUPPORTED) {
         rb_define_singleton_method(rb_mGC, "compact", gc_compact, 0);
         rb_define_singleton_method(rb_mGC, "auto_compact", gc_get_auto_compact, 0);

gc.h | 2

@@ -120,6 +120,8 @@ VALUE rb_gc_disable_no_rest(void);

 struct rb_thread_struct;

+size_t rb_size_pool_slot_size(unsigned char pool_id);
+
 RUBY_SYMBOL_EXPORT_BEGIN
 /* exports for objspace module */

gc.rb | 10

@@ -252,16 +252,6 @@ module GC
     end
   end

-  # call-seq:
-  #    GC.using_rvargc? -> true or false
-  #
-  # Returns true if using experimental feature Variable Width Allocation, false
-  # otherwise.
-  def self.using_rvargc? # :nodoc:
-    GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] > 1
-  end
-
   # call-seq:
   #    GC.measure_total_time = true/false
   #


@@ -192,6 +192,16 @@ ROBJECT_NUMIV(VALUE obj)
 #endif
 }

+static inline void
+ROBJECT_SET_NUMIV(VALUE obj, uint32_t capacity)
+{
+#if USE_RVARGC
+    ROBJECT(obj)->numiv = capacity;
+#else
+    ROBJECT(obj)->as.heap.numiv = capacity;
+#endif
+}
+
 RBIMPL_ATTR_PURE_UNLESS_DEBUG()
 RBIMPL_ATTR_ARTIFICIAL()
 /**


@@ -20,6 +20,7 @@ static void Init_builtin_prelude(void);
 void
 rb_call_inits(void)
 {
+    CALL(default_shapes);
     CALL(Thread_Mutex);
 #if USE_TRANSIENT_HEAP
     CALL(TransientHeap);


@@ -62,7 +62,7 @@ struct RClass {
     struct RBasic basic;
     VALUE super;
     struct rb_id_table *m_tbl;
-#if !USE_RVARGC
+#if SIZE_POOL_COUNT == 1
     struct rb_classext_struct *ptr;
 #endif
 };
@@ -70,7 +70,7 @@ struct RClass {
 typedef struct rb_subclass_entry rb_subclass_entry_t;
 typedef struct rb_classext_struct rb_classext_t;

-#if USE_RVARGC
+#if RCLASS_EXT_EMBEDDED
 # define RCLASS_EXT(c) ((rb_classext_t *)((char *)(c) + sizeof(struct RClass)))
 #else
 # define RCLASS_EXT(c) (RCLASS(c)->ptr)


@@ -14,6 +14,7 @@
 #include "internal/compilers.h" /* for __has_attribute */
 #include "ruby/ruby.h"          /* for rb_event_flag_t */
+#include "shape.h"

 struct rb_execution_context_struct; /* in vm_core.h */
 struct rb_objspace;                 /* in vm_core.h */
@@ -67,12 +68,14 @@ struct rb_objspace; /* in vm_core.h */
     rb_obj_write((VALUE)(a), UNALIGNED_MEMBER_ACCESS((VALUE *)(slot)), \
                  (VALUE)(b), __FILE__, __LINE__)

-#if USE_RVARGC
+#if USE_RVARGC && SHAPE_IN_BASIC_FLAGS
 # define SIZE_POOL_COUNT 5
 #else
 # define SIZE_POOL_COUNT 1
 #endif

+#define RCLASS_EXT_EMBEDDED (SIZE_POOL_COUNT > 1)
+
 typedef struct ractor_newobj_size_pool_cache {
     struct RVALUE *freelist;
     struct heap_page *using_page;


@@ -13,6 +13,7 @@
 #include "constant.h"              /* for rb_const_entry_t */
 #include "ruby/internal/stdbool.h" /* for bool */
 #include "ruby/ruby.h"             /* for VALUE */
+#include "shape.h"                 /* for rb_shape_t */

 /* global variable */
@@ -53,7 +54,7 @@ VALUE rb_gvar_get(ID);
 VALUE rb_gvar_set(ID, VALUE);
 VALUE rb_gvar_defined(ID);
 void rb_const_warn_if_deprecated(const rb_const_entry_t *, VALUE, ID);
-void rb_init_iv_list(VALUE obj);
+rb_shape_t * rb_grow_iv_list(VALUE obj);
 void rb_ensure_iv_list_size(VALUE obj, uint32_t len, uint32_t newsize);
 struct gen_ivtbl * rb_ensure_generic_iv_list_size(VALUE obj, uint32_t newsize);
 MJIT_SYMBOL_EXPORT_END


@@ -353,10 +353,20 @@ module RubyVM::MJIT
       ic_copy = (status.is_entries + (C.iseq_inline_storage_entry.new(operands[1]) - body.is_entries)).iv_cache
       dest_shape_id = ic_copy.value >> C.SHAPE_FLAG_SHIFT
       attr_index = ic_copy.value & ((1 << C.SHAPE_FLAG_SHIFT) - 1)
+      capa = nil
       source_shape_id = if dest_shape_id == C.INVALID_SHAPE_ID
                           dest_shape_id
                         else
-                          C.rb_shape_get_shape_by_id(dest_shape_id).parent_id
+                          parent_id = C.rb_shape_get_shape_by_id(dest_shape_id).parent_id
+                          parent = C.rb_shape_get_shape_by_id(parent_id)
+
+                          if parent.type == C.SHAPE_CAPACITY_CHANGE
+                            capa = parent.capacity
+                            parent.parent_id
+                          else
+                            parent_id
+                          end
                         end

       src = +''
@@ -374,9 +384,9 @@ module RubyVM::MJIT
       src << " const shape_id_t dest_shape_id = (shape_id_t)#{dest_shape_id};\n"
       src << " if (source_shape_id == ROBJECT_SHAPE_ID(obj) && \n"
       src << " dest_shape_id != ROBJECT_SHAPE_ID(obj)) {\n"
-      src << " if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {\n"
-      src << " rb_init_iv_list(obj);\n"
-      src << " }\n"
+      # Conditionally generate a capacity change if there is one
+      # between the destination and the parent IV set
+      src << " rb_ensure_iv_list_size(obj, RBOJECT_NUMIV(obj), #{capa});\n" if capa
       src << " ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);\n"
       src << " VALUE *ptr = ROBJECT_IVPTR(obj);\n"
       src << " RB_OBJ_WRITE(obj, &ptr[index], stack[#{stack_size - 1}]);\n"


@@ -13,6 +13,30 @@ module RubyVM::MJIT
     Primitive.cexpr! 'UINT2NUM(SHAPE_FLAG_SHIFT)'
   end

+  def SHAPE_ROOT
+    Primitive.cexpr! 'UINT2NUM(SHAPE_ROOT)'
+  end
+
+  def SHAPE_IVAR
+    Primitive.cexpr! 'UINT2NUM(SHAPE_IVAR)'
+  end
+
+  def SHAPE_FROZEN
+    Primitive.cexpr! 'UINT2NUM(SHAPE_FROZEN)'
+  end
+
+  def SHAPE_CAPACITY_CHANGE
+    Primitive.cexpr! 'UINT2NUM(SHAPE_CAPACITY_CHANGE)'
+  end
+
+  def SHAPE_IVAR_UNDEF
+    Primitive.cexpr! 'UINT2NUM(SHAPE_IVAR_UNDEF)'
+  end
+
+  def SHAPE_INITIAL_CAPACITY
+    Primitive.cexpr! 'UINT2NUM(SHAPE_INITIAL_CAPACITY)'
+  end
+
   def ROBJECT_EMBED_LEN_MAX
     Primitive.cexpr! 'INT2NUM(RBIMPL_EMBED_LEN_MAX_OF(VALUE))'
   end
@@ -598,7 +622,9 @@ module RubyVM::MJIT
       edges: [CType::Pointer.new { self.rb_id_table }, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), edges)")],
       edge_name: [self.ID, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), edge_name)")],
       next_iv_index: [self.attr_index_t, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), next_iv_index)")],
+      capacity: [CType::Immediate.parse("uint32_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), capacity)")],
       type: [CType::Immediate.parse("uint8_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), type)")],
+      size_pool_index: [CType::Immediate.parse("uint8_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), size_pool_index)")],
       parent_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), parent_id)")],
     )
   end


@@ -33,6 +33,7 @@
 #include "internal/string.h"
 #include "internal/symbol.h"
 #include "internal/variable.h"
+#include "variable.h"
 #include "probes.h"
 #include "ruby/encoding.h"
 #include "ruby/st.h"
@@ -268,21 +269,64 @@ rb_obj_singleton_class(VALUE obj)
 MJIT_FUNC_EXPORTED void
 rb_obj_copy_ivar(VALUE dest, VALUE obj)
 {
-    uint32_t dest_len = ROBJECT_NUMIV(dest);
-    uint32_t src_len = ROBJECT_NUMIV(obj);
-    if (dest_len < src_len) {
-        rb_ensure_iv_list_size(dest, dest_len, src_len);
-        RUBY_ASSERT(!(RBASIC(dest)->flags & ROBJECT_EMBED));
-    }
-    else {
-        RUBY_ASSERT((RBASIC(dest)->flags & ROBJECT_EMBED));
-    }
-    VALUE * dest_buf = ROBJECT_IVPTR(dest);
-    VALUE * src_buf = ROBJECT_IVPTR(obj);
-    MEMCPY(dest_buf, src_buf, VALUE, ROBJECT_IV_COUNT(obj));
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_CLASS) && !RB_TYPE_P(obj, T_MODULE));
+    RUBY_ASSERT(BUILTIN_TYPE(dest) == BUILTIN_TYPE(obj));
+
+    uint32_t src_num_ivs = RBASIC_IV_COUNT(obj);
+    rb_shape_t * src_shape = rb_shape_get_shape(obj);
+    rb_shape_t * shape_to_set_on_dest = src_shape;
+    VALUE * src_buf;
+    VALUE * dest_buf;
+
+    if (!src_num_ivs) {
+        return;
+    }
+
+    // The copy should be mutable, so we don't want the frozen shape
+    if (rb_shape_frozen_shape_p(src_shape)) {
+        shape_to_set_on_dest = rb_shape_get_shape_by_id(src_shape->parent_id);
+    }
+
+    src_buf = ROBJECT_IVPTR(obj);
+    dest_buf = ROBJECT_IVPTR(dest);
+
+    rb_shape_t * initial_shape = rb_shape_get_shape(dest);
+
+    if (initial_shape->size_pool_index != src_shape->size_pool_index) {
+        RUBY_ASSERT(initial_shape->parent_id == ROOT_SHAPE_ID || initial_shape->type == SHAPE_ROOT);
+
+        shape_to_set_on_dest = rb_shape_rebuild_shape(initial_shape, src_shape);
+    }
+
+    RUBY_ASSERT(src_num_ivs <= shape_to_set_on_dest->capacity);
+    if (initial_shape->capacity < shape_to_set_on_dest->capacity) {
+        rb_ensure_iv_list_size(dest, initial_shape->capacity, shape_to_set_on_dest->capacity);
+        dest_buf = ROBJECT_IVPTR(dest);
+    }
+
+    MEMCPY(dest_buf, src_buf, VALUE, src_num_ivs);
+
+    // Fire write barriers
+    for (uint32_t i = 0; i < src_num_ivs; i++) {
+        RB_OBJ_WRITTEN(dest, Qundef, dest_buf[i]);
+    }
+
+    rb_shape_set_shape(dest, shape_to_set_on_dest);
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_OBJECT) || ROBJECT_IV_CAPACITY(dest) == ROBJECT_NUMIV(dest));
 }

 static void
@@ -301,19 +345,6 @@ init_copy(VALUE dest, VALUE obj)
     if (RB_TYPE_P(obj, T_OBJECT)) {
         rb_obj_copy_ivar(dest, obj);
     }
-
-    if (!RB_TYPE_P(obj, T_CLASS) && !RB_TYPE_P(obj, T_MODULE)) {
-        rb_shape_t *shape_to_set = rb_shape_get_shape(obj);
-
-        // If the object is frozen, the "dup"'d object will *not* be frozen,
-        // so we need to copy the frozen shape's parent to the new object.
-        if (rb_shape_frozen_shape_p(shape_to_set)) {
-            shape_to_set = rb_shape_get_shape_by_id(shape_to_set->parent_id);
-        }
-
-        // shape ids are different
-        rb_shape_set_shape(dest, shape_to_set);
-    }
 }

 static VALUE immutable_obj_clone(VALUE obj, VALUE kwfreeze);

shape.c | 203

@@ -1,15 +1,19 @@
 #include "vm_core.h"
 #include "vm_sync.h"
 #include "shape.h"
+#include "gc.h"
 #include "internal/class.h"
 #include "internal/symbol.h"
 #include "internal/variable.h"
 #include <stdbool.h>

+static ID id_frozen;
+static ID size_pool_edge_names[SIZE_POOL_COUNT];
+
 /*
  * Shape getters
  */
-static rb_shape_t*
+rb_shape_t *
 rb_shape_get_root_shape(void)
 {
     return GET_VM()->root_shape;
@@ -21,12 +25,6 @@ rb_shape_id(rb_shape_t * shape)
     return (shape_id_t)(shape - GET_VM()->shape_list);
 }

-static rb_shape_t*
-rb_shape_get_frozen_root_shape(void)
-{
-    return GET_VM()->frozen_root_shape;
-}
-
 bool
 rb_shape_root_shape_p(rb_shape_t* shape)
 {
@@ -68,7 +66,7 @@ shape_id_t
 rb_shape_get_shape_id(VALUE obj)
 {
     if (RB_SPECIAL_CONST_P(obj)) {
-        return FROZEN_ROOT_SHAPE_ID;
+        return SPECIAL_CONST_SHAPE_ID;
     }

 #if SHAPE_IN_BASIC_FLAGS
@@ -113,12 +111,9 @@ rb_shape_lookup_id(rb_shape_t* shape, ID id, enum shape_type shape_type)
 }

 static rb_shape_t*
-get_next_shape_internal(rb_shape_t* shape, ID id, VALUE obj, enum shape_type shape_type)
+get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type)
 {
     rb_shape_t *res = NULL;
-
-    RUBY_ASSERT(SHAPE_FROZEN != (enum shape_type)shape->type || RB_TYPE_P(obj, T_MODULE) || RB_TYPE_P(obj, T_CLASS));
-
     RB_VM_LOCK_ENTER();
     {
         if (rb_shape_lookup_id(shape, id, shape_type)) {
@@ -142,23 +137,18 @@ get_next_shape_internal(rb_shape_t* shape, ID id, VALUE obj, enum shape_type sha
             rb_shape_t * new_shape = rb_shape_alloc(id, shape);

             new_shape->type = (uint8_t)shape_type;
+            new_shape->capacity = shape->capacity;

             switch (shape_type) {
               case SHAPE_IVAR:
-                new_shape->next_iv_index = rb_shape_get_shape_by_id(new_shape->parent_id)->next_iv_index + 1;
-
-                // Check if we should update next_iv_index on the object's class
-                if (BUILTIN_TYPE(obj) == T_OBJECT) {
-                    VALUE klass = rb_obj_class(obj);
-                    if (new_shape->next_iv_index > RCLASS_EXT(klass)->max_iv_count) {
-                        RCLASS_EXT(klass)->max_iv_count = new_shape->next_iv_index;
-                    }
-                }
+                new_shape->next_iv_index = shape->next_iv_index + 1;
                 break;
+              case SHAPE_CAPACITY_CHANGE:
               case SHAPE_IVAR_UNDEF:
               case SHAPE_FROZEN:
-                new_shape->next_iv_index = rb_shape_get_shape_by_id(new_shape->parent_id)->next_iv_index;
+                new_shape->next_iv_index = shape->next_iv_index;
                 break;
+              case SHAPE_INITIAL_CAPACITY:
               case SHAPE_ROOT:
                 rb_bug("Unreachable");
                 break;
@@ -183,7 +173,7 @@ rb_shape_frozen_shape_p(rb_shape_t* shape)
 void
 rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape)
 {
-    rb_shape_t* next_shape = get_next_shape_internal(shape, id, obj, SHAPE_IVAR_UNDEF);
+    rb_shape_t * next_shape = get_next_shape_internal(shape, id, SHAPE_IVAR_UNDEF);

     if (shape == next_shape) {
         return;
@@ -206,16 +196,11 @@ rb_shape_transition_shape_frozen(VALUE obj)
     rb_shape_t* next_shape;

     if (shape == rb_shape_get_root_shape()) {
-        next_shape = rb_shape_get_frozen_root_shape();
+        rb_shape_set_shape_id(obj, SPECIAL_CONST_SHAPE_ID);
+        return;
     }
-    else {
-        static ID id_frozen;
-        if (!id_frozen) {
-            id_frozen = rb_make_internal_id();
-        }

-        next_shape = get_next_shape_internal(shape, (ID)id_frozen, obj, SHAPE_FROZEN);
-    }
+    next_shape = get_next_shape_internal(shape, (ID)id_frozen, SHAPE_FROZEN);

     RUBY_ASSERT(next_shape);
     rb_shape_set_shape(obj, next_shape);
@@ -231,10 +216,39 @@ rb_shape_transition_shape(VALUE obj, ID id, rb_shape_t *shape)
     rb_shape_set_shape(obj, next_shape);
 }

-rb_shape_t*
+/*
+ * This function is used for assertions where we don't want to increment
+ * max_iv_count
+ */
+rb_shape_t *
+rb_shape_get_next_iv_shape(rb_shape_t* shape, ID id)
+{
+    return get_next_shape_internal(shape, id, SHAPE_IVAR);
+}
+
+rb_shape_t *
 rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id)
 {
-    return get_next_shape_internal(shape, id, obj, SHAPE_IVAR);
+    rb_shape_t * new_shape = rb_shape_get_next_iv_shape(shape, id);
+
+    // Check if we should update max_iv_count on the object's class
+    if (BUILTIN_TYPE(obj) == T_OBJECT) {
+        VALUE klass = rb_obj_class(obj);
+        if (new_shape->next_iv_index > RCLASS_EXT(klass)->max_iv_count) {
+            RCLASS_EXT(klass)->max_iv_count = new_shape->next_iv_index;
+        }
+    }
+
+    return new_shape;
+}
+
+rb_shape_t *
+rb_shape_transition_shape_capa(rb_shape_t* shape, uint32_t new_capacity)
+{
+    ID edge_name = rb_make_temporary_id(new_capacity);
+    rb_shape_t * new_shape = get_next_shape_internal(shape, edge_name, SHAPE_CAPACITY_CHANGE);
+    new_shape->capacity = new_capacity;
+    return new_shape;
 }

 bool
@@ -250,11 +264,13 @@ rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t *value)
                 RUBY_ASSERT(shape->next_iv_index > 0);
                 *value = shape->next_iv_index - 1;
                 return true;
+              case SHAPE_CAPACITY_CHANGE:
               case SHAPE_IVAR_UNDEF:
               case SHAPE_ROOT:
+              case SHAPE_INITIAL_CAPACITY:
                 return false;
               case SHAPE_FROZEN:
-                rb_bug("Ivar should not exist on frozen transition\n");
+                rb_bug("Ivar should not exist on transition\n");
             }
         }
         shape = rb_shape_get_shape_by_id(shape->parent_id);
@@ -289,10 +305,19 @@ rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id)
     return shape;
 }

+rb_shape_t *
+rb_shape_alloc_with_size_pool_index(ID edge_name, rb_shape_t * parent, uint8_t size_pool_index)
+{
+    rb_shape_t * shape = rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
+    shape->size_pool_index = size_pool_index;
+    return shape;
+}
+
 rb_shape_t *
 rb_shape_alloc(ID edge_name, rb_shape_t * parent)
 {
-    return rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
+    return rb_shape_alloc_with_size_pool_index(edge_name, parent, parent->size_pool_index);
 }

 MJIT_FUNC_EXPORTED void
@@ -307,6 +332,39 @@ rb_shape_flags_mask(void)
     return SHAPE_FLAG_MASK;
 }

+rb_shape_t *
+rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
+{
+    rb_shape_t * midway_shape;
+
+    if (dest_shape->type != SHAPE_ROOT) {
+        midway_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_shape_by_id(dest_shape->parent_id));
+    }
+    else {
+        midway_shape = initial_shape;
+    }
+
+    switch (dest_shape->type) {
+      case SHAPE_IVAR:
+        if (midway_shape->capacity < midway_shape->next_iv_index) {
+            // There isn't enough room to write this IV, so we need to increase the capacity
+            midway_shape = rb_shape_transition_shape_capa(midway_shape, midway_shape->capacity * 2);
+        }
+        midway_shape = rb_shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);
+        break;
+      case SHAPE_IVAR_UNDEF:
+        midway_shape = get_next_shape_internal(midway_shape, dest_shape->edge_name, SHAPE_IVAR_UNDEF);
+        break;
+      case SHAPE_ROOT:
+      case SHAPE_FROZEN:
+      case SHAPE_CAPACITY_CHANGE:
+        break;
+    }
+
+    return midway_shape;
+}
+
 #if VM_CHECK_MODE > 0
 VALUE rb_cShape;
@@ -335,6 +393,14 @@ rb_shape_type(VALUE self)
     return INT2NUM(shape->type);
 }

+static VALUE
+rb_shape_capacity(VALUE self)
+{
+    rb_shape_t * shape;
+    TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+    return INT2NUM(shape->capacity);
+}
+
 static VALUE
 rb_shape_parent_id(VALUE self)
 {
@@ -398,11 +464,16 @@ rb_shape_edge_name(VALUE self)
     rb_shape_t* shape;
     TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);

-    if (shape->edge_name) {
-        return ID2SYM(shape->edge_name);
+    if ((shape->edge_name & (ID_INTERNAL)) == ID_INTERNAL) {
+        return INT2NUM(shape->capacity);
     }
     else {
-        return Qnil;
+        if (shape->edge_name) {
+            return ID2SYM(shape->edge_name);
+        }
+        else {
+            return Qnil;
+        }
     }
 }

@@ -415,6 +486,15 @@ rb_shape_next_iv_index(VALUE self)
     return INT2NUM(shape->next_iv_index);
 }

+static VALUE
+rb_shape_size_pool_index(VALUE self)
+{
+    rb_shape_t * shape;
+    TypedData_Get_Struct(self, rb_shape_t, &shape_data_type, shape);
+
+    return INT2NUM(shape->size_pool_index);
+}
+
 static VALUE
 rb_shape_export_depth(VALUE self)
 {
@@ -454,12 +534,6 @@ rb_shape_root_shape(VALUE self)
     return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
 }

-static VALUE
-rb_shape_frozen_root_shape(VALUE self)
-{
-    return rb_shape_t_to_rb_cShape(rb_shape_get_frozen_root_shape());
-}
-
 VALUE rb_obj_shape(rb_shape_t* shape);

 static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
@@ -518,6 +592,43 @@ rb_shape_find_by_id(VALUE mod, VALUE id)
 }
 #endif

+void
+Init_default_shapes(void)
+{
+    id_frozen = rb_make_internal_id();
+
+    // Shapes by size pool
+    for (int i = 0; i < SIZE_POOL_COUNT; i++) {
+        size_pool_edge_names[i] = rb_make_internal_id();
+    }
+
+    // Root shape
+    rb_shape_t * root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
+    root->capacity = (uint32_t)((rb_size_pool_slot_size(0) - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
+    root->type = SHAPE_ROOT;
+    root->size_pool_index = 0;
+    GET_VM()->root_shape = root;
+    RUBY_ASSERT(rb_shape_id(GET_VM()->root_shape) == ROOT_SHAPE_ID);
+
+    // Shapes by size pool
+    for (int i = 1; i < SIZE_POOL_COUNT; i++) {
+        uint32_t capa = (uint32_t)((rb_size_pool_slot_size(i) - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
+        rb_shape_t * new_shape = rb_shape_transition_shape_capa(root, capa);
+        new_shape->type = SHAPE_INITIAL_CAPACITY;
+        new_shape->size_pool_index = i;
+        RUBY_ASSERT(rb_shape_id(new_shape) == (shape_id_t)i);
+    }
+
+    // Special const shape
+#if RUBY_DEBUG
+    rb_shape_t * special_const_shape =
+#endif
+        get_next_shape_internal(root, (ID)id_frozen, SHAPE_FROZEN);
+    RUBY_ASSERT(rb_shape_id(special_const_shape) == SPECIAL_CONST_SHAPE_ID);
+    RUBY_ASSERT(SPECIAL_CONST_SHAPE_ID == (GET_VM()->next_shape_id - 1));
+    RUBY_ASSERT(rb_shape_frozen_shape_p(special_const_shape));
+}
+
 void
 Init_shape(void)
 {
@@ -530,21 +641,23 @@ Init_shape(void)
     rb_define_method(rb_cShape, "edges", rb_shape_edges, 0);
     rb_define_method(rb_cShape, "edge_name", rb_shape_edge_name, 0);
     rb_define_method(rb_cShape, "next_iv_index", rb_shape_next_iv_index, 0);
+    rb_define_method(rb_cShape, "size_pool_index", rb_shape_size_pool_index, 0);
     rb_define_method(rb_cShape, "depth", rb_shape_export_depth, 0);
     rb_define_method(rb_cShape, "id", rb_wrapped_shape_id, 0);
     rb_define_method(rb_cShape, "type", rb_shape_type, 0);
+    rb_define_method(rb_cShape, "capacity", rb_shape_capacity, 0);
     rb_define_const(rb_cShape, "SHAPE_ROOT", INT2NUM(SHAPE_ROOT));
     rb_define_const(rb_cShape, "SHAPE_IVAR", INT2NUM(SHAPE_IVAR));
     rb_define_const(rb_cShape, "SHAPE_IVAR_UNDEF", INT2NUM(SHAPE_IVAR_UNDEF));
     rb_define_const(rb_cShape, "SHAPE_FROZEN", INT2NUM(SHAPE_FROZEN));
     rb_define_const(rb_cShape, "SHAPE_BITS", INT2NUM(SHAPE_BITS));
     rb_define_const(rb_cShape, "SHAPE_FLAG_SHIFT", INT2NUM(SHAPE_FLAG_SHIFT));
+    rb_define_const(rb_cShape, "SPECIAL_CONST_SHAPE_ID", INT2NUM(SPECIAL_CONST_SHAPE_ID));

     rb_define_singleton_method(rb_cShape, "transition_tree", shape_transition_tree, 0);
     rb_define_singleton_method(rb_cShape, "find_by_id", rb_shape_find_by_id, 1);
     rb_define_singleton_method(rb_cShape, "next_shape_id", next_shape_id, 0);
     rb_define_singleton_method(rb_cShape, "of", rb_shape_debug_shape, 1);
     rb_define_singleton_method(rb_cShape, "root_shape", rb_shape_root_shape, 0);
-    rb_define_singleton_method(rb_cShape, "frozen_root_shape", rb_shape_frozen_root_shape, 0);
 #endif
 }

shape.h | 28

@@ -40,13 +40,17 @@ typedef uint16_t shape_id_t;
 # define MAX_SHAPE_ID (SHAPE_MASK - 1)
 # define INVALID_SHAPE_ID SHAPE_MASK
 # define ROOT_SHAPE_ID 0x0
-# define FROZEN_ROOT_SHAPE_ID 0x1
+// We use SIZE_POOL_COUNT number of shape IDs for transitions out of different size pools
+// The next available shapd ID will be the SPECIAL_CONST_SHAPE_ID
+# define SPECIAL_CONST_SHAPE_ID SIZE_POOL_COUNT

 struct rb_shape {
     struct rb_id_table * edges; // id_table from ID (ivar) to next shape
     ID edge_name; // ID (ivar) for transition from parent to rb_shape
     attr_index_t next_iv_index;
+    uint32_t capacity; // Total capacity of the object with this shape
     uint8_t type;
+    uint8_t size_pool_index;
     shape_id_t parent_id;
 };

@@ -56,7 +60,9 @@ enum shape_type {
     SHAPE_ROOT,
     SHAPE_IVAR,
     SHAPE_FROZEN,
+    SHAPE_CAPACITY_CHANGE,
     SHAPE_IVAR_UNDEF,
+    SHAPE_INITIAL_CAPACITY,
 };

 #if SHAPE_IN_BASIC_FLAGS
@@ -124,6 +130,7 @@ static inline shape_id_t RCLASS_SHAPE_ID(VALUE obj) {
 #endif

 bool rb_shape_root_shape_p(rb_shape_t* shape);
+rb_shape_t * rb_shape_get_root_shape(void);

 rb_shape_t* rb_shape_get_shape_by_id_without_assertion(shape_id_t shape_id);
@@ -135,21 +142,37 @@ rb_shape_t* rb_shape_get_shape(VALUE obj);
 int rb_shape_frozen_shape_p(rb_shape_t* shape);
 void rb_shape_transition_shape_frozen(VALUE obj);
 void rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape);
+rb_shape_t * rb_shape_transition_shape_capa(rb_shape_t * shape, uint32_t new_capacity);
 void rb_shape_transition_shape(VALUE obj, ID id, rb_shape_t *shape);
+rb_shape_t * rb_shape_get_next_iv_shape(rb_shape_t * shape, ID id);
 rb_shape_t* rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id);
 bool rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t * value);
 shape_id_t rb_shape_id(rb_shape_t * shape);
 MJIT_SYMBOL_EXPORT_END

+rb_shape_t * rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape);
+
+static inline uint32_t
+ROBJECT_IV_CAPACITY(VALUE obj)
+{
+    RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
+    return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->capacity;
+}
+
 static inline uint32_t
 ROBJECT_IV_COUNT(VALUE obj)
 {
     RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
     uint32_t ivc = rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->next_iv_index;
-    RUBY_ASSERT(ivc <= ROBJECT_NUMIV(obj));
     return ivc;
 }

+static inline uint32_t
+RBASIC_IV_COUNT(VALUE obj)
+{
+    return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj))->next_iv_index;
+}
+
 static inline uint32_t
 RCLASS_IV_COUNT(VALUE obj)
 {
@@ -159,6 +182,7 @@ RCLASS_IV_COUNT(VALUE obj)
 }

 rb_shape_t * rb_shape_alloc(ID edge_name, rb_shape_t * parent);
+rb_shape_t * rb_shape_alloc_with_size_pool_index(ID edge_name, rb_shape_t * parent, uint8_t size_pool_index);
 rb_shape_t * rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id);

 bool rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id);


@@ -43,7 +43,11 @@ class Test_StringCStr < Test::Unit::TestCase
   end

   def test_rb_str_new_frozen_embed
-    str = Bug::String.cstr_noembed("rbconfig.rb")
+    # "rbconfi" is the smallest "maximum embeddable string". VWA adds
+    # a capacity field, which removes one pointer capacity for embedded objects,
+    # so if VWA is enabled, but there is only one size pool, then the
+    # maximum embeddable capacity on 32 bit machines is 8 bytes.
+    str = Bug::String.cstr_noembed("rbconfi")
     str = Bug::String.rb_str_new_frozen(str)
     assert_equal true, Bug::String.cstr_embedded?(str)
   end


@@ -277,7 +277,7 @@ class TestObjSpace < Test::Unit::TestCase
     info = nil
     ObjectSpace.trace_object_allocations do
       line = __LINE__ + 1
-      str = "hello world"
+      str = "hello w"
       info = ObjectSpace.dump(str)
     end
     assert_dump_object(info, line)
@@ -289,7 +289,7 @@ class TestObjSpace < Test::Unit::TestCase
     th = Thread.start {r.read}
     ObjectSpace.trace_object_allocations do
       line = __LINE__ + 1
-      str = "hello world"
+      str = "hello w"
       ObjectSpace.dump(str, output: w)
     end
     w.close
@@ -301,7 +301,7 @@ class TestObjSpace < Test::Unit::TestCase
   def assert_dump_object(info, line)
     loc = caller_locations(1, 1)[0]
     assert_match(/"type":"STRING"/, info)
-    assert_match(/"embedded":true, "bytesize":11, "value":"hello world", "encoding":"UTF-8"/, info)
+    assert_match(/"embedded":true, "bytesize":7, "value":"hello w", "encoding":"UTF-8"/, info)
     assert_match(/"file":"#{Regexp.escape __FILE__}", "line":#{line}/, info)
     assert_match(/"method":"#{loc.base_label}"/, info)
     JSON.parse(info) if defined?(JSON)
@@ -549,17 +549,17 @@ class TestObjSpace < Test::Unit::TestCase
      #
      # This test makes assertions on the assignment to `str`, so we look for
      # the second appearance of /TEST STRING/ in the output
-     test_string_in_dump_all = output.grep(/TEST STRING/)
-     assert_equal(test_string_in_dump_all.size, 2)
+     test_string_in_dump_all = output.grep(/TEST2/)
+     assert_equal(2, test_string_in_dump_all.size, "number of strings")

      entry_hash = JSON.parse(test_string_in_dump_all[1])

-     assert_equal(entry_hash["bytesize"], 11)
-     assert_equal(entry_hash["value"], "TEST STRING")
-     assert_equal(entry_hash["encoding"], "UTF-8")
-     assert_equal(entry_hash["file"], "-")
-     assert_equal(entry_hash["line"], 4)
-     assert_equal(entry_hash["method"], "dump_my_heap_please")
+     assert_equal(5, entry_hash["bytesize"], "bytesize is wrong")
+     assert_equal("TEST2", entry_hash["value"], "value is wrong")
+     assert_equal("UTF-8", entry_hash["encoding"], "encoding is wrong")
+     assert_equal("-", entry_hash["file"], "file is wrong")
+     assert_equal(4, entry_hash["line"], "line is wrong")
+     assert_equal("dump_my_heap_please", entry_hash["method"], "method is wrong")
      assert_not_nil(entry_hash["generation"])
    end
@@ -571,7 +571,7 @@ class TestObjSpace < Test::Unit::TestCase
      def dump_my_heap_please
        ObjectSpace.trace_object_allocations_start
        GC.start
-       str = "TEST STRING".force_encoding("UTF-8")
+       str = "TEST2".force_encoding("UTF-8")
        ObjectSpace.dump_all(output: :stdout)
      end
@@ -586,7 +586,7 @@ class TestObjSpace < Test::Unit::TestCase
      def dump_my_heap_please
        ObjectSpace.trace_object_allocations_start
        GC.start
-       (str = "TEST STRING").force_encoding("UTF-8")
+       (str = "TEST2").force_encoding("UTF-8")
        ObjectSpace.dump_all().path
      end


@@ -210,7 +210,7 @@ class TestGCCompact < Test::Unit::TestCase
   end
 
   def test_moving_arrays_down_size_pools
-    omit if !GC.using_rvargc?
+    omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
     assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10, signal: :SEGV)
     begin;
       ARY_COUNT = 500
@@ -229,7 +229,8 @@ class TestGCCompact < Test::Unit::TestCase
   end
 
   def test_moving_arrays_up_size_pools
-    omit if !GC.using_rvargc?
+    omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
+
     assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10, signal: :SEGV)
     begin;
       ARY_COUNT = 500
@@ -250,6 +251,8 @@ class TestGCCompact < Test::Unit::TestCase
   end
 
   def test_moving_objects_between_size_pools
+    omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
+
     assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10, signal: :SEGV)
     begin;
       class Foo
@@ -274,7 +277,8 @@ class TestGCCompact < Test::Unit::TestCase
   end
 
   def test_moving_strings_up_size_pools
-    omit if !GC.using_rvargc?
+    omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
+
     assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10, signal: :SEGV)
     begin;
       STR_COUNT = 500
@@ -292,7 +296,8 @@ class TestGCCompact < Test::Unit::TestCase
   end
 
   def test_moving_strings_down_size_pools
-    omit if !GC.using_rvargc?
+    omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
+
     assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10, signal: :SEGV)
     begin;
       STR_COUNT = 500


@@ -86,15 +86,10 @@ class TestShapes < Test::Unit::TestCase
     assert_equal(2, bar_shape.next_iv_index)
   end
 
-  def test_new_obj_has_root_shape
-    assert_shape_equal(RubyVM::Shape.root_shape, RubyVM::Shape.of(Object.new))
-  end
+  class TestObject; end
 
-  def test_frozen_new_obj_has_frozen_root_shape
-    assert_shape_equal(
-      RubyVM::Shape.frozen_root_shape,
-      RubyVM::Shape.of(Object.new.freeze)
-    )
+  def test_new_obj_has_root_shape
+    assert_shape_equal(RubyVM::Shape.root_shape, RubyVM::Shape.of(TestObject.new))
   end
 
   def test_str_has_root_shape
@@ -109,12 +104,12 @@ class TestShapes < Test::Unit::TestCase
     assert_shape_equal(RubyVM::Shape.root_shape, RubyVM::Shape.of({}))
   end
 
-  def test_true_has_frozen_root_shape
-    assert_shape_equal(RubyVM::Shape.frozen_root_shape, RubyVM::Shape.of(true))
+  def test_true_has_special_const_shape_id
+    assert_equal(RubyVM::Shape::SPECIAL_CONST_SHAPE_ID, RubyVM::Shape.of(true).id)
   end
 
-  def test_nil_has_frozen_root_shape
-    assert_shape_equal(RubyVM::Shape.frozen_root_shape, RubyVM::Shape.of(nil))
+  def test_nil_has_special_const_shape_id
+    assert_equal(RubyVM::Shape::SPECIAL_CONST_SHAPE_ID, RubyVM::Shape.of(nil).id)
   end
 
   def test_basic_shape_transition


@@ -1092,7 +1092,7 @@ rb_generic_shape_id(VALUE obj)
             shape_id = ivtbl->shape_id;
         }
         else if (OBJ_FROZEN(obj)) {
-            shape_id = FROZEN_ROOT_SHAPE_ID;
+            shape_id = SPECIAL_CONST_SHAPE_ID;
         }
     }
     RB_VM_LOCK_LEAVE();
@@ -1364,26 +1364,21 @@ rb_obj_transient_heap_evacuate(VALUE obj, int promote)
 #endif
 
 void
-rb_ensure_iv_list_size(VALUE obj, uint32_t len, uint32_t newsize)
+rb_ensure_iv_list_size(VALUE obj, uint32_t current_capacity, uint32_t new_capacity)
 {
     VALUE *ptr = ROBJECT_IVPTR(obj);
     VALUE *newptr;
 
     if (RBASIC(obj)->flags & ROBJECT_EMBED) {
-        newptr = obj_ivar_heap_alloc(obj, newsize);
-        MEMCPY(newptr, ptr, VALUE, len);
+        newptr = obj_ivar_heap_alloc(obj, new_capacity);
+        MEMCPY(newptr, ptr, VALUE, current_capacity);
         RB_FL_UNSET_RAW(obj, ROBJECT_EMBED);
         ROBJECT(obj)->as.heap.ivptr = newptr;
     }
     else {
-        newptr = obj_ivar_heap_realloc(obj, len, newsize);
+        newptr = obj_ivar_heap_realloc(obj, current_capacity, new_capacity);
     }
 
-#if USE_RVARGC
-    ROBJECT(obj)->numiv = newsize;
-#else
-    ROBJECT(obj)->as.heap.numiv = newsize;
-#endif
+    ROBJECT_SET_NUMIV(obj, new_capacity);
 }
 
 struct gen_ivtbl *
@@ -1407,12 +1402,25 @@ rb_ensure_generic_iv_list_size(VALUE obj, uint32_t newsize)
 }
 
 // @note May raise when there are too many instance variables.
-void
-rb_init_iv_list(VALUE obj)
+rb_shape_t *
+rb_grow_iv_list(VALUE obj)
 {
-    uint32_t newsize = (uint32_t)(rb_shape_get_shape(obj)->next_iv_index * 2.0);
     uint32_t len = ROBJECT_NUMIV(obj);
-    rb_ensure_iv_list_size(obj, len, newsize < len ? len : newsize);
+    RUBY_ASSERT(len > 0);
+
+    uint32_t newsize = (uint32_t)(len * 2);
+    rb_ensure_iv_list_size(obj, len, newsize);
+
+    rb_shape_t * res;
+#if USE_RVARGC
+    ROBJECT_SET_NUMIV(obj, newsize);
+#else
+    ROBJECT(obj)->as.heap.numiv = newsize;
+#endif
+
+    res = rb_shape_transition_shape_capa(rb_shape_get_shape(obj), newsize);
+    rb_shape_set_shape(obj, res);
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_OBJECT) || ROBJECT_IV_CAPACITY(obj) == ROBJECT_NUMIV(obj));
+    return res;
 }
 
 static VALUE
@@ -1423,9 +1431,10 @@ obj_ivar_set(VALUE obj, ID id, VALUE val)
     // Get the current shape
     rb_shape_t * shape = rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj));
+    bool found = true;
 
     if (!rb_shape_get_iv_index(shape, id, &index)) {
-        shape = rb_shape_get_next(shape, obj, id);
-        index = shape->next_iv_index - 1;
+        index = shape->next_iv_index;
+        found = false;
     }
 
     uint32_t len = ROBJECT_NUMIV(obj);
@@ -1434,12 +1443,16 @@ obj_ivar_set(VALUE obj, ID id, VALUE val)
     // on this object until the buffer has been allocated, otherwise
     // GC could read off the end of the buffer.
     if (len <= index) {
-        uint32_t newsize = (uint32_t)((len + 1) * 1.25);
-        rb_ensure_iv_list_size(obj, len, newsize);
+        shape = rb_grow_iv_list(obj);
+    }
+
+    if (!found) {
+        shape = rb_shape_get_next(shape, obj, id);
+        RUBY_ASSERT(index == (shape->next_iv_index - 1));
+        rb_shape_set_shape(obj, shape);
     }
 
     RB_OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[index], val);
-    rb_shape_set_shape(obj, shape);
 
     return val;
 }
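
With growth and the ivar transition decoupled, the slow path now reads in two steps: grow the buffer (which performs a capacity transition) when the index computed from the shape does not fit, then take the SHAPE_IVAR transition only when the ivar is new. A condensed sketch of that flow, with caching and most assertions stripped (names as in the hunks above; not the literal implementation):

    /* Condensed sketch of the new ivar-set slow path. */
    static VALUE
    ivar_set_sketch(VALUE obj, ID id, VALUE val)
    {
        attr_index_t index;
        bool found = true;

        rb_shape_t *shape = rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj));
        if (!rb_shape_get_iv_index(shape, id, &index)) {
            index = shape->next_iv_index;               /* next free slot per the shape */
            found = false;
        }

        if (ROBJECT_NUMIV(obj) <= index) {
            shape = rb_grow_iv_list(obj);               /* doubles the buffer, capacity transition */
        }

        if (!found) {
            shape = rb_shape_get_next(shape, obj, id);  /* SHAPE_IVAR transition for id */
            rb_shape_set_shape(obj, shape);
        }

        RB_OBJ_WRITE(obj, &ROBJECT_IVPTR(obj)[index], val);
        return val;
    }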
@@ -1475,7 +1488,7 @@ rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id)
         RCLASS_EXT(obj)->shape_id = shape_id;
         break;
       default:
-        if (shape_id != FROZEN_ROOT_SHAPE_ID) {
+        if (shape_id != SPECIAL_CONST_SHAPE_ID) {
            struct gen_ivtbl *ivtbl = 0;
            RB_VM_LOCK_ENTER();
            {
@@ -1599,8 +1612,10 @@ iterate_over_shapes_with_callback(rb_shape_t *shape, rb_ivar_foreach_callback_fu
             callback(shape->edge_name, val, itr_data->arg);
         }
         return;
-      case SHAPE_IVAR_UNDEF:
+      case SHAPE_INITIAL_CAPACITY:
+      case SHAPE_CAPACITY_CHANGE:
       case SHAPE_FROZEN:
+      case SHAPE_IVAR_UNDEF:
         iterate_over_shapes_with_callback(rb_shape_get_shape_by_id(shape->parent_id), callback, itr_data);
         return;
@@ -3922,7 +3937,7 @@ rb_iv_tbl_copy(VALUE dst, VALUE src)
     RUBY_ASSERT(rb_type(dst) == rb_type(src));
     RUBY_ASSERT(RB_TYPE_P(dst, T_CLASS) || RB_TYPE_P(dst, T_MODULE));
-    RUBY_ASSERT(RCLASS_SHAPE_ID(dst) == ROOT_SHAPE_ID);
+    RUBY_ASSERT(RCLASS_SHAPE_ID(dst) == ROOT_SHAPE_ID || rb_shape_get_shape_by_id(RCLASS_SHAPE_ID(dst))->type == SHAPE_INITIAL_CAPACITY);
     RUBY_ASSERT(!RCLASS_IVPTR(dst));
 
     rb_ivar_foreach(src, tbl_copy_i, dst);

vm.c

@@ -4044,17 +4044,6 @@ Init_vm_objects(void)
     if (!vm->shape_list) {
         rb_memerror();
     }
-
-    // Root shape
-    vm->root_shape = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
-    RUBY_ASSERT(rb_shape_id(vm->root_shape) == ROOT_SHAPE_ID);
-
-    // Frozen root shape
-    vm->frozen_root_shape = rb_shape_alloc_with_parent_id(rb_make_internal_id(), rb_shape_id(vm->root_shape));
-    vm->frozen_root_shape->type = (uint8_t)SHAPE_FROZEN;
-    RUBY_ASSERT(rb_shape_id(vm->frozen_root_shape) == FROZEN_ROOT_SHAPE_ID);
-
-    vm->next_shape_id = 2;
 }
 
 /* Stub for builtin function when not building YJIT units*/


@@ -691,7 +691,6 @@ typedef struct rb_vm_struct {
     /* object shapes */
     rb_shape_t *shape_list;
    rb_shape_t *root_shape;
-    rb_shape_t *frozen_root_shape;
     shape_id_t next_shape_id;
 
     /* load */


@@ -50,11 +50,6 @@ MJIT_STATIC VALUE
 ruby_vm_special_exception_copy(VALUE exc)
 {
     VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
-    rb_shape_t * shape = rb_shape_get_shape(exc);
-    if (rb_shape_frozen_shape_p(shape)) {
-        shape = rb_shape_get_shape_by_id(shape->parent_id);
-    }
-    rb_shape_set_shape(e, shape);
     rb_obj_copy_ivar(e, exc);
     return e;
 }
@@ -1310,37 +1305,33 @@ vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic,
             rb_shape_t* shape = rb_shape_get_shape(obj);
             shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
 
-            rb_shape_t* next_shape = rb_shape_get_next(shape, obj, id);
-
-            if (shape != next_shape) {
-                RUBY_ASSERT(next_shape->parent_id == rb_shape_id(shape));
-                next_shape_id = rb_shape_id(next_shape);
-            }
-
-            if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
+            if (!rb_shape_get_iv_index(shape, id, &index)) {
+                if (UNLIKELY(shape->next_iv_index >= num_iv)) {
+                    RUBY_ASSERT(shape->next_iv_index == num_iv);
+                    shape = rb_grow_iv_list(obj);
+                    RUBY_ASSERT(shape->type == SHAPE_CAPACITY_CHANGE);
+                }
+
+                index = shape->next_iv_index;
+
                 if (index >= MAX_IVARS) {
                     rb_raise(rb_eArgError, "too many instance variables");
                 }
 
-                populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
-            }
-            else {
-                rb_bug("Didn't find instance variable %s\n", rb_id2name(id));
-            }
-
-            // Ensure the IV buffer is wide enough to store the IV
-            if (UNLIKELY(index >= num_iv)) {
-                RUBY_ASSERT(index == num_iv);
-                rb_init_iv_list(obj);
-            }
-
-            if (shape != next_shape) {
+                rb_shape_t * next_shape = rb_shape_get_next(shape, obj, id);
+                RUBY_ASSERT(next_shape->type == SHAPE_IVAR);
+                RUBY_ASSERT(index == (next_shape->next_iv_index - 1));
+                next_shape_id = rb_shape_id(next_shape);
+
                 rb_shape_set_shape(obj, next_shape);
             }
 
+            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
+
             VALUE *ptr = ROBJECT_IVPTR(obj);
             RB_OBJ_WRITE(obj, &ptr[index], val);
             RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
 
             return val;
         }
       case T_CLASS:
@@ -1450,17 +1441,18 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
     else if (dest_shape_id != INVALID_SHAPE_ID) {
         rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
         shape_id_t source_shape_id = dest_shape->parent_id;
 
-        RUBY_ASSERT(dest_shape->type == SHAPE_IVAR || dest_shape->type == SHAPE_IVAR_UNDEF);
-
-        if (shape_id == source_shape_id && dest_shape->edge_name == id) {
+        if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
             RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
 
-            if (UNLIKELY(index >= ROBJECT_NUMIV(obj))) {
-                rb_init_iv_list(obj);
-            }
+            RUBY_ASSERT(ROBJECT_IV_CAPACITY(obj) == ROBJECT_NUMIV(obj));
 
             ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
 
-            RUBY_ASSERT(rb_shape_get_next(rb_shape_get_shape_by_id(source_shape_id), obj, id) == dest_shape);
+            RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
+            RUBY_ASSERT(ROBJECT_IV_CAPACITY(obj) == ROBJECT_NUMIV(obj));
             RUBY_ASSERT(index < ROBJECT_NUMIV(obj));
         }
         else {
             break;
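
On the cached fast path, growth can no longer happen: the cache is only honored when the destination shape is a plain SHAPE_IVAR transition whose parent is the object's current shape, and the assertions above document that capacity already matches numiv there. A condensed sketch of the check (names as in the hunk above; not the literal implementation):

    /* Condensed sketch of the cached ivar-set validity check. */
    rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
    if (shape_id == dest_shape->parent_id &&    /* object still sits on the source shape */
        dest_shape->edge_name == id &&          /* cache was filled for this ivar name   */
        dest_shape->type == SHAPE_IVAR) {       /* only plain ivar transitions qualify   */
        ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);   /* no buffer growth on this path */
    }
    else {
        /* fall through to the general path (vm_setivar_slowpath) */
    }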


@@ -248,10 +248,9 @@ extern "C" {
 }
 pub const ROBJECT_EMBED: ruby_robject_flags = 8192;
 pub type ruby_robject_flags = u32;
-pub const ROBJECT_OFFSET_NUMIV: i32 = 16;
-pub const ROBJECT_OFFSET_AS_HEAP_IVPTR: i32 = 24;
-pub const ROBJECT_OFFSET_AS_HEAP_IV_INDEX_TBL: i32 = 32;
-pub const ROBJECT_OFFSET_AS_ARY: i32 = 24;
+pub const ROBJECT_OFFSET_AS_HEAP_IVPTR: i32 = 16;
+pub const ROBJECT_OFFSET_AS_HEAP_IV_INDEX_TBL: i32 = 24;
+pub const ROBJECT_OFFSET_AS_ARY: i32 = 16;
 extern "C" {
     pub static mut rb_mKernel: VALUE;
 }
@@ -420,7 +419,9 @@ pub struct rb_shape {
     pub edges: *mut rb_id_table,
     pub edge_name: ID,
     pub next_iv_index: attr_index_t,
+    pub capacity: u32,
     pub type_: u8,
+    pub size_pool_index: u8,
     pub parent_id: shape_id_t,
 }
 pub type rb_shape_t = rb_shape;