[Feature #19579] Remove !USE_RVARGC code (#7655)

Remove !USE_RVARGC code

[Feature #19579]

The Variable Width Allocation feature was turned on by default in Ruby
3.2. Since then, we haven't received any bug reports for, or had to
backport fixes to, the non-Variable Width Allocation code paths, so we
assume that nobody is using them. We also don't plan to maintain the
non-Variable Width Allocation code, so we are removing it.
Peter Zhu 2023-04-04 17:30:06 -04:00 committed by GitHub
parent 1190ec60cc
commit 1da2e7fca3
Notes: git 2023-04-04 21:30:35 +00:00
Merged-By: maximecb <maximecb@ruby-lang.org>
18 changed files with 17 additions and 320 deletions
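
For readers skimming the diff below: with Variable Width Allocation unconditional, the capacity of an "embedded" object is derived at runtime from the size of the GC slot the object lives in, instead of from compile-time constants such as the removed RARRAY_EMBED_LEN_MAX and RSTRING_EMBED_LEN_MAX. The following standalone sketch (not part of this commit) illustrates that arithmetic; the 16-byte header and the listed slot sizes are assumptions for illustration, not Ruby API.

    /* Standalone sketch (not Ruby source): how embed capacity falls out of the
     * slot size under Variable Width Allocation.  Header size and slot sizes
     * below are assumptions for illustration only. */
    #include <stdio.h>
    #include <stddef.h>

    typedef unsigned long VALUE;

    static size_t
    embed_capa(size_t slot_size, size_t header_size)
    {
        return (slot_size - header_size) / sizeof(VALUE);
    }

    int main(void)
    {
        const size_t header = 16;                       /* assumed RBasic-sized header */
        const size_t slots[] = {40, 80, 160, 320, 640}; /* typical 64-bit slot sizes */
        for (size_t i = 0; i < sizeof(slots) / sizeof(slots[0]); i++) {
            printf("%3zu-byte slot -> up to %zu embedded VALUEs\n",
                   slots[i], embed_capa(slots[i], header));
        }
        return 0;
    }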


@@ -175,8 +175,6 @@ jobs:
 # - { name: USE_THREAD_CACHE=0, env: { cppflags: '-DUSE_THREAD_CACHE=0' } }
 # - { name: USE_TRANSIENT_HEAP=0, env: { cppflags: '-DUSE_TRANSIENT_HEAP=0' } }
 - { name: USE_RUBY_DEBUG_LOG=1, env: { cppflags: '-DUSE_RUBY_DEBUG_LOG=1' } }
-- { name: USE_RVARGC=0, env: { cppflags: '-DUSE_RVARGC=0' } }
-# - { name: USE_RVARGC=1, env: { cppflags: '-DUSE_RVARGC=1' } }
 # - { name: USE_DEBUG_COUNTER, env: { cppflags: '-DUSE_DEBUG_COUNTER=1', RUBY_DEBUG_COUNTER_DISABLE: '1' } }
 - { name: DEBUG_FIND_TIME_NUMGUESS, env: { cppflags: '-DDEBUG_FIND_TIME_NUMGUESS' } }

array.c (22 lines changed)

@@ -47,13 +47,8 @@ VALUE rb_cArray;
  * 2: RARRAY_SHARED_FLAG (equal to ELTS_SHARED)
  *     The array is shared. The buffer this array points to is owned by
  *     another array (the shared root).
- * if USE_RVARGC
  * 3-9: RARRAY_EMBED_LEN
  *     The length of the array when RARRAY_EMBED_FLAG is set.
- * else
- * 3-4: RARRAY_EMBED_LEN
- *     The length of the array when RARRAY_EMBED_FLAG is set.
- * endif
  * 12: RARRAY_SHARED_ROOT_FLAG
  *     The array is a shared root that does reference counting. The buffer
  *     this array points to is owned by this array but may be pointed to
@@ -188,13 +183,9 @@ ARY_SET(VALUE a, long i, VALUE v)
 static long
 ary_embed_capa(VALUE ary)
 {
-#if USE_RVARGC
     size_t size = rb_gc_obj_slot_size(ary) - offsetof(struct RArray, as.ary);
     assert(size % sizeof(VALUE) == 0);
     return size / sizeof(VALUE);
-#else
-    return RARRAY_EMBED_LEN_MAX;
-#endif
 }

 static size_t
@@ -206,11 +197,7 @@ ary_embed_size(long capa)
 static bool
 ary_embeddable_p(long capa)
 {
-#if USE_RVARGC
     return rb_gc_size_allocatable_p(ary_embed_size(capa));
-#else
-    return capa <= RARRAY_EMBED_LEN_MAX;
-#endif
 }

 bool
@@ -791,9 +778,6 @@ ary_alloc_embed(VALUE klass, long capa)
 {
     size_t size = ary_embed_size(capa);
     assert(rb_gc_size_allocatable_p(size));
-#if !USE_RVARGC
-    assert(size <= sizeof(struct RArray));
-#endif
     RVARGC_NEWOBJ_OF(ary, struct RArray, klass,
                      T_ARRAY | RARRAY_EMBED_FLAG | (RGENGC_WB_PROTECTED_ARRAY ? FL_WB_PROTECTED : 0),
                      size);
@@ -906,9 +890,6 @@ ec_ary_alloc_embed(rb_execution_context_t *ec, VALUE klass, long capa)
 {
     size_t size = ary_embed_size(capa);
     assert(rb_gc_size_allocatable_p(size));
-#if !USE_RVARGC
-    assert(size <= sizeof(struct RArray));
-#endif
     RB_RVARGC_EC_NEWOBJ_OF(ec, ary, struct RArray, klass,
                            T_ARRAY | RARRAY_EMBED_FLAG | (RGENGC_WB_PROTECTED_ARRAY ? FL_WB_PROTECTED : 0),
                            size);
@@ -1033,7 +1014,6 @@ rb_ary_memsize(VALUE ary)
 static VALUE
 ary_make_shared(VALUE ary)
 {
-    assert(USE_RVARGC || !ARY_EMBED_P(ary));
     ary_verify(ary);

     if (ARY_SHARED_P(ary)) {
@@ -4741,7 +4721,6 @@ rb_ary_replace(VALUE copy, VALUE orig)
         ary_memcpy(copy, 0, RARRAY_LEN(orig), RARRAY_CONST_PTR_TRANSIENT(orig));
         ARY_SET_EMBED_LEN(copy, RARRAY_LEN(orig));
     }
-#if USE_RVARGC
     /* orig is embedded but copy does not have enough space to embed the
      * contents of orig. */
     else if (ARY_EMBED_P(orig)) {
@@ -4757,7 +4736,6 @@ rb_ary_replace(VALUE copy, VALUE orig)
         // bad state from the edits above.
         ary_memcpy(copy, 0, len, RARRAY_CONST_PTR_TRANSIENT(orig));
     }
-#endif
     /* Otherwise, orig is on heap and copy does not have enough space to embed
      * the contents of orig. */
     else {
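
As a worked example of what the surviving ary_embed_capa / ary_embeddable_p pair decides: an array of capa elements needs roughly offsetof(struct RArray, as.ary) + capa * sizeof(VALUE) bytes (what ary_embed_size computes), and it is embeddable only if the GC can hand out a slot that large. The sketch below is illustrative only; the 16-byte header and 640-byte largest slot are assumptions, not values taken from the tree.

    /* Hedged sketch of the array-embedding rule kept by this commit.  The
     * header and maximum slot size are assumed figures for illustration. */
    #include <stdbool.h>
    #include <stdio.h>

    typedef unsigned long VALUE;

    static bool
    ary_embeddable(long capa, size_t header, size_t max_slot)
    {
        /* mirrors "ary_embed_size(capa) fits in the largest allocatable slot" */
        return header + (size_t)capa * sizeof(VALUE) <= max_slot;
    }

    int main(void)
    {
        for (long capa = 60; capa <= 90; capa += 10) {
            printf("capa %ld -> %s\n", capa,
                   ary_embeddable(capa, 16, 640) ? "embedded" : "heap buffer");
        }
        return 0;
    }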


@@ -53,14 +53,8 @@ const union {
     rb_econv_result_t econv_result;
     enum ruby_preserved_encindex encoding_index;
     enum ruby_robject_flags robject_flags;
-#if !USE_RVARGC
-    enum ruby_robject_consts robject_consts;
-#endif
     enum ruby_rmodule_flags rmodule_flags;
     enum ruby_rstring_flags rstring_flags;
-#if !USE_RVARGC
-    enum ruby_rstring_consts rstring_consts;
-#endif
     enum ruby_rarray_flags rarray_flags;
     enum ruby_rarray_consts rarray_consts;
     enum {


@@ -62,12 +62,7 @@ bug_str_unterminated_substring(VALUE str, VALUE vbeg, VALUE vlen)
     if (RSTRING_LEN(str) < beg + len) rb_raise(rb_eIndexError, "end: %ld", beg + len);
     str = rb_str_new_shared(str);
     if (STR_EMBED_P(str)) {
-#if USE_RVARGC
         RSTRING(str)->as.embed.len = (short)len;
-#else
-        RSTRING(str)->basic.flags &= ~RSTRING_EMBED_LEN_MASK;
-        RSTRING(str)->basic.flags |= len << RSTRING_EMBED_LEN_SHIFT;
-#endif
         memmove(RSTRING(str)->as.embed.ary, RSTRING(str)->as.embed.ary + beg, len);
     }
     else {
@@ -116,11 +111,7 @@ bug_str_s_cstr_noembed(VALUE self, VALUE str)
     Check_Type(str, T_STRING);
     FL_SET((str2), STR_NOEMBED);
     memcpy(buf, RSTRING_PTR(str), capacity);
-#if USE_RVARGC
     RBASIC(str2)->flags &= ~(STR_SHARED | FL_USER5 | FL_USER6);
-#else
-    RBASIC(str2)->flags &= ~RSTRING_EMBED_LEN_MASK;
-#endif
     RSTRING(str2)->as.heap.aux.capa = capacity;
     RSTRING(str2)->as.heap.ptr = buf;
     RSTRING(str2)->as.heap.len = RSTRING_LEN(str);

gc.c (98 lines changed)

@@ -706,11 +706,9 @@ typedef struct rb_size_pool_struct {
     size_t total_freed_pages;
     size_t force_major_gc_count;

-#if USE_RVARGC
     /* Sweeping statistics */
     size_t freed_slots;
     size_t empty_slots;
-#endif

     rb_heap_t eden_heap;
     rb_heap_t tomb_heap;
@@ -1233,9 +1231,7 @@ static bool gc_marks_continue(rb_objspace_t *objspace, rb_size_pool_t *size_pool
 static void gc_sweep(rb_objspace_t *objspace);
 static void gc_sweep_start(rb_objspace_t *objspace);
-#if USE_RVARGC
 static void gc_sweep_finish_size_pool(rb_objspace_t *objspace, rb_size_pool_t *size_pool);
-#endif
 static void gc_sweep_finish(rb_objspace_t *objspace);
 static int gc_sweep_step(rb_objspace_t *objspace, rb_size_pool_t *size_pool, rb_heap_t *heap);
 static void gc_sweep_rest(rb_objspace_t *objspace);
@@ -2657,11 +2653,7 @@ ractor_cache_allocate_slot(rb_objspace_t *objspace, rb_ractor_newobj_cache_t *ca
     VALUE obj = (VALUE)p;
     MAYBE_UNUSED(const size_t) stride = size_pool_slot_size(size_pool_idx);
     size_pool_cache->freelist = p->as.free.next;
-#if USE_RVARGC
     asan_unpoison_memory_region(p, stride, true);
-#else
-    asan_unpoison_object(obj, true);
-#endif
 #if RGENGC_CHECK_MODE
     GC_ASSERT(rb_gc_obj_slot_size(obj) == stride);
     // zero clear
@@ -2731,7 +2723,6 @@ newobj_fill(VALUE obj, VALUE v1, VALUE v2, VALUE v3)
 static inline size_t
 size_pool_idx_for_size(size_t size)
 {
-#if USE_RVARGC
     size += RVALUE_OVERHEAD;

     size_t slot_count = CEILDIV(size, BASE_SLOT_SIZE);
@@ -2750,10 +2741,6 @@ size_pool_idx_for_size(size_t size)
 #endif

     return size_pool_idx;
-#else
-    GC_ASSERT(size <= sizeof(RVALUE));
-    return 0;
-#endif
 }

 static VALUE
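
The now-unconditional size_pool_idx_for_size path above rounds the requested size up to a whole number of base slots and picks the pool whose slot size (base << idx) can hold it. Here is a hedged standalone sketch of that mapping, assuming a 40-byte base slot and five pools (both implementation details, not API):

    /* Hedged sketch of mapping an allocation size to a GC size pool index,
     * assuming BASE_SLOT_SIZE is 40 bytes and there are 5 pools. */
    #include <stdio.h>

    #define BASE_SLOT_SIZE  40u   /* assumption for illustration */
    #define SIZE_POOL_COUNT 5

    static unsigned
    pool_idx_for_size(size_t size)
    {
        unsigned idx = 0;
        /* smallest pool whose slot size (BASE_SLOT_SIZE << idx) fits `size`;
         * the real code raises rb_bug() if even the largest pool is too small */
        while (idx + 1 < SIZE_POOL_COUNT && (BASE_SLOT_SIZE << idx) < size) {
            idx++;
        }
        return idx;
    }

    int main(void)
    {
        const size_t sizes[] = {40, 41, 100, 320, 500};
        for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
            unsigned idx = pool_idx_for_size(sizes[i]);
            printf("%3zu bytes -> pool %u (%u-byte slots)\n",
                   sizes[i], idx, BASE_SLOT_SIZE << idx);
        }
        return 0;
    }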
@@ -2963,16 +2950,12 @@ rb_class_instance_allocate_internal(VALUE klass, VALUE flags, bool wb_protected)
     GC_ASSERT(flags & ROBJECT_EMBED);

     size_t size;
-#if USE_RVARGC
     uint32_t index_tbl_num_entries = RCLASS_EXT(klass)->max_iv_count;

     size = rb_obj_embedded_size(index_tbl_num_entries);
     if (!rb_gc_size_allocatable_p(size)) {
         size = sizeof(struct RObject);
     }
-#else
-    size = sizeof(struct RObject);
-#endif

     VALUE obj = newobj_of(klass, flags, 0, 0, 0, wb_protected, size);
     RUBY_ASSERT(rb_shape_get_shape(obj)->type == SHAPE_ROOT ||
@@ -5742,23 +5725,6 @@ gc_sweep_page(rb_objspace_t *objspace, rb_heap_t *heap, struct gc_sweep_context
     gc_report(2, objspace, "page_sweep: end.\n");
 }

-#if !USE_RVARGC
-/* allocate additional minimum page to work */
-static void
-gc_heap_prepare_minimum_pages(rb_objspace_t *objspace, rb_size_pool_t *size_pool, rb_heap_t *heap)
-{
-    for (int i = 0; i < SIZE_POOL_COUNT; i++) {
-        if (!heap->free_pages && heap_increment(objspace, size_pool, heap) == FALSE) {
-            /* there is no free after page_sweep() */
-            size_pool_allocatable_pages_set(objspace, size_pool, 1);
-            if (!heap_increment(objspace, size_pool, heap)) { /* can't allocate additional free objects */
-                rb_memerror();
-            }
-        }
-    }
-}
-#endif

 static const char *
 gc_mode_name(enum gc_mode mode)
 {
@@ -5841,14 +5807,12 @@ gc_sweep_start(rb_objspace_t *objspace)
         gc_sweep_start_heap(objspace, heap);

-#if USE_RVARGC
         /* We should call gc_sweep_finish_size_pool for size pools with no pages. */
         if (heap->sweeping_page == NULL) {
             GC_ASSERT(heap->total_pages == 0);
             GC_ASSERT(heap->total_slots == 0);
             gc_sweep_finish_size_pool(objspace, size_pool);
         }
-#endif
     }

     rb_ractor_t *r = NULL;
@@ -5857,7 +5821,6 @@ gc_sweep_start(rb_objspace_t *objspace)
     }
 }

-#if USE_RVARGC
 static void
 gc_sweep_finish_size_pool(rb_objspace_t *objspace, rb_size_pool_t *size_pool)
 {
@@ -5920,7 +5883,6 @@ gc_sweep_finish_size_pool(rb_objspace_t *objspace, rb_size_pool_t *size_pool)
         }
     }
 }
-#endif

 static void
 gc_sweep_finish(rb_objspace_t *objspace)
@@ -5939,7 +5901,6 @@ gc_sweep_finish(rb_objspace_t *objspace)
             size_pool->allocatable_pages = tomb_pages;
         }

-#if USE_RVARGC
         size_pool->freed_slots = 0;
         size_pool->empty_slots = 0;
@@ -5956,7 +5917,6 @@
             eden_heap->pooled_pages = NULL;
             objspace->rincgc.pooled_slots = 0;
         }
-#endif
     }

     heap_pages_expand_sorted(objspace);
@@ -5974,11 +5934,7 @@ gc_sweep_step(rb_objspace_t *objspace, rb_size_pool_t *size_pool, rb_heap_t *hea
     struct heap_page *sweep_page = heap->sweeping_page;
     int unlink_limit = GC_SWEEP_PAGES_FREEABLE_PER_STEP;
     int swept_slots = 0;
-#if USE_RVARGC
     bool need_pool = TRUE;
-#else
-    int need_pool = will_be_incremental_marking(objspace) ? TRUE : FALSE;
-#endif

     gc_report(2, objspace, "gc_sweep_step (need_pool: %d)\n", need_pool);
@@ -6012,10 +5968,8 @@ gc_sweep_step(rb_objspace_t *objspace, rb_size_pool_t *size_pool, rb_heap_t *hea
             heap_add_page(objspace, size_pool, SIZE_POOL_TOMB_HEAP(size_pool), sweep_page);
         }
         else if (free_slots > 0) {
-#if USE_RVARGC
             size_pool->freed_slots += ctx.freed_slots;
             size_pool->empty_slots += ctx.empty_slots;
-#endif

             if (need_pool) {
                 heap_add_poolpage(objspace, heap, sweep_page);
@@ -6035,9 +5989,7 @@ gc_sweep_step(rb_objspace_t *objspace, rb_size_pool_t *size_pool, rb_heap_t *hea
     } while ((sweep_page = heap->sweeping_page));

     if (!heap->sweeping_page) {
-#if USE_RVARGC
         gc_sweep_finish_size_pool(objspace, size_pool);
-#endif

         if (!has_sweeping_pages(objspace)) {
             gc_sweep_finish(objspace);
@@ -6074,7 +6026,6 @@ gc_sweep_continue(rb_objspace_t *objspace, rb_size_pool_t *sweep_size_pool, rb_h
         for (int i = 0; i < SIZE_POOL_COUNT; i++) {
             rb_size_pool_t *size_pool = &size_pools[i];
             if (!gc_sweep_step(objspace, size_pool, SIZE_POOL_EDEN_HEAP(size_pool))) {
-#if USE_RVARGC
                 /* sweep_size_pool requires a free slot but sweeping did not yield any. */
                 if (size_pool == sweep_size_pool) {
                     if (size_pool->allocatable_pages > 0) {
@@ -6086,7 +6037,6 @@ gc_sweep_continue(rb_objspace_t *objspace, rb_size_pool_t *sweep_size_pool, rb_h
                         break;
                     }
                 }
-#endif
             }
         }
@@ -6230,11 +6180,6 @@ gc_sweep(rb_objspace_t *objspace)
         }
     }

-#if !USE_RVARGC
-    rb_size_pool_t *size_pool = &size_pools[0];
-    gc_heap_prepare_minimum_pages(objspace, size_pool, SIZE_POOL_EDEN_HEAP(size_pool));
-#endif

     gc_sweeping_exit(objspace);
 }
@@ -8438,17 +8383,6 @@ gc_marks_finish(rb_objspace_t *objspace)
             objspace->rgengc.need_major_gc |= GPR_FLAG_MAJOR_BY_NOFREE;
         }
     }
-#if !USE_RVARGC
-        if (full_marking) {
-            /* increment: */
-            gc_report(1, objspace, "gc_marks_finish: heap_set_increment!!\n");
-            rb_size_pool_t *size_pool = &size_pools[0];
-            size_pool_allocatable_pages_set(objspace, size_pool, heap_extend_pages(objspace, size_pool, sweep_slots, total_slots, heap_allocated_pages + heap_allocatable_pages(objspace)));
-            heap_increment(objspace, size_pool, SIZE_POOL_EDEN_HEAP(size_pool));
-        }
-#endif
     }

     if (full_marking) {
@@ -10082,13 +10016,10 @@ static void
 gc_ref_update_array(rb_objspace_t * objspace, VALUE v)
 {
     if (ARY_SHARED_P(v)) {
-#if USE_RVARGC
         VALUE old_root = RARRAY(v)->as.heap.aux.shared_root;
-#endif

         UPDATE_IF_MOVED(objspace, RARRAY(v)->as.heap.aux.shared_root);

-#if USE_RVARGC
         VALUE new_root = RARRAY(v)->as.heap.aux.shared_root;
         // If the root is embedded and its location has changed
         if (ARY_EMBED_P(new_root) && new_root != old_root) {
@@ -10096,7 +10027,6 @@ gc_ref_update_array(rb_objspace_t * objspace, VALUE v)
             GC_ASSERT(RARRAY(v)->as.heap.ptr >= RARRAY(old_root)->as.ary);
             RARRAY(v)->as.heap.ptr = RARRAY(new_root)->as.ary + offset;
         }
-#endif
     }
     else {
         long len = RARRAY_LEN(v);
@@ -10108,13 +10038,11 @@ gc_ref_update_array(rb_objspace_t * objspace, VALUE v)
             }
         }

-#if USE_RVARGC
         if (rb_gc_obj_slot_size(v) >= rb_ary_size_as_embedded(v)) {
             if (rb_ary_embeddable_p(v)) {
                 rb_ary_make_embedded(v);
             }
         }
-#endif
     }
 }
@@ -10128,7 +10056,6 @@ gc_ref_update_object(rb_objspace_t *objspace, VALUE v)
         return;
     }

-#if USE_RVARGC
     size_t slot_size = rb_gc_obj_slot_size(v);
     size_t embed_size = rb_obj_embedded_size(ROBJECT_IV_CAPACITY(v));
     if (slot_size >= embed_size && !RB_FL_TEST_RAW(v, ROBJECT_EMBED)) {
@@ -10143,7 +10070,6 @@ gc_ref_update_object(rb_objspace_t *objspace, VALUE v)
         }
         ptr = ROBJECT(v)->as.ary;
     }
-#endif

     for (uint32_t i = 0; i < ROBJECT_IV_COUNT(v); i++) {
         UPDATE_IF_MOVED(objspace, ptr[i]);
@@ -10616,17 +10542,12 @@ gc_update_object_references(rb_objspace_t *objspace, VALUE obj)
       case T_STRING:
         {
             if (STR_SHARED_P(obj)) {
-#if USE_RVARGC
                 VALUE old_root = any->as.string.as.heap.aux.shared;
-#endif
                 UPDATE_IF_MOVED(objspace, any->as.string.as.heap.aux.shared);
-#if USE_RVARGC
                 VALUE new_root = any->as.string.as.heap.aux.shared;
                 rb_str_update_shared_ary(obj, old_root, new_root);
-#endif
             }

-#if USE_RVARGC
             /* If, after move the string is not embedded, and can fit in the
              * slot it's been placed in, then re-embed it. */
             if (rb_gc_obj_slot_size(obj) >= rb_str_size_as_embedded(obj)) {
@@ -10634,7 +10555,6 @@ gc_update_object_references(rb_objspace_t *objspace, VALUE obj)
                     rb_str_make_embedded(obj);
                 }
             }
-#endif

             break;
         }
@@ -14020,22 +13940,6 @@ rb_gcdebug_remove_stress_to_class(int argc, VALUE *argv, VALUE self)
  */
 #include "gc.rbinc"

-/*
- *  call-seq:
- *     GC.using_rvargc? -> true or false
- *
- *  Returns true if using experimental feature Variable Width Allocation, false
- *  otherwise.
- */
-static VALUE
-gc_using_rvargc_p(VALUE mod)
-{
-#if USE_RVARGC
-    return Qtrue;
-#else
-    return Qfalse;
-#endif
-}

 void
 Init_GC(void)
@@ -14098,8 +14002,6 @@ Init_GC(void)
     rb_define_singleton_method(rb_mGC, "malloc_allocations", gc_malloc_allocations, 0);
 #endif

-    rb_define_singleton_method(rb_mGC, "using_rvargc?", gc_using_rvargc_p, 0);

     if (GC_COMPACTION_SUPPORTED) {
         rb_define_singleton_method(rb_mGC, "compact", gc_compact, 0);
         rb_define_singleton_method(rb_mGC, "auto_compact", gc_get_auto_compact, 0);


@@ -148,8 +148,4 @@
 # undef RBIMPL_TEST3
 #endif /* HAVE_VA_ARGS_MACRO */

-#ifndef USE_RVARGC
-# define USE_RVARGC 1
-#endif
 #endif /* RBIMPL_CONFIG_H */
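
Since config.h no longer force-defines USE_RVARGC, the macro is simply absent after this commit; third-party code still guarding on "#if USE_RVARGC" will quietly take the false branch (and trip -Wundef), so such guards should be deleted. A minimal probe, purely for illustration:

    /* Illustration only: after this commit USE_RVARGC is not defined anywhere. */
    #include <stdio.h>

    int main(void)
    {
    #ifdef USE_RVARGC
        puts("USE_RVARGC is defined (pre-removal behaviour)");
    #else
        puts("USE_RVARGC is not defined (expected after this commit)");
    #endif
        return 0;
    }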


@@ -130,12 +130,8 @@ enum ruby_rarray_flags {
      * 3rd parties must not be aware that there even is more than one way to
      * store array elements. It was a bad idea to expose this to them.
      */
-#if USE_RVARGC
    RARRAY_EMBED_LEN_MASK = RUBY_FL_USER9 | RUBY_FL_USER8 | RUBY_FL_USER7 | RUBY_FL_USER6 |
                            RUBY_FL_USER5 | RUBY_FL_USER4 | RUBY_FL_USER3
-#else
-    RARRAY_EMBED_LEN_MASK = RUBY_FL_USER4 | RUBY_FL_USER3
-#endif

 #if USE_TRANSIENT_HEAP
     ,
@@ -163,13 +159,6 @@ enum ruby_rarray_flags {
 enum ruby_rarray_consts {
     /** Where ::RARRAY_EMBED_LEN_MASK resides. */
     RARRAY_EMBED_LEN_SHIFT = RUBY_FL_USHIFT + 3
-#if !USE_RVARGC
-    ,
-    /** Max possible number elements that can be embedded. */
-    RARRAY_EMBED_LEN_MAX = RBIMPL_EMBED_LEN_MAX_OF(VALUE)
-#endif
 };

 /** Ruby's array. */
@@ -228,16 +217,12 @@ struct RArray {
          * to store its elements. In this case the length is encoded into the
          * flags.
          */
-#if USE_RVARGC
        /* This is a length 1 array because:
         *   1. GCC has a bug that does not optimize C flexible array members
         *      (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102452)
         *   2. Zero length arrays are not supported by all compilers
         */
        const VALUE ary[1];
-#else
-        const VALUE ary[RARRAY_EMBED_LEN_MAX];
-#endif
     } as;
 };
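
With the two-bit variant gone, RARRAY_EMBED_LEN_MASK is always the 7-bit field FL_USER3..FL_USER9, shifted by RARRAY_EMBED_LEN_SHIFT (RUBY_FL_USHIFT + 3). A hedged sketch of packing and unpacking that field follows; the RUBY_FL_USHIFT value of 12 is an implementation detail assumed here for illustration.

    /* Hedged sketch of the 7-bit embedded-length field kept by this commit.
     * FL_USHIFT == 12 is an assumption for illustration. */
    #include <stdio.h>

    #define FL_USHIFT       12u
    #define EMBED_LEN_SHIFT (FL_USHIFT + 3)               /* RARRAY_EMBED_LEN_SHIFT */
    #define EMBED_LEN_MASK  (0x7ful << EMBED_LEN_SHIFT)   /* FL_USER3..FL_USER9 */

    int main(void)
    {
        unsigned long flags = 0;
        unsigned long len = 100;   /* 7 bits allow embedded lengths up to 127 */

        flags = (flags & ~EMBED_LEN_MASK) | (len << EMBED_LEN_SHIFT);
        printf("decoded embedded length: %lu\n",
               (flags & EMBED_LEN_MASK) >> EMBED_LEN_SHIFT);
        return 0;
    }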


@@ -74,17 +74,6 @@ enum ruby_robject_flags {
     ROBJECT_EMBED = RUBY_FL_USER1
 };

-#if !USE_RVARGC
-/**
- * This is an enum because GDB wants it (rather than a macro). People need not
- * bother.
- */
-enum ruby_robject_consts {
-    /** Max possible number of instance variables that can be embedded. */
-    ROBJECT_EMBED_LEN_MAX = RBIMPL_EMBED_LEN_MAX_OF(VALUE)
-};
-#endif

 struct st_table;

 /**
@@ -118,7 +107,6 @@ struct RObject {
             struct rb_id_table *iv_index_tbl;
         } heap;

-#if USE_RVARGC
         /* Embedded instance variables. When an object is small enough, it
          * uses this area to store the instance variables.
          *
@@ -128,13 +116,6 @@ struct RObject {
          *   2. Zero length arrays are not supported by all compilers
          */
         VALUE ary[1];
-#else
-        /**
-         * Embedded instance variables. When an object is small enough, it
-         * uses this area to store the instance variables.
-         */
-        VALUE ary[ROBJECT_EMBED_LEN_MAX];
-#endif
     } as;
 };


@@ -42,11 +42,6 @@
 /** @cond INTERNAL_MACRO */
 #define RSTRING_NOEMBED RSTRING_NOEMBED
-#if !USE_RVARGC
-#define RSTRING_EMBED_LEN_MASK RSTRING_EMBED_LEN_MASK
-#define RSTRING_EMBED_LEN_SHIFT RSTRING_EMBED_LEN_SHIFT
-#define RSTRING_EMBED_LEN_MAX RSTRING_EMBED_LEN_MAX
-#endif
 #define RSTRING_FSTR RSTRING_FSTR
 #define RSTRING_EMBED_LEN RSTRING_EMBED_LEN
 #define RSTRING_LEN RSTRING_LEN
@@ -162,21 +157,6 @@ enum ruby_rstring_flags {
      */
     RSTRING_NOEMBED = RUBY_FL_USER1,

-#if !USE_RVARGC
-    /**
-     * When a string employs embedded strategy (see ::RSTRING_NOEMBED), these
-     * bits are used to store the number of bytes actually filled into
-     * ::RString::ary.
-     *
-     * @internal
-     *
-     * 3rd parties must not be aware that there even is more than one way to
-     * store a string. Might better be hidden.
-     */
-    RSTRING_EMBED_LEN_MASK = RUBY_FL_USER2 | RUBY_FL_USER3 | RUBY_FL_USER4 |
-                             RUBY_FL_USER5 | RUBY_FL_USER6,
-#endif

     /* Actually, string encodings are also encoded into the flags, using
      * remaining bits.*/
@@ -202,20 +182,6 @@ enum ruby_rstring_flags {
     RSTRING_FSTR = RUBY_FL_USER17
 };

-#if !USE_RVARGC
-/**
- * This is an enum because GDB wants it (rather than a macro). People need not
- * bother.
- */
-enum ruby_rstring_consts {
-    /** Where ::RSTRING_EMBED_LEN_MASK resides. */
-    RSTRING_EMBED_LEN_SHIFT = RUBY_FL_USHIFT + 2,
-    /** Max possible number of characters that can be embedded. */
-    RSTRING_EMBED_LEN_MAX = RBIMPL_EMBED_LEN_MAX_OF(char) - 1
-};
-#endif

 /**
  * Ruby's String. A string in ruby conceptually has these information:
 *
@@ -279,7 +245,6 @@ struct RString {
         /** Embedded contents. */
         struct {
-#if USE_RVARGC
             long len;
             /* This is a length 1 array because:
              * 1. GCC has a bug that does not optimize C flexible array members
@@ -287,16 +252,6 @@ struct RString {
              * 2. Zero length arrays are not supported by all compilers
              */
             char ary[1];
-#else
-            /**
-             * When a string is short enough, it uses this area to store the
-             * contents themselves. This was impractical in the 20th century,
-             * but these days 64 bit machines can typically hold 24 bytes here.
-             * Could be sufficiently large. In this case the length is encoded
-             * into the flags.
-             */
-            char ary[RSTRING_EMBED_LEN_MAX + 1];
-#endif
         } embed;
     } as;
 };
@@ -425,15 +380,8 @@ RSTRING_EMBED_LEN(VALUE str)
     RBIMPL_ASSERT_TYPE(str, RUBY_T_STRING);
     RBIMPL_ASSERT_OR_ASSUME(! RB_FL_ANY_RAW(str, RSTRING_NOEMBED));

-#if USE_RVARGC
     long f = RSTRING(str)->as.embed.len;
     return f;
-#else
-    VALUE f = RBASIC(str)->flags;
-    f &= RSTRING_EMBED_LEN_MASK;
-    f >>= RSTRING_EMBED_LEN_SHIFT;
-    return RBIMPL_CAST((long)f);
-#endif
 }

 RBIMPL_WARNING_PUSH()


@@ -190,7 +190,7 @@ struct rb_objspace; /* in vm_core.h */
 // We use SIZE_POOL_COUNT number of shape IDs for transitions out of different size pools
 // The next available shape ID will be the SPECIAL_CONST_SHAPE_ID
 #ifndef SIZE_POOL_COUNT
-# if USE_RVARGC && (SIZEOF_UINT64_T == SIZEOF_VALUE)
+# if (SIZEOF_UINT64_T == SIZEOF_VALUE)
 # define SIZE_POOL_COUNT 5
 # else
 # define SIZE_POOL_COUNT 1
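
After this change the pool-count selection depends only on whether VALUE is 64 bits wide. A standalone mirror of that choice, assuming VALUE is unsigned long for the sketch:

    /* Standalone mirror of the SIZE_POOL_COUNT selection that remains:
     * five size pools when VALUE is 64 bits, otherwise one. */
    #include <stdio.h>
    #include <stdint.h>

    typedef unsigned long VALUE;   /* assumption for this sketch */

    int main(void)
    {
        int size_pool_count = (sizeof(uint64_t) == sizeof(VALUE)) ? 5 : 1;
        printf("SIZE_POOL_COUNT would be %d\n", size_pool_count);
        return 0;
    }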


@@ -2938,7 +2938,6 @@ module RubyVM::RJIT
     def jit_rb_str_empty_p(jit, ctx, asm, argc, known_recv_class)
       # Assume same offset to len embedded or not so we can use one code path to read the length
       #assert_equal(C.RString.offsetof(:as, :heap, :len), C.RString.offsetof(:as, :embed, :len))
-      # `C.RString.offsetof(:as, :embed, :len)` doesn't work because of USE_RVARGC=0 CI

       recv_opnd = ctx.stack_pop(1)
       out_opnd = ctx.stack_push(Type::UnknownImm)


@@ -857,6 +857,11 @@ module RubyVM::RJIT # :nodoc: all
           shared: self.VALUE,
         ), Primitive.cexpr!("OFFSETOF(((struct RString *)NULL)->as.heap, aux)")],
       ),
+      embed: CType::Struct.new(
+        "", Primitive.cexpr!("SIZEOF(((struct RString *)NULL)->as.embed)"),
+        len: [CType::Immediate.parse("long"), Primitive.cexpr!("OFFSETOF(((struct RString *)NULL)->as.embed, len)")],
+        ary: [CType::Pointer.new { CType::Immediate.parse("char") }, Primitive.cexpr!("OFFSETOF(((struct RString *)NULL)->as.embed, ary)")],
+      ),
     ), Primitive.cexpr!("OFFSETOF((*((struct RString *)NULL)), as)")],
   )
 end

ruby.c (8 lines changed)

@@ -549,12 +549,8 @@ static VALUE
 runtime_libruby_path(void)
 {
 #if defined _WIN32 || defined __CYGWIN__
-    DWORD len, ret;
-#if USE_RVARGC
-    len = 32;
-#else
-    len = RSTRING_EMBED_LEN_MAX;
-#endif
+    DWORD ret;
+    DWORD len = 32;
     VALUE path;
     VALUE wsopath = rb_str_new(0, len*sizeof(WCHAR));
     WCHAR *wlibpath;


@@ -108,26 +108,13 @@ VALUE rb_cSymbol;
 #define STR_SET_NOEMBED(str) do {\
     FL_SET((str), STR_NOEMBED);\
-    if (USE_RVARGC) {\
         FL_UNSET((str), STR_SHARED | STR_SHARED_ROOT | STR_BORROWED);\
-    }\
-    else {\
-        STR_SET_EMBED_LEN((str), 0);\
-    }\
 } while (0)
 #define STR_SET_EMBED(str) FL_UNSET((str), (STR_NOEMBED|STR_NOFREE))
-#if USE_RVARGC
 # define STR_SET_EMBED_LEN(str, n) do { \
     assert(str_embed_capa(str) > (n));\
     RSTRING(str)->as.embed.len = (n);\
 } while (0)
-#else
-# define STR_SET_EMBED_LEN(str, n) do { \
-    long tmp_n = (n);\
-    RBASIC(str)->flags &= ~RSTRING_EMBED_LEN_MASK;\
-    RBASIC(str)->flags |= (tmp_n) << RSTRING_EMBED_LEN_SHIFT;\
-} while (0)
-#endif

 #define STR_SET_LEN(str, n) do { \
     if (STR_EMBED_P(str)) {\
@@ -227,11 +214,7 @@ str_enc_fastpath(str)
 static inline long
 str_embed_capa(VALUE str)
 {
-#if USE_RVARGC
     return rb_gc_obj_slot_size(str) - offsetof(struct RString, as.embed.ary);
-#else
-    return RSTRING_EMBED_LEN_MAX + 1;
-#endif
 }

@@ -250,7 +233,6 @@ size_t
 rb_str_size_as_embedded(VALUE str)
 {
     size_t real_size;
-#if USE_RVARGC
     if (STR_EMBED_P(str)) {
         real_size = rb_str_embed_size(RSTRING(str)->as.embed.len) + TERM_LEN(str);
     }
@@ -260,22 +242,15 @@ rb_str_size_as_embedded(VALUE str)
         real_size = rb_str_embed_size(RSTRING(str)->as.heap.aux.capa) + TERM_LEN(str);
     }
     else {
-#endif
         real_size = sizeof(struct RString);
-#if USE_RVARGC
     }
-#endif
     return real_size;
 }

 static inline bool
 STR_EMBEDDABLE_P(long len, long termlen)
 {
-#if USE_RVARGC
     return rb_gc_size_allocatable_p(rb_str_embed_size(len + termlen));
-#else
-    return len <= RSTRING_EMBED_LEN_MAX + 1 - termlen;
-#endif
 }

 static VALUE str_replace_shared_without_enc(VALUE str2, VALUE str);
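
To make the surviving STR_EMBEDDABLE_P concrete: a string of len bytes plus its terminator is embeddable exactly when rb_str_embed_size(len + termlen) is a size the GC can allocate. A hedged standalone sketch, assuming a 24-byte embedded-string header (flags, klass, len) and a 640-byte largest slot; both figures are assumptions for illustration.

    /* Hedged sketch of the string embeddability test; header and largest slot
     * size are assumptions for illustration. */
    #include <stdbool.h>
    #include <stdio.h>

    static bool
    str_embeddable(long len, long termlen, size_t header, size_t max_slot)
    {
        return header + (size_t)(len + termlen) <= max_slot;
    }

    int main(void)
    {
        printf("615 bytes: %s\n", str_embeddable(615, 1, 24, 640) ? "embedded" : "heap");
        printf("617 bytes: %s\n", str_embeddable(617, 1, 24, 640) ? "embedded" : "heap");
        return 0;
    }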
@@ -866,11 +841,7 @@ static size_t
 str_capacity(VALUE str, const int termlen)
 {
     if (STR_EMBED_P(str)) {
-#if USE_RVARGC
         return str_embed_capa(str) - termlen;
-#else
-        return (RSTRING_EMBED_LEN_MAX + 1 - termlen);
-#endif
     }
     else if (FL_TEST(str, STR_SHARED|STR_NOFREE)) {
         return RSTRING(str)->as.heap.len;
@@ -900,9 +871,6 @@ str_alloc_embed(VALUE klass, size_t capa)
     size_t size = rb_str_embed_size(capa);
     assert(size > 0);
     assert(rb_gc_size_allocatable_p(size));
-#if !USE_RVARGC
-    assert(size <= sizeof(struct RString));
-#endif
     RVARGC_NEWOBJ_OF(str, struct RString, klass,
                      T_STRING | (RGENGC_WB_PROTECTED_STRING ? FL_WB_PROTECTED : 0), size);
@@ -1486,9 +1454,6 @@ str_new_frozen_buffer(VALUE klass, VALUE orig, int copy_encoding)
     assert(ofs >= 0);
     assert(rest >= 0);
     assert(ofs + rest <= RSTRING_LEN(shared));
-#if !USE_RVARGC
-    assert(!STR_EMBED_P(shared));
-#endif
     assert(OBJ_FROZEN(shared));

     if ((ofs > 0) || (rest > 0) ||
@@ -1537,9 +1502,6 @@ str_new_empty_String(str)
 }

 #define STR_BUF_MIN_SIZE 63
-#if !USE_RVARGC
-STATIC_ASSERT(STR_BUF_MIN_SIZE, STR_BUF_MIN_SIZE > RSTRING_EMBED_LEN_MAX);
-#endif

 VALUE
 rb_str_buf_new(long capa)
@@ -1550,11 +1512,6 @@ rb_str_buf_new(long capa)
     VALUE str = str_alloc_heap(rb_cString);

-#if !USE_RVARGC
-    if (capa < STR_BUF_MIN_SIZE) {
-        capa = STR_BUF_MIN_SIZE;
-    }
-#endif
     RSTRING(str)->as.heap.aux.capa = capa;
     RSTRING(str)->as.heap.ptr = ALLOC_N(char, (size_t)capa + 1);
     RSTRING(str)->as.heap.ptr[0] = '\0';
@@ -1654,7 +1611,6 @@ str_shared_replace(VALUE str, VALUE str2)
         ENC_CODERANGE_SET(str, cr);
     }
     else {
-#if USE_RVARGC
         if (STR_EMBED_P(str2)) {
             assert(!FL_TEST(str2, STR_SHARED));
             long len = RSTRING(str2)->as.embed.len;
@@ -1667,7 +1623,6 @@ str_shared_replace(VALUE str, VALUE str2)
             RSTRING(str2)->as.heap.aux.capa = len;
             STR_SET_NOEMBED(str2);
         }
-#endif
         STR_SET_NOEMBED(str);
         FL_UNSET(str, STR_SHARED);
@@ -1739,9 +1694,6 @@ ec_str_alloc_embed(struct rb_execution_context_struct *ec, VALUE klass, size_t c
     size_t size = rb_str_embed_size(capa);
     assert(size > 0);
     assert(rb_gc_size_allocatable_p(size));
-#if !USE_RVARGC
-    assert(size <= sizeof(struct RString));
-#endif
     RB_RVARGC_EC_NEWOBJ_OF(ec, str, struct RString, klass,
                            T_STRING | (RGENGC_WB_PROTECTED_STRING ? FL_WB_PROTECTED : 0), size);
@@ -1762,9 +1714,6 @@ static inline VALUE
 str_duplicate_setup(VALUE klass, VALUE str, VALUE dup)
 {
     const VALUE flag_mask =
-#if !USE_RVARGC
-        RSTRING_NOEMBED | RSTRING_EMBED_LEN_MASK |
-#endif
         ENC_CODERANGE_MASK | ENCODING_MASK |
         FL_FREEZE
         ;
@@ -1789,22 +1738,13 @@ str_duplicate_setup(VALUE klass, VALUE str, VALUE dup)
     }
     assert(!STR_SHARED_P(root));
     assert(RB_OBJ_FROZEN_RAW(root));
-    if (0) {}
-#if !USE_RVARGC
-    else if (STR_EMBED_P(root)) {
-        MEMCPY(RSTRING(dup)->as.embed.ary, RSTRING(root)->as.embed.ary,
-               char, RSTRING_EMBED_LEN_MAX + 1);
-        FL_UNSET(dup, STR_NOEMBED);
-    }
-#endif
-    else {
         RSTRING(dup)->as.heap.len = RSTRING_LEN(str);
         RSTRING(dup)->as.heap.ptr = RSTRING_PTR(str);
         FL_SET(root, STR_SHARED_ROOT);
         RB_OBJ_WRITE(dup, &RSTRING(dup)->as.heap.aux.shared, root);
         flags |= RSTRING_NOEMBED | STR_SHARED;
     }
-    }

     if ((flags & ENCODING_MASK) == (ENCODING_INLINE_MAX<<ENCODING_SHIFT)) {
         encidx = rb_enc_get_index(str);
@@ -1913,12 +1853,8 @@ rb_str_init(int argc, VALUE *argv, VALUE str)
             str_modifiable(str);
             if (STR_EMBED_P(str)) { /* make noembed always */
                 char *new_ptr = ALLOC_N(char, (size_t)capa + termlen);
-#if USE_RVARGC
                 assert(RSTRING(str)->as.embed.len + 1 <= str_embed_capa(str));
                 memcpy(new_ptr, RSTRING(str)->as.embed.ary, RSTRING(str)->as.embed.len + 1);
-#else
-                memcpy(new_ptr, RSTRING(str)->as.embed.ary, RSTRING_EMBED_LEN_MAX + 1);
-#endif
                 RSTRING(str)->as.heap.ptr = new_ptr;
             }
             else if (FL_TEST(str, STR_SHARED|STR_NOFREE)) {
@@ -3124,9 +3060,6 @@ str_buf_cat4(VALUE str, const char *ptr, long len, bool keep_cr)
     long capa, total, olen, off = -1;
     char *sptr;
     const int termlen = TERM_LEN(str);
-#if !USE_RVARGC
-    assert(termlen < RSTRING_EMBED_LEN_MAX + 1); /* < (LONG_MAX/2) */
-#endif

     RSTRING_GETMEM(str, sptr, olen);
     if (ptr >= sptr && ptr <= sptr + olen) {


@@ -66,11 +66,7 @@ class Test_StringCapacity < Test::Unit::TestCase
   end

   def embed_header_size
-    if GC.using_rvargc?
       2 * RbConfig::SIZEOF['void*'] + RbConfig::SIZEOF['long']
-    else
-      2 * RbConfig::SIZEOF['void*']
-    end
   end

   def max_embed_len


@@ -633,7 +633,6 @@ generator = BindingGenerator.new(
   ],
   skip_fields: {
     'rb_execution_context_struct.machine': %w[regs], # differs between macOS and Linux
-    'RString.as': %w[embed], # doesn't compile on USE_RVARGC=0 CI
     rb_execution_context_struct: %w[method_missing_reason], # non-leading bit fields not supported
     rb_iseq_constant_body: %w[yjit_payload], # conditionally defined
     rb_thread_struct: %w[status has_dedicated_nt to_kill abort_on_exception report_on_exception pending_interrupt_queue_checked],


@@ -3768,11 +3768,8 @@ econv_primitive_convert(int argc, VALUE *argv, VALUE self)
     rb_str_modify(output);
     if (NIL_P(output_bytesize_v)) {
-#if USE_RVARGC
         output_bytesize = rb_str_capacity(output);
-#else
-        output_bytesize = RSTRING_EMBED_LEN_MAX;
-#endif
         if (!NIL_P(input) && output_bytesize < RSTRING_LEN(input))
             output_bytesize = RSTRING_LEN(input);
     }


@@ -178,7 +178,6 @@ fn main() {
         // From include/ruby/internal/core/robject.h
         .allowlist_type("ruby_robject_flags")
-        // .allowlist_type("ruby_robject_consts") // Removed when USE_RVARGC
         .allowlist_var("ROBJECT_OFFSET_.*")
         // From include/ruby/internal/core/rarray.h