Rename size_pool -> heap

Now that we've inlined the eden_heap into the size_pool, we should
rename the size_pool to heap, so that Ruby contains multiple heaps,
each holding different sized objects.

The term "heap", meaning a collection of memory pages, is more in line
with memory management nomenclature, whereas size_pool was a name chosen
out of necessity during the development of the Variable Width Allocation
features of Ruby.

The concept of size pools was introduced in order to facilitate
different sized objects (other than the default 40 bytes). They wrapped
the eden heap and the tomb heap, and some related state, and provided a
reasonably simple way of duplicating all related concerns, to provide
multiple pools that all shared the same structure but held different
objects.

Since then various changes have happened in Ruby's memory layout:

* The concept of tomb heaps has been replaced by a global free pages list,
  with each page having its slot size reconfigured at the point when it
  is resurrected
* the eden heap has been inlined into the size pool itself, so that now
  the size pool directly controls the free_pages list, the sweeping
  page, the compaction cursor and the other state that was previously
  being managed by the eden heap.

Now that there is no need for a heap wrapper, we should refer to the
collection of pages containing Ruby objects as a heap again rather than
a size pool.
This commit is contained in:
Matt Valentine-House 2024-10-03 13:53:49 +01:00
parent b58a364522
commit 8e7df4b7c6
Notes: git 2024-10-03 20:20:28 +00:00
16 changed files with 442 additions and 442 deletions

View File

@ -45,7 +45,7 @@ benchmark:
"#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}" \ "#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}" \
"#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}" \ "#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}" \
"#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}" "#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}#{CHUNK}"
interpolation_same_size_pool: | interpolation_same_heap: |
buffer = "#{SHORT}#{SHORT}" buffer = "#{SHORT}#{SHORT}"
interpolation_switching_size_pools: | interpolation_switching_heaps: |
buffer = "#{SHORT}#{LONG}" buffer = "#{SHORT}#{LONG}"

24
gc.c
View File

@ -310,13 +310,13 @@ rb_gc_set_shape(VALUE obj, uint32_t shape_id)
} }
uint32_t uint32_t
rb_gc_rebuild_shape(VALUE obj, size_t size_pool_id) rb_gc_rebuild_shape(VALUE obj, size_t heap_id)
{ {
rb_shape_t *orig_shape = rb_shape_get_shape(obj); rb_shape_t *orig_shape = rb_shape_get_shape(obj);
if (rb_shape_obj_too_complex(obj)) return (uint32_t)OBJ_TOO_COMPLEX_SHAPE_ID; if (rb_shape_obj_too_complex(obj)) return (uint32_t)OBJ_TOO_COMPLEX_SHAPE_ID;
rb_shape_t *initial_shape = rb_shape_get_shape_by_id((shape_id_t)(size_pool_id + FIRST_T_OBJECT_SHAPE_ID)); rb_shape_t *initial_shape = rb_shape_get_shape_by_id((shape_id_t)(heap_id + FIRST_T_OBJECT_SHAPE_ID));
rb_shape_t *new_shape = rb_shape_traverse_from_new_root(initial_shape, orig_shape); rb_shape_t *new_shape = rb_shape_traverse_from_new_root(initial_shape, orig_shape);
if (!new_shape) return 0; if (!new_shape) return 0;
@ -577,7 +577,7 @@ typedef struct gc_function_map {
void (*ractor_cache_free)(void *objspace_ptr, void *cache); void (*ractor_cache_free)(void *objspace_ptr, void *cache);
void (*set_params)(void *objspace_ptr); void (*set_params)(void *objspace_ptr);
void (*init)(void); void (*init)(void);
size_t *(*size_pool_sizes)(void *objspace_ptr); size_t *(*heap_sizes)(void *objspace_ptr);
// Shutdown // Shutdown
void (*shutdown_free_objects)(void *objspace_ptr); void (*shutdown_free_objects)(void *objspace_ptr);
// GC // GC
@ -594,7 +594,7 @@ typedef struct gc_function_map {
// Object allocation // Object allocation
VALUE (*new_obj)(void *objspace_ptr, void *cache_ptr, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, bool wb_protected, size_t alloc_size); VALUE (*new_obj)(void *objspace_ptr, void *cache_ptr, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, bool wb_protected, size_t alloc_size);
size_t (*obj_slot_size)(VALUE obj); size_t (*obj_slot_size)(VALUE obj);
size_t (*size_pool_id_for_size)(void *objspace_ptr, size_t size); size_t (*heap_id_for_size)(void *objspace_ptr, size_t size);
bool (*size_allocatable_p)(size_t size); bool (*size_allocatable_p)(size_t size);
// Malloc // Malloc
void *(*malloc)(void *objspace_ptr, size_t size); void *(*malloc)(void *objspace_ptr, size_t size);
@ -708,7 +708,7 @@ ruby_external_gc_init(void)
load_external_gc_func(ractor_cache_free); load_external_gc_func(ractor_cache_free);
load_external_gc_func(set_params); load_external_gc_func(set_params);
load_external_gc_func(init); load_external_gc_func(init);
load_external_gc_func(size_pool_sizes); load_external_gc_func(heap_sizes);
// Shutdown // Shutdown
load_external_gc_func(shutdown_free_objects); load_external_gc_func(shutdown_free_objects);
// GC // GC
@ -725,7 +725,7 @@ ruby_external_gc_init(void)
// Object allocation // Object allocation
load_external_gc_func(new_obj); load_external_gc_func(new_obj);
load_external_gc_func(obj_slot_size); load_external_gc_func(obj_slot_size);
load_external_gc_func(size_pool_id_for_size); load_external_gc_func(heap_id_for_size);
load_external_gc_func(size_allocatable_p); load_external_gc_func(size_allocatable_p);
// Malloc // Malloc
load_external_gc_func(malloc); load_external_gc_func(malloc);
@ -787,7 +787,7 @@ ruby_external_gc_init(void)
# define rb_gc_impl_ractor_cache_free rb_gc_functions.ractor_cache_free # define rb_gc_impl_ractor_cache_free rb_gc_functions.ractor_cache_free
# define rb_gc_impl_set_params rb_gc_functions.set_params # define rb_gc_impl_set_params rb_gc_functions.set_params
# define rb_gc_impl_init rb_gc_functions.init # define rb_gc_impl_init rb_gc_functions.init
# define rb_gc_impl_size_pool_sizes rb_gc_functions.size_pool_sizes # define rb_gc_impl_heap_sizes rb_gc_functions.heap_sizes
// Shutdown // Shutdown
# define rb_gc_impl_shutdown_free_objects rb_gc_functions.shutdown_free_objects # define rb_gc_impl_shutdown_free_objects rb_gc_functions.shutdown_free_objects
// GC // GC
@ -804,7 +804,7 @@ ruby_external_gc_init(void)
// Object allocation // Object allocation
# define rb_gc_impl_new_obj rb_gc_functions.new_obj # define rb_gc_impl_new_obj rb_gc_functions.new_obj
# define rb_gc_impl_obj_slot_size rb_gc_functions.obj_slot_size # define rb_gc_impl_obj_slot_size rb_gc_functions.obj_slot_size
# define rb_gc_impl_size_pool_id_for_size rb_gc_functions.size_pool_id_for_size # define rb_gc_impl_heap_id_for_size rb_gc_functions.heap_id_for_size
# define rb_gc_impl_size_allocatable_p rb_gc_functions.size_allocatable_p # define rb_gc_impl_size_allocatable_p rb_gc_functions.size_allocatable_p
// Malloc // Malloc
# define rb_gc_impl_malloc rb_gc_functions.malloc # define rb_gc_impl_malloc rb_gc_functions.malloc
@ -3000,9 +3000,9 @@ rb_gc_prepare_heap(void)
} }
size_t size_t
rb_gc_size_pool_id_for_size(size_t size) rb_gc_heap_id_for_size(size_t size)
{ {
return rb_gc_impl_size_pool_id_for_size(rb_gc_get_objspace(), size); return rb_gc_impl_heap_id_for_size(rb_gc_get_objspace(), size);
} }
bool bool
@ -3452,9 +3452,9 @@ rb_gc_initial_stress_set(VALUE flag)
} }
size_t * size_t *
rb_gc_size_pool_sizes(void) rb_gc_heap_sizes(void)
{ {
return rb_gc_impl_size_pool_sizes(rb_gc_get_objspace()); return rb_gc_impl_heap_sizes(rb_gc_get_objspace());
} }
VALUE VALUE

File diff suppressed because it is too large Load Diff

View File

@ -40,7 +40,7 @@ const char *rb_obj_info(VALUE obj);
bool rb_gc_shutdown_call_finalizer_p(VALUE obj); bool rb_gc_shutdown_call_finalizer_p(VALUE obj);
uint32_t rb_gc_get_shape(VALUE obj); uint32_t rb_gc_get_shape(VALUE obj);
void rb_gc_set_shape(VALUE obj, uint32_t shape_id); void rb_gc_set_shape(VALUE obj, uint32_t shape_id);
uint32_t rb_gc_rebuild_shape(VALUE obj, size_t size_pool_id); uint32_t rb_gc_rebuild_shape(VALUE obj, size_t heap_id);
size_t rb_obj_memsize_of(VALUE obj); size_t rb_obj_memsize_of(VALUE obj);
RUBY_SYMBOL_EXPORT_END RUBY_SYMBOL_EXPORT_END

View File

@ -32,7 +32,7 @@ GC_IMPL_FN void *rb_gc_impl_ractor_cache_alloc(void *objspace_ptr);
GC_IMPL_FN void rb_gc_impl_ractor_cache_free(void *objspace_ptr, void *cache); GC_IMPL_FN void rb_gc_impl_ractor_cache_free(void *objspace_ptr, void *cache);
GC_IMPL_FN void rb_gc_impl_set_params(void *objspace_ptr); GC_IMPL_FN void rb_gc_impl_set_params(void *objspace_ptr);
GC_IMPL_FN void rb_gc_impl_init(void); GC_IMPL_FN void rb_gc_impl_init(void);
GC_IMPL_FN size_t *rb_gc_impl_size_pool_sizes(void *objspace_ptr); GC_IMPL_FN size_t *rb_gc_impl_heap_sizes(void *objspace_ptr);
// Shutdown // Shutdown
GC_IMPL_FN void rb_gc_impl_shutdown_free_objects(void *objspace_ptr); GC_IMPL_FN void rb_gc_impl_shutdown_free_objects(void *objspace_ptr);
// GC // GC
@ -49,7 +49,7 @@ GC_IMPL_FN VALUE rb_gc_impl_config_set(void *objspace_ptr, VALUE hash);
// Object allocation // Object allocation
GC_IMPL_FN VALUE rb_gc_impl_new_obj(void *objspace_ptr, void *cache_ptr, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, bool wb_protected, size_t alloc_size); GC_IMPL_FN VALUE rb_gc_impl_new_obj(void *objspace_ptr, void *cache_ptr, VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3, bool wb_protected, size_t alloc_size);
GC_IMPL_FN size_t rb_gc_impl_obj_slot_size(VALUE obj); GC_IMPL_FN size_t rb_gc_impl_obj_slot_size(VALUE obj);
GC_IMPL_FN size_t rb_gc_impl_size_pool_id_for_size(void *objspace_ptr, size_t size); GC_IMPL_FN size_t rb_gc_impl_heap_id_for_size(void *objspace_ptr, size_t size);
GC_IMPL_FN bool rb_gc_impl_size_allocatable_p(size_t size); GC_IMPL_FN bool rb_gc_impl_size_allocatable_p(size_t size);
// Malloc // Malloc
GC_IMPL_FN void *rb_gc_impl_malloc(void *objspace_ptr, size_t size); GC_IMPL_FN void *rb_gc_impl_malloc(void *objspace_ptr, size_t size);

View File

@ -83,7 +83,7 @@ struct RClass {
struct rb_id_table *m_tbl; struct rb_id_table *m_tbl;
}; };
// Assert that classes can be embedded in size_pools[2] (which has 160B slot size) // Assert that classes can be embedded in heaps[2] (which has 160B slot size)
STATIC_ASSERT(sizeof_rb_classext_t, sizeof(struct RClass) + sizeof(rb_classext_t) <= 4 * RVALUE_SIZE); STATIC_ASSERT(sizeof_rb_classext_t, sizeof(struct RClass) + sizeof(rb_classext_t) <= 4 * RVALUE_SIZE);
struct RClass_and_rb_classext_t { struct RClass_and_rb_classext_t {

View File

@ -201,8 +201,8 @@ void *rb_gc_ractor_cache_alloc(void);
void rb_gc_ractor_cache_free(void *cache); void rb_gc_ractor_cache_free(void *cache);
bool rb_gc_size_allocatable_p(size_t size); bool rb_gc_size_allocatable_p(size_t size);
size_t *rb_gc_size_pool_sizes(void); size_t *rb_gc_heap_sizes(void);
size_t rb_gc_size_pool_id_for_size(size_t size); size_t rb_gc_heap_id_for_size(size_t size);
void rb_gc_mark_and_move(VALUE *ptr); void rb_gc_mark_and_move(VALUE *ptr);

View File

@ -547,7 +547,7 @@ class HeapPageIter:
self.target = target self.target = target
self.start = page.GetChildMemberWithName('start').GetValueAsUnsigned(); self.start = page.GetChildMemberWithName('start').GetValueAsUnsigned();
self.num_slots = page.GetChildMemberWithName('total_slots').unsigned self.num_slots = page.GetChildMemberWithName('total_slots').unsigned
self.slot_size = page.GetChildMemberWithName('size_pool').GetChildMemberWithName('slot_size').unsigned self.slot_size = page.GetChildMemberWithName('heap').GetChildMemberWithName('slot_size').unsigned
self.counter = 0 self.counter = 0
self.tRBasic = target.FindFirstType("struct RBasic") self.tRBasic = target.FindFirstType("struct RBasic")
self.tRValue = target.FindFirstType("struct RVALUE") self.tRValue = target.FindFirstType("struct RVALUE")

View File

@ -135,7 +135,7 @@ rb_class_allocate_instance(VALUE klass)
RUBY_ASSERT(rb_shape_get_shape(obj)->type == SHAPE_ROOT); RUBY_ASSERT(rb_shape_get_shape(obj)->type == SHAPE_ROOT);
// Set the shape to the specific T_OBJECT shape. // Set the shape to the specific T_OBJECT shape.
ROBJECT_SET_SHAPE_ID(obj, (shape_id_t)(rb_gc_size_pool_id_for_size(size) + FIRST_T_OBJECT_SHAPE_ID)); ROBJECT_SET_SHAPE_ID(obj, (shape_id_t)(rb_gc_heap_id_for_size(size) + FIRST_T_OBJECT_SHAPE_ID));
#if RUBY_DEBUG #if RUBY_DEBUG
RUBY_ASSERT(!rb_shape_obj_too_complex(obj)); RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
@ -358,7 +358,7 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
rb_shape_t * initial_shape = rb_shape_get_shape(dest); rb_shape_t * initial_shape = rb_shape_get_shape(dest);
if (initial_shape->size_pool_index != src_shape->size_pool_index) { if (initial_shape->heap_index != src_shape->heap_index) {
RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT); RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
shape_to_set_on_dest = rb_shape_rebuild_shape(initial_shape, src_shape); shape_to_set_on_dest = rb_shape_rebuild_shape(initial_shape, src_shape);

View File

@ -1446,7 +1446,7 @@ module RubyVM::RJIT # :nodoc: all
next_iv_index: [self.attr_index_t, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), next_iv_index)")], next_iv_index: [self.attr_index_t, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), next_iv_index)")],
capacity: [CType::Immediate.parse("uint32_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), capacity)")], capacity: [CType::Immediate.parse("uint32_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), capacity)")],
type: [CType::Immediate.parse("uint8_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), type)")], type: [CType::Immediate.parse("uint8_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), type)")],
size_pool_index: [CType::Immediate.parse("uint8_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), size_pool_index)")], heap_index: [CType::Immediate.parse("uint8_t"), Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), heap_index)")],
parent_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), parent_id)")], parent_id: [self.shape_id_t, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), parent_id)")],
ancestor_index: [CType::Pointer.new { self.redblack_node_t }, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), ancestor_index)")], ancestor_index: [CType::Pointer.new { self.redblack_node_t }, Primitive.cexpr!("OFFSETOF((*((struct rb_shape *)NULL)), ancestor_index)")],
) )

14
shape.c
View File

@ -418,7 +418,7 @@ rb_shape_alloc(ID edge_name, rb_shape_t * parent, enum shape_type type)
{ {
rb_shape_t * shape = rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent)); rb_shape_t * shape = rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
shape->type = (uint8_t)type; shape->type = (uint8_t)type;
shape->size_pool_index = parent->size_pool_index; shape->heap_index = parent->heap_index;
shape->capacity = parent->capacity; shape->capacity = parent->capacity;
shape->edges = 0; shape->edges = 0;
return shape; return shape;
@ -1059,7 +1059,7 @@ rb_shape_t_to_rb_cShape(rb_shape_t *shape)
INT2NUM(shape->parent_id), INT2NUM(shape->parent_id),
rb_shape_edge_name(shape), rb_shape_edge_name(shape),
INT2NUM(shape->next_iv_index), INT2NUM(shape->next_iv_index),
INT2NUM(shape->size_pool_index), INT2NUM(shape->heap_index),
INT2NUM(shape->type), INT2NUM(shape->type),
INT2NUM(shape->capacity)); INT2NUM(shape->capacity));
rb_obj_freeze(obj); rb_obj_freeze(obj);
@ -1266,7 +1266,7 @@ Init_default_shapes(void)
rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID); rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
root->capacity = 0; root->capacity = 0;
root->type = SHAPE_ROOT; root->type = SHAPE_ROOT;
root->size_pool_index = 0; root->heap_index = 0;
GET_SHAPE_TREE()->root_shape = root; GET_SHAPE_TREE()->root_shape = root;
RUBY_ASSERT(rb_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID); RUBY_ASSERT(rb_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID);
@ -1282,16 +1282,16 @@ Init_default_shapes(void)
rb_shape_t *too_complex_shape = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID); rb_shape_t *too_complex_shape = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
too_complex_shape->type = SHAPE_OBJ_TOO_COMPLEX; too_complex_shape->type = SHAPE_OBJ_TOO_COMPLEX;
too_complex_shape->size_pool_index = 0; too_complex_shape->heap_index = 0;
RUBY_ASSERT(OBJ_TOO_COMPLEX_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1)); RUBY_ASSERT(OBJ_TOO_COMPLEX_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
RUBY_ASSERT(rb_shape_id(too_complex_shape) == OBJ_TOO_COMPLEX_SHAPE_ID); RUBY_ASSERT(rb_shape_id(too_complex_shape) == OBJ_TOO_COMPLEX_SHAPE_ID);
// Make shapes for T_OBJECT // Make shapes for T_OBJECT
size_t *sizes = rb_gc_size_pool_sizes(); size_t *sizes = rb_gc_heap_sizes();
for (int i = 0; sizes[i] > 0; i++) { for (int i = 0; sizes[i] > 0; i++) {
rb_shape_t *t_object_shape = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID); rb_shape_t *t_object_shape = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
t_object_shape->type = SHAPE_T_OBJECT; t_object_shape->type = SHAPE_T_OBJECT;
t_object_shape->size_pool_index = i; t_object_shape->heap_index = i;
t_object_shape->capacity = (uint32_t)((sizes[i] - offsetof(struct RObject, as.ary)) / sizeof(VALUE)); t_object_shape->capacity = (uint32_t)((sizes[i] - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
t_object_shape->edges = rb_id_table_create(0); t_object_shape->edges = rb_id_table_create(0);
t_object_shape->ancestor_index = LEAF; t_object_shape->ancestor_index = LEAF;
@ -1308,7 +1308,7 @@ Init_shape(void)
"parent_id", "parent_id",
"edge_name", "edge_name",
"next_iv_index", "next_iv_index",
"size_pool_index", "heap_index",
"type", "type",
"capacity", "capacity",
NULL); NULL);

View File

@ -47,7 +47,7 @@ struct rb_shape {
attr_index_t next_iv_index; attr_index_t next_iv_index;
uint32_t capacity; // Total capacity of the object with this shape uint32_t capacity; // Total capacity of the object with this shape
uint8_t type; uint8_t type;
uint8_t size_pool_index; uint8_t heap_index;
shape_id_t parent_id; shape_id_t parent_id;
redblack_node_t * ancestor_index; redblack_node_t * ancestor_index;
}; };

View File

@ -226,7 +226,7 @@ class TestGc < Test::Unit::TestCase
GC.stat_heap(0, stat_heap) GC.stat_heap(0, stat_heap)
GC.stat(stat) GC.stat(stat)
GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT].times do |i| GC::INTERNAL_CONSTANTS[:HEAP_COUNT].times do |i|
EnvUtil.without_gc do EnvUtil.without_gc do
GC.stat_heap(i, stat_heap) GC.stat_heap(i, stat_heap)
GC.stat(stat) GC.stat(stat)
@ -248,7 +248,7 @@ class TestGc < Test::Unit::TestCase
assert_equal stat_heap[:slot_size], GC.stat_heap(0)[:slot_size] assert_equal stat_heap[:slot_size], GC.stat_heap(0)[:slot_size]
assert_raise(ArgumentError) { GC.stat_heap(-1) } assert_raise(ArgumentError) { GC.stat_heap(-1) }
assert_raise(ArgumentError) { GC.stat_heap(GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT]) } assert_raise(ArgumentError) { GC.stat_heap(GC::INTERNAL_CONSTANTS[:HEAP_COUNT]) }
end end
def test_stat_heap_all def test_stat_heap_all
@ -259,7 +259,7 @@ class TestGc < Test::Unit::TestCase
GC.stat_heap(0, stat_heap) GC.stat_heap(0, stat_heap)
GC.stat_heap(nil, stat_heap_all) GC.stat_heap(nil, stat_heap_all)
GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT].times do |i| GC::INTERNAL_CONSTANTS[:HEAP_COUNT].times do |i|
GC.stat_heap(nil, stat_heap_all) GC.stat_heap(nil, stat_heap_all)
GC.stat_heap(i, stat_heap) GC.stat_heap(i, stat_heap)
@ -538,7 +538,7 @@ class TestGc < Test::Unit::TestCase
gc_count = GC.stat(:count) gc_count = GC.stat(:count)
# Fill up all of the size pools to the init slots # Fill up all of the size pools to the init slots
GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT].times do |i| GC::INTERNAL_CONSTANTS[:HEAP_COUNT].times do |i|
capa = (GC.stat_heap(i, :slot_size) - GC::INTERNAL_CONSTANTS[:RVALUE_OVERHEAD] - (2 * RbConfig::SIZEOF["void*"])) / RbConfig::SIZEOF["void*"] capa = (GC.stat_heap(i, :slot_size) - GC::INTERNAL_CONSTANTS[:RVALUE_OVERHEAD] - (2 * RbConfig::SIZEOF["void*"])) / RbConfig::SIZEOF["void*"]
while GC.stat_heap(i, :heap_eden_slots) < GC_HEAP_INIT_SLOTS while GC.stat_heap(i, :heap_eden_slots) < GC_HEAP_INIT_SLOTS
Array.new(capa) Array.new(capa)
@ -558,7 +558,7 @@ class TestGc < Test::Unit::TestCase
gc_count = GC.stat(:count) gc_count = GC.stat(:count)
# Fill up all of the size pools to the init slots # Fill up all of the size pools to the init slots
GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT].times do |i| GC::INTERNAL_CONSTANTS[:HEAP_COUNT].times do |i|
capa = (GC.stat_heap(i, :slot_size) - GC::INTERNAL_CONSTANTS[:RVALUE_OVERHEAD] - (2 * RbConfig::SIZEOF["void*"])) / RbConfig::SIZEOF["void*"] capa = (GC.stat_heap(i, :slot_size) - GC::INTERNAL_CONSTANTS[:RVALUE_OVERHEAD] - (2 * RbConfig::SIZEOF["void*"])) / RbConfig::SIZEOF["void*"]
while GC.stat_heap(i, :heap_eden_slots) < SIZES[i] while GC.stat_heap(i, :heap_eden_slots) < SIZES[i]
Array.new(capa) Array.new(capa)

View File

@ -283,7 +283,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_arrays_down_size_pools def test_moving_arrays_down_heaps
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10) assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10)
@ -305,7 +305,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_arrays_up_size_pools def test_moving_arrays_up_heaps
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10) assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 10)
@ -329,7 +329,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_objects_between_size_pools def test_moving_objects_between_heaps
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 60) assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 60)
@ -361,7 +361,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_strings_up_size_pools def test_moving_strings_up_heaps
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 30) assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 30)
@ -382,7 +382,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_strings_down_size_pools def test_moving_strings_down_heaps
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 30) assert_separately(%w[-robjspace], "#{<<~"begin;"}\n#{<<~"end;"}", timeout: 30)
@ -402,7 +402,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_hashes_down_size_pools def test_moving_hashes_down_heaps
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1
# AR and ST hashes are in the same size pool on 32 bit # AR and ST hashes are in the same size pool on 32 bit
omit unless RbConfig::SIZEOF["uint64_t"] <= RbConfig::SIZEOF["void*"] omit unless RbConfig::SIZEOF["uint64_t"] <= RbConfig::SIZEOF["void*"]
@ -425,7 +425,7 @@ class TestGCCompact < Test::Unit::TestCase
end; end;
end end
def test_moving_objects_between_size_pools_keeps_shape_frozen_status def test_moving_objects_between_heaps_keeps_shape_frozen_status
# [Bug #19536] # [Bug #19536]
assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}") assert_separately([], "#{<<~"begin;"}\n#{<<~"end;"}")
begin; begin;

View File

@ -662,8 +662,8 @@ CODE
assert_equal(Encoding::UTF_8, "#{s}x".encoding) assert_equal(Encoding::UTF_8, "#{s}x".encoding)
end end
def test_string_interpolations_across_size_pools_get_embedded def test_string_interpolations_across_heaps_get_embedded
omit if GC::INTERNAL_CONSTANTS[:SIZE_POOL_COUNT] == 1 omit if GC::INTERNAL_CONSTANTS[:HEAP_COUNT] == 1
require 'objspace' require 'objspace'
base_slot_size = GC::INTERNAL_CONSTANTS[:BASE_SLOT_SIZE] base_slot_size = GC::INTERNAL_CONSTANTS[:BASE_SLOT_SIZE]

View File

@ -648,7 +648,7 @@ pub struct rb_shape {
pub next_iv_index: attr_index_t, pub next_iv_index: attr_index_t,
pub capacity: u32, pub capacity: u32,
pub type_: u8, pub type_: u8,
pub size_pool_index: u8, pub heap_index: u8,
pub parent_id: shape_id_t, pub parent_id: shape_id_t,
pub ancestor_index: *mut redblack_node_t, pub ancestor_index: *mut redblack_node_t,
} }