rb_gc_impl_malloc can return NULL
Let there be room for each GC implementation to decide how to handle multi-threaded situations. They can be totally reentrant, can have their own mutex, or can rely on rb_thread_call_with_gvl. In any case the allocator (has been, but now officially is) expected to run properly without the GVL. This means there needs to be a way for them to inform the interpreter about their allocation failures without relying on raising exceptions. Let them do so by returning NULL.
This commit is contained in:
parent
5067a46502
commit
25ad7e8e6c
Notes:
git
2024-11-29 14:19:26 +00:00
59
gc.c
59
gc.c
@ -4398,12 +4398,20 @@ ruby_memerror(void)
|
||||
fprintf(stderr, "[FATAL] failed to allocate memory\n");
|
||||
}
|
||||
}
|
||||
|
||||
/* We have discussions whether we should die here; */
|
||||
/* We might rethink about it later. */
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
void
|
||||
rb_memerror(void)
|
||||
{
|
||||
/* the `GET_VM()->special_exceptions` below assumes that
|
||||
* the VM is reachable from the current thread. We should
|
||||
* definitely make sure of that. */
|
||||
RUBY_ASSERT_ALWAYS(ruby_thread_has_gvl_p());
|
||||
|
||||
rb_execution_context_t *ec = GET_EC();
|
||||
VALUE exc = GET_VM()->special_exceptions[ruby_error_nomemory];
|
||||
|
||||
@ -4428,8 +4436,27 @@ rb_malloc_info_show_results(void)
|
||||
{
|
||||
}
|
||||
|
||||
/*
 * Funnel every allocation result through one place.  A non-NULL pointer
 * is handed back untouched; NULL (the allocator's out-of-GVL way of
 * reporting failure) is routed to ruby_memerror_body() so the
 * interpreter can react.
 */
static void *
handle_malloc_failure(void *ptr)
{
    if (LIKELY(ptr != NULL)) {
        return ptr;
    }
    /* allocation failed: delegate to the out-of-memory machinery */
    return ruby_memerror_body(ptr);
}
|
||||
|
||||
static void *ruby_xmalloc_body(size_t size);
|
||||
|
||||
/*
 * Public xmalloc entry point.  The actual allocation happens in
 * ruby_xmalloc_body(); a NULL result is converted into the usual
 * out-of-memory handling by handle_malloc_failure().
 */
void *
ruby_xmalloc(size_t size)
{
    void *mem = ruby_xmalloc_body(size);
    return handle_malloc_failure(mem);
}
|
||||
|
||||
static void *
|
||||
ruby_xmalloc_body(size_t size)
|
||||
{
|
||||
if ((ssize_t)size < 0) {
|
||||
negative_size_allocation_error("too large allocation size");
|
||||
@ -4446,23 +4473,47 @@ ruby_malloc_size_overflow(size_t count, size_t elsize)
|
||||
count, elsize);
|
||||
}
|
||||
|
||||
static void *ruby_xmalloc2_body(size_t n, size_t size);
|
||||
|
||||
/*
 * Public xmalloc2 entry point (allocate n elements of `size` bytes).
 * Allocation is delegated to ruby_xmalloc2_body(); NULL results go
 * through handle_malloc_failure().
 */
void *
ruby_xmalloc2(size_t n, size_t size)
{
    void *mem = ruby_xmalloc2_body(n, size);
    return handle_malloc_failure(mem);
}
|
||||
|
||||
/*
 * Worker for ruby_xmalloc2(): combine the element count and element
 * size via xmalloc2_size() and ask the GC implementation for the
 * memory.  May return NULL on failure; the caller handles that.
 */
static void *
ruby_xmalloc2_body(size_t n, size_t size)
{
    /* NOTE(review): xmalloc2_size presumably guards n*size overflow — confirm */
    size_t total = xmalloc2_size(n, size);
    return rb_gc_impl_malloc(rb_gc_get_objspace(), total);
}
|
||||
|
||||
static void *ruby_xcalloc_body(size_t n, size_t size);
|
||||
|
||||
/*
 * Public xcalloc entry point (zeroed allocation of n elements of
 * `size` bytes).  Delegates to ruby_xcalloc_body(); NULL results go
 * through handle_malloc_failure().
 */
void *
ruby_xcalloc(size_t n, size_t size)
{
    void *mem = ruby_xcalloc_body(n, size);
    return handle_malloc_failure(mem);
}
|
||||
|
||||
/*
 * Worker for ruby_xcalloc(): compute the combined size with
 * xmalloc2_size() and request zeroed memory from the GC
 * implementation.  May return NULL on failure.
 */
static void *
ruby_xcalloc_body(size_t n, size_t size)
{
    size_t total = xmalloc2_size(n, size);
    return rb_gc_impl_calloc(rb_gc_get_objspace(), total);
}
|
||||
|
||||
static void *ruby_sized_xrealloc_body(void *ptr, size_t new_size, size_t old_size);
|
||||
|
||||
#ifdef ruby_sized_xrealloc
|
||||
#undef ruby_sized_xrealloc
|
||||
#endif
|
||||
/*
 * Public sized-realloc entry point.  The resize itself happens in
 * ruby_sized_xrealloc_body(); a NULL result is converted into the
 * usual out-of-memory handling by handle_malloc_failure().
 */
void *
ruby_sized_xrealloc(void *ptr, size_t new_size, size_t old_size)
{
    void *mem = ruby_sized_xrealloc_body(ptr, new_size, old_size);
    return handle_malloc_failure(mem);
}
|
||||
|
||||
static void *
|
||||
ruby_sized_xrealloc_body(void *ptr, size_t new_size, size_t old_size)
|
||||
{
|
||||
if ((ssize_t)new_size < 0) {
|
||||
negative_size_allocation_error("too large allocation size");
|
||||
@ -4477,11 +4528,19 @@ ruby_xrealloc(void *ptr, size_t new_size)
|
||||
return ruby_sized_xrealloc(ptr, new_size, 0);
|
||||
}
|
||||
|
||||
static void *ruby_sized_xrealloc2_body(void *ptr, size_t n, size_t size, size_t old_n);
|
||||
|
||||
#ifdef ruby_sized_xrealloc2
|
||||
#undef ruby_sized_xrealloc2
|
||||
#endif
|
||||
/*
 * Public sized-realloc2 entry point (resize to n elements of `size`
 * bytes).  Delegates to ruby_sized_xrealloc2_body(); NULL results go
 * through handle_malloc_failure().
 */
void *
ruby_sized_xrealloc2(void *ptr, size_t n, size_t size, size_t old_n)
{
    void *mem = ruby_sized_xrealloc2_body(ptr, n, size, old_n);
    return handle_malloc_failure(mem);
}
|
||||
|
||||
static void *
|
||||
ruby_sized_xrealloc2_body(void *ptr, size_t n, size_t size, size_t old_n)
|
||||
{
|
||||
size_t len = xmalloc2_size(n, size);
|
||||
return rb_gc_impl_realloc(rb_gc_get_objspace(), ptr, len, old_n * size);
|
||||
|
@ -6746,22 +6746,23 @@ int ruby_thread_has_gvl_p(void);
|
||||
static int
|
||||
garbage_collect_with_gvl(rb_objspace_t *objspace, unsigned int reason)
|
||||
{
|
||||
if (dont_gc_val()) return TRUE;
|
||||
if (ruby_thread_has_gvl_p()) {
|
||||
return garbage_collect(objspace, reason);
|
||||
if (dont_gc_val()) {
|
||||
return TRUE;
|
||||
}
|
||||
else if (!ruby_native_thread_p()) {
|
||||
return TRUE;
|
||||
}
|
||||
else if (!ruby_thread_has_gvl_p()) {
|
||||
void *ret;
|
||||
struct objspace_and_reason oar;
|
||||
oar.objspace = objspace;
|
||||
oar.reason = reason;
|
||||
ret = rb_thread_call_with_gvl(gc_with_gvl, (void *)&oar);
|
||||
|
||||
return !!ret;
|
||||
}
|
||||
else {
|
||||
if (ruby_native_thread_p()) {
|
||||
struct objspace_and_reason oar;
|
||||
oar.objspace = objspace;
|
||||
oar.reason = reason;
|
||||
return (int)(VALUE)rb_thread_call_with_gvl(gc_with_gvl, (void *)&oar);
|
||||
}
|
||||
else {
|
||||
/* no ruby thread */
|
||||
fprintf(stderr, "[FATAL] failed to allocate memory\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
return garbage_collect(objspace, reason);
|
||||
}
|
||||
}
|
||||
|
||||
@ -8123,7 +8124,7 @@ objspace_malloc_fixup(rb_objspace_t *objspace, void *mem, size_t size)
|
||||
#endif
|
||||
|
||||
#define GC_MEMERROR(...) \
|
||||
((RB_BUG_INSTEAD_OF_RB_MEMERROR+0) ? rb_bug("" __VA_ARGS__) : rb_memerror())
|
||||
((RB_BUG_INSTEAD_OF_RB_MEMERROR+0) ? rb_bug("" __VA_ARGS__) : (void)0)
|
||||
|
||||
#define TRY_WITH_GC(siz, expr) do { \
|
||||
const gc_profile_record_flag gpr = \
|
||||
@ -8197,6 +8198,7 @@ rb_gc_impl_malloc(void *objspace_ptr, size_t size)
|
||||
size = objspace_malloc_prepare(objspace, size);
|
||||
TRY_WITH_GC(size, mem = malloc(size));
|
||||
RB_DEBUG_COUNTER_INC(heap_xmalloc);
|
||||
if (!mem) return mem;
|
||||
return objspace_malloc_fixup(objspace, mem, size);
|
||||
}
|
||||
|
||||
@ -8216,6 +8218,7 @@ rb_gc_impl_calloc(void *objspace_ptr, size_t size)
|
||||
|
||||
size = objspace_malloc_prepare(objspace, size);
|
||||
TRY_WITH_GC(size, mem = calloc1(size));
|
||||
if (!mem) return mem;
|
||||
return objspace_malloc_fixup(objspace, mem, size);
|
||||
}
|
||||
|
||||
@ -8284,6 +8287,7 @@ rb_gc_impl_realloc(void *objspace_ptr, void *ptr, size_t new_size, size_t old_si
|
||||
|
||||
old_size = objspace_malloc_size(objspace, ptr, old_size);
|
||||
TRY_WITH_GC(new_size, mem = RB_GNUC_EXTENSION_BLOCK(realloc(ptr, new_size)));
|
||||
if (!mem) return mem;
|
||||
new_size = objspace_malloc_size(objspace, mem, new_size);
|
||||
|
||||
#if CALC_EXACT_MALLOC_SIZE
|
||||
|
12
gc/gc_impl.h
12
gc/gc_impl.h
@ -52,6 +52,18 @@ GC_IMPL_FN size_t rb_gc_impl_obj_slot_size(VALUE obj);
|
||||
GC_IMPL_FN size_t rb_gc_impl_heap_id_for_size(void *objspace_ptr, size_t size);
|
||||
GC_IMPL_FN bool rb_gc_impl_size_allocatable_p(size_t size);
|
||||
// Malloc
|
||||
/*
|
||||
* BEWARE: These functions may or may not run under GVL.
|
||||
*
|
||||
* You might want to make them thread-safe.
|
||||
* Garbage collecting inside is possible if and only if you
|
||||
* already have GVL. Also raising exceptions without one is a
|
||||
* total disaster.
|
||||
*
|
||||
* When you absolutely cannot allocate the requested amount of
|
||||
* memory just return NULL (with appropriate errno set).
|
||||
* The caller side takes care of that situation.
|
||||
*/
|
||||
GC_IMPL_FN void *rb_gc_impl_malloc(void *objspace_ptr, size_t size);
|
||||
GC_IMPL_FN void *rb_gc_impl_calloc(void *objspace_ptr, size_t size);
|
||||
GC_IMPL_FN void *rb_gc_impl_realloc(void *objspace_ptr, void *ptr, size_t new_size, size_t old_size);
|
||||
|
Loading…
x
Reference in New Issue
Block a user