Retire RUBY_MARK_UNLESS_NULL

Marking `Qnil` or `Qfalse` works fine, having
an extra macro to avoid it isn't needed.
This commit is contained in:
Jean Boussier 2024-03-06 16:43:00 -05:00 committed by Jean Boussier
parent af8a4205bf
commit 2d80b6093f
2 changed files with 22 additions and 30 deletions

View File

@@ -79,14 +79,6 @@ rb_gc_debug_body(const char *mode, const char *msg, int st, void *ptr)
 #define RUBY_GC_INFO if(0)printf
 #endif
-#define RUBY_MARK_MOVABLE_UNLESS_NULL(ptr) do { \
-    VALUE markobj = (ptr); \
-    if (RTEST(markobj)) {rb_gc_mark_movable(markobj);} \
-} while (0)
-#define RUBY_MARK_UNLESS_NULL(ptr) do { \
-    VALUE markobj = (ptr); \
-    if (RTEST(markobj)) {rb_gc_mark(markobj);} \
-} while (0)
 #define RUBY_FREE_UNLESS_NULL(ptr) if(ptr){ruby_xfree(ptr);(ptr)=NULL;}
 #if STACK_GROW_DIRECTION > 0

vm.c — 44 changed lines
View File

@@ -2963,7 +2963,7 @@ rb_vm_mark(void *ptr)
     rb_gc_mark_movable(vm->mark_object_ary);
     rb_gc_mark_movable(vm->load_path);
     rb_gc_mark_movable(vm->load_path_snapshot);
-    RUBY_MARK_MOVABLE_UNLESS_NULL(vm->load_path_check_cache);
+    rb_gc_mark_movable(vm->load_path_check_cache);
     rb_gc_mark_movable(vm->expanded_load_path);
     rb_gc_mark_movable(vm->loaded_features);
     rb_gc_mark_movable(vm->loaded_features_snapshot);
@@ -2971,8 +2971,8 @@ rb_vm_mark(void *ptr)
     rb_gc_mark_movable(vm->loaded_features_realpath_map);
     rb_gc_mark_movable(vm->top_self);
     rb_gc_mark_movable(vm->orig_progname);
-    RUBY_MARK_MOVABLE_UNLESS_NULL(vm->coverages);
-    RUBY_MARK_MOVABLE_UNLESS_NULL(vm->me2counter);
+    rb_gc_mark_movable(vm->coverages);
+    rb_gc_mark_movable(vm->me2counter);

     if (vm->loading_table) {
         rb_mark_tbl(vm->loading_table);
@@ -3394,16 +3394,16 @@ rb_execution_context_mark(const rb_execution_context_t *ec)
                              sizeof(ec->machine.regs) / (sizeof(VALUE)));
     }

-    RUBY_MARK_UNLESS_NULL(ec->errinfo);
-    RUBY_MARK_UNLESS_NULL(ec->root_svar);
+    rb_gc_mark(ec->errinfo);
+    rb_gc_mark(ec->root_svar);
     if (ec->local_storage) {
         rb_id_table_foreach_values(ec->local_storage, mark_local_storage_i, NULL);
     }
-    RUBY_MARK_UNLESS_NULL(ec->local_storage_recursive_hash);
-    RUBY_MARK_UNLESS_NULL(ec->local_storage_recursive_hash_for_trace);
-    RUBY_MARK_UNLESS_NULL(ec->private_const_reference);
-    RUBY_MARK_MOVABLE_UNLESS_NULL(ec->storage);
+    rb_gc_mark(ec->local_storage_recursive_hash);
+    rb_gc_mark(ec->local_storage_recursive_hash_for_trace);
+    rb_gc_mark(ec->private_const_reference);
+    rb_gc_mark_movable(ec->storage);
 }

 void rb_fiber_mark_self(rb_fiber_t *fib);
void rb_fiber_mark_self(rb_fiber_t *fib); void rb_fiber_mark_self(rb_fiber_t *fib);
@@ -3434,8 +3434,8 @@ thread_mark(void *ptr)
     switch (th->invoke_type) {
       case thread_invoke_type_proc:
       case thread_invoke_type_ractor_proc:
-        RUBY_MARK_UNLESS_NULL(th->invoke_arg.proc.proc);
-        RUBY_MARK_UNLESS_NULL(th->invoke_arg.proc.args);
+        rb_gc_mark(th->invoke_arg.proc.proc);
+        rb_gc_mark(th->invoke_arg.proc.args);
         break;
       case thread_invoke_type_func:
         rb_gc_mark_maybe((VALUE)th->invoke_arg.func.arg);
@@ -3445,21 +3445,21 @@ thread_mark(void *ptr)
     }

     rb_gc_mark(rb_ractor_self(th->ractor));
-    RUBY_MARK_UNLESS_NULL(th->thgroup);
-    RUBY_MARK_UNLESS_NULL(th->value);
-    RUBY_MARK_UNLESS_NULL(th->pending_interrupt_queue);
-    RUBY_MARK_UNLESS_NULL(th->pending_interrupt_mask_stack);
-    RUBY_MARK_UNLESS_NULL(th->top_self);
-    RUBY_MARK_UNLESS_NULL(th->top_wrapper);
+    rb_gc_mark(th->thgroup);
+    rb_gc_mark(th->value);
+    rb_gc_mark(th->pending_interrupt_queue);
+    rb_gc_mark(th->pending_interrupt_mask_stack);
+    rb_gc_mark(th->top_self);
+    rb_gc_mark(th->top_wrapper);
     if (th->root_fiber) rb_fiber_mark_self(th->root_fiber);

     RUBY_ASSERT(th->ec == rb_fiberptr_get_ec(th->ec->fiber_ptr));
-    RUBY_MARK_UNLESS_NULL(th->stat_insn_usage);
-    RUBY_MARK_UNLESS_NULL(th->last_status);
-    RUBY_MARK_UNLESS_NULL(th->locking_mutex);
-    RUBY_MARK_UNLESS_NULL(th->name);
-    RUBY_MARK_UNLESS_NULL(th->scheduler);
+    rb_gc_mark(th->stat_insn_usage);
+    rb_gc_mark(th->last_status);
+    rb_gc_mark(th->locking_mutex);
+    rb_gc_mark(th->name);
+    rb_gc_mark(th->scheduler);

     RUBY_MARK_LEAVE("thread");
 }