Introduce negative method cache

pCMC doesn't have a negative method cache, so this patch implements one.
Koichi Sasada 2020-12-13 05:55:18 +09:00
parent fa63052be1
commit 967040ba59
Notes: git 2020-12-14 11:58:12 +09:00
4 changed files with 50 additions and 6 deletions
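
For readers new to the idea: a (positive) method cache remembers only successful lookups, so every call that misses — e.g. one eventually handled by method_missing — repeats the full search over the ancestor chain. A negative method cache records the misses too. Below is a minimal standalone C sketch of the technique; every name in it is invented for illustration, and it is not Ruby's implementation (the real one is in the hunks that follow).

    #include <stdio.h>

    /* Illustrative only: a toy negative method cache. A miss during method
     * lookup is recorded so that repeated lookups of the same missing name
     * are answered from the cache instead of rescanning the ancestors. */

    #define NEG_CACHE_SIZE 64

    typedef unsigned int method_id;             /* stand-in for Ruby's ID */

    static method_id neg_cache[NEG_CACHE_SIZE]; /* 0 = empty slot */

    static int slow_search(method_id mid) {
        printf("slow search for %u\n", mid);
        return 0;                               /* pretend the method is undefined */
    }

    static int method_defined_p(method_id mid) {
        if (neg_cache[mid % NEG_CACHE_SIZE] == mid)
            return 0;                           /* fast negative answer, no search */
        if (slow_search(mid))
            return 1;
        neg_cache[mid % NEG_CACHE_SIZE] = mid;  /* cache the miss */
        return 0;
    }

    int main(void) {
        method_defined_p(42);  /* slow path; the miss is cached */
        method_defined_p(42);  /* served by the negative cache, no slow search */
        return 0;
    }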

proc.c

@@ -5671,6 +5671,8 @@ rb_resolve_me_location(const rb_method_entry_t *me, VALUE resolved_location[5])
 {
     VALUE path, beg_pos_lineno, beg_pos_column, end_pos_lineno, end_pos_column;
 
+    if (!me->def) return NULL; // negative cme
+
   retry:
     switch (me->def->type) {
       case VM_METHOD_TYPE_ISEQ: {

vm.c

@@ -2530,6 +2530,13 @@ rb_vm_each_stack_value(void *ptr, void (*cb)(VALUE, void*), void *ctx)
     }
 }
 
+static enum rb_id_table_iterator_result
+vm_mark_negative_cme(VALUE val, void *dmy)
+{
+    rb_gc_mark(val);
+    return ID_TABLE_CONTINUE;
+}
+
 void
 rb_vm_mark(void *ptr)
 {
@@ -2585,6 +2592,8 @@ rb_vm_mark(void *ptr)
         rb_gc_mark_values(RUBY_NSIG, vm->trap_list.cmd);
 
+        rb_id_table_foreach_values(vm->negative_cme_table, vm_mark_negative_cme, NULL);
+
         mjit_mark();
     }
@@ -3660,6 +3669,7 @@ Init_BareVM(void)
     vm->objspace = rb_objspace_alloc();
     ruby_current_vm_ptr = vm;
+    vm->negative_cme_table = rb_id_table_create(16);
 
     Init_native_thread(th);
     th->vm = vm;

vm_core.h

@@ -649,6 +649,8 @@ typedef struct rb_vm_struct {
     const struct rb_builtin_function *builtin_function_table;
     int builtin_inline_index;
 
+    struct rb_id_table *negative_cme_table;
+
 #if USE_VM_CLOCK
     uint32_t clock;
 #endif

vm_method.c

@@ -164,7 +164,6 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
         if (cme) {
             // invalidate cme if found to invalidate the inline method cache.
-
             if (METHOD_ENTRY_CACHED(cme)) {
                 if (METHOD_ENTRY_COMPLEMENTED(cme)) {
                     // do nothing
@@ -177,6 +176,7 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
                     VALUE origin = RCLASS_ORIGIN(owner);
                     rb_method_table_insert(origin, RCLASS_M_TBL(origin), mid, new_cme);
                 }
+
                 vm_me_invalidate_cache((rb_callable_method_entry_t *)cme);
                 RB_DEBUG_COUNTER_INC(cc_invalidate_tree_cme);
             }
@@ -193,6 +193,13 @@ clear_method_cache_by_id_in_class(VALUE klass, ID mid)
             RB_DEBUG_COUNTER_INC(cc_invalidate_tree);
         }
+        else {
+            rb_vm_t *vm = GET_VM();
+            if (rb_id_table_lookup(vm->negative_cme_table, mid, (VALUE *)&cme)) {
+                rb_id_table_delete(vm->negative_cme_table, mid);
+                vm_me_invalidate_cache((rb_callable_method_entry_t *)cme);
+            }
+        }
     }
 }
@@ -1038,7 +1045,7 @@ static const rb_callable_method_entry_t *
 complemented_callable_method_entry(VALUE klass, ID id)
 {
     VALUE defined_class;
-    rb_method_entry_t *me = search_method_protect(klass, id, &defined_class);
+    rb_method_entry_t *me = search_method(klass, id, &defined_class);
     return prepare_callable_method_entry(defined_class, id, me, FALSE);
 }
@@ -1070,6 +1077,7 @@ static void
 cache_callable_method_entry(VALUE klass, ID mid, const rb_callable_method_entry_t *cme)
 {
     ASSERT_vm_locking();
+    VM_ASSERT(cme != NULL);
 
     struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
     struct rb_class_cc_entries *ccs;
@@ -1087,6 +1095,21 @@ cache_callable_method_entry(VALUE klass, ID mid, const rb_callable_method_entry_
     }
 }
 
+static const rb_callable_method_entry_t *
+negative_cme(ID mid)
+{
+    rb_vm_t *vm = GET_VM();
+    const rb_callable_method_entry_t *cme;
+
+    if (!rb_id_table_lookup(vm->negative_cme_table, mid, (VALUE *)&cme)) {
+        cme = (rb_callable_method_entry_t *)rb_method_entry_alloc(mid, Qnil, Qnil, NULL);
+        rb_id_table_insert(vm->negative_cme_table, mid, (VALUE)cme);
+    }
+
+    VM_ASSERT(cme != NULL);
+    return cme;
+}
+
 static const rb_callable_method_entry_t *
 callable_method_entry(VALUE klass, ID mid, VALUE *defined_class_ptr)
 {
@@ -1102,15 +1125,22 @@ callable_method_entry(VALUE klass, ID mid, VALUE *defined_class_ptr)
         }
         else {
             VALUE defined_class;
-            rb_method_entry_t *me = search_method_protect(klass, mid, &defined_class);
+            rb_method_entry_t *me = search_method(klass, mid, &defined_class);
             if (defined_class_ptr) *defined_class_ptr = defined_class;
-            cme = prepare_callable_method_entry(defined_class, mid, me, TRUE);
-            if (cme) cache_callable_method_entry(klass, mid, cme);
+
+            if (me != NULL) {
+                cme = prepare_callable_method_entry(defined_class, mid, me, TRUE);
+            }
+            else {
+                cme = negative_cme(mid);
+            }
+
+            cache_callable_method_entry(klass, mid, cme);
         }
     }
     RB_VM_LOCK_LEAVE();
 
-    return cme;
+    return !UNDEFINED_METHOD_ENTRY_P(cme) ? cme : NULL;
 }
 
 MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
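
A subtlety worth spelling out: a negative cme is shared per method id across the entire VM, so when a method with that id is later defined anywhere, clear_method_cache_by_id_in_class must both drop the table entry and invalidate the old sentinel; otherwise call sites that captured it would keep treating the method as missing. The following standalone C sketch shows that interplay; all names are invented for illustration (in the real VM the discarded sentinel is reclaimed by GC, which is why rb_vm_mark walks negative_cme_table above).

    #include <stdio.h>
    #include <stdlib.h>

    /* Illustrative only: a per-"VM" table of negative entries, keyed by
     * method id, and the invalidation performed when a method is defined. */

    typedef struct neg_entry {
        unsigned int mid;
        int invalidated;               /* flipped when a method with this id appears */
    } neg_entry;

    #define TBL_SIZE 64
    static neg_entry *neg_tbl[TBL_SIZE];  /* mid -> shared sentinel */

    /* Return the shared sentinel for mid, creating it on first miss. */
    static neg_entry *negative_entry(unsigned int mid) {
        unsigned int i = mid % TBL_SIZE;
        if (!neg_tbl[i] || neg_tbl[i]->mid != mid) {
            neg_entry *e = calloc(1, sizeof(*e));
            e->mid = mid;
            neg_tbl[i] = e;            /* leaks any evicted entry; GC covers this in Ruby */
        }
        return neg_tbl[i];
    }

    /* Called when a method with this id is (re)defined. */
    static void invalidate_negative_entry(unsigned int mid) {
        unsigned int i = mid % TBL_SIZE;
        if (neg_tbl[i] && neg_tbl[i]->mid == mid) {
            neg_tbl[i]->invalidated = 1;  /* holders of it must re-search */
            neg_tbl[i] = NULL;            /* future misses get a fresh sentinel */
        }
    }

    int main(void) {
        neg_entry *seen = negative_entry(42);  /* a call site caches the miss */
        invalidate_negative_entry(42);         /* the method gets defined later */
        printf("stale sentinel invalidated: %d\n", seen->invalidated);  /* prints 1 */
        return 0;
    }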