Support tracing of struct member accessor methods
This follows the same approach used for attr_reader/attr_writer in 2d98593bf54a37397c6e4886ccc7e3654c2eaf85: skip the check for tracing after the first call by using the call cache, and clear the call cache when tracing is turned on/off. Fixes [Bug #18886]
This commit is contained in:
parent
1721bb9dc6
commit
3081c83169
25
benchmark/struct_accessor.yml
Normal file
25
benchmark/struct_accessor.yml
Normal file
@ -0,0 +1,25 @@
|
||||
prelude: |
|
||||
C = Struct.new(:x) do
|
||||
class_eval <<-END
|
||||
def r
|
||||
#{'x;'*256}
|
||||
end
|
||||
def w
|
||||
#{'self.x = nil;'*256}
|
||||
end
|
||||
def rm
|
||||
m = method(:x)
|
||||
#{'m.call;'*256}
|
||||
end
|
||||
def wm
|
||||
m = method(:x=)
|
||||
#{'m.call(nil);'*256}
|
||||
end
|
||||
END
|
||||
end
|
||||
obj = C.new(nil)
|
||||
benchmark:
|
||||
member_reader: "obj.r"
|
||||
member_writer: "obj.w"
|
||||
member_reader_method: "obj.rm"
|
||||
member_writer_method: "obj.wm"
|
@ -955,6 +955,55 @@ CODE
|
||||
assert_equal(expected*2, events)
|
||||
end
|
||||
|
||||
def test_tracepoint_struct
|
||||
c = Struct.new(:x) do
|
||||
alias y x
|
||||
alias y= x=
|
||||
end
|
||||
obj = c.new
|
||||
|
||||
ar_meth = obj.method(:x)
|
||||
aw_meth = obj.method(:x=)
|
||||
aar_meth = obj.method(:y)
|
||||
aaw_meth = obj.method(:y=)
|
||||
events = []
|
||||
trace = TracePoint.new(:c_call, :c_return){|tp|
|
||||
next if !target_thread?
|
||||
next if tp.path != __FILE__
|
||||
next if tp.method_id == :call
|
||||
case tp.event
|
||||
when :c_call
|
||||
assert_raise(RuntimeError) {tp.return_value}
|
||||
events << [tp.event, tp.method_id, tp.callee_id]
|
||||
when :c_return
|
||||
events << [tp.event, tp.method_id, tp.callee_id, tp.return_value]
|
||||
end
|
||||
}
|
||||
test_proc = proc do
|
||||
obj.x = 1
|
||||
obj.x
|
||||
obj.y = 2
|
||||
obj.y
|
||||
aw_meth.call(1)
|
||||
ar_meth.call
|
||||
aaw_meth.call(2)
|
||||
aar_meth.call
|
||||
end
|
||||
test_proc.call # populate call caches
|
||||
trace.enable(&test_proc)
|
||||
expected = [
|
||||
[:c_call, :x=, :x=],
|
||||
[:c_return, :x=, :x=, 1],
|
||||
[:c_call, :x, :x],
|
||||
[:c_return, :x, :x, 1],
|
||||
[:c_call, :x=, :y=],
|
||||
[:c_return, :x=, :y=, 2],
|
||||
[:c_call, :x, :y],
|
||||
[:c_return, :x, :y, 2],
|
||||
]
|
||||
assert_equal(expected*2, events)
|
||||
end
|
||||
|
||||
class XYZZYException < Exception; end
|
||||
def method_test_tracepoint_raised_exception err
|
||||
raise err
|
||||
|
@ -300,6 +300,7 @@ struct rb_callcache {
|
||||
#define VM_CALLCACHE_UNMARKABLE FL_FREEZE
|
||||
#define VM_CALLCACHE_ON_STACK FL_EXIVAR
|
||||
|
||||
/* VM_CALLCACHE_IVAR used for IVAR/ATTRSET/STRUCT_AREF/STRUCT_ASET methods */
|
||||
#define VM_CALLCACHE_IVAR IMEMO_FL_USER0
|
||||
#define VM_CALLCACHE_BF IMEMO_FL_USER1
|
||||
#define VM_CALLCACHE_SUPER IMEMO_FL_USER2
|
||||
@ -487,6 +488,12 @@ vm_cc_call_set(const struct rb_callcache *cc, vm_call_handler call)
|
||||
*(vm_call_handler *)&cc->call_ = call;
|
||||
}
|
||||
|
||||
static inline void
|
||||
set_vm_cc_ivar(const struct rb_callcache *cc)
|
||||
{
|
||||
*(VALUE *)&cc->flags |= VM_CALLCACHE_IVAR;
|
||||
}
|
||||
|
||||
static inline void
|
||||
vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t dest_shape_id)
|
||||
{
|
||||
@ -498,7 +505,7 @@ vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id
|
||||
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
|
||||
VM_ASSERT(cc != vm_cc_empty());
|
||||
*attr_value = (attr_index_t)(index + 1) | ((uintptr_t)(dest_shape_id) << SHAPE_FLAG_SHIFT);
|
||||
*(VALUE *)&cc->flags |= VM_CALLCACHE_IVAR;
|
||||
set_vm_cc_ivar(cc);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
|
@ -299,11 +299,15 @@ vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const
|
||||
}
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
|
||||
vm_call_check_arity(calling, 0, argv);
|
||||
ret = vm_call_opt_struct_aref0(ec, calling);
|
||||
VM_CALL_METHOD_ATTR(ret,
|
||||
vm_call_opt_struct_aref0(ec, calling),
|
||||
(void)0);
|
||||
goto success;
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
|
||||
vm_call_check_arity(calling, 1, argv);
|
||||
ret = vm_call_opt_struct_aset0(ec, calling, argv[0]);
|
||||
VM_CALL_METHOD_ATTR(ret,
|
||||
vm_call_opt_struct_aset0(ec, calling, argv[0]),
|
||||
(void)0);
|
||||
goto success;
|
||||
default:
|
||||
rb_bug("vm_call0: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
|
||||
|
@ -4342,6 +4342,19 @@ vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
|
||||
NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
|
||||
const struct rb_callinfo *ci, const struct rb_callcache *cc));
|
||||
|
||||
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
|
||||
if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
|
||||
EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
|
||||
vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
|
||||
var = func; \
|
||||
EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
|
||||
vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
|
||||
} \
|
||||
else { \
|
||||
nohook; \
|
||||
var = func; \
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
|
||||
const struct rb_callinfo *ci, const struct rb_callcache *cc)
|
||||
@ -4356,35 +4369,33 @@ vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb
|
||||
case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
|
||||
return vm_call_opt_block_call(ec, cfp, calling);
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
|
||||
CALLER_SETUP_ARG(cfp, calling, ci, 0);
|
||||
rb_check_arity(calling->argc, 0, 0);
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
|
||||
return vm_call_opt_struct_aref(ec, cfp, calling);
|
||||
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
|
||||
VALUE v;
|
||||
VM_CALL_METHOD_ATTR(v,
|
||||
vm_call_opt_struct_aref(ec, cfp, calling),
|
||||
set_vm_cc_ivar(cc); \
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
|
||||
return v;
|
||||
}
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
|
||||
CALLER_SETUP_ARG(cfp, calling, ci, 1);
|
||||
rb_check_arity(calling->argc, 1, 1);
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
|
||||
return vm_call_opt_struct_aset(ec, cfp, calling);
|
||||
|
||||
VALUE v;
|
||||
VM_CALL_METHOD_ATTR(v,
|
||||
vm_call_opt_struct_aset(ec, cfp, calling),
|
||||
set_vm_cc_ivar(cc); \
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
|
||||
return v;
|
||||
}
|
||||
default:
|
||||
rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
|
||||
}
|
||||
}
|
||||
|
||||
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
|
||||
if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
|
||||
EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
|
||||
vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
|
||||
var = func; \
|
||||
EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
|
||||
vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
|
||||
} \
|
||||
else { \
|
||||
nohook; \
|
||||
var = func; \
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
|
Loading…
x
Reference in New Issue
Block a user