
remove trace instruction. [Feature #14104]

* tool/instruction.rb: create `trace_` prefix instructions.
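
  The doubled instruction table is visible from Ruby on a build of this
  revision (a hedged sketch; exact names and ordering depend on insns.def):

      # Every instruction gains a `trace_`-prefixed twin; the TRACE_INSN_P
      # macro in the iseq.c hunk below treats the upper half of the table
      # as the traced variants.
      pp RubyVM::INSTRUCTION_NAMES.grep(/\Atrace_/).first(5)
      # e.g. ["trace_nop", "trace_getlocal", ...]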

* compile.c (ADD_TRACE): do not add `trace` instructions, but add
  TRACE link elements instead. Each TRACE element is unified with the
  next instruction as part of that instruction's information.
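
  The folded event flags can be seen from Ruby via the disassembler output
  extended in the iseq.c hunk below (a sketch; the exact tag layout may vary):

      # Instructions now carry event tags such as [Li] (line), [Ca] (call)
      # and [Re] (return) instead of separate `trace` instructions.
      puts RubyVM::InstructionSequence.compile(<<~'SRC').disasm
        def add(a, b)
          a + b
        end
      SRC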

* vm_trace.c (update_global_event_hook): modify all ISeqs when
  hooks are enabled.
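
  Observable behaviour should stay the same; what changes is that enabling a
  hook now rewrites already-loaded ISeqs on the fly. A minimal sketch:

      # Enabling the TracePoint goes through update_global_event_hook, which
      # runs rb_iseq_trace_set_all over every ISeq in the heap.
      tp = TracePoint.new(:line) { |t| printf "line %s:%d\n", t.path, t.lineno }
      tp.enable { [1, 2, 3].map { |i| i * 2 } }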

* iseq.c (rb_iseq_trace_set): added to toggle `trace_` instructions.
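
  rb_iseq_trace_set itself is C-level only, but the same iseq.c change also
  round-trips the per-instruction events through InstructionSequence#to_a and
  ::load. A hedged sketch of what the serialized body now contains:

      # The body array interleaves event symbols with labels, line numbers
      # and instructions.
      body = RubyVM::InstructionSequence.compile("def m; :x; end").to_a.last
      pp body.select { |e| e.is_a?(Symbol) && e.to_s.start_with?("RUBY_EVENT_") }
      # e.g. [:RUBY_EVENT_LINE]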

* vm_insnhelper.c (vm_trace): added.
  This function is the body of the `trace_` prefix instructions.
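
  Because events now hang off real instructions, some reported line numbers
  shift; per the test changes below, a `return` event reports the line of the
  last executed expression rather than the line of `end`. A small sketch:

      def add(a, b)
        a + b        # the `return` event for :add should report this line
      end

      events = []
      set_trace_func proc { |ev, _file, line, id, _bind, _klass|
        events << [ev, line, id]
      }
      add(1, 2)
      set_trace_func(nil)
      pp events.select { |ev,| ev == "return" }   # e.g. [["return", 2, :add]]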

* vm_insnhelper.h (JUMP): save PC to a control frame.

* insns.def (trace): removed.

* vm_exec.h (INSN_ENTRY_SIG): add debug output (disabled).


git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@60763 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
Author: ko1
Date:   2017-11-14 12:58:36 +00:00
Parent: fe3decb201
Commit: 665ba24b44
13 changed files with 416 additions and 270 deletions

NEWS

@ -264,6 +264,9 @@ with all sufficient information, see the ChangeLog file or Redmine
* Performance of block passing using block parameters is improved by
lazy Proc allocation [Feature #14045]
* Dynamic instrumentation for TracePoint hooks instead of using "trace"
instruction to avoid overhead [Feature #14104]
=== Miscellaneous changes
* Print backtrace and error message in reverse order if STDERR is unchanged and a tty.

common.mk

@ -2904,6 +2904,7 @@ vm_trace.$(OBJEXT): {$(VPATH)}id.h
vm_trace.$(OBJEXT): {$(VPATH)}intern.h
vm_trace.$(OBJEXT): {$(VPATH)}internal.h
vm_trace.$(OBJEXT): {$(VPATH)}io.h
vm_trace.$(OBJEXT): {$(VPATH)}iseq.h
vm_trace.$(OBJEXT): {$(VPATH)}method.h
vm_trace.$(OBJEXT): {$(VPATH)}missing.h
vm_trace.$(OBJEXT): {$(VPATH)}node.h

compile.c

@ -36,10 +36,10 @@
typedef struct iseq_link_element {
enum {
ISEQ_ELEMENT_NONE,
ISEQ_ELEMENT_LABEL,
ISEQ_ELEMENT_INSN,
ISEQ_ELEMENT_ADJUST
ISEQ_ELEMENT_ADJUST,
ISEQ_ELEMENT_TRACE
} type;
struct iseq_link_element *next;
struct iseq_link_element *prev;
@ -77,6 +77,7 @@ typedef struct iseq_insn_data {
VALUE *operands;
struct {
int line_no;
rb_event_flag_t events;
} insn_info;
} INSN;
@ -86,6 +87,11 @@ typedef struct iseq_adjust_data {
int line_no;
} ADJUST;
typedef struct iseq_trace_data {
LINK_ELEMENT link;
rb_event_flag_t event;
} TRACE;
struct ensure_range {
LABEL *begin;
LABEL *end;
@ -242,6 +248,9 @@ struct iseq_compile_data_ensure_node_stack {
#define ADD_SEND_R(seq, line, id, argc, block, flag, keywords) \
ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, (line), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
#define ADD_TRACE(seq, event) \
ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event)))
#define ADD_TRACE_LINE_COVERAGE(seq, line) \
do { \
if (ISEQ_COVERAGE(iseq) && \
@ -295,13 +304,6 @@ struct iseq_compile_data_ensure_node_stack {
} \
} while (0)
#define ADD_TRACE(seq, line, event) \
do { \
if (ISEQ_COMPILE_DATA(iseq)->option->trace_instruction) { \
ADD_INSN1((seq), (line), trace, INT2FIX(event)); \
} \
} while (0)
static void iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line, int idx, int level);
static void iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line, int idx, int level);
@ -484,6 +486,8 @@ static int calc_sp_depth(int depth, INSN *iobj);
static INSN *new_insn_body(rb_iseq_t *iseq, int line_no, enum ruby_vminsn_type insn_id, int argc, ...);
static LABEL *new_label_body(rb_iseq_t *iseq, long line);
static ADJUST *new_adjust_body(rb_iseq_t *iseq, LABEL *label, int line);
static TRACE *new_trace_body(rb_iseq_t *iseq, rb_event_flag_t event);
static int iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *anchor, const NODE *n, int);
static int iseq_setup(rb_iseq_t *iseq, LINK_ANCHOR *const anchor);
@ -639,11 +643,12 @@ rb_iseq_compile_node(rb_iseq_t *iseq, const NODE *node)
start->rescued = LABEL_RESCUE_BEG;
end->rescued = LABEL_RESCUE_END;
ADD_TRACE(ret, FIX2INT(iseq->body->location.first_lineno), RUBY_EVENT_B_CALL);
ADD_TRACE(ret, RUBY_EVENT_B_CALL);
ADD_INSN (ret, FIX2INT(iseq->body->location.first_lineno), nop);
ADD_LABEL(ret, start);
CHECK(COMPILE(ret, "block body", node->nd_body));
ADD_LABEL(ret, end);
ADD_TRACE(ret, nd_line(node), RUBY_EVENT_B_RETURN);
ADD_TRACE(ret, RUBY_EVENT_B_RETURN);
/* wide range catch handler must put at last */
ADD_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
@ -652,17 +657,17 @@ rb_iseq_compile_node(rb_iseq_t *iseq, const NODE *node)
}
case ISEQ_TYPE_CLASS:
{
ADD_TRACE(ret, FIX2INT(iseq->body->location.first_lineno), RUBY_EVENT_CLASS);
ADD_TRACE(ret, RUBY_EVENT_CLASS);
CHECK(COMPILE(ret, "scoped node", node->nd_body));
ADD_TRACE(ret, nd_line(node), RUBY_EVENT_END);
ADD_TRACE(ret, RUBY_EVENT_END);
break;
}
case ISEQ_TYPE_METHOD:
{
ADD_TRACE(ret, FIX2INT(iseq->body->location.first_lineno), RUBY_EVENT_CALL);
ADD_TRACE(ret, RUBY_EVENT_CALL);
ADD_TRACE_METHOD_COVERAGE(ret, FIX2INT(iseq->body->location.first_lineno), rb_intern_str(iseq->body->location.label));
CHECK(COMPILE(ret, "scoped node", node->nd_body));
ADD_TRACE(ret, nd_line(node), RUBY_EVENT_RETURN);
ADD_TRACE(ret, RUBY_EVENT_RETURN);
break;
}
default: {
@ -897,6 +902,12 @@ compile_data_alloc_adjust(rb_iseq_t *iseq)
return (ADJUST *)compile_data_alloc(iseq, sizeof(ADJUST));
}
static TRACE *
compile_data_alloc_trace(rb_iseq_t *iseq)
{
return (TRACE *)compile_data_alloc(iseq, sizeof(TRACE));
}
/*
* elem1, elemX => elem1, elem2, elemX
*/
@ -951,7 +962,7 @@ REMOVE_ELEM(LINK_ELEMENT *elem)
}
static LINK_ELEMENT *
FIRST_ELEMENT(LINK_ANCHOR *const anchor)
FIRST_ELEMENT(const LINK_ANCHOR *const anchor)
{
return anchor->anchor.next;
}
@ -975,26 +986,42 @@ POP_ELEMENT(ISEQ_ARG_DECLARE LINK_ANCHOR *const anchor)
#define POP_ELEMENT(anchor) POP_ELEMENT(iseq, (anchor))
#endif
static int
LIST_SIZE_ZERO(LINK_ANCHOR *const anchor)
static LINK_ELEMENT *
ELEM_FIRST_INSN(LINK_ELEMENT *elem)
{
if (anchor->anchor.next == 0) {
return 1;
while (elem) {
switch (elem->type) {
case ISEQ_ELEMENT_INSN:
case ISEQ_ELEMENT_ADJUST:
return elem;
default:
elem = elem->next;
}
}
return NULL;
}
static int
LIST_INSN_SIZE_ONE(const LINK_ANCHOR *const anchor)
{
LINK_ELEMENT *first_insn = ELEM_FIRST_INSN(FIRST_ELEMENT(anchor));
if (first_insn != NULL &&
ELEM_FIRST_INSN(first_insn->next) == NULL) {
return TRUE;
}
else {
return 0;
return FALSE;
}
}
static int
LIST_SIZE_ONE(const LINK_ANCHOR *const anchor)
LIST_INSN_SIZE_ZERO(const LINK_ANCHOR *const anchor)
{
if (anchor->anchor.next != NULL &&
anchor->anchor.next->next == NULL) {
return 1;
if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor)) == NULL) {
return TRUE;
}
else {
return 0;
return FALSE;
}
}
@ -1073,6 +1100,18 @@ debug_list(ISEQ_ARG_DECLARE LINK_ANCHOR *const anchor)
#define debug_list(anc) ((void)0)
#endif
static TRACE *
new_trace_body(rb_iseq_t *iseq, rb_event_flag_t event)
{
TRACE *trace = compile_data_alloc_trace(iseq);
trace->link.type = ISEQ_ELEMENT_TRACE;
trace->link.next = NULL;
trace->event = event;
return trace;
}
static LABEL *
new_label_body(rb_iseq_t *iseq, long line)
{
@ -1108,12 +1147,14 @@ new_insn_core(rb_iseq_t *iseq, int line_no,
int insn_id, int argc, VALUE *argv)
{
INSN *iobj = compile_data_alloc_insn(iseq);
/* printf("insn_id: %d, line: %d\n", insn_id, line_no); */
iobj->link.type = ISEQ_ELEMENT_INSN;
iobj->link.next = 0;
iobj->insn_id = insn_id;
iobj->insn_info.line_no = line_no;
iobj->insn_info.events = 0;
iobj->operands = argv;
iobj->operand_size = argc;
iobj->sc_state = 0;
@ -1764,7 +1805,7 @@ fix_sp_depth(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
}
break;
}
case ISEQ_ELEMENT_NONE:
case ISEQ_ELEMENT_TRACE:
{
/* ignore */
break;
@ -1798,11 +1839,12 @@ add_insn_info(struct iseq_insn_info_entry *insns_info, int insns_info_index, int
{
if (list->type == ISEQ_ELEMENT_INSN) {
INSN *iobj = (INSN *)list;
if (iobj->insn_info.line_no != 0 &&
(insns_info_index == 0 ||
insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no)) {
if (insns_info_index == 0 ||
insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no ||
insns_info[insns_info_index-1].events != iobj->insn_info.events) {
insns_info[insns_info_index].position = code_index;
insns_info[insns_info_index].line_no = iobj->insn_info.line_no;
insns_info[insns_info_index].events = iobj->insn_info.events;
return TRUE;
}
else {
@ -1815,6 +1857,7 @@ add_insn_info(struct iseq_insn_info_entry *insns_info, int insns_info_index, int
insns_info[insns_info_index-1].line_no != adjust->line_no) {
insns_info[insns_info_index].position = code_index;
insns_info[insns_info_index].line_no = adjust->line_no;
insns_info[insns_info_index].events = 0;
return TRUE;
}
else {
@ -1835,6 +1878,7 @@ iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
struct iseq_insn_info_entry *insns_info;
LINK_ELEMENT *list;
VALUE *generated_iseq;
rb_event_flag_t events = 0;
int insn_num, code_index, insns_info_index, sp = 0;
int stack_max = fix_sp_depth(iseq, anchor);
@ -1853,6 +1897,8 @@ iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
sp = calc_sp_depth(sp, iobj);
code_index += insn_data_length(iobj);
insn_num++;
iobj->insn_info.events |= events;
events = 0;
break;
}
case ISEQ_ELEMENT_LABEL:
@ -1862,9 +1908,10 @@ iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
sp = lobj->sp;
break;
}
case ISEQ_ELEMENT_NONE:
case ISEQ_ELEMENT_TRACE:
{
/* ignore */
TRACE *trace = (TRACE *)list;
events |= trace->event;
break;
}
case ISEQ_ELEMENT_ADJUST:
@ -2177,14 +2224,31 @@ get_destination_insn(INSN *iobj)
{
LABEL *lobj = (LABEL *)OPERAND_AT(iobj, 0);
LINK_ELEMENT *list;
rb_event_flag_t events = 0;
list = lobj->link.next;
while (list) {
if (IS_INSN(list) || IS_ADJUST(list)) {
switch (list->type) {
case ISEQ_ELEMENT_INSN:
case ISEQ_ELEMENT_ADJUST:
goto found;
case ISEQ_ELEMENT_LABEL:
/* ignore */
break;
case ISEQ_ELEMENT_TRACE:
{
TRACE *trace = (TRACE *)list;
events |= trace->event;
}
break;
}
list = list->next;
}
found:
if (list && IS_INSN(list)) {
INSN *iobj = (INSN *)list;
iobj->insn_info.events |= events;
}
return list;
}
@ -2369,6 +2433,7 @@ iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcal
unref_destination(iobj, 0);
iobj->insn_id = BIN(leave);
iobj->operand_size = 0;
iobj->insn_info = diobj->insn_info;
goto again;
}
else if ((piobj = (INSN *)get_prev_insn(iobj)) != 0 &&
@ -2710,7 +2775,6 @@ iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcal
}
switch (INSN_OF(next)) {
case BIN(nop):
/*case BIN(trace):*/
next = next->next;
break;
case BIN(jump):
@ -2780,16 +2844,6 @@ iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcal
}
}
#define IS_TRACE_LINE(insn) \
(IS_INSN_ID(insn, trace) && \
OPERAND_AT(insn, 0) == INT2FIX(RUBY_EVENT_LINE))
if (IS_TRACE_LINE(iobj) && iobj->link.prev && IS_INSN(iobj->link.prev)) {
INSN *piobj = (INSN *)iobj->link.prev;
if (IS_TRACE_LINE(piobj)) {
REMOVE_ELEM(iobj->link.prev);
}
}
return COMPILE_OK;
}
@ -4360,12 +4414,12 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
}
}
if (!LIST_SIZE_ZERO(args_splat)) {
if (!LIST_INSN_SIZE_ZERO(args_splat)) {
ADD_SEQ(args, args_splat);
}
if (*flag & VM_CALL_ARGS_BLOCKARG) {
if (LIST_SIZE_ONE(arg_block)) {
if (LIST_INSN_SIZE_ONE(arg_block)) {
LINK_ELEMENT *elem = FIRST_ELEMENT(arg_block);
if (elem->type == ISEQ_ELEMENT_INSN) {
INSN *iobj = (INSN *)elem;
@ -5208,7 +5262,7 @@ compile_ensure(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node,
ADD_LABEL(ret, lstart);
CHECK(COMPILE_(ret, "ensure head", node->nd_head, (popped | last_leave)));
ADD_LABEL(ret, lend);
if (ensr->anchor.next == NULL) {
if (LIST_INSN_SIZE_ZERO(ensr)) {
ADD_INSN(ret, line, nop);
}
else {
@ -5272,7 +5326,7 @@ compile_return(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node,
if (type == ISEQ_TYPE_METHOD) {
add_ensure_iseq(ret, iseq, 1);
ADD_TRACE(ret, line, RUBY_EVENT_RETURN);
ADD_TRACE(ret, RUBY_EVENT_RETURN);
ADD_INSN(ret, line, leave);
ADD_ADJUST_RESTORE(ret, splabel);
@ -5314,7 +5368,6 @@ iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, const NODE *node, int poppe
static int
iseq_compile_each0(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int popped)
{
LINK_ELEMENT *saved_last_element = 0;
const int line = (int)nd_line(node);
const enum node_type type = nd_type(node);
@ -5325,8 +5378,7 @@ iseq_compile_each0(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, in
if (node->flags & NODE_FL_NEWLINE) {
ISEQ_COMPILE_DATA(iseq)->last_line = line;
ADD_TRACE_LINE_COVERAGE(ret, line);
ADD_TRACE(ret, line, RUBY_EVENT_LINE);
saved_last_element = ret->last;
ADD_TRACE(ret, RUBY_EVENT_LINE);
}
}
@ -6653,7 +6705,7 @@ iseq_compile_each0(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, in
INIT_ANCHOR(pref);
INIT_ANCHOR(body);
CHECK(compile_const_prefix(iseq, node, pref, body));
if (LIST_SIZE_ZERO(pref)) {
if (LIST_INSN_SIZE_ZERO(pref)) {
if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
ADD_INSN2(ret, line, getinlinecache, lend, INT2FIX(ic_index));
}
@ -6976,20 +7028,6 @@ iseq_compile_each0(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, in
return COMPILE_NG;
}
/* check & remove redundant trace(line) */
if (saved_last_element &&
ret->last == saved_last_element &&
((INSN *)saved_last_element)->insn_id == BIN(trace)) {
POP_ELEMENT(ret);
/* remove trace(coverage) */
if (IS_INSN_ID(ret->last, trace2) &&
(FIX2LONG(OPERAND_AT(ret->last, 0)) & RUBY_EVENT_COVERAGE) &&
(FIX2LONG(OPERAND_AT(ret->last, 1)) == COVERAGE_INDEX_LINES)) {
POP_ELEMENT(ret);
RARRAY_ASET(ISEQ_LINE_COVERAGE(iseq), line - 1, Qnil);
}
}
debug_node_end();
return COMPILE_OK;
}
@ -7154,9 +7192,10 @@ dump_disasm_list_with_cursor(const LINK_ELEMENT *link, const LINK_ELEMENT *curr,
printf(LABEL_FORMAT"%s\n", lobj->label_no, dest == lobj ? " <---" : "");
break;
}
case ISEQ_ELEMENT_NONE:
case ISEQ_ELEMENT_TRACE:
{
printf("[none]\n");
TRACE *trace = (TRACE *)link;
printf("trace: %0x\n", trace->event);
break;
}
case ISEQ_ELEMENT_ADJUST:
@ -7355,6 +7394,21 @@ iseq_build_callinfo_from_hash(rb_iseq_t *iseq, VALUE op)
return (VALUE)new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
}
static rb_event_flag_t
event_name_to_flag(VALUE sym)
{
#define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern(#ev))) return ev;
CHECK_EVENT(RUBY_EVENT_LINE);
CHECK_EVENT(RUBY_EVENT_CLASS);
CHECK_EVENT(RUBY_EVENT_END);
CHECK_EVENT(RUBY_EVENT_CALL);
CHECK_EVENT(RUBY_EVENT_RETURN);
CHECK_EVENT(RUBY_EVENT_B_CALL);
CHECK_EVENT(RUBY_EVENT_B_RETURN);
#undef CHECK_EVENT
return RUBY_EVENT_NONE;
}
static int
iseq_build_from_ary_body(rb_iseq_t *iseq, LINK_ANCHOR *const anchor,
VALUE body, VALUE labels_wrapper)
@ -7380,9 +7434,15 @@ iseq_build_from_ary_body(rb_iseq_t *iseq, LINK_ANCHOR *const anchor,
VALUE obj = ptr[i];
if (SYMBOL_P(obj)) {
rb_event_flag_t event;
if ((event = event_name_to_flag(obj)) != RUBY_EVENT_NONE) {
ADD_TRACE(anchor, event);
}
else {
LABEL *label = register_label(iseq, labels_table, obj);
ADD_LABEL(anchor, label);
}
}
else if (FIXNUM_P(obj)) {
line_no = NUM2INT(obj);
}

insns.def

@ -776,24 +776,6 @@ checkkeyword
ret = vm_check_keyword(kw_bits_index, keyword_index, GET_EP());
}
/**
@c setting
@e trace
@j trace
*/
DEFINE_INSN
trace
(rb_num_t nf)
()
()
{
rb_event_flag_t flag = (rb_event_flag_t)nf;
vm_dtrace(flag, ec);
EXEC_EVENT_HOOK(ec, flag, GET_SELF(), 0, 0, 0 /* id and klass are resolved at callee */,
(flag & (RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN)) ? TOPN(0) : Qundef);
}
/**
@c setting
@e trace

iseq.c

@ -336,8 +336,10 @@ prepare_iseq_build(rb_iseq_t *iseq,
return Qtrue;
}
static void rb_iseq_trace_set(const rb_iseq_t *iseq, rb_event_flag_t turnon_events);
static VALUE
cleanup_iseq_build(rb_iseq_t *iseq)
finish_iseq_build(rb_iseq_t *iseq)
{
struct iseq_compile_data *data = ISEQ_COMPILE_DATA(iseq);
VALUE err = data->err_info;
@ -350,6 +352,10 @@ cleanup_iseq_build(rb_iseq_t *iseq)
rb_funcallv(err, rb_intern("set_backtrace"), 1, &path);
rb_exc_raise(err);
}
if (ruby_vm_event_flags) {
rb_iseq_trace_set(iseq, ruby_vm_event_flags);
}
return Qtrue;
}
@ -503,7 +509,7 @@ rb_iseq_new_with_opt(const NODE *node, VALUE name, VALUE path, VALUE realpath,
prepare_iseq_build(iseq, name, path, realpath, first_lineno, parent, type, option);
rb_iseq_compile_node(iseq, node);
cleanup_iseq_build(iseq);
finish_iseq_build(iseq);
return iseq_translate(iseq);
}
@ -606,7 +612,7 @@ iseq_load(VALUE data, const rb_iseq_t *parent, VALUE opt)
rb_iseq_build_from_ary(iseq, misc, locals, params, exception, body);
cleanup_iseq_build(iseq);
finish_iseq_build(iseq);
return iseqw_new(iseq);
}
@ -1265,6 +1271,18 @@ rb_iseq_line_no(const rb_iseq_t *iseq, size_t pos)
}
}
rb_event_flag_t
rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos)
{
const struct iseq_insn_info_entry *entry = get_insn_info(iseq, pos);
if (entry) {
return entry->events;
}
else {
return 0;
}
}
static VALUE
id_to_name(ID id, VALUE default_value)
{
@ -1477,6 +1495,22 @@ rb_iseq_disasm_insn(VALUE ret, const VALUE *code, size_t pos,
}
}
{
rb_event_flag_t events = rb_iseq_event_flags(iseq, pos);
if (events) {
str = rb_str_catf(str, "[%s%s%s%s%s%s%s%s%s]",
events & RUBY_EVENT_LINE ? "Li" : "",
events & RUBY_EVENT_CLASS ? "Cl" : "",
events & RUBY_EVENT_END ? "En" : "",
events & RUBY_EVENT_CALL ? "Ca" : "",
events & RUBY_EVENT_RETURN ? "Re" : "",
events & RUBY_EVENT_C_CALL ? "Cc" : "",
events & RUBY_EVENT_C_RETURN ? "Cr" : "",
events & RUBY_EVENT_B_CALL ? "Bc" : "",
events & RUBY_EVENT_B_RETURN ? "Br" : "");
}
}
if (ret) {
rb_str_cat2(str, "\n");
rb_str_concat(ret, str);
@ -1834,7 +1868,7 @@ iseq_data_to_ary(const rb_iseq_t *iseq)
long l;
size_t ti;
unsigned int pos;
unsigned int line = 0;
int last_line = 0;
VALUE *seq, *iseq_original;
VALUE val = rb_ary_new();
@ -2095,11 +2129,28 @@ iseq_data_to_ary(const rb_iseq_t *iseq)
rb_ary_push(body, (VALUE)label);
}
if (ti < iseq->body->insns_info_size && iseq->body->insns_info[ti].position == pos) {
line = iseq->body->insns_info[ti].line_no;
if (ti < iseq->body->insns_info_size) {
const struct iseq_insn_info_entry *info = &iseq->body->insns_info[ti];
if (info->position == pos) {
int line = info->line_no;
rb_event_flag_t events = info->events;
if (line > 0 && last_line != line) {
rb_ary_push(body, INT2FIX(line));
last_line = line;
}
#define CHECK_EVENT(ev) if (events & ev) rb_ary_push(body, ID2SYM(rb_intern(#ev)));
CHECK_EVENT(RUBY_EVENT_LINE);
CHECK_EVENT(RUBY_EVENT_CLASS);
CHECK_EVENT(RUBY_EVENT_END);
CHECK_EVENT(RUBY_EVENT_CALL);
CHECK_EVENT(RUBY_EVENT_RETURN);
CHECK_EVENT(RUBY_EVENT_B_CALL);
CHECK_EVENT(RUBY_EVENT_B_RETURN);
#undef CHECK_EVENT
ti++;
}
}
rb_ary_push(body, ary);
pos += RARRAY_LENINT(ary); /* reject too huge data */
@ -2112,7 +2163,6 @@ iseq_data_to_ary(const rb_iseq_t *iseq)
rb_hash_aset(misc, ID2SYM(rb_intern("local_size")), INT2FIX(iseq->body->local_table_size));
rb_hash_aset(misc, ID2SYM(rb_intern("stack_max")), INT2FIX(iseq->body->stack_max));
/* TODO: compatibility issue */
/*
* [:magic, :major_version, :minor_version, :format_type, :misc,
* :name, :path, :absolute_path, :start_lineno, :type, :locals, :args,
@ -2261,131 +2311,72 @@ rb_iseq_defined_string(enum defined_type type)
return str;
}
/* Experimental tracing support: trace(line) -> trace(specified_line)
* MRI Specific.
*/
#define TRACE_INSN_P(insn) ((insn) >= VM_INSTRUCTION_SIZE/2)
int
rb_iseqw_line_trace_each(VALUE iseqw, int (*func)(int line, rb_event_flag_t *events_ptr, void *d), void *data)
#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
#define INSN_CODE(insn) ((VALUE)table[insn])
#else
#define INSN_CODE(insn) (insn)
#endif
static void
rb_iseq_trace_set(const rb_iseq_t *iseq, rb_event_flag_t turnon_events)
{
int trace_num = 0;
unsigned int pos;
size_t insn;
const rb_iseq_t *iseq = iseqw_check(iseqw);
int cont = 1;
VALUE *iseq_original;
unsigned int i;
VALUE *iseq_encoded = (VALUE *)iseq->body->iseq_encoded;
#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
VALUE *code = rb_iseq_original_iseq(iseq);
const void * const *table = rb_vm_get_insns_address_table();
#else
const VALUE *code = iseq->body->iseq_encoded;
#endif
iseq_original = rb_iseq_original_iseq(iseq);
for (pos = 0; cont && pos < iseq->body->iseq_size; pos += insn_len(insn)) {
insn = iseq_original[pos];
for (i=0; i<iseq->body->iseq_size;) {
int insn = (int)code[i];
rb_event_flag_t events = rb_iseq_event_flags(iseq, i);
if (insn == BIN(trace)) {
rb_event_flag_t current_events;
current_events = (rb_event_flag_t)iseq_original[pos+1];
if (current_events & RUBY_EVENT_LINE) {
rb_event_flag_t events = current_events & RUBY_EVENT_SPECIFIED_LINE;
trace_num++;
if (func) {
int line = rb_iseq_line_no(iseq, pos);
/* printf("line: %d\n", line); */
cont = (*func)(line, &events, data);
if (current_events != events) {
VALUE *encoded = (VALUE *)iseq->body->iseq_encoded;
iseq_original[pos+1] = encoded[pos+1] =
(VALUE)(current_events | (events & RUBY_EVENT_SPECIFIED_LINE));
if (events & turnon_events) {
if (!TRACE_INSN_P(insn)) {
iseq_encoded[i] = INSN_CODE(insn + VM_INSTRUCTION_SIZE/2);
}
else {
/* OK */
}
}
else if (TRACE_INSN_P(insn)) {
VM_ASSERT(insn - VM_INSTRUCTION_SIZE/2 >= 0);
iseq_encoded[i] = INSN_CODE(insn - VM_INSTRUCTION_SIZE/2);
}
i += insn_len(insn);
}
}
return trace_num;
/* clear for debugging: ISEQ_ORIGINAL_ISEQ_CLEAR(iseq); */
}
static int
collect_trace(int line, rb_event_flag_t *events_ptr, void *ptr)
trace_set_i(void *vstart, void *vend, size_t stride, void *data)
{
VALUE result = (VALUE)ptr;
rb_ary_push(result, INT2NUM(line));
return 1;
rb_event_flag_t turnon_events = *(rb_event_flag_t *)data;
VALUE v = (VALUE)vstart;
for (; v != (VALUE)vend; v += stride) {
if (rb_obj_is_iseq(v)) {
rb_iseq_trace_set(rb_iseq_check((rb_iseq_t *)v), turnon_events);
}
}
return 0;
}
/*
* <b>Experimental MRI specific feature, only available as C level api.</b>
*
* Returns all +specified_line+ events.
*/
VALUE
rb_iseqw_line_trace_all(VALUE iseqw)
void
rb_iseq_trace_set_all(rb_event_flag_t turnon_events)
{
VALUE result = rb_ary_new();
rb_iseqw_line_trace_each(iseqw, collect_trace, (void *)result);
return result;
rb_objspace_each_objects(trace_set_i, &turnon_events);
}
struct set_specifc_data {
int pos;
int set;
int prev; /* 1: set, 2: unset, 0: not found */
};
static int
line_trace_specify(int line, rb_event_flag_t *events_ptr, void *ptr)
void
rb_iseq_trace_on_all(void)
{
struct set_specifc_data *data = (struct set_specifc_data *)ptr;
if (data->pos == 0) {
data->prev = *events_ptr & RUBY_EVENT_SPECIFIED_LINE ? 1 : 2;
if (data->set) {
*events_ptr = *events_ptr | RUBY_EVENT_SPECIFIED_LINE;
}
else {
*events_ptr = *events_ptr & ~RUBY_EVENT_SPECIFIED_LINE;
}
return 0; /* found */
}
else {
data->pos--;
return 1;
}
}
/*
* <b>Experimental MRI specific feature, only available as C level api.</b>
*
* Set a +specified_line+ event at the given line position, if the +set+
* parameter is +true+.
*
* This method is useful for building a debugger breakpoint at a specific line.
*
* A TypeError is raised if +set+ is not boolean.
*
* If +pos+ is a negative integer a TypeError exception is raised.
*/
VALUE
rb_iseqw_line_trace_specify(VALUE iseqval, VALUE pos, VALUE set)
{
struct set_specifc_data data;
data.prev = 0;
data.pos = NUM2INT(pos);
if (data.pos < 0) rb_raise(rb_eTypeError, "`pos' is negative");
switch (set) {
case Qtrue: data.set = 1; break;
case Qfalse: data.set = 0; break;
default:
rb_raise(rb_eTypeError, "`set' should be true/false");
}
rb_iseqw_line_trace_each(iseqval, line_trace_specify, (void *)&data);
if (data.prev == 0) {
rb_raise(rb_eTypeError, "`pos' is out of range.");
}
return data.prev == 1 ? Qtrue : Qfalse;
rb_iseq_trace_set_all(RUBY_EVENT_TRACEPOINT_ALL);
}
VALUE
@ -2494,16 +2485,6 @@ Init_ISeq(void)
rb_define_method(rb_cISeq, "base_label", iseqw_base_label, 0);
rb_define_method(rb_cISeq, "first_lineno", iseqw_first_lineno, 0);
#if 0
/* Now, it is experimental. No discussions, no tests. */
/* They can be used from C level. Please give us feedback. */
rb_define_method(rb_cISeq, "line_trace_all", rb_iseqw_line_trace_all, 0);
rb_define_method(rb_cISeq, "line_trace_specify", rb_iseqw_line_trace_specify, 2);
#else
(void)rb_iseqw_line_trace_all;
(void)rb_iseqw_line_trace_specify;
#endif
#if 0 /* TBD */
rb_define_private_method(rb_cISeq, "marshal_dump", iseqw_marshal_dump, 0);
rb_define_private_method(rb_cISeq, "marshal_load", iseqw_marshal_load, 1);
@ -2524,3 +2505,4 @@ Init_ISeq(void)
rb_undef_method(CLASS_OF(rb_cISeq), "translate");
rb_undef_method(CLASS_OF(rb_cISeq), "load_iseq");
}

iseq.h

@ -71,6 +71,12 @@ ISEQ_ORIGINAL_ISEQ(const rb_iseq_t *iseq)
return NULL;
}
static inline void
ISEQ_ORIGINAL_ISEQ_CLEAR(const rb_iseq_t *iseq)
{
RARRAY_ASET(ISEQ_MARK_ARY(iseq), ISEQ_MARK_ARY_ORIGINAL_ISEQ, Qnil);
}
static inline VALUE *
ISEQ_ORIGINAL_ISEQ_ALLOC(const rb_iseq_t *iseq, long size)
{
@ -110,10 +116,9 @@ VALUE rb_iseq_load(VALUE data, VALUE parent, VALUE opt);
VALUE rb_iseq_parameters(const rb_iseq_t *iseq, int is_proc);
struct st_table *ruby_insn_make_insn_table(void);
unsigned int rb_iseq_line_no(const rb_iseq_t *iseq, size_t pos);
void rb_iseq_trace_set_all(rb_event_flag_t turnon_events);
void rb_iseq_trace_on_all(void);
int rb_iseqw_line_trace_each(VALUE iseqval, int (*func)(int line, rb_event_flag_t *events_ptr, void *d), void *data);
VALUE rb_iseqw_line_trace_all(VALUE iseqval);
VALUE rb_iseqw_line_trace_specify(VALUE iseqval, VALUE pos, VALUE set);
VALUE rb_iseqw_new(const rb_iseq_t *iseq);
const rb_iseq_t *rb_iseqw_to_iseq(VALUE iseqw);
@ -145,6 +150,7 @@ struct rb_compile_option_struct {
struct iseq_insn_info_entry {
unsigned int position;
int line_no;
rb_event_flag_t events;
};
struct iseq_catch_table_entry {

test/ruby/test_settracefunc.rb

@ -77,7 +77,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
events.shift)
assert_equal(["c-return", 5, :+, Integer],
events.shift)
assert_equal(["return", 6, :add, self.class],
assert_equal(["return", 5, :add, self.class],
events.shift)
assert_equal(["line", 8, __method__, self.class],
events.shift)
@ -116,7 +116,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
events.shift)
assert_equal(["c-return", 5, :method_added, Module],
events.shift)
assert_equal(["end", 7, nil, nil],
assert_equal(["end", 5, nil, nil],
events.shift)
assert_equal(["line", 8, __method__, self.class],
events.shift)
@ -130,7 +130,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
events.shift)
assert_equal(["call", 5, :bar, Foo],
events.shift)
assert_equal(["return", 6, :bar, Foo],
assert_equal(["return", 5, :bar, Foo],
events.shift)
assert_equal(["line", 9, __method__, self.class],
events.shift)
@ -176,7 +176,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
events.shift)
assert_equal(["line", 5, :meth_return, self.class],
events.shift)
assert_equal(["return", 7, :meth_return, self.class],
assert_equal(["return", 5, :meth_return, self.class],
events.shift)
assert_equal(["line", 10, :test_return, self.class],
events.shift)
@ -215,7 +215,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
events.shift)
assert_equal(["line", 6, :meth_return2, self.class],
events.shift)
assert_equal(["return", 7, :meth_return2, self.class],
assert_equal(["return", 6, :meth_return2, self.class],
events.shift)
assert_equal(["line", 9, :test_return2, self.class],
events.shift)
@ -239,8 +239,6 @@ class TestSetTraceFunc < Test::Unit::TestCase
EOF
assert_equal(["c-return", 1, :set_trace_func, Kernel],
events.shift)
assert_equal(["line", 4, __method__, self.class],
events.shift)
assert_equal(["line", 5, __method__, self.class],
events.shift)
assert_equal(["c-call", 5, :raise, Kernel],
@ -289,8 +287,8 @@ class TestSetTraceFunc < Test::Unit::TestCase
["line", 4, __method__, self.class],
["c-return", 4, :any?, Array],
["line", 5, __method__, self.class],
["c-call", 5, :set_trace_func, Kernel]].each{|e|
assert_equal(e, events.shift)
["c-call", 5, :set_trace_func, Kernel]].each.with_index{|e, i|
assert_equal(e, events.shift, "mismatch on #{i}th trace")
}
end
@ -345,7 +343,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
["line", 4, nil, nil],
["c-call", 4, :method_added, Module],
["c-return", 4, :method_added, Module],
["end", 7, nil, nil],
["end", 4, nil, nil],
["line", 8, __method__, self.class],
["c-call", 8, :new, Class],
["c-call", 8, :initialize, BasicObject],
@ -355,7 +353,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
["line", 5, :foo, ThreadTraceInnerClass],
["c-call", 5, :+, Integer],
["c-return", 5, :+, Integer],
["return", 6, :foo, ThreadTraceInnerClass],
["return", 5, :foo, ThreadTraceInnerClass],
["line", 9, __method__, self.class],
["c-call", 9, :set_trace_func, Thread]].each do |e|
[:set, :add].each do |type|
@ -489,7 +487,7 @@ class TestSetTraceFunc < Test::Unit::TestCase
[:line, 13, "xyzzy", nil, nil, xyzzy.class, :XYZZY_outer, :nothing],
[:c_call, 13, "xyzzy", Module, :method_added, xyzzy.class, :XYZZY_outer, :nothing],
[:c_return,13, "xyzzy", Module, :method_added, xyzzy.class, :XYZZY_outer, nil],
[:end, 17, "xyzzy", nil, nil, xyzzy.class, :XYZZY_outer, :nothing],
[:end, 13, "xyzzy", nil, nil, xyzzy.class, :XYZZY_outer, :nothing],
[:line, 18, "xyzzy", TestSetTraceFunc, method, self, :outer, :nothing],
[:c_call, 18, "xyzzy", Class, :new, xyzzy.class, :outer, :nothing],
[:c_call, 18, "xyzzy", BasicObject, :initialize, xyzzy, :outer, :nothing],
@ -504,8 +502,8 @@ class TestSetTraceFunc < Test::Unit::TestCase
[:line, 15, "xyzzy", xyzzy.class, :bar, xyzzy, :XYZZY_bar, :nothing],
[:c_call, 15, "xyzzy", Kernel, :tap, xyzzy, :XYZZY_bar, :nothing],
[:c_return,15, "xyzzy", Kernel, :tap, xyzzy, :XYZZY_bar, xyzzy],
[:return, 16, "xyzzy", xyzzy.class, :bar, xyzzy, :XYZZY_bar, xyzzy],
[:return, 12, "xyzzy", xyzzy.class, :foo, xyzzy, :XYZZY_foo, xyzzy],
[:return, 15, "xyzzy", xyzzy.class, :bar, xyzzy, :XYZZY_bar, xyzzy],
[:return, 11, "xyzzy", xyzzy.class, :foo, xyzzy, :XYZZY_foo, xyzzy],
[:line, 20, "xyzzy", TestSetTraceFunc, method, self, :outer, :nothing],
[:c_call, 20, "xyzzy", Kernel, :raise, self, :outer, :nothing],
[:c_call, 20, "xyzzy", Exception, :exception, RuntimeError, :outer, :nothing],

tool/instruction.rb

@ -33,6 +33,7 @@ class RubyVM
@optimized = []
@is_sc = false
@sp_inc = sp_inc
@trace = trace
end
def add_sc sci
@ -49,6 +50,7 @@ class RubyVM
attr_reader :is_sc
attr_reader :tvars
attr_reader :sp_inc
attr_accessor :trace
def set_sc
@is_sc = true
@ -116,6 +118,7 @@ class RubyVM
load_opt_operand_def opts[:"opope.def"] || 'defs/opt_operand.def'
load_insn_unification_def opts[:"unif.def"] || 'defs/opt_insn_unif.def'
make_stackcaching_insns if vm_opt?('STACK_CACHING')
make_trace_insns
end
attr_reader :vpath
@ -533,6 +536,21 @@ class RubyVM
}
end
def make_trace_insns
@insns.dup.each{|insn|
body = <<-EOS
vm_trace(ec, GET_CFP(), GET_PC());
goto start_of_#{insn.name};
EOS
trace_insn = Instruction.new(name = "trace_#{insn.name}",
insn.opes, insn.pops, insn.rets, insn.comm,
body, insn.tvars, insn.sp_inc)
trace_insn.trace = true
add_insn trace_insn
}
end
def make_insn_sc orig_insn, name, opes, pops, rets, pushs, nextsc
comm = orig_insn.comm.dup
comm[:c] = 'optimize(sc)'
@ -866,9 +884,11 @@ class RubyVM
end
def make_header insn
commit "INSN_ENTRY(#{insn.name}){"
label = insn.trace ? '' : "start_of_#{insn.name}:;"
commit "INSN_ENTRY(#{insn.name}){#{label}"
make_header_prepare_stack insn
commit "{"
unless insn.trace
make_header_stack_val insn
make_header_default_operands insn
make_header_operands insn
@ -880,13 +900,16 @@ class RubyVM
make_header_popn insn
make_header_defines insn
make_header_analysis insn
end
commit "{"
end
def make_footer insn
unless insn.trace
make_footer_stack_val insn
make_footer_default_operands insn
make_footer_undefs insn
end
commit " END_INSN(#{insn.name});}}}"
end

vm_backtrace.c

@ -40,7 +40,14 @@ int
rb_vm_get_sourceline(const rb_control_frame_t *cfp)
{
if (VM_FRAME_RUBYFRAME_P(cfp) && cfp->iseq) {
return calc_lineno(cfp->iseq, cfp->pc);
const rb_iseq_t *iseq = cfp->iseq;
int line = calc_lineno(iseq, cfp->pc);
if (line != 0) {
return line;
}
else {
return FIX2INT(rb_iseq_first_lineno(iseq));
}
}
else {
return 0;

vm_exec.h

@ -74,8 +74,12 @@ error !
#define ELABEL(x) INSN_ELABEL_##x
#define LABEL_PTR(x) &&LABEL(x)
#define INSN_ENTRY_SIG(insn)
#define INSN_ENTRY_SIG(insn) \
if (0) fprintf(stderr, "exec: %s@(%d, %d)@%s:%d\n", #insn, \
(int)(reg_pc - reg_cfp->iseq->body->iseq_encoded), \
(int)(reg_cfp->pc - reg_cfp->iseq->body->iseq_encoded), \
RSTRING_PTR(rb_iseq_path(reg_cfp->iseq)), \
(int)(rb_iseq_line_no(reg_cfp->iseq, reg_pc - reg_cfp->iseq->body->iseq_encoded)));
#define INSN_DISPATCH_SIG(insn)

vm_insnhelper.c

@ -3723,3 +3723,58 @@ vm_opt_regexpmatch2(VALUE recv, VALUE obj)
return Qundef;
}
}
rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc));
static void
vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc)
{
const rb_iseq_t *iseq = reg_cfp->iseq;
size_t pos = pc - iseq->body->iseq_encoded;
rb_event_flag_t events = rb_iseq_event_flags(iseq, pos);
rb_event_flag_t event;
if (ec->trace_arg != NULL) return;
if (0) {
fprintf(stderr, "vm_trace>>%4d (%4x) - %s:%d %s\n",
(int)pos,
(int)events,
RSTRING_PTR(rb_iseq_path(iseq)),
(int)rb_iseq_line_no(iseq, pos),
RSTRING_PTR(rb_iseq_label(iseq)));
}
VM_ASSERT(reg_cfp->pc == pc);
VM_ASSERT(events != 0);
/* increment PC because source line is calculated with PC-1 */
if (events & ruby_vm_event_flags) {
if (event = (events & (RUBY_EVENT_CLASS | RUBY_EVENT_CALL | RUBY_EVENT_B_CALL))) {
VM_ASSERT(event == RUBY_EVENT_CLASS ||
event == RUBY_EVENT_CALL ||
event == RUBY_EVENT_B_CALL);
reg_cfp->pc++;
vm_dtrace(event, ec);
EXEC_EVENT_HOOK(ec, event, GET_SELF(), 0, 0, 0, Qundef);
reg_cfp->pc--;
}
if (events & RUBY_EVENT_LINE) {
reg_cfp->pc++;
vm_dtrace(RUBY_EVENT_LINE, ec);
EXEC_EVENT_HOOK(ec, RUBY_EVENT_LINE, GET_SELF(), 0, 0, 0, Qundef);
reg_cfp->pc--;
}
if (event = (events & (RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN))) {
VM_ASSERT(event == RUBY_EVENT_END ||
event == RUBY_EVENT_RETURN ||
event == RUBY_EVENT_B_RETURN);
reg_cfp->pc++;
vm_dtrace(RUBY_EVENT_LINE, ec);
EXEC_EVENT_HOOK(ec, event, GET_SELF(), 0, 0, 0, TOPN(0));
reg_cfp->pc--;
}
}
}

vm_insnhelper.h

@ -86,7 +86,7 @@ enum vm_regan_acttype {
#define GET_CURRENT_INSN() (*GET_PC())
#define GET_OPERAND(n) (GET_PC()[(n)])
#define ADD_PC(n) (SET_PC(VM_REG_PC + (n)))
#define JUMP(dst) (VM_REG_PC += (dst))
#define JUMP(dst) (SET_PC(VM_REG_PC + (dst)))
/* frame pointer, environment pointer */
#define GET_CFP() (COLLECT_USAGE_REGISTER_HELPER(CFP, GET, VM_REG_CFP))

vm_trace.c

@ -25,6 +25,7 @@
#include "ruby/debug.h"
#include "vm_core.h"
#include "iseq.h"
#include "eval_intern.h"
/* (1) trace mechanisms */
@ -58,36 +59,45 @@ rb_vm_trace_mark_event_hooks(rb_hook_list_t *hooks)
/* ruby_vm_event_flags management */
static void
update_global_event_hook(rb_event_flag_t vm_events)
{
ruby_vm_event_flags = vm_events;
rb_iseq_trace_set_all(vm_events);
rb_objspace_set_event_hook(vm_events);
}
static void
recalc_add_ruby_vm_event_flags(rb_event_flag_t events)
{
int i;
ruby_vm_event_flags = 0;
rb_event_flag_t vm_events = 0;
for (i=0; i<MAX_EVENT_NUM; i++) {
if (events & ((rb_event_flag_t)1 << i)) {
ruby_event_flag_count[i]++;
}
ruby_vm_event_flags |= ruby_event_flag_count[i] ? (1<<i) : 0;
vm_events |= ruby_event_flag_count[i] ? (1<<i) : 0;
}
rb_objspace_set_event_hook(ruby_vm_event_flags);
update_global_event_hook(vm_events);
}
static void
recalc_remove_ruby_vm_event_flags(rb_event_flag_t events)
{
int i;
ruby_vm_event_flags = 0;
rb_event_flag_t vm_events = 0;
for (i=0; i<MAX_EVENT_NUM; i++) {
if (events & ((rb_event_flag_t)1 << i)) {
VM_ASSERT(ruby_event_flag_count[i] > 0);
ruby_event_flag_count[i]--;
}
ruby_vm_event_flags |= ruby_event_flag_count[i] ? (1<<i) : 0;
vm_events |= ruby_event_flag_count[i] ? (1<<i) : 0;
}
rb_objspace_set_event_hook(ruby_vm_event_flags);
update_global_event_hook(vm_events);
}
/* add/remove hooks */
@ -467,7 +477,6 @@ static void call_trace_func(rb_event_flag_t, VALUE data, VALUE self, ID id, VALU
static VALUE
set_trace_func(VALUE obj, VALUE trace)
{
rb_remove_event_hook(call_trace_func);
if (NIL_P(trace)) {
@ -580,16 +589,41 @@ get_event_id(rb_event_flag_t event)
}
}
static void
get_path_and_lineno(const rb_execution_context_t *ec, const rb_control_frame_t *cfp, rb_event_flag_t event, VALUE *pathp, int *linep)
{
cfp = rb_vm_get_ruby_level_next_cfp(ec, cfp);
if (cfp) {
const rb_iseq_t *iseq = cfp->iseq;
*pathp = rb_iseq_path(iseq);
if (event & (RUBY_EVENT_CLASS |
RUBY_EVENT_CALL |
RUBY_EVENT_B_CALL)) {
*linep = FIX2INT(rb_iseq_first_lineno(iseq));
}
else {
*linep = rb_vm_get_sourceline(cfp);
}
}
else {
*pathp = Qnil;
*linep = 0;
}
}
static void
call_trace_func(rb_event_flag_t event, VALUE proc, VALUE self, ID id, VALUE klass)
{
int line;
const char *srcfile = rb_source_loc(&line);
VALUE filename;
VALUE eventname = rb_str_new2(get_event_name(event));
VALUE filename = srcfile ? rb_str_new2(srcfile) : Qnil;
VALUE argv[6];
const rb_execution_context_t *ec = GET_EC();
get_path_and_lineno(ec, ec->cfp, event, &filename, &line);
if (!klass) {
rb_ec_frame_method_id_and_class(ec, &id, 0, &klass);
}
@ -607,7 +641,7 @@ call_trace_func(rb_event_flag_t event, VALUE proc, VALUE self, ID id, VALUE klas
argv[1] = filename;
argv[2] = INT2FIX(line);
argv[3] = id ? ID2SYM(id) : Qnil;
argv[4] = (self && srcfile) ? rb_binding_new() : Qnil;
argv[4] = (self && (filename != Qnil)) ? rb_binding_new() : Qnil;
argv[5] = klass ? klass : Qnil;
rb_proc_call_with_block(proc, 6, argv, Qnil);
@ -725,16 +759,7 @@ static void
fill_path_and_lineno(rb_trace_arg_t *trace_arg)
{
if (trace_arg->path == Qundef) {
rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(trace_arg->ec, trace_arg->cfp);
if (cfp) {
trace_arg->path = rb_iseq_path(cfp->iseq);
trace_arg->lineno = rb_vm_get_sourceline(cfp);
}
else {
trace_arg->path = Qnil;
trace_arg->lineno = 0;
}
get_path_and_lineno(trace_arg->ec, trace_arg->cfp, trace_arg->event, &trace_arg->path, &trace_arg->lineno);
}
}