Add Thread.each_caller_location

This method takes a block and yields Thread::Backtrace::Location
objects to the block.  It does not take arguments, and always
starts at the default frame that caller_locations would start at.

Implements [Feature #16663]
Jeremy Evans 2022-01-14 13:02:46 -08:00
parent 4113862c00
commit 4c366ec977
Notes: git 2022-02-18 01:54:38 +09:00
2 changed files with 66 additions and 5 deletions
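A minimal usage sketch (illustrative, not part of the commit), assuming the behavior described in the message above and exercised by the tests below: each caller frame is yielded as a Thread::Backtrace::Location, and `break` stops the walk early without building a full backtrace array.

    # Hypothetical example: find the first caller frame outside this file.
    # Unlike caller_locations, frames past the match are never materialized.
    def first_external_caller
      Thread.each_caller_location do |loc|
        break loc unless loc.path == __FILE__  # loc is a Thread::Backtrace::Location
      end
    end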

test/ruby/test_backtrace.rb

@@ -154,6 +154,35 @@ class TestBacktrace < Test::Unit::TestCase
    assert_equal caller(0), caller(0, nil)
  end

  def test_each_backtrace_location
    i = 0
    cl = caller_locations(1, 1)[0]; ecl = Thread.each_caller_location{|x| i+=1; break x if i == 1}
    assert_equal(cl.to_s, ecl.to_s)
    assert_kind_of(Thread::Backtrace::Location, ecl)

    i = 0
    ary = []
    cllr = caller_locations(1, 2); last = Thread.each_caller_location{|x| ary << x; i+=1; break x if i == 2}
    assert_equal(cllr.map(&:to_s), ary.map(&:to_s))
    assert_kind_of(Thread::Backtrace::Location, last)

    i = 0
    ary = []
    ->{->{
      cllr = caller_locations(1, 2); last = Thread.each_caller_location{|x| ary << x; i+=1; break x if i == 2}
    }.()}.()
    assert_equal(cllr.map(&:to_s), ary.map(&:to_s))
    assert_kind_of(Thread::Backtrace::Location, last)

    cllr = caller_locations(1, 2); ary = Thread.to_enum(:each_caller_location).to_a[2..3]
    assert_equal(cllr.map(&:to_s), ary.map(&:to_s))

    ecl = Thread.to_enum(:each_caller_location)
    assert_raise(StopIteration) {
      ecl.next
    }
  end

  def test_caller_locations_first_label
    def self.label
      caller_locations.first.label

vm_backtrace.c

@@ -572,8 +572,18 @@ bt_update_cfunc_loc(unsigned long cfunc_counter, rb_backtrace_location_t *cfunc_
    }
}

static VALUE location_create(rb_backtrace_location_t *srcloc, void *btobj);

static void
bt_yield_loc(rb_backtrace_location_t *loc, long num_frames, VALUE btobj)
{
    for (; num_frames > 0; num_frames--, loc++) {
        rb_yield(location_create(loc, (void *)btobj));
    }
}

static VALUE
rb_ec_partial_backtrace_object(const rb_execution_context_t *ec, long start_frame, long num_frames, int* start_too_large, bool skip_internal)
rb_ec_partial_backtrace_object(const rb_execution_context_t *ec, long start_frame, long num_frames, int* start_too_large, bool skip_internal, bool do_yield)
{
    const rb_control_frame_t *cfp = ec->cfp;
    const rb_control_frame_t *end_cfp = RUBY_VM_END_CONTROL_FRAME(ec);
@@ -631,6 +641,9 @@ rb_ec_partial_backtrace_object(const rb_execution_context_t *ec, long start_fram
                    loc->iseq = iseq;
                    loc->pc = pc;
                    bt_update_cfunc_loc(cfunc_counter, loc-1, iseq, pc);
                    if (do_yield) {
                        bt_yield_loc(loc - cfunc_counter, cfunc_counter+1, btobj);
                    }
                    cfunc_counter = 0;
                }
            }
@@ -654,6 +667,9 @@ rb_ec_partial_backtrace_object(const rb_execution_context_t *ec, long start_fram
        for (; cfp != end_cfp; cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp)) {
            if (cfp->iseq && cfp->pc && (!skip_internal || !is_internal_location(cfp))) {
                bt_update_cfunc_loc(cfunc_counter, loc, cfp->iseq, cfp->pc);
                if (do_yield) {
                    bt_yield_loc(loc - cfunc_counter, cfunc_counter, btobj);
                }
                break;
            }
        }
@@ -666,7 +682,7 @@ rb_ec_partial_backtrace_object(const rb_execution_context_t *ec, long start_fram
MJIT_FUNC_EXPORTED VALUE
rb_ec_backtrace_object(const rb_execution_context_t *ec)
{
    return rb_ec_partial_backtrace_object(ec, BACKTRACE_START, ALL_BACKTRACE_LINES, NULL, FALSE);
    return rb_ec_partial_backtrace_object(ec, BACKTRACE_START, ALL_BACKTRACE_LINES, NULL, FALSE, FALSE);
}

static VALUE
@@ -841,13 +857,13 @@ backtrace_limit(VALUE self)
VALUE
rb_ec_backtrace_str_ary(const rb_execution_context_t *ec, long lev, long n)
{
    return backtrace_to_str_ary(rb_ec_partial_backtrace_object(ec, lev, n, NULL, FALSE));
    return rb_backtrace_to_str_ary(rb_ec_partial_backtrace_object(ec, lev, n, NULL, FALSE, FALSE));
}

VALUE
rb_ec_backtrace_location_ary(const rb_execution_context_t *ec, long lev, long n, bool skip_internal)
{
    return backtrace_to_location_ary(rb_ec_partial_backtrace_object(ec, lev, n, NULL, skip_internal));
    return rb_backtrace_to_location_ary(rb_ec_partial_backtrace_object(ec, lev, n, NULL, skip_internal, FALSE));
}

/* make old style backtrace directly */
@@ -1119,7 +1135,7 @@ ec_backtrace_to_ary(const rb_execution_context_t *ec, int argc, const VALUE *arg
        return rb_ary_new();
    }

    btval = rb_ec_partial_backtrace_object(ec, lev, n, &too_large, FALSE);
    btval = rb_ec_partial_backtrace_object(ec, lev, n, &too_large, FALSE, FALSE);

    if (too_large) {
        return Qnil;
@@ -1240,6 +1256,20 @@ rb_f_caller_locations(int argc, VALUE *argv, VALUE _)
    return ec_backtrace_to_ary(GET_EC(), argc, argv, 1, 1, 0);
}

/*
 *  call-seq:
 *     Thread.each_caller_location{ |loc| ... } -> nil
 *
 *  Yields each frame of the current execution stack as a
 *  backtrace location object.
 */
static VALUE
each_caller_location(VALUE unused)
{
    rb_ec_partial_backtrace_object(GET_EC(), 2, ALL_BACKTRACE_LINES, NULL, FALSE, TRUE);
    return Qnil;
}

/* called from Init_vm() in vm.c */
void
Init_vm_backtrace(void)
@@ -1315,6 +1345,8 @@ Init_vm_backtrace(void)
    rb_define_global_function("caller", rb_f_caller, -1);
    rb_define_global_function("caller_locations", rb_f_caller_locations, -1);
    rb_define_singleton_method(rb_cThread, "each_caller_location", each_caller_location, 0);
}

/* debugger API */
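For reference, a small sketch of the Enumerator interaction asserted by test_each_backtrace_location above (behavior inferred from those assertions rather than from the documentation): eager enumeration with to_a works but begins with enumerator-internal frames, while external iteration with next yields no caller frames and raises StopIteration.

    # Hypothetical sketch mirroring the test above.
    locs = Thread.to_enum(:each_caller_location).to_a
    # The first entries come from the Enumerator machinery; the caller's own
    # frames follow (the test compares locs[2..3] against caller_locations(1, 2)).

    enum = Thread.to_enum(:each_caller_location)
    begin
      enum.next
    rescue StopIteration
      # The test asserts StopIteration here: external iteration runs in a
      # separate fiber and sees no caller frames.
    end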