mirror of
https://github.com/ruby/ruby.git
synced 2022-11-09 12:17:21 -05:00
optimize Struct
getter/setter
Introduce new optimized method type `OPTIMIZED_METHOD_TYPE_STRUCT_AREF/ASET` with index information.
This commit is contained in:
parent
be71c95b88
commit
82ea287018
Notes:
git
2021-11-19 08:32:59 +09:00
10 changed files with 181 additions and 238 deletions
|
@ -16068,6 +16068,7 @@ vm.$(OBJEXT): $(top_srcdir)/internal/sanitizers.h
|
|||
vm.$(OBJEXT): $(top_srcdir)/internal/serial.h
|
||||
vm.$(OBJEXT): $(top_srcdir)/internal/static_assert.h
|
||||
vm.$(OBJEXT): $(top_srcdir)/internal/string.h
|
||||
vm.$(OBJEXT): $(top_srcdir)/internal/struct.h
|
||||
vm.$(OBJEXT): $(top_srcdir)/internal/symbol.h
|
||||
vm.$(OBJEXT): $(top_srcdir)/internal/thread.h
|
||||
vm.$(OBJEXT): $(top_srcdir)/internal/variable.h
|
||||
|
|
95
compile.c
95
compile.c
|
@ -10588,101 +10588,6 @@ rb_local_defined(ID id, const rb_iseq_t *iseq)
|
|||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
caller_location(VALUE *path, VALUE *realpath)
|
||||
{
|
||||
const rb_execution_context_t *ec = GET_EC();
|
||||
const rb_control_frame_t *const cfp =
|
||||
rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
|
||||
|
||||
if (cfp) {
|
||||
int line = rb_vm_get_sourceline(cfp);
|
||||
*path = rb_iseq_path(cfp->iseq);
|
||||
*realpath = rb_iseq_realpath(cfp->iseq);
|
||||
return line;
|
||||
}
|
||||
else {
|
||||
*path = rb_fstring_lit("<compiled>");
|
||||
*realpath = *path;
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
typedef struct {
|
||||
VALUE arg;
|
||||
VALUE func;
|
||||
int line;
|
||||
} accessor_args;
|
||||
|
||||
static const rb_iseq_t *
|
||||
method_for_self(VALUE name, VALUE arg, const struct rb_builtin_function *func,
|
||||
void (*build)(rb_iseq_t *, LINK_ANCHOR *, const void *))
|
||||
{
|
||||
VALUE path, realpath;
|
||||
accessor_args acc;
|
||||
|
||||
acc.arg = arg;
|
||||
acc.func = (VALUE)func;
|
||||
acc.line = caller_location(&path, &realpath);
|
||||
struct rb_iseq_new_with_callback_callback_func *ifunc =
|
||||
rb_iseq_new_with_callback_new_callback(build, &acc);
|
||||
return rb_iseq_new_with_callback(ifunc,
|
||||
rb_sym2str(name), path, realpath,
|
||||
INT2FIX(acc.line), 0, ISEQ_TYPE_METHOD, 0);
|
||||
}
|
||||
|
||||
static void
|
||||
for_self_aref(rb_iseq_t *iseq, LINK_ANCHOR *ret, const void *a)
|
||||
{
|
||||
const accessor_args *const args = (void *)a;
|
||||
const int line = args->line;
|
||||
struct rb_iseq_constant_body *const body = iseq->body;
|
||||
|
||||
iseq_set_local_table(iseq, 0);
|
||||
body->param.lead_num = 0;
|
||||
body->param.size = 0;
|
||||
|
||||
NODE dummy_line_node = generate_dummy_line_node(line, -1);
|
||||
ADD_INSN1(ret, &dummy_line_node, putobject, args->arg);
|
||||
ADD_INSN1(ret, &dummy_line_node, invokebuiltin, args->func);
|
||||
}
|
||||
|
||||
static void
|
||||
for_self_aset(rb_iseq_t *iseq, LINK_ANCHOR *ret, const void *a)
|
||||
{
|
||||
const accessor_args *const args = (void *)a;
|
||||
const int line = args->line;
|
||||
struct rb_iseq_constant_body *const body = iseq->body;
|
||||
static const ID vars[] = {1, idUScore};
|
||||
|
||||
iseq_set_local_table(iseq, vars);
|
||||
body->param.lead_num = 1;
|
||||
body->param.size = 1;
|
||||
|
||||
NODE dummy_line_node = generate_dummy_line_node(line, -1);
|
||||
ADD_GETLOCAL(ret, &dummy_line_node, numberof(vars)-1, 0);
|
||||
ADD_INSN1(ret, &dummy_line_node, putobject, args->arg);
|
||||
ADD_INSN1(ret, &dummy_line_node, invokebuiltin, args->func);
|
||||
}
|
||||
|
||||
/*
|
||||
* func (index) -> (value)
|
||||
*/
|
||||
const rb_iseq_t *
|
||||
rb_method_for_self_aref(VALUE name, VALUE arg, const struct rb_builtin_function *func)
|
||||
{
|
||||
return method_for_self(name, arg, func, for_self_aref);
|
||||
}
|
||||
|
||||
/*
|
||||
* func (index, value) -> (value)
|
||||
*/
|
||||
const rb_iseq_t *
|
||||
rb_method_for_self_aset(VALUE name, VALUE arg, const struct rb_builtin_function *func)
|
||||
{
|
||||
return method_for_self(name, arg, func, for_self_aset);
|
||||
}
|
||||
|
||||
/* ISeq binary format */
|
||||
|
||||
#ifndef IBF_ISEQ_DEBUG
|
||||
|
|
|
@ -97,6 +97,8 @@ RB_DEBUG_COUNTER(ccf_bmethod)
|
|||
RB_DEBUG_COUNTER(ccf_opt_send)
|
||||
RB_DEBUG_COUNTER(ccf_opt_call)
|
||||
RB_DEBUG_COUNTER(ccf_opt_block_call)
|
||||
RB_DEBUG_COUNTER(ccf_opt_struct_aref)
|
||||
RB_DEBUG_COUNTER(ccf_opt_struct_aset)
|
||||
RB_DEBUG_COUNTER(ccf_super_method)
|
||||
|
||||
/*
|
||||
|
|
3
method.h
3
method.h
|
@ -167,11 +167,14 @@ enum method_optimized_type {
|
|||
OPTIMIZED_METHOD_TYPE_SEND,
|
||||
OPTIMIZED_METHOD_TYPE_CALL,
|
||||
OPTIMIZED_METHOD_TYPE_BLOCK_CALL,
|
||||
OPTIMIZED_METHOD_TYPE_STRUCT_AREF,
|
||||
OPTIMIZED_METHOD_TYPE_STRUCT_ASET,
|
||||
OPTIMIZED_METHOD_TYPE__MAX
|
||||
};
|
||||
|
||||
typedef struct rb_method_optimized {
|
||||
enum method_optimized_type type;
|
||||
unsigned int index;
|
||||
} rb_method_optimized_t;
|
||||
|
||||
struct rb_method_definition_struct {
|
||||
|
|
6
proc.c
6
proc.c
|
@ -2681,6 +2681,12 @@ rb_method_entry_min_max_arity(const rb_method_entry_t *me, int *max)
|
|||
case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
|
||||
*max = UNLIMITED_ARGUMENTS;
|
||||
return 0;
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
|
||||
*max = 0;
|
||||
return 0;
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
|
||||
*max = 1;
|
||||
return 0;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
|
70
struct.c
70
struct.c
|
@ -28,9 +28,6 @@ enum {
|
|||
AREF_HASH_THRESHOLD = 10
|
||||
};
|
||||
|
||||
const rb_iseq_t *rb_method_for_self_aref(VALUE name, VALUE arg, const struct rb_builtin_function *func);
|
||||
const rb_iseq_t *rb_method_for_self_aset(VALUE name, VALUE arg, const struct rb_builtin_function *func);
|
||||
|
||||
VALUE rb_cStruct;
|
||||
static ID id_members, id_back_members, id_keyword_init;
|
||||
|
||||
|
@ -229,32 +226,6 @@ rb_struct_getmember(VALUE obj, ID id)
|
|||
UNREACHABLE_RETURN(Qnil);
|
||||
}
|
||||
|
||||
static VALUE rb_struct_ref0(VALUE obj) {return RSTRUCT_GET(obj, 0);}
|
||||
static VALUE rb_struct_ref1(VALUE obj) {return RSTRUCT_GET(obj, 1);}
|
||||
static VALUE rb_struct_ref2(VALUE obj) {return RSTRUCT_GET(obj, 2);}
|
||||
static VALUE rb_struct_ref3(VALUE obj) {return RSTRUCT_GET(obj, 3);}
|
||||
static VALUE rb_struct_ref4(VALUE obj) {return RSTRUCT_GET(obj, 4);}
|
||||
static VALUE rb_struct_ref5(VALUE obj) {return RSTRUCT_GET(obj, 5);}
|
||||
static VALUE rb_struct_ref6(VALUE obj) {return RSTRUCT_GET(obj, 6);}
|
||||
static VALUE rb_struct_ref7(VALUE obj) {return RSTRUCT_GET(obj, 7);}
|
||||
static VALUE rb_struct_ref8(VALUE obj) {return RSTRUCT_GET(obj, 8);}
|
||||
static VALUE rb_struct_ref9(VALUE obj) {return RSTRUCT_GET(obj, 9);}
|
||||
|
||||
#define N_REF_FUNC numberof(ref_func)
|
||||
|
||||
static VALUE (*const ref_func[])(VALUE) = {
|
||||
rb_struct_ref0,
|
||||
rb_struct_ref1,
|
||||
rb_struct_ref2,
|
||||
rb_struct_ref3,
|
||||
rb_struct_ref4,
|
||||
rb_struct_ref5,
|
||||
rb_struct_ref6,
|
||||
rb_struct_ref7,
|
||||
rb_struct_ref8,
|
||||
rb_struct_ref9,
|
||||
};
|
||||
|
||||
static void
|
||||
rb_struct_modify(VALUE s)
|
||||
{
|
||||
|
@ -300,42 +271,16 @@ struct_pos_num(VALUE s, VALUE idx)
|
|||
return i;
|
||||
}
|
||||
|
||||
static VALUE
|
||||
opt_struct_aref(rb_execution_context_t *ec, VALUE self, VALUE idx)
|
||||
{
|
||||
long i = struct_pos_num(self, idx);
|
||||
return RSTRUCT_GET(self, i);
|
||||
}
|
||||
|
||||
static VALUE
|
||||
opt_struct_aset(rb_execution_context_t *ec, VALUE self, VALUE val, VALUE idx)
|
||||
{
|
||||
long i = struct_pos_num(self, idx);
|
||||
rb_struct_modify(self);
|
||||
RSTRUCT_SET(self, i, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
static const struct rb_builtin_function struct_aref_builtin =
|
||||
RB_BUILTIN_FUNCTION(0, struct_aref, opt_struct_aref, 1, 0);
|
||||
static const struct rb_builtin_function struct_aset_builtin =
|
||||
RB_BUILTIN_FUNCTION(1, struct_aref, opt_struct_aset, 2, 0);
|
||||
|
||||
static void
|
||||
define_aref_method(VALUE nstr, VALUE name, VALUE off)
|
||||
{
|
||||
const rb_iseq_t *iseq = rb_method_for_self_aref(name, off, &struct_aref_builtin);
|
||||
iseq->body->builtin_inline_p = true;
|
||||
|
||||
rb_add_method_iseq(nstr, SYM2ID(name), iseq, NULL, METHOD_VISI_PUBLIC);
|
||||
rb_add_method_optimized(nstr, SYM2ID(name), OPTIMIZED_METHOD_TYPE_STRUCT_AREF, FIX2UINT(off), METHOD_VISI_PUBLIC);
|
||||
}
|
||||
|
||||
static void
|
||||
define_aset_method(VALUE nstr, VALUE name, VALUE off)
|
||||
{
|
||||
const rb_iseq_t *iseq = rb_method_for_self_aset(name, off, &struct_aset_builtin);
|
||||
|
||||
rb_add_method_iseq(nstr, SYM2ID(name), iseq, NULL, METHOD_VISI_PUBLIC);
|
||||
rb_add_method_optimized(nstr, SYM2ID(name), OPTIMIZED_METHOD_TYPE_STRUCT_ASET, FIX2UINT(off), METHOD_VISI_PUBLIC);
|
||||
}
|
||||
|
||||
static VALUE
|
||||
|
@ -386,13 +331,8 @@ setup_struct(VALUE nstr, VALUE members)
|
|||
ID id = SYM2ID(sym);
|
||||
VALUE off = LONG2NUM(i);
|
||||
|
||||
if (i < N_REF_FUNC) {
|
||||
rb_define_method_id(nstr, id, ref_func[i], 0);
|
||||
}
|
||||
else {
|
||||
define_aref_method(nstr, sym, off);
|
||||
}
|
||||
define_aset_method(nstr, ID2SYM(rb_id_attrset(id)), off);
|
||||
define_aref_method(nstr, sym, off);
|
||||
define_aset_method(nstr, ID2SYM(rb_id_attrset(id)), off);
|
||||
}
|
||||
|
||||
return nstr;
|
||||
|
@ -844,7 +784,7 @@ rb_struct_alloc(VALUE klass, VALUE values)
|
|||
VALUE
|
||||
rb_struct_new(VALUE klass, ...)
|
||||
{
|
||||
VALUE tmpargs[N_REF_FUNC], *mem = tmpargs;
|
||||
VALUE tmpargs[16], *mem = tmpargs;
|
||||
int size, i;
|
||||
va_list args;
|
||||
|
||||
|
|
|
@ -408,6 +408,8 @@ class TestYJIT < Test::Unit::TestCase
|
|||
end
|
||||
|
||||
def test_invokebuiltin
|
||||
skip "Struct's getter/setter doesn't use invokebuiltin and YJIT doesn't support new logic"
|
||||
|
||||
assert_compiles(<<~RUBY)
|
||||
def foo(obj)
|
||||
obj.foo = 123
|
||||
|
|
39
vm_eval.c
39
vm_eval.c
|
@ -163,6 +163,19 @@ vm_call0_cfunc(rb_execution_context_t *ec, struct rb_calling_info *calling, cons
|
|||
return vm_call0_cfunc_with_frame(ec, calling, argv);
|
||||
}
|
||||
|
||||
static void
|
||||
vm_call_check_arity(struct rb_calling_info *calling, int argc, const VALUE *argv)
|
||||
{
|
||||
if (calling->kw_splat &&
|
||||
calling->argc > 0 &&
|
||||
RB_TYPE_P(argv[calling->argc-1], T_HASH) &&
|
||||
RHASH_EMPTY_P(argv[calling->argc-1])) {
|
||||
calling->argc--;
|
||||
}
|
||||
|
||||
rb_check_arity(calling->argc, argc, argc);
|
||||
}
|
||||
|
||||
/* `ci' should point temporal value (on stack value) */
|
||||
static VALUE
|
||||
vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
|
||||
|
@ -196,27 +209,13 @@ vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const
|
|||
ret = vm_call0_cfunc(ec, calling, argv);
|
||||
goto success;
|
||||
case VM_METHOD_TYPE_ATTRSET:
|
||||
if (calling->kw_splat &&
|
||||
calling->argc > 0 &&
|
||||
RB_TYPE_P(argv[calling->argc-1], T_HASH) &&
|
||||
RHASH_EMPTY_P(argv[calling->argc-1])) {
|
||||
calling->argc--;
|
||||
}
|
||||
|
||||
rb_check_arity(calling->argc, 1, 1);
|
||||
vm_call_check_arity(calling, 1, argv);
|
||||
VM_CALL_METHOD_ATTR(ret,
|
||||
rb_ivar_set(calling->recv, vm_cc_cme(cc)->def->body.attr.id, argv[0]),
|
||||
(void)0);
|
||||
goto success;
|
||||
case VM_METHOD_TYPE_IVAR:
|
||||
if (calling->kw_splat &&
|
||||
calling->argc > 0 &&
|
||||
RB_TYPE_P(argv[calling->argc-1], T_HASH) &&
|
||||
RHASH_EMPTY_P(argv[calling->argc-1])) {
|
||||
calling->argc--;
|
||||
}
|
||||
|
||||
rb_check_arity(calling->argc, 0, 0);
|
||||
vm_call_check_arity(calling, 0, argv);
|
||||
VM_CALL_METHOD_ATTR(ret,
|
||||
rb_attr_get(calling->recv, vm_cc_cme(cc)->def->body.attr.id),
|
||||
(void)0);
|
||||
|
@ -274,6 +273,14 @@ vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const
|
|||
ret = rb_vm_invoke_proc(ec, proc, calling->argc, argv, calling->kw_splat, calling->block_handler);
|
||||
goto success;
|
||||
}
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
|
||||
vm_call_check_arity(calling, 0, argv);
|
||||
ret = vm_call_opt_struct_aref0(ec, ec->cfp, calling);
|
||||
goto success;
|
||||
case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
|
||||
vm_call_check_arity(calling, 1, argv);
|
||||
ret = vm_call_opt_struct_aset0(ec, ec->cfp, calling);
|
||||
goto success;
|
||||
default:
|
||||
rb_bug("vm_call0: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
|
||||
}
|
||||
|
|
196
vm_insnhelper.c
196
vm_insnhelper.c
|
@ -22,6 +22,7 @@
|
|||
#include "internal/proc.h"
|
||||
#include "internal/random.h"
|
||||
#include "internal/variable.h"
|
||||
#include "internal/struct.h"
|
||||
#include "variable.h"
|
||||
|
||||
/* finish iseq array */
|
||||
|
@ -3311,53 +3312,6 @@ vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct
|
|||
}
|
||||
}
|
||||
|
||||
static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
|
||||
|
||||
NOINLINE(static VALUE
|
||||
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
|
||||
struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
|
||||
|
||||
static VALUE
|
||||
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
|
||||
struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
|
||||
{
|
||||
int argc = calling->argc;
|
||||
|
||||
/* remove self */
|
||||
if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
|
||||
DEC_SP(1);
|
||||
|
||||
return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
RB_DEBUG_COUNTER_INC(ccf_opt_call);
|
||||
|
||||
const struct rb_callinfo *ci = calling->ci;
|
||||
VALUE procval = calling->recv;
|
||||
return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
|
||||
|
||||
VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
|
||||
const struct rb_callinfo *ci = calling->ci;
|
||||
|
||||
if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
|
||||
return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
|
||||
}
|
||||
else {
|
||||
calling->recv = rb_vm_bh_to_procval(ec, block_handler);
|
||||
calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
|
||||
return vm_call_general(ec, reg_cfp, calling);
|
||||
}
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
|
||||
const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
|
||||
|
@ -3529,6 +3483,139 @@ vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_c
|
|||
}
|
||||
}
|
||||
|
||||
static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);

NOINLINE(static VALUE
         vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                                  struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));

/* Shift the receiver off the VM stack, then invoke `block_handler` as a
 * non-lambda block with the remaining arguments. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    const int argc = calling->argc;

    /* remove self: slide the arguments down one slot */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
RB_DEBUG_COUNTER_INC(ccf_opt_call);
|
||||
|
||||
const struct rb_callinfo *ci = calling->ci;
|
||||
VALUE procval = calling->recv;
|
||||
return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
|
||||
|
||||
VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
|
||||
const struct rb_callinfo *ci = calling->ci;
|
||||
|
||||
if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
|
||||
return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
|
||||
}
|
||||
else {
|
||||
calling->recv = rb_vm_bh_to_procval(ec, block_handler);
|
||||
calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
|
||||
return vm_call_general(ec, reg_cfp, calling);
|
||||
}
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_struct_aref0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
VALUE recv = calling->recv;
|
||||
|
||||
VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
|
||||
VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
|
||||
VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
|
||||
|
||||
const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
|
||||
return internal_RSTRUCT_GET(recv, off);
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
|
||||
|
||||
VALUE ret = vm_call_opt_struct_aref0(ec, reg_cfp, calling);
|
||||
reg_cfp->sp -= 1;
|
||||
return ret;
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_struct_aset0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
VALUE recv = calling->recv;
|
||||
VALUE val = *(reg_cfp->sp - 1);
|
||||
|
||||
VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
|
||||
VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
|
||||
VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
|
||||
|
||||
rb_check_frozen(recv);
|
||||
|
||||
const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
|
||||
internal_RSTRUCT_SET(recv, off, val);
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
static VALUE
|
||||
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
|
||||
{
|
||||
RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
|
||||
|
||||
VALUE ret = vm_call_opt_struct_aset0(ec, reg_cfp, calling);
|
||||
reg_cfp->sp -= 2;
|
||||
return ret;
|
||||
}
|
||||
|
||||
NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                                        const struct rb_callinfo *ci, const struct rb_callcache *cc));

/* Dispatch a VM_METHOD_TYPE_OPTIMIZED call to its specialized
 * implementation, installing the matching fastpath on the call cache. */
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    const enum method_optimized_type opt_type = vm_cc_cme(cc)->def->body.optimized.type;

    switch (opt_type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        /* fastpath only for simple argument shapes */
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aref(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 1, 1);
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aset(ec, cfp, calling);
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", opt_type);
    }
}
|
||||
|
||||
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
|
||||
if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
|
||||
EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
|
||||
|
@ -3597,20 +3684,7 @@ vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, st
|
|||
return vm_call_alias(ec, cfp, calling);
|
||||
|
||||
case VM_METHOD_TYPE_OPTIMIZED:
|
||||
switch (vm_cc_cme(cc)->def->body.optimized.type) {
|
||||
case OPTIMIZED_METHOD_TYPE_SEND:
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
|
||||
return vm_call_opt_send(ec, cfp, calling);
|
||||
case OPTIMIZED_METHOD_TYPE_CALL:
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
|
||||
return vm_call_opt_call(ec, cfp, calling);
|
||||
case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
|
||||
CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
|
||||
return vm_call_opt_block_call(ec, cfp, calling);
|
||||
default:
|
||||
rb_bug("vm_call_method: unsupported optimized method type (%d)",
|
||||
vm_cc_cme(cc)->def->body.optimized.type);
|
||||
}
|
||||
return vm_call_optimized(ec, cfp, calling, ci, cc);
|
||||
|
||||
case VM_METHOD_TYPE_UNDEF:
|
||||
break;
|
||||
|
|
|
@ -350,6 +350,7 @@ rb_add_method_optimized(VALUE klass, ID mid, enum method_optimized_type opt_type
|
|||
{
|
||||
rb_method_optimized_t opt = {
|
||||
.type = opt_type,
|
||||
.index = index,
|
||||
};
|
||||
rb_add_method(klass, mid, VM_METHOD_TYPE_OPTIMIZED, &opt, visi);
|
||||
}
|
||||
|
@ -1940,7 +1941,8 @@ rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_defini
|
|||
case VM_METHOD_TYPE_UNDEF:
|
||||
return 1;
|
||||
case VM_METHOD_TYPE_OPTIMIZED:
|
||||
return (d1->body.optimized.type == d2->body.optimized.type);
|
||||
return (d1->body.optimized.type == d2->body.optimized.type) &&
|
||||
(d1->body.optimized.index == d2->body.optimized.index);
|
||||
case VM_METHOD_TYPE_REFINED:
|
||||
case VM_METHOD_TYPE_ALIAS:
|
||||
break;
|
||||
|
@ -1974,6 +1976,7 @@ rb_hash_method_definition(st_index_t hash, const rb_method_definition_t *def)
|
|||
case VM_METHOD_TYPE_UNDEF:
|
||||
return hash;
|
||||
case VM_METHOD_TYPE_OPTIMIZED:
|
||||
hash = rb_hash_uint(hash, def->body.optimized.index);
|
||||
return rb_hash_uint(hash, def->body.optimized.type);
|
||||
case VM_METHOD_TYPE_REFINED:
|
||||
case VM_METHOD_TYPE_ALIAS:
|
||||
|
|
Loading…
Reference in a new issue