1
0
Fork 0
mirror of https://github.com/ruby/ruby.git synced 2022-11-09 12:17:21 -05:00

merge revision(s) abdc634f64a440afcdc7f23c9757d27aab4db8a9,083c5f08ec4e95c9b75810d46f933928327a5ab3,1ecda213668644d656eb0d60654737482447dd92,813fe4c256f89babebb8ab53821ae5eb6bb138c6: [Backport #17497]

remove unused decl

	---
	 internal/vm.h | 6 ------
	 vm_args.c     | 2 --
	 2 files changed, 8 deletions(-)

	Check stack overflow in recursive glob_helper [Bug #17162]

	---
	 dir.c         |  2 ++
	 internal/vm.h |  1 +
	 vm_eval.c     | 10 ++++++++++
	 3 files changed, 13 insertions(+)

	global call-cache cache table for rb_funcall*

	rb_funcall* (rb_funcall(), rb_funcallv(), ...) functions invokes
	Ruby's method with given receiver. Ruby 2.7 introduced inline method
	cache with static memory area. However, Ruby 3.0 reimplemented the
	method cache data structures and the inline cache was removed.

	Without inline cache, rb_funcall* searched methods every time.
	In most cases the per-Class Method Cache (pCMC) will help, but
	pCMC requires VM-wide locking and this hurts performance on
	multi-Ractor execution, especially when all Ractors call methods
	with rb_funcall*.

	This patch introduced Global Call-Cache Cache Table (gccct) for
	rb_funcall*. Call-Cache was introduced in Ruby 3.0 to manage
	method cache entries atomically, and gccct enables method caching
	without VM-wide locking. This table solves the performance issue
	on multi-ractor execution.
	[Bug #17497]

	Ruby-level method invocation does not use gccct because it has
	inline-method-cache and the table size is limited. Basically
	rb_funcall* is not used frequently, so 1023 entries can be enough.
	We will revisit the table size if it is not enough.
	---
	 debug_counter.h |   3 +
	 vm.c            |  12 +++
	 vm_callinfo.h   |  12 ---
	 vm_core.h       |   5 +
	 vm_eval.c       | 288 ++++++++++++++++++++++++++++++++++++++++++--------------
	 vm_insnhelper.c |  11 ++-
	 vm_method.c     |  14 ++-
	 7 files changed, 255 insertions(+), 90 deletions(-)

	opt_equality_by_mid for rb_equal_opt

	This patch improves the performance of sequential and parallel
	execution of rb_equal() (and rb_eql()).
	[Bug #17497]

	rb_equal_opt (and rb_eql_opt) does not have its own cd and it wastes
	time initializing cd. This patch introduces opt_equality_by_mid()
	to check equality without cd.

	Furthermore, current master uses "static" cd on rb_equal_opt
	(and rb_eql_opt) and it hurts CPU caches on multi-thread execution.
	Now they are gone, so there is no bottleneck in parallel execution.
	---
	 vm_insnhelper.c | 99 ++++++++++++++++++++++++++++++++++++---------------------
	 1 file changed, 63 insertions(+), 36 deletions(-)
This commit is contained in:
NARUSE, Yui 2021-03-11 20:24:48 +09:00
parent 0074ea2d83
commit de6072a22e
11 changed files with 332 additions and 135 deletions

View file

@ -62,6 +62,9 @@ RB_DEBUG_COUNTER(ccs_not_found) // count for not found corresponding ccs on met
// vm_eval.c
RB_DEBUG_COUNTER(call0_public)
RB_DEBUG_COUNTER(call0_other)
RB_DEBUG_COUNTER(gccct_hit)
RB_DEBUG_COUNTER(gccct_miss)
RB_DEBUG_COUNTER(gccct_null)
// iseq
RB_DEBUG_COUNTER(iseq_num) // number of total created iseq

2
dir.c
View file

@ -2273,6 +2273,8 @@ glob_helper(
int escape = !(flags & FNM_NOESCAPE);
size_t pathlen = baselen + namelen;
rb_check_stack_overflow();
for (cur = beg; cur < end; ++cur) {
struct glob_pattern *p = *cur;
if (p->type == RECURSIVE) {

View file

@ -80,12 +80,7 @@ VALUE rb_yield_force_blockarg(VALUE values);
VALUE rb_lambda_call(VALUE obj, ID mid, int argc, const VALUE *argv,
rb_block_call_func_t bl_proc, int min_argc, int max_argc,
VALUE data2);
MJIT_SYMBOL_EXPORT_BEGIN
VALUE rb_vm_call0(struct rb_execution_context_struct *ec, VALUE recv, ID id, int argc, const VALUE *argv, const struct rb_callable_method_entry_struct *me, int kw_splat);
VALUE rb_vm_call_kw(struct rb_execution_context_struct *ec, VALUE recv, VALUE id, int argc, const VALUE *argv, const struct rb_callable_method_entry_struct *me, int kw_splat);
VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj, int argc, const VALUE *argv, int priv);
MJIT_SYMBOL_EXPORT_END
void rb_check_stack_overflow(void);
/* vm_insnhelper.c */
VALUE rb_equal_opt(VALUE obj1, VALUE obj2);

View file

@ -12,7 +12,7 @@
# define RUBY_VERSION_MINOR RUBY_API_VERSION_MINOR
#define RUBY_VERSION_TEENY 0
#define RUBY_RELEASE_DATE RUBY_RELEASE_YEAR_STR"-"RUBY_RELEASE_MONTH_STR"-"RUBY_RELEASE_DAY_STR
#define RUBY_PATCHLEVEL 50
#define RUBY_PATCHLEVEL 51
#define RUBY_RELEASE_YEAR 2021
#define RUBY_RELEASE_MONTH 3

12
vm.c
View file

@ -2589,6 +2589,18 @@ rb_vm_mark(void *ptr)
rb_gc_mark_values(RUBY_NSIG, vm->trap_list.cmd);
rb_id_table_foreach_values(vm->negative_cme_table, vm_mark_negative_cme, NULL);
for (i=0; i<VM_GLOBAL_CC_CACHE_TABLE_SIZE; i++) {
const struct rb_callcache *cc = vm->global_cc_cache_table[i];
if (cc != NULL) {
if (!vm_cc_invalidated_p(cc)) {
rb_gc_mark((VALUE)cc);
}
else {
vm->global_cc_cache_table[i] = NULL;
}
}
}
mjit_mark();
}

View file

@ -710,8 +710,6 @@ setup_parameters_complex(rb_execution_context_t * const ec, const rb_iseq_t * co
return opt_pc;
}
void rb_backtrace_use_iseq_first_lineno_for_last_location(VALUE self); /* vm_backtrace.c */
static void
raise_argument_error(rb_execution_context_t *ec, const rb_iseq_t *iseq, const VALUE exc)
{

View file

@ -375,18 +375,6 @@ extern const struct rb_callcache *rb_vm_empty_cc(void);
/* callcache: mutate */
static inline void
vm_cc_cme_set(const struct rb_callcache *cc, const struct rb_callable_method_entry_struct *cme)
{
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
VM_ASSERT(cc != vm_cc_empty());
VM_ASSERT(vm_cc_cme(cc) != NULL);
VM_ASSERT(vm_cc_cme(cc)->called_id == cme->called_id);
VM_ASSERT(!vm_cc_markable(cc)); // only used for vm_eval.c
*((const struct rb_callable_method_entry_struct **)&cc->cme_) = cme;
}
static inline void
vm_cc_call_set(const struct rb_callcache *cc, vm_call_handler call)
{

View file

@ -658,6 +658,11 @@ typedef struct rb_vm_struct {
struct rb_id_table *negative_cme_table;
#ifndef VM_GLOBAL_CC_CACHE_TABLE_SIZE
#define VM_GLOBAL_CC_CACHE_TABLE_SIZE 1023
#endif
const struct rb_callcache *global_cc_cache_table[VM_GLOBAL_CC_CACHE_TABLE_SIZE]; // vm_eval.c
#if USE_VM_CLOCK
uint32_t clock;
#endif

298
vm_eval.c
View file

@ -48,7 +48,22 @@ rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
struct rb_calling_info calling = {
.ci = &VM_CI_ON_STACK(id, kw_splat ? VM_CALL_KW_SPLAT : 0, argc, NULL),
.cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, { 0 }, cme),
.block_handler = VM_BLOCK_HANDLER_NONE,
.block_handler = vm_passed_block_handler(ec),
.recv = recv,
.argc = argc,
.kw_splat = kw_splat,
};
return vm_call0_body(ec, &calling, argv);
}
static inline VALUE
vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE *argv, const struct rb_callcache *cc, int kw_splat)
{
struct rb_calling_info calling = {
.ci = &VM_CI_ON_STACK(id, kw_splat ? VM_CALL_KW_SPLAT : 0, argc, NULL),
.cc = cc,
.block_handler = vm_passed_block_handler(ec),
.recv = recv,
.argc = argc,
.kw_splat = kw_splat,
@ -57,13 +72,38 @@ rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
return vm_call0_body(ec, &calling, argv);
}
static VALUE
vm_call0_cme(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, const rb_callable_method_entry_t *cme)
{
calling->cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, { 0 }, cme);
return vm_call0_body(ec, calling, argv);
}
static VALUE
vm_call0_super(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, VALUE klass, enum method_missing_reason ex)
{
ID mid = vm_ci_mid(calling->ci);
klass = RCLASS_SUPER(klass);
if (klass) {
const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
if (cme) {
RUBY_VM_CHECK_INTS(ec);
return vm_call0_cme(ec, calling, argv, cme);
}
}
vm_passed_block_handler_set(ec, calling->block_handler);
return method_missing(ec, calling->recv, mid, calling->argc, argv, ex, calling->kw_splat);
}
static VALUE
vm_call0_cfunc_with_frame(rb_execution_context_t* ec, struct rb_calling_info *calling, const VALUE *argv)
{
const struct rb_callinfo *ci = calling->ci;
const struct rb_callcache *cc = calling->cc;
VALUE val;
const rb_callable_method_entry_t *me = vm_cc_cme(cc);
const rb_callable_method_entry_t *me = vm_cc_cme(calling->cc);
const rb_method_cfunc_t *cfunc = UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
int len = cfunc->argc;
VALUE recv = calling->recv;
@ -115,12 +155,8 @@ vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const
{
const struct rb_callinfo *ci = calling->ci;
const struct rb_callcache *cc = calling->cc;
VALUE ret;
calling->block_handler = vm_passed_block_handler(ec);
again:
switch (vm_cc_cme(cc)->def->type) {
case VM_METHOD_TYPE_ISEQ:
{
@ -169,38 +205,27 @@ vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const
ret = vm_call_bmethod_body(ec, calling, argv);
goto success;
case VM_METHOD_TYPE_ZSUPER:
{
VALUE klass = RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class);
return vm_call0_super(ec, calling, argv, klass, MISSING_SUPER);
}
case VM_METHOD_TYPE_REFINED:
{
const rb_method_type_t type = vm_cc_cme(cc)->def->type;
VALUE super_class = vm_cc_cme(cc)->defined_class;
const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
if (type == VM_METHOD_TYPE_ZSUPER) {
super_class = RCLASS_ORIGIN(super_class);
}
else if (vm_cc_cme(cc)->def->body.refined.orig_me) {
vm_cc_cme_set(cc, refined_method_callable_without_refinement(vm_cc_cme(cc)));
goto again;
}
super_class = RCLASS_SUPER(super_class);
if (super_class) {
vm_cc_cme_set(cc, rb_callable_method_entry(super_class, vm_ci_mid(ci)));
if (vm_cc_cme(cc)) {
RUBY_VM_CHECK_INTS(ec);
goto again;
}
if (cme->def->body.refined.orig_me) {
const rb_callable_method_entry_t *orig_cme = refined_method_callable_without_refinement(cme);
return vm_call0_cme(ec, calling, argv, orig_cme);
}
enum method_missing_reason ex = (type == VM_METHOD_TYPE_ZSUPER) ? MISSING_SUPER : 0;
ret = method_missing(ec, calling->recv, vm_ci_mid(ci), calling->argc, argv, ex, calling->kw_splat);
goto success;
VALUE klass = cme->defined_class;
return vm_call0_super(ec, calling, argv, klass, 0);
}
case VM_METHOD_TYPE_ALIAS:
vm_cc_cme_set(cc, aliased_callable_method_entry(vm_cc_cme(cc)));
goto again;
return vm_call0_cme(ec, calling, argv, aliased_callable_method_entry(vm_cc_cme(cc)));
case VM_METHOD_TYPE_MISSING:
{
vm_passed_block_handler_set(ec, calling->block_handler);
vm_passed_block_handler_set(ec, calling->block_handler);
return method_missing(ec, calling->recv, vm_ci_mid(ci), calling->argc,
argv, MISSING_NOENTRY, calling->kw_splat);
}
@ -302,9 +327,116 @@ stack_check(rb_execution_context_t *ec)
#ifndef MJIT_HEADER
void
rb_check_stack_overflow(void)
{
#ifndef RB_THREAD_LOCAL_SPECIFIER
if (!ruby_current_ec_key) return;
#endif
rb_execution_context_t *ec = GET_EC();
if (ec) stack_check(ec);
}
NORETURN(static void uncallable_object(VALUE recv, ID mid));
static inline const rb_callable_method_entry_t *rb_search_method_entry(VALUE recv, ID mid);
static inline enum method_missing_reason rb_method_call_status(rb_execution_context_t *ec, const rb_callable_method_entry_t *me, call_type scope, VALUE self);
static const struct rb_callcache *
cc_new(VALUE klass, ID mid, int argc, const rb_callable_method_entry_t *cme)
{
const struct rb_callcache *cc;
RB_VM_LOCK_ENTER();
{
struct rb_class_cc_entries *ccs;
struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
if (rb_id_table_lookup(cc_tbl, mid, (VALUE*)&ccs)) {
// ok
}
else {
ccs = vm_ccs_create(klass, cme);
rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
}
if (ccs->len > 0) {
cc = ccs->entries[0].cc;
}
else {
const struct rb_callinfo *ci = vm_ci_new(mid, 0, argc, false); // TODO: proper ci
cc = vm_cc_new(klass, cme, vm_call_general);
METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);
vm_ccs_push(klass, ccs, ci, cc);
}
}
RB_VM_LOCK_LEAVE();
return cc;
}
static VALUE
gccct_hash(VALUE klass, ID mid)
{
return (klass >> 3) ^ (VALUE)mid;
}
NOINLINE(static const struct rb_callcache *gccct_method_search_slowpath(rb_vm_t *vm, VALUE klass, ID mid, int argc, unsigned int index));
static const struct rb_callcache *
gccct_method_search_slowpath(rb_vm_t *vm, VALUE klass, ID mid, int argc, unsigned int index)
{
const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
const struct rb_callcache *cc;
if (cme != NULL) {
cc = cc_new(klass, mid, argc, cme);
}
else {
cc = NULL;
}
return vm->global_cc_cache_table[index] = cc;
}
static inline const struct rb_callcache *
gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc)
{
VALUE klass;
if (!SPECIAL_CONST_P(recv)) {
klass = RBASIC_CLASS(recv);
if (UNLIKELY(!klass)) uncallable_object(recv, mid);
}
else {
klass = CLASS_OF(recv);
}
// search global method cache
unsigned int index = (unsigned int)(gccct_hash(klass, mid) % VM_GLOBAL_CC_CACHE_TABLE_SIZE);
rb_vm_t *vm = rb_ec_vm_ptr(ec);
const struct rb_callcache *cc = vm->global_cc_cache_table[index];
if (LIKELY(cc)) {
if (LIKELY(vm_cc_class_check(cc, klass))) {
const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
if (LIKELY(!METHOD_ENTRY_INVALIDATED(cme) &&
cme->called_id == mid)) {
VM_ASSERT(vm_cc_cme(cc) == rb_callable_method_entry(klass, mid));
RB_DEBUG_COUNTER_INC(gccct_hit);
return cc;
}
}
}
else {
RB_DEBUG_COUNTER_INC(gccct_null);
}
RB_DEBUG_COUNTER_INC(gccct_miss);
return gccct_method_search_slowpath(vm, klass, mid, argc, index);
}
/*!
* \internal
* calls the specified method.
@ -326,7 +458,6 @@ rb_call0(rb_execution_context_t *ec,
VALUE recv, ID mid, int argc, const VALUE *argv,
call_type call_scope, VALUE self)
{
const rb_callable_method_entry_t *me;
enum method_missing_reason call_status;
call_type scope = call_scope;
int kw_splat = RB_NO_KEYWORDS;
@ -344,21 +475,34 @@ rb_call0(rb_execution_context_t *ec,
break;
}
const struct rb_callcache *cc = gccct_method_search(ec, recv, mid, argc);
if (scope == CALL_PUBLIC) {
RB_DEBUG_COUNTER_INC(call0_public);
me = rb_callable_method_entry_with_refinements(CLASS_OF(recv), mid, NULL);
const rb_callable_method_entry_t *cc_cme = cc ? vm_cc_cme(cc) : NULL;
const rb_callable_method_entry_t *cme = callable_method_entry_refeinements0(CLASS_OF(recv), mid, NULL, true, cc_cme);
call_status = rb_method_call_status(ec, cme, scope, self);
if (UNLIKELY(call_status != MISSING_NONE)) {
return method_missing(ec, recv, mid, argc, argv, call_status, kw_splat);
}
else if (UNLIKELY(cc_cme != cme)) { // refinement is solved
stack_check(ec);
return rb_vm_call_kw(ec, recv, mid, argc, argv, cme, kw_splat);
}
}
else {
RB_DEBUG_COUNTER_INC(call0_other);
me = rb_search_method_entry(recv, mid);
}
call_status = rb_method_call_status(ec, me, scope, self);
call_status = rb_method_call_status(ec, cc ? vm_cc_cme(cc) : NULL, scope, self);
if (call_status != MISSING_NONE) {
return method_missing(ec, recv, mid, argc, argv, call_status, kw_splat);
if (UNLIKELY(call_status != MISSING_NONE)) {
return method_missing(ec, recv, mid, argc, argv, call_status, kw_splat);
}
}
stack_check(ec);
return rb_vm_call_kw(ec, recv, mid, argc, argv, me, kw_splat);
return vm_call0_cc(ec, recv, mid, argc, argv, cc, kw_splat);
}
struct rescue_funcall_args {
@ -576,7 +720,6 @@ rb_type_str(enum ruby_value_type type)
return NULL;
}
NORETURN(static void uncallable_object(VALUE recv, ID mid));
static void
uncallable_object(VALUE recv, ID mid)
{
@ -850,6 +993,45 @@ method_missing(rb_execution_context_t *ec, VALUE obj, ID id, int argc, const VAL
#ifndef MJIT_HEADER
static inline VALUE
rb_funcallv_scope(VALUE recv, ID mid, int argc, const VALUE *argv, call_type scope)
{
rb_execution_context_t *ec = GET_EC();
const struct rb_callcache *cc = gccct_method_search(ec, recv, mid, argc);
VALUE self = ec->cfp->self;
if (LIKELY(cc) &&
LIKELY(rb_method_call_status(ec, vm_cc_cme(cc), scope, self) == MISSING_NONE)) {
// fastpath
return vm_call0_cc(ec, recv, mid, argc, argv, cc, false);
}
else {
return rb_call0(ec, recv, mid, argc, argv, scope, self);
}
}
#ifdef rb_funcallv
#undef rb_funcallv
#endif
/*!
* Calls a method
* \param recv receiver of the method
* \param mid an ID that represents the name of the method
* \param argc the number of arguments
* \param argv pointer to an array of method arguments
*/
VALUE
rb_funcallv(VALUE recv, ID mid, int argc, const VALUE *argv)
{
return rb_funcallv_scope(recv, mid, argc, argv, CALL_FCALL);
}
VALUE
rb_funcallv_kw(VALUE recv, ID mid, int argc, const VALUE *argv, int kw_splat)
{
return rb_call(recv, mid, argc, argv, kw_splat ? CALL_FCALL_KW : CALL_FCALL);
}
/*!
* Calls a method
* \param recv receiver of the method
@ -875,7 +1057,8 @@ rb_apply(VALUE recv, ID mid, VALUE args)
}
argv = ALLOCA_N(VALUE, argc);
MEMCPY(argv, RARRAY_CONST_PTR_TRANSIENT(args), VALUE, argc);
return rb_call(recv, mid, argc, argv, CALL_FCALL);
return rb_funcallv(recv, mid, argc, argv);
}
#ifdef rb_funcall
@ -911,29 +1094,7 @@ rb_funcall(VALUE recv, ID mid, int n, ...)
else {
argv = 0;
}
return rb_call(recv, mid, n, argv, CALL_FCALL);
}
#ifdef rb_funcallv
#undef rb_funcallv
#endif
/*!
* Calls a method
* \param recv receiver of the method
* \param mid an ID that represents the name of the method
* \param argc the number of arguments
* \param argv pointer to an array of method arguments
*/
VALUE
rb_funcallv(VALUE recv, ID mid, int argc, const VALUE *argv)
{
return rb_call(recv, mid, argc, argv, CALL_FCALL);
}
VALUE
rb_funcallv_kw(VALUE recv, ID mid, int argc, const VALUE *argv, int kw_splat)
{
return rb_call(recv, mid, argc, argv, kw_splat ? CALL_FCALL_KW : CALL_FCALL);
return rb_funcallv(recv, mid, n, argv);
}
/*!
@ -963,7 +1124,6 @@ rb_check_funcall_basic_kw(VALUE recv, ID mid, VALUE ancestor, int argc, const VA
return Qundef;
}
/*!
* Calls a method.
*
@ -976,7 +1136,7 @@ rb_check_funcall_basic_kw(VALUE recv, ID mid, VALUE ancestor, int argc, const VA
VALUE
rb_funcallv_public(VALUE recv, ID mid, int argc, const VALUE *argv)
{
return rb_call(recv, mid, argc, argv, CALL_PUBLIC);
return rb_funcallv_scope(recv, mid, argc, argv, CALL_PUBLIC);
}
VALUE
@ -989,7 +1149,7 @@ VALUE
rb_funcall_passing_block(VALUE recv, ID mid, int argc, const VALUE *argv)
{
PASS_PASSED_BLOCK_HANDLER();
return rb_call(recv, mid, argc, argv, CALL_PUBLIC);
return rb_funcallv_public(recv, mid, argc, argv);
}
VALUE
@ -1006,7 +1166,7 @@ rb_funcall_with_block(VALUE recv, ID mid, int argc, const VALUE *argv, VALUE pas
vm_passed_block_handler_set(GET_EC(), passed_procval);
}
return rb_call(recv, mid, argc, argv, CALL_PUBLIC);
return rb_funcallv_public(recv, mid, argc, argv);
}
VALUE
@ -1181,10 +1341,10 @@ VALUE
rb_yield(VALUE val)
{
if (val == Qundef) {
return rb_yield_0(0, 0);
return rb_yield_0(0, NULL);
}
else {
return rb_yield_1(val);
return rb_yield_0(1, &val);
}
}

View file

@ -1856,6 +1856,7 @@ check_cfunc(const rb_callable_method_entry_t *me, VALUE (*func)())
static inline int
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, VALUE (*func)())
{
VM_ASSERT(iseq != NULL);
const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
return check_cfunc(vm_cc_cme(cc), func);
}
@ -1892,7 +1893,7 @@ FLONUM_2_P(VALUE a, VALUE b)
}
static VALUE
opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
opt_equality_specialized(VALUE recv, VALUE obj)
{
if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
goto compare_by_identity;
@ -1904,7 +1905,7 @@ opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
goto compare_by_identity;
}
else if (SPECIAL_CONST_P(recv)) {
goto compare_by_funcall;
//
}
else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
double a = RFLOAT_VALUE(recv);
@ -1934,11 +1935,7 @@ opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
return rb_str_eql_internal(obj, recv);
}
}
compare_by_funcall:
if (! vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
return Qundef;
}
return Qundef;
compare_by_identity:
if (recv == obj) {
@ -1949,47 +1946,77 @@ opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
}
}
static VALUE
opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
{
VM_ASSERT(cd_owner != NULL);
VALUE val = opt_equality_specialized(recv, obj);
if (val != Qundef) return val;
if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
return Qundef;
}
else {
if (recv == obj) {
return Qtrue;
}
else {
return Qfalse;
}
}
}
#undef EQ_UNREDEFINED_P
#ifndef MJIT_HEADER
static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
if (recv == obj) {
return Qtrue;
}
else {
return Qfalse;
}
}
else {
return Qundef;
}
}
static VALUE
opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
{
VALUE val = opt_equality_specialized(recv, obj);
if (val != Qundef) {
return val;
}
else {
return opt_equality_by_mid_slowpath(recv, obj, mid);
}
}
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
STATIC_ASSERT(idEq_is_embeddable, VM_CI_EMBEDDABLE_P(idEq, 0, 1, 0));
#if USE_EMBED_CI
static struct rb_call_data cd = {
.ci = vm_ci_new_id(idEq, 0, 1, 0),
};
#else
struct rb_call_data cd = {
.ci = &VM_CI_ON_STACK(idEq, 0, 1, 0),
};
#endif
cd.cc = &vm_empty_cc;
return opt_equality(NULL, obj1, obj2, &cd);
return opt_equality_by_mid(obj1, obj2, idEq);
}
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
STATIC_ASSERT(idEqlP_is_embeddable, VM_CI_EMBEDDABLE_P(idEqlP, 0, 1, 0));
#if USE_EMBED_CI
static struct rb_call_data cd = {
.ci = vm_ci_new_id(idEqlP, 0, 1, 0),
};
#else
struct rb_call_data cd = {
.ci = &VM_CI_ON_STACK(idEqlP, 0, 1, 0),
};
#endif
cd.cc = &vm_empty_cc;
return opt_equality(NULL, obj1, obj2, &cd);
return opt_equality_by_mid(obj1, obj2, idEqlP);
}
#endif
#endif // MJIT_HEADER
extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
@ -2362,6 +2389,7 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
{
const struct rb_callinfo *ci = calling->ci;
const struct rb_callcache *cc = calling->cc;
bool cacheable_ci = vm_ci_markable(ci);
if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
if (LIKELY(rb_simple_iseq_p(iseq))) {
@ -2375,7 +2403,7 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
VM_ASSERT(ci == calling->ci);
VM_ASSERT(cc == calling->cc);
CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), vm_call_iseq_optimizable_p(ci, cc));
CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
return 0;
}
else if (rb_iseq_only_optparam_p(iseq)) {
@ -2395,12 +2423,12 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
}
else {
CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
!IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
}
/* initialize opt vars for self-references */
@ -2428,7 +2456,7 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
return 0;
}
@ -2441,7 +2469,7 @@ vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
if (klocals[kw_param->num] == INT2FIX(0)) {
/* copy from default_values */
CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
cacheable_ci && METHOD_ENTRY_CACHEABLE(vm_cc_cme(cc)));
}
return 0;

View file

@ -1186,11 +1186,10 @@ rb_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
}
static const rb_callable_method_entry_t *
callable_method_entry_refeinements(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements)
callable_method_entry_refeinements0(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements,
const rb_callable_method_entry_t *cme)
{
const rb_callable_method_entry_t *cme = callable_method_entry(klass, id, defined_class_ptr);
if (cme == NULL || cme->def->type != VM_METHOD_TYPE_REFINED) {
if (cme == NULL || LIKELY(cme->def->type != VM_METHOD_TYPE_REFINED)) {
return cme;
}
else {
@ -1200,6 +1199,13 @@ callable_method_entry_refeinements(VALUE klass, ID id, VALUE *defined_class_ptr,
}
}
static const rb_callable_method_entry_t *
callable_method_entry_refeinements(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements)
{
const rb_callable_method_entry_t *cme = callable_method_entry(klass, id, defined_class_ptr);
return callable_method_entry_refeinements0(klass, id, defined_class_ptr, with_refinements, cme);
}
MJIT_FUNC_EXPORTED const rb_callable_method_entry_t *
rb_callable_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{