#ifndef RUBY_METHOD_H
#define RUBY_METHOD_H 1

/**********************************************************************

  method.h -

  $Author$
  created at: Wed Jul 15 20:02:33 2009

  Copyright (C) 2009 Koichi Sasada

**********************************************************************/

#include "internal.h"
#include "internal/imemo.h"
#include "internal/compilers.h"
#include "internal/static_assert.h"
#ifndef END_OF_ENUMERATION
# if defined(__GNUC__) &&! defined(__STRICT_ANSI__)
# define END_OF_ENUMERATION(key)
# else
# define END_OF_ENUMERATION(key) END_OF_##key##_PLACEHOLDER = 0
# endif
#endif
/* cref */
typedef enum {
    METHOD_VISI_UNDEF     = 0x00,
    METHOD_VISI_PUBLIC    = 0x01,
    METHOD_VISI_PRIVATE   = 0x02,
    METHOD_VISI_PROTECTED = 0x03,

    METHOD_VISI_MASK = 0x03
} rb_method_visibility_t;

typedef struct rb_scope_visi_struct {
    BITFIELD(rb_method_visibility_t, method_visi, 3);
    unsigned int module_func : 1;
} rb_scope_visibility_t;

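/* rb_scope_visibility_t records the default visibility in effect for the
 * current lexical scope (e.g. after a bare `private` call) and whether
 * `module_function` mode is active; it is embedded in the CREF below. */
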
/*! CREF (Class REFerence) */
typedef struct rb_cref_struct {
    VALUE flags;
    VALUE refinements;
    VALUE klass_or_self;
    struct rb_cref_struct * next;
    const rb_scope_visibility_t scope_visi;
} rb_cref_t;
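
/* CREFs are chained through `next` from the innermost lexical scope outward;
 * the VM walks this chain for constant lookup, refinement activation
 * (`refinements`), and the per-scope default visibility recorded above. */
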
/* method data type */
typedef struct rb_method_entry_struct {
    VALUE flags;
    VALUE defined_class;
    struct rb_method_definition_struct * const def;
    ID called_id;
    VALUE owner;
} rb_method_entry_t;

typedef struct rb_callable_method_entry_struct { /* same fields as rb_method_entry_t */
    VALUE flags;
    const VALUE defined_class;
    struct rb_method_definition_struct * const def;
    ID called_id;
    const VALUE owner;
} rb_callable_method_entry_t;
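
/* As the layout comment above notes, a callable method entry mirrors
 * rb_method_entry_t; the difference is that its defined_class is always
 * resolved, so the entry can be used directly for method invocation
 * (see rb_method_entry_complement_defined_class() below). */
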
#define METHOD_ENTRY_VISI(me) (rb_method_visibility_t)(((me)->flags & (IMEMO_FL_USER0 | IMEMO_FL_USER1)) >> (IMEMO_FL_USHIFT+0))
#define METHOD_ENTRY_BASIC(me) (int) (((me)->flags & (IMEMO_FL_USER2 )) >> (IMEMO_FL_USHIFT+2))
#define METHOD_ENTRY_COMPLEMENTED(me) ((me)->flags & IMEMO_FL_USER3)
#define METHOD_ENTRY_COMPLEMENTED_SET(me) ((me)->flags |= IMEMO_FL_USER3)
#define METHOD_ENTRY_CACHED(me) ((me)->flags & IMEMO_FL_USER4)
#define METHOD_ENTRY_CACHED_SET(me) ((me)->flags |= IMEMO_FL_USER4)
#define METHOD_ENTRY_INVALIDATED(me) ((me)->flags & IMEMO_FL_USER5)
#define METHOD_ENTRY_INVALIDATED_SET(me) ((me)->flags |= IMEMO_FL_USER5)
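
/* IMEMO user-flag bits used by the macros above:
 *   USER0-1  visibility (METHOD_ENTRY_VISI)
 *   USER2    basic flag (METHOD_ENTRY_BASIC)
 *   USER3    complemented entry (METHOD_ENTRY_COMPLEMENTED)
 *   USER4    referenced from a call cache (METHOD_ENTRY_CACHED)
 *   USER5    invalidated, e.g. by redefinition (METHOD_ENTRY_INVALIDATED)
 * The inline setters below rewrite USER0-2 and leave the other bits alone. */
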
static inline void
METHOD_ENTRY_VISI_SET(rb_method_entry_t *me, rb_method_visibility_t visi)
{
    VM_ASSERT((int)visi >= 0 && visi <= 3);
    me->flags = (me->flags & ~(IMEMO_FL_USER0 | IMEMO_FL_USER1)) | (visi << (IMEMO_FL_USHIFT+0));
}

static inline void
METHOD_ENTRY_BASIC_SET(rb_method_entry_t *me, unsigned int basic)
{
    VM_ASSERT(basic <= 1);
    me->flags = (me->flags & ~(IMEMO_FL_USER2)) | (basic << (IMEMO_FL_USHIFT+2));
}

static inline void
METHOD_ENTRY_FLAGS_SET(rb_method_entry_t *me, rb_method_visibility_t visi, unsigned int basic)
{
    VM_ASSERT((int)visi >= 0 && visi <= 3);
    VM_ASSERT(basic <= 1);
    me->flags =
        (me->flags & ~(IMEMO_FL_USER0|IMEMO_FL_USER1|IMEMO_FL_USER2)) |
        ((visi << (IMEMO_FL_USHIFT+0)) | (basic << (IMEMO_FL_USHIFT+2)));
}

static inline void
METHOD_ENTRY_FLAGS_COPY(rb_method_entry_t *dst, const rb_method_entry_t *src)
{
    dst->flags =
        (dst->flags & ~(IMEMO_FL_USER0|IMEMO_FL_USER1|IMEMO_FL_USER2)) |
        (src->flags & (IMEMO_FL_USER0|IMEMO_FL_USER1|IMEMO_FL_USER2));
}

typedef enum {
    VM_METHOD_TYPE_ISEQ,      /*!< Ruby method */
    VM_METHOD_TYPE_CFUNC,     /*!< C method */
    VM_METHOD_TYPE_ATTRSET,   /*!< attr_writer or attr_accessor */
    VM_METHOD_TYPE_IVAR,      /*!< attr_reader or attr_accessor */
    VM_METHOD_TYPE_BMETHOD,
    VM_METHOD_TYPE_ZSUPER,
    VM_METHOD_TYPE_ALIAS,
    VM_METHOD_TYPE_UNDEF,
    VM_METHOD_TYPE_NOTIMPLEMENTED,
    VM_METHOD_TYPE_OPTIMIZED, /*!< Kernel#send, Proc#call, etc */
    VM_METHOD_TYPE_MISSING,   /*!< wrapper for method_missing(id) */
    VM_METHOD_TYPE_REFINED,   /*!< refinement */

    END_OF_ENUMERATION(VM_METHOD_TYPE)
} rb_method_type_t;
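
/* Rough correspondence between Ruby-level definitions and the types above:
 *   def foo; end                 -> VM_METHOD_TYPE_ISEQ
 *   rb_define_method(...)        -> VM_METHOD_TYPE_CFUNC
 *   attr_writer :foo             -> VM_METHOD_TYPE_ATTRSET
 *   attr_reader :foo             -> VM_METHOD_TYPE_IVAR
 *   define_method(:foo) { ... }  -> VM_METHOD_TYPE_BMETHOD
 *   alias bar foo                -> VM_METHOD_TYPE_ALIAS
 *   undef foo                    -> VM_METHOD_TYPE_UNDEF */
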
#define VM_METHOD_TYPE_MINIMUM_BITS 4
STATIC_ASSERT(VM_METHOD_TYPE_MINIMUM_BITS,
              VM_METHOD_TYPE_REFINED <= (1<<VM_METHOD_TYPE_MINIMUM_BITS));

#ifndef rb_iseq_t
typedef struct rb_iseq_struct rb_iseq_t;
#define rb_iseq_t rb_iseq_t
#endif
typedef struct rb_method_iseq_struct {
    const rb_iseq_t * iseqptr; /*!< iseq pointer, should be separated from iseqval */
    rb_cref_t * cref;          /*!< class reference, should be marked */
} rb_method_iseq_t; /* check rb_add_method_iseq() when modifying the fields */

typedef struct rb_method_cfunc_struct {
    VALUE (*func)(ANYARGS);
    VALUE (*invoker)(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS));
    int argc;
} rb_method_cfunc_t;
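
/* `argc` follows the rb_define_method() convention: 0..N for fixed arity,
 * -1 for func(argc, argv, self), and -2 for func(self, args_ary); `invoker`
 * is the calling shim selected to match that arity. */
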
typedef struct rb_method_attr_struct {
    ID id;
    VALUE location; /* should be marked */
} rb_method_attr_t;

typedef struct rb_method_alias_struct {
    struct rb_method_entry_struct * original_me; /* original_me->klass is original owner */
} rb_method_alias_t;

typedef struct rb_method_refined_struct {
    struct rb_method_entry_struct * orig_me;
    VALUE owner;
} rb_method_refined_t;

typedef struct rb_method_bmethod_struct {
    VALUE proc; /* should be marked */
    struct rb_hook_list_struct *hooks;
    VALUE defined_ractor;
} rb_method_bmethod_t;
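
/* BMETHODs wrap a block promoted to a method (Module#define_method and
 * friends): `proc` holds the block, `hooks` any TracePoint hook list attached
 * to it, and `defined_ractor` the Ractor in which it was defined. */
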
enum method_optimized_type {
    OPTIMIZED_METHOD_TYPE_SEND,
    OPTIMIZED_METHOD_TYPE_CALL,
    OPTIMIZED_METHOD_TYPE_BLOCK_CALL,
    OPTIMIZED_METHOD_TYPE_STRUCT_AREF,
    OPTIMIZED_METHOD_TYPE_STRUCT_ASET,
    OPTIMIZED_METHOD_TYPE__MAX
};

typedef struct rb_method_optimized {
    enum method_optimized_type type;
    unsigned int index;
} rb_method_optimized_t;
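
/* "Optimized" methods are core methods the VM dispatches specially instead of
 * going through a cfunc, e.g. Kernel#send, Proc#call, and Struct member
 * readers/writers; for the STRUCT_AREF/ASET types, `index` is the Struct
 * member's index. */
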
struct rb_method_definition_struct {
    BITFIELD(rb_method_type_t, type, VM_METHOD_TYPE_MINIMUM_BITS);
    unsigned int iseq_overload: 1;
    int alias_count : 27;
    int complemented_count : 28;
    unsigned int no_redef_warning: 1;

    union {
        rb_method_iseq_t iseq;
        rb_method_cfunc_t cfunc;
        rb_method_attr_t attr;
        rb_method_alias_t alias;
        rb_method_refined_t refined;
        rb_method_bmethod_t bmethod;
        rb_method_optimized_t optimized;
    } body;

    ID original_id;
    uintptr_t method_serial;
};
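
/* A single definition may be shared by several method entries (aliases and
 * module-complemented entries); alias_count and complemented_count track those
 * sharers so the definition is released only when its last user goes away.
 * method_serial is a process-unique serial number for the definition. */
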
struct rb_id_table;
typedef struct rb_method_definition_struct rb_method_definition_t;
STATIC_ASSERT(sizeof_method_def, offsetof(rb_method_definition_t, body)==8);
#define UNDEFINED_METHOD_ENTRY_P(me) (!(me) || !(me)->def || (me)->def->type == VM_METHOD_TYPE_UNDEF)
#define UNDEFINED_REFINED_METHOD_P(def) \
    ((def)->type == VM_METHOD_TYPE_REFINED && \
     UNDEFINED_METHOD_ENTRY_P((def)->body.refined.orig_me))
void rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *option, rb_method_visibility_t visi);
void rb_add_method_cfunc(VALUE klass, ID mid, VALUE (*func)(ANYARGS), int argc, rb_method_visibility_t visi);
void rb_add_method_iseq(VALUE klass, ID mid, const rb_iseq_t *iseq, rb_cref_t *cref, rb_method_visibility_t visi);
void rb_add_method_optimized(VALUE klass, ID mid, enum method_optimized_type, unsigned int index, rb_method_visibility_t visi);
void rb_add_refined_method_entry(VALUE refined_class, ID mid);
rb_method_entry_t *rb_method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *, rb_method_visibility_t noex);
rb_method_entry_t *rb_method_entry_create(ID called_id, VALUE klass, rb_method_visibility_t visi, const rb_method_definition_t *def);
const rb_method_entry_t *rb_method_entry_at(VALUE obj, ID id);
const rb_method_entry_t *rb_method_entry(VALUE klass, ID id);
const rb_method_entry_t *rb_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class);
const rb_method_entry_t *rb_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class);
const rb_method_entry_t *rb_resolve_refined_method(VALUE refinements, const rb_method_entry_t *me);
RUBY_SYMBOL_EXPORT_BEGIN
const rb_method_entry_t *rb_resolve_me_location(const rb_method_entry_t *, VALUE[5]);
RUBY_SYMBOL_EXPORT_END
const rb_callable_method_entry_t *rb_callable_method_entry(VALUE klass, ID id);
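
/* The _or_negative variant below may also return a negative cache entry
 * (recording that no method is defined) instead of NULL; it exists to support
 * YJIT's specialized respond_to?. */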
const rb_callable_method_entry_t *rb_callable_method_entry_or_negative(VALUE klass, ID id);
const rb_callable_method_entry_t *rb_callable_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class);
const rb_callable_method_entry_t *rb_callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class);
int rb_method_entry_arity(const rb_method_entry_t *me);
int rb_method_entry_eq(const rb_method_entry_t *m1, const rb_method_entry_t *m2);
st_index_t rb_hash_method_entry(st_index_t hash, const rb_method_entry_t *me);
VALUE rb_method_entry_location(const rb_method_entry_t *me);
void rb_free_method_entry(const rb_method_entry_t *me);
const rb_method_entry_t *rb_method_entry_clone(const rb_method_entry_t *me);
const rb_callable_method_entry_t *rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID called_id, VALUE defined_class);
void rb_method_entry_copy(rb_method_entry_t *dst, const rb_method_entry_t *src);
void rb_method_table_insert(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me);
void rb_scope_visibility_set(rb_method_visibility_t);
VALUE rb_unnamed_parameters(int arity);
void rb_clear_method_cache(VALUE klass_or_module, ID mid);
void rb_clear_method_cache_all(void);
#endif /* RUBY_METHOD_H */