From f6661f50854e0cdccb03ee516a21ce62adf6c802 Mon Sep 17 00:00:00 2001 From: Koichi Sasada Date: Fri, 16 Oct 2020 15:20:40 +0900 Subject: [PATCH] sync RClass::ext::iv_index_tbl iv_index_tbl manages instance variable indexes (ID -> index). This data structure is shared across ractors, so this patch introduces VM locks to synchronize access to it. The patch also makes the inline ivar cache (IVC) used by the getinstancevariable/setinstancevariable instructions updatable atomically: iv_index_tbl now maps (ID -> entry), where each entry holds the class serial and the index. An IVC points to such an entry, so refilling the cache is a single pointer store and therefore atomic. --- common.mk | 1 + compile.c | 2 + gc.c | 30 ++- insns.def | 4 +- internal/class.h | 8 +- iseq.c | 15 ++ mjit_compile.c | 10 +- st.c | 15 ++ tool/ruby_vm/views/_mjit_compile_ivar.erb | 14 +- variable.c | 290 +++++++++++++--------- vm_core.h | 3 +- vm_insnhelper.c | 73 +++--- vm_sync.c | 4 +- 13 files changed, 299 insertions(+), 170 deletions(-) diff --git a/common.mk b/common.mk index 5c87f9514b..e3bf8f9428 100644 --- a/common.mk +++ b/common.mk @@ -6760,6 +6760,7 @@ iseq.$(OBJEXT): $(hdrdir)/ruby.h iseq.$(OBJEXT): $(hdrdir)/ruby/ruby.h iseq.$(OBJEXT): $(top_srcdir)/internal/array.h iseq.$(OBJEXT): $(top_srcdir)/internal/bits.h +iseq.$(OBJEXT): $(top_srcdir)/internal/class.h iseq.$(OBJEXT): $(top_srcdir)/internal/compile.h iseq.$(OBJEXT): $(top_srcdir)/internal/compilers.h iseq.$(OBJEXT): $(top_srcdir)/internal/error.h diff --git a/compile.c b/compile.c index 0b9c276e1f..ff63dfe9aa 100644 --- a/compile.c +++ b/compile.c @@ -2332,6 +2332,8 @@ iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor) ic_index, body->is_size); } generated_iseq[code_index + 1 + j] = (VALUE)ic; + + if (type == TS_IVC) FL_SET(iseqv, ISEQ_MARKABLE_ISEQ); break; } case TS_CALLDATA: diff --git a/gc.c b/gc.c index 3b84026146..81d3bc2c27 100644 --- a/gc.c +++ b/gc.c @@ -2535,6 +2535,19 @@ rb_free_const_table(struct rb_id_table *tbl) rb_id_table_free(tbl); } +static int +free_iv_index_tbl_free_i(st_data_t key, st_data_t value, st_data_t data) +{ + xfree((void *)value); + return ST_CONTINUE; +} + +static void +iv_index_tbl_free(struct st_table *tbl) +{ + st_foreach(tbl, free_iv_index_tbl_free_i, 0); +} + // alive: if false, target pointers can be freed already. // To check it, we need objspace parameter.
static void @@ -2756,7 +2769,7 @@ obj_free(rb_objspace_t *objspace, VALUE obj) rb_free_const_table(RCLASS_CONST_TBL(obj)); } if (RCLASS_IV_INDEX_TBL(obj)) { - st_free_table(RCLASS_IV_INDEX_TBL(obj)); + iv_index_tbl_free(RCLASS_IV_INDEX_TBL(obj)); } if (RCLASS_EXT(obj)->subclasses) { if (BUILTIN_TYPE(obj) == T_MODULE) { @@ -4088,6 +4101,7 @@ obj_memsize_of(VALUE obj, int use_all_types) size += st_memsize(RCLASS_IV_TBL(obj)); } if (RCLASS_IV_INDEX_TBL(obj)) { + // TODO: more correct value size += st_memsize(RCLASS_IV_INDEX_TBL(obj)); } if (RCLASS(obj)->ptr->iv_tbl) { @@ -8543,12 +8557,26 @@ update_subclass_entries(rb_objspace_t *objspace, rb_subclass_entry_t *entry) } } +static int +update_iv_index_tbl_i(st_data_t key, st_data_t value, st_data_t arg) +{ + rb_objspace_t *objspace = (rb_objspace_t *)arg; + struct rb_iv_index_tbl_entry *ent = (struct rb_iv_index_tbl_entry *)value; + UPDATE_IF_MOVED(objspace, ent->class_value); + return ST_CONTINUE; +} + static void update_class_ext(rb_objspace_t *objspace, rb_classext_t *ext) { UPDATE_IF_MOVED(objspace, ext->origin_); UPDATE_IF_MOVED(objspace, ext->refined_class); update_subclass_entries(objspace, ext->subclasses); + + // ext->iv_index_tbl + if (ext->iv_index_tbl) { + st_foreach(ext->iv_index_tbl, update_iv_index_tbl_i, (st_data_t)objspace); + } } static void diff --git a/insns.def b/insns.def index f6f802f916..3dd65f12c7 100644 --- a/insns.def +++ b/insns.def @@ -213,7 +213,7 @@ getinstancevariable /* "instance variable not initialized" warning can be hooked. */ // attr bool leaf = false; /* has rb_warning() */ { - val = vm_getinstancevariable(GET_SELF(), id, ic); + val = vm_getinstancevariable(GET_ISEQ(), GET_SELF(), id, ic); } /* Set value of instance variable id of self to val. */ @@ -224,7 +224,7 @@ setinstancevariable () // attr bool leaf = false; /* has rb_check_frozen_internal() */ { - vm_setinstancevariable(GET_SELF(), id, val, ic); + vm_setinstancevariable(GET_ISEQ(), GET_SELF(), id, val, ic); } /* Get value of class variable id of klass as val. */ diff --git a/internal/class.h b/internal/class.h index bb12825162..eade920ff0 100644 --- a/internal/class.h +++ b/internal/class.h @@ -25,8 +25,14 @@ struct rb_subclass_entry { struct rb_subclass_entry *next; }; +struct rb_iv_index_tbl_entry { + uint32_t index; + rb_serial_t class_serial; + VALUE class_value; +}; + struct rb_classext_struct { - struct st_table *iv_index_tbl; + struct st_table *iv_index_tbl; // ID -> struct rb_iv_index_tbl_entry struct st_table *iv_tbl; #if SIZEOF_SERIAL_T == SIZEOF_VALUE /* otherwise m_tbl is in struct RClass */ struct rb_id_table *m_tbl; diff --git a/iseq.c b/iseq.c index 05a77c8ed6..2f10cd6223 100644 --- a/iseq.c +++ b/iseq.c @@ -23,6 +23,7 @@ #include "id_table.h" #include "internal.h" #include "internal/bits.h" +#include "internal/class.h" #include "internal/compile.h" #include "internal/error.h" #include "internal/file.h" @@ -180,6 +181,20 @@ iseq_extract_values(VALUE *code, size_t pos, iseq_value_itr_t * func, void *data } } break; + case TS_IVC: + { + IVC ivc = (IVC)code[pos + op_no + 1]; + if (ivc->entry) { + if (RB_TYPE_P(ivc->entry->class_value, T_NONE)) { + rb_bug("!! 
%u", ivc->entry->index); + } + VALUE nv = func(data, ivc->entry->class_value); + if (ivc->entry->class_value != nv) { + ivc->entry->class_value = nv; + } + } + } + break; case TS_ISE: { union iseq_inline_storage_entry *const is = (union iseq_inline_storage_entry *)code[pos + op_no + 1]; diff --git a/mjit_compile.c b/mjit_compile.c index d58377380e..6371acc8f9 100644 --- a/mjit_compile.c +++ b/mjit_compile.c @@ -443,17 +443,17 @@ init_ivar_compile_status(const struct rb_iseq_constant_body *body, struct compil if (insn == BIN(getinstancevariable) || insn == BIN(setinstancevariable)) { IVC ic = (IVC)body->iseq_encoded[pos+2]; IVC ic_copy = &(status->is_entries + ((union iseq_inline_storage_entry *)ic - body->is_entries))->iv_cache; - if (ic_copy->ic_serial) { // Only initialized (ic_serial > 0) IVCs are optimized + if (ic_copy->entry) { // Only initialized (ic_serial > 0) IVCs are optimized num_ivars++; - if (status->max_ivar_index < ic_copy->index) { - status->max_ivar_index = ic_copy->index; + if (status->max_ivar_index < ic_copy->entry->index) { + status->max_ivar_index = ic_copy->entry->index; } if (status->ivar_serial == 0) { - status->ivar_serial = ic_copy->ic_serial; + status->ivar_serial = ic_copy->entry->class_serial; } - else if (status->ivar_serial != ic_copy->ic_serial) { + else if (status->ivar_serial != ic_copy->entry->class_serial) { // Multiple classes have used this ISeq. Give up assuming one serial. status->merge_ivar_guards_p = false; return; diff --git a/st.c b/st.c index 8be466bf73..fe7a21cf80 100644 --- a/st.c +++ b/st.c @@ -2238,4 +2238,19 @@ rb_hash_bulk_insert_into_st_table(long argc, const VALUE *argv, VALUE hash) else st_insert_generic(tab, argc, argv, hash); } + +// to iterate iv_index_tbl +st_data_t +rb_st_nth_key(st_table *tab, st_index_t index) +{ + if (LIKELY(tab->entries_start == 0 && + tab->num_entries == tab->entries_bound && + index < tab->num_entries)) { + return tab->entries[index].key; + } + else { + rb_bug("unreachable"); + } +} + #endif diff --git a/tool/ruby_vm/views/_mjit_compile_ivar.erb b/tool/ruby_vm/views/_mjit_compile_ivar.erb index eb05f4de8e..01d35b07f6 100644 --- a/tool/ruby_vm/views/_mjit_compile_ivar.erb +++ b/tool/ruby_vm/views/_mjit_compile_ivar.erb @@ -16,18 +16,18 @@ % # compiler: Use copied IVC to avoid race condition IVC ic_copy = &(status->is_entries + ((union iseq_inline_storage_entry *)ic - body->is_entries))->iv_cache; % - if (!status->compile_info->disable_ivar_cache && ic_copy->ic_serial) { // Only initialized (ic_serial > 0) IVCs are optimized + if (!status->compile_info->disable_ivar_cache && ic_copy->entry) { // Only ic_copy is enabled. % # JIT: optimize away motion of sp and pc. This path does not call rb_warning() and so it's always leaf and not `handles_sp`. 
% # <%= render 'mjit_compile_pc_and_sp', locals: { insn: insn } -%> % % # JIT: prepare vm_getivar/vm_setivar arguments and variables fprintf(f, "{\n"); fprintf(f, " VALUE obj = GET_SELF();\n"); - fprintf(f, " const st_index_t index = %"PRIuSIZE";\n", ic_copy->index); + fprintf(f, " const uint32_t index = %u;\n", (ic_copy->entry->index)); if (status->merge_ivar_guards_p) { % # JIT: Access ivar without checking these VM_ASSERTed prerequisites as we checked them in the beginning of `mjit_compile_body` fprintf(f, " VM_ASSERT(RB_TYPE_P(obj, T_OBJECT));\n"); - fprintf(f, " VM_ASSERT((rb_serial_t)%"PRI_SERIALT_PREFIX"u == RCLASS_SERIAL(RBASIC(obj)->klass));\n", ic_copy->ic_serial); + fprintf(f, " VM_ASSERT((rb_serial_t)%"PRI_SERIALT_PREFIX"u == RCLASS_SERIAL(RBASIC(obj)->klass));\n", ic_copy->entry->class_serial); fprintf(f, " VM_ASSERT(index < ROBJECT_NUMIV(obj));\n"); % if insn.name == 'setinstancevariable' fprintf(f, " if (LIKELY(!RB_OBJ_FROZEN(obj) && %sRB_FL_ANY_RAW(obj, ROBJECT_EMBED))) {\n", status->max_ivar_index >= ROBJECT_EMBED_LEN_MAX ? "!" : ""); @@ -44,7 +44,7 @@ %end } else { - fprintf(f, " const rb_serial_t ic_serial = (rb_serial_t)%"PRI_SERIALT_PREFIX"u;\n", ic_copy->ic_serial); + fprintf(f, " const rb_serial_t ic_serial = (rb_serial_t)%"PRI_SERIALT_PREFIX"u;\n", ic_copy->entry->class_serial); % # JIT: cache hit path of vm_getivar/vm_setivar, or cancel JIT (recompile it with exivar) % if insn.name == 'setinstancevariable' fprintf(f, " if (LIKELY(RB_TYPE_P(obj, T_OBJECT) && ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass) && index < ROBJECT_NUMIV(obj) && !RB_OBJ_FROZEN(obj))) {\n"); @@ -70,15 +70,15 @@ break; } % if insn.name == 'getinstancevariable' - else if (!status->compile_info->disable_exivar_cache && ic_copy->ic_serial) { + else if (!status->compile_info->disable_exivar_cache && ic_copy->entry) { % # JIT: optimize away motion of sp and pc. This path does not call rb_warning() and so it's always leaf and not `handles_sp`. 
% # <%= render 'mjit_compile_pc_and_sp', locals: { insn: insn } -%> % % # JIT: prepare vm_getivar's arguments and variables fprintf(f, "{\n"); fprintf(f, " VALUE obj = GET_SELF();\n"); - fprintf(f, " const rb_serial_t ic_serial = (rb_serial_t)%"PRI_SERIALT_PREFIX"u;\n", ic_copy->ic_serial); - fprintf(f, " const st_index_t index = %"PRIuSIZE";\n", ic_copy->index); + fprintf(f, " const rb_serial_t ic_serial = (rb_serial_t)%"PRI_SERIALT_PREFIX"u;\n", ic_copy->entry->class_serial); + fprintf(f, " const uint32_t index = %u;\n", ic_copy->entry->index); % # JIT: cache hit path of vm_getivar, or cancel JIT (recompile it without any ivar optimization) fprintf(f, " struct gen_ivtbl *ivtbl;\n"); fprintf(f, " VALUE val;\n"); diff --git a/variable.c b/variable.c index bf80df7fae..4ed60b626d 100644 --- a/variable.c +++ b/variable.c @@ -879,6 +879,29 @@ rb_alias_variable(ID name1, ID name2) entry1->var = entry2->var; } +static bool +iv_index_tbl_lookup(struct st_table *tbl, ID id, uint32_t *indexp) +{ + struct rb_iv_index_tbl_entry *ent; + int r; + + if (tbl == NULL) return false; + + RB_VM_LOCK_ENTER(); + { + r = st_lookup(tbl, (st_data_t)id, (st_data_t *)&ent); + } + RB_VM_LOCK_LEAVE(); + + if (r) { + *indexp = ent->index; + return true; + } + else { + return false; + } +} + static void IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(ID id) { @@ -943,9 +966,9 @@ generic_ivar_delete(VALUE obj, ID id, VALUE undef) if (gen_ivtbl_get(obj, id, &ivtbl)) { st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj)); - st_data_t index; + uint32_t index; - if (iv_index_tbl && st_lookup(iv_index_tbl, (st_data_t)id, &index)) { + if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &index)) { if (index < ivtbl->numiv) { VALUE ret = ivtbl->ivptr[index]; @@ -964,9 +987,9 @@ generic_ivar_get(VALUE obj, ID id, VALUE undef) if (gen_ivtbl_get(obj, id, &ivtbl)) { st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj)); - st_data_t index; + uint32_t index; - if (iv_index_tbl && st_lookup(iv_index_tbl, (st_data_t)id, &index)) { + if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &index)) { if (index < ivtbl->numiv) { VALUE ret = ivtbl->ivptr[index]; @@ -1025,6 +1048,8 @@ iv_index_tbl_newsize(struct ivar_update *ivup) static int generic_ivar_update(st_data_t *k, st_data_t *v, st_data_t u, int existing) { + ASSERT_vm_locking(); + struct ivar_update *ivup = (struct ivar_update *)u; struct gen_ivtbl *ivtbl = 0; @@ -1048,10 +1073,9 @@ generic_ivar_defined(VALUE obj, ID id) { struct gen_ivtbl *ivtbl; st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj)); - st_data_t index; + uint32_t index; - if (!iv_index_tbl) return Qfalse; - if (!st_lookup(iv_index_tbl, (st_data_t)id, &index)) return Qfalse; + if (!iv_index_tbl_lookup(iv_index_tbl, id, &index)) return Qfalse; if (!gen_ivtbl_get(obj, id, &ivtbl)) return Qfalse; if ((index < ivtbl->numiv) && (ivtbl->ivptr[index] != Qundef)) @@ -1064,12 +1088,11 @@ static int generic_ivar_remove(VALUE obj, ID id, VALUE *valp) { struct gen_ivtbl *ivtbl; - st_data_t key = (st_data_t)id; - st_data_t index; + uint32_t index; st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj)); if (!iv_index_tbl) return 0; - if (!st_lookup(iv_index_tbl, key, &index)) return 0; + if (!iv_index_tbl_lookup(iv_index_tbl, id, &index)) return 0; if (!gen_ivtbl_get(obj, id, &ivtbl)) return 0; if (index < ivtbl->numiv) { @@ -1150,31 +1173,37 @@ gen_ivtbl_count(const struct gen_ivtbl *ivtbl) VALUE rb_ivar_lookup(VALUE obj, ID id, VALUE undef) { - VALUE val, *ptr; - struct st_table 
*iv_index_tbl; - uint32_t len; - st_data_t index; + VALUE val; if (SPECIAL_CONST_P(obj)) return undef; switch (BUILTIN_TYPE(obj)) { case T_OBJECT: - len = ROBJECT_NUMIV(obj); - ptr = ROBJECT_IVPTR(obj); - iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); - if (!iv_index_tbl) break; - if (!st_lookup(iv_index_tbl, (st_data_t)id, &index)) break; - if (len <= index) break; - val = ptr[index]; - if (val != Qundef) - return val; - break; + { + uint32_t index; + uint32_t len = ROBJECT_NUMIV(obj); + VALUE *ptr = ROBJECT_IVPTR(obj); + + if (iv_index_tbl_lookup(ROBJECT_IV_INDEX_TBL(obj), id, &index) && + index < len && + (val = ptr[index]) != Qundef) { + return val; + } + else { + break; + } + } case T_CLASS: case T_MODULE: - IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(id); - if (RCLASS_IV_TBL(obj) && - st_lookup(RCLASS_IV_TBL(obj), (st_data_t)id, &index)) - return (VALUE)index; - break; + { + IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(id); + if (RCLASS_IV_TBL(obj) && + st_lookup(RCLASS_IV_TBL(obj), (st_data_t)id, (st_data_t *)&val)) { + return val; + } + else { + break; + } + } default: if (FL_TEST(obj, FL_EXIVAR)) return generic_ivar_get(obj, id, undef); @@ -1208,8 +1237,7 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef) { VALUE val, *ptr; struct st_table *iv_index_tbl; - uint32_t len; - st_data_t index; + uint32_t len, index; rb_check_frozen(obj); switch (BUILTIN_TYPE(obj)) { @@ -1217,20 +1245,23 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef) len = ROBJECT_NUMIV(obj); ptr = ROBJECT_IVPTR(obj); iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); - if (!iv_index_tbl) break; - if (!st_lookup(iv_index_tbl, (st_data_t)id, &index)) break; - if (len <= index) break; - val = ptr[index]; - ptr[index] = Qundef; - if (val != Qundef) - return val; - break; + if (iv_index_tbl_lookup(iv_index_tbl, id, &index) && + index < len) { + val = ptr[index]; + ptr[index] = Qundef; + + if (val != Qundef) { + return val; + } + } + break; case T_CLASS: case T_MODULE: IVAR_ACCESSOR_SHOULD_BE_MAIN_RACTOR(id); if (RCLASS_IV_TBL(obj) && - st_delete(RCLASS_IV_TBL(obj), (st_data_t *)&id, &index)) - return (VALUE)index; + st_delete(RCLASS_IV_TBL(obj), (st_data_t *)&id, (st_data_t *)&val)) { + return val; + } break; default: if (FL_TEST(obj, FL_EXIVAR)) @@ -1247,46 +1278,57 @@ rb_attr_delete(VALUE obj, ID id) } static st_table * -iv_index_tbl_make(VALUE obj) +iv_index_tbl_make(VALUE obj, VALUE klass) { - VALUE klass = rb_obj_class(obj); st_table *iv_index_tbl; - if (!klass) { + if (UNLIKELY(!klass)) { rb_raise(rb_eTypeError, "hidden object cannot have instance variables"); } - if (!(iv_index_tbl = RCLASS_IV_INDEX_TBL(klass))) { - iv_index_tbl = RCLASS_IV_INDEX_TBL(klass) = st_init_numtable(); + + if ((iv_index_tbl = RCLASS_IV_INDEX_TBL(klass)) == NULL) { + RB_VM_LOCK_ENTER(); + if ((iv_index_tbl = RCLASS_IV_INDEX_TBL(klass)) == NULL) { + iv_index_tbl = RCLASS_IV_INDEX_TBL(klass) = st_init_numtable(); + } + RB_VM_LOCK_LEAVE(); } return iv_index_tbl; } static void -iv_index_tbl_extend(struct ivar_update *ivup, ID id) +iv_index_tbl_extend(struct ivar_update *ivup, ID id, VALUE klass) { - if (st_lookup(ivup->u.iv_index_tbl, (st_data_t)id, &ivup->index)) { + ASSERT_vm_locking(); + struct rb_iv_index_tbl_entry *ent; + + if (st_lookup(ivup->u.iv_index_tbl, (st_data_t)id, (st_data_t *)&ent)) { + ivup->index = ent->index; return; } if (ivup->u.iv_index_tbl->num_entries >= INT_MAX) { rb_raise(rb_eArgError, "too many instance variables"); } - ivup->index = (st_data_t)ivup->u.iv_index_tbl->num_entries; - st_add_direct(ivup->u.iv_index_tbl, (st_data_t)id, ivup->index); + 
ent = ALLOC(struct rb_iv_index_tbl_entry); + ent->index = ivup->index = (uint32_t)ivup->u.iv_index_tbl->num_entries; + ent->class_value = klass; + ent->class_serial = RCLASS_SERIAL(klass); + st_add_direct(ivup->u.iv_index_tbl, (st_data_t)id, (st_data_t)ent); ivup->iv_extended = 1; } static void generic_ivar_set(VALUE obj, ID id, VALUE val) { + VALUE klass = rb_obj_class(obj); struct ivar_update ivup; - ivup.iv_extended = 0; - ivup.u.iv_index_tbl = iv_index_tbl_make(obj); - iv_index_tbl_extend(&ivup, id); + ivup.u.iv_index_tbl = iv_index_tbl_make(obj, klass); RB_VM_LOCK_ENTER(); { + iv_index_tbl_extend(&ivup, id, klass); st_update(generic_ivtbl(obj, id, false), (st_data_t)obj, generic_ivar_update, (st_data_t)&ivup); } @@ -1361,12 +1403,18 @@ rb_obj_transient_heap_evacuate(VALUE obj, int promote) static VALUE obj_ivar_set(VALUE obj, ID id, VALUE val) { + VALUE klass = rb_obj_class(obj); struct ivar_update ivup; uint32_t i, len; - ivup.iv_extended = 0; - ivup.u.iv_index_tbl = iv_index_tbl_make(obj); - iv_index_tbl_extend(&ivup, id); + ivup.u.iv_index_tbl = iv_index_tbl_make(obj, klass); + + RB_VM_LOCK_ENTER(); + { + iv_index_tbl_extend(&ivup, id, klass); + } + RB_VM_LOCK_LEAVE(); + len = ROBJECT_NUMIV(obj); if (len <= ivup.index) { VALUE *ptr = ROBJECT_IVPTR(obj); @@ -1390,6 +1438,7 @@ obj_ivar_set(VALUE obj, ID id, VALUE val) else { newptr = obj_ivar_heap_realloc(obj, len, newsize); } + for (; len < newsize; len++) { newptr[len] = Qundef; } @@ -1445,18 +1494,17 @@ rb_ivar_defined(VALUE obj, ID id) { VALUE val; struct st_table *iv_index_tbl; - st_data_t index; + uint32_t index; if (SPECIAL_CONST_P(obj)) return Qfalse; switch (BUILTIN_TYPE(obj)) { case T_OBJECT: iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); - if (!iv_index_tbl) break; - if (!st_lookup(iv_index_tbl, (st_data_t)id, &index)) break; - if (ROBJECT_NUMIV(obj) <= index) break; - val = ROBJECT_IVPTR(obj)[index]; - if (val != Qundef) + if (iv_index_tbl_lookup(iv_index_tbl, id, &index) && + index < ROBJECT_NUMIV(obj) && + (val = ROBJECT_IVPTR(obj)[index]) != Qundef) { return Qtrue; + } break; case T_CLASS: case T_MODULE: @@ -1473,80 +1521,72 @@ rb_ivar_defined(VALUE obj, ID id) } typedef int rb_ivar_foreach_callback_func(ID key, VALUE val, st_data_t arg); +st_data_t rb_st_nth_key(st_table *tab, st_index_t index); -struct obj_ivar_tag { - VALUE obj; - rb_ivar_foreach_callback_func *func; - st_data_t arg; -}; - -static int -obj_ivar_i(st_data_t key, st_data_t index, st_data_t arg) +static ID +iv_index_tbl_nth_id(st_table *iv_index_tbl, uint32_t index) { - struct obj_ivar_tag *data = (struct obj_ivar_tag *)arg; - if (index < ROBJECT_NUMIV(data->obj)) { - VALUE val = ROBJECT_IVPTR(data->obj)[index]; - if (val != Qundef) { - return (data->func)((ID)key, val, data->arg); + st_data_t key; + RB_VM_LOCK_ENTER(); + { + key = rb_st_nth_key(iv_index_tbl, index); + } + RB_VM_LOCK_LEAVE(); + return (ID)key; +} + +static inline bool +ivar_each_i(st_table *iv_index_tbl, VALUE val, uint32_t i, rb_ivar_foreach_callback_func *func, st_data_t arg) +{ + if (val != Qundef) { + ID id = iv_index_tbl_nth_id(iv_index_tbl, i); + switch (func(id, val, arg)) { + case ST_CHECK: + case ST_CONTINUE: + break; + case ST_STOP: + return true; + default: + rb_bug("unreachable"); } } - return ST_CONTINUE; + return false; } static void obj_ivar_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg) { - st_table *tbl; - struct obj_ivar_tag data; + st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); + if (!iv_index_tbl) return; + uint32_t i=0; - tbl = 
ROBJECT_IV_INDEX_TBL(obj); - if (!tbl) - return; - - data.obj = obj; - data.func = (int (*)(ID key, VALUE val, st_data_t arg))func; - data.arg = arg; - - st_foreach_safe(tbl, obj_ivar_i, (st_data_t)&data); -} - -struct gen_ivar_tag { - struct gen_ivtbl *ivtbl; - rb_ivar_foreach_callback_func *func; - st_data_t arg; -}; - -static int -gen_ivar_each_i(st_data_t key, st_data_t index, st_data_t data) -{ - struct gen_ivar_tag *arg = (struct gen_ivar_tag *)data; - - if (index < arg->ivtbl->numiv) { - VALUE val = arg->ivtbl->ivptr[index]; - if (val != Qundef) { - return (arg->func)((ID)key, val, arg->arg); + for (i=0; i < ROBJECT_NUMIV(obj); i++) { + VALUE val = ROBJECT_IVPTR(obj)[i]; + if (ivar_each_i(iv_index_tbl, val, i, func, arg)) { + return; + } } } - return ST_CONTINUE; } static void gen_ivar_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg) { - struct gen_ivar_tag data; + struct gen_ivtbl *ivtbl; st_table *iv_index_tbl = RCLASS_IV_INDEX_TBL(rb_obj_class(obj)); - if (!iv_index_tbl) return; - if (!gen_ivtbl_get(obj, 0, &data.ivtbl)) return; + if (!gen_ivtbl_get(obj, 0, &ivtbl)) return; - data.func = (int (*)(ID key, VALUE val, st_data_t arg))func; - data.arg = arg; - - st_foreach_safe(iv_index_tbl, gen_ivar_each_i, (st_data_t)&data); + for (uint32_t i=0; i<ivtbl->numiv; i++) { + VALUE val = ivtbl->ivptr[i]; + if (ivar_each_i(iv_index_tbl, val, i, func, arg)) { + return; + } + } } struct givar_copy { VALUE obj; + VALUE klass; st_table *iv_index_tbl; struct gen_ivtbl *ivtbl; }; @@ -1559,7 +1599,13 @@ gen_ivar_copy(ID id, VALUE val, st_data_t arg) ivup.iv_extended = 0; ivup.u.iv_index_tbl = c->iv_index_tbl; - iv_index_tbl_extend(&ivup, id); + + RB_VM_LOCK_ENTER(); + { + iv_index_tbl_extend(&ivup, id, c->klass); + } + RB_VM_LOCK_LEAVE(); + if (ivup.index >= c->ivtbl->numiv) { uint32_t newsize = iv_index_tbl_newsize(&ivup); c->ivtbl = gen_ivtbl_resize(c->ivtbl, newsize); @@ -1597,8 +1643,10 @@ rb_copy_generic_ivar(VALUE clone, VALUE obj) FL_SET(clone, FL_EXIVAR); } - c.iv_index_tbl = iv_index_tbl_make(clone); - c.obj = clone; + VALUE klass = rb_obj_class(clone); + c.iv_index_tbl = iv_index_tbl_make(clone, klass); + c.obj = clone; + c.klass = klass; gen_ivar_each(obj, gen_ivar_copy, (st_data_t)&c); /* * c.ivtbl may change in gen_ivar_copy due to realloc, @@ -1652,7 +1700,7 @@ rb_ivar_count(VALUE obj) switch (BUILTIN_TYPE(obj)) { case T_OBJECT: - if ((tbl = ROBJECT_IV_INDEX_TBL(obj)) != 0) { + if (ROBJECT_IV_INDEX_TBL(obj) != 0) { st_index_t i, count, num = ROBJECT_NUMIV(obj); const VALUE *const ivptr = ROBJECT_IVPTR(obj); for (i = count = 0; i < num; ++i) { @@ -1773,7 +1821,7 @@ rb_obj_remove_instance_variable(VALUE obj, VALUE name) const ID id = id_for_var(obj, name, an, instance); st_data_t n, v; struct st_table *iv_index_tbl; - st_data_t index; + uint32_t index; rb_check_frozen(obj); if (!id) { @@ -1783,11 +1831,9 @@ switch (BUILTIN_TYPE(obj)) { case T_OBJECT: iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); - if (!iv_index_tbl) break; - if (!st_lookup(iv_index_tbl, (st_data_t)id, &index)) break; - if (ROBJECT_NUMIV(obj) <= index) break; - val = ROBJECT_IVPTR(obj)[index]; - if (val != Qundef) { + if (iv_index_tbl_lookup(iv_index_tbl, id, &index) && + index < ROBJECT_NUMIV(obj) && + (val = ROBJECT_IVPTR(obj)[index]) != Qundef) { ROBJECT_IVPTR(obj)[index] = Qundef; return val; } diff --git a/vm_core.h b/vm_core.h index e62d43d4aa..73b6be52f6 100644 --- a/vm_core.h +++ b/vm_core.h @@ -225,8 +225,7 @@ struct iseq_inline_cache_entry { }; struct
iseq_inline_iv_cache_entry { - rb_serial_t ic_serial; - size_t index; + struct rb_iv_index_tbl_entry *entry; }; union iseq_inline_storage_entry { diff --git a/vm_insnhelper.c b/vm_insnhelper.c index 9eedc10172..d22cf3783a 100644 --- a/vm_insnhelper.c +++ b/vm_insnhelper.c @@ -1079,9 +1079,25 @@ vm_search_const_defined_class(const VALUE cbase, ID id) return 0; } -ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, IVC, const struct rb_callcache *, int)); +static bool +iv_index_tbl_lookup(struct st_table *iv_index_tbl, ID id, struct rb_iv_index_tbl_entry **ent) +{ + int found; + + if (iv_index_tbl == NULL) return false; + + RB_VM_LOCK_ENTER(); + { + found = st_lookup(iv_index_tbl, (st_data_t)id, (st_data_t *)ent); + } + RB_VM_LOCK_LEAVE(); + + return found ? true : false; +} + +ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int)); static inline VALUE -vm_getivar(VALUE obj, ID id, IVC ic, const struct rb_callcache *cc, int is_attr) +vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr) { #if OPT_IC_FOR_IVAR VALUE val = Qundef; @@ -1092,8 +1108,8 @@ vm_getivar(VALUE obj, ID id, IVC ic, const struct rb_callcache *cc, int is_attr) else if (LIKELY(is_attr ? RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_unset, vm_cc_attr_index(cc) > 0) : RB_DEBUG_COUNTER_INC_UNLESS(ivar_get_ic_miss_serial, - ic->ic_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) { - st_index_t index = !is_attr ? ic->index : (vm_cc_attr_index(cc) - 1); + ic->entry && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass)))) { + uint32_t index = !is_attr ? ic->entry->index : (vm_cc_attr_index(cc) - 1); RB_DEBUG_COUNTER_INC(ivar_get_ic_hit); @@ -1116,8 +1132,6 @@ vm_getivar(VALUE obj, ID id, IVC ic, const struct rb_callcache *cc, int is_attr) st_index_t numiv; VALUE *ivptr; - st_data_t index; - if (BUILTIN_TYPE(obj) == T_OBJECT) { iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); numiv = ROBJECT_NUMIV(obj); @@ -1143,17 +1157,19 @@ vm_getivar(VALUE obj, ID id, IVC ic, const struct rb_callcache *cc, int is_attr) fill: if (iv_index_tbl) { - if (st_lookup(iv_index_tbl, id, &index)) { + struct rb_iv_index_tbl_entry *ent; + + if (iv_index_tbl_lookup(iv_index_tbl, id, &ent)) { if (!is_attr) { - ic->index = index; - ic->ic_serial = RCLASS_SERIAL(RBASIC(obj)->klass); + ic->entry = ent; + RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value); } else { /* call_info */ - vm_cc_attr_index_set(cc, (int)index + 1); + vm_cc_attr_index_set(cc, (int)ent->index + 1); } - if (index < numiv) { - val = ivptr[index]; + if (ent->index < numiv) { + val = ivptr[ent->index]; } } } @@ -1182,20 +1198,20 @@ vm_getivar(VALUE obj, ID id, IVC ic, const struct rb_callcache *cc, int is_attr) } static inline VALUE -vm_setivar(VALUE obj, ID id, VALUE val, IVC ic, const struct rb_callcache *cc, int is_attr) +vm_setivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr) { #if OPT_IC_FOR_IVAR rb_check_frozen_internal(obj); if (LIKELY(RB_TYPE_P(obj, T_OBJECT))) { VALUE klass = RBASIC(obj)->klass; - st_data_t index; + uint32_t index; if (LIKELY( - (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->ic_serial == RCLASS_SERIAL(klass))) || - ( is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, vm_cc_attr_index(cc) > 0)))) { + (!is_attr && RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_serial, ic->entry && ic->entry->class_serial == RCLASS_SERIAL(klass))) || + ( is_attr && 
RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_unset, vm_cc_attr_index(cc) > 0)))) { VALUE *ptr = ROBJECT_IVPTR(obj); - index = !is_attr ? ic->index : vm_cc_attr_index(cc)-1; + index = !is_attr ? ic->entry->index : vm_cc_attr_index(cc)-1; if (RB_DEBUG_COUNTER_INC_UNLESS(ivar_set_ic_miss_oorange, index < ROBJECT_NUMIV(obj))) { RB_OBJ_WRITE(obj, &ptr[index], val); @@ -1205,17 +1221,18 @@ vm_setivar(VALUE obj, ID id, VALUE val, IVC ic, const struct rb_callcache *cc, i } else { struct st_table *iv_index_tbl = ROBJECT_IV_INDEX_TBL(obj); + struct rb_iv_index_tbl_entry *ent; - if (iv_index_tbl && st_lookup(iv_index_tbl, (st_data_t)id, &index)) { + if (iv_index_tbl_lookup(iv_index_tbl, id, &ent)) { if (!is_attr) { - ic->index = index; - ic->ic_serial = RCLASS_SERIAL(klass); + ic->entry = ent; + RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value); } - else if (index >= INT_MAX) { + else if (ent->index >= INT_MAX) { rb_raise(rb_eArgError, "too many instance variables"); } else { - vm_cc_attr_index_set(cc, (int)(index + 1)); + vm_cc_attr_index_set(cc, (int)(ent->index + 1)); } } /* fall through */ @@ -1230,15 +1247,15 @@ vm_setivar(VALUE obj, ID id, VALUE val, IVC ic, const struct rb_callcache *cc, i } static inline VALUE -vm_getinstancevariable(VALUE obj, ID id, IVC ic) +vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic) { - return vm_getivar(obj, id, ic, NULL, FALSE); + return vm_getivar(obj, id, iseq, ic, NULL, FALSE); } static inline void -vm_setinstancevariable(VALUE obj, ID id, VALUE val, IVC ic) +vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic) { - vm_setivar(obj, id, val, ic, 0, 0); + vm_setivar(obj, id, val, iseq, ic, 0, 0); } static VALUE @@ -2651,7 +2668,7 @@ vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_call const struct rb_callcache *cc = cd->cc; RB_DEBUG_COUNTER_INC(ccf_ivar); cfp->sp -= 1; - return vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, cc, TRUE); + return vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE); } static VALUE @@ -2661,7 +2678,7 @@ vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_c RB_DEBUG_COUNTER_INC(ccf_attrset); VALUE val = *(cfp->sp - 1); cfp->sp -= 2; - return vm_setivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, val, NULL, cc, 1); + return vm_setivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, val, NULL, NULL, cc, 1); } static inline VALUE diff --git a/vm_sync.c b/vm_sync.c index b670984a13..1b9897d807 100644 --- a/vm_sync.c +++ b/vm_sync.c @@ -124,14 +124,14 @@ vm_lock_leave(rb_vm_t *vm, unsigned int *lev APPEND_LOCATION_ARGS) } } -void +MJIT_FUNC_EXPORTED void rb_vm_lock_enter_body(unsigned int *lev APPEND_LOCATION_ARGS) { rb_vm_t *vm = GET_VM(); vm_lock_enter(vm, vm_locked(vm), lev APPEND_LOCATION_PARAMS); } -void +MJIT_FUNC_EXPORTED void rb_vm_lock_leave_body(unsigned int *lev APPEND_LOCATION_ARGS) { vm_lock_leave(GET_VM(), lev APPEND_LOCATION_PARAMS);
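[Editor's note] The cache layout this patch introduces, sketched for orientation. This is an illustration distilled from the internal/class.h, vm_core.h, and vm_insnhelper.c hunks above, not additional patch content:

    /* RCLASS_IV_INDEX_TBL(klass) is an st_table mapping ID -> struct rb_iv_index_tbl_entry *. */
    struct rb_iv_index_tbl_entry {
        uint32_t index;           /* ivar slot index within instances of the class */
        rb_serial_t class_serial; /* serial of the class that owns the table */
        VALUE class_value;        /* the class itself, so GC can mark it via the ISeq
                                     and update the pointer during compaction */
    };

    /* Inline ivar cache (IVC) used by getinstancevariable/setinstancevariable: */
    struct iseq_inline_iv_cache_entry {
        struct rb_iv_index_tbl_entry *entry; /* one pointer; refilling the cache is a single store */
    };

    /* Cache-hit test in vm_getivar/vm_setivar:
     *   ic->entry && ic->entry->class_serial == RCLASS_SERIAL(RBASIC(obj)->klass)
     * On a hit, the ivar slot at ic->entry->index is used directly; on a miss, the entry
     * is looked up under the VM lock and ic->entry is refilled by an atomic pointer store. */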
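[Editor's note] The locking discipline applied to iv_index_tbl can be condensed as follows. The sketch restates the variable.c hunks above (iv_index_tbl_make and iv_index_tbl_lookup); it is illustrative only, and the _sketch suffixes are not names from the patch:

    /* Table creation uses a double-checked pattern: check, take the VM lock, re-check. */
    static st_table *
    iv_index_tbl_make_sketch(VALUE klass)
    {
        st_table *tbl = RCLASS_IV_INDEX_TBL(klass);

        if (tbl == NULL) {
            RB_VM_LOCK_ENTER();
            {
                /* another ractor may have created the table while we waited */
                if ((tbl = RCLASS_IV_INDEX_TBL(klass)) == NULL) {
                    tbl = RCLASS_IV_INDEX_TBL(klass) = st_init_numtable();
                }
            }
            RB_VM_LOCK_LEAVE();
        }
        return tbl;
    }

    /* Lookups also run under the VM lock; iv_index_tbl_extend (adding an ID -> entry
     * pair) asserts that its caller already holds it (ASSERT_vm_locking). */
    static bool
    iv_index_tbl_lookup_sketch(st_table *tbl, ID id, uint32_t *indexp)
    {
        struct rb_iv_index_tbl_entry *ent;
        int found;

        if (tbl == NULL) return false;

        RB_VM_LOCK_ENTER();
        {
            found = st_lookup(tbl, (st_data_t)id, (st_data_t *)&ent);
        }
        RB_VM_LOCK_LEAVE();

        if (found) *indexp = ent->index;
        return found;
    }

This is also why vm_sync.c marks rb_vm_lock_enter_body and rb_vm_lock_leave_body as MJIT_FUNC_EXPORTED: JIT-compiled code reaches the same lock through vm_getivar/vm_setivar.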