diff --git a/ext/objspace/objspace.c b/ext/objspace/objspace.c
index ee76367b44..d1ccb51094 100644
--- a/ext/objspace/objspace.c
+++ b/ext/objspace/objspace.c
@@ -468,7 +468,6 @@ count_nodes(int argc, VALUE *argv, VALUE os)
 		COUNT_NODE(NODE_ERRINFO);
 		COUNT_NODE(NODE_DEFINED);
 		COUNT_NODE(NODE_POSTEXE);
-		COUNT_NODE(NODE_ALLOCA);
 		COUNT_NODE(NODE_BMETHOD);
 		COUNT_NODE(NODE_DSYM);
 		COUNT_NODE(NODE_ATTRASGN);
@@ -626,6 +625,7 @@ count_imemo_objects(int argc, VALUE *argv, VALUE self)
         imemo_type_ids[5] = rb_intern("imemo_memo");
         imemo_type_ids[6] = rb_intern("imemo_ment");
         imemo_type_ids[7] = rb_intern("imemo_iseq");
+        imemo_type_ids[8] = rb_intern("imemo_alloc");
     }
 
     rb_objspace_each_objects(count_imemo_objects_i, (void *)hash);
diff --git a/gc.c b/gc.c
index a0f7e89448..bcd48565c4 100644
--- a/gc.c
+++ b/gc.c
@@ -439,6 +439,7 @@ typedef struct RVALUE {
             struct rb_method_entry_struct ment;
             const rb_iseq_t iseq;
             rb_env_t env;
+            struct rb_imemo_alloc_struct alloc;
         } imemo;
         struct {
             struct RBasic basic;
@@ -2354,6 +2355,9 @@ obj_free(rb_objspace_t *objspace, VALUE obj)
             GC_ASSERT(VM_ENV_ESCAPED_P(RANY(obj)->as.imemo.env.ep));
             xfree((VALUE *)RANY(obj)->as.imemo.env.env);
             break;
+          case imemo_alloc:
+            xfree(RANY(obj)->as.imemo.alloc.ptr);
+            break;
           default:
             break;
         }
@@ -3282,6 +3286,9 @@ obj_memsize_of(VALUE obj, int use_all_types)
       case T_RATIONAL:
       case T_COMPLEX:
       case T_IMEMO:
+        if (imemo_type_p(obj, imemo_alloc)) {
+            size += RANY(obj)->as.imemo.alloc.cnt * sizeof(VALUE);
+        }
         break;
 
       case T_FLOAT:
@@ -4524,6 +4531,11 @@ gc_mark_imemo(rb_objspace_t *objspace, VALUE obj)
       case imemo_iseq:
         rb_iseq_mark((rb_iseq_t *)obj);
         return;
+      case imemo_alloc:
+        rb_gc_mark_locations(RANY(obj)->as.imemo.alloc.ptr,
+                             RANY(obj)->as.imemo.alloc.ptr + RANY(obj)->as.imemo.alloc.cnt);
+        rb_gc_mark(RANY(obj)->as.imemo.alloc.next);
+        return;
 #if VM_CHECK_MODE > 0
       default:
         VM_UNREACHABLE(gc_mark_imemo);
@@ -8104,14 +8116,16 @@ ruby_mimfree(void *ptr)
 void *
 rb_alloc_tmp_buffer_with_count(volatile VALUE *store, size_t size, size_t cnt)
 {
-    NODE *s;
+    VALUE s;
+    rb_imemo_alloc_t *a;
     void *ptr;
 
-    s = rb_node_newnode(NODE_ALLOCA, 0, 0, 0);
+    s = rb_imemo_new(imemo_alloc, 0, 0, 0, 0);
     ptr = ruby_xmalloc0(size);
-    s->u1.value = (VALUE)ptr;
-    s->u3.cnt = cnt;
-    *store = (VALUE)s;
+    a = (rb_imemo_alloc_t*)s;
+    a->ptr = (VALUE*)ptr;
+    a->cnt = cnt;
+    *store = s;
     return ptr;
 }
 
@@ -9360,6 +9374,7 @@ rb_raw_obj_info(char *buff, const int buff_size, VALUE obj)
             IMEMO_NAME(memo);
             IMEMO_NAME(ment);
             IMEMO_NAME(iseq);
+            IMEMO_NAME(alloc);
 #undef IMEMO_NAME
         }
         snprintf(buff, buff_size, "%s %s", buff, imemo_name);
diff --git a/internal.h b/internal.h
index 4ac024d9af..b3d3463895 100644
--- a/internal.h
+++ b/internal.h
@@ -844,7 +844,8 @@ enum imemo_type {
     imemo_memo  = 5,
     imemo_ment  = 6,
     imemo_iseq  = 7,
-    imemo_mask  = 0x07
+    imemo_alloc = 8,
+    imemo_mask  = 0x0f
 };
 
 static inline enum imemo_type
@@ -869,12 +870,12 @@ imemo_type_p(VALUE imemo, enum imemo_type imemo_type)
 }
 
 /* FL_USER0 to FL_USER2 is for type */
-#define IMEMO_FL_USHIFT (FL_USHIFT + 3)
-#define IMEMO_FL_USER0 FL_USER3
-#define IMEMO_FL_USER1 FL_USER4
-#define IMEMO_FL_USER2 FL_USER5
-#define IMEMO_FL_USER3 FL_USER6
-#define IMEMO_FL_USER4 FL_USER7
+#define IMEMO_FL_USHIFT (FL_USHIFT + 4)
+#define IMEMO_FL_USER0 FL_USER4
+#define IMEMO_FL_USER1 FL_USER5
+#define IMEMO_FL_USER2 FL_USER6
+#define IMEMO_FL_USER3 FL_USER7
+#define IMEMO_FL_USER4 FL_USER8
 
 /* CREF in method.h */
 
@@ -929,6 +930,14 @@ rb_vm_ifunc_proc_new(VALUE (*func)(ANYARGS), const void *data)
     return rb_vm_ifunc_new(func, data, 0, UNLIMITED_ARGUMENTS);
 }
 
+typedef struct rb_imemo_alloc_struct {
+    VALUE flags;
+    VALUE reserved;
+    VALUE *ptr; /* malloc'ed buffer */
+    VALUE next; /* next imemo */
+    size_t cnt; /* buffer size in VALUE */
+} rb_imemo_alloc_t;
+
 /* MEMO */
 
 struct MEMO {
diff --git a/node.c b/node.c
index 58732ab987..c0ca3a8186 100644
--- a/node.c
+++ b/node.c
@@ -1039,7 +1039,6 @@ dump_node(VALUE buf, VALUE indent, int comment, NODE *node)
       case NODE_ARGS_AUX:
       case NODE_TO_ARY:
       case NODE_BLOCK_ARG:
-      case NODE_ALLOCA:
       case NODE_BMETHOD:
       case NODE_LAST:
         break;
@@ -1075,9 +1074,6 @@ rb_gc_free_node(VALUE obj)
             xfree(RNODE(obj)->nd_ainfo);
         }
         break;
-      case NODE_ALLOCA:
-        xfree(RNODE(obj)->u1.node);
-        break;
     }
 }
 
@@ -1096,9 +1092,6 @@ rb_node_memsize(VALUE obj)
             size += sizeof(*RNODE(obj)->nd_ainfo);
         }
         break;
-      case NODE_ALLOCA:
-        size += RNODE(obj)->nd_cnt * sizeof(VALUE);
-        break;
     }
     return size;
 }
@@ -1223,11 +1216,6 @@ rb_gc_mark_node(NODE *obj)
       case NODE_ERRINFO:
      case NODE_BLOCK_ARG:
         break;
-      case NODE_ALLOCA:
-        rb_gc_mark_locations((VALUE*)RNODE(obj)->u1.value,
-                             (VALUE*)RNODE(obj)->u1.value + RNODE(obj)->u3.cnt);
-        rb_gc_mark(RNODE(obj)->u2.value);
-        break;
       default:		/* unlisted NODE */
         rb_gc_mark_maybe(RNODE(obj)->u1.value);
diff --git a/node.h b/node.h
index 36d83f0265..51aa5f1f23 100644
--- a/node.h
+++ b/node.h
@@ -220,8 +220,6 @@ enum node_type {
 #define NODE_DEFINED NODE_DEFINED
     NODE_POSTEXE,
 #define NODE_POSTEXE NODE_POSTEXE
-    NODE_ALLOCA,
-#define NODE_ALLOCA NODE_ALLOCA
     NODE_BMETHOD,
 #define NODE_BMETHOD NODE_BMETHOD
     NODE_DSYM,
diff --git a/parse.y b/parse.y
index 8d9e2288ed..838b6e00e9 100644
--- a/parse.y
+++ b/parse.y
@@ -170,7 +170,7 @@ typedef struct token_info {
                      token */
 
 struct parser_params {
-    NODE *heap;
+    rb_imemo_alloc_t *heap;
 
     YYSTYPE *lval;
@@ -11517,15 +11517,15 @@ rb_parser_set_yydebug(VALUE self, VALUE flag)
 #ifndef RIPPER
 #ifdef YYMALLOC
 #define HEAPCNT(n, size) ((n) * (size) / sizeof(YYSTYPE))
-#define NEWHEAP() rb_node_newnode(NODE_ALLOCA, 0, (VALUE)parser->heap, 0)
-#define ADD2HEAP(n, c, p) ((parser->heap = (n))->u1.node = (p), \
-                           (n)->u3.cnt = (c), (p))
+#define NEWHEAP() rb_imemo_new(imemo_alloc, 0, (VALUE)parser->heap, 0, 0)
+#define ADD2HEAP(n, c, p) ((parser->heap = (n))->ptr = (p), \
+                           (n)->cnt = (c), (p))
 
 void *
 rb_parser_malloc(struct parser_params *parser, size_t size)
 {
     size_t cnt = HEAPCNT(1, size);
-    NODE *n = NEWHEAP();
+    rb_imemo_alloc_t *n = NEWHEAP();
     void *ptr = xmalloc(size);
 
     return ADD2HEAP(n, cnt, ptr);
@@ -11535,7 +11535,7 @@ void *
 rb_parser_calloc(struct parser_params *parser, size_t nelem, size_t size)
 {
     size_t cnt = HEAPCNT(nelem, size);
-    NODE *n = NEWHEAP();
+    rb_imemo_alloc_t *n = NEWHEAP();
     void *ptr = xcalloc(nelem, size);
 
     return ADD2HEAP(n, cnt, ptr);
@@ -11544,17 +11544,17 @@ rb_parser_calloc(struct parser_params *parser, size_t nelem, size_t size)
 void *
 rb_parser_realloc(struct parser_params *parser, void *ptr, size_t size)
 {
-    NODE *n;
+    rb_imemo_alloc_t *n;
     size_t cnt = HEAPCNT(1, size);
 
     if (ptr && (n = parser->heap) != NULL) {
         do {
-            if (n->u1.node == ptr) {
-                n->u1.node = ptr = xrealloc(ptr, size);
-                if (n->u3.cnt) n->u3.cnt = cnt;
+            if (n->ptr == ptr) {
+                n->ptr = ptr = xrealloc(ptr, size);
+                if (n->cnt) n->cnt = cnt;
                 return ptr;
             }
-        } while ((n = n->u2.node) != NULL);
+        } while ((n = n->next) != NULL);
     }
     n = NEWHEAP();
     ptr = xrealloc(ptr, size);
@@ -11564,15 +11564,15 @@ rb_parser_realloc(struct parser_params *parser, void *ptr, size_t size)
 void
 rb_parser_free(struct parser_params *parser, void *ptr)
 {
-    NODE **prev = &parser->heap, *n;
+    rb_imemo_alloc_t **prev = &parser->heap, *n;
 
     while ((n = *prev) != NULL) {
-        if (n->u1.node == ptr) {
-            *prev = n->u2.node;
+        if (n->ptr == ptr) {
+            *prev = n->next;
             rb_gc_force_recycle((VALUE)n);
             break;
         }
-        prev = &n->u2.node;
+        prev = &n->next;
     }
     xfree(ptr);
 }
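
Note (editorial, not part of the patch): rb_alloc_tmp_buffer_with_count() is the allocator behind the public ALLOCV()/ALLOCV_N() C-API macros, so after this change a C extension's temporary buffer is owned by an imemo_alloc object rather than a NODE_ALLOCA node. The GC hooks changed above do the bookkeeping: gc_mark_imemo() keeps any VALUEs parked in the buffer marked, and obj_free() releases the malloc'ed memory when the imemo is swept. A minimal usage sketch follows; reverse_into_tmp() is a hypothetical extension function invented for illustration, not code from this commit.

#include "ruby.h"

/* Sketch only: copy an array into a temporary VALUE buffer obtained via
 * ALLOCV_N(), which bottoms out in rb_alloc_tmp_buffer_with_count(). */
static VALUE
reverse_into_tmp(VALUE self, VALUE ary)
{
    long i, len = RARRAY_LEN(ary);
    VALUE store = 0;                          /* roots the imemo_alloc on the C stack */
    VALUE *buf = ALLOCV_N(VALUE, store, len); /* heap buffer tracked by an imemo_alloc */
    VALUE result;

    for (i = 0; i < len; i++) {
        /* VALUEs stored here stay alive: gc_mark_imemo() marks ptr..ptr+cnt */
        buf[i] = RARRAY_AREF(ary, len - 1 - i);
    }
    result = rb_ary_new_from_values(len, buf);
    ALLOCV_END(store);                        /* frees the buffer eagerly */
    return result;
}

If ALLOCV_END() is skipped, nothing leaks: the buffer simply survives until `store` becomes unreachable and the imemo_alloc is swept, at which point the new imemo_alloc branch in obj_free() xfrees it.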