mirror of https://github.com/ruby/ruby.git

Use the current object as the compaction index

Instead of keeping track of the current bit plane, keep track of the
actual slot when compacting. This means we don't need to re-scan
objects inside the same bit plane when we continue with movement.
Aaron Patterson 2021-06-01 14:34:34 -07:00
parent 9024c7f1bb
commit f9b9d1c580
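
To make the cursor change concrete, here is a minimal standalone sketch of the bitmap arithmetic it relies on. It is not code from gc.c: the constants and helper (BITS_PER_PLANE, SLOTS_PER_PAGE, bits_after_cursor) are invented for illustration and only stand in for what the real NUM_IN_PAGE and BITMAP_INDEX macros compute. The point it demonstrates is that a slot pointer, unlike a plane index, carries the offset inside its plane, so a resumed scan can shift away the bits for slots that were already considered instead of re-walking the plane from bit 0.

/* Toy model, NOT gc.c: one bitmap word ("plane") covers 64 slots of a page.
 * BITS_PER_PLANE / SLOTS_PER_PAGE / bits_after_cursor are illustrative names
 * that only mimic what NUM_IN_PAGE and BITMAP_INDEX do in the real code. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define BITS_PER_PLANE 64
#define SLOTS_PER_PAGE 256
#define PLANE_COUNT    (SLOTS_PER_PAGE / BITS_PER_PLANE)

/* Given the slot the last scan stopped at, recover its plane and drop the
 * bits for slots that were already visited.  An index-only cursor identifies
 * the plane but not the position inside it, so it must re-scan from bit 0. */
static uint64_t
bits_after_cursor(const uint64_t *mark_planes, size_t cursor_slot)
{
    size_t plane  = cursor_slot / BITS_PER_PLANE;   /* analogous to BITMAP_INDEX */
    size_t offset = cursor_slot % BITS_PER_PLANE;   /* analogous to NUM_IN_PAGE  */
    return mark_planes[plane] >> offset;
}

int
main(void)
{
    uint64_t mark_planes[PLANE_COUNT] = {0};

    /* Slots 3 and 40 of plane 0 are marked, i.e. movable candidates. */
    mark_planes[0] = (1ULL << 3) | (1ULL << 40);

    /* Resume from slot 3: bit 0 of the shifted word is slot 3 itself and
     * bit 37 is slot 40; slots 0..2 are never looked at again. */
    uint64_t bits = bits_after_cursor(mark_planes, 3);
    assert((bits & 1) != 0);
    assert(((bits >> 37) & 1) != 0);
    printf("remaining candidates in plane 0: %#llx\n", (unsigned long long)bits);
    return 0;
}

This shift is what the new bits >>= NUM_IN_PAGE(p) line in try_move does when resuming from heap->compact_cursor_index in the diff below.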

gc.c (101 changed lines)

@@ -683,7 +683,7 @@ typedef struct rb_heap_struct {
     struct list_head pages;
     struct heap_page *sweeping_page; /* iterator for .pages */
     struct heap_page *compact_cursor;
-    size_t compact_cursor_index;
+    RVALUE * compact_cursor_index;
 #if GC_ENABLE_INCREMENTAL_MARK
     struct heap_page *pooled_pages;
 #endif
@@ -4895,11 +4895,48 @@ unlock_page_body(rb_objspace_t *objspace, struct heap_page_body *body)
     }
 }
 
+static inline bool
+try_move_in_plane(rb_objspace_t *objspace, rb_heap_t *heap, intptr_t p, bits_t bits, VALUE dest)
+{
+    if (bits) {
+        do {
+            if (bits & 1) {
+                /* We're trying to move "p" */
+                objspace->rcompactor.considered_count_table[BUILTIN_TYPE((VALUE)p)]++;
+
+                if (gc_is_moveable_obj(objspace, (VALUE)p)) {
+                    /* We were able to move "p" */
+                    objspace->rcompactor.moved_count_table[BUILTIN_TYPE((VALUE)p)]++;
+                    objspace->rcompactor.total_moved++;
+
+                    bool from_freelist = false;
+
+                    if (BUILTIN_TYPE(dest) == T_NONE) {
+                        from_freelist = true;
+                    }
+
+                    gc_move(objspace, (VALUE)p, dest);
+                    gc_pin(objspace, (VALUE)p);
+                    heap->compact_cursor_index = (RVALUE *)p;
+                    if (from_freelist) {
+                        FL_SET((VALUE)p, FL_FROM_FREELIST);
+                    }
+
+                    return true;
+                }
+            }
+            p += sizeof(RVALUE);
+            bits >>= 1;
+        } while (bits);
+    }
+
+    return false;
+}
+
 static short
 try_move(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *sweep_page, VALUE dest)
 {
     struct heap_page * cursor = heap->compact_cursor;
-    char from_freelist = 0;
 
     GC_ASSERT(!MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(dest), dest));
@@ -4907,49 +4944,39 @@ try_move(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *sweep_page,
      * T_NONE, it is an object that just got freed but hasn't been
      * added to the freelist yet */
 
-    if (BUILTIN_TYPE(dest) == T_NONE) {
-        from_freelist = 1;
-    }
-
     while(1) {
-        size_t index = heap->compact_cursor_index;
+        size_t index;
         bits_t *mark_bits = cursor->mark_bits;
         bits_t *pin_bits = cursor->pinned_bits;
-        RVALUE * p = cursor->start;
-        RVALUE * offset = p - NUM_IN_PAGE(p);
+        RVALUE * p;
+
+        if (heap->compact_cursor_index) {
+            index = BITMAP_INDEX(heap->compact_cursor_index);
+            p = heap->compact_cursor_index;
+            GC_ASSERT(cursor == GET_HEAP_PAGE(p));
+        } else {
+            index = 0;
+            p = cursor->start;
+        }
+
+        bits_t bits = mark_bits[index] & ~pin_bits[index];
+        bits >>= NUM_IN_PAGE(p);
+        if (try_move_in_plane(objspace, heap, (intptr_t)p, bits, dest)) return 1;
+
+        if (index == 0) {
+            p = cursor->start + (BITS_BITLENGTH - NUM_IN_PAGE(cursor->start));
+        } else {
+            p = cursor->start + (BITS_BITLENGTH - NUM_IN_PAGE(cursor->start)) + (BITS_BITLENGTH * index);
+        }
 
         /* Find an object to move and move it. Movable objects must be
          * marked, so we iterate using the marking bitmap */
-        for (size_t i = index; i < HEAP_PAGE_BITMAP_LIMIT; i++) {
+        for (size_t i = index + 1; i < HEAP_PAGE_BITMAP_LIMIT; i++) {
             bits_t bits = mark_bits[i] & ~pin_bits[i];
-            if (bits) {
-                p = offset + i * BITS_BITLENGTH;
-                do {
-                    if (bits & 1) {
-                        /* We're trying to move "p" */
-                        objspace->rcompactor.considered_count_table[BUILTIN_TYPE((VALUE)p)]++;
-
-                        if (gc_is_moveable_obj(objspace, (VALUE)p)) {
-                            /* We were able to move "p" */
-                            objspace->rcompactor.moved_count_table[BUILTIN_TYPE((VALUE)p)]++;
-                            objspace->rcompactor.total_moved++;
-                            gc_move(objspace, (VALUE)p, dest);
-                            gc_pin(objspace, (VALUE)p);
-                            heap->compact_cursor_index = i;
-                            if (from_freelist) {
-                                FL_SET((VALUE)p, FL_FROM_FREELIST);
-                            }
-
-                            return 1;
-                        }
-                    }
-                    p++;
-                    bits >>= 1;
-                } while (bits);
-            }
+            if (try_move_in_plane(objspace, heap, (intptr_t)p, bits, dest)) return 1;
+            p += BITS_BITLENGTH;
         }
 
        /* We couldn't find a movable object on the compact cursor, so lets