Feature #15626 » 0001-GC-Compaction-for-MRI.patch
class.c

    rb_name_class(obj, id);
    rb_const_set((rb_cObject ? rb_cObject : obj), id, obj);
    rb_vm_add_root_module(id, obj);

    return obj;
}
...
    }
    module = rb_define_module_id(id);
    rb_vm_add_root_module(id, module);
    rb_gc_register_mark_object(module);
    rb_const_set(rb_cObject, id, module);

    return module;
constant.h

typedef struct rb_const_entry_struct {
    rb_const_flag_t flag;
    int line;
-   const VALUE value;            /* should be mark */
-   const VALUE file;             /* should be mark */
+   VALUE value;                  /* should be mark */
+   VALUE file;                   /* should be mark */
} rb_const_entry_t;

VALUE rb_mod_private_constant(int argc, const VALUE *argv, VALUE obj);
ext/-test-/memory_location/extconf.rb

# frozen_string_literal: false
$srcs = Dir[File.join($srcdir, "*.{#{SRC_EXT.join(%q{,})}}")]
inits = $srcs.map {|s| File.basename(s, ".*")}
inits.delete("init")
inits.map! {|s|"X(#{s})"}
$defs << "-DTEST_INIT_FUNCS(X)=\"#{inits.join(' ')}\""
create_makefile("-test-/memory_location")
ext/-test-/memory_location/memory_location.c

#include "ruby.h"

#if SIZEOF_LONG == SIZEOF_VOIDP
# define nonspecial_obj_id(obj) (VALUE)((SIGNED_VALUE)(obj)|FIXNUM_FLAG)
# define obj_id_to_ref(objid) ((objid) ^ FIXNUM_FLAG) /* unset FIXNUM_FLAG */
#elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
# define nonspecial_obj_id(obj) LL2NUM((SIGNED_VALUE)(obj) / 2)
# define obj_id_to_ref(objid) (FIXNUM_P(objid) ? \
   ((objid) ^ FIXNUM_FLAG) : (NUM2PTR(objid) << 1))
#else
# error not supported
#endif

static VALUE
rb_memory_location(VALUE self)
{
    return nonspecial_obj_id(self);
}

void
Init_memory_location(void)
{
    rb_define_method(rb_mKernel, "memory_location", rb_memory_location, 0);
}
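These macros mirror the interpreter's own object-id encoding: on platforms where `long` is pointer-sized, the object's address is tagged with `FIXNUM_FLAG` (the low bit), which is reversible because heap slots are even-addressed. A standalone model of the round trip for that case (the address constant is an arbitrary assumption; the `long long` branch above divides by 2 instead):

    #include <stdio.h>
    #include <stdint.h>

    /* Round trip: tag an even slot address as a fixnum-style id and
     * recover it, as nonspecial_obj_id/obj_id_to_ref do above. */
    #define FIXNUM_FLAG_SKETCH 0x1UL

    int
    main(void)
    {
        uintptr_t slot = 0x100010;                    /* assumed slot address (even) */
        uintptr_t id   = slot | FIXNUM_FLAG_SKETCH;   /* nonspecial_obj_id */
        uintptr_t back = id ^ FIXNUM_FLAG_SKETCH;     /* obj_id_to_ref */
        printf("%d\n", back == slot);                 /* prints 1 */
        return 0;
    }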
gc.c

#include "ruby_atomic.h"
#include "probes.h"
#include "id_table.h"
#include "symbol.h"
#include <stdio.h>
#include <stdarg.h>
#include <setjmp.h>
...
    FALSE,
};

static st_table *id_to_obj_tbl;
static st_table *obj_to_id_tbl;

/* GC_DEBUG:
 * enable to embed GC debugging information.
 */
...
            VALUE flags;        /* always 0 for freed obj */
            struct RVALUE *next;
        } free;
        struct RMoved  moved;
        struct RBasic  basic;
        struct RObject object;
        struct RClass  klass;
...
#if USE_RGENGC
            size_t minor_gc_count;
            size_t major_gc_count;
            size_t object_id_collisions;
#if RGENGC_PROFILE > 0
            size_t total_generated_normal_object_count;
            size_t total_generated_shady_object_count;
...
            size_t error_count;
#endif
        } rgengc;

        struct {
            size_t considered_count_table[T_MASK];
            size_t moved_count_table[T_MASK];
        } rcompactor;

#if GC_ENABLE_INCREMENTAL_MARK
        struct {
            size_t pooled_slots;
...
#if USE_RGENGC
    bits_t wb_unprotected_bits[HEAP_PAGE_BITMAP_LIMIT];
#endif
    /* If set, the object is not movable */
    bits_t pinned_bits[HEAP_PAGE_BITMAP_LIMIT];
    /* the following three bitmaps are cleared at the beginning of full GC */
    bits_t mark_bits[HEAP_PAGE_BITMAP_LIMIT];
#if USE_RGENGC
...
/* getting bitmap */
#define GET_HEAP_MARK_BITS(x)           (&GET_HEAP_PAGE(x)->mark_bits[0])
#define GET_HEAP_PINNED_BITS(x)         (&GET_HEAP_PAGE(x)->pinned_bits[0])
#if USE_RGENGC
#define GET_HEAP_UNCOLLECTIBLE_BITS(x)  (&GET_HEAP_PAGE(x)->uncollectible_bits[0])
#define GET_HEAP_WB_UNPROTECTED_BITS(x) (&GET_HEAP_PAGE(x)->wb_unprotected_bits[0])
...
int ruby_disable_gc = 0;

void rb_iseq_mark(const rb_iseq_t *iseq);
void rb_iseq_update_references(rb_iseq_t *iseq);
void rb_iseq_free(const rb_iseq_t *iseq);
void rb_vm_update_references(void *ptr);

void rb_gcdebug_print_obj_condition(VALUE obj);
...
static void gc_sweep_continue(rb_objspace_t *objspace, rb_heap_t *heap);

static inline void gc_mark(rb_objspace_t *objspace, VALUE ptr);
static inline void gc_pin(rb_objspace_t *objspace, VALUE ptr);
static inline void gc_mark_and_pin(rb_objspace_t *objspace, VALUE ptr);
static void gc_mark_ptr(rb_objspace_t *objspace, VALUE ptr);
NO_SANITIZE("memory", static void gc_mark_maybe(rb_objspace_t *objspace, VALUE ptr));
static void gc_mark_and_pin_maybe(rb_objspace_t *objspace, VALUE ptr);
static void gc_mark_children(rb_objspace_t *objspace, VALUE ptr);

static int gc_mark_stacked_objects_incremental(rb_objspace_t *, size_t count);
...
static inline void gc_prof_set_malloc_info(rb_objspace_t *);
static inline void gc_prof_set_heap_info(rb_objspace_t *);

#define TYPED_UPDATE_IF_MOVED(_objspace, _type, _thing) do { \
    if (gc_object_moved_p(_objspace, (VALUE)_thing)) {       \
        (_thing) = (_type)RMOVED((_thing))->destination;     \
    }                                                        \
} while (0)

#define UPDATE_IF_MOVED(_objspace, _thing) TYPED_UPDATE_IF_MOVED(_objspace, VALUE, _thing)

#define gc_prof_record(objspace) (objspace)->profile.current_record
#define gc_prof_enabled(objspace) ((objspace)->profile.run && (objspace)->profile.current_record)
...
#define FL_UNSET2(x,f)  FL_CHECK2("FL_UNSET2", x, RBASIC(x)->flags &= ~(f))

#define RVALUE_MARK_BITMAP(obj)       MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(obj), (obj))
#define RVALUE_PIN_BITMAP(obj)        MARKED_IN_BITMAP(GET_HEAP_PINNED_BITS(obj), (obj))
#define RVALUE_PAGE_MARKED(page, obj) MARKED_IN_BITMAP((page)->mark_bits, (obj))

#if USE_RGENGC
...
}
#endif

static inline int
gc_object_moved_p(rb_objspace_t * objspace, VALUE obj)
{
    if (RB_SPECIAL_CONST_P(obj)) {
        return FALSE;
    }
    else {
        return BUILTIN_TYPE(obj) == T_MOVED;
    }
}
static inline int
RVALUE_MARKED(VALUE obj)
{
...
    return RVALUE_MARK_BITMAP(obj) != 0;
}

static inline int
RVALUE_PINNED(VALUE obj)
{
    check_rvalue_consistency(obj);
    return RVALUE_PIN_BITMAP(obj) != 0;
}

#if USE_RGENGC
static inline int
RVALUE_WB_UNPROTECTED(VALUE obj)
...
        rb_free_generic_ivar((VALUE)obj);
        FL_UNSET(obj, FL_EXIVAR);
    }

    VALUE id;
    if (st_lookup(obj_to_id_tbl, (st_data_t)obj, &id)) {
#ifdef GC_COMPACT_DEBUG
        fprintf(stderr, "Collecting %p -> %p\n", obj, obj_id_to_ref(id));
#endif
        st_delete(obj_to_id_tbl, (st_data_t *)&obj, 0);
        st_delete(id_to_obj_tbl, (st_data_t *)&id, 0);
    }

#if USE_RGENGC
    if (RVALUE_WB_UNPROTECTED(obj)) CLEAR_IN_BITMAP(GET_HEAP_WB_UNPROTECTED_BITS(obj), obj);
...
        break;
      case T_RATIONAL:
      case T_COMPLEX:
      case T_MOVED:
        break;
      case T_ICLASS:
        /* Basically, T_ICLASS shares the table with the module */
...
        UNEXPECTED_NODE(internal_object_p);
        break;
      case T_NONE:
      case T_MOVED:
      case T_IMEMO:
      case T_ICLASS:
      case T_ZOMBIE:
...
    if (ptr == Qtrue) return Qtrue;
    if (ptr == Qfalse) return Qfalse;
    if (ptr == Qnil) return Qnil;
    if (FIXNUM_P(ptr)) return (VALUE)ptr;
    if (FLONUM_P(ptr)) return (VALUE)ptr;

    ptr = obj_id_to_ref(objid);
    if (st_lookup(id_to_obj_tbl, objid, &ptr)) {
        return ptr;
    }

    if ((ptr % sizeof(RVALUE)) == (4 << 2)) {
        ID symid = ptr / sizeof(RVALUE);
        if (rb_id2str(symid) == 0)
...
    else if (SPECIAL_CONST_P(obj)) {
        return LONG2NUM((SIGNED_VALUE)obj);
    }

    VALUE id;
    if (st_lookup(obj_to_id_tbl, (st_data_t)obj, &id)) {
#ifdef GC_COMPACT_DEBUG
        fprintf(stderr, "Second time object_id was called on this object: %p\n", obj);
#endif
        return id;
    }
    else {
        int tries;
        id = nonspecial_obj_id(obj);

        while (1) {
            /* id is the object id */
            if (st_lookup(id_to_obj_tbl, (st_data_t)id, 0)) {
#ifdef GC_COMPACT_DEBUG
                fprintf(stderr, "object_id called on %p, but there was a collision at %d\n", obj, NUM2INT(id));
#endif
                rb_objspace_t *objspace = &rb_objspace;
                objspace->profile.object_id_collisions++;
                id += 40;
            }
            else {
#ifdef GC_COMPACT_DEBUG
                fprintf(stderr, "Initial insert: %p id: %d\n", obj, NUM2INT(id));
#endif
                st_insert(obj_to_id_tbl, (st_data_t)obj, id);
                st_insert(id_to_obj_tbl, (st_data_t)id, obj);
                return id;
            }
        }
    }
    return nonspecial_obj_id(obj);
}
...
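Because an object's id is no longer its address for all time, rb_obj_id now memoizes ids in obj_to_id_tbl and resolves collisions (an id already handed out whose object has since moved away) by linear probing. The stride of 40 appears to match sizeof(RVALUE) on common 64-bit builds, stepping the candidate past one heap slot; that reading is mine, not stated in the patch. The loop in miniature:

    #include <stdio.h>

    /* Model of the probing loop above: bump a colliding id by one
     * slot-size until a free id is found, then record it as taken. */
    #define TAKEN_MAX 1024

    static unsigned long taken[TAKEN_MAX];
    static int ntaken;

    static int
    id_taken(unsigned long id)
    {
        for (int i = 0; i < ntaken; i++)
            if (taken[i] == id) return 1;
        return 0;
    }

    static unsigned long
    claim_id(unsigned long candidate)
    {
        while (id_taken(candidate))
            candidate += 40;              /* step past one RVALUE-sized slot */
        taken[ntaken++] = candidate;
        return candidate;
    }

    int
    main(void)
    {
        printf("%lu\n", claim_id(1000));  /* 1000 */
        printf("%lu\n", claim_id(1000));  /* 1040: one collision, one bump */
        return 0;
    }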
        break;

      case T_ZOMBIE:
      case T_MOVED:
        break;

      default:
...
    return ST_CONTINUE;
}

static VALUE
type_sym(int type)
{
    switch (type) {
#define COUNT_TYPE(t) case (t): return ID2SYM(rb_intern(#t)); break;
        COUNT_TYPE(T_NONE);
        COUNT_TYPE(T_OBJECT);
        COUNT_TYPE(T_CLASS);
        COUNT_TYPE(T_MODULE);
        COUNT_TYPE(T_FLOAT);
        COUNT_TYPE(T_STRING);
        COUNT_TYPE(T_REGEXP);
        COUNT_TYPE(T_ARRAY);
        COUNT_TYPE(T_HASH);
        COUNT_TYPE(T_STRUCT);
        COUNT_TYPE(T_BIGNUM);
        COUNT_TYPE(T_FILE);
        COUNT_TYPE(T_DATA);
        COUNT_TYPE(T_MATCH);
        COUNT_TYPE(T_COMPLEX);
        COUNT_TYPE(T_RATIONAL);
        COUNT_TYPE(T_NIL);
        COUNT_TYPE(T_TRUE);
        COUNT_TYPE(T_FALSE);
        COUNT_TYPE(T_SYMBOL);
        COUNT_TYPE(T_FIXNUM);
        COUNT_TYPE(T_IMEMO);
        COUNT_TYPE(T_UNDEF);
        COUNT_TYPE(T_NODE);
        COUNT_TYPE(T_ICLASS);
        COUNT_TYPE(T_ZOMBIE);
        COUNT_TYPE(T_MOVED);
#undef COUNT_TYPE
      default: return INT2NUM(type); break;
    }
}

/*
 *  call-seq:
 *     ObjectSpace.count_objects([result_hash]) -> hash
...
    rb_hash_aset(hash, ID2SYM(rb_intern("FREE")), SIZET2NUM(freed));

    for (i = 0; i <= T_MASK; i++) {
-       VALUE type;
-       switch (i) {
-#define COUNT_TYPE(t) case (t): type = ID2SYM(rb_intern(#t)); break;
-           COUNT_TYPE(T_NONE);
-           COUNT_TYPE(T_OBJECT);
-           COUNT_TYPE(T_CLASS);
-           COUNT_TYPE(T_MODULE);
-           COUNT_TYPE(T_FLOAT);
-           COUNT_TYPE(T_STRING);
-           COUNT_TYPE(T_REGEXP);
-           COUNT_TYPE(T_ARRAY);
-           COUNT_TYPE(T_HASH);
-           COUNT_TYPE(T_STRUCT);
-           COUNT_TYPE(T_BIGNUM);
-           COUNT_TYPE(T_FILE);
-           COUNT_TYPE(T_DATA);
-           COUNT_TYPE(T_MATCH);
-           COUNT_TYPE(T_COMPLEX);
-           COUNT_TYPE(T_RATIONAL);
-           COUNT_TYPE(T_NIL);
-           COUNT_TYPE(T_TRUE);
-           COUNT_TYPE(T_FALSE);
-           COUNT_TYPE(T_SYMBOL);
-           COUNT_TYPE(T_FIXNUM);
-           COUNT_TYPE(T_IMEMO);
-           COUNT_TYPE(T_UNDEF);
-           COUNT_TYPE(T_ICLASS);
-           COUNT_TYPE(T_ZOMBIE);
-#undef COUNT_TYPE
-         default:              type = INT2NUM(i); break;
-       }
+       VALUE type = type_sym(i);
        if (counts[i])
            rb_hash_aset(hash, type, SIZET2NUM(counts[i]));
    }
...
static void
push_mark_stack(mark_stack_t *stack, VALUE data)
{
    if (BUILTIN_TYPE(data) == T_MOVED) {
        VALUE dest = (VALUE)RMOVED(data)->destination;
        fprintf(stderr, "<%s>", obj_info(dest));
        rb_bug("moved item (%p -> %p (type: %d)) should not be marked", (RVALUE *)data, (RVALUE *)dest, BUILTIN_TYPE(dest));
    }

    if (stack->index == stack->limit) {
        push_mark_stack_chunk(stack);
    }
...
    VALUE v;
    while (n--) {
        v = *x;
-       gc_mark_maybe(objspace, v);
+       gc_mark_and_pin_maybe(objspace, v);
        x++;
    }
}
...
    gc_mark_locations(&rb_objspace, start, end);
}

static void
gc_mark_and_pin_values(rb_objspace_t *objspace, long n, const VALUE *values)
{
    long i;

    for (i=0; i<n; i++) {
        gc_mark_and_pin(objspace, values[i]);
    }
}

static void
gc_mark_values(rb_objspace_t *objspace, long n, const VALUE *values)
{
...
rb_gc_mark_values(long n, const VALUE *values)
{
    rb_objspace_t *objspace = &rb_objspace;
-   gc_mark_values(objspace, n, values);
+   gc_mark_and_pin_values(objspace, n, values);
}

static void
gc_mark_and_pin_stack_values(rb_objspace_t *objspace, long n, const VALUE *values)
{
    long i;

    for (i=0; i<n; i++) {
        /* skip MOVED objects that are on the stack */
        if (is_markable_object(objspace, values[i]) && T_MOVED != BUILTIN_TYPE(values[i])) {
            gc_mark_and_pin(objspace, values[i]);
        }
    }
}

void
rb_gc_mark_stack_values(long n, const VALUE *values)
{
    rb_objspace_t *objspace = &rb_objspace;
    gc_mark_and_pin_stack_values(objspace, n, values);
}
static int
-mark_entry(st_data_t key, st_data_t value, st_data_t data)
+mark_entry_no_pin(st_data_t key, st_data_t value, st_data_t data)
{
    rb_objspace_t *objspace = (rb_objspace_t *)data;
    gc_mark(objspace, (VALUE)value);
    return ST_CONTINUE;
}

static int
mark_entry(st_data_t key, st_data_t value, st_data_t data)
{
    rb_objspace_t *objspace = (rb_objspace_t *)data;
    gc_mark_and_pin(objspace, (VALUE)value);
    return ST_CONTINUE;
}

static void
mark_tbl_no_pin(rb_objspace_t *objspace, st_table *tbl)
{
    if (!tbl || tbl->num_entries == 0) return;
    st_foreach(tbl, mark_entry_no_pin, (st_data_t)objspace);
}

static void
mark_tbl(rb_objspace_t *objspace, st_table *tbl)
{
...
{
    rb_objspace_t *objspace = (rb_objspace_t *)data;

-   gc_mark(objspace, (VALUE)key);
+   if (SPECIAL_CONST_P((VALUE)key) || BUILTIN_TYPE((VALUE)key) == T_STRING) {
+       gc_mark(objspace, (VALUE)key);
+   }
+   else {
+       gc_mark_and_pin(objspace, (VALUE)key);
+   }
    gc_mark(objspace, (VALUE)value);
    return ST_CONTINUE;
}
...
    mark_tbl(&rb_objspace, tbl);
}

void
rb_mark_tbl_no_pin(st_table *tbl)
{
    mark_tbl_no_pin(&rb_objspace, tbl);
}

static void
gc_mark_and_pin_maybe(rb_objspace_t *objspace, VALUE obj)
{
    (void)VALGRIND_MAKE_MEM_DEFINED(&obj, sizeof(obj));
    if (is_pointer_to_heap(objspace, (void *)obj)) {
        int type = BUILTIN_TYPE(obj);
        if (type != T_MOVED && type != T_ZOMBIE && type != T_NONE) {
            gc_pin(objspace, obj);
            gc_mark_ptr(objspace, obj);
        }
    }
}

static void
gc_mark_maybe(rb_objspace_t *objspace, VALUE obj)
{
...
void
rb_gc_mark_maybe(VALUE obj)
{
-   gc_mark_maybe(&rb_objspace, obj);
+   gc_mark_and_pin_maybe(&rb_objspace, obj);
}

static inline int
...
    }
}

static inline void
gc_mark_and_pin(rb_objspace_t *objspace, VALUE obj)
{
    if (!is_markable_object(objspace, obj)) return;
    MARK_IN_BITMAP(GET_HEAP_PINNED_BITS(obj), obj);
    gc_mark_ptr(objspace, obj);
}

static inline void
gc_pin(rb_objspace_t *objspace, VALUE obj)
{
    if (!is_markable_object(objspace, obj)) return;
    MARK_IN_BITMAP(GET_HEAP_PINNED_BITS(obj), obj);
}

static inline void
gc_mark(rb_objspace_t *objspace, VALUE obj)
{
...
}

void
-rb_gc_mark(VALUE ptr)
+rb_gc_mark_no_pin(VALUE ptr)
{
    gc_mark(&rb_objspace, ptr);
}

void
rb_gc_mark(VALUE ptr)
{
    gc_mark_and_pin(&rb_objspace, ptr);
}

/* CAUTION: THIS FUNCTION IS VALID *ONLY BEFORE* SWEEPING.
 * This function is only for GC_END_MARK timing.
 */
...
    return RVALUE_MARKED(obj) ? TRUE : FALSE;
}

int
rb_objspace_pinned_object_p(VALUE obj)
{
    return RVALUE_PINNED(obj) ? TRUE : FALSE;
}
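The split above gives the collector two orthogonal bits per object: mark (live) and pin (live and not movable). rb_gc_mark keeps its old name and now pins, which is the safe default for C extensions that may hold raw pointers; precise references use rb_gc_mark_no_pin and stay movable. A toy model of that bookkeeping (standalone, not Ruby's actual per-page bitmap layout):

    #include <stdio.h>

    /* Toy two-bitmap bookkeeping: one mark bit and one pin bit per slot. */
    static unsigned long mark_bits, pin_bits;

    static void mark(int slot)         { mark_bits |= 1UL << slot; }
    static void mark_and_pin(int slot) { mark_bits |= 1UL << slot; pin_bits |= 1UL << slot; }
    static int  movable(int slot)      { return !(pin_bits & (1UL << slot)); }

    int
    main(void)
    {
        mark(3);          /* precise reference: slot may move */
        mark_and_pin(5);  /* conservative reference: slot must stay put */
        printf("%d %d\n", movable(3), movable(5));  /* 1 0 */
        return 0;
    }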
static inline void
gc_mark_set_parent(rb_objspace_t *objspace, VALUE obj)
{
...
        {
            const rb_env_t *env = (const rb_env_t *)obj;
            GC_ASSERT(VM_ENV_ESCAPED_P(env->ep));
-           gc_mark_values(objspace, (long)env->env_size, env->env);
+           gc_mark_and_pin_values(objspace, (long)env->env_size, env->env);
            VM_ENV_FLAGS_SET(env->ep, VM_ENV_FLAG_WB_REQUIRED);
-           gc_mark(objspace, (VALUE)rb_vm_env_prev_env(env));
+           gc_mark_and_pin(objspace, (VALUE)rb_vm_env_prev_env(env));
            gc_mark(objspace, (VALUE)env->iseq);
        }
        return;
...
      case T_MODULE:
        mark_m_tbl(objspace, RCLASS_M_TBL(obj));
        if (!RCLASS_EXT(obj)) break;
-       mark_tbl(objspace, RCLASS_IV_TBL(obj));
+       mark_tbl_no_pin(objspace, RCLASS_IV_TBL(obj));
        mark_const_tbl(objspace, RCLASS_CONST_TBL(obj));
        gc_mark(objspace, RCLASS_SUPER((VALUE)obj));
        break;
...
#if GC_DEBUG
        rb_gcdebug_print_obj_condition((VALUE)obj);
#endif
        if (BUILTIN_TYPE(obj) == T_MOVED)  rb_bug("rb_gc_mark(): %p is T_MOVED", (void *)obj);
        if (BUILTIN_TYPE(obj) == T_NONE)   rb_bug("rb_gc_mark(): %p is T_NONE", (void *)obj);
        if (BUILTIN_TYPE(obj) == T_ZOMBIE) rb_bug("rb_gc_mark(): %p is T_ZOMBIE", (void *)obj);
        rb_bug("rb_gc_mark(): unknown data type 0x%x(%p) %s",
...
    /* count objects */
    data->live_object_count++;

-   rb_objspace_reachable_objects_from(obj, check_children_i, (void *)data);
+   if (!gc_object_moved_p(objspace, obj)) {
+       /* moved slots don't have children */
+       rb_objspace_reachable_objects_from(obj, check_children_i, (void *)data);
+   }

#if USE_RGENGC
    /* check health of children */
...
    list_for_each(&heap->pages, page, page_node) {
        memset(&page->mark_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
        memset(&page->pinned_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
        memset(&page->marking_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
        memset(&page->uncollectible_bits[0], 0, HEAP_PAGE_BITMAP_SIZE);
        page->flags.has_uncollectible_shady_objects = FALSE;
...
    size_t n = 0;
    static ID ID_marked;
#if USE_RGENGC
-   static ID ID_wb_protected, ID_old, ID_marking, ID_uncollectible;
+   static ID ID_wb_protected, ID_old, ID_marking, ID_uncollectible, ID_pinned;
#endif

    if (!ID_marked) {
...
        I(old);
        I(marking);
        I(uncollectible);
        I(pinned);
#endif
#undef I
    }
...
    if (MARKED_IN_BITMAP(GET_HEAP_MARKING_BITS(obj), obj) && n<max) flags[n++] = ID_marking;
#endif
    if (MARKED_IN_BITMAP(GET_HEAP_MARK_BITS(obj), obj) && n<max)   flags[n++] = ID_marked;
    if (MARKED_IN_BITMAP(GET_HEAP_PINNED_BITS(obj), obj) && n<max) flags[n++] = ID_pinned;
    return n;
}
...
    return Qnil;
}

static int
gc_is_moveable_obj(rb_objspace_t *objspace, VALUE obj)
{
    if (SPECIAL_CONST_P(obj) || BUILTIN_TYPE(obj) == T_NONE || BUILTIN_TYPE(obj) == T_ZOMBIE || rb_objspace_pinned_object_p(obj)) {
        return FALSE;
    }

    if (FL_TEST(obj, FL_FINALIZE)) {
        return FALSE;
    }

    switch (BUILTIN_TYPE(obj)) {
      case T_NONE:
      case T_NIL:
        return FALSE;
        break;
      case T_STRING:
      case T_OBJECT:
      case T_FLOAT:
      case T_IMEMO:
      case T_ARRAY:
      case T_BIGNUM:
      case T_ICLASS:
      case T_MODULE:
      case T_REGEXP:
      case T_DATA:
      case T_SYMBOL:
      case T_MATCH:
      case T_STRUCT:
      case T_HASH:
      case T_FILE:
      case T_COMPLEX:
      case T_RATIONAL:
      case T_NODE:
      case T_CLASS:
        break;
      default:
        rb_bug("gc_is_moveable_obj: unreachable (%d)", (int)BUILTIN_TYPE(obj));
        break;
    }

    return TRUE;
}
static int
update_id_to_obj(st_data_t *key, st_data_t *value, st_data_t arg, int exists)
{
    if (exists) {
        *value = arg;
        return ST_CONTINUE;
    }
    else {
        return ST_STOP;
    }
}
static void
gc_move(rb_objspace_t *objspace, VALUE scan, VALUE free)
{
    int marked;
    int wb_unprotected;
    int uncollectible;
    int marking;
    RVALUE *dest = (RVALUE *)free;
    RVALUE *src = (RVALUE *)scan;

#if RGENGC_CHECK_MODE >= 5
    fprintf(stderr, "moving: %s -> ", obj_info(src));
#endif

    /* Save off bits for current object. */
    marked = rb_objspace_marked_object_p((VALUE)src);
    wb_unprotected = RVALUE_WB_UNPROTECTED((VALUE)src);
    uncollectible = RVALUE_UNCOLLECTIBLE((VALUE)src);
    marking = RVALUE_MARKING((VALUE)src);

    objspace->total_allocated_objects++;

    /* Clear bits for eventual T_MOVED */
    CLEAR_IN_BITMAP(GET_HEAP_MARK_BITS((VALUE)src), (VALUE)src);
    CLEAR_IN_BITMAP(GET_HEAP_WB_UNPROTECTED_BITS((VALUE)src), (VALUE)src);
    CLEAR_IN_BITMAP(GET_HEAP_UNCOLLECTIBLE_BITS((VALUE)src), (VALUE)src);
    CLEAR_IN_BITMAP(GET_HEAP_MARKING_BITS((VALUE)src), (VALUE)src);

    if (FL_TEST(src, FL_EXIVAR)) {
        rb_mv_generic_ivar((VALUE)src, (VALUE)dest);
    }

    VALUE id;

    /* If the source object's object_id has been seen, we need to update
     * the object to object id mapping. */
    if (st_lookup(obj_to_id_tbl, (VALUE)src, &id)) {
#ifdef GC_COMPACT_DEBUG
        fprintf(stderr, "Moving insert: %p -> %p\n", src, dest);
#endif
        st_delete(obj_to_id_tbl, (st_data_t *)&src, 0);
        st_insert(obj_to_id_tbl, (VALUE)dest, id);
        st_update(id_to_obj_tbl, (st_data_t)id, update_id_to_obj, (st_data_t)dest);
    }

    /* Move the object */
    memcpy(dest, src, sizeof(RVALUE));
    memset(src, 0, sizeof(RVALUE));

    /* Set bits for object in new location */
    if (marking) {
        MARK_IN_BITMAP(GET_HEAP_MARKING_BITS((VALUE)dest), (VALUE)dest);
    }
    else {
        CLEAR_IN_BITMAP(GET_HEAP_MARKING_BITS((VALUE)dest), (VALUE)dest);
    }

    if (marked) {
        MARK_IN_BITMAP(GET_HEAP_MARK_BITS((VALUE)dest), (VALUE)dest);
    }
    else {
        CLEAR_IN_BITMAP(GET_HEAP_MARK_BITS((VALUE)dest), (VALUE)dest);
    }

    if (wb_unprotected) {
        MARK_IN_BITMAP(GET_HEAP_WB_UNPROTECTED_BITS((VALUE)dest), (VALUE)dest);
    }
    else {
        CLEAR_IN_BITMAP(GET_HEAP_WB_UNPROTECTED_BITS((VALUE)dest), (VALUE)dest);
    }

    if (uncollectible) {
        MARK_IN_BITMAP(GET_HEAP_UNCOLLECTIBLE_BITS((VALUE)dest), (VALUE)dest);
    }
    else {
        CLEAR_IN_BITMAP(GET_HEAP_UNCOLLECTIBLE_BITS((VALUE)dest), (VALUE)dest);
    }

    /* Assign forwarding address */
    src->as.moved.flags = T_MOVED;
    src->as.moved.destination = (VALUE)dest;

#if RGENGC_CHECK_MODE >= 5
    fprintf(stderr, "%s\n", obj_info(dest));
#endif
}
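gc_move leaves a forwarding record behind: after the memcpy, the vacated slot is overwritten with a T_MOVED header whose destination field points at the new copy, and every later reference fix-up (UPDATE_IF_MOVED, rb_gc_new_location) consults it. A minimal standalone model of that protocol:

    #include <stdio.h>
    #include <string.h>

    /* Standalone model of T_MOVED forwarding: after a move, the old slot
     * carries only a tag and a destination pointer. */
    enum { T_PAYLOAD_SKETCH, T_MOVED_SKETCH };

    struct slot {
        int flags;          /* type tag */
        struct slot *dest;  /* valid only when flags == T_MOVED_SKETCH */
        int payload;
    };

    static void
    move(struct slot *src, struct slot *free_slot)
    {
        memcpy(free_slot, src, sizeof(*src));   /* copy the object */
        memset(src, 0, sizeof(*src));
        src->flags = T_MOVED_SKETCH;            /* leave forwarding record */
        src->dest = free_slot;
    }

    static struct slot *
    new_location(struct slot *ref)              /* cf. rb_gc_new_location */
    {
        return ref->flags == T_MOVED_SKETCH ? ref->dest : ref;
    }

    int
    main(void)
    {
        struct slot heap[2] = {{T_PAYLOAD_SKETCH, NULL, 42}, {0}};
        struct slot *ref = &heap[0];            /* stale reference */
        move(&heap[0], &heap[1]);
        printf("%d\n", new_location(ref)->payload);  /* 42, via forwarding */
        return 0;
    }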
struct heap_cursor {
    RVALUE *slot;
    size_t index;
    struct heap_page *page;
    rb_objspace_t * objspace;
};

static void
advance_cursor(struct heap_cursor *free, struct heap_page **page_list)
{
    rb_objspace_t *objspace = free->objspace;

    if (free->slot == free->page->start + free->page->total_slots - 1) {
        free->index++;
        free->page = page_list[free->index];
        free->slot = free->page->start;
    }
    else {
        free->slot++;
    }
}

static void
retreat_cursor(struct heap_cursor *scan, struct heap_page **page_list)
{
    rb_objspace_t *objspace = scan->objspace;

    if (scan->slot == scan->page->start) {
        scan->index--;
        scan->page = page_list[scan->index];
        scan->slot = scan->page->start + scan->page->total_slots - 1;
    }
    else {
        scan->slot--;
    }
}

static int
not_met(struct heap_cursor *free, struct heap_cursor *scan)
{
    if (free->index < scan->index)
        return 1;

    if (free->index > scan->index)
        return 0;

    return free->slot < scan->slot;
}

static void
init_cursors(rb_objspace_t *objspace, struct heap_cursor *free, struct heap_cursor *scan, struct heap_page **page_list)
{
    struct heap_page *page;
    page = page_list[0];

    free->index = 0;
    free->page = page;
    free->slot = page->start;
    free->objspace = objspace;

    page = page_list[heap_allocated_pages - 1];
    scan->index = heap_allocated_pages - 1;
    scan->page = page;
    scan->slot = page->start + page->total_slots - 1;
    scan->objspace = objspace;
}
int
count_pinned(struct heap_page *page)
{
    RVALUE *pstart = page->start;
    RVALUE *pend = pstart + page->total_slots;
    int pinned = 0;

    VALUE v = (VALUE)pstart;
    for (; v != (VALUE)pend; v += sizeof(RVALUE)) {
        if (RBASIC(v)->flags && RVALUE_PINNED(v)) {
            pinned++;
        }
    }

    return pinned;
}

int
compare_pinned(const void *left, const void *right)
{
    int left_count = count_pinned(*(struct heap_page * const *)left);
    int right_count = count_pinned(*(struct heap_page * const *)right);

    return right_count - left_count;
}
static void
gc_compact_heap(rb_objspace_t *objspace)
{
    struct heap_cursor free_cursor;
    struct heap_cursor scan_cursor;
    int number_considered;
    struct heap_page **page_list;

    memset(objspace->rcompactor.considered_count_table, 0, T_MASK * sizeof(size_t));
    memset(objspace->rcompactor.moved_count_table, 0, T_MASK * sizeof(size_t));

    page_list = calloc(heap_allocated_pages, sizeof(struct heap_page *));
    memcpy(page_list, heap_pages_sorted, heap_allocated_pages * sizeof(struct heap_page *));
    qsort(page_list, heap_allocated_pages, sizeof(struct heap_page *), compare_pinned);

    init_cursors(objspace, &free_cursor, &scan_cursor, page_list);

    /* Two finger algorithm */
    while (not_met(&free_cursor, &scan_cursor)) {
        while (BUILTIN_TYPE(free_cursor.slot) != T_NONE && not_met(&free_cursor, &scan_cursor)) {
            advance_cursor(&free_cursor, page_list);
        }

        objspace->rcompactor.considered_count_table[BUILTIN_TYPE((VALUE)scan_cursor.slot)]++;

        while (!gc_is_moveable_obj(objspace, (VALUE)scan_cursor.slot) && not_met(&free_cursor, &scan_cursor)) {
            retreat_cursor(&scan_cursor, page_list);
            objspace->rcompactor.considered_count_table[BUILTIN_TYPE((VALUE)scan_cursor.slot)]++;
        }

        if (not_met(&free_cursor, &scan_cursor)) {
            objspace->rcompactor.moved_count_table[BUILTIN_TYPE((VALUE)scan_cursor.slot)]++;
            gc_move(objspace, (VALUE)scan_cursor.slot, (VALUE)free_cursor.slot);
            advance_cursor(&free_cursor, page_list);
            retreat_cursor(&scan_cursor, page_list);
        }
    }

    free(page_list);
}
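This is the classic two-finger compactor: a free finger walks forward looking for holes, a scan finger walks backward looking for movable objects, and the pass ends when the fingers cross. The compare_pinned qsort puts the most-pinned pages at the front, presumably so that lightly pinned pages end up at the tail, where the scan finger can empty them. The same loop on a flat array, with 0 as the hole marker (pinning omitted for brevity; the real loop skips unmovable slots via gc_is_moveable_obj):

    #include <stdio.h>

    /* Two-finger compaction over a toy heap: 0 = free slot, nonzero = object.
     * Objects slide from the tail into leading holes; tail slots become free. */
    static void
    two_finger_compact(int *heap, int n)
    {
        int free_i = 0, scan_i = n - 1;

        while (free_i < scan_i) {
            while (free_i < scan_i && heap[free_i] != 0) free_i++;   /* find hole */
            while (free_i < scan_i && heap[scan_i] == 0) scan_i--;   /* find object */
            if (free_i < scan_i) {
                heap[free_i++] = heap[scan_i];   /* move */
                heap[scan_i--] = 0;              /* forwarding lives here in the real GC */
            }
        }
    }

    int
    main(void)
    {
        int heap[] = {7, 0, 3, 0, 9, 4};
        two_finger_compact(heap, 6);
        for (int i = 0; i < 6; i++) printf("%d ", heap[i]);  /* 7 4 3 9 0 0 */
        printf("\n");
        return 0;
    }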
static void
gc_ref_update_array(rb_objspace_t * objspace, VALUE v)
{
    long i, len;

    if (FL_TEST(v, ELTS_SHARED))
        return;

    len = RARRAY_LEN(v);
    if (len > 0) {
        VALUE *ptr = (VALUE *)RARRAY_CONST_PTR_TRANSIENT(v);
        for (i = 0; i < len; i++) {
            UPDATE_IF_MOVED(objspace, ptr[i]);
        }
    }
}

static void
gc_ref_update_object(rb_objspace_t * objspace, VALUE v)
{
    uint32_t i, len = ROBJECT_NUMIV(v);
    VALUE *ptr = ROBJECT_IVPTR(v);

    for (i = 0; i < len; i++) {
        UPDATE_IF_MOVED(objspace, ptr[i]);
    }
}
static int
hash_replace_ref(st_data_t *key, st_data_t *value, st_data_t argp, int existing)
{
    rb_objspace_t *objspace;

    if (!SPECIAL_CONST_P((void *)*key) && BUILTIN_TYPE(*key) == T_MOVED) {
        *key = (VALUE)RMOVED(*key)->destination;
    }

    if (!SPECIAL_CONST_P((void *)*value) && BUILTIN_TYPE(*value) == T_MOVED) {
        *value = (VALUE)RMOVED(*value)->destination;
    }

    return ST_CONTINUE;
}

static int
hash_foreach_replace(st_data_t key, st_data_t value, st_data_t argp, int error)
{
    rb_objspace_t *objspace;

    objspace = (rb_objspace_t *)argp;

    if (!SPECIAL_CONST_P((void *)key) && BUILTIN_TYPE(key) == T_MOVED) {
        return ST_REPLACE;
    }

    if (!SPECIAL_CONST_P((void *)value) && BUILTIN_TYPE(value) == T_MOVED) {
        return ST_REPLACE;
    }

    return ST_CHECK;
}

static void
gc_update_table_refs(rb_objspace_t * objspace, st_table *ht)
{
    if (st_foreach_with_replace(ht, hash_foreach_replace, hash_replace_ref, (st_data_t)objspace)) {
        rb_raise(rb_eRuntimeError, "hash modified during iteration");
    }
}

void
rb_gc_update_tbl_refs(st_table *ptr)
{
    rb_objspace_t *objspace = &rb_objspace;
    gc_update_table_refs(objspace, ptr);
}

static void
gc_ref_update_hash(rb_objspace_t * objspace, VALUE v)
{
    gc_update_table_refs(objspace, rb_hash_tbl_raw(v));
}

void
rb_update_st_references(struct st_table *ht)
{
    rb_objspace_t *objspace = &rb_objspace;
    gc_update_table_refs(objspace, ht);
}
static void
gc_ref_update_method_entry(rb_objspace_t *objspace, rb_method_entry_t *me)
{
    rb_method_definition_t *def = me->def;

    UPDATE_IF_MOVED(objspace, me->owner);
    UPDATE_IF_MOVED(objspace, me->defined_class);

    if (def) {
        switch (def->type) {
          case VM_METHOD_TYPE_ISEQ:
            if (def->body.iseq.iseqptr) {
                TYPED_UPDATE_IF_MOVED(objspace, rb_iseq_t *, def->body.iseq.iseqptr);
            }
            TYPED_UPDATE_IF_MOVED(objspace, rb_cref_t *, def->body.iseq.cref);
            break;
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            UPDATE_IF_MOVED(objspace, def->body.attr.location);
            break;
          case VM_METHOD_TYPE_BMETHOD:
            UPDATE_IF_MOVED(objspace, def->body.bmethod.proc);
            break;
          case VM_METHOD_TYPE_ALIAS:
            TYPED_UPDATE_IF_MOVED(objspace, struct rb_method_entry_struct *, def->body.alias.original_me);
            return;
          case VM_METHOD_TYPE_REFINED:
            TYPED_UPDATE_IF_MOVED(objspace, struct rb_method_entry_struct *, def->body.refined.orig_me);
            UPDATE_IF_MOVED(objspace, def->body.refined.owner);
            break;
          case VM_METHOD_TYPE_CFUNC:
          case VM_METHOD_TYPE_ZSUPER:
          case VM_METHOD_TYPE_MISSING:
          case VM_METHOD_TYPE_OPTIMIZED:
          case VM_METHOD_TYPE_UNDEF:
          case VM_METHOD_TYPE_NOTIMPLEMENTED:
            break;
        }
    }
}
static void
gc_ref_update_imemo(rb_objspace_t *objspace, VALUE obj)
{
    switch (imemo_type(obj)) {
      case imemo_env:
        {
            rb_env_t *env = (rb_env_t *)obj;
            TYPED_UPDATE_IF_MOVED(objspace, rb_iseq_t *, env->iseq);
        }
        break;
      case imemo_cref:
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.cref.klass);
        TYPED_UPDATE_IF_MOVED(objspace, struct rb_cref_struct *, RANY(obj)->as.imemo.cref.next);
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.cref.refinements);
        break;
      case imemo_svar:
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.svar.cref_or_me);
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.svar.lastline);
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.svar.backref);
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.svar.others);
        break;
      case imemo_throw_data:
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.throw_data.throw_obj);
        break;
      case imemo_ifunc:
        if (is_pointer_to_heap(objspace, RANY(obj)->as.imemo.ifunc.data)) {
            TYPED_UPDATE_IF_MOVED(objspace, void *, RANY(obj)->as.imemo.ifunc.data);
        }
        break;
      case imemo_memo:
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.memo.v1);
        UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.memo.v2);
        if (is_pointer_to_heap(objspace, (void *)RANY(obj)->as.imemo.memo.u3.value)) {
            UPDATE_IF_MOVED(objspace, RANY(obj)->as.imemo.memo.u3.value);
        }
        break;
      case imemo_ment:
        gc_ref_update_method_entry(objspace, &RANY(obj)->as.imemo.ment);
        break;
      case imemo_iseq:
        rb_iseq_update_references((rb_iseq_t *)obj);
        break;
      case imemo_ast:
      case imemo_parser_strterm:
      case imemo_tmpbuf:
        break;
      default:
        rb_bug("not reachable %d", imemo_type(obj));
        break;
    }
}
static enum rb_id_table_iterator_result
check_id_table_move(ID id, VALUE value, void *data)
{
    if (!SPECIAL_CONST_P((void *)value) && BUILTIN_TYPE(value) == T_MOVED) {
        return ID_TABLE_REPLACE;
    }

    return ID_TABLE_CONTINUE;
}

/* Returns the new location of an object, if it moved.  Otherwise returns
 * the existing location. */
VALUE
rb_gc_new_location(VALUE value)
{
    if (!SPECIAL_CONST_P((void *)value) && BUILTIN_TYPE(value) == T_MOVED) {
        return (VALUE)RMOVED(value)->destination;
    }
    else {
        return value;
    }
}

static enum rb_id_table_iterator_result
update_id_table(ID *key, VALUE * value, void *data, int existing)
{
    if (!SPECIAL_CONST_P((void *)*value) && BUILTIN_TYPE(*value) == T_MOVED) {
        *value = (VALUE)RMOVED(*value)->destination;
    }

    return ID_TABLE_CONTINUE;
}

static void
update_m_tbl(rb_objspace_t *objspace, struct rb_id_table *tbl)
{
    if (tbl) {
        rb_id_table_foreach_with_replace(tbl, check_id_table_move, update_id_table, objspace);
    }
}
static enum rb_id_table_iterator_result
update_const_table(VALUE value, void *data)
{
    rb_const_entry_t *ce = (rb_const_entry_t *)value;

    if (!SPECIAL_CONST_P((void *)ce->value) && BUILTIN_TYPE(ce->value) == T_MOVED) {
        ce->value = (VALUE)RMOVED(ce->value)->destination;
    }

    if (!SPECIAL_CONST_P((void *)ce->file) && BUILTIN_TYPE(ce->file) == T_MOVED) {
        ce->file = (VALUE)RMOVED(ce->file)->destination;
    }

    return ID_TABLE_CONTINUE;
}

static void
update_const_tbl(rb_objspace_t *objspace, struct rb_id_table *tbl)
{
    if (!tbl) return;
    rb_id_table_foreach_values(tbl, update_const_table, objspace);
}

static void
update_subclass_entries(rb_objspace_t *objspace, rb_subclass_entry_t *entry)
{
    while (entry) {
        UPDATE_IF_MOVED(objspace, entry->klass);
        entry = entry->next;
    }
}

static void
update_class_ext(rb_objspace_t *objspace, rb_classext_t *ext)
{
    UPDATE_IF_MOVED(objspace, ext->origin_);
    UPDATE_IF_MOVED(objspace, ext->refined_class);
    update_subclass_entries(objspace, ext->subclasses);
}
static void
gc_update_object_references(rb_objspace_t *objspace, VALUE obj)
{
    RVALUE *any = RANY(obj);

#if RGENGC_CHECK_MODE >= 5
    fprintf(stderr, "update-refs: %s -> ", obj_info(obj));
#endif

    switch (BUILTIN_TYPE(obj)) {
      case T_CLASS:
      case T_MODULE:
        update_m_tbl(objspace, RCLASS_M_TBL(obj));
        if (!RCLASS_EXT(obj)) break;
        if (RCLASS_IV_TBL(obj)) {
            gc_update_table_refs(objspace, RCLASS_IV_TBL(obj));
        }
        update_class_ext(objspace, RCLASS_EXT(obj));
        update_const_tbl(objspace, RCLASS_CONST_TBL(obj));
        UPDATE_IF_MOVED(objspace, RCLASS(obj)->super);
        break;

      case T_ICLASS:
        if (FL_TEST(obj, RICLASS_IS_ORIGIN)) {
            update_m_tbl(objspace, RCLASS_M_TBL(obj));
        }
        if (!RCLASS_EXT(obj)) break;
        if (RCLASS_IV_TBL(obj)) {
            gc_update_table_refs(objspace, RCLASS_IV_TBL(obj));
        }
        update_class_ext(objspace, RCLASS_EXT(obj));
        update_m_tbl(objspace, RCLASS_CALLABLE_M_TBL(obj));
        UPDATE_IF_MOVED(objspace, RCLASS(obj)->super);
        break;

      case T_IMEMO:
        gc_ref_update_imemo(objspace, obj);
        break;

      case T_NIL:
      case T_FIXNUM:
      case T_NODE:
      case T_MOVED:
      case T_NONE:
        /* These can't move */
        return;

      case T_ARRAY:
        if (FL_TEST(obj, ELTS_SHARED)) {
            UPDATE_IF_MOVED(objspace, any->as.array.as.heap.aux.shared);
        }
        else {
            gc_ref_update_array(objspace, obj);
        }
        break;

      case T_HASH:
        gc_ref_update_hash(objspace, obj);
        UPDATE_IF_MOVED(objspace, any->as.hash.ifnone);
        break;

      case T_STRING:
        if (STR_SHARED_P(obj)) {
            UPDATE_IF_MOVED(objspace, any->as.string.as.heap.aux.shared);
        }
        break;

      case T_DATA:
        /* Call the compaction callback, if it exists */
        {
            void *const ptr = DATA_PTR(obj);
            if (ptr) {
                if (RTYPEDDATA_P(obj)) {
                    RUBY_DATA_FUNC compact_func = any->as.typeddata.type->function.dcompact;
                    if (compact_func) (*compact_func)(ptr);
                }
            }
        }
        break;

      case T_OBJECT:
        gc_ref_update_object(objspace, obj);
        break;

      case T_FILE:
        if (any->as.file.fptr) {
            UPDATE_IF_MOVED(objspace, any->as.file.fptr->pathv);
            UPDATE_IF_MOVED(objspace, any->as.file.fptr->tied_io_for_writing);
            UPDATE_IF_MOVED(objspace, any->as.file.fptr->writeconv_asciicompat);
            UPDATE_IF_MOVED(objspace, any->as.file.fptr->writeconv_pre_ecopts);
            UPDATE_IF_MOVED(objspace, any->as.file.fptr->encs.ecopts);
            UPDATE_IF_MOVED(objspace, any->as.file.fptr->write_lock);
        }
        break;

      case T_REGEXP:
        UPDATE_IF_MOVED(objspace, any->as.regexp.src);
        break;

      case T_SYMBOL:
        if (DYNAMIC_SYM_P((VALUE)any)) {
            UPDATE_IF_MOVED(objspace, RSYMBOL(any)->fstr);
        }
        break;

      case T_FLOAT:
      case T_BIGNUM:
        break;

      case T_MATCH:
        UPDATE_IF_MOVED(objspace, any->as.match.regexp);
        if (any->as.match.str) {
            UPDATE_IF_MOVED(objspace, any->as.match.str);
        }
        break;

      case T_RATIONAL:
        UPDATE_IF_MOVED(objspace, any->as.rational.num);
        UPDATE_IF_MOVED(objspace, any->as.rational.den);
        break;

      case T_COMPLEX:
        UPDATE_IF_MOVED(objspace, any->as.complex.real);
        UPDATE_IF_MOVED(objspace, any->as.complex.imag);
        break;

      case T_STRUCT:
        {
            long i, len = RSTRUCT_LEN(obj);
            VALUE *ptr = (VALUE *)RSTRUCT_CONST_PTR(obj);

            for (i = 0; i < len; i++) {
                UPDATE_IF_MOVED(objspace, ptr[i]);
            }
        }
        break;

      default:
#if GC_DEBUG
        rb_gcdebug_print_obj_condition((VALUE)obj);
        rb_obj_info_dump(obj);
        rb_bug("unreachable");
#endif
        break;
    }

    UPDATE_IF_MOVED(objspace, RBASIC(obj)->klass);

#if RGENGC_CHECK_MODE >= 5
    fprintf(stderr, "%s\n", obj_info(obj));
#endif
}
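For T_DATA wrappers the collector cannot see through the opaque pointer, so the T_DATA case above hands control to a new dcompact hook on rb_data_type_t: after objects move, the callback runs and can fold rb_gc_new_location over whatever VALUEs the struct holds. A sketch of an extension using it; the wrapper struct and names are invented for illustration, and only dcompact, rb_gc_new_location, and rb_gc_mark_no_pin come from this patch:

    #include "ruby.h"

    /* Hypothetical extension struct holding one Ruby reference. */
    struct wrapper {
        VALUE held;
    };

    static void
    wrapper_mark(void *ptr)
    {
        /* Mark without pinning (added by this patch), so `held` stays movable. */
        rb_gc_mark_no_pin(((struct wrapper *)ptr)->held);
    }

    static void
    wrapper_compact(void *ptr)
    {
        /* Called via the dcompact hook: chase the forwarding address, if any. */
        struct wrapper *w = ptr;
        w->held = rb_gc_new_location(w->held);
    }

    static const rb_data_type_t wrapper_type = {
        .wrap_struct_name = "wrapper",
        .function = {
            .dmark = wrapper_mark,
            .dfree = RUBY_TYPED_DEFAULT_FREE,
            .dcompact = wrapper_compact,   /* hook added by this patch */
        },
        .flags = RUBY_TYPED_FREE_IMMEDIATELY,
    };

    static VALUE
    wrapper_alloc(VALUE klass)
    {
        struct wrapper *w;
        VALUE obj = TypedData_Make_Struct(klass, struct wrapper, &wrapper_type, w);
        w->held = Qnil;
        return obj;
    }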
static int
gc_ref_update(void *vstart, void *vend, size_t stride, void * data)
{
    rb_objspace_t * objspace;
    struct heap_page *page;
    short free_slots = 0;

    VALUE v = (VALUE)vstart;
    objspace = (rb_objspace_t *)data;
    page = GET_HEAP_PAGE(v);
    page->freelist = NULL;
    page->flags.has_uncollectible_shady_objects = FALSE;

    /* For each object on the page */
    for (; v != (VALUE)vend; v += stride) {
        if (SPECIAL_CONST_P(v)) {
        }
        else if (BUILTIN_TYPE(v) == T_NONE) {
            heap_page_add_freeobj(objspace, page, v);
            free_slots++;
        }
        else {
            if (RVALUE_WB_UNPROTECTED(v)) {
                page->flags.has_uncollectible_shady_objects = TRUE;
            }
            gc_update_object_references(objspace, v);
        }
    }

    page->free_slots = free_slots;
    return 0;
}

extern rb_symbols_t global_symbols;

static void
gc_update_references(rb_objspace_t * objspace)
{
    rb_execution_context_t *ec = GET_EC();
    rb_vm_t *vm = rb_ec_vm_ptr(ec);

    rb_objspace_each_objects_without_setup(gc_ref_update, objspace);
    rb_vm_update_references(vm);
    gc_update_table_refs(objspace, global_symbols.str_sym);
}
static VALUE type_sym(int type);

static VALUE
rb_gc_compact_stats(VALUE mod)
{
    int i;

    rb_objspace_t *objspace = &rb_objspace;
    VALUE h = rb_hash_new();
    VALUE considered = rb_hash_new();
    VALUE moved = rb_hash_new();

    for (i=0; i<T_MASK; i++) {
        rb_hash_aset(considered, type_sym(i), SIZET2NUM(objspace->rcompactor.considered_count_table[i]));
    }

    for (i=0; i<T_MASK; i++) {
        rb_hash_aset(moved, type_sym(i), SIZET2NUM(objspace->rcompactor.moved_count_table[i]));
    }

    rb_hash_aset(h, ID2SYM(rb_intern("considered")), considered);
    rb_hash_aset(h, ID2SYM(rb_intern("moved")), moved);

    return h;
}
static VALUE
rb_gc_compact(VALUE mod)
{
    rb_objspace_t *objspace = &rb_objspace;

    /* Ensure objects are pinned */
    rb_gc();

    /* Drain interrupts so that THEAP has a chance to evacuate before
     * any possible compaction. */
    rb_thread_execute_interrupts(rb_thread_current());

    gc_compact_heap(objspace);

    gc_update_references(objspace);

    rb_clear_method_cache_by_class(rb_cObject);
    rb_clear_constant_cache();

    /* GC after compaction to eliminate T_MOVED */
    rb_gc();

    return rb_gc_compact_stats(mod);
}
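The ordering here matters: the first rb_gc populates the pin bits from conservative roots, compaction and reference updating then run, caches that memoize object addresses are flushed, and a final rb_gc sweeps the leftover T_MOVED slots. Assuming this function is exposed to Ruby as GC.compact (the Init_GC hunk doing the wiring is not part of this excerpt), the registration would look roughly like this, placed after the definitions so the static functions are visible:

    /* Sketch only: how rb_gc_compact and friends might be registered.
     * The actual Init_GC changes are outside this excerpt. */
    void
    Init_GC_compact_sketch(void)
    {
        rb_define_singleton_method(rb_mGC, "compact", rb_gc_compact, 0);
        rb_define_singleton_method(rb_mGC, "verify_compaction_references",
                                   gc_verify_compaction_references, 0);
    }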
/*
 *  call-seq:
 *     GC.verify_compaction_references -> nil
 *
 *  Verify compaction reference consistency.
 *
 *  This method is implementation specific.  During compaction, objects that
 *  were moved are replaced with T_MOVED objects.  No object should have a
 *  reference to a T_MOVED object after compaction.
 *
 *  This function doubles the heap to ensure room to move all objects,
 *  compacts the heap to make sure everything moves, updates all references,
 *  then performs a full GC.  If any object contains a reference to a T_MOVED
 *  object, that object should be pushed onto the mark stack, and will
 *  cause a SEGV.
 */
static VALUE
gc_verify_compaction_references(VALUE dummy)
{
    rb_objspace_t *objspace = &rb_objspace;

    /* Double heap size */