#ifndef RUBY_GC_H
#define RUBY_GC_H 1

#include "ruby/ruby.h"

#if defined(__x86_64__) && !defined(_ILP32) && defined(__GNUC__)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("movq\t%%rsp, %0" : "=r" (*(p)))
#elif defined(__i386) && defined(__GNUC__)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("movl\t%%esp, %0" : "=r" (*(p)))
#elif (defined(__powerpc__) || defined(__powerpc64__)) && defined(__GNUC__) && !defined(_AIX)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mr\t%0, %%r1" : "=r" (*(p)))
#elif (defined(__powerpc__) || defined(__powerpc64__)) && defined(__GNUC__) && defined(_AIX)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mr %0,1" : "=r" (*(p)))
#elif defined(__aarch64__) && defined(__GNUC__)
#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mov\t%0, sp" : "=r" (*(p)))
#else
NOINLINE(void rb_gc_set_stack_end(VALUE **stack_end_p));
#define SET_MACHINE_STACK_END(p) rb_gc_set_stack_end(p)
#define USE_CONSERVATIVE_STACK_END
#endif
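
/*
 * Usage sketch (illustration only): SET_MACHINE_STACK_END(p) stores the
 * current stack pointer into the slot that `p` points to, so the conservative
 * GC knows how far to scan the machine stack.  The local name below is
 * hypothetical:
 *
 *     VALUE *stack_end;
 *     SET_MACHINE_STACK_END(&stack_end);
 *     // stack_end now holds (approximately) the current top of the C stack
 */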

#define RB_GC_SAVE_MACHINE_CONTEXT(th)                        \
    do {                                                      \
        FLUSH_REGISTER_WINDOWS;                               \
        setjmp((th)->ec->machine.regs);                       \
        SET_MACHINE_STACK_END(&(th)->ec->machine.stack_end);  \
    } while (0)
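
/*
 * Usage sketch (illustration only): RB_GC_SAVE_MACHINE_CONTEXT() is intended
 * to be invoked on a thread before it may lose the CPU (e.g. before entering
 * a blocking region), so that callee-saved registers are spilled via setjmp()
 * and the stack end is recorded for conservative marking.  Assuming `th` is a
 * thread whose `ec` execution context is set up, a caller would simply do:
 *
 *     RB_GC_SAVE_MACHINE_CONTEXT(th);
 */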

/* for GC debug */
#ifndef RUBY_MARK_FREE_DEBUG
#define RUBY_MARK_FREE_DEBUG 0
#endif
#if RUBY_MARK_FREE_DEBUG
extern int ruby_gc_debug_indent;

static inline void
rb_gc_debug_indent(void)
{
    ruby_debug_printf("%*s", ruby_gc_debug_indent, "");
}

static inline void
rb_gc_debug_body(const char *mode, const char *msg, int st, void *ptr)
{
    if (st == 0) {
        ruby_gc_debug_indent--;
    }
    rb_gc_debug_indent();
    ruby_debug_printf("%s: %s %s (%p)\n", mode, st ? "->" : "<-", msg, ptr);

    if (st) {
        ruby_gc_debug_indent++;
    }

    fflush(stdout);
}
#define RUBY_MARK_ENTER(msg) rb_gc_debug_body("mark", (msg), 1, ptr)
#define RUBY_MARK_LEAVE(msg) rb_gc_debug_body("mark", (msg), 0, ptr)
#define RUBY_FREE_ENTER(msg) rb_gc_debug_body("free", (msg), 1, ptr)
#define RUBY_FREE_LEAVE(msg) rb_gc_debug_body("free", (msg), 0, ptr)
#define RUBY_GC_INFO rb_gc_debug_indent(), ruby_debug_printf
#else
#define RUBY_MARK_ENTER(msg)
#define RUBY_MARK_LEAVE(msg)
#define RUBY_FREE_ENTER(msg)
#define RUBY_FREE_LEAVE(msg)
#define RUBY_GC_INFO if(0)printf
#endif
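
/*
 * Illustrative sketch of how the debug markers above are typically used: a
 * mark callback brackets its body with RUBY_MARK_ENTER()/RUBY_MARK_LEAVE(),
 * and both macros expect a local named `ptr` to be in scope (it is printed as
 * the traced address).  The struct and field below are invented for the
 * example:
 *
 *     static void
 *     foo_mark(void *ptr)
 *     {
 *         struct foo *p = ptr;
 *         RUBY_MARK_ENTER("foo");
 *         rb_gc_mark(p->obj);
 *         RUBY_MARK_LEAVE("foo");
 *     }
 */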

#define RUBY_MARK_MOVABLE_UNLESS_NULL(ptr) do { \
    VALUE markobj = (ptr); \
    if (RTEST(markobj)) {rb_gc_mark_movable(markobj);} \
} while (0)
#define RUBY_MARK_UNLESS_NULL(ptr) do { \
    VALUE markobj = (ptr); \
    if (RTEST(markobj)) {rb_gc_mark(markobj);} \
} while (0)
#define RUBY_FREE_UNLESS_NULL(ptr) if(ptr){ruby_xfree(ptr);(ptr)=NULL;}
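
/*
 * Illustrative sketch: RUBY_MARK_UNLESS_NULL() and RUBY_FREE_UNLESS_NULL()
 * guard fields that may still be 0/NULL.  A hypothetical free callback
 * (struct and field names invented for the example, with `buffer` assumed to
 * be a malloc'ed member) might look like:
 *
 *     static void
 *     foo_free(void *ptr)
 *     {
 *         struct foo *p = ptr;
 *         RUBY_FREE_ENTER("foo");
 *         RUBY_FREE_UNLESS_NULL(p->buffer);  // xfree()s and NULLs the field
 *         RUBY_FREE_LEAVE("foo");
 *         ruby_xfree(p);
 *     }
 */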
#if STACK_GROW_DIRECTION > 0
# define STACK_UPPER(x, a, b) (a)
#elif STACK_GROW_DIRECTION < 0
# define STACK_UPPER(x, a, b) (b)
#else
RUBY_EXTERN int ruby_stack_grow_direction;
int ruby_get_stack_grow_direction(volatile VALUE *addr);
# define stack_growup_p(x) ( \
        (ruby_stack_grow_direction ? \
         ruby_stack_grow_direction : \
         ruby_get_stack_grow_direction(x)) > 0)
# define STACK_UPPER(x, a, b) (stack_growup_p(x) ? (a) : (b))
#endif

/*
  STACK_GROW_DIR_DETECTION is used together with STACK_DIR_UPPER.

  On most systems the stack grows downward, from higher addresses to lower
  addresses; in that case STACK_DIR_UPPER(a, b) returns (b).  On the exotic
  systems where the stack grows upward, from lower addresses to higher
  addresses, it returns (a).
*/
#if STACK_GROW_DIRECTION
#define STACK_GROW_DIR_DETECTION
#define STACK_DIR_UPPER(a,b) STACK_UPPER(0, (a), (b))
#else
#define STACK_GROW_DIR_DETECTION VALUE stack_grow_dir_detection
#define STACK_DIR_UPPER(a,b) STACK_UPPER(&stack_grow_dir_detection, (a), (b))
#endif
#define IS_STACK_DIR_UPPER() STACK_DIR_UPPER(1,0)
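
/*
 * Illustrative sketch: STACK_GROW_DIR_DETECTION reserves a local whose
 * address can be used to probe the growth direction when it is not known at
 * compile time, and STACK_DIR_UPPER(a, b) then picks (a) on upward-growing
 * stacks and (b) on the usual downward-growing ones.  The helper below is
 * hypothetical:
 *
 *     static int
 *     grew_past(volatile VALUE *base, volatile VALUE *now)
 *     {
 *         STACK_GROW_DIR_DETECTION;
 *         return STACK_DIR_UPPER(now > base,   // stack grows up
 *                                now < base);  // stack grows down
 *     }
 */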
const char *rb_obj_info(VALUE obj);
const char *rb_raw_obj_info(char *const buff, const size_t buff_size, VALUE obj);
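
/*
 * Usage sketch (illustration only): both helpers produce a short,
 * human-readable description of an object, which is handy in printf-style GC
 * debugging.  The buffer size and the `obj` variable below are assumptions
 * for the example:
 *
 *     char buff[256];
 *     rb_raw_obj_info(buff, sizeof(buff), obj);
 *     fprintf(stderr, "%s\n", buff);
 */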
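
/* Presumably like rb_gc_disable(), except that it does not first finish
   ("rest") an in-progress lazy sweep before disabling GC. */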
VALUE rb_gc_disable_no_rest(void);
struct rb_thread_struct;
RUBY_SYMBOL_EXPORT_BEGIN
/* exports for objspace module */
size_t rb_objspace_data_type_memsize(VALUE obj);
void rb_objspace_reachable_objects_from(VALUE obj, void (func)(VALUE, void *), void *data);
void rb_objspace_reachable_objects_from_root(void (func)(const char *category, VALUE, void *), void *data);
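
/*
 * Illustrative sketch: rb_objspace_reachable_objects_from() calls `func` once
 * for every object directly reachable from `obj`; the _from_root variant does
 * the same for the VM roots and additionally passes a category string.  The
 * callback and `obj` below are invented for the example:
 *
 *     static void
 *     push_child_i(VALUE child, void *data)
 *     {
 *         rb_ary_push((VALUE)data, child);
 *     }
 *
 *     VALUE children = rb_ary_new();
 *     rb_objspace_reachable_objects_from(obj, push_child_i, (void *)children);
 */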
int rb_objspace_markable_object_p(VALUE obj);
int rb_objspace_internal_object_p(VALUE obj);
int rb_objspace_marked_object_p(VALUE obj);

void rb_objspace_each_objects(
    int (*callback)(void *start, void *end, size_t stride, void *data),
    void *data);

void rb_objspace_each_objects_without_setup(
    int (*callback)(void *, void *, size_t, void *),
    void *data);
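
/*
 * Illustrative sketch: rb_objspace_each_objects() walks the heap pages and
 * invokes the callback once per contiguous range of slots; a non-zero return
 * value from the callback stops the walk.  The counting callback below is
 * hypothetical (it counts slots, whether or not they hold live objects):
 *
 *     static int
 *     count_slots_i(void *vstart, void *vend, size_t stride, void *data)
 *     {
 *         size_t *count = data;
 *         VALUE v = (VALUE)vstart;
 *         for (; v != (VALUE)vend; v += stride) {
 *             (*count)++;
 *         }
 *         return 0;  // keep iterating
 *     }
 *
 *     // call site:
 *     size_t total = 0;
 *     rb_objspace_each_objects(count_slots_i, &total);
 */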
size_t rb_gc_obj_slot_size(VALUE obj);
RUBY_SYMBOL_EXPORT_END
#endif /* RUBY_GC_H */