31 #include <sys/types.h>
35 # define __has_feature(x) 0
38 #ifndef HAVE_MALLOC_USABLE_SIZE
40 # define HAVE_MALLOC_USABLE_SIZE
41 # define malloc_usable_size(a) _msize(a)
42 # elif defined HAVE_MALLOC_SIZE
43 # define HAVE_MALLOC_USABLE_SIZE
44 # define malloc_usable_size(a) malloc_size(a)
47 #ifdef HAVE_MALLOC_USABLE_SIZE
50 # elif defined(HAVE_MALLOC_NP_H)
51 # include <malloc_np.h>
52 # elif defined(HAVE_MALLOC_MALLOC_H)
53 # include <malloc/malloc.h>
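/* [editor's sketch, not part of gc.c] How the malloc_usable_size shim above
 * is consumed once one of the branches has defined it: ask the allocator how
 * many bytes a live block really owns (always >= the requested size).
 * Standalone demo assuming a glibc-like platform where <malloc.h> provides
 * malloc_usable_size() directly. */
#include <stdio.h>
#include <stdlib.h>
#include <malloc.h>

int main(void)
{
    void *p = malloc(100);
    printf("requested 100 bytes, usable %zu bytes\n", malloc_usable_size(p));
    free(p);
    return 0;
}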
58 __has_feature(address_sanitizer) || \
59 defined(__SANITIZE_ADDRESS__)
60 #define ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS \
61 __attribute__((no_address_safety_analysis)) \
62 __attribute__((noinline))
64 #define ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS
67 #ifdef HAVE_SYS_TIME_H
71 #ifdef HAVE_SYS_RESOURCE_H
72 #include <sys/resource.h>
74 #if defined(__native_client__) && defined(NACL_NEWLIB)
76 # undef HAVE_POSIX_MEMALIGN
81 #if defined _WIN32 || defined __CYGWIN__
83 #elif defined(HAVE_POSIX_MEMALIGN)
84 #elif defined(HAVE_MEMALIGN)
88 #define rb_setjmp(env) RUBY_SETJMP(env)
89 #define rb_jmp_buf rb_jmpbuf_t
91 #if defined(HAVE_RB_GC_GUARDED_PTR) && HAVE_RB_GC_GUARDED_PTR
99 #ifndef GC_HEAP_FREE_SLOTS
100 #define GC_HEAP_FREE_SLOTS 4096
102 #ifndef GC_HEAP_INIT_SLOTS
103 #define GC_HEAP_INIT_SLOTS 10000
105 #ifndef GC_HEAP_GROWTH_FACTOR
106 #define GC_HEAP_GROWTH_FACTOR 1.8
108 #ifndef GC_HEAP_GROWTH_MAX_SLOTS
109 #define GC_HEAP_GROWTH_MAX_SLOTS 0
111 #ifndef GC_HEAP_OLDOBJECT_LIMIT_FACTOR
112 #define GC_HEAP_OLDOBJECT_LIMIT_FACTOR 2.0
115 #ifndef GC_MALLOC_LIMIT_MIN
116 #define GC_MALLOC_LIMIT_MIN (16 * 1024 * 1024)
118 #ifndef GC_MALLOC_LIMIT_MAX
119 #define GC_MALLOC_LIMIT_MAX (32 * 1024 * 1024)
121 #ifndef GC_MALLOC_LIMIT_GROWTH_FACTOR
122 #define GC_MALLOC_LIMIT_GROWTH_FACTOR 1.4
125 #ifndef GC_OLDMALLOC_LIMIT_MIN
126 #define GC_OLDMALLOC_LIMIT_MIN (16 * 1024 * 1024)
128 #ifndef GC_OLDMALLOC_LIMIT_GROWTH_FACTOR
129 #define GC_OLDMALLOC_LIMIT_GROWTH_FACTOR 1.2
131 #ifndef GC_OLDMALLOC_LIMIT_MAX
132 #define GC_OLDMALLOC_LIMIT_MAX (128 * 1024 * 1024)
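/* [editor's sketch] Every GC_* default above can be overridden at process
 * start through an environment variable carrying the RUBY_ prefix (e.g.
 * RUBY_GC_HEAP_GROWTH_FACTOR); gc.c does this with the get_envparam_int/
 * get_envparam_double helpers that appear further down in this file.
 * Minimal standalone version of that pattern; names are hypothetical: */
#include <stdio.h>
#include <stdlib.h>

static double env_double(const char *name, double defval, double lower_bound)
{
    const char *s = getenv(name);
    if (s) {
        double v = strtod(s, NULL);
        if (v > lower_bound) return v;  /* accept the override */
        fprintf(stderr, "%s=%f ignored (must be greater than %f)\n",
                name, v, lower_bound);
    }
    return defval;                      /* keep the compile-time default */
}
/* usage: factor = env_double("RUBY_GC_HEAP_GROWTH_FACTOR", GC_HEAP_GROWTH_FACTOR, 1.0); */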
147 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
164 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
185 #define RGENGC_DEBUG 0
195 #ifndef RGENGC_CHECK_MODE
196 #define RGENGC_CHECK_MODE 0
204 #ifndef RGENGC_PROFILE
205 #define RGENGC_PROFILE 0
213 #ifndef RGENGC_THREEGEN
214 #define RGENGC_THREEGEN 0
223 #ifndef RGENGC_ESTIMATE_OLDMALLOC
224 #define RGENGC_ESTIMATE_OLDMALLOC 1
229 #define RGENGC_DEBUG 0
230 #define RGENGC_CHECK_MODE 0
231 #define RGENGC_PROFILE 0
232 #define RGENGC_THREEGEN 0
233 #define RGENGC_ESTIMATE_OLDMALLOC 0
237 #ifndef GC_PROFILE_MORE_DETAIL
238 #define GC_PROFILE_MORE_DETAIL 0
240 #ifndef GC_PROFILE_DETAIL_MEMORY
241 #define GC_PROFILE_DETAIL_MEMORY 0
243 #ifndef GC_ENABLE_LAZY_SWEEP
244 #define GC_ENABLE_LAZY_SWEEP 1
246 #ifndef CALC_EXACT_MALLOC_SIZE
247 #define CALC_EXACT_MALLOC_SIZE 0
249 #if defined(HAVE_MALLOC_USABLE_SIZE) || CALC_EXACT_MALLOC_SIZE > 0
250 #ifndef MALLOC_ALLOCATED_SIZE
251 #define MALLOC_ALLOCATED_SIZE 0
254 #define MALLOC_ALLOCATED_SIZE 0
256 #ifndef MALLOC_ALLOCATED_SIZE_CHECK
257 #define MALLOC_ALLOCATED_SIZE_CHECK 0
268 #if RGENGC_ESTIMATE_OLDMALLOC
295 #if GC_PROFILE_MORE_DETAIL
297 double gc_sweep_time;
299 size_t heap_use_pages;
300 size_t heap_live_objects;
301 size_t heap_free_objects;
303 size_t allocate_increase;
304 size_t allocate_limit;
307 size_t removing_objects;
308 size_t empty_objects;
309 #if GC_PROFILE_DETAIL_MEMORY
315 #if MALLOC_ALLOCATED_SIZE
316 size_t allocated_size;
319 #if RGENGC_PROFILE > 0
321 size_t remembered_normal_objects;
322 size_t remembered_shady_objects;
326 #if defined(_MSC_VER) || defined(__BORLANDC__) || defined(__CYGWIN__)
327 #pragma pack(push, 1)
366 #if defined(_MSC_VER) || defined(__BORLANDC__) || defined(__CYGWIN__)
391 #define STACK_CHUNK_SIZE 500
421 #if MALLOC_ALLOCATED_SIZE
422 size_t allocated_size;
463 #if GC_PROFILE_MORE_DETAIL
471 #if RGENGC_PROFILE > 0
472 size_t generated_normal_object_count;
473 size_t generated_shady_object_count;
474 size_t shade_operation_count;
475 size_t promote_infant_count;
477 size_t promote_young_count;
479 size_t remembered_normal_object_count;
482 #if RGENGC_PROFILE >= 2
483 size_t generated_normal_object_count_types[RUBY_T_MASK];
484 size_t generated_shady_object_count_types[RUBY_T_MASK];
490 size_t remembered_normal_object_count_types[RUBY_T_MASK];
491 size_t remembered_shady_object_count_types[RUBY_T_MASK];
530 size_t young_object_count;
533 #if RGENGC_ESTIMATE_OLDMALLOC
538 #if RGENGC_CHECK_MODE >= 2
547 #ifndef HEAP_ALIGN_LOG
549 #define HEAP_ALIGN_LOG 14
551 #define CEILDIV(i, mod) (((i) + (mod) - 1)/(mod))
582 #define GET_PAGE_BODY(x) ((struct heap_page_body *)((bits_t)(x) & ~(HEAP_ALIGN_MASK)))
583 #define GET_PAGE_HEADER(x) (&GET_PAGE_BODY(x)->header)
584 #define GET_HEAP_PAGE(x) (GET_PAGE_HEADER(x)->page)
585 #define GET_HEAP_MARK_BITS(x) (&GET_HEAP_PAGE(x)->mark_bits[0])
586 #define GET_HEAP_REMEMBERSET_BITS(x) (&GET_HEAP_PAGE(x)->rememberset_bits[0])
587 #define GET_HEAP_OLDGEN_BITS(x) (&GET_HEAP_PAGE(x)->oldgen_bits[0])
588 #define NUM_IN_PAGE(p) (((bits_t)(p) & HEAP_ALIGN_MASK)/sizeof(RVALUE))
589 #define BITMAP_INDEX(p) (NUM_IN_PAGE(p) / BITS_BITLENGTH)
590 #define BITMAP_OFFSET(p) (NUM_IN_PAGE(p) & (BITS_BITLENGTH-1))
591 #define BITMAP_BIT(p) ((bits_t)1 << BITMAP_OFFSET(p))
593 #define MARKED_IN_BITMAP(bits, p) ((bits)[BITMAP_INDEX(p)] & BITMAP_BIT(p))
594 #define MARK_IN_BITMAP(bits, p) ((bits)[BITMAP_INDEX(p)] = (bits)[BITMAP_INDEX(p)] | BITMAP_BIT(p))
595 #define CLEAR_IN_BITMAP(bits, p) ((bits)[BITMAP_INDEX(p)] = (bits)[BITMAP_INDEX(p)] & ~BITMAP_BIT(p))
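/* [editor's sketch] Worked example of the page/bitmap addressing macros
 * above, assuming HEAP_ALIGN_LOG == 14 (16KB aligned pages), a 40-byte
 * RVALUE slot and 64-bit bitmap words.  A slot pointer decomposes into
 * (page base, slot number, bitmap word, bit) by pure masking, so marking
 * never has to touch the object itself. */
#include <stdint.h>
#include <stdio.h>

#define ALIGN_LOG  14
#define ALIGN_MASK ((1UL << ALIGN_LOG) - 1)
#define SLOT_SIZE  40   /* illustrative sizeof(RVALUE) on 64-bit */
#define WORD_BITS  64

int main(void)
{
    uintptr_t p     = (uintptr_t)0x7f30a4c0a8c8;     /* some slot address */
    uintptr_t page  = p & ~ALIGN_MASK;               /* GET_PAGE_BODY     */
    size_t    num   = (p & ALIGN_MASK) / SLOT_SIZE;  /* NUM_IN_PAGE       */
    size_t    index = num / WORD_BITS;               /* BITMAP_INDEX      */
    size_t    off   = num & (WORD_BITS - 1);         /* BITMAP_OFFSET     */
    printf("page %#lx, slot %zu -> bits[%zu], bit %zu\n",
           (unsigned long)page, num, index, off);
    return 0;
}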
598 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
599 #define rb_objspace (*GET_VM()->objspace)
600 #define ruby_initial_gc_stress gc_params.gc_stress
607 #define malloc_limit objspace->malloc_params.limit
608 #define malloc_increase objspace->malloc_params.increase
609 #define malloc_allocated_size objspace->malloc_params.allocated_size
610 #define heap_pages_sorted objspace->heap_pages.sorted
611 #define heap_pages_used objspace->heap_pages.used
612 #define heap_pages_length objspace->heap_pages.length
613 #define heap_pages_lomem objspace->heap_pages.range[0]
614 #define heap_pages_himem objspace->heap_pages.range[1]
615 #define heap_pages_swept_slots objspace->heap_pages.swept_slots
616 #define heap_pages_increment objspace->heap_pages.increment
617 #define heap_pages_min_free_slots objspace->heap_pages.min_free_slots
618 #define heap_pages_max_free_slots objspace->heap_pages.max_free_slots
619 #define heap_pages_final_slots objspace->heap_pages.final_slots
620 #define heap_pages_deferred_final objspace->heap_pages.deferred_final
621 #define heap_eden (&objspace->eden_heap)
622 #define heap_tomb (&objspace->tomb_heap)
623 #define dont_gc objspace->flags.dont_gc
624 #define during_gc objspace->flags.during_gc
625 #define finalizing objspace->flags.finalizing
626 #define finalizer_table objspace->finalizer_table
627 #define global_List objspace->global_list
628 #define ruby_gc_stress objspace->gc_stress
629 #define monitor_level objspace->rgengc.monitor_level
630 #define monitored_object_table objspace->rgengc.monitored_object_table
632 #define is_lazy_sweeping(heap) ((heap)->sweep_pages != 0)
633 #if SIZEOF_LONG == SIZEOF_VOIDP
634 # define nonspecial_obj_id(obj) (VALUE)((SIGNED_VALUE)(obj)|FIXNUM_FLAG)
635 # define obj_id_to_ref(objid) ((objid) ^ FIXNUM_FLAG)
636 #elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
637 # define nonspecial_obj_id(obj) LL2NUM((SIGNED_VALUE)(obj) / 2)
638 # define obj_id_to_ref(objid) (FIXNUM_P(objid) ? \
639 ((objid) ^ FIXNUM_FLAG) : (NUM2PTR(objid) << 1))
641 # error not supported
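/* [editor's sketch] Round trip of the object-id encoding above on a platform
 * where SIZEOF_LONG == SIZEOF_VOIDP: the id is the slot address with the
 * Fixnum tag bit forced on, so XORing the tag off recovers the pointer.
 * This is safe because RVALUE slots are word aligned, leaving the low bit
 * always clear in a real reference. */
#include <assert.h>
#include <stdint.h>

#define FIXNUM_TAG 0x01

int main(void)
{
    uintptr_t ref = (uintptr_t)0x55d2c63f83a0;  /* aligned heap address */
    uintptr_t oid = ref | FIXNUM_TAG;           /* nonspecial_obj_id    */
    assert((oid ^ FIXNUM_TAG) == ref);          /* obj_id_to_ref        */
    return 0;
}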
644 #define RANY(o) ((RVALUE*)(o))
646 #define nomem_error GET_VM()->special_exceptions[ruby_error_nomemory]
690 #define gc_prof_record(objspace) (objspace)->profile.current_record
691 #define gc_prof_enabled(objspace) ((objspace)->profile.run && (objspace)->profile.current_record)
693 #define rgengc_report if (RGENGC_DEBUG) rgengc_report_body
704 #define FL_TEST2(x,f) ((RGENGC_CHECK_MODE && SPECIAL_CONST_P(x)) ? (rb_bug("FL_TEST2: SPECIAL_CONST"), 0) : FL_TEST_RAW((x),(f)) != 0)
705 #define FL_SET2(x,f) do {if (RGENGC_CHECK_MODE && SPECIAL_CONST_P(x)) rb_bug("FL_SET2: SPECIAL_CONST"); RBASIC(x)->flags |= (f);} while (0)
706 #define FL_UNSET2(x,f) do {if (RGENGC_CHECK_MODE && SPECIAL_CONST_P(x)) rb_bug("FL_UNSET2: SPECIAL_CONST"); RBASIC(x)->flags &= ~(f);} while (0)
708 #define RVALUE_WB_PROTECTED_RAW(obj) FL_TEST2((obj), FL_WB_PROTECTED)
709 #define RVALUE_WB_PROTECTED(obj) RVALUE_WB_PROTECTED_RAW(check_gen_consistency((VALUE)obj))
711 #define RVALUE_OLDGEN_BITMAP(obj) MARKED_IN_BITMAP(GET_HEAP_OLDGEN_BITS(obj), (obj))
727 rb_bug("check_gen_consistency: %p (%s) is not Ruby object.", (void *)obj, obj_type_name(obj));
732 const char *type = old_flag ? "old" : "young";
733 rb_bug("check_gen_consistency: %p (%s) is not WB protected, but %s object.", (void *)obj, obj_type_name(obj), type);
738 rb_bug("check_gen_consistency: %p (%s) is not infant, but is not old (on 2gen).", (void *)obj, obj_type_name(obj));
743 rb_bug("check_gen_consistency: %p (%s) is old, but is not marked while minor marking.", (void *)obj, obj_type_name(obj));
748 rb_bug("check_gen_consistency: %p (%s) is not infant, but is old.", (void *)obj, obj_type_name(obj));
798 #if RGENGC_PROFILE >= 1
801 objspace->profile.promote_infant_count++;
803 #if RGENGC_PROFILE >= 2
830 #if RGENGC_PROFILE >= 1
833 objspace->profile.promote_young_count++;
834 #if RGENGC_PROFILE >= 2
842 RVALUE_DEMOTE_FROM_YOUNG(VALUE obj)
845 rb_bug("RVALUE_DEMOTE_FROM_YOUNG: %p (%s) is not young object.", (void *)obj, obj_type_name(obj));
857 rb_bug("RVALUE_DEMOTE_FROM_OLD: %p (%s) is not old object.", (void *)obj, obj_type_name(obj));
871 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
876 memset(objspace, 0, sizeof(*objspace));
885 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
937 rgengc_report(3, objspace, "heap_pages_expand_sorted: next_length: %d, size: %d\n", (int)next_length, (int)size);
1006 if (0) fprintf(stderr, "heap_pages_free_unused_pages: %d free page %p, heap_pages_swept_slots: %d, heap_pages_max_free_slots: %d\n",
1008 heap_pages_swept_slots -= page->limit;
1022 assert(j == heap_pages_used);
1036 if (page_body == 0) {
1050 page->body = page_body;
1058 mid = (lo + hi) / 2;
1060 if (mid_page->body < page_body) {
1063 else if (mid_page->body > page_body) {
1086 end = start + limit;
1091 page->start = start;
1092 page->limit = limit;
1095 for (p = start; p != end; p++) {
1096 rgengc_report(3, objspace, "assign_heap_page: %p is added to freelist\n");
1119 const char *method = "recycle";
1122 method = "allocate";
1124 if (0) fprintf(stderr, "heap_page_create: %s - %p, heap_pages_used: %d, heap_pages_length: %d, tomb->page_length: %d\n",
1155 for (i = 0; i < add; i++) {
1165 size_t next_used_limit = (size_t)(used * gc_params.growth_factor);
1168 if (next_used_limit > max_used_limit) next_used_limit = max_used_limit;
1172 if (next_used_limit < minimum_limit) {
1173 next_used_limit = minimum_limit;
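/* [editor's sketch] The sizing arithmetic above in one helper: the next page
 * count grows geometrically by gc_params.growth_factor, is capped by
 * GC_HEAP_GROWTH_MAX_SLOTS when that is nonzero, and never drops below the
 * caller's minimum.  Names hypothetical. */
#include <stddef.h>

static size_t next_page_limit(size_t used, double growth_factor,
                              size_t max_limit, size_t min_limit)
{
    size_t next = (size_t)(used * growth_factor);
    if (max_limit > 0 && next > max_limit) next = max_limit;
    if (next < min_limit) next = min_limit;
    return next;
}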
1179 if (0) fprintf(stderr, "heap_set_increment: heap_pages_length: %d, heap_pages_used: %d, heap_pages_increment: %d, next_used_limit: %d\n",
1186 rgengc_report(5, objspace, "heap_increment: heap_pages_length: %d, heap_pages_inc: %d, heap->page_length: %d\n",
1189 if (heap_pages_increment > 0) {
1190 heap_pages_increment--;
1216 #if GC_PROFILE_MORE_DETAIL
1217 objspace->profile.prepare_time = 0;
1236 while (page == NULL) {
1278 #define gc_event_hook(objspace, event, data) do { \
1279 if (UNLIKELY((objspace)->hook_events & (event))) { \
1280 gc_event_hook_body((objspace), (event), (data)); \
1293 rb_bug("object allocation during garbage collection phase");
1309 RANY(obj)->as.values.v1 = v1;
1310 RANY(obj)->as.values.v2 = v2;
1311 RANY(obj)->as.values.v3 = v3;
1321 objspace->profile.generated_normal_object_count++;
1322 #if RGENGC_PROFILE >= 2
1327 objspace->profile.generated_shady_object_count++;
1328 #if RGENGC_PROFILE >= 2
1336 #if USE_RGENGC && RGENGC_CHECK_MODE
1356 return newobj_of(klass, flags, 0, 0, 0);
1409 register size_t hi, lo, mid;
1418 mid = (lo + hi) / 2;
1420 if (page->start <= p) {
1499 rb_bug("obj_free() called for broken object");
1516 RANY(obj)->as.object.as.heap.ivptr) {
1517 xfree(RANY(obj)->as.object.as.heap.ivptr);
1545 if (RANY(obj)->as.klass.ptr)
1556 if (RANY(obj)->as.hash.ntbl) {
1561 if (RANY(obj)->as.regexp.ptr) {
1567 int free_immediately = FALSE;
1571 RDATA(obj)->dfree = RANY(obj)->as.typeddata.type->function.dfree;
1572 if (0 && free_immediately == 0)
1573 fprintf(stderr, "not immediate -> %s\n", RANY(obj)->as.typeddata.type->wrap_struct_name);
1578 else if (RANY(obj)->as.data.dfree) {
1579 if (free_immediately) {
1590 if (RANY(obj)->as.match.rmatch) {
1591 struct rmatch *rm = RANY(obj)->as.match.rmatch;
1599 if (RANY(obj)->as.file.fptr) {
1630 if (RANY(obj)->as.node.u1.tbl) {
1635 if (RANY(obj)->as.node.u3.args) {
1647 RANY(obj)->as.rstruct.as.heap.ptr) {
1648 xfree((void *)RANY(obj)->as.rstruct.as.heap.ptr);
1665 #if RGENGC_ESTIMATE_OLDMALLOC
1673 #ifdef USE_SIGALTSTACK
1677 void *tmp = th->altstack;
1711 last_body = page->body;
1713 pstart = page->start;
1714 pend = pstart + page->limit;
1765 int prev_dont_lazy_sweep = objspace->flags.dont_lazy_sweep;
1768 objspace->flags.dont_lazy_sweep = TRUE;
1773 if (prev_dont_lazy_sweep) {
1821 for (; p != pend; p++) {
1978 table = (VALUE)data;
2006 table = (VALUE)data;
2061 free_func = RDATA(obj)->dfree;
2121 rb_bug("gc_finalize_deferred_register: can't register finalizer.");
2190 p->as.free.flags = 0;
2192 RDATA(p)->dfree = RANY(p)->as.typeddata.type->function.dfree;
2197 else if (RANY(p)->as.data.dfree) {
2202 if (RANY(p)->as.file.fptr) {
2272 if (!is_pointer_to_heap(objspace, (void *)obj)) rb_bug("is_markable_object: %p is not pointer to heap", (void *)obj);
2303 #if SIZEOF_LONG == SIZEOF_VOIDP
2304 #define NUM2PTR(x) NUM2ULONG(x)
2305 #elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
2306 #define NUM2PTR(x) NUM2ULL(x)
2322 if ((ptr % sizeof(RVALUE)) == (4 << 2)) {
2410 #if SIZEOF_LONG == SIZEOF_VOIDP
2444 ROBJECT(obj)->as.heap.ivptr) {
2479 if (RHASH(obj)->ntbl) {
2496 size += sizeof(struct rmatch);
2500 if (RFILE(obj)->fptr) {
2522 if (RNODE(obj)->u1.tbl) {
2543 rb_bug("objspace/memsize_of(): unknown data type 0x%x(%p)",
2605 for (i = 0; i <= T_MASK; i++) {
2614 for (;p < pend; p++) {
2622 total += page->limit;
2634 for (i = 0; i <= T_MASK; i++) {
2637 #define COUNT_TYPE(t) case (t): type = ID2SYM(rb_intern(#t)); break;
2664 default: type = INT2NUM(i); break;
2722 size_t empty_slots = 0, freed_slots = 0, final_slots = 0;
2730 p = sweep_page->start; pend = p + sweep_page->limit;
2746 #if USE_RGENGC && RGENGC_CHECK_MODE
2754 RDATA(p)->dfree = 0;
2777 #if GC_PROFILE_MORE_DETAIL
2780 record->removing_objects += final_slots + freed_slots;
2781 record->empty_objects += empty_slots;
2791 if (freed_slots + empty_slots > 0) {
2803 if (0) fprintf(stderr, "gc_page_sweep(%d): freed?: %d, limit: %d, freed_slots: %d, empty_slots: %d, final_slots: %d\n",
2806 (int)sweep_page->limit, (int)freed_slots, (int)empty_slots, (int)final_slots);
2849 #if defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ == 4
2850 __attribute__((noinline))
2856 size_t total_limit_slot;
2861 if (GET_VM()->unlinked_method_entry_list) {
2876 if (0) fprintf(stderr, "heap_pages_min_free_slots: %d, heap_pages_max_free_slots: %d\n",
2891 if (inc > malloc_limit) {
2899 malloc_limit = (size_t)(malloc_limit * 0.98);
2906 if (old_limit != malloc_limit) {
2918 #if RGENGC_ESTIMATE_OLDMALLOC
2930 if (0) fprintf(stderr, "%d\t%d\t%u\t%u\t%d\n",
2959 rgengc_report(1, objspace, "after_gc_sweep: heap->total_slots: %d, heap->swept_slots: %d, min_free_slots: %d\n",
2962 if (heap_pages_swept_slots < heap_pages_min_free_slots) {
2982 if (heap_pages_increment < heap_tomb->page_length) {
2986 #if RGENGC_PROFILE > 0
2988 fprintf(stderr, "%d\t%d\t%d\t%d\t%d\t%d\t%d\n",
2992 (int)objspace->profile.promote_infant_count,
2994 (int)objspace->profile.promote_young_count,
2998 (int)objspace->profile.remembered_normal_object_count,
3014 #if GC_ENABLE_LAZY_SWEEP
3033 #if GC_ENABLE_LAZY_SWEEP
3062 if (immediate_sweep) {
3063 #if !GC_ENABLE_LAZY_SWEEP
3068 #if !GC_ENABLE_LAZY_SWEEP
3114 stack->cache = chunk;
3124 chunk = stack->cache;
3139 next = stack->cache;
3165 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
3172 while (chunk != NULL) {
3195 if (stack->index == 1) {
3213 for (i=0; i < 4; i++) {
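/* [editor's sketch] Shape of the chunked mark stack manipulated above:
 * pushes land in a fixed-size chunk; when it fills, a fresh chunk is linked
 * in front, preferring a previously-retired chunk from the cache list over
 * calling malloc again.  Simplified, no error handling. */
#include <stdlib.h>

#define CHUNK_SIZE 500   /* mirrors STACK_CHUNK_SIZE above */
typedef struct chunk { struct chunk *next; void *data[CHUNK_SIZE]; } chunk_t;
typedef struct { chunk_t *top, *cache; size_t index; } mstack_t;

static void mstack_push(mstack_t *s, void *obj)
{
    if (s->top == NULL || s->index == CHUNK_SIZE) {  /* current chunk full */
        chunk_t *c = s->cache;
        if (c) s->cache = c->next;                   /* reuse cached chunk */
        else   c = malloc(sizeof(chunk_t));          /* or allocate anew   */
        c->next = s->top;
        s->top = c;
        s->index = 0;
    }
    s->top->data[s->index++] = obj;
}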
3222 #define SET_STACK_END (SET_MACHINE_STACK_END(&th->machine.stack_end), th->machine.register_stack_end = rb_ia64_bsp())
3224 #define SET_STACK_END SET_MACHINE_STACK_END(&th->machine.stack_end)
3227 #define STACK_START (th->machine.stack_start)
3228 #define STACK_END (th->machine.stack_end)
3229 #define STACK_LEVEL_MAX (th->machine.stack_maxsize/sizeof(VALUE))
3231 #if STACK_GROW_DIRECTION < 0
3232 # define STACK_LENGTH (size_t)(STACK_START - STACK_END)
3233 #elif STACK_GROW_DIRECTION > 0
3234 # define STACK_LENGTH (size_t)(STACK_END - STACK_START + 1)
3236 # define STACK_LENGTH ((STACK_END < STACK_START) ? (size_t)(STACK_START - STACK_END) \
3237 : (size_t)(STACK_END - STACK_START + 1))
3239 #if !STACK_GROW_DIRECTION
3247 if (end > addr) return ruby_stack_grow_direction = 1;
3248 return ruby_stack_grow_direction = -1;
3261 #if !(defined(POSIX_SIGNAL) && defined(SIGSEGV) && defined(HAVE_SIGALTSTACK))
3271 ret = (VALUE*)rb_ia64_bsp() - th->machine.register_stack_start >
3272 th->machine.register_stack_maxsize/sizeof(VALUE) - water_mark;
3279 #define STACKFRAME_FOR_CALL_CFUNC 512
3284 #if defined(POSIX_SIGNAL) && defined(SIGSEGV) && defined(HAVE_SIGALTSTACK)
3308 if (end <= start) return;
3319 #define rb_gc_mark_locations(start, end) gc_mark_locations(objspace, (start), (end))
3397 switch (def->type) {
3437 if (!wrapper || !wrapper->tbl) return;
3442 if (wrapper->serial == serial) return;
3443 wrapper->serial = serial;
3467 #if STACK_GROW_DIRECTION < 0
3468 #define GET_STACK_BOUNDS(start, end, appendix) ((start) = STACK_END, (end) = STACK_START)
3469 #elif STACK_GROW_DIRECTION > 0
3470 #define GET_STACK_BOUNDS(start, end, appendix) ((start) = STACK_START, (end) = STACK_END+(appendix))
3472 #define GET_STACK_BOUNDS(start, end, appendix) \
3473 ((STACK_END < STACK_START) ? \
3474 ((start) = STACK_END, (end) = STACK_START) : ((start) = STACK_START, (end) = STACK_END+(appendix)))
3483 } save_regs_gc_mark;
3484 VALUE *stack_start, *stack_end;
3502 #if defined(__mc68000__)
3512 VALUE *stack_start, *stack_end;
3658 objspace->rgengc.young_object_count++;
3671 if (RVALUE_YOUNG_P((VALUE)obj)) {
3673 RVALUE_PROMOTE_YOUNG((VALUE)obj);
3704 rb_bug("rb_gc_mark() called for broken object");
3736 ptr = (VALUE)obj->as.node.u3.node;
3771 ptr = (VALUE)obj->as.node.u2.node;
3787 ptr = (VALUE)obj->as.node.u1.node;
3794 ptr = (VALUE)obj->as.node.u2.node;
3808 ptr = (VALUE)obj->as.node.u2.node;
3832 (VALUE*)obj->as.node.u1.value,
3833 obj->as.node.u3.cnt);
3838 gc_mark(objspace, obj->as.node.nd_refinements);
3840 ptr = (VALUE)obj->as.node.nd_next;
3851 gc_mark(objspace, obj->as.basic.klass);
3865 ptr = obj->as.array.as.heap.aux.shared;
3871 for (i=0; i < len; i++) {
3879 ptr = obj->as.hash.ifnone;
3883 #define STR_ASSOC FL_USER3
3885 ptr = obj->as.string.as.heap.aux.shared;
3892 RUBY_DATA_FUNC mark_func = obj->as.typeddata.type->function.dmark;
3893 if (mark_func) (*mark_func)(DATA_PTR(obj));
3896 if (obj->as.data.dmark) (*obj->as.data.dmark)(DATA_PTR(obj));
3904 for (i = 0; i < len; i++) {
3911 if (obj->as.file.fptr) {
3912 gc_mark(objspace, obj->as.file.fptr->pathv);
3913 gc_mark(objspace, obj->as.file.fptr->tied_io_for_writing);
3914 gc_mark(objspace, obj->as.file.fptr->writeconv_asciicompat);
3915 gc_mark(objspace, obj->as.file.fptr->writeconv_pre_ecopts);
3916 gc_mark(objspace, obj->as.file.fptr->encs.ecopts);
3917 gc_mark(objspace, obj->as.file.fptr->write_lock);
3922 ptr = obj->as.regexp.src;
3930 gc_mark(objspace, obj->as.match.regexp);
3931 if (obj->as.match.str) {
3932 ptr = obj->as.match.str;
3938 gc_mark(objspace, obj->as.rational.num);
3939 ptr = obj->as.rational.den;
3943 gc_mark(objspace, obj->as.complex.real);
3944 ptr = obj->as.complex.imag;
3964 rb_bug("rb_gc_mark(): unknown data type 0x%x(%p) %s",
3976 if (!mstack->index) return;
3979 rb_bug("gc_mark_stacked_objects: %p (%s) is infant, but not marked.", (void *)obj, obj_type_name(obj));
3986 #ifndef RGENGC_PRINT_TICK
3987 #define RGENGC_PRINT_TICK 0
3996 #if RGENGC_PRINT_TICK
3997 #if defined(__GNUC__) && defined(__i386__)
3998 typedef unsigned long long tick_t;
4000 static inline tick_t
4003 unsigned long long int x;
4004 __asm__ __volatile__ ("rdtsc" : "=A" (x));
4008 #elif defined(__GNUC__) && defined(__x86_64__)
4009 typedef unsigned long long tick_t;
4011 static __inline__ tick_t
4014 unsigned long hi, lo;
4015 __asm__ __volatile__ ("rdtsc" : "=a"(lo), "=d"(hi));
4016 return ((unsigned long long)lo)|(((unsigned long long)hi)<<32);
4019 #elif defined(_WIN32) && defined(_MSC_VER)
4021 typedef unsigned __int64 tick_t;
4023 static inline tick_t
4030 typedef clock_t tick_t;
4031 static inline tick_t
4038 #define MAX_TICKS 0x100
4039 static tick_t mark_ticks[MAX_TICKS];
4040 static const char *mark_ticks_categories[MAX_TICKS];
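/* [editor's sketch] How tick() above is meant to be used: snapshot the
 * time-stamp counter around a phase and record the delta, which is exactly
 * what the MARK_CHECKPOINT_PRINT_TICK machinery below does per mark
 * category.  x86-64 GCC/Clang only; cycle counts are only comparable on one
 * core at a fixed clock. */
#include <stdio.h>

typedef unsigned long long tick_t;

static inline tick_t tick(void)
{
    unsigned long lo, hi;
    __asm__ __volatile__("rdtsc" : "=a"(lo), "=d"(hi));
    return ((tick_t)hi << 32) | lo;
}

int main(void)
{
    tick_t t0 = tick();
    volatile long x = 0;
    for (long i = 0; i < 1000000; i++) x += i;  /* the phase being timed */
    printf("phase took ~%llu cycles\n", tick() - t0);
    return 0;
}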
4043 show_mark_ticks(void)
4046 fprintf(stderr, "mark ticks result:\n");
4047 for (i=0; i<MAX_TICKS; i++) {
4048 const char *category = mark_ticks_categories[i];
4050 fprintf(stderr, "%s\t%8lu\n", category, (unsigned long)mark_ticks[i]);
4065 if (categoryp) *categoryp = "xxx";
4067 #if RGENGC_PRINT_TICK
4068 tick_t start_tick = tick();
4070 const char *prev_category = 0;
4072 if (mark_ticks_categories[0] == 0) {
4073 atexit(show_mark_ticks);
4077 #if RGENGC_PRINT_TICK
4078 #define MARK_CHECKPOINT_PRINT_TICK(category) do { \
4079 if (prev_category) { \
4080 tick_t t = tick(); \
4081 mark_ticks[tick_count] = t - start_tick; \
4082 mark_ticks_categories[tick_count] = prev_category; \
4085 prev_category = category; \
4086 start_tick = tick(); \
4089 #define MARK_CHECKPOINT_PRINT_TICK(category)
4092 #define MARK_CHECKPOINT(category) do { \
4093 if (categoryp) *categoryp = category; \
4094 MARK_CHECKPOINT_PRINT_TICK(category); \
4142 #undef MARK_CHECKPOINT
4149 rgengc_report(1, objspace, "gc_marks_body: start (%s)\n", full_mark ? "full" : "minor");
4168 rgengc_report(1, objspace, "gc_marks_body: end (%s)\n", full_mark ? "full" : "minor");
4188 fprintf(stderr, "verify_internal_consistency_reachable_i: WB miss %p (%s) -> %p (%s)\n",
4202 for (v = (VALUE)page_start; v != (VALUE)page_end; v += stride) {
4237 eo_args.data = (void *)&data;
4242 rb_bug("gc_verify_internal_consistency: found internal inconsistency.\n");
4247 #if RGENGC_CHECK_MODE >= 3
4249 #define MAKE_ROOTSIG(obj) (((VALUE)(obj) << 1) | 0x01)
4250 #define IS_ROOTSIG(obj) ((VALUE)(obj) & 0x01)
4251 #define GET_ROOTSIG(obj) ((const char *)((VALUE)(obj) >> 1))
4259 static struct reflist *
4260 reflist_create(VALUE obj)
4262 struct reflist *refs = xmalloc(sizeof(struct reflist));
4265 refs->list[0] = obj;
4271 reflist_destruct(struct reflist *refs)
4278 reflist_add(struct reflist *refs, VALUE obj)
4280 if (refs->pos == refs->size) {
4285 refs->list[refs->pos++] = obj;
4289 reflist_dump(struct reflist *refs)
4292 for (i=0; i<refs->pos; i++) {
4293 VALUE obj = refs->list[i];
4294 if (IS_ROOTSIG(obj)) {
4295 fprintf(stderr, "<root@%s>", GET_ROOTSIG(obj));
4298 fprintf(stderr, "<%p@%s>", (void *)obj, obj_type_name(obj));
4300 if (i+1 < refs->pos) fprintf(stderr, ", ");
4304 #if RGENGC_CHECK_MODE >= 3
4306 reflist_refered_from_machine_context(struct reflist *refs)
4309 for (i=0; i<refs->pos; i++) {
4310 VALUE obj = refs->list[i];
4311 if (IS_ROOTSIG(obj) && strcmp(GET_ROOTSIG(obj), "machine_context") == 0) return 1;
4327 const char *category;
4332 allrefs_add(struct allrefs *data, VALUE obj)
4334 struct reflist *refs;
4337 reflist_add(refs, data->root_obj);
4341 refs = reflist_create(data->root_obj);
4348 allrefs_i(VALUE obj, void *ptr)
4350 struct allrefs *data = (struct allrefs *)ptr;
4352 if (allrefs_add(data, obj)) {
4358 allrefs_roots_i(VALUE obj, void *ptr)
4360 struct allrefs *data = (struct allrefs *)ptr;
4362 data->root_obj = MAKE_ROOTSIG(data->category);
4364 if (allrefs_add(data, obj)) {
4372 struct allrefs data;
4373 struct mark_func_data_struct mfd;
4376 data.objspace = objspace;
4379 mfd.mark_func = allrefs_roots_i;
4393 return data.references;
4399 struct reflist *refs = (struct reflist *)value;
4400 reflist_destruct(refs);
4405 objspace_allrefs_destruct(struct st_table *refs)
4407 st_foreach(refs, objspaec_allrefs_destruct_i, 0);
4411 #if RGENGC_CHECK_MODE >= 4
4416 struct reflist *refs = (struct reflist *)v;
4417 fprintf(stderr, "[allrefs_dump_i] %p (%s%s%s%s) <- ",
4423 fprintf(stderr, "\n");
4430 fprintf(stderr, "[all refs] (size: %d)\n", (int)objspace->rgengc.allrefs_table->num_entries);
4435 #if RGENGC_CHECK_MODE >= 3
4440 struct reflist *refs = (struct reflist *)v;
4445 fprintf(stderr, "gc_check_after_marks_i: %p (%s) is not marked and not oldgen.\n", (void *)obj, obj_type_name(obj));
4446 fprintf(stderr, "gc_check_after_marks_i: %p is referred from ", (void *)obj);
4449 if (reflist_refered_from_machine_context(refs)) {
4450 fprintf(stderr, " (marked from machine stack).\n");
4454 objspace->rgengc.error_count++;
4455 fprintf(stderr, "\n");
4463 gc_marks_check(rb_objspace_t *objspace, int (*checker_func)(ANYARGS), const char *checker_name)
4467 #if RGENGC_ESTIMATE_OLDMALLOC
4472 objspace->rgengc.allrefs_table = objspace_allrefs(objspace);
4475 if (objspace->rgengc.error_count > 0) {
4476 #if RGENGC_CHECK_MODE >= 4
4477 allrefs_dump(objspace);
4479 rb_bug("%s: GC has problem.", checker_name);
4482 objspace_allrefs_destruct(objspace->rgengc.allrefs_table);
4483 objspace->rgengc.allrefs_table = 0;
4487 #if RGENGC_ESTIMATE_OLDMALLOC
4497 struct mark_func_data_struct *prev_mark_func_data;
4507 #if RGENGC_CHECK_MODE >= 2
4510 if (full_mark == TRUE) {
4514 objspace->rgengc.young_object_count = 0;
4529 #if RGENGC_PROFILE > 0
4536 #if RGENGC_CHECK_MODE >= 3
4537 gc_marks_check(objspace, gc_check_after_marks_i, "after_marks");
4558 const char *status = " ";
4566 va_start(args, fmt);
4570 fprintf(out, "%s|", status);
4609 #if RGENGC_CHECK_MODE > 0
4614 rb_bug("rgengc_remember: should not remember %p (%s)\n",
4624 #if RGENGC_PROFILE > 0
4626 objspace->profile.remembered_normal_object_count++;
4627 #if RGENGC_PROFILE >= 2
4633 #if RGENGC_PROFILE >= 2
4661 #if RGENGC_PROFILE > 0
4662 size_t shady_object_count = 0, clear_count = 0;
4683 if (RVALUE_YOUNG_P((VALUE)p)) RVALUE_PROMOTE_YOUNG((VALUE)p);
4686 #if RGENGC_PROFILE > 0
4691 #if RGENGC_PROFILE > 0
4692 shady_object_count++;
4707 rgengc_report(2, objspace, "rgengc_rememberset_mark: finished\n");
4709 #if RGENGC_PROFILE > 0
4710 rgengc_report(2, objspace, "rgengc_rememberset_mark: clear_count: %"PRIdSIZE", shady_object_count: %"PRIdSIZE"\n", clear_count, shady_object_count);
4713 record->remembered_normal_objects = clear_count;
4714 record->remembered_shady_objects = shady_object_count;
4744 rgengc_report(2, objspace, "rb_gc_wb: %p (%s) -> %p (%s)\n",
4771 objspace->profile.shade_operation_count++;
4772 #if RGENGC_PROFILE >= 2
4779 RVALUE_DEMOTE_FROM_YOUNG(obj);
4796 fprintf(stderr, "%s\t%d\n", (char *)key, (int)val);
4811 if (rgengc_unprotect_logging_table == 0) {
4854 static ID ID_marked;
4856 static ID ID_wb_protected, ID_old, ID_remembered;
4858 static ID ID_young, ID_infant;
4863 #define I(s) ID_##s = rb_intern(#s);
4879 flags[n++] = ID_wb_protected;
4881 flags[n++] = ID_old;
4883 if (RVALUE_YOUNG_P(obj) && n<max)
4884 flags[n++] = ID_young;
4886 flags[n++] = ID_infant;
4889 flags[n++] = ID_remembered;
4892 flags[n++] = ID_marked;
4947 if (tmp->varptr == addr) {
4982 immediate_sweep = !(flag & 0x02);
4986 immediate_sweep = TRUE;
5008 if (GC_NOTIFY) fprintf(stderr, "start garbage_collect(%d, %d, %d)\n", full_mark, immediate_sweep, reason);
5022 rb_bug("during_gc should not be 0. RUBY_INTERNAL_EVENT_GC_START user should not cause GC in events.");
5025 gc_sweep(objspace, immediate_sweep);
5030 if (GC_NOTIFY) fprintf(stderr, "end garbage_collect()\n");
5067 #if GC_PROFILE_MORE_DETAIL
5071 #if GC_PROFILE_MORE_DETAIL
5112 fprintf(stderr, "[FATAL] failed to allocate memory\n");
5159 static ID keyword_ids[2];
5166 if (!keyword_ids[0]) {
5167 keyword_ids[0] = rb_intern("full_mark");
5168 keyword_ids[1] = rb_intern("immediate_sweep");
5174 full_mark = RTEST(kwvals[0]);
5176 immediate_sweep = RTEST(kwvals[1]);
5207 #if RGENGC_PROFILE >= 2
5209 gc_count_add_each_types(VALUE hash, const char *name, const size_t *types)
5213 for (i=0; i<T_MASK; i++) {
5246 static VALUE sym_major_by = Qnil, sym_gc_by, sym_immediate_sweep, sym_have_finalizer;
5247 static VALUE sym_nofree, sym_oldgen, sym_shady, sym_rescan, sym_stress;
5248 #if RGENGC_ESTIMATE_OLDMALLOC
5249 static VALUE sym_oldmalloc;
5251 static VALUE sym_newobj, sym_malloc, sym_method, sym_capi;
5262 if (sym_major_by == Qnil) {
5263 #define S(s) sym_##s = ID2SYM(rb_intern_const(#s))
5273 #if RGENGC_ESTIMATE_OLDMALLOC
5283 #define SET(name, attr) \
5284 if (key == sym_##name) \
5286 else if (hash != Qnil) \
5287 rb_hash_aset(hash, sym_##name, (attr));
5294 #if RGENGC_ESTIMATE_OLDMALLOC
5299 SET(major_by, major_by);
5357 static VALUE sym_count;
5358 static VALUE sym_heap_used, sym_heap_length, sym_heap_increment;
5359 static VALUE sym_heap_live_slot, sym_heap_free_slot, sym_heap_final_slot, sym_heap_swept_slot;
5360 static VALUE sym_heap_eden_page_length, sym_heap_tomb_page_length;
5361 static VALUE sym_total_allocated_object, sym_total_freed_object;
5362 static VALUE sym_malloc_increase, sym_malloc_limit;
5364 static VALUE sym_minor_gc_count, sym_major_gc_count;
5365 static VALUE sym_remembered_shady_object, sym_remembered_shady_object_limit;
5366 static VALUE sym_old_object, sym_old_object_limit;
5367 #if RGENGC_ESTIMATE_OLDMALLOC
5368 static VALUE sym_oldmalloc_increase, sym_oldmalloc_limit;
5371 static VALUE sym_generated_normal_object_count, sym_generated_shady_object_count;
5372 static VALUE sym_shade_operation_count, sym_promote_infant_count, sym_promote_young_count;
5373 static VALUE sym_remembered_normal_object_count, sym_remembered_shady_object_count;
5382 else if (SYMBOL_P(hash_or_sym) && out)
5387 if (sym_count == 0) {
5388 #define S(s) sym_##s = ID2SYM(rb_intern_const(#s))
5397 S(heap_eden_page_length);
5398 S(heap_tomb_page_length);
5399 S(total_allocated_object);
5400 S(total_freed_object);
5406 S(remembered_shady_object);
5407 S(remembered_shady_object_limit);
5409 S(old_object_limit);
5410 #if RGENGC_ESTIMATE_OLDMALLOC
5411 S(oldmalloc_increase);
5415 S(generated_normal_object_count);
5416 S(generated_shady_object_count);
5417 S(shade_operation_count);
5418 S(promote_infant_count);
5419 S(promote_young_count);
5420 S(remembered_normal_object_count);
5421 S(remembered_shady_object_count);
5427 #define SET(name, attr) \
5428 if (key == sym_##name) \
5429 return (*out = attr, Qnil); \
5430 else if (hash != Qnil) \
5431 rb_hash_aset(hash, sym_##name, SIZET2NUM(attr));
5456 #if RGENGC_ESTIMATE_OLDMALLOC
5462 SET(generated_normal_object_count, objspace->profile.generated_normal_object_count);
5463 SET(generated_shady_object_count, objspace->profile.generated_shady_object_count);
5464 SET(shade_operation_count, objspace->profile.shade_operation_count);
5465 SET(promote_infant_count, objspace->profile.promote_infant_count);
5467 SET(promote_young_count, objspace->profile.promote_young_count);
5469 SET(remembered_normal_object_count, objspace->profile.remembered_normal_object_count);
5478 #if defined(RGENGC_PROFILE) && RGENGC_PROFILE >= 2
5480 gc_count_add_each_types(hash, "generated_normal_object_count_types", objspace->profile.generated_normal_object_count_types);
5481 gc_count_add_each_types(hash, "generated_shady_object_count_types", objspace->profile.generated_shady_object_count_types);
5482 gc_count_add_each_types(hash, "shade_operation_count_types", objspace->profile.shade_operation_count_types);
5483 gc_count_add_each_types(hash, "promote_infant_types", objspace->profile.promote_infant_types);
5485 gc_count_add_each_types(hash, "promote_young_types", objspace->profile.promote_young_types);
5487 gc_count_add_each_types(hash, "remembered_normal_object_count_types", objspace->profile.remembered_normal_object_count_types);
5488 gc_count_add_each_types(hash, "remembered_shady_object_count_types", objspace->profile.remembered_shady_object_count_types);
5655 char *ptr = getenv(name);
5660 if (val > lower_bound) {
5661 if (RTEST(ruby_verbose)) fprintf(stderr, "%s=%d (default value: %d)\n", name, val, *default_value);
5662 *default_value = val;
5666 if (RTEST(ruby_verbose)) fprintf(stderr, "%s=%d (default value: %d) is ignored because it must be greater than %d.\n", name, val, *default_value, lower_bound);
5675 char *ptr = getenv(name);
5680 if (val > lower_bound) {
5681 if (RTEST(ruby_verbose)) fprintf(stderr, "%s=%f (%f)\n", name, val, *default_value);
5682 *default_value = val;
5686 if (RTEST(ruby_verbose)) fprintf(stderr, "%s=%f (default value: %f) is ignored because it must be greater than %f.\n", name, val, *default_value, lower_bound);
5699 if (min_pages > heap_eden->page_length) {
5738 if (safe_level > 0) return;
5745 rb_warn("RUBY_FREE_MIN is obsolete. Use RUBY_GC_HEAP_FREE_SLOTS instead.");
5753 rb_warn("RUBY_HEAP_MIN_SLOTS is obsolete. Use RUBY_GC_HEAP_INIT_SLOTS instead.");
5765 #if RGENGC_ESTIMATE_OLDMALLOC
5787 struct mark_func_data_struct mfd;
5788 mfd.mark_func = func;
5814 struct mark_func_data_struct mfd;
5817 data.data = passing_data;
5853 fprintf(stderr, "[FATAL] %s\n", msg);
5878 fprintf(stderr, "[FATAL] failed to allocate memory\n");
5890 fprintf(stderr, "[FATAL] failed to allocate memory\n");
5907 #if defined __MINGW32__
5908 res = __mingw_aligned_malloc(size, alignment);
5909 #elif defined _WIN32 && !defined __CYGWIN__
5910 void *_aligned_malloc(size_t, size_t);
5911 res = _aligned_malloc(size, alignment);
5912 #elif defined(HAVE_POSIX_MEMALIGN)
5913 if (posix_memalign(&res, alignment, size) == 0) {
5919 #elif defined(HAVE_MEMALIGN)
5920 res = memalign(alignment, size);
5923 res = malloc(alignment + size + sizeof(void*));
5924 aligned = (char*)res + alignment + sizeof(void*);
5925 aligned -= ((VALUE)aligned & (alignment - 1));
5926 ((void**)aligned)[-1] = res;
5927 res = (void*)aligned;
5930 #if defined(_DEBUG) || GC_DEBUG
5932 assert(((alignment - 1) & alignment) == 0);
5933 assert(alignment % sizeof(void*) == 0);
5941 #if defined __MINGW32__
5942 __mingw_aligned_free(ptr);
5943 #elif defined _WIN32 && !defined __CYGWIN__
5945 #elif defined(HAVE_MEMALIGN) || defined(HAVE_POSIX_MEMALIGN)
5948 free(((void**)ptr)[-1]);
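/* [editor's sketch] Why the portable fallback above works: over-allocate by
 * alignment + sizeof(void*), round the payload address down onto the next
 * alignment boundary past the bookkeeping word, and stash the pointer malloc
 * actually returned in the word just below the payload so aligned_free()
 * can recover it.  Standalone version of the same trick: */
#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

static void *aligned_malloc_fallback(size_t alignment, size_t size)
{
    char *raw = malloc(alignment + size + sizeof(void *));
    if (raw == NULL) return NULL;
    char *aligned = raw + alignment + sizeof(void *);
    aligned -= (uintptr_t)aligned & (alignment - 1);  /* round to boundary   */
    ((void **)aligned)[-1] = raw;                     /* remember real block */
    return aligned;
}

static void aligned_free_fallback(void *ptr)
{
    free(((void **)ptr)[-1]);
}

int main(void)
{
    void *p = aligned_malloc_fallback(1 << 14, 100);
    assert(((uintptr_t)p & ((1 << 14) - 1)) == 0);    /* 16KB aligned */
    aligned_free_fallback(p);
    return 0;
}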
5952 static inline size_t
5955 #ifdef HAVE_MALLOC_USABLE_SIZE
5956 return malloc_usable_size(ptr);
5971 if (sub == 0) return;
5975 if (val < sub) sub = val;
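/* [editor's sketch] Full shape of the clamped decrement whose fragments
 * appear above: re-read the counter, clamp the decrement so the counter can
 * never wrap below zero, and retry if another thread won the race.  Uses the
 * GCC/Clang __sync builtin as a stand-in for Ruby's ATOMIC_SIZE_CAS. */
#include <stddef.h>

static void atomic_sub_nounderflow_sketch(size_t *var, size_t sub)
{
    if (sub == 0) return;
    for (;;) {
        size_t val = *var;
        if (val < sub) sub = val;   /* never underflow */
        if (__sync_bool_compare_and_swap(var, val, val - sub)) break;
    }
}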
5983 if (new_size > old_size) {
5985 #if RGENGC_ESTIMATE_OLDMALLOC
5991 #if RGENGC_ESTIMATE_OLDMALLOC
6012 #if MALLOC_ALLOCATED_SIZE
6013 if (new_size >= old_size) {
6017 size_t dec_size = old_size - new_size;
6018 size_t allocated_size = objspace->malloc_params.allocated_size;
6020 #if MALLOC_ALLOCATED_SIZE_CHECK
6021 if (allocated_size < dec_size) {
6022 rb_bug("objspace_malloc_increase: underflow malloc_params.allocated_size.");
6028 if (0) fprintf(stderr, "increase - ptr: %p, type: %s, new_size: %d, old_size: %d\n",
6033 (int)new_size, (int)old_size);
6042 if (allocations > 0) {
6045 #if MALLOC_ALLOCATED_SIZE_CHECK
6057 static inline size_t
6060 if ((ssize_t)size < 0) {
6063 if (size == 0) size = 1;
6065 #if CALC_EXACT_MALLOC_SIZE
6066 size += sizeof(size_t);
6072 static inline void *
6075 #if CALC_EXACT_MALLOC_SIZE
6076 ((size_t *)mem)[0] = size;
6077 mem = (size_t *)mem + 1;
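/* [editor's sketch] The CALC_EXACT_MALLOC_SIZE header trick end to end:
 * each block is over-allocated by one size_t, the requested size is written
 * there, the caller sees the address just past it, and free/realloc step
 * back one slot to recover the real block and its recorded size. */
#include <stdlib.h>

static void *sized_malloc(size_t size)
{
    size_t *mem = malloc(size + sizeof(size_t));
    if (mem == NULL) return NULL;
    mem[0] = size;       /* bookkeeping header              */
    return mem + 1;      /* payload starts after the header */
}

static void sized_free(void *ptr, size_t *old_size)
{
    size_t *mem = (size_t *)ptr - 1;   /* step back to the header */
    if (old_size) *old_size = mem[0];
    free(mem);
}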
6083 #define TRY_WITH_GC(alloc) do { \
6085 (!garbage_collect_with_gvl(objspace, 1, 1, GPR_FLAG_MALLOC) || \
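/* [editor's sketch] The retry protocol TRY_WITH_GC encodes (its middle lines
 * are elided above), spelled out as plain control flow: attempt the
 * allocation, on failure run a full GC and try exactly once more, and only
 * then report out-of-memory.  run_full_gc/out_of_memory are hypothetical
 * stand-ins for garbage_collect_with_gvl() and ruby_memerror(). */
#include <stdlib.h>

extern void run_full_gc(void);    /* hypothetical */
extern void out_of_memory(void);  /* hypothetical */

static void *alloc_with_gc_retry(size_t size)
{
    void *mem = malloc(size);
    if (mem == NULL) {
        run_full_gc();                     /* reclaim, then retry once */
        mem = malloc(size);
        if (mem == NULL) out_of_memory();  /* give up                  */
    }
    return mem;
}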
6108 if ((ssize_t)new_size < 0) {
6119 if (new_size == 0) {
6124 #if CALC_EXACT_MALLOC_SIZE
6125 new_size += sizeof(size_t);
6126 ptr = (size_t *)ptr - 1;
6127 oldsize = ((size_t *)ptr)[0];
6134 #if CALC_EXACT_MALLOC_SIZE
6136 mem = (size_t *)mem + 1;
6147 #if CALC_EXACT_MALLOC_SIZE
6148 ptr = ((size_t *)ptr) - 1;
6149 oldsize = ((size_t*)ptr)[0];
6164 static inline size_t
6167 size_t len = size * n;
6168 if (n != 0 && size != len / n) {
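/* [editor's sketch] The division-based overflow check above as a reusable
 * helper: size * n overflowed exactly when dividing the product back by n
 * does not give size.  Cheap and portable; no 128-bit arithmetic needed. */
#include <stdbool.h>
#include <stddef.h>

static bool mul_size_overflows(size_t n, size_t size, size_t *out)
{
    size_t len = size * n;
    if (n != 0 && size != len / n) return true;  /* overflowed */
    *out = len;
    return false;
}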
6199 #ifdef ruby_sized_xrealloc
6200 #undef ruby_sized_xrealloc
6214 #ifdef ruby_sized_xrealloc2
6215 #undef ruby_sized_xrealloc2
6220 size_t len = size * n;
6221 if (n != 0 && size != len / n) {
6233 #ifdef ruby_sized_xfree
6234 #undef ruby_sized_xfree
6257 #if CALC_EXACT_MALLOC_SIZE
6258 size += sizeof(size_t);
6261 #if CALC_EXACT_MALLOC_SIZE
6263 ((size_t *)mem)[0] = 0;
6264 mem = (size_t *)mem + 1;
6272 size_t *mem = (size_t *)ptr;
6273 #if CALC_EXACT_MALLOC_SIZE
6279 #if MALLOC_ALLOCATED_SIZE
6290 gc_malloc_allocated_size(VALUE self)
6305 gc_malloc_allocations(VALUE self)
6321 #define WMAP_DELETE_DEAD_OBJECT_IN_MARK 0
6323 #if WMAP_DELETE_DEAD_OBJECT_IN_MARK
6338 #if WMAP_DELETE_DEAD_OBJECT_IN_MARK
6365 *(size_t *)arg += (ptr[0] + 1) * sizeof(VALUE);
6407 if (!existing) return ST_STOP;
6409 for (i = j = 1, size = ptr[0]; i <= size; ++i) {
6410 if (ptr[i] != wmap) {
6440 rids = (VALUE *)data;
6442 for (i = 0; i < size; ++i) {
6631 size = (ptr = optr = (VALUE *)*val)[0];
6642 if (ptr == optr) return ST_STOP;
6695 #if SIZEOF_ST_INDEX_T <= SIZEOF_LONG
6706 #define GC_PROFILE_RECORD_DEFAULT_SIZE 100
6711 #if defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_PROCESS_CPUTIME_ID)
6713 static int try_clock_gettime = 1;
6715 if (try_clock_gettime && clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &ts) == 0) {
6719 try_clock_gettime = 0;
6726 struct rusage usage;
6728 if (getrusage(RUSAGE_SELF, &usage) == 0) {
6729 time = usage.ru_utime;
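/* [editor's sketch] Converting the getrusage() result above to seconds:
 * ru_utime is a struct timeval, so user CPU time is tv_sec plus the
 * microsecond field scaled down, which is what getrusage_time() returns. */
#include <stdio.h>
#include <sys/time.h>
#include <sys/resource.h>

static double user_cpu_seconds(void)
{
    struct rusage usage;
    if (getrusage(RUSAGE_SELF, &usage) == 0)
        return usage.ru_utime.tv_sec + usage.ru_utime.tv_usec * 1e-6;
    return 0.0;
}

int main(void)
{
    printf("user CPU so far: %f s\n", user_cpu_seconds());
    return 0;
}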
6737 FILETIME creation_time, exit_time, kernel_time, user_time;
6742 if (GetProcessTimes(GetCurrentProcess(),
6743 &creation_time, &exit_time, &kernel_time, &user_time) != 0) {
6744 memcpy(&ui, &user_time, sizeof(FILETIME));
6745 q = ui.QuadPart / 10L;
6746 t = (DWORD)(q % 1000000L) * 1e-6;
6751 t += (double)(DWORD)(q >> 16) * (1 << 16);
6752 t += (DWORD)q & ~(~0 << 16);
6781 rb_bug("gc_profile malloc or realloc miss");
6788 #if MALLOC_ALLOCATED_SIZE
6791 #if GC_PROFILE_DETAIL_MEMORY
6794 struct rusage usage;
6795 if (getrusage(RUSAGE_SELF, &usage) == 0) {
6796 record->maxrss = usage.ru_maxrss;
6797 record->minflt = usage.ru_minflt;
6798 record->majflt = usage.ru_majflt;
6811 #if GC_PROFILE_MORE_DETAIL
6812 record->prepare_time = objspace->profile.prepare_time;
6847 #if GC_PROFILE_MORE_DETAIL
6860 #if GC_PROFILE_MORE_DETAIL
6897 record->gc_time += sweep_time;
6903 #if GC_PROFILE_MORE_DETAIL
6904 record->gc_sweep_time += sweep_time;
6914 #if GC_PROFILE_MORE_DETAIL
6931 #if GC_PROFILE_MORE_DETAIL
6933 record->heap_live_objects = live;
6934 record->heap_free_objects = total - live;
7041 #if GC_PROFILE_MORE_DETAIL
7056 #if RGENGC_PROFILE > 0
7067 #if GC_PROFILE_MORE_DETAIL
7068 #define MAJOR_REASON_MAX 0x10
7071 gc_profile_dump_major_reason(int flags, char *buff)
7082 if (reason & GPR_FLAG_MAJOR_BY_##x) { \
7083 buff[i++] = #x[0]; \
7084 if (i >= MAJOR_REASON_MAX) rb_bug("gc_profile_dump_major_reason: overflow"); \
7092 #if RGENGC_ESTIMATE_OLDMALLOC
7106 #ifdef MAJOR_REASON_MAX
7107 char reason_str[MAJOR_REASON_MAX];
7115 append(out, rb_str_new_cstr("Index Invoke Time(sec) Use Size(byte) Total Size(byte) Total Object GC Time(ms)\n"));
7117 for (i = 0; i < count; i++) {
7124 #if GC_PROFILE_MORE_DETAIL
7127 "Prepare Time = Previously GC's rest sweep time\n"
7128 "Index Flags Allocate Inc. Allocate Limit"
7132 " Use Page Mark Time(ms) Sweep Time(ms) Prepare Time(ms) LivingObj FreeObj RemovedObj EmptyObj"
7134 " OldgenObj RemNormObj RemShadObj"
7137 " MaxRSS(KB) MinorFLT MajorFLT"
7141 for (i = 0; i < count; i++) {
7157 gc_profile_dump_major_reason(record->flags, reason_str),
7164 record->allocate_increase, record->allocate_limit,
7166 record->allocated_size,
7168 record->heap_use_pages,
7169 record->gc_mark_time*1000,
7170 record->gc_sweep_time*1000,
7171 record->prepare_time*1000,
7173 record->heap_live_objects,
7174 record->heap_free_objects,
7175 record->removing_objects,
7176 record->empty_objects
7179 record->old_objects,
7180 record->remembered_normal_objects,
7181 record->remembered_shady_objects
7185 record->maxrss / 1024,
7257 for (i = 0; i < count; i++) {
7321 #define TYPE_NAME(t) case (t): return #t;
7372 fprintf(stderr, "pointer to heap?: true\n");
7375 fprintf(stderr, "pointer to heap?: false\n");
7382 fprintf(stderr, "young? : %s\n", RVALUE_YOUNG_P(obj) ? "true" : "false");
7384 fprintf(stderr, "old? : %s\n", RVALUE_OLD_P(obj) ? "true" : "false");
7390 fprintf(stderr, "lazy sweeping?: true\n");
7391 fprintf(stderr, "swept?: %s\n", is_swept_object(objspace, obj) ? "done" : "not yet");
7394 fprintf(stderr, "lazy sweeping?: false\n");
7401 fprintf(stderr, "WARNING: object %s(%p) is inadvertently collected\n", (char *)name, (void *)obj);
7406 rb_gcdebug_sentinel(VALUE obj, const char *name)
7552 #if MALLOC_ALLOCATED_SIZE
7561 #define OPT(o) if (o) rb_ary_push(opts, rb_str_new2(#o))
rb_event_flag_t hook_events
static int rgengc_unprotect_logging_exit_func_i(st_data_t key, st_data_t val)
#define RB_TYPE_P(obj, type)
static void * objspace_malloc_fixup(rb_objspace_t *objspace, void *mem, size_t size)
RUBY_SYMBOL_EXPORT_BEGIN typedef unsigned long st_data_t
#define SET(a, b, c, d, k, s, Ti)
static void mark_hash(rb_objspace_t *objspace, st_table *tbl)
void rb_class_remove_from_super_subclasses(VALUE klass)
size_t heap_total_objects
static void gc_marks(rb_objspace_t *objspace, int full_mark)
VALUE rb_gc_disable(void)
static st_index_t new_size(st_index_t size)
void rb_class_detach_subclasses(VALUE klass)
#define RCLASS_M_TBL_WRAPPER(c)
int register char * block
static void gc_heap_prepare_minimum_pages(rb_objspace_t *objspace, rb_heap_t *heap)
bits_t oldgen_bits[HEAP_BITMAP_LIMIT]
RUBY_EXTERN VALUE rb_cBasicObject
int ruby_thread_has_gvl_p(void)
static int rb_special_const_p(VALUE obj)
static void root_objects_from(VALUE obj, void *ptr)
static void gc_mark_roots(rb_objspace_t *objspace, int full_mark, const char **categoryp)
void(* func)(const char *category, VALUE, void *)
void rb_bug(const char *fmt,...)
#define heap_pages_final_slots
static VALUE gc_profile_disable(void)
static void objspace_malloc_increase(rb_objspace_t *objspace, void *mem, size_t new_size, size_t old_size, enum memop_type type)
static int set_zero(st_data_t key, st_data_t val, st_data_t arg)
#define RUBY_DTRACE_GC_SWEEP_END_ENABLED()
void rb_mark_tbl(struct st_table *)
#define GC_PROFILE_MORE_DETAIL
size_t strlen(const char *)
#define rb_gc_mark_locations(start, end)
void rb_objspace_free(rb_objspace_t *objspace)
static void gc_prof_set_malloc_info(rb_objspace_t *)
static VALUE wmap_each(VALUE self)
const char * rb_obj_classname(VALUE)
static void pop_mark_stack_chunk(mark_stack_t *stack)
static VALUE wmap_each_value(VALUE self)
double gc_sweep_start_time
size_t rb_io_memsize(const rb_io_t *)
static void gc_prof_timer_stop(rb_objspace_t *)
static int rgengc_remembered(rb_objspace_t *objspace, VALUE obj)
VALUE rb_eval_cmd(VALUE, VALUE, int)
static int max(int a, int b)
static VALUE id2ref(VALUE obj, VALUE objid)
int st_lookup(st_table *, st_data_t, st_data_t *)
void(* RUBY_DATA_FUNC)(void *)
void st_add_direct(st_table *, st_data_t, st_data_t)
void rb_define_singleton_method(VALUE obj, const char *name, VALUE(*func)(ANYARGS), int argc)
Defines a singleton method for obj.
VALUE rb_str_buf_append(VALUE, VALUE)
static int mark_keyvalue(st_data_t key, st_data_t value, st_data_t data)
void rb_mark_generic_ivar(VALUE)
#define GC_HEAP_GROWTH_FACTOR
static VALUE RVALUE_PROMOTED_P(VALUE obj)
static VALUE os_each_obj(int argc, VALUE *argv, VALUE os)
static int is_pointer_to_heap(rb_objspace_t *objspace, void *ptr)
RUBY_EXTERN VALUE rb_stdout
struct rb_objspace::@126 heap_pages
size_t ruby_stack_length(VALUE **)
static VALUE run_single_final(VALUE arg)
#define malloc_allocated_size
bits_t rememberset_bits[HEAP_BITMAP_LIMIT]
st_table * st_init_numtable(void)
#define GC_HEAP_OLDOBJECT_LIMIT_FACTOR
static void heap_page_free(rb_objspace_t *objspace, struct heap_page *page)
void rb_class_remove_from_module_subclasses(VALUE klass)
static void RVALUE_PROMOTE_INFANT(VALUE obj)
#define ATOMIC_EXCHANGE(var, val)
int rb_get_kwargs(VALUE keyword_hash, const ID *table, int required, int optional, VALUE *values)
static int wmap_each_i(st_data_t key, st_data_t val, st_data_t arg)
SSL_METHOD *(* func)(void)
static void wmap_mark(void *ptr)
static VALUE gc_verify_internal_consistency(VALUE self)
size_t onig_memsize(const regex_t *reg)
void * ruby_mimmalloc(size_t size)
void rb_gc_force_recycle(VALUE)
static void mark_m_tbl_wrapper(rb_objspace_t *objspace, struct method_table_wrapper *wrapper)
union rb_method_definition_struct::@149 body
static void gc_finalize_deferred(void *dmy)
#define rb_check_frozen(obj)
static size_t objspace_live_slot(rb_objspace_t *objspace)
static size_t objspace_malloc_prepare(rb_objspace_t *objspace, size_t size)
struct rb_method_entry_struct * orig_me
#define RSTRUCT_EMBED_LEN_MASK
struct rb_objspace::@127 flags
static VALUE check_gen_consistency(VALUE obj)
VALUE rb_str_new_cstr(const char *)
void(* mark_func)(VALUE v, void *data)
void rb_gc_mark_global_tbl(void)
static int stack_check(int water_mark)
void * ruby_xmalloc2(size_t n, size_t size)
void rb_define_private_method(VALUE klass, const char *name, VALUE(*func)(ANYARGS), int argc)
static VALUE gc_info_decode(int flags, VALUE hash_or_key)
VALUE rb_obj_freeze(VALUE)
void rb_gcdebug_print_obj_condition(VALUE obj)
#define GC_ENABLE_LAZY_SWEEP
static int wmap_keys_i(st_data_t key, st_data_t val, st_data_t arg)
void rb_define_alloc_func(VALUE, rb_alloc_func_t)
VALUE rb_ary_push(VALUE ary, VALUE item)
static void gc_prof_mark_timer_start(rb_objspace_t *)
#define RUBY_INTERNAL_EVENT_OBJSPACE_MASK
size_t oldmalloc_increase
#define RUBY_INTERNAL_EVENT_GC_START
#define RGENGC_ESTIMATE_OLDMALLOC
void * ruby_xrealloc2(void *ptr, size_t n, size_t size)
static struct heap_page * heap_page_create(rb_objspace_t *objspace)
static VALUE count_objects(int argc, VALUE *argv, VALUE os)
#define RGENGC_CHECK_MODE
void rb_gc_copy_finalizer(VALUE, VALUE)
void rb_objspace_reachable_objects_from_root(void(func)(const char *category, VALUE, void *), void *passing_data)
#define STACK_UPPER(x, a, b)
void rb_mark_generic_ivar_tbl(void)
static double elapsed_time_from(double time)
static void * objspace_xmalloc(rb_objspace_t *objspace, size_t size)
VALUE rb_protect(VALUE(*proc)(VALUE), VALUE data, int *state)
static void gc_prof_sweep_timer_start(rb_objspace_t *)
static void make_deferred(rb_objspace_t *objspace, RVALUE *p)
#define GC_MALLOC_LIMIT_MAX
VALUE rb_define_class_under(VALUE outer, const char *name, VALUE super)
Defines a class under the namespace of outer.
#define MEMMOVE(p1, p2, type, n)
void rb_raise(VALUE exc, const char *fmt,...)
int rb_io_fptr_finalize(rb_io_t *)
VALUE rb_proc_new(VALUE(*)(ANYARGS), VALUE)
static void ruby_memerror(void)
#define RETURN_ENUMERATOR(obj, argc, argv)
static void rgengc_report_body(int level, rb_objspace_t *objspace, const char *fmt,...)
int ruby_get_stack_grow_direction(volatile VALUE *addr)
void rb_sweep_method_entry(void *vm)
#define GC_PROFILE_DETAIL_MEMORY
VALUE rb_class_name(VALUE)
size_t st_memsize(const st_table *)
st_table * st_init_strtable(void)
static int wmap_memsize_map(st_data_t key, st_data_t val, st_data_t arg)
static int wmap_values_i(st_data_t key, st_data_t val, st_data_t arg)
static void gc_heap_rest_sweep(rb_objspace_t *objspace, rb_heap_t *heap)
static VALUE gc_profile_total_time(VALUE self)
static struct heap_page * heap_page_allocate(rb_objspace_t *objspace)
double oldobject_limit_factor
static int obj_free(rb_objspace_t *objspace, VALUE obj)
static int mark_key(st_data_t key, st_data_t value, st_data_t data)
static int heap_is_swept_object(rb_objspace_t *objspace, rb_heap_t *heap, VALUE ptr)
void rb_mark_set(struct st_table *)
void rb_objspace_each_objects(each_obj_callback *callback, void *data)
void rb_include_module(VALUE klass, VALUE module)
#define RUBY_INTERNAL_EVENT_NEWOBJ
#define GC_PROFILE_RECORD_DEFAULT_SIZE
static int rgengc_remembersetbits_get(rb_objspace_t *objspace, VALUE obj)
int rb_objspace_markable_object_p(VALUE obj)
#define ATOMIC_PTR_EXCHANGE(var, val)
static struct heap_page * heap_prepare_freepage(rb_objspace_t *objspace, rb_heap_t *heap)
size_t rb_obj_memsize_of(VALUE)
static const char * obj_type_name(VALUE obj)
void callback(ffi_cif *cif, void *resp, void **args, void *ctx)
size_t heap_used_at_gc_start
static VALUE wmap_each_key(VALUE self)
int char_offset_num_allocated
#define gc_event_hook(objspace, event, data)
#define heap_pages_min_free_slots
static size_t objspace_free_slot(rb_objspace_t *objspace)
static int mark_const_entry_i(ID key, const rb_const_entry_t *ce, st_data_t data)
static int heap_increment(rb_objspace_t *objspace, rb_heap_t *heap)
static size_t objspace_malloc_size(rb_objspace_t *objspace, void *ptr, size_t hint)
static int garbage_collect_body(rb_objspace_t *, int full_mark, int immediate_sweep, int reason)
static int wmap_free_map(st_data_t key, st_data_t val, st_data_t arg)
int ruby_stack_grow_direction
static VALUE gc_stat_internal(VALUE hash_or_sym, size_t *out)
int ruby_stack_check(void)
VALUE rb_obj_is_thread(VALUE obj)
void rb_mark_method_entry(const rb_method_entry_t *me)
struct heap_page_header header
static VALUE wmap_size(VALUE self)
struct rb_objspace::mark_func_data_struct * mark_func_data
static void push_mark_stack(mark_stack_t *, VALUE)
#define MARK_CHECKPOINT(category)
static size_t obj_memsize_of(VALUE obj, int use_tdata)
unsigned long rb_event_flag_t
#define ATOMIC_SIZE_ADD(var, val)
#define ATOMIC_SIZE_CAS(var, oldval, val)
static VALUE objspace_each_objects(VALUE arg)
static void negative_size_allocation_error(const char *)
gc_profile_record * current_record
#define ruby_initial_gc_stress
static VALUE define_final(int argc, VALUE *argv, VALUE os)
#define RUBY_DTRACE_GC_SWEEP_END()
static int force_chain_object(st_data_t key, st_data_t val, st_data_t arg)
#define heap_pages_deferred_final
#define GET_HEAP_OLDGEN_BITS(x)
static void RVALUE_DEMOTE_FROM_OLD(VALUE obj)
#define obj_id_to_ref(objid)
#define nd_set_type(n, t)
static VALUE wmap_allocate(VALUE klass)
static void mark_current_machine_context(rb_objspace_t *objspace, rb_thread_t *th)
static VALUE RVALUE_OLD_BITMAP_P(VALUE obj)
#define GC_OLDMALLOC_LIMIT_GROWTH_FACTOR
unsigned int malloc_limit_min
void rb_gc_unprotect_logging(void *objptr, const char *filename, int line)
static int mark_method_entry_i(ID key, const rb_method_entry_t *me, st_data_t data)
VALUE data[STACK_CHUNK_SIZE]
#define MEMZERO(p, type, n)
void rb_exc_raise(VALUE mesg)
static void finalize_list(rb_objspace_t *objspace, RVALUE *p)
void rb_free_m_tbl_wrapper(struct method_table_wrapper *wrapper)
int st_update(st_table *table, st_data_t key, st_update_callback_func *func, st_data_t arg)
struct RVALUE::@122::@124 values
int st_delete(st_table *, st_data_t *, st_data_t *)
#define FLUSH_REGISTER_WINDOWS
static void gc_marks_body(rb_objspace_t *objspace, int full_mark)
static size_t xmalloc2_size(size_t n, size_t size)
void * ruby_xcalloc(size_t n, size_t size)
#define GET_STACK_BOUNDS(start, end, appendix)
static void gc_prof_timer_start(rb_objspace_t *)
#define ATOMIC_SET(var, val)
size_t oldmalloc_increase_limit
#define heap_pages_max_free_slots
memset(y->frac+ix+1, 0,(y->Prec-(ix+1))*sizeof(BDIGIT))
static const char * type_name(int type, VALUE obj)
int rb_postponed_job_register_one(unsigned int flags, rb_postponed_job_func_t func, void *data)
VALUE rb_block_proc(void)
static VALUE wmap_has_key(VALUE self, VALUE key)
static void * aligned_malloc(size_t, size_t)
int ruby_disable_gc_stress
void rb_ary_free(VALUE ary)
RUBY_SYMBOL_EXPORT_BEGIN const char * rb_objspace_data_type_name(VALUE obj)
void rb_mark_end_proc(void)
static void gc_mark(rb_objspace_t *objspace, VALUE ptr)
#define GC_HEAP_GROWTH_MAX_SLOTS
struct RVALUE::@122::@123 free
size_t total_allocated_object_num
#define TypedData_Get_Struct(obj, type, data_type, sval)
static int garbage_collect(rb_objspace_t *, int full_mark, int immediate_sweep, int reason)
static void * gc_with_gvl(void *ptr)
static void finalize_deferred(rb_objspace_t *objspace)
static VALUE define_final0(VALUE obj, VALUE block)
void rb_gc_finalize_deferred(void)
static void * negative_size_allocation_error_with_gvl(void *ptr)
static void gc_page_sweep(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *sweep_page)
static int pop_mark_stack(mark_stack_t *, VALUE *)
static VALUE gc_stress_get(VALUE self)
RUBY_EXTERN VALUE rb_mKernel
static void gc_after_sweep(rb_objspace_t *objspace)
#define RARRAY_CONST_PTR(a)
static VALUE RVALUE_INFANT_P(VALUE obj)
#define RARRAY_AREF(a, i)
static int gc_marked(rb_objspace_t *objspace, VALUE ptr)
struct gc_profile_record gc_profile_record
struct rb_thread_struct::@192 machine
static void gc_mark_maybe(rb_objspace_t *objspace, VALUE ptr)
void rb_free_method_entry(rb_method_entry_t *me)
#define RUBY_DTRACE_GC_SWEEP_BEGIN_ENABLED()
static VALUE heap_get_freeobj(rb_objspace_t *objspace, rb_heap_t *heap)
void rb_define_const(VALUE, const char *, VALUE)
static void push_mark_stack_chunk(mark_stack_t *stack)
#define nonspecial_obj_id(obj)
#define RUBY_DTRACE_GC_MARK_END_ENABLED()
VALUE rb_str_cat2(VALUE, const char *)
static struct heap_page * heap_page_resurrect(rb_objspace_t *objspace)
static VALUE os_obj_of(VALUE of)
static size_t wmap_memsize(const void *ptr)
static int gc_heap_lazy_sweep(rb_objspace_t *objspace, rb_heap_t *heap)
void rb_vm_mark(void *ptr)
#define MALLOC_ALLOCATED_SIZE
static void heap_add_freepage(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *page)
unsigned int oldmalloc_limit_max
void onig_region_free(OnigRegion *r, int free_self)
bits_t mark_bits[HEAP_BITMAP_LIMIT]
void rb_objspace_set_event_hook(const rb_event_flag_t event)
void rb_gc_unregister_address(VALUE *)
#define heap_pages_increment
size_t onig_region_memsize(const OnigRegion *regs)
struct force_finalize_list * next
static int free_const_entry_i(ID key, rb_const_entry_t *ce, st_data_t data)
struct rb_objspace::@128 profile
unsigned char buf[MIME_BUF_SIZE]
#define ATOMIC_SIZE_EXCHANGE(var, val)
#define GC_MALLOC_LIMIT_GROWTH_FACTOR
static void gc_before_sweep(rb_objspace_t *objspace)
#define RUBY_INTERNAL_EVENT_GC_END_MARK
#define is_lazy_sweeping(heap)
#define RUBY_DTRACE_GC_MARK_END()
static void rgengc_rememberset_mark(rb_objspace_t *objspace, rb_heap_t *heap)
static RVALUE * heap_get_freeobj_from_next_freepage(rb_objspace_t *objspace, rb_heap_t *heap)
size_t rb_objspace_data_type_memsize(VALUE obj)
#define RBIGNUM_DIGITS(b)
#define RGENGC_WB_PROTECTED_NODE_CREF
void rb_gc_mark_machine_stack(rb_thread_t *th)
static VALUE wmap_values(VALUE self)
static void run_finalizer(rb_objspace_t *objspace, VALUE obj, VALUE table)
st_table * finalizer_table
#define rb_thread_raised_clear(th)
static void gc_mark_locations(rb_objspace_t *objspace, VALUE *start, VALUE *end)
struct rb_classext_struct rb_classext_t
VALUE rb_data_object_alloc(VALUE, void *, RUBY_DATA_FUNC, RUBY_DATA_FUNC)
static int internal_object_p(VALUE obj)
VALUE rb_newobj_of(VALUE, VALUE)
static void gc_prof_set_heap_info(rb_objspace_t *)
static int is_dead_object(rb_objspace_t *objspace, VALUE ptr)
void rb_define_module_function(VALUE module, const char *name, VALUE(*func)(ANYARGS), int argc)
Defines a module function for module.
size_t total_freed_object_num
int st_foreach(st_table *, int(*)(ANYARGS), st_data_t)
static void gc_profile_dump_on(VALUE out, VALUE(*append)(VALUE, VALUE))
struct RRational rational
static void rgengc_unprotect_logging_exit_func(void)
static double getrusage_time(void)
static VALUE gc_profile_report(int argc, VALUE *argv, VALUE self)
VALUE * ruby_initial_gc_stress_ptr
double malloc_limit_growth_factor
static void gc_set_initial_pages(void)
static void gc_prof_sweep_timer_stop(rb_objspace_t *)
VALUE rb_sprintf(const char *format,...)
int rb_objspace_internal_object_p(VALUE obj)
struct heap_page_body * body
#define GET_HEAP_MARK_BITS(x)
#define STACKFRAME_FOR_CALL_CFUNC
#define rb_node_newnode(type, a1, a2, a3)
unsigned int oldmalloc_limit_min
#define RUBY_TYPED_FREE_IMMEDIATELY
static void gc_prof_mark_timer_stop(rb_objspace_t *)
static void aligned_free(void *)
#define RSTRUCT_CONST_PTR(st)
static void add_stack_chunk_cache(mark_stack_t *stack, stack_chunk_t *chunk)
#define MARKED_IN_BITMAP(bits, p)
void rb_gc_register_mark_object(VALUE)
static void run_final(rb_objspace_t *objspace, VALUE obj)
static VALUE wmap_aset(VALUE self, VALUE wmap, VALUE orig)
#define RVALUE_OLDGEN_BITMAP(obj)
static int os_obj_of_i(void *vstart, void *vend, size_t stride, void *data)
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
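A hedged sketch of the variadic-method convention rb_scan_args supports: the format "11" declares one required and one optional argument, and such a method is registered with argc -1; all names are illustrative.

    #include <ruby.h>

    static VALUE
    text_repeat(int argc, VALUE *argv, VALUE self)
    {
        VALUE str, count;
        rb_scan_args(argc, argv, "11", &str, &count);
        if (NIL_P(count)) count = INT2FIX(2);   /* default when omitted */
        return rb_funcall(str, rb_intern("*"), 1, count);
    }

    void
    Init_text(void)
    {
        VALUE cText = rb_define_class("Text", rb_cObject);
        rb_define_method(cText, "repeat", text_repeat, -1);
    }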
static int rgengc_remember(rb_objspace_t *objspace, VALUE obj)
#define rb_thread_raised_set(th, f)
VALUE rb_obj_is_mutex(VALUE obj)
static int wmap_final_func(st_data_t *key, st_data_t *value, st_data_t arg, int existing)
static void mark_const_tbl(rb_objspace_t *objspace, st_table *tbl)
static void mark_tbl(rb_objspace_t *objspace, st_table *tbl)
void Init_stack(volatile VALUE *addr)
static st_table * rgengc_unprotect_logging_table
static size_t objspace_total_slot(rb_objspace_t *objspace)
static ruby_gc_params_t gc_params
void rb_gc_resurrect(VALUE ptr)
static VALUE lazy_sweep_enable(void)
#define ATOMIC_SIZE_INC(var)
static void verify_internal_consistency_reachable_i(VALUE child, void *ptr)
#define MALLOC_ALLOCATED_SIZE_CHECK
static void atomic_sub_nounderflow(size_t *var, size_t sub)
void rb_gc_mark_symbols(int full_mark)
static VALUE newobj_of(VALUE klass, VALUE flags, VALUE v1, VALUE v2, VALUE v3)
#define RUBY_DEFAULT_FREE
#define RBASIC_SET_CLASS_RAW(obj, cls)
#define SIZED_REALLOC_N(var, type, n, old_n)
static void heap_pages_free_unused_pages(rb_objspace_t *objspace)
static int is_id_value(rb_objspace_t *objspace, VALUE ptr)
static void rgengc_mark_and_rememberset_clear(rb_objspace_t *objspace, rb_heap_t *heap)
static void heap_add_pages(rb_objspace_t *objspace, rb_heap_t *heap, size_t add)
VALUE rb_obj_rgengc_writebarrier_protected_p(VALUE obj)
VALUE rb_str_buf_new(long)
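A hedged sketch pairing rb_str_buf_new's capacity hint with rb_str_cat2 (declared earlier in this index); the helper is illustrative.

    static VALUE
    greeting(const char *name)
    {
        VALUE s = rb_str_buf_new(32);   /* capacity hint; length starts at 0 */
        rb_str_cat2(s, "hello, ");
        rb_str_cat2(s, name);
        return s;
    }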
static VALUE gc_profile_enable_get(VALUE self)
static void rgengc_check_relation(rb_objspace_t *objspace, VALUE obj)
static VALUE gc_start_internal(int argc, VALUE *argv, VALUE self)
static int mark_entry(st_data_t key, st_data_t value, st_data_t data)
#define gc_prof_record(objspace)
void * ruby_sized_xrealloc2(void *ptr, size_t new_count, size_t element_size, size_t old_count) RUBY_ATTR_ALLOC_SIZE((2,3))
size_t rb_obj_gc_flags(VALUE, ID[], size_t)
static int is_markable_object(rb_objspace_t *objspace, VALUE obj)
unsigned int malloc_limit_max
static VALUE RVALUE_OLD_P(VALUE obj)
static void * ruby_memerror_body(void *dummy)
VALUE rb_undefine_finalizer(VALUE)
RUBY_FUNC_EXPORTED size_t rb_ary_memsize(VALUE ary)
#define heap_pages_swept_slots
VALUE rb_gc_latest_gc_info(VALUE)
int clock_gettime(clockid_t, struct timespec *)
void ruby_sized_xfree(void *x, size_t size)
#define SET_MACHINE_STACK_END(p)
void ruby_init_stack(volatile VALUE *)
static int get_envparam_double(const char *name, double *default_value, double lower_bound)
VALUE rb_ensure(VALUE(*b_proc)(ANYARGS), VALUE data1, VALUE(*e_proc)(ANYARGS), VALUE data2)
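A hedged sketch of the rb_ensure protocol: the ensure proc runs whether or not the body raises; both helper names are illustrative.

    static VALUE
    read_body(VALUE io)
    {
        return rb_funcall(io, rb_intern("read"), 0);
    }

    static VALUE
    close_io(VALUE io)
    {
        return rb_funcall(io, rb_intern("close"), 0);
    }

    static VALUE
    read_and_close(VALUE io)
    {
        /* close_io runs even if read_body raises */
        return rb_ensure(read_body, io, close_io, io);
    }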
VALUE rb_obj_method(VALUE, VALUE)
struct heap_page * free_next
void * ruby_xrealloc(void *ptr, size_t new_size)
VALUE rb_define_finalizer(VALUE, VALUE)
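A hedged sketch (assuming rb_proc_new for the callable): attaching a finalizer from C, the same mechanism ObjectSpace.define_finalizer uses; on_reclaim and watch are illustrative names.

    static VALUE
    on_reclaim(VALUE object_id, VALUE data)
    {
        /* runs after the watched object has been collected */
        return Qnil;
    }

    static void
    watch(VALUE obj)
    {
        rb_define_finalizer(obj, rb_proc_new(on_reclaim, Qnil));
    }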
#define RVALUE_WB_PROTECTED(obj)
static VALUE gc_stress_set(VALUE self, VALUE flag)
unsigned int heap_free_slots
void rb_class_detach_module_subclasses(VALUE klass)
static void wmap_free(void *ptr)
static int is_mark_stack_empty(mark_stack_t *stack)
int rb_garbage_collect(void)
#define VALGRIND_MAKE_MEM_UNDEFINED(p, n)
static VALUE gc_latest_gc_info(int argc, VALUE *argv, VALUE self)
void rb_gc_mark_maybe(VALUE)
void rb_free_const_table(st_table *tbl)
unsigned int growth_max_slots
static const rb_data_type_t weakmap_type
#define RCLASS_CONST_TBL(c)
VALUE rb_define_module_under(VALUE outer, const char *name)
void rb_gc_set_params(void)
static void heap_assign_page(rb_objspace_t *objspace, rb_heap_t *heap)
static void heap_unlink_page(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *page)
void rb_free_m_tbl(st_table *tbl)
void * ruby_xmalloc(size_t size)
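A hedged sketch: unlike plain malloc, ruby_xmalloc raises NoMemoryError instead of returning NULL, and the bytes count toward the malloc limit that drives GC; the buffer helper is illustrative.

    #include <ruby.h>
    #include <string.h>

    static char *
    make_buffer(size_t n)
    {
        char *buf = ruby_xmalloc(n);   /* raises NoMemoryError on failure */
        memset(buf, 0, n);
        return buf;                    /* release later with ruby_xfree(buf) */
    }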
static VALUE gc_profile_result(void)
static VALUE wmap_aref(VALUE self, VALUE wmap)
#define MARK_IN_BITMAP(bits, p)
void rb_gc_register_address(VALUE *)
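A hedged sketch: a VALUE kept in static storage is invisible to the conservative stack scan, so it must be registered with the GC (rb_global_variable, declared below, is the common public wrapper); cached_template is an illustrative name.

    static VALUE cached_template;

    void
    Init_cache(void)
    {
        cached_template = rb_str_new_cstr("header: %s");
        rb_gc_register_address(&cached_template);
        /* pair with rb_gc_unregister_address(&cached_template) if the
           slot ever goes away, e.g. in a DSO about to be unloaded */
    }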
static VALUE wmap_keys(VALUE self)
#define VALGRIND_MAKE_MEM_DEFINED(p, n)
#define RTYPEDDATA_DATA(v)
#define GC_HEAP_FREE_SLOTS
static int rgengc_remembersetbits_set(rb_objspace_t *objspace, VALUE obj)
void rb_gc_mark_parser(void)
static void free_stack_chunks(mark_stack_t *)
static VALUE gc_profile_record_get(void)
struct rb_objspace::<anonymous> malloc_params
static void gc_rest_sweep(rb_objspace_t *objspace)
void rb_gc_writebarrier_unprotect_promoted(VALUE obj)
static int wmap_aset_update(st_data_t *key, st_data_t *val, st_data_t arg, int existing)
static int ready_to_gc(rb_objspace_t *objspace)
void ruby_mimfree(void *ptr)
VALUE rb_obj_is_kind_of(VALUE, VALUE)
static void mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me)
void rb_objspace_reachable_objects_from(VALUE obj, void(func)(VALUE, void *), void *data)
rb_method_definition_t * def
void rb_set_errinfo(VALUE err)
static int wmap_inspect_i(st_data_t key, st_data_t val, st_data_t arg)
#define RUBY_DTRACE_GC_SWEEP_BEGIN()
int getrusage(int who, struct rusage *usage)
struct rb_objspace::<anonymous> rgengc
void * rb_thread_call_with_gvl(void *(*func)(void *), void *data1)
int rb_obj_respond_to(VALUE, ID, int)
static VALUE gc_count(VALUE self)
static int is_swept_object(rb_objspace_t *objspace, VALUE ptr)
#define TypedData_Make_Struct(klass, type, data_type, sval)
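A hedged sketch of the typed-data pattern, tying this macro to rb_data_type_t and the RUBY_TYPED_FREE_IMMEDIATELY flag listed above; the counter names are illustrative.

    #include <ruby.h>

    struct counter { long hits; };

    static const rb_data_type_t counter_type = {
        "counter",
        {0, RUBY_TYPED_DEFAULT_FREE, 0,},   /* mark, free, size */
        0, 0,
        RUBY_TYPED_FREE_IMMEDIATELY,        /* safe to free during sweep */
    };

    static VALUE
    counter_alloc(VALUE klass)
    {
        struct counter *c;
        VALUE obj = TypedData_Make_Struct(klass, struct counter,
                                          &counter_type, c);
        c->hits = 0;
        return obj;
    }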
static void heap_pages_expand_sorted(rb_objspace_t *objspace)
struct gc_list * global_list
static void * objspace_xrealloc(rb_objspace_t *objspace, void *ptr, size_t new_size, size_t old_size)
RUBY_EXTERN VALUE rb_cObject
static void heap_add_page(rb_objspace_t *objspace, rb_heap_t *heap, struct heap_page *page)
static VALUE wmap_inspect(VALUE self)
static void gc_sweep(rb_objspace_t *objspace, int immediate_sweep)
VALUE rb_obj_is_fiber(VALUE obj)
static stack_chunk_t * stack_chunk_alloc(void)
gc_profile_record * records
static void rb_objspace_call_finalizer(rb_objspace_t *objspace)
size_t total_allocated_object_num_at_gc_start
VALUE rb_obj_rgengc_promoted_p(VALUE obj)
struct rb_encoding_entry * list
static int get_envparam_int(const char *name, unsigned int *default_value, int lower_bound)
static int wmap_each_value_i(st_data_t key, st_data_t val, st_data_t arg)
void rb_gc_mark_unlinked_live_method_entries(void *pvm)
#define heap_pages_length
int each_obj_callback(void *, void *, size_t, void *)
struct rb_heap_struct rb_heap_t
rb_objspace_t * rb_objspace_alloc(void)
static int free_method_entry_i(ID key, rb_method_entry_t *me, st_data_t data)
void ruby_gc_set_params(int safe_level)
int st_insert(st_table *, st_data_t, st_data_t)
struct mark_stack mark_stack_t
#define rb_thread_raised_p(th, f)
#define heap_pages_sorted
struct rmatch_offset * char_offset
#define RVALUE_WB_PROTECTED_RAW(obj)
static void gc_event_hook_body(rb_objspace_t *objspace, const rb_event_flag_t event, VALUE data)
static void init_mark_stack(mark_stack_t *stack)
#define EXEC_EVENT_HOOK(th_, flag_, self_, id_, klass_, data_)
#define RCLASS_IV_INDEX_TBL(c)
#define assert(condition)
static void shrink_stack_chunk_cache(mark_stack_t *stack)
#define GC_HEAP_INIT_SLOTS
static int heap_ready_to_gc(rb_objspace_t *objspace, rb_heap_t *heap)
const char * rb_id2name(ID id)
void rb_gc_writebarrier(VALUE a, VALUE b)
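A hedged sketch of the write-barrier discipline behind this entry point: extensions normally go through RB_OBJ_WRITE, which performs the store and fires rb_gc_writebarrier when the holder is write-barrier protected; struct node is an illustrative name.

    struct node { VALUE payload; };

    static void
    node_set_payload(VALUE self, struct node *n, VALUE val)
    {
        /* not `n->payload = val;` -- an old `self` must remember the
           young `val` so the minor (RGenGC) mark phase can find it */
        RB_OBJ_WRITE(self, &n->payload, val);
    }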
static void gc_prof_setup_new_record(rb_objspace_t *objspace, int reason)
void rb_gc_call_finalizer_at_exit(void)
#define ruby_native_thread_p()
void rb_global_variable(VALUE *)
static int is_live_object(rb_objspace_t *objspace, VALUE ptr)
#define GC_OLDMALLOC_LIMIT_MIN
#define RBASIC_CLEAR_CLASS(obj)
unsigned int heap_init_slots
void rb_mark_hash(struct st_table *)
#define ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS
static void make_io_deferred(rb_objspace_t *objspace, RVALUE *p)
static void gc_mark_stacked_objects(rb_objspace_t *)
static int garbage_collect_with_gvl(rb_objspace_t *objspace, int full_mark, int immediate_sweep, int reason)
struct heap_page * using_page
#define RUBY_INTERNAL_EVENT_FREEOBJ
VALUE rb_data_typed_object_alloc(VALUE klass, void *datap, const rb_data_type_t *)
struct RTypedData typeddata
#define RUBY_DTRACE_GC_MARK_BEGIN_ENABLED()
size_t rb_generic_ivar_memsize(VALUE)
size_t remembered_shady_object_count
struct rb_objspace rb_objspace_t
static int verify_internal_consistency_i(void *page_start, void *page_end, size_t stride, void *ptr)
int rb_sigaltstack_size(void)
static void gc_mark_children(rb_objspace_t *objspace, VALUE ptr)
static void gc_before_heap_sweep(rb_objspace_t *objspace, rb_heap_t *heap)
#define SPECIAL_CONST_P(x)
VALUE rb_define_module(const char *name)
#define RUBY_DTRACE_GC_MARK_BEGIN()
#define gc_prof_enabled(objspace)
static void heap_set_increment(rb_objspace_t *objspace, size_t minimum_limit)
#define GC_MALLOC_LIMIT_MIN
each_obj_callback * callback
static VALUE undefine_final(VALUE os, VALUE obj)
static VALUE gc_profile_clear(void)
volatile VALUE * rb_gc_guarded_ptr(volatile VALUE *ptr)
void rb_gc_mark_encodings(void)
static void * objspace_xcalloc(rb_objspace_t *objspace, size_t count, size_t elsize)
static int wmap_each_key_i(st_data_t key, st_data_t val, st_data_t arg)
#define GET_HEAP_REMEMBERSET_BITS(x)
static void objspace_xfree(rb_objspace_t *objspace, void *ptr, size_t size)
#define GC_OLDMALLOC_LIMIT_MAX
static VALUE wmap_finalize(VALUE self, VALUE objid)
static VALUE gc_stat(int argc, VALUE *argv, VALUE self)
static void mark_set(rb_objspace_t *objspace, st_table *tbl)
void onig_free(regex_t *reg)
void rb_free_generic_ivar(VALUE)
static rb_thread_t * GET_THREAD(void)
void rb_define_method(VALUE klass, const char *name, VALUE(*func)(ANYARGS), int argc)
#define CALC_EXACT_MALLOC_SIZE
struct heap_page ** sorted
static ATTRIBUTE_NO_ADDRESS_SAFETY_ANALYSIS void mark_locations_array(rb_objspace_t *objspace, register VALUE *x, register long n)
double oldmalloc_limit_growth_factor
#define OBJ_WB_PROTECTED(x)
#define RUBY_INTERNAL_EVENT_GC_END_SWEEP
size_t rb_str_memsize(VALUE)
struct stack_chunk stack_chunk_t
void rb_warn(const char *fmt,...)
#define RBIGNUM_EMBED_FLAG
static VALUE gc_profile_enable(void)
#define CLEAR_IN_BITMAP(bits, p)
void st_free_table(st_table *)
#define RTYPEDDATA_TYPE(v)
void rb_gc_writebarrier_remember_promoted(VALUE obj)
static void should_be_finalizable(VALUE obj)
static void should_be_callable(VALUE block)
struct heap_page * free_pages
static void gc_setup_mark_bits(struct heap_page *page)
VALUE rb_io_write(VALUE, VALUE)
void * ruby_sized_xrealloc(void *ptr, size_t new_size, size_t old_size) RUBY_ATTR_ALLOC_SIZE((2))
static void heap_page_add_freeobj(rb_objspace_t *objspace, struct heap_page *page, VALUE obj)
static void gc_finalize_deferred_register(void)
#define TRY_WITH_GC(alloc)
struct stack_chunk * next
struct heap_page * sweep_pages
static int gc_mark_ptr(rb_objspace_t *objspace, VALUE ptr)
size_t remembered_shady_object_limit