@@ -1499,13 +1499,22 @@ RVALUE_WHITE_P(rb_objspace_t *objspace, VALUE obj)
 bool
 rb_gc_impl_gc_enabled_p(void *objspace_ptr)
 {
+    WHEN_USING_MMTK({
+        return mmtk_is_collection_enabled();
+    })
+
     rb_objspace_t *objspace = objspace_ptr;
     return !dont_gc_val();
 }
 
 void
 rb_gc_impl_gc_enable(void *objspace_ptr)
 {
+    WHEN_USING_MMTK({
+        mmtk_enable_collection();
+        return;
+    })
+
     rb_objspace_t *objspace = objspace_ptr;
 
     dont_gc_off();
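
The WHEN_USING_MMTK guard used in these functions is what lets the MMTk path short-circuit the default objspace logic. Below is a minimal sketch of how such a macro could be defined, assuming USE_MMTK is the compile-time switch and rb_mmtk_enabled_p() the runtime check that appear elsewhere in this diff; it is an illustration, not the definition from the tree.

#if USE_MMTK
/* Built with MMTk support: run the block only when MMTk was actually
 * selected at runtime. */
# define WHEN_USING_MMTK(code) if (rb_mmtk_enabled_p()) code
#else
/* Built without MMTk support: discard the block entirely, so the mmtk_*
 * calls inside it never have to compile. */
# define WHEN_USING_MMTK(code)
#endif

Because call sites pass a braced block, for example WHEN_USING_MMTK({ return mmtk_is_collection_enabled(); }), the expansion is a plain if statement and needs no trailing semicolon.
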
@@ -1514,6 +1523,11 @@ rb_gc_impl_gc_enable(void *objspace_ptr)
 void
 rb_gc_impl_gc_disable(void *objspace_ptr, bool finish_current_gc)
 {
+    WHEN_USING_MMTK({
+        mmtk_disable_collection();
+        return;
+    })
+
     rb_objspace_t *objspace = objspace_ptr;
 
     if (finish_current_gc) {
@@ -7444,7 +7458,11 @@ rb_gc_impl_start(void *objspace_ptr, bool full_mark, bool immediate_mark, bool i
     rb_objspace_t *objspace = objspace_ptr;
 #if USE_MMTK
     if (rb_mmtk_enabled_p()) {
-        mmtk_handle_user_collection_request(GET_THREAD());
+        // Note: GC.start will initiate garbage collection even if GC is manually disabled.
+        // Therefore, we need to force GC.
+        // We do a full-heap GC if full_mark is true. In StickyImmix this may or may not trigger defragmentation.
+        // There is currently no way to force a defragmentation GC.
+        mmtk_handle_user_collection_request(GET_THREAD(), true, full_mark);
 
         gc_finalize_deferred(objspace);
     }
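
Judging from the two call sites touched by this diff, mmtk_handle_user_collection_request now takes two extra booleans. The declaration below sketches that assumed shape; the parameter names force and full_heap are labels chosen for this sketch, not necessarily the binding's, and the return type is likewise an assumption.

/* Assumed shape, inferred from the call sites in this diff; parameter
 * names and return type are illustrative only. */
void mmtk_handle_user_collection_request(MMTk_VMMutatorThread thread,
                                         bool force,       /* collect even while GC is disabled (GC.start) */
                                         bool full_heap);  /* explicitly request a full-heap collection    */

The GC.start path here passes force = true so that an explicit request overrides GC.disable, and full_heap = full_mark; the malloc-pressure path further down passes false for both.
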
@@ -8741,6 +8759,8 @@ objspace_malloc_increase_body(rb_objspace_t *objspace, void *mem, size_t new_siz
     // object is allocated but other disjoint parts allocated by xmalloc have not been assigned
     // to the fields of the object. If GC is triggered at this time, the GC may try to scan
     // incomplete objects and crash.
+    // TODO: Re-investigate whether we shouldn't trigger GC. If triggering a GC during xmalloc
+    // is a problem, the default GC should crash, too.
     return true;
 }
 #endif
@@ -8767,7 +8787,8 @@ objspace_malloc_increase_body(rb_objspace_t *objspace, void *mem, size_t new_siz
         // This will trigger user-requested GC.
         // `obj_free` is called during GC for dead objects.
         // It will free underlying xmalloc-ed buffers for them.
-        mmtk_handle_user_collection_request((MMTk_VMMutatorThread)GET_THREAD());
+        // We don't force GC. When GC.disable is called, allocation should not trigger GC.
+        mmtk_handle_user_collection_request((MMTk_VMMutatorThread)GET_THREAD(), false, false);
 
         gc_reset_malloc_info(objspace, true);
     } else {
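
Taken together, the two call sites encode one policy: explicit GC.start requests are forced, while allocation-pressure requests respect GC.disable. The helper below is an illustrative consolidation of that policy, not code from the tree; the enum and function name are invented, and the MMTk binding declarations are assumed to be in scope.

#include <stdbool.h>

enum mmtk_gc_trigger { TRIGGER_GC_START, TRIGGER_MALLOC_PRESSURE };

static void
rb_mmtk_request_gc_example(MMTk_VMMutatorThread thread,
                           enum mmtk_gc_trigger trigger, bool full_mark)
{
    if (trigger == TRIGGER_GC_START) {
        /* GC.start must collect even while GC.disable is in effect, so the
         * request is forced; full_mark asks for a full-heap collection. */
        mmtk_handle_user_collection_request(thread, true, full_mark);
    }
    else {
        /* Allocation pressure must respect GC.disable, so the request is
         * not forced and no full-heap collection is explicitly requested. */
        mmtk_handle_user_collection_request(thread, false, false);
    }
}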