@@ -2380,6 +2380,16 @@ _rpmalloc_deallocate_direct_small_or_medium(span_t* span, void* block) {
 	--span->used_count;
 	span->free_list = block;
 	if (UNEXPECTED(span->used_count == span->list_size)) {
+		// If there are no used blocks it is guaranteed that no other external thread is accessing the span
+		if (span->used_count) {
+			// Make sure we have synchronized the deferred list and list size by using acquire semantics
+			// and guarantee that no external thread is accessing span concurrently
+			void* free_list;
+			do {
+				free_list = atomic_exchange_ptr_acquire(&span->free_list_deferred, INVALID_POINTER);
+			} while (free_list == INVALID_POINTER);
+			atomic_store_ptr_release(&span->free_list_deferred, free_list);
+		}
 		_rpmalloc_span_double_link_list_remove(&heap->size_class[span->size_class].partial_span, span);
 		_rpmalloc_span_release_to_cache(heap, span);
 	}
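
The added block is in effect a lock/unlock pair on the span's deferred free list: the acquire-ordered exchange spins until no other thread holds the INVALID_POINTER sentinel, then the release store puts the old list head back. Below is a minimal sketch of that sentinel spin-lock idiom, written with C11 atomics rather than rpmalloc's atomic_* wrappers; the struct and helper names are illustrative, not taken from the patch.

#include <stdatomic.h>
#include <stdint.h>

#define INVALID_POINTER ((void*)((uintptr_t)-1))

/* Illustrative stand-in for the span; only the deferred list head matters here. */
typedef struct {
	_Atomic(void*) free_list_deferred;
} span_stub_t;

/* Spin until no other thread holds the sentinel, then take it ourselves.
   The acquire exchange synchronizes with the release store made by whichever
   thread last published to the deferred list. */
static void* deferred_list_lock(span_stub_t* span) {
	void* list;
	do {
		list = atomic_exchange_explicit(&span->free_list_deferred,
		                                INVALID_POINTER, memory_order_acquire);
	} while (list == INVALID_POINTER);
	return list;
}

/* Publish the (possibly unchanged) list head again and drop the sentinel. */
static void deferred_list_unlock(span_stub_t* span, void* list) {
	atomic_store_explicit(&span->free_list_deferred, list, memory_order_release);
}

In the hunk above, the direct-free path takes and immediately releases this lock without touching the list contents; as the in-code comment says, the point is the synchronization edge with any thread still inside the deferred-free path, so the span can then be handed to the cache safely.
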
@@ -2408,8 +2418,9 @@ _rpmalloc_deallocate_defer_small_or_medium(span_t* span, void* block) {
 	} while (free_list == INVALID_POINTER);
 	*((void**)block) = free_list;
 	uint32_t free_count = ++span->list_size;
+	int all_deferred_free = (free_count == span->block_count);
 	atomic_store_ptr_release(&span->free_list_deferred, block);
-	if (free_count == span->block_count) {
+	if (all_deferred_free) {
 		// Span was completely freed by this block. Due to the INVALID_POINTER spin lock
 		// no other thread can reach this state simultaneously on this span.
 		// Safe to move to owner heap deferred cache
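
The second hunk appears to be an ordering fix: the block_count comparison is now evaluated before the release store that publishes the block, since that store is what makes the deferred list (and a fully freed span) visible to other threads, after which it is presumably no longer safe to keep reading span fields. A self-contained sketch of the resulting ordering, again in C11 atomics with simplified, illustrative types and names (span_stub_t and defer_free_block are not rpmalloc identifiers):

#include <stdatomic.h>
#include <stdint.h>

#define INVALID_POINTER ((void*)((uintptr_t)-1))

/* Simplified, illustrative span layout; field names follow the patch. */
typedef struct {
	_Atomic(void*) free_list_deferred;
	uint32_t list_size;
	uint32_t block_count;
} span_stub_t;

static void defer_free_block(span_stub_t* span, void* block) {
	/* Take the sentinel lock (acquire) so the list head and list_size are ours. */
	void* free_list;
	do {
		free_list = atomic_exchange_explicit(&span->free_list_deferred,
		                                     INVALID_POINTER, memory_order_acquire);
	} while (free_list == INVALID_POINTER);

	*((void**)block) = free_list;            /* link block onto the deferred list */
	uint32_t free_count = ++span->list_size;

	/* Decide whether the span is now completely free BEFORE publishing: once the
	   release store below lands, another thread may observe the fully freed span,
	   so span fields should not be read after that point. */
	int all_deferred_free = (free_count == span->block_count);

	atomic_store_explicit(&span->free_list_deferred, block, memory_order_release);

	if (all_deferred_free) {
		/* Hand the span to the owner heap's deferred cache (omitted in this sketch). */
	}
}
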