@@ -925,8 +925,9 @@ _rpmalloc_unmap_os(void* address, size_t size, size_t offset, size_t release) {
 	} else {
 #if defined(MADV_FREE_REUSABLE)
 		int ret;
-		while ((ret = madvise(address, size, MADV_FREE_REUSABLE)) == -1 && errno == EAGAIN);
-		if (ret == -1 && errno != 0)
+		while ((ret = madvise(address, size, MADV_FREE_REUSABLE)) == -1 && (errno == EAGAIN))
+			errno = 0;
+		if ((ret == -1) && (errno != 0))
 #elif defined(MADV_FREE)
 		if (madvise(address, size, MADV_FREE))
 #endif
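
The reworked loop retries madvise only while it keeps failing with EAGAIN and clears errno between attempts, so the error check afterwards cannot trip on a stale EAGAIN left over from an earlier iteration. A minimal standalone sketch of the same retry pattern, with MADV_DONTNEED standing in for the Darwin-specific MADV_FREE_REUSABLE and the wrapper name invented for the sketch:

    /* Sketch only: retry madvise while it fails with EAGAIN, clearing errno
     * between attempts so the final error check sees a fresh value. */
    #define _DEFAULT_SOURCE
    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <sys/mman.h>
    #include <unistd.h>

    static int
    advise_with_retry(void* address, size_t size) {
        int ret;
        while ((ret = madvise(address, size, MADV_DONTNEED)) == -1 && (errno == EAGAIN))
            errno = 0;
        return ((ret == -1) && (errno != 0)) ? -1 : 0;
    }

    int
    main(void) {
        size_t size = (size_t)sysconf(_SC_PAGESIZE);
        void* page = mmap(0, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (page == MAP_FAILED)
            return EXIT_FAILURE;
        int rc = advise_with_retry(page, size);
        printf("madvise %s\n", rc ? "failed" : "succeeded");
        munmap(page, size);
        return EXIT_SUCCESS;
    }
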
@@ -978,12 +979,9 @@ _rpmalloc_global_set_reserved_spans(span_t* master, span_t* reserve, size_t rese
 //! Add a span to double linked list at the head
 static void
 _rpmalloc_span_double_link_list_add(span_t** head, span_t* span) {
-	if (*head) {
-		span->next = *head;
+	if (*head)
 		(*head)->prev = span;
-	} else {
-		span->next = 0;
-	}
+	span->next = *head;
 	*head = span;
 }
 
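
The simplified head insert drops the else branch: span->next = *head already stores 0 when the list is empty, and the head's prev link is never read, so only a non-null old head needs its prev updated. A self-contained sketch of the same insert on a hypothetical node type:

    /* Sketch of the head insert above on a made-up node type; span_t carries
     * equivalent next/prev links. The new head's prev is deliberately left
     * untouched, matching the patched code, because it is never read. */
    #include <stddef.h>
    #include <stdio.h>

    typedef struct node_t node_t;
    struct node_t {
        node_t* next;
        node_t* prev;
        int     value;
    };

    static void
    list_add_head(node_t** head, node_t* node) {
        if (*head)
            (*head)->prev = node;  /* old head now points back at the new node */
        node->next = *head;        /* 0 when the list was empty */
        *head = node;
    }

    int
    main(void) {
        node_t a = {0, 0, 1}, b = {0, 0, 2};
        node_t* head = NULL;
        list_add_head(&head, &a);
        list_add_head(&head, &b);
        for (node_t* it = head; it; it = it->next)
            printf("%d\n", it->value);  /* prints 2 then 1 */
        return 0;
    }
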
@@ -1005,9 +1003,8 @@ _rpmalloc_span_double_link_list_remove(span_t** head, span_t* span) {
 		span_t* next_span = span->next;
 		span_t* prev_span = span->prev;
 		prev_span->next = next_span;
-		if (EXPECTED(next_span != 0)) {
+		if (EXPECTED(next_span != 0))
 			next_span->prev = prev_span;
-		}
 	}
 }
 
@@ -1241,7 +1238,7 @@ free_list_partial_init(void** list, void** first_block, void* page_start, void*
 
 //! Initialize an unused span (from cache or mapped) to be new active span, putting the initial free list in heap class free list
 static void*
-_rpmalloc_span_initialize_new(heap_t* heap, span_t* span, uint32_t class_idx) {
+_rpmalloc_span_initialize_new(heap_t* heap, heap_size_class_t* heap_size_class, span_t* span, uint32_t class_idx) {
 	rpmalloc_assert(span->span_count == 1, "Internal failure");
 	size_class_t* size_class = _memory_size_class + class_idx;
 	span->size_class = class_idx;
@@ -1255,11 +1252,11 @@ _rpmalloc_span_initialize_new(heap_t* heap, span_t* span, uint32_t class_idx) {
 
 	//Setup free list. Only initialize one system page worth of free blocks in list
 	void* block;
-	span->free_list_limit = free_list_partial_init(&heap->size_class[class_idx].free_list, &block,
+	span->free_list_limit = free_list_partial_init(&heap_size_class->free_list, &block,
 		span, pointer_offset(span, SPAN_HEADER_SIZE), size_class->block_count, size_class->block_size);
 	//Link span as partial if there remains blocks to be initialized as free list, or full if fully initialized
 	if (span->free_list_limit < span->block_count) {
-		_rpmalloc_span_double_link_list_add(&heap->size_class[class_idx].partial_span, span);
+		_rpmalloc_span_double_link_list_add(&heap_size_class->partial_span, span);
 		span->used_count = span->free_list_limit;
 	} else {
 #if RPMALLOC_FIRST_CLASS_HEAPS
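
The new heap_size_class argument lets callers resolve heap->size_class[class_idx] once and hand the resulting pointer to every helper, instead of re-indexing the array at each use. A rough sketch of that shape, using a simplified stand-in struct with only the fields these hunks touch (rpmalloc.c defines the real heap_size_class_t and the real class count):

    /* Sketch only: hoist the per-size-class lookup and pass the pointer down. */
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define SKETCH_SIZE_CLASS_COUNT 8   /* placeholder count for the sketch */

    typedef struct span_t span_t;

    typedef struct heap_size_class_t {
        void*   free_list;     /* free list of the currently active span */
        span_t* partial_span;  /* spans with free blocks remaining */
        span_t* cache;         /* single-span cache for this size class */
    } heap_size_class_t;

    typedef struct heap_t {
        heap_size_class_t size_class[SKETCH_SIZE_CLASS_COUNT];
    } heap_t;

    /* Helper receives the resolved pointer rather than heap plus class_idx. */
    static void*
    take_free_block(heap_size_class_t* heap_size_class) {
        return heap_size_class->free_list;
    }

    int
    main(void) {
        heap_t heap;
        memset(&heap, 0, sizeof(heap));
        uint32_t class_idx = 3;
        heap_size_class_t* heap_size_class = heap.size_class + class_idx;  /* hoisted once */
        printf("%p\n", take_free_block(heap_size_class));
        return 0;
    }
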
@@ -1722,23 +1719,17 @@ _rpmalloc_inc_span_statistics(heap_t* heap, size_t span_count, uint32_t class_id
 
 //! Get a span from one of the cache levels (thread cache, reserved, global cache) or fallback to mapping more memory
 static span_t*
-_rpmalloc_heap_extract_new_span(heap_t* heap, size_t span_count, uint32_t class_idx) {
+_rpmalloc_heap_extract_new_span(heap_t* heap, heap_size_class_t* heap_size_class, size_t span_count, uint32_t class_idx) {
 	span_t* span;
 #if ENABLE_THREAD_CACHE
-	if (class_idx < SIZE_CLASS_COUNT) {
-		if (heap->size_class[class_idx].cache) {
-			span = heap->size_class[class_idx].cache;
-			span_t* new_cache = 0;
-			if (heap->span_cache.count)
-				new_cache = heap->span_cache.span[--heap->span_cache.count];
-			heap->size_class[class_idx].cache = new_cache;
-			_rpmalloc_inc_span_statistics(heap, span_count, class_idx);
-			return span;
-		}
+	if (heap_size_class && heap_size_class->cache) {
+		span = heap_size_class->cache;
+		heap_size_class->cache = (heap->span_cache.count ? heap->span_cache.span[--heap->span_cache.count] : 0);
+		_rpmalloc_inc_span_statistics(heap, span_count, class_idx);
+		return span;
 	}
-#else
-	(void)sizeof(class_idx);
 #endif
+	(void)sizeof(class_idx);
 	// Allow 50% overhead to increase cache hits
 	size_t base_span_count = span_count;
 	size_t limit_span_count = (span_count > 2) ? (span_count + (span_count >> 1)) : span_count;
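
Folding the size-class cache behind the heap_size_class pointer also simplifies the refill: taking the cached span immediately backfills the slot from the generic thread span cache when one is available (the ternary replaces the old new_cache temporary), and the null check lets callers without per-class state pass 0. A small sketch of that take-and-backfill pattern on hypothetical types:

    /* Sketch only: take the item from a one-slot cache and immediately refill
     * the slot from a small backing stack, or leave it empty. */
    #include <stddef.h>
    #include <stdio.h>

    typedef struct item_t { int id; } item_t;

    typedef struct item_stack_t {
        size_t  count;
        item_t* slots[8];
    } item_stack_t;

    static item_t*
    take_cached(item_t** cache_slot, item_stack_t* backing) {
        item_t* taken = *cache_slot;
        *cache_slot = backing->count ? backing->slots[--backing->count] : NULL;
        return taken;
    }

    int
    main(void) {
        item_t a = {1}, b = {2};
        item_stack_t backing = {1, {&b}};
        item_t* cache = &a;
        item_t* taken = take_cached(&cache, &backing);
        printf("taken %d, cache now %d\n", taken->id, cache ? cache->id : 0);  /* 1, then 2 */
        return 0;
    }
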
@@ -2037,21 +2028,21 @@ free_list_pop(void** list) {
 
 //! Allocate a small/medium sized memory block from the given heap
 static void*
-_rpmalloc_allocate_from_heap_fallback(heap_t* heap, uint32_t class_idx) {
-	span_t* span = heap->size_class[class_idx].partial_span;
+_rpmalloc_allocate_from_heap_fallback(heap_t* heap, heap_size_class_t* heap_size_class, uint32_t class_idx) {
+	span_t* span = heap_size_class->partial_span;
 	if (EXPECTED(span != 0)) {
 		rpmalloc_assert(span->block_count == _memory_size_class[span->size_class].block_count, "Span block count corrupted");
 		rpmalloc_assert(!_rpmalloc_span_is_fully_utilized(span), "Internal failure");
 		void* block;
 		if (span->free_list) {
-			//Swap in free list if not empty
-			heap->size_class[class_idx].free_list = span->free_list;
+			//Span local free list is not empty, swap to size class free list
+			block = free_list_pop(&span->free_list);
+			heap_size_class->free_list = span->free_list;
 			span->free_list = 0;
-			block = free_list_pop(&heap->size_class[class_idx].free_list);
 		} else {
 			//If the span did not fully initialize free list, link up another page worth of blocks
 			void* block_start = pointer_offset(span, SPAN_HEADER_SIZE + ((size_t)span->free_list_limit * span->block_size));
-			span->free_list_limit += free_list_partial_init(&heap->size_class[class_idx].free_list, &block,
+			span->free_list_limit += free_list_partial_init(&heap_size_class->free_list, &block,
 				(void*)((uintptr_t)block_start & ~(_memory_page_size - 1)), block_start,
 				span->block_count - span->free_list_limit, span->block_size);
 		}
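
The reordered swap pops the first block straight off the span-local free list and only then publishes the remainder as the size-class free list, rather than publishing first and popping back through the heap indirection. free_list_pop treats each free block's first word as the link to the next free block; a standalone sketch of that intrusive pop and the new swap order, with the block storage and list names invented for the sketch:

    /* Sketch only: intrusive free list where each free block stores the next
     * pointer in its own first word, mirroring the swap order in the hunk. */
    #include <stddef.h>
    #include <stdio.h>

    static void*
    free_list_pop(void** list) {
        void* block = *list;
        *list = *((void**)block);  /* next pointer lives in the block itself */
        return block;
    }

    int
    main(void) {
        /* Three fake blocks chained through their first word. */
        void* blocks[3] = {&blocks[1], &blocks[2], NULL};
        void* span_free_list = &blocks[0];
        void* class_free_list = NULL;

        void* block = free_list_pop(&span_free_list);  /* take the first block */
        class_free_list = span_free_list;              /* remainder becomes the class list */
        span_free_list = NULL;

        printf("block=%p class_head=%p\n", block, class_free_list);
        return 0;
    }
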
@@ -2067,7 +2058,7 @@ _rpmalloc_allocate_from_heap_fallback(heap_t* heap, uint32_t class_idx) {
 			return block;
 
 		//The span is fully utilized, unlink from partial list and add to fully utilized list
-		_rpmalloc_span_double_link_list_pop_head(&heap->size_class[class_idx].partial_span, span);
+		_rpmalloc_span_double_link_list_pop_head(&heap_size_class->partial_span, span);
 #if RPMALLOC_FIRST_CLASS_HEAPS
 		_rpmalloc_span_double_link_list_add(&heap->full_span[class_idx], span);
 #endif
@@ -2076,10 +2067,10 @@ _rpmalloc_allocate_from_heap_fallback(heap_t* heap, uint32_t class_idx) {
 	}
 
 	//Find a span in one of the cache levels
-	span = _rpmalloc_heap_extract_new_span(heap, 1, class_idx);
+	span = _rpmalloc_heap_extract_new_span(heap, heap_size_class, 1, class_idx);
 	if (EXPECTED(span != 0)) {
 		//Mark span as owned by this heap and set base data, return first block
-		return _rpmalloc_span_initialize_new(heap, span, class_idx);
+		return _rpmalloc_span_initialize_new(heap, heap_size_class, span, class_idx);
 	}
 
 	return 0;
@@ -2091,10 +2082,11 @@ _rpmalloc_allocate_small(heap_t* heap, size_t size) {
 	rpmalloc_assert(heap, "No thread heap");
 	//Small sizes have unique size classes
 	const uint32_t class_idx = (uint32_t)((size + (SMALL_GRANULARITY - 1)) >> SMALL_GRANULARITY_SHIFT);
+	heap_size_class_t* heap_size_class = heap->size_class + class_idx;
 	_rpmalloc_stat_inc_alloc(heap, class_idx);
-	if (EXPECTED(heap->size_class[class_idx].free_list != 0))
-		return free_list_pop(&heap->size_class[class_idx].free_list);
-	return _rpmalloc_allocate_from_heap_fallback(heap, class_idx);
+	if (EXPECTED(heap_size_class->free_list != 0))
+		return free_list_pop(&heap_size_class->free_list);
+	return _rpmalloc_allocate_from_heap_fallback(heap, heap_size_class, class_idx);
 }
 
 //! Allocate a medium sized memory block from the given heap
@@ -2104,10 +2096,11 @@ _rpmalloc_allocate_medium(heap_t* heap, size_t size) {
 	//Calculate the size class index and do a dependent lookup of the final class index (in case of merged classes)
 	const uint32_t base_idx = (uint32_t)(SMALL_CLASS_COUNT + ((size - (SMALL_SIZE_LIMIT + 1)) >> MEDIUM_GRANULARITY_SHIFT));
 	const uint32_t class_idx = _memory_size_class[base_idx].class_idx;
+	heap_size_class_t* heap_size_class = heap->size_class + class_idx;
 	_rpmalloc_stat_inc_alloc(heap, class_idx);
-	if (EXPECTED(heap->size_class[class_idx].free_list != 0))
-		return free_list_pop(&heap->size_class[class_idx].free_list);
-	return _rpmalloc_allocate_from_heap_fallback(heap, class_idx);
+	if (EXPECTED(heap_size_class->free_list != 0))
+		return free_list_pop(&heap_size_class->free_list);
+	return _rpmalloc_allocate_from_heap_fallback(heap, heap_size_class, class_idx);
 }
 
 //! Allocate a large sized memory block from the given heap
@@ -2123,7 +2116,7 @@ _rpmalloc_allocate_large(heap_t* heap, size_t size) {
 		++span_count;
 
 	//Find a span in one of the cache levels
-	span_t* span = _rpmalloc_heap_extract_new_span(heap, span_count, SIZE_CLASS_LARGE);
+	span_t* span = _rpmalloc_heap_extract_new_span(heap, 0, span_count, SIZE_CLASS_LARGE);
 	if (!span)
 		return span;
 
@@ -2321,8 +2314,8 @@ _rpmalloc_deallocate_direct_small_or_medium(span_t* span, void* block) {
 		_rpmalloc_span_double_link_list_add(&heap->size_class[span->size_class].partial_span, span);
 		--heap->full_span_count;
 	}
-	--span->used_count;
 	*((void**)block) = span->free_list;
+	--span->used_count;
 	span->free_list = block;
 	if (UNEXPECTED(span->used_count == span->list_size)) {
 		_rpmalloc_span_double_link_list_remove(&heap->size_class[span->size_class].partial_span, span);
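
The two stores around the relocated decrement form the intrusive free-list push: the block's first word receives the old list head, then the block becomes the new head. A minimal sketch of that push on dummy storage:

    /* Sketch only: push a block onto an intrusive free list by writing the old
     * head into the block's first word and making the block the new head. */
    #include <stddef.h>
    #include <stdio.h>

    static void
    free_list_push(void** list, void* block) {
        *((void**)block) = *list;  /* block->next = old head */
        *list = block;             /* block becomes the new head */
    }

    int
    main(void) {
        void* slot_a[1] = {NULL};
        void* slot_b[1] = {NULL};
        void* free_list = NULL;
        free_list_push(&free_list, slot_a);
        free_list_push(&free_list, slot_b);
        printf("head=%p next=%p\n", free_list, *(void**)free_list);  /* slot_b then slot_a */
        return 0;
    }
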