Skip to content

Commit 31e8a06

Browse files
committed
refactor: inline bitmap into MiniHeap, template MeshableArena on PageSize
This refactoring eliminates dynamic bitmap allocation by inlining the bitmap directly into MiniHeap. The bitmap size varies with page size: - 4K pages: 32-byte bitmap -> MiniHeap = 64 bytes - 16K pages: 128-byte bitmap -> MiniHeap = 160 bytes Key changes: - MiniHeap: inline BitmapType instead of pointer, remove destructor - common.h: add MiniHeapSizeFor<PageSize>() template helper - MeshableArena: template on PageSize, use compile-time page constants - GlobalHeap: qualify inherited members with this-> for template base - Fork handlers remain in .cc to break circular dependency with runtime.h The CheapHeap allocator now uses the correct size at compile time based on the page size template parameter.
1 parent eaa54dc commit 31e8a06

File tree

6 files changed

+714
-824
lines changed

6 files changed

+714
-824
lines changed

src/common.h

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -73,11 +73,6 @@ static constexpr int kMapShared = 1;
7373
static constexpr int kMapShared = kMeshingEnabled ? MAP_SHARED : MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE;
7474
#endif
7575

76-
// we have to define this here for use in meshable_arena's CheapHeap we allocate
77-
// MiniHeaps out of. We validate (and fail compilation) if this gets out of date
78-
// with a static_assert at the bottom of mini_heap.h
79-
static constexpr size_t kMiniHeapSize = 64;
80-
8176
static constexpr size_t kMinObjectSize = 16;
8277
static constexpr size_t kMaxSize = 16384;
8378
static constexpr size_t kClassSizesMax = 25;
@@ -89,6 +84,12 @@ static constexpr uint64_t kPageSizeMin = 4096;
8984
static constexpr uint64_t kPageSize4K = 4096;
9085
static constexpr uint64_t kPageSize16K = 16384;
9186

87+
// MiniHeap size depends on page size due to inlined bitmap
88+
template <size_t PageSize>
89+
constexpr size_t MiniHeapSizeFor() {
90+
return (PageSize == kPageSize4K) ? 64 : 160;
91+
}
92+
9293
// Runtime page size detection for Apple Silicon (16KB) and x86 (4KB) compatibility
9394
namespace internal {
9495
inline size_t initPageSize() {

src/global_heap.h

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -77,16 +77,16 @@ class GlobalHeapStats {
7777
};
7878

7979
template <size_t PageSize>
80-
class GlobalHeap : public MeshableArena {
80+
class GlobalHeap : public MeshableArena<PageSize> {
8181
private:
8282
DISALLOW_COPY_AND_ASSIGN(GlobalHeap);
83-
typedef MeshableArena Super;
84-
85-
static_assert(HL::gcd<MmapHeap::Alignment, Alignment>::value == Alignment,
86-
"expected MmapHeap to have 16-byte alignment");
83+
typedef MeshableArena<PageSize> Super;
8784

8885
public:
8986
enum { Alignment = 16 };
87+
88+
static_assert(HL::gcd<MmapHeap::Alignment, Alignment>::value == Alignment,
89+
"expected MmapHeap to have 16-byte alignment");
9090
using MiniHeapT = MiniHeap<PageSize>;
9191
using MiniHeapListEntryT = MiniHeapListEntry<PageSize>;
9292

@@ -152,7 +152,7 @@ class GlobalHeap : public MeshableArena {
152152
size_t objectSize, size_t pageAlignment = 1) {
153153
d_assert(0 < pageCount);
154154

155-
void *buf = _mhAllocator.alloc();
155+
void *buf = this->_mhAllocator.alloc();
156156
d_assert(buf != nullptr);
157157

158158
// allocate out of the arena
@@ -161,9 +161,9 @@ class GlobalHeap : public MeshableArena {
161161
d_assert(spanBegin != nullptr);
162162
d_assert((reinterpret_cast<uintptr_t>(spanBegin) / getPageSize()) % pageAlignment == 0);
163163

164-
MiniHeapT *mh = new (buf) MiniHeapT(arenaBegin(), span, objectCount, objectSize);
164+
MiniHeapT *mh = new (buf) MiniHeapT(this->arenaBegin(), span, objectCount, objectSize);
165165

166-
const auto miniheapID = MiniHeapID{_mhAllocator.offsetFor(buf)};
166+
const auto miniheapID = MiniHeapID{this->_mhAllocator.offsetFor(buf)};
167167
Super::trackMiniHeap(span, miniheapID);
168168

169169
// mesh::debug("%p (%u) created!\n", mh, GetMiniHeapID(mh));
@@ -195,7 +195,7 @@ class GlobalHeap : public MeshableArena {
195195
d_assert(mh->spanSize() == pageCount * pageSize);
196196
// d_assert(mh->objectSize() == pageCount * pageSize);
197197

198-
void *ptr = mh->mallocAt(arenaBegin(), 0);
198+
void *ptr = mh->mallocAt(this->arenaBegin(), 0);
199199

200200
return ptr;
201201
}
@@ -476,13 +476,13 @@ class GlobalHeap : public MeshableArena {
476476
}
477477

478478
inline MiniHeapT *ATTRIBUTE_ALWAYS_INLINE miniheapForID(const MiniHeapID id) const {
479-
auto mh = reinterpret_cast<MiniHeapT *>(_mhAllocator.ptrFromOffset(id.value()));
479+
auto mh = reinterpret_cast<MiniHeapT *>(this->_mhAllocator.ptrFromOffset(id.value()));
480480
__builtin_prefetch(mh, 1, 2);
481481
return mh;
482482
}
483483

484484
inline MiniHeapID miniheapIDFor(const MiniHeapT *mh) const {
485-
return MiniHeapID{_mhAllocator.offsetFor(mh)};
485+
return MiniHeapID{this->_mhAllocator.offsetFor(mh)};
486486
}
487487

488488
void untrackMiniheapLocked(MiniHeapT *mh) {
@@ -504,7 +504,7 @@ class GlobalHeap : public MeshableArena {
504504
d_assert(!mh->getFreelist()->next().hasValue());
505505
mh->MiniHeapT::~MiniHeap();
506506
// memset(reinterpret_cast<char *>(mh), 0x77, sizeof(MiniHeap));
507-
_mhAllocator.free(mh);
507+
this->_mhAllocator.free(mh);
508508
_miniheapCount--;
509509
}
510510

@@ -541,7 +541,7 @@ class GlobalHeap : public MeshableArena {
541541
MiniHeapT *mh = toFree[i];
542542
const bool isMeshed = mh->isMeshed();
543543
const auto type = isMeshed ? internal::PageType::Meshed : internal::PageType::Dirty;
544-
Super::free(reinterpret_cast<void *>(mh->getSpanStart(arenaBegin())), spanSize, type);
544+
Super::free(reinterpret_cast<void *>(mh->getSpanStart(this->arenaBegin())), spanSize, type);
545545
_stats.mhFreeCount++;
546546
freeMiniheapAfterMeshLocked(mh, untrack);
547547
}

src/global_heap_impl.h

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ void GlobalHeap<PageSize>::freeFor(MiniHeapT *mh, void *ptr, size_t startEpoch)
104104
// here we can't call mh->free(arenaBegin(), ptr), because in consume takeBitmap always clears the bitmap,
105105
// if clearIfNotFree after takeBitmap
106106
// it always returns false, but in this case, you need to free again.
107-
auto wasSet = mh->clearIfNotFree(arenaBegin(), ptr);
107+
auto wasSet = mh->clearIfNotFree(this->arenaBegin(), ptr);
108108

109109
bool shouldMesh = false;
110110

@@ -130,7 +130,7 @@ void GlobalHeap<PageSize>::freeFor(MiniHeapT *mh, void *ptr, size_t startEpoch)
130130
// we have confirmation that we raced with meshing, so free the pointer
131131
// on the new miniheap
132132
d_assert(sizeClass == mh->sizeClass());
133-
mh->free(arenaBegin(), ptr);
133+
mh->free(this->arenaBegin(), ptr);
134134
} else {
135135
// our MiniHeap is unrelated to whatever is here in memory now - get out of here.
136136
return;
@@ -317,27 +317,27 @@ void GlobalHeap<PageSize>::meshLocked(MiniHeapT *dst, MiniHeapT *&src) {
317317
// dst->dumpDebug();
318318
// src->dumpDebug();
319319
const size_t dstSpanSize = dst->spanSize();
320-
const auto dstSpanStart = reinterpret_cast<void *>(dst->getSpanStart(arenaBegin()));
320+
const auto dstSpanStart = reinterpret_cast<void *>(dst->getSpanStart(this->arenaBegin()));
321321

322322
src->forEachMeshed([&](const MiniHeapT *mh) {
323323
// marks srcSpans read-only
324-
const auto srcSpan = reinterpret_cast<void *>(mh->getSpanStart(arenaBegin()));
324+
const auto srcSpan = reinterpret_cast<void *>(mh->getSpanStart(this->arenaBegin()));
325325
Super::beginMesh(dstSpanStart, srcSpan, dstSpanSize);
326326
return false;
327327
});
328328

329329
// does the copying of objects and updating of span metadata
330-
dst->consume(arenaBegin(), src);
330+
dst->consume(this->arenaBegin(), src);
331331
d_assert(src->isMeshed());
332332

333333
src->forEachMeshed([&](const MiniHeapT *mh) {
334334
d_assert(mh->isMeshed());
335-
const auto srcSpan = reinterpret_cast<void *>(mh->getSpanStart(arenaBegin()));
335+
const auto srcSpan = reinterpret_cast<void *>(mh->getSpanStart(this->arenaBegin()));
336336
// frees physical memory + re-marks srcSpans as read/write
337337
Super::finalizeMesh(dstSpanStart, srcSpan, dstSpanSize);
338338
return false;
339339
});
340-
Super::freePhys(reinterpret_cast<void *>(src->getSpanStart(arenaBegin())), dstSpanSize);
340+
Super::freePhys(reinterpret_cast<void *>(src->getSpanStart(this->arenaBegin())), dstSpanSize);
341341

342342
// make sure we adjust what bin the destination is in -- it might
343343
// now be full and not a candidate for meshing
@@ -362,7 +362,7 @@ size_t GlobalHeap<PageSize>::meshSizeClassLocked(size_t sizeClass, MergeSetArray
362362
return mergeSetCount < kMaxMergeSets;
363363
});
364364

365-
method::shiftedSplitting(_fastPrng, &_partialFreelist[sizeClass].first, left, right, meshFound);
365+
method::shiftedSplitting(this->_fastPrng, &_partialFreelist[sizeClass].first, left, right, meshFound);
366366

367367
if (mergeSetCount == 0) {
368368
// debug("nothing to mesh.");
@@ -403,7 +403,7 @@ size_t GlobalHeap<PageSize>::meshSizeClassLocked(size_t sizeClass, MergeSetArray
403403
oneEmpty = true;
404404
}
405405

406-
if (!oneEmpty && !aboveMeshThreshold()) {
406+
if (!oneEmpty && !this->aboveMeshThreshold()) {
407407
meshLocked(dst, src);
408408
meshCount++;
409409
}
@@ -498,7 +498,7 @@ void GlobalHeap<PageSize>::dumpStats(int level, bool beDetailed) const {
498498

499499
AllLocksGuard allLocks(_miniheapLocks, _largeAllocLock, _arenaLock);
500500

501-
const auto meshedPageHWM = meshedPageHighWaterMark();
501+
const auto meshedPageHWM = this->meshedPageHighWaterMark();
502502

503503
debug("MESH COUNT: %zu\n", (size_t)_stats.meshCount);
504504
debug("Meshed MB (total): %.1f\n", (size_t)_stats.meshCount * (double)PageSize / 1024.0 / 1024.0);

0 commit comments

Comments
 (0)