diff --git a/gc/base/AllocateDescription.hpp b/gc/base/AllocateDescription.hpp index 001701ed98f..e1871750cac 100644 --- a/gc/base/AllocateDescription.hpp +++ b/gc/base/AllocateDescription.hpp @@ -75,6 +75,7 @@ class MM_AllocateDescription : public MM_Base { bool _climb; /* indicates that current attempt to allocate should try parent, if current subspace failed */ bool _completedFromTlh; + bool _sharedReserved; public: /** @@ -218,6 +219,9 @@ class MM_AllocateDescription : public MM_Base { */ MMINLINE bool getAllocationSucceeded() {return _allocationSucceeded;} + MMINLINE void setSharedReserved(bool sharedReserved) {_sharedReserved = sharedReserved;} + MMINLINE bool getSharedReserved() {return _sharedReserved;} + /** * Create an AllocateDescriptionCore object. */ @@ -245,6 +249,7 @@ class MM_AllocateDescription : public MM_Base { , _collectAndClimb(collectAndClimb) , _climb(false) , _completedFromTlh(false) + , _sharedReserved(false) {} }; diff --git a/gc/base/SparseVirtualMemory.cpp b/gc/base/SparseVirtualMemory.cpp index a345312d8ab..c5bef4ee60f 100644 --- a/gc/base/SparseVirtualMemory.cpp +++ b/gc/base/SparseVirtualMemory.cpp @@ -76,7 +76,18 @@ MM_SparseVirtualMemory::initialize(MM_EnvironmentBase *env, uint32_t memoryCateg off_heap_size = MM_Math::roundToCeiling(regionSize, (in_heap_size / 100) * ext->sparseHeapSizeRatio); } - bool success = MM_VirtualMemory::initialize(env, off_heap_size, NULL, NULL, 0, memoryCategory); + bool success = false; + + _allocationContextArraySize = sizeof(void *) * (off_heap_size / regionSize); + _allocationContextArray = (void **)env->getForge()->allocate(_allocationContextArraySize, OMR::GC::AllocationCategory::FIXED, OMR_GET_CALLSITE()); + if (NULL != _allocationContextArray) { + memset(_allocationContextArray, 0, _allocationContextArraySize); + } else { + _allocationContextArraySize = 0; + return success; + } + + success = MM_VirtualMemory::initialize(env, off_heap_size, NULL, NULL, 0, memoryCategory); if (success) { void 
*sparseHeapBase = getHeapBase(); @@ -98,6 +109,12 @@ MM_SparseVirtualMemory::initialize(MM_EnvironmentBase *env, uint32_t memoryCateg void MM_SparseVirtualMemory::tearDown(MM_EnvironmentBase *env) { + if (NULL != _allocationContextArray) { + env->getForge()->free(_allocationContextArray); + _allocationContextArray = NULL; + _allocationContextArraySize = 0; + } + if (NULL != _sparseDataPool) { _sparseDataPool->kill(env); _sparseDataPool = NULL; @@ -174,6 +191,9 @@ MM_SparseVirtualMemory::freeSparseRegionAndUnmapFromHeapObject(MM_EnvironmentBas if ((NULL != dataPtr) && (0 != dataSize)) { uintptr_t adjustedSize = adjustSize(dataSize); + + resetAllocationContextForAddress(dataPtr, dataSize); + ret = decommitMemory(env, dataPtr, adjustedSize); if (ret) { omrthread_monitor_enter(_largeObjectVirtualMemoryMutex); diff --git a/gc/base/SparseVirtualMemory.hpp b/gc/base/SparseVirtualMemory.hpp index 603a8163116..a231d8dc41c 100644 --- a/gc/base/SparseVirtualMemory.hpp +++ b/gc/base/SparseVirtualMemory.hpp @@ -36,6 +36,7 @@ #include "BaseVirtual.hpp" #include "Heap.hpp" #include "HeapRegionManager.hpp" +#include "ModronAssertions.h" #include "VirtualMemory.hpp" class GC_HashTableIterator; @@ -63,6 +64,9 @@ class MM_SparseVirtualMemory : public MM_VirtualMemory { MM_Heap *_heap; /**< reference to in-heap */ MM_SparseAddressOrderedFixedSizeDataPool *_sparseDataPool; /**< Structure that manages data and free region of sparse virtual memory */ omrthread_monitor_t _largeObjectVirtualMemoryMutex; /**< Monitor that manages access to sparse virtual memory */ + + void **_allocationContextArray; + uintptr_t _allocationContextArraySize; protected: public: /* @@ -84,6 +88,8 @@ class MM_SparseVirtualMemory : public MM_VirtualMemory { , _heap(in_heap) , _sparseDataPool(NULL) , _largeObjectVirtualMemoryMutex(NULL) + , _allocationContextArray(NULL) + , _allocationContextArraySize(0) { _typeId = __FUNCTION__; } @@ -157,6 +163,42 @@ class MM_SparseVirtualMemory : public MM_VirtualMemory { { 
return _sparseDataPool; } + + MMINLINE uintptr_t getAllocationContextIndexForAddress(const void *address) + { + const uintptr_t regionSize = _heap->getHeapRegionManager()->getRegionSize(); + return ((uintptr_t)address - (uintptr_t)getHeapBase()) / regionSize; + } + + MMINLINE uintptr_t getAllocationContextCount(uintptr_t size) + { + const uintptr_t regionSize = _heap->getHeapRegionManager()->getRegionSize(); + return size / regionSize; + } + + MMINLINE void* getAllocationContextForAddress(const void *address, uintptr_t index) + { + uintptr_t offset = getAllocationContextIndexForAddress(address); + Assert_MM_true((offset + index) < (_allocationContextArraySize / sizeof(void *))); + return _allocationContextArray[offset + index]; + } + + MMINLINE void setAllocationContextForAddress(const void *address, void *allocationContext, uintptr_t index) + { + uintptr_t offset = getAllocationContextIndexForAddress(address); + Assert_MM_true((offset + index) < (_allocationContextArraySize / sizeof(void *))); + _allocationContextArray[offset + index] = allocationContext; + } + + MMINLINE void resetAllocationContextForAddress(const void *address, uintptr_t size) + { + uintptr_t offset = getAllocationContextIndexForAddress(address); + uintptr_t count = getAllocationContextCount(size); + Assert_MM_true((offset + count) <= (_allocationContextArraySize / sizeof(void *))); + for (uintptr_t index = 0; index < count; index++) { + _allocationContextArray[offset + index] = 0; + } + } }; #endif /* SPARSEVIRTUALMEMORY_HPP_ */