Index: Source/platform/heap/Heap.cpp
diff --git a/Source/platform/heap/Heap.cpp b/Source/platform/heap/Heap.cpp
index 3f1948e6cdd9fa8424962f288b751c04f063cfbe..419237ac99134744f3c7be8c889032aebb92dccd 100644
--- a/Source/platform/heap/Heap.cpp
+++ b/Source/platform/heap/Heap.cpp
@@ -443,7 +443,7 @@ void HeapObjectHeader::setDebugMark()
     m_size |= debugBitMask;
 }
 
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
 NO_SANITIZE_ADDRESS
 void HeapObjectHeader::zapMagic()
 {
@@ -466,7 +466,7 @@ void HeapObjectHeader::finalize(const GCInfo* gcInfo, Address object, size_t obj
         gcInfo->m_finalize(object);
     }
 
-#if !defined(NDEBUG) || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER)
+#if ENABLE(ASSERT) || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER)
     // In Debug builds, memory is zapped when it's freed, and the zapped memory is
     // zeroed out when the memory is reused. Memory is also zapped when using Leak
     // Sanitizer because the heap is used as a root region for LSan and therefore
@@ -831,7 +831,7 @@ void ThreadHeap<Header>::allocatePage(const GCInfo* gcInfo)
     addToFreeList(page->payload(), HeapPage<Header>::payloadSize());
 }
 
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
 template<typename Header>
 void ThreadHeap<Header>::getScannedStats(HeapStats& scannedStats)
 {
@@ -1067,7 +1067,7 @@ void HeapPage<Header>::sweep()
 
         if (basicHeader->isFree()) {
            size_t size = basicHeader->size();
-#if defined(NDEBUG) && !defined(LEAK_SANITIZER) && !defined(ADDRESS_SANITIZER)
+#if !ENABLE(ASSERT) && !defined(LEAK_SANITIZER) && !defined(ADDRESS_SANITIZER)
            // Zero the memory in the free list header to maintain the
            // invariant that memory on the free list is zero filled.
            // The rest of the memory is already on the free list and is
@@ -1090,7 +1090,7 @@ void HeapPage<Header>::sweep()
         ASAN_UNPOISON_MEMORY_REGION(header->payload(), header->payloadSize());
         finalize(header);
         size_t size = header->size();
-#if defined(NDEBUG) && !defined(LEAK_SANITIZER) && !defined(ADDRESS_SANITIZER)
+#if !ENABLE(ASSERT) && !defined(LEAK_SANITIZER) && !defined(ADDRESS_SANITIZER)
         // This memory will be added to the freelist. Maintain the invariant
         // that memory on the freelist is zero filled.
         memset(headerAddress, 0, size);
@@ -1370,7 +1370,7 @@ void CallbackStack::shutdown(CallbackStack** first)
 
 CallbackStack::~CallbackStack()
 {
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
     clearUnused();
 #endif
 }
@@ -1390,7 +1390,7 @@ bool CallbackStack::popAndInvokeCallback(CallbackStack** first, Visitor* visitor
 {
     if (m_current == &(m_buffer[0])) {
         if (!m_next) {
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
             clearUnused();
 #endif
             return false;
@@ -1443,7 +1443,7 @@ void CallbackStack::invokeOldestCallbacks(Visitor* visitor)
     }
 }
 
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
 bool CallbackStack::hasCallbackForObject(const void* object)
 {
     for (unsigned i = 0; m_buffer + i < m_current; i++) {
@@ -1550,7 +1550,7 @@ public:
         Heap::registerWeakTable(const_cast<void*>(closure), iterationCallback, iterationDoneCallback);
     }
 
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
     virtual bool weakTableRegistered(const void* closure)
     {
         return Heap::weakTableRegistered(closure);
@@ -1733,7 +1733,7 @@ Address Heap::checkAndMarkPointer(Visitor* visitor, Address address)
 {
     ASSERT(ThreadState::isAnyThreadInGC());
 
-#ifdef NDEBUG
+#if !ENABLE(ASSERT)
     if (s_heapDoesNotContainCache->lookup(address))
         return 0;
 #endif
@@ -1749,7 +1749,7 @@ Address Heap::checkAndMarkPointer(Visitor* visitor, Address address)
         }
     }
 
-#ifdef NDEBUG
+#if !ENABLE(ASSERT)
     s_heapDoesNotContainCache->addEntry(address, true);
 #else
     if (!s_heapDoesNotContainCache->lookup(address))
@@ -1849,7 +1849,7 @@ void Heap::registerWeakTable(void* table, EphemeronCallback iterationCallback, E
     pushWeakCellPointerCallback(static_cast<void**>(table), iterationDoneCallback);
 }
 
-#ifndef NDEBUG
+#if ENABLE(ASSERT)
 bool Heap::weakTableRegistered(const void* table)
 {
     ASSERT(s_ephemeronStack);
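The change is mechanical: every debug-only guard in Heap.cpp now keys off the ENABLE(ASSERT) feature test instead of the compiler-level NDEBUG flag, so assertion-dependent code (zapping, clearUnused(), the extra heap-does-not-contain checks) follows whatever the build decides about assertions rather than the debug/release split alone. A minimal sketch of how such a switch can be wired up is below; the macro names, defaults, and layout are assumptions modeled on WTF-style feature macros for illustration, not copied from wtf/.

    // Illustrative sketch only; ENABLE_ASSERT / ENABLE() here are assumed
    // stand-ins for the real WTF definitions.

    // Default the assertion switch from the debug/release split, but allow a
    // build to pre-define ENABLE_ASSERT=1 to keep assertion-dependent code
    // (such as the guards rewritten above) in an otherwise optimized NDEBUG build.
    #if !defined(ENABLE_ASSERT)
    #if defined(NDEBUG)
    #define ENABLE_ASSERT 0
    #else
    #define ENABLE_ASSERT 1
    #endif
    #endif

    // Feature test used at call sites: ENABLE(ASSERT) expands to ENABLE_ASSERT.
    #define ENABLE(FEATURE) (ENABLE_##FEATURE)

    #if ENABLE(ASSERT)
    // Debug-only helpers (zapMagic(), getScannedStats(), hasCallbackForObject(), ...)
    #endif
    #if !ENABLE(ASSERT)
    // Release-only fast paths (e.g. the unconditional heap-does-not-contain cache use)
    #endif

Note how the polarity flips for the NDEBUG-only paths: `#ifdef NDEBUG` becomes `#if !ENABLE(ASSERT)`, so the release-only shortcuts in Heap::checkAndMarkPointer and the zero-fill-on-sweep paths are taken only when assertions are compiled out, even if the build is otherwise a release build.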