Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(913)

Unified Diff: src/spaces.cc

Issue 11085070: Enable --verify-heap in release mode (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: After rebase plus one new issue fix Created 8 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« src/spaces.h ('K') | « src/spaces.h ('k') | test/cctest/test-debug.cc » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/spaces.cc
diff --git a/src/spaces.cc b/src/spaces.cc
index bc1d7b09ccaa97751c50e1083c3067340a1a93ab..d837503be68e049ff4cce1c99d8bad2cb029974a 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -497,6 +497,14 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
VirtualMemory reservation;
Address area_start = NULL;
Address area_end = NULL;
+
+  // Zapping should only occur in debug mode, or in release mode with the
+  // verify_heap flag on.
+#ifdef DEBUG
+ bool zap_blocks = true;
+#else
+ bool zap_blocks = FLAG_verify_heap;
+#endif
+
if (executable == EXECUTABLE) {
chunk_size = RoundUp(CodePageAreaStartOffset() + body_size,
OS::CommitPageSize()) + CodePageGuardSize();
@@ -529,10 +537,11 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
size_executable_ += reservation.size();
}
-#ifdef DEBUG
- ZapBlock(base, CodePageGuardStartOffset());
- ZapBlock(base + CodePageAreaStartOffset(), body_size);
-#endif
+ if (zap_blocks) {
+ ZapBlock(base, CodePageGuardStartOffset());
+ ZapBlock(base + CodePageAreaStartOffset(), body_size);
+ }
+
area_start = base + CodePageAreaStartOffset();
area_end = area_start + body_size;
} else {
@@ -544,9 +553,9 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
if (base == NULL) return NULL;
-#ifdef DEBUG
- ZapBlock(base, chunk_size);
-#endif
+ if (zap_blocks) {
+ ZapBlock(base, chunk_size);
+ }
area_start = base + Page::kObjectStartOffset;
area_end = base + chunk_size;
@@ -622,9 +631,16 @@ bool MemoryAllocator::CommitBlock(Address start,
size_t size,
Executability executable) {
if (!VirtualMemory::CommitRegion(start, size, executable)) return false;
-#ifdef DEBUG
- ZapBlock(start, size);
+
+ // In release mode, only zap the block if verify heap is on.
+#ifndef DEBUG
+ if (FLAG_verify_heap) {
+#endif
+ ZapBlock(start, size);
+#ifndef DEBUG
+ }
#endif
+
isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
return true;
}
@@ -986,7 +1002,6 @@ void PagedSpace::Print() { }
#endif
-#ifdef DEBUG
void PagedSpace::Verify(ObjectVisitor* visitor) {
// We can only iterate over the pages if they were swept precisely.
if (was_swept_conservatively_) return;
@@ -996,29 +1011,31 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
PageIterator page_iterator(this);
while (page_iterator.has_next()) {
Page* page = page_iterator.next();
- ASSERT(page->owner() == this);
+ CHECK(page->owner() == this);
if (page == Page::FromAllocationTop(allocation_info_.top)) {
allocation_pointer_found_in_space = true;
}
- ASSERT(page->WasSweptPrecisely());
+ CHECK(page->WasSweptPrecisely());
HeapObjectIterator it(page, NULL);
Address end_of_previous_object = page->area_start();
Address top = page->area_end();
int black_size = 0;
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
- ASSERT(end_of_previous_object <= object->address());
+ CHECK(end_of_previous_object <= object->address());
// The first word should be a map, and we expect all map pointers to
// be in map space.
Map* map = object->map();
- ASSERT(map->IsMap());
- ASSERT(heap()->map_space()->Contains(map));
+ CHECK(map->IsMap());
+ CHECK(heap()->map_space()->Contains(map));
// Perform space-specific object verification.
VerifyObject(object);
+#ifdef DEBUG
// The object itself should look OK.
object->Verify();
+#endif
// All the interior pointers should be contained in the heap.
int size = object->Size();
@@ -1027,15 +1044,13 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
black_size += size;
}
- ASSERT(object->address() + size <= top);
+ CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size;
}
- ASSERT_LE(black_size, page->LiveBytes());
+ CHECK_LE(black_size, page->LiveBytes());
}
- ASSERT(allocation_pointer_found_in_space);
+ CHECK(allocation_pointer_found_in_space);
}
-#endif
-
// -----------------------------------------------------------------------------
// NewSpace implementation
@@ -1259,7 +1274,7 @@ MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) {
}
-#ifdef DEBUG
+
// We do not use the SemiSpaceIterator because verification doesn't assume
// that it works (it depends on the invariants we are checking).
void NewSpace::Verify() {
@@ -1289,8 +1304,10 @@ void NewSpace::Verify() {
CHECK(!object->IsMap());
CHECK(!object->IsCode());
+#ifdef DEBUG
// The object itself should look OK.
object->Verify();
+#endif
// All the interior pointers should be contained in the heap.
VerifyPointersVisitor visitor;
@@ -1308,12 +1325,12 @@ void NewSpace::Verify() {
}
// Check semi-spaces.
- ASSERT_EQ(from_space_.id(), kFromSpace);
- ASSERT_EQ(to_space_.id(), kToSpace);
+ CHECK_EQ(from_space_.id(), kFromSpace);
+ CHECK_EQ(to_space_.id(), kToSpace);
from_space_.Verify();
to_space_.Verify();
}
-#endif
+
// -----------------------------------------------------------------------------
// SemiSpace implementation
@@ -1525,7 +1542,7 @@ void SemiSpace::set_age_mark(Address mark) {
#ifdef DEBUG
void SemiSpace::Print() { }
-
+#endif
void SemiSpace::Verify() {
bool is_from_space = (id_ == kFromSpace);
@@ -1557,7 +1574,7 @@ void SemiSpace::Verify() {
}
}
-
+#ifdef DEBUG
void SemiSpace::AssertValidRange(Address start, Address end) {
// Addresses belong to same semi-space
NewSpacePage* page = NewSpacePage::FromLimit(start);
@@ -2552,24 +2569,20 @@ void FixedSpace::PrepareForMarkCompact() {
// -----------------------------------------------------------------------------
// MapSpace implementation
-#ifdef DEBUG
void MapSpace::VerifyObject(HeapObject* object) {
// The object should be a map or a free-list node.
- ASSERT(object->IsMap() || object->IsFreeSpace());
+ CHECK(object->IsMap() || object->IsFreeSpace());
}
-#endif
// -----------------------------------------------------------------------------
// GlobalPropertyCellSpace implementation
-#ifdef DEBUG
void CellSpace::VerifyObject(HeapObject* object) {
// The object should be a global object property cell or a free-list node.
- ASSERT(object->IsJSGlobalPropertyCell() ||
+ CHECK(object->IsJSGlobalPropertyCell() ||
object->map() == heap()->two_pointer_filler_map());
}
-#endif
// -----------------------------------------------------------------------------
@@ -2679,11 +2692,16 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
HeapObject* object = page->GetObject();
-#ifdef DEBUG
- // Make the object consistent so the heap can be vefified in OldSpaceStep.
- reinterpret_cast<Object**>(object->address())[0] =
- heap()->fixed_array_map();
- reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+#ifndef DEBUG
+ if (FLAG_verify_heap) {
+#endif
+    // Make the object consistent so the heap can be verified in OldSpaceStep.
+ // We only need to do this in debug builds or if verify_heap is on.
+ reinterpret_cast<Object**>(object->address())[0] =
+ heap()->fixed_array_map();
+ reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+#ifndef DEBUG
+ }
#endif
heap()->incremental_marking()->OldSpaceStep(object_size);
@@ -2783,7 +2801,6 @@ bool LargeObjectSpace::Contains(HeapObject* object) {
}
-#ifdef DEBUG
// We do not assume that the large object iterator works, because it depends
// on the invariants we are checking during verification.
void LargeObjectSpace::Verify() {
@@ -2794,23 +2811,25 @@ void LargeObjectSpace::Verify() {
// object area start.
HeapObject* object = chunk->GetObject();
Page* page = Page::FromAddress(object->address());
- ASSERT(object->address() == page->area_start());
+ CHECK(object->address() == page->area_start());
// The first word should be a map, and we expect all map pointers to be
// in map space.
Map* map = object->map();
- ASSERT(map->IsMap());
- ASSERT(heap()->map_space()->Contains(map));
+ CHECK(map->IsMap());
+ CHECK(heap()->map_space()->Contains(map));
// We have only code, sequential strings, external strings
// (sequential strings that have been morphed into external
// strings), fixed arrays, and byte arrays in large object space.
- ASSERT(object->IsCode() || object->IsSeqString() ||
+ CHECK(object->IsCode() || object->IsSeqString() ||
object->IsExternalString() || object->IsFixedArray() ||
object->IsFixedDoubleArray() || object->IsByteArray());
+#ifdef DEBUG
// The object itself should look OK.
object->Verify();
+#endif
// Byte arrays and strings don't have interior pointers.
if (object->IsCode()) {
@@ -2824,15 +2843,15 @@ void LargeObjectSpace::Verify() {
Object* element = array->get(j);
if (element->IsHeapObject()) {
HeapObject* element_object = HeapObject::cast(element);
- ASSERT(heap()->Contains(element_object));
- ASSERT(element_object->map()->IsMap());
+ CHECK(heap()->Contains(element_object));
+ CHECK(element_object->map()->IsMap());
}
}
}
}
}
-
+#ifdef DEBUG
void LargeObjectSpace::Print() {
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
« src/spaces.h ('K') | « src/spaces.h ('k') | test/cctest/test-debug.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698