Index: src/heap/spaces.cc |
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc |
index 0f50cdd0d35028a47fdb59d75a3a4c603267eeda..df64e2beb76a065a17bac9a13e9799de6c019977 100644 |
--- a/src/heap/spaces.cc |
+++ b/src/heap/spaces.cc |
@@ -1183,7 +1183,8 @@ bool PagedSpace::Expand() { |
// When incremental marking was activated, old generation pages are allocated |
// black. |
- if (heap()->incremental_marking()->black_allocation()) { |
+ if (heap()->incremental_marking()->black_allocation() && |
+ identity() != CODE_SPACE && identity() != MAP_SPACE) { |
ulan
2016/04/06 16:44:43
Let's whitelist instead of blacklist: identity() == OLD_SPACE.
Hannes Payer (out of office)
2016/04/06 21:24:39
Done.
|
Bitmap::SetAllBits(p); |
p->SetFlag(Page::BLACK_PAGE); |
if (FLAG_trace_incremental_marking) { |
@@ -2901,11 +2902,6 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size, |
} |
HeapObject* object = page->GetObject(); |
- if (heap()->incremental_marking()->black_allocation()) { |
- MarkBit mark_bit = Marking::MarkBitFrom(object); |
- Marking::MarkBlack(mark_bit); |
- page->SetFlag(Page::BLACK_PAGE); |
- } |
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), object_size); |
if (Heap::ShouldZapGarbage()) { |