Index: src/spaces.cc |
diff --git a/src/spaces.cc b/src/spaces.cc |
index 62d826316e3f17e4bda9f60cf252ce9509d1dacc..62873fa1cfe3520e2941a0ad621b6797dbf9382d 100644 |
--- a/src/spaces.cc |
+++ b/src/spaces.cc |
@@ -447,7 +447,6 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, |
chunk->InitializeReservedMemory(); |
chunk->slots_buffer_ = NULL; |
chunk->skip_list_ = NULL; |
- chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity; |
chunk->ResetLiveBytes(); |
Bitmap::Clear(chunk); |
chunk->initialize_scan_on_scavenge(false); |
@@ -882,10 +881,10 @@ intptr_t PagedSpace::SizeOfFirstPage() { |
size = 192 * KB; |
break; |
case MAP_SPACE: |
- size = 16 * kPointerSize * KB; |
+ size = 128 * KB; |
break; |
case CELL_SPACE: |
- size = 16 * kPointerSize * KB; |
+ size = 96 * KB; |
break; |
case CODE_SPACE: |
if (kPointerSize == 8) { |
@@ -2259,40 +2258,11 @@ bool PagedSpace::ReserveSpace(int size_in_bytes) { |
Free(top(), old_linear_size); |
SetTop(new_area->address(), new_area->address() + size_in_bytes); |
+ Allocate(size_in_bytes); |
return true; |
} |
-static void RepairFreeList(Heap* heap, FreeListNode* n) { |
- while (n != NULL) { |
- Map** map_location = reinterpret_cast<Map**>(n->address()); |
- if (*map_location == NULL) { |
- *map_location = heap->free_space_map(); |
- } else { |
- ASSERT(*map_location == heap->free_space_map()); |
- } |
- n = n->next(); |
- } |
-} |
- |
- |
-void FreeList::RepairLists(Heap* heap) { |
- RepairFreeList(heap, small_list_); |
- RepairFreeList(heap, medium_list_); |
- RepairFreeList(heap, large_list_); |
- RepairFreeList(heap, huge_list_); |
-} |
- |
- |
-// After we have booted, we have created a map which represents free space |
-// on the heap. If there was already a free list then the elements on it |
-// were created with the wrong FreeSpaceMap (normally NULL), so we need to |
-// fix them. |
-void PagedSpace::RepairFreeListsAfterBoot() { |
- free_list_.RepairLists(heap()); |
-} |
- |
- |
// You have to call this last, since the implementation from PagedSpace |
// doesn't know that memory was 'promised' to large object space. |
bool LargeObjectSpace::ReserveSpace(int bytes) { |
@@ -2679,10 +2649,12 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size, |
HeapObject* object = page->GetObject(); |
- // Make the object consistent so the large object space can be traversed. |
+#ifdef DEBUG |
+ // Make the object consistent so the heap can be verified in OldSpaceStep. |
reinterpret_cast<Object**>(object->address())[0] = |
heap()->fixed_array_map(); |
reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0); |
+#endif |
heap()->incremental_marking()->OldSpaceStep(object_size); |
return object; |