Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 913ac13d43832494e2234c123e6e421e2074cc6e..eae9232bfb6baf7a4df2c6dc6fbf79742a667ce2 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1575,10 +1575,6 @@ void Heap::Scavenge() {
   Address new_space_front = new_space_.ToSpaceStart();
   promotion_queue_.Initialize();
 
-#ifdef DEBUG
-  store_buffer()->Clean();
-#endif
-
   ScavengeVisitor scavenge_visitor(this);
   // Copy roots.
   IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
@@ -4961,143 +4957,6 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end,
 }
 
 
-#ifdef DEBUG
-typedef bool (*CheckStoreBufferFilter)(Object** addr);
-
-
-bool IsAMapPointerAddress(Object** addr) {
-  uintptr_t a = reinterpret_cast<uintptr_t>(addr);
-  int mod = a % Map::kSize;
-  return mod >= Map::kPointerFieldsBeginOffset &&
-         mod < Map::kPointerFieldsEndOffset;
-}
-
-
-bool EverythingsAPointer(Object** addr) { return true; }
-
-
-static void CheckStoreBuffer(Heap* heap, Object** current, Object** limit,
-                             Object**** store_buffer_position,
-                             Object*** store_buffer_top,
-                             CheckStoreBufferFilter filter,
-                             Address special_garbage_start,
-                             Address special_garbage_end) {
-  Map* free_space_map = heap->free_space_map();
-  for (; current < limit; current++) {
-    Object* o = *current;
-    Address current_address = reinterpret_cast<Address>(current);
-    // Skip free space.
-    if (o == free_space_map) {
-      Address current_address = reinterpret_cast<Address>(current);
-      FreeSpace* free_space =
-          FreeSpace::cast(HeapObject::FromAddress(current_address));
-      int skip = free_space->Size();
-      DCHECK(current_address + skip <= reinterpret_cast<Address>(limit));
-      DCHECK(skip > 0);
-      current_address += skip - kPointerSize;
-      current = reinterpret_cast<Object**>(current_address);
-      continue;
-    }
-    // Skip the current linear allocation space between top and limit which is
-    // unmarked with the free space map, but can contain junk.
-    if (current_address == special_garbage_start &&
-        special_garbage_end != special_garbage_start) {
-      current_address = special_garbage_end - kPointerSize;
-      current = reinterpret_cast<Object**>(current_address);
-      continue;
-    }
-    if (!(*filter)(current)) continue;
-    DCHECK(current_address < special_garbage_start ||
-           current_address >= special_garbage_end);
-    DCHECK(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue);
-    // We have to check that the pointer does not point into new space
-    // without trying to cast it to a heap object since the hash field of
-    // a string can contain values like 1 and 3 which are tagged null
-    // pointers.
-    if (!heap->InNewSpace(o)) continue;
-    while (**store_buffer_position < current &&
-           *store_buffer_position < store_buffer_top) {
-      (*store_buffer_position)++;
-    }
-    if (**store_buffer_position != current ||
-        *store_buffer_position == store_buffer_top) {
-      Object** obj_start = current;
-      while (!(*obj_start)->IsMap()) obj_start--;
-      UNREACHABLE();
-    }
-  }
-}
-
-
-// Check that the store buffer contains all intergenerational pointers by
-// scanning a page and ensuring that all pointers to young space are in the
-// store buffer.
-void Heap::OldPointerSpaceCheckStoreBuffer() {
-  OldSpace* space = old_pointer_space();
-  PageIterator pages(space);
-
-  store_buffer()->SortUniq();
-
-  while (pages.has_next()) {
-    Page* page = pages.next();
-    Object** current = reinterpret_cast<Object**>(page->area_start());
-
-    Address end = page->area_end();
-
-    Object*** store_buffer_position = store_buffer()->Start();
-    Object*** store_buffer_top = store_buffer()->Top();
-
-    Object** limit = reinterpret_cast<Object**>(end);
-    CheckStoreBuffer(this, current, limit, &store_buffer_position,
-                     store_buffer_top, &EverythingsAPointer, space->top(),
-                     space->limit());
-  }
-}
-
-
-void Heap::MapSpaceCheckStoreBuffer() {
-  MapSpace* space = map_space();
-  PageIterator pages(space);
-
-  store_buffer()->SortUniq();
-
-  while (pages.has_next()) {
-    Page* page = pages.next();
-    Object** current = reinterpret_cast<Object**>(page->area_start());
-
-    Address end = page->area_end();
-
-    Object*** store_buffer_position = store_buffer()->Start();
-    Object*** store_buffer_top = store_buffer()->Top();
-
-    Object** limit = reinterpret_cast<Object**>(end);
-    CheckStoreBuffer(this, current, limit, &store_buffer_position,
-                     store_buffer_top, &IsAMapPointerAddress, space->top(),
-                     space->limit());
-  }
-}
-
-
-void Heap::LargeObjectSpaceCheckStoreBuffer() {
-  LargeObjectIterator it(lo_space());
-  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
-    // We only have code, sequential strings, or fixed arrays in large
-    // object space, and only fixed arrays can possibly contain pointers to
-    // the young generation.
-    if (object->IsFixedArray()) {
-      Object*** store_buffer_position = store_buffer()->Start();
-      Object*** store_buffer_top = store_buffer()->Top();
-      Object** current = reinterpret_cast<Object**>(object->address());
-      Object** limit =
-          reinterpret_cast<Object**>(object->address() + object->Size());
-      CheckStoreBuffer(this, current, limit, &store_buffer_position,
-                       store_buffer_top, &EverythingsAPointer, NULL, NULL);
-    }
-  }
-}
-#endif
-
-
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
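
The deleted CheckStoreBuffer routine walks every pointer-sized slot in a page, skips free space and the unswept linear-allocation gap between top and limit, and asserts that each slot holding a pointer into new space appears in the sorted, de-duplicated store buffer. The following is a minimal, self-contained sketch of that scan pattern; it uses simplified stand-in types (a Region struct, a std::vector store buffer, raw uintptr_t slots) rather than V8's actual Heap, Page, and StoreBuffer classes, so it only illustrates the verification idea, not the real implementation.

// Sketch only, not V8 code: verify that every slot in an "old" region that
// points into a "new" region is recorded in a sorted store buffer.
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

struct Region {
  uintptr_t start;
  uintptr_t end;  // exclusive
  bool Contains(uintptr_t p) const { return p >= start && p < end; }
};

// Walk the slots in [slots, slots + count) and check that every slot whose
// value points into new_space has its address recorded in store_buffer.
// store_buffer must be sorted and de-duplicated (mirroring SortUniq()).
void CheckStoreBufferSketch(const uintptr_t* slots, size_t count,
                            const Region& new_space,
                            const std::vector<const uintptr_t*>& store_buffer) {
  auto position = store_buffer.begin();
  for (size_t i = 0; i < count; i++) {
    const uintptr_t* slot = &slots[i];
    if (!new_space.Contains(*slot)) continue;  // not an old-to-new pointer
    // Advance the sorted store-buffer cursor until it reaches this slot.
    while (position != store_buffer.end() && *position < slot) ++position;
    // A missing entry means the write barrier failed to record the store.
    assert(position != store_buffer.end() && *position == slot);
  }
}

int main() {
  // Pretend addresses 0x1000..0x2000 are new space.
  Region new_space{0x1000, 0x2000};

  // An "old space" object with four slots; slots 1 and 3 point to new space.
  uintptr_t old_object[4] = {0x5000, 0x1010, 0x6000, 0x1ff0};

  // The store buffer records the addresses of the two old-to-new slots.
  std::vector<const uintptr_t*> store_buffer = {&old_object[3], &old_object[1]};
  std::sort(store_buffer.begin(), store_buffer.end());

  CheckStoreBufferSketch(old_object, 4, new_space, store_buffer);
  return 0;
}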