Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 674b03b6dafa37e8da15b8ea60691407b34b2a2d..0bd763bc763b7d22826cbd0646ac4dd69860640b 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -3082,6 +3082,24 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
 }
+bool Heap::CanMoveObjectStart(HeapObject* object) {
+  if (!FLAG_move_object_start) return false;
+
+  Address address = object->address();
+
+  if (lo_space()->Contains(object)) return false;
+
+  Page* page = Page::FromAddress(address);
+  // We can move the object start if:
+  //  (1) the object is not in old space,
+  //  (2) the page of the object was already swept,
+  //  (3) the page was already concurrently swept. This case is an optimization
+  //      for concurrent sweeping. The WasSwept predicate for concurrently swept
+  //      pages is set after sweeping all pages.
+  return !InOldSpace(address) || page->WasSwept() || page->SweepingCompleted();
+}
+
+
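
For orientation, here is a small, self-contained model of the decision CanMoveObjectStart() encodes. PageState, CanMoveObjectStartModel and the boolean parameters are stand-ins for the real Heap/Page queries (they are not V8 API); only the predicate logic mirrors the patch.

#include <cstdio>

struct PageState {
  bool was_swept;           // sequential sweeping finished for this page
  bool sweeping_completed;  // concurrent sweeping finished for this page
};

bool CanMoveObjectStartModel(bool flag_move_object_start,
                             bool in_large_object_space, bool in_old_space,
                             const PageState& page) {
  if (!flag_move_object_start) return false;
  // Large-object space: the object start must coincide with the chunk start.
  if (in_large_object_space) return false;
  // New-space objects can always be moved; old-space objects only if their
  // page is no longer being swept (sequentially or concurrently).
  return !in_old_space || page.was_swept || page.sweeping_completed;
}

int main() {
  PageState unswept{false, false};
  PageState swept{true, false};
  std::printf("new space, unswept page: %d\n",
              CanMoveObjectStartModel(true, false, false, unswept));  // 1
  std::printf("old space, unswept page: %d\n",
              CanMoveObjectStartModel(true, false, true, unswept));   // 0
  std::printf("old space, swept page:   %d\n",
              CanMoveObjectStartModel(true, false, true, swept));     // 1
}

Restricting the operation to new-space objects and already-swept old-space pages is what lets LeftTrimFixedArray() below write a filler object without synchronizing with the sweeper, as its comment notes.
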
 void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
   if (incremental_marking()->IsMarking() &&
       Marking::IsBlack(Marking::MarkBitFrom(object->address()))) {
@@ -3094,6 +3112,55 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
 }
+FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
+                                         int elements_to_trim) {
+  DCHECK(!object->IsFixedTypedArrayBase());
+  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
+  const int bytes_to_trim = elements_to_trim * element_size;
+  Map* map = object->map();
+
+  // For now this trick is only applied to objects in new and paged space.
+  // In large object space the object's start must coincide with the chunk
+  // start and thus the trick is not applicable.
+  DCHECK(!lo_space()->Contains(object));
+  DCHECK(object->map() != fixed_cow_array_map());
+
+  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
+  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
+  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
+
+  const int len = object->length();
+  DCHECK(elements_to_trim <= len);
+
+  // Calculate location of new array start.
+  Address new_start = object->address() + bytes_to_trim;
+
+  // Technically in new space this write might be omitted (except for
+  // debug mode which iterates through the heap), but to play it safe
+  // we still do it.
+  CreateFillerObjectAt(object->address(), bytes_to_trim);
+
+  // Initialize header of the trimmed array. Since left trimming is only
+  // performed on pages which are not concurrently swept, creating a filler
+  // object does not require synchronization.
+  DCHECK(CanMoveObjectStart(object));
+  Object** former_start = HeapObject::RawField(object, 0);
+  int new_start_index = elements_to_trim * (element_size / kPointerSize);
+  former_start[new_start_index] = map;
+  former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
+  FixedArrayBase* new_object =
+      FixedArrayBase::cast(HeapObject::FromAddress(new_start));
+
+  // Maintain consistency of live bytes during incremental marking.
+  Marking::TransferMark(this, object->address(), new_start);
+  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
+
+  // Notify the heap profiler of change in object layout.
+  OnMoveEvent(new_object, object, new_object->Size());
+  return new_object;
+}
+
+
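
The heart of the patch is the in-place header rewrite: the trimmed prefix becomes a filler object and the map/length words of the surviving array are written immediately after it, so the object "start" simply moves forward. The sketch below models that offset arithmetic for the pointer-element case using plain intptr_t slots; Slot, kMapSentinel, kFillerSentinel and LeftTrimModel are made-up stand-ins, not V8 types.

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

using Slot = std::intptr_t;
constexpr Slot kMapSentinel = 0x11;     // pretend Map pointer
constexpr Slot kFillerSentinel = 0x22;  // pretend filler marker

// Model layout: slot 0 = map, slot 1 = length, slots 2.. = elements.
Slot* LeftTrimModel(Slot* object, int elements_to_trim) {
  const Slot len = object[1];
  assert(elements_to_trim <= len);
  // "CreateFillerObjectAt": mark the vacated prefix so a heap walk can skip it.
  for (int i = 0; i < elements_to_trim; i++) object[i] = kFillerSentinel;
  // Write the new map and length directly over the last trimmed slots,
  // like former_start[new_start_index] and the word after it in the patch.
  Slot* former_start = object;
  const int new_start_index = elements_to_trim;  // element_size == kPointerSize
  former_start[new_start_index] = kMapSentinel;
  former_start[new_start_index + 1] = len - elements_to_trim;
  // The trimmed array now starts elements_to_trim slots further in.
  return object + elements_to_trim;
}

int main() {
  // A model FixedArray of length 4 holding 100, 101, 102, 103.
  std::vector<Slot> backing = {kMapSentinel, 4, 100, 101, 102, 103};
  Slot* trimmed = LeftTrimModel(backing.data(), 2);
  std::printf("new length = %ld, first element = %ld\n",
              static_cast<long>(trimmed[1]), static_cast<long>(trimmed[2]));
  // Expected output: new length = 2, first element = 102
}

Note how the new header overwrites the last two trimmed element slots, so filler plus new object exactly cover the original allocation with no gap; that is what keeps the page iterable for heap walks.
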
 // Force instantiation of templatized method.
 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
     FixedArrayBase*, int);
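
RightTrimFixedArray is a member template parameterized on Heap::InvocationMode and defined in heap.cc, so the modes used from other translation units have to be instantiated explicitly, as the forced instantiation above does. A minimal, generic illustration of that pattern follows; Trimmer, Trim and the enum values are hypothetical names, not the V8 declarations.

#include <cstdio>

enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

class Trimmer {
 public:
  template <InvocationMode mode>
  void Trim(int bytes);  // declaration visible to other translation units
};

template <InvocationMode mode>
void Trimmer::Trim(int bytes) {  // definition lives only in this ".cc"
  std::printf("trimming %d bytes, mode=%d\n", bytes, static_cast<int>(mode));
}

// Force instantiation of the templatized method for both modes, so callers
// in other translation units link against these symbols.
template void Trimmer::Trim<SEQUENTIAL_TO_SWEEPER>(int);
template void Trimmer::Trim<CONCURRENT_TO_SWEEPER>(int);
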