Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 29d86161819a5ec8aad048c52ebcd7fc3a5fcf4a..27a867b5716682814c1ebe0f3b21f76a7456f953 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -72,7 +72,8 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
       migration_slots_buffer_(NULL),
       heap_(NULL),
       code_flusher_(NULL),
-      encountered_weak_maps_(NULL) { }
+      encountered_weak_maps_(Smi::FromInt(0)),
+      encountered_array_buffers_(Smi::FromInt(0)) { }
 #ifdef VERIFY_HEAP
@@ -393,6 +394,7 @@ void MarkCompactCollector::CollectGarbage() {
   // update the state as they proceed.
   ASSERT(state_ == PREPARE_GC);
   ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
+  ASSERT(encountered_array_buffers_ == Smi::FromInt(0));
   MarkLiveObjects();
   ASSERT(heap_->incremental_marking()->IsStopped());
@@ -2285,6 +2287,9 @@ void MarkCompactCollector::AfterMarking() {
   MarkCompactWeakObjectRetainer mark_compact_object_retainer;
   heap()->ProcessWeakReferences(&mark_compact_object_retainer);
+  // Update references from array buffers to typed arrays.
+  ProcessArrayBuffers();
+
   // Remove object groups after marking phase.
   heap()->isolate()->global_handles()->RemoveObjectGroups();
   heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
@@ -2571,6 +2576,44 @@ void MarkCompactCollector::ClearWeakMaps() {
 }
+void MarkCompactCollector::ProcessArrayBuffers() {
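+  // The encountered array buffers form a list linked through their next
+  // field. Detach the list head before walking it.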
+  Object* array_buffer_obj = encountered_array_buffers();
+  set_encountered_array_buffers(Smi::FromInt(0));
+  while (array_buffer_obj != Smi::FromInt(0)) {
+    ASSERT(IsMarked(HeapObject::cast(array_buffer_obj)));
+    JSArrayBuffer* array_buffer =
+        reinterpret_cast<JSArrayBuffer*>(array_buffer_obj);
+    Object** slot = HeapObject::RawField(array_buffer, JSArrayBuffer::kFirstArrayOffset);
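+    // Prune this buffer's list of typed arrays, keeping only typed arrays
+    // that are still marked; each updated slot is recorded.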
+    while (*slot != Smi::FromInt(0)) {
+      Object* next_obj = *slot;
+      while (next_obj != Smi::FromInt(0) && !IsMarked(next_obj)) {
+        next_obj = JSTypedArray::cast(next_obj)->next();
+      }
+      if (next_obj == Smi::FromInt(0)) {
+        *slot = Smi::FromInt(0);
+        break;
+      }
+      JSTypedArray* next_typed_array = JSTypedArray::cast(next_obj);
+      *slot = next_obj;
+      RecordSlot(slot, slot, next_typed_array);
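+      // Continue pruning from the retained typed array's next field.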
+      slot = HeapObject::RawField(next_typed_array, JSTypedArray::kNextOffset);
+    }
+
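+    // Move on to the next encountered buffer and clear this buffer's link.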
+    array_buffer_obj = array_buffer->next();
+    array_buffer->set_next(Smi::FromInt(0));
+  }
+}
+
+
+void MarkCompactCollector::ClearArrayBuffersOnAbort() {
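+  // On abort, drop the list of encountered array buffers by clearing each
+  // buffer's link; the typed array lists are left untouched.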
+  Object* array_buffer_obj = encountered_array_buffers();
+  set_encountered_array_buffers(Smi::FromInt(0));
+  while (array_buffer_obj != Smi::FromInt(0)) {
+    JSArrayBuffer* array_buffer = JSArrayBuffer::cast(array_buffer_obj);
+    array_buffer_obj = array_buffer->next();
+    array_buffer->set_next(Smi::FromInt(0));
+  }
+}
 // We scavenge new space simultaneously with sweeping. This is done in two
 // passes.
 //