Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index bc46cc2cff46d51d2ebaed81bce29d7476d25560..6425224fc0558b5d0eda69729ad717aeef973a5f 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1623,6 +1623,8 @@ void Heap::Scavenge() {
   SelectScavengingVisitorsTable();
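+  // Snapshot the set of array buffers registered in new space; any buffer
+  // the scavenger does not rediscover below is dead and is freed afterwards.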
+  PrepareArrayBufferDiscoveryInNewSpace();
+
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1704,6 +1706,8 @@ void Heap::Scavenge() {
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());
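+  // Release the backing stores of new-space buffers that were not
+  // rediscovered during this scavenge; survivors are removed from the
+  // candidate set as the scavenger encounters them.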
+  FreeDeadArrayBuffers(true);
+
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
@@ -1797,46 +1801,122 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                        void* data, size_t length) {
+  live_buffers[data] = length;
+}
+
+
+void Heap::UnregisterArrayBufferHelper(
+    std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  DCHECK(live_buffers.count(data) > 0);
+  live_buffers.erase(data);
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+void Heap::RegisterLiveArrayBufferHelper(
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+size_t Heap::FreeDeadArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
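+  // Anything still undiscovered at this point is unreachable; release its
+  // backing store.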
+  size_t freed_memory = 0;
+  for (auto buffer = not_yet_discovered_buffers.begin();
+       buffer != not_yet_discovered_buffers.end(); ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    freed_memory += buffer->second;
+    live_buffers.erase(buffer->first);
+  }
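+  // Reset the candidate set: every surviving buffer starts the next GC
+  // cycle as not yet discovered.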
+  not_yet_discovered_buffers = live_buffers;
+  return freed_memory;
+}
+
+
+void Heap::TearDownArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
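+  // Heap teardown: unconditionally release every registered backing store.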
+  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
+       ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_buffers.clear();
+  not_yet_discovered_buffers.clear();
+}
+
+
+void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
+                                  size_t length) {
   if (!data) return;
-  live_array_buffers_[data] = length;
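+  // Route the buffer to the registry matching the space it was allocated in.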
+  RegisterNewArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
+      length);
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
-void Heap::UnregisterArrayBuffer(void* data) {
+void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
+  UnregisterArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_,
+      data);
 }
-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
+  // ArrayBuffer might be in the middle of being constructed.
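+  // In that case backing_store() still holds the undefined-value sentinel.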
+  if (data == undefined_value()) return;
+  RegisterLiveArrayBufferHelper(in_new_space
+                                    ? not_yet_discovered_new_array_buffers_
+                                    : not_yet_discovered_array_buffers_,
+                                data);
 }
-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
+void Heap::FreeDeadArrayBuffers(bool in_new_space) {
+  size_t freed_memory = FreeDeadArrayBuffersHelper(
+      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_);
+  if (freed_memory) {
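+    // Lowering the external memory count cannot trigger another GC, so the
+    // public API method is safe to use here.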
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
   }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }
 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
+                             not_yet_discovered_array_buffers_);
+  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
+                             not_yet_discovered_new_array_buffers_);
+}
+
+
+void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
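+  // Called before each scavenge: presume every new-space buffer dead until
+  // the scavenger proves otherwise.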
+  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
+}
+
+
+void Heap::PromoteArrayBuffer(Object* obj) {
+  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  DCHECK(live_new_array_buffers_.count(data) > 0);
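+  // The buffer outlived the scavenge: move its entry from the new-space
+  // registry to the long-lived one.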
+  live_array_buffers_[data] = live_new_array_buffers_[data];
+  live_new_array_buffers_.erase(data);
+  not_yet_discovered_new_array_buffers_.erase(data);
 }
@@ -2089,6 +2169,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
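+    // JSArrayBuffer gets a dedicated visitor so that promotion out of new
+    // space can update the array buffer registries.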
+    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
     table_.Register(
         kVisitNativeContext,
@@ -2118,9 +2199,6 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-    table_.Register(kVisitJSArrayBuffer,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
@@ -2348,6 +2426,18 @@ class ScavengingVisitor : public StaticVisitorBase {
   }
+  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
+                                           HeapObject* object) {
+    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
+
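+    // Visit() above installed a forwarding pointer; follow it to see whether
+    // the buffer was promoted out of new space.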
+    Heap* heap = map->GetHeap();
+    MapWord map_word = object->map_word();
+    DCHECK(map_word.IsForwardingAddress());
+    HeapObject* target = map_word.ToForwardingAddress();
+    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+  }
+
+
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();