Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 672b79dfc263e04d6dbdaac43a0c6136e6e68b6c..bc46cc2cff46d51d2ebaed81bce29d7476d25560 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1623,8 +1623,6 @@
SelectScavengingVisitorsTable();
- PrepareArrayBufferDiscoveryInNewSpace();
-
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
new_space_.Flip();
@@ -1706,8 +1704,6 @@
new_space_.LowerInlineAllocationLimit(
new_space_.inline_allocation_limit_step());
- FreeDeadArrayBuffers(true);
-
// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(static_cast<int>(
(PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
@@ -1801,118 +1797,46 @@
}
-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
- void* data, size_t length) {
- live_buffers[data] = length;
-}
-
-
-void Heap::UnregisterArrayBufferHelper(
- std::map<void*, size_t>& live_buffers,
- std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
- DCHECK(live_buffers.count(data) > 0);
- live_buffers.erase(data);
- not_yet_discovered_buffers.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBufferHelper(
- std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
- not_yet_discovered_buffers.erase(data);
-}
-
-
-size_t Heap::FreeDeadArrayBuffersHelper(
- Isolate* isolate, std::map<void*, size_t>& live_buffers,
- std::map<void*, size_t>& not_yet_discovered_buffers) {
- size_t freed_memory = 0;
- for (auto buffer = not_yet_discovered_buffers.begin();
- buffer != not_yet_discovered_buffers.end(); ++buffer) {
- isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
- freed_memory += buffer->second;
- live_buffers.erase(buffer->first);
- }
- not_yet_discovered_buffers = live_buffers;
- return freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffersHelper(
- Isolate* isolate, std::map<void*, size_t>& live_buffers,
- std::map<void*, size_t>& not_yet_discovered_buffers) {
- for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
- ++buffer) {
- isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
- }
- live_buffers.clear();
- not_yet_discovered_buffers.clear();
-}
-
-
-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
- size_t length) {
+void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
if (!data) return;
- RegisterNewArrayBufferHelper(
- in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
- length);
+ live_array_buffers_[data] = length;
reinterpret_cast<v8::Isolate*>(isolate_)
->AdjustAmountOfExternalAllocatedMemory(length);
}
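
For orientation, here is a hedged sketch of how the simplified, space-agnostic registration API above is meant to be driven. Only RegisterNewArrayBuffer() and UnregisterArrayBuffer() come from this patch; the allocation step and the heap/isolate variables are illustrative context, not code from the diff.

  // Illustrative caller (not part of this patch): allocate a backing store
  // through the embedder-provided allocator and let the heap account for it.
  // There is no in_new_space flag any more; every buffer is recorded in the
  // single live_array_buffers_ map.
  size_t byte_length = 1024;
  void* data = isolate->array_buffer_allocator()->Allocate(byte_length);
  heap->RegisterNewArrayBuffer(data, byte_length);

  // If ownership of the backing store later leaves the heap (for example on
  // externalization), the entry is dropped from both tracking maps.
  heap->UnregisterArrayBuffer(data);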
-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
+void Heap::UnregisterArrayBuffer(void* data) {
if (!data) return;
- UnregisterArrayBufferHelper(
- in_new_space ? live_new_array_buffers_ : live_array_buffers_,
- in_new_space ? not_yet_discovered_new_array_buffers_
- : not_yet_discovered_array_buffers_,
- data);
-}
-
-
-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
- RegisterLiveArrayBufferHelper(in_new_space
- ? not_yet_discovered_new_array_buffers_
- : not_yet_discovered_array_buffers_,
- data);
-}
-
-
-void Heap::FreeDeadArrayBuffers(bool in_new_space) {
- size_t freed_memory = FreeDeadArrayBuffersHelper(
- isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
- in_new_space ? not_yet_discovered_new_array_buffers_
- : not_yet_discovered_array_buffers_);
- if (freed_memory) {
- reinterpret_cast<v8::Isolate*>(isolate_)
- ->AdjustAmountOfExternalAllocatedMemory(
- -static_cast<int64_t>(freed_memory));
- }
+ DCHECK(live_array_buffers_.count(data) > 0);
+ live_array_buffers_.erase(data);
+ not_yet_discovered_array_buffers_.erase(data);
+}
+
+
+void Heap::RegisterLiveArrayBuffer(void* data) {
+ not_yet_discovered_array_buffers_.erase(data);
+}
+
+
+void Heap::FreeDeadArrayBuffers() {
+ for (auto buffer = not_yet_discovered_array_buffers_.begin();
+ buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
+ isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+ // Don't use the API method here since this could trigger another GC.
+ amount_of_external_allocated_memory_ -= buffer->second;
+ live_array_buffers_.erase(buffer->first);
+ }
+ not_yet_discovered_array_buffers_ = live_array_buffers_;
}
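
The sweep above is one half of a simple two-map protocol: before a collection, not_yet_discovered_array_buffers_ is reset to a copy of live_array_buffers_; every backing store the GC visits is struck from that copy via RegisterLiveArrayBuffer(); whatever is still left afterwards was never discovered, so it is dead and can be freed. A minimal, self-contained model of that protocol (plain std::map and malloc/free rather than V8 types, so every name here is a stand-in) would look like this:

  #include <cstdlib>
  #include <map>

  // Toy model of the patch's bookkeeping: buffers known to be live vs. buffers
  // not yet proven live during the current collection.
  static std::map<void*, size_t> live_buffers;
  static std::map<void*, size_t> not_yet_discovered;

  void RegisterNew(void* data, size_t length) { live_buffers[data] = length; }

  // Snapshot taken before the collection starts.
  void PrepareSweep() { not_yet_discovered = live_buffers; }

  // Called for every buffer the collector visits.
  void MarkLive(void* data) { not_yet_discovered.erase(data); }

  // Frees everything the collector never visited and returns the byte count.
  size_t Sweep() {
    size_t freed = 0;
    for (auto& buffer : not_yet_discovered) {
      std::free(buffer.first);
      freed += buffer.second;
      live_buffers.erase(buffer.first);
    }
    not_yet_discovered = live_buffers;
    return freed;
  }

  int main() {
    void* a = std::malloc(16);
    void* b = std::malloc(32);
    RegisterNew(a, 16);
    RegisterNew(b, 32);
    PrepareSweep();
    MarkLive(a);                   // the collector saw a but never saw b
    return Sweep() == 32 ? 0 : 1;  // b's 32 bytes are reclaimed
  }

Note that the real FreeDeadArrayBuffers() above subtracts from amount_of_external_allocated_memory_ directly instead of going through AdjustAmountOfExternalAllocatedMemory(), since, as its comment says, the API call could trigger another GC in the middle of the sweep.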
void Heap::TearDownArrayBuffers() {
- TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
- not_yet_discovered_array_buffers_);
- TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
- not_yet_discovered_new_array_buffers_);
-}
-
-
-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
- not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
-}
-
-
-void Heap::PromoteArrayBuffer(Object* obj) {
- JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
- if (buffer->is_external()) return;
- void* data = buffer->backing_store();
- if (!data) return;
- DCHECK(live_new_array_buffers_.count(data) > 0);
- live_array_buffers_[data] = live_new_array_buffers_[data];
- live_new_array_buffers_.erase(data);
- not_yet_discovered_new_array_buffers_.erase(data);
+ for (auto buffer = live_array_buffers_.begin();
+ buffer != live_array_buffers_.end(); ++buffer) {
+ isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+ }
+ live_array_buffers_.clear();
+ not_yet_discovered_array_buffers_.clear();
}
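
TearDownArrayBuffers(), by contrast, frees every backing store still recorded as live, not just the undiscovered ones, and clears both maps. A hedged sketch of the expected call site; the Heap::TearDown() wrapper shown here is assumed from surrounding code and is not part of this hunk:

  // Assumed shutdown ordering (illustrative): any buffer that was registered
  // during the isolate's lifetime and never unregistered is released exactly
  // once here, after which both tracking maps are empty.
  void Heap::TearDown() {
    // ... unrelated teardown work elided ...
    TearDownArrayBuffers();
  }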
@@ -2165,7 +2089,6 @@
table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
- table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
table_.Register(
kVisitNativeContext,
@@ -2193,6 +2116,9 @@
SharedFunctionInfo::kSize>);
table_.Register(kVisitJSWeakCollection,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
+
+ table_.Register(kVisitJSArrayBuffer,
&ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
table_.Register(kVisitJSTypedArray,
@@ -2419,18 +2345,6 @@
HeapObject* object) {
int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
- }
-
-
- static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
- HeapObject* object) {
- ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
- Heap* heap = map->GetHeap();
- MapWord map_word = object->map_word();
- DCHECK(map_word.IsForwardingAddress());
- HeapObject* target = map_word.ToForwardingAddress();
- if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
}
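
With the dedicated EvacuateJSArrayBuffer path removed, a JSArrayBuffer is evacuated like any other pointer object (see the kVisitJSArrayBuffer registration earlier in this diff), and no promotion-time bookkeeping remains. A hedged sketch of the only per-buffer duty the collector still has, built from calls that appear in this patch; the wrapper function itself is hypothetical:

  // Hypothetical visitor hook (name illustrative): keeping a buffer's backing
  // store alive now only requires reporting it during the collection,
  // regardless of which space the JSArrayBuffer lives in or moves to.
  static void ReportLiveArrayBuffer(Heap* heap, JSArrayBuffer* buffer) {
    if (buffer->is_external()) return;    // the embedder owns this memory
    void* data = buffer->backing_store();
    if (data == nullptr) return;          // nothing was ever allocated
    heap->RegisterLiveArrayBuffer(data);  // strike it from not-yet-discovered
  }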