Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1133773002: Keep track of array buffers in new space separately (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: updates (created 5 years, 7 months ago)
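Before reading the diff, it may help to see the shape of the bookkeeping this patch introduces: array buffer backing stores are tracked in std::map<void*, size_t> containers, with new-space buffers kept separately from old-space ones so that a scavenge only has to sweep the new-space set. The following is a minimal standalone sketch of that scheme, not the patch itself; ArrayBufferTracker and Allocator are illustrative stand-ins (not V8 classes), while the three maps mirror the Heap members live_array_buffers_, live_new_array_buffers_, and not_yet_discovered_new_array_buffers_ added below.

#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <map>

// Stand-in for v8::ArrayBuffer::Allocator; illustrative only.
struct Allocator {
  void Free(void* data, size_t /*length*/) { std::free(data); }
};

class ArrayBufferTracker {
 public:
  // A newly allocated buffer is recorded in the map matching its space.
  void RegisterNew(bool in_new_space, void* data, size_t length) {
    (in_new_space ? live_new_ : live_old_)[data] = length;
  }

  // Before a scavenge: presume every live new-space buffer dead until the
  // scavenge visits its JSArrayBuffer again.
  void PrepareDiscovery() { not_yet_discovered_new_ = live_new_; }

  // Visiting a buffer during the scavenge proves it is still reachable.
  void MarkLive(void* data) { not_yet_discovered_new_.erase(data); }

  // A buffer whose JSArrayBuffer was promoted moves to the old-space map.
  void Promote(void* data) {
    assert(live_new_.count(data) > 0);
    live_old_[data] = live_new_[data];
    live_new_.erase(data);
    not_yet_discovered_new_.erase(data);
  }

  // After the scavenge: anything never rediscovered is garbage. Returns the
  // bytes released, then resets the snapshot the same way the patch does.
  size_t FreeDead(Allocator* allocator) {
    size_t freed = 0;
    for (auto& entry : not_yet_discovered_new_) {
      allocator->Free(entry.first, entry.second);
      freed += entry.second;
      live_new_.erase(entry.first);
    }
    not_yet_discovered_new_ = live_new_;
    return freed;
  }

 private:
  std::map<void*, size_t> live_old_;
  std::map<void*, size_t> live_new_;
  std::map<void*, size_t> not_yet_discovered_new_;
};

int main() {
  Allocator allocator;
  ArrayBufferTracker tracker;
  void* kept = std::malloc(16);
  void* dropped = std::malloc(32);
  tracker.RegisterNew(true, kept, 16);
  tracker.RegisterNew(true, dropped, 32);
  tracker.PrepareDiscovery();
  tracker.MarkLive(kept);  // only `kept` is rediscovered by the scavenge
  size_t freed = tracker.FreeDead(&allocator);
  assert(freed == 32);  // `dropped` was swept and its backing store released
  std::free(kept);
  return 0;
}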
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 1543 matching lines...)
   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

   SelectScavengingVisitorsTable();

   incremental_marking()->PrepareForScavenge();

+  PrepareArrayBufferDiscoveryInNewSpace();
+
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
   // objects, we treat the bottom of the to space as a queue. Newly
   // copied and unswept objects lie between a 'front' mark and the
   // allocation pointer.
(...skipping 61 matching lines...)
   ProcessYoungWeakReferences(&weak_object_retainer);

   DCHECK(new_space_front == new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

+  FreeDeadArrayBuffers(true);
+
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

   LOG(isolate_, ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }

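The ordering of the two new calls in Scavenge() above is the crux of the patch: PrepareArrayBufferDiscoveryInNewSpace() snapshots the new-space buffer set before any object moves, and FreeDeadArrayBuffers(true) runs only after copying, promotion, and weak-reference processing have had a chance to rediscover every surviving buffer. In terms of the ArrayBufferTracker sketch near the top, a condensed (hypothetical) driver would look like this:

// Hypothetical condensed driver; phase names follow Heap::Scavenge() above,
// and the tracker/allocator types come from the sketch near the top.
void ScavengePhases(ArrayBufferTracker* tracker, Allocator* allocator) {
  tracker->PrepareDiscovery();   // every new-space buffer starts out unproven
  // ... flip the semispaces and copy or promote live objects; visitors call
  // tracker->MarkLive(data) for buffers that survive in new space and
  // tracker->Promote(data) for buffers promoted to old space ...
  tracker->FreeDead(allocator);  // buffers never rediscovered are freed
}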
(...skipping 73 matching lines...)
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }

-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+namespace {
+
+void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                  void* data, size_t length) {
+  live_buffers[data] = length;
+}
+
+
+void UnregisterArrayBufferHelper(
+    std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  DCHECK(live_buffers.count(data) > 0);
+  live_buffers.erase(data);
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+void RegisterLiveArrayBufferHelper(
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+size_t FreeDeadArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  size_t freed_memory = 0;
+  for (auto buffer = not_yet_discovered_buffers.begin();
+       buffer != not_yet_discovered_buffers.end(); ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    freed_memory += buffer->second;
+    live_buffers.erase(buffer->first);
+  }
+  not_yet_discovered_buffers = live_buffers;
+  return freed_memory;
+}
+
+
+void TearDownArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
+       ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_buffers.clear();
+  not_yet_discovered_buffers.clear();
+}
+
+}  // namespace
+
+
+void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
+                                  size_t length) {
   if (!data) return;
-  live_array_buffers_[data] = length;
+  RegisterNewArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
+      length);
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }


-void Heap::UnregisterArrayBuffer(void* data) {
+void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
+  UnregisterArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_,
+      data);
 }


-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
+  RegisterLiveArrayBufferHelper(in_new_space
+                                    ? not_yet_discovered_new_array_buffers_
+                                    : not_yet_discovered_array_buffers_,
+                                data);
 }


-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
+void Heap::FreeDeadArrayBuffers(bool in_new_space) {
+  size_t freed_memory = FreeDeadArrayBuffersHelper(
+      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_);
+  if (freed_memory) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
   }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }


 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
+                             not_yet_discovered_array_buffers_);
+  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
+                             not_yet_discovered_new_array_buffers_);
 }


+void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
+  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
+}
+
+
+void Heap::PromoteArrayBuffer(JSArrayBuffer* buffer) {
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  DCHECK(live_new_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_new_array_buffers_[data];
+  live_new_array_buffers_.erase(data);
+  not_yet_discovered_new_array_buffers_.erase(data);
+}
+
+
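One behavioral detail in the new Heap::FreeDeadArrayBuffers above: the old code subtracted from amount_of_external_allocated_memory_ directly (its comment warned that the API method could trigger another GC), while the new code reports the total through AdjustAmountOfExternalAllocatedMemory once after the sweep, widening the size_t total to int64_t before negating it. Negating in the unsigned type first would wrap. A minimal illustration of why the cast order matters, using uint32_t to model a 32-bit size_t (plain C++, independent of V8):

#include <cassert>
#include <cstdint>

int main() {
  uint32_t freed_memory = 32;  // models size_t on a 32-bit target
  // Wrong: unary minus happens in the unsigned type and wraps to
  // 2^32 - 32; that large positive value is then widened to 64 bits.
  int64_t wrong = -freed_memory;
  // Right (what the patch does): widen to the signed 64-bit parameter
  // type first, then negate.
  int64_t right = -static_cast<int64_t>(freed_memory);
  assert(right == -32);
  assert(wrong == 4294967264LL);  // 2^32 - 32, a positive delta
  return 0;
}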
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
(...skipping 371 matching lines...)
     if (allocation.To(&target)) {
       MigrateObject(heap, object, target, object_size);

       // Update slot to new target.
       *slot = target;

       if (object_contents == POINTER_OBJECT) {
         if (map->instance_type() == JS_FUNCTION_TYPE) {
           heap->promotion_queue()->insert(target,
                                           JSFunction::kNonWeakFieldsEndOffset);
         } else {
Hannes Payer (out of office) 2015/05/12 06:33:28: This logic could also be moved to the promotion queue.
           heap->promotion_queue()->insert(target, object_size);
         }
       }
+      if (map->instance_type() == JS_ARRAY_BUFFER_TYPE) {
+        heap->PromoteArrayBuffer(JSArrayBuffer::cast(target));
Hannes Payer (out of office) 2015/05/12 06:33:28: What about moving this logic into the promotion queue?
+      }
       heap->IncrementPromotedObjectsSize(object_size);
       return true;
     }
     return false;
   }
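Hannes Payer's two comments in this hunk point the same way: instead of special-casing JS_ARRAY_BUFFER_TYPE at the promotion call sites, the promotion queue itself could notice array buffers when an entry is inserted. That refactoring is not part of this patch; the sketch below is only a guess at what such an interface could look like, and every name in it other than the insert() idea is invented for illustration.

#include <deque>
#include <utility>

// All types here are hypothetical stand-ins, not V8 classes; only the idea
// of promotion_queue()->insert() comes from the diff above.
enum InstanceType { JS_FUNCTION_TYPE, JS_ARRAY_BUFFER_TYPE, OTHER_TYPE };

struct HeapObject {
  InstanceType type;
};

struct ArrayBufferPromoter {
  // Stand-in for Heap::PromoteArrayBuffer: move the backing store from the
  // new-space map to the old-space map (see the tracker sketch earlier).
  void PromoteArrayBuffer(HeapObject* /*object*/) {}
};

// The reviewer's suggestion: insert() dispatches on the instance type, so
// the scavenging visitor loses its JS_ARRAY_BUFFER_TYPE special case.
class PromotionQueue {
 public:
  explicit PromotionQueue(ArrayBufferPromoter* promoter)
      : promoter_(promoter) {}

  void insert(HeapObject* target, int size) {
    if (target->type == JS_ARRAY_BUFFER_TYPE) {
      promoter_->PromoteArrayBuffer(target);
    }
    entries_.emplace_back(target, size);
  }

 private:
  ArrayBufferPromoter* promoter_;
  std::deque<std::pair<HeapObject*, int>> entries_;
};

int main() {
  ArrayBufferPromoter promoter;
  PromotionQueue queue(&promoter);
  HeapObject buffer{JS_ARRAY_BUFFER_TYPE};
  queue.insert(&buffer, 64);  // buffer bookkeeping happens inside insert()
  return 0;
}

The trade-off is one extra type check on every insertion in exchange for a single place that knows about array buffer promotion.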


   template <ObjectContents object_contents, int alignment>
   static inline void EvacuateObject(Map* map, HeapObject** slot,
                                     HeapObject* object, int object_size) {
(...skipping 4310 matching lines...)
       }
       delete list;
     } else {
       prev = list;
     }
     list = next;
   }
 }
 }
 }  // namespace v8::internal