Chromium Code Reviews

Diff: src/heap/heap.cc

Issue 1324023007: [heap] introduce ArrayBufferTracker (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: split out (created 5 years, 3 months ago)
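This patch moves the ArrayBuffer backing-store bookkeeping out of Heap and into a new ArrayBufferTracker object owned by the heap. For orientation, here is a minimal sketch of the tracker interface, inferred only from the call sites visible in the diff below; the real declarations live in the new src/heap/array-buffer-tracker.h and may differ:

// Sketch inferred from the call sites in this CL; it is not copied from the
// new header, so anything beyond the calls shown in the diff is an assumption.
class ArrayBufferTracker {
 public:
  explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}

  // Snapshot the set of new-space buffers before a scavenge so that buffers
  // not rediscovered during the scavenge can be freed afterwards.
  void PrepareDiscoveryInNewSpace();

  // Free backing stores of buffers that were not discovered by the last GC;
  // |from_scavenge| limits the sweep to new-space buffers.
  void FreeDead(bool from_scavenge);

  // Move a promoted buffer's bookkeeping from the new-space table to the
  // old-generation table.
  void Promote(JSArrayBuffer* buffer);

  // Release every remaining backing store when the heap is torn down.
  void TearDown();

 private:
  Heap* heap_;
};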
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/heap.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/base/bits.h"
#include "src/base/once.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
#include "src/conversions.h"
#include "src/cpu-profiler.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/global-handles.h"
+#include "src/heap/array-buffer-tracker.h"
#include "src/heap/gc-idle-time-handler.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/memory-reducer.h"
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/store-buffer.h"
(...skipping 126 matching lines...)
  DCHECK((max_old_generation_size_ & (Page::kPageSize - 1)) == 0);

  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
  set_native_contexts_list(NULL);
  set_allocation_sites_list(Smi::FromInt(0));
  set_encountered_weak_collections(Smi::FromInt(0));
  set_encountered_weak_cells(Smi::FromInt(0));
  // Put a dummy entry in the remembered pages so we can find the list in the
  // minidump even if there are no real unmapped pages.
  RememberUnmappedPage(NULL, false);
+
+  array_buffer_tracker_ = new ArrayBufferTracker(this);
Michael Lippautz 2015/09/04 08:24:15 Move this into SetUp() somewhere next to ObjectSta
fedor.indutny 2015/09/04 08:58:05 Acknowledged.
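A rough sketch of what the suggested follow-up could look like, assuming the tracker is created in Heap::SetUp() next to where object_stats_ is allocated; the surrounding lines and exact placement are assumptions, not part of this patch set:

bool Heap::SetUp() {
  // ... existing setup ...
  object_stats_ = new ObjectStats(this);                  // assumed existing line
  array_buffer_tracker_ = new ArrayBufferTracker(this);   // moved out of the constructor
  // ... existing setup ...
  return true;
}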
}


intptr_t Heap::Capacity() {
  if (!HasBeenSetUp()) return 0;

  return new_space_.Capacity() + old_space_->Capacity() +
         code_space_->Capacity() + map_space_->Capacity();
}

(...skipping 1364 matching lines...)
  LOG(isolate_, ResourceEvent("scavenge", "begin"));

  // Clear descriptor cache.
  isolate_->descriptor_lookup_cache()->Clear();

  // Used for updating survived_since_last_expansion_ at function end.
  intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

  SelectScavengingVisitorsTable();

-  PrepareArrayBufferDiscoveryInNewSpace();
+  array_buffer_tracker()->PrepareDiscoveryInNewSpace();

  // Flip the semispaces.  After flipping, to space is empty, from space has
  // live objects.
  new_space_.Flip();
  new_space_.ResetAllocationInfo();

  // We need to sweep newly copied objects which can be either in the
  // to space or promoted to the old generation.  For to-space
  // objects, we treat the bottom of the to space as a queue.  Newly
  // copied and unswept objects lie between a 'front' mark and the
(...skipping 81 matching lines...)
  ProcessYoungWeakReferences(&weak_object_retainer);

  DCHECK(new_space_front == new_space_.top());

  // Set age mark.
  new_space_.set_age_mark(new_space_.top());

  new_space_.LowerInlineAllocationLimit(
      new_space_.inline_allocation_limit_step());

-  FreeDeadArrayBuffers(true);
+  array_buffer_tracker()->FreeDead(true);

  // Update how much has survived scavenge.
  IncrementYoungSurvivorsCounter(static_cast<int>(
      (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

  LOG(isolate_, ResourceEvent("scavenge", "end"));

  gc_state_ = NOT_IN_GC;
}

(...skipping 74 matching lines...)
}


void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
  // Update the head of the list of contexts.
  set_native_contexts_list(head);
}

-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
-                                  size_t length) {
-  if (!data) return;
-  if (in_new_space) {
-    live_array_buffers_for_scavenge_[data] = length;
-  } else {
-    live_array_buffers_[data] = length;
-  }
-
-  // We may go over the limit of externally allocated memory here. We call the
-  // api function to trigger a GC in this case.
-  reinterpret_cast<v8::Isolate*>(isolate_)
-      ->AdjustAmountOfExternalAllocatedMemory(length);
-}
-
-
-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
-  if (!data) return;
-
-  std::map<void*, size_t>* live_buffers =
-      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
-  std::map<void*, size_t>* not_yet_discovered_buffers =
-      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
-                   : &not_yet_discovered_array_buffers_;
-
-  DCHECK(live_buffers->count(data) > 0);
-
-  size_t length = (*live_buffers)[data];
-  live_buffers->erase(data);
-  not_yet_discovered_buffers->erase(data);
-
-  amount_of_external_allocated_memory_ -= length;
-}
-
-
-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  if (in_new_space) {
-    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-  } else {
-    not_yet_discovered_array_buffers_.erase(data);
-  }
-}
-
-
-void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
-  size_t freed_memory = 0;
-  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-    live_array_buffers_for_scavenge_.erase(buffer.first);
-  }
-
-  if (!from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_) {
-      isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-      freed_memory += buffer.second;
-      live_array_buffers_.erase(buffer.first);
-    }
-  }
-
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
-
-  // Do not call through the api as this code is triggered while doing a GC.
-  amount_of_external_allocated_memory_ -= freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffers() {
-  size_t freed_memory = 0;
-  for (auto& buffer : live_array_buffers_) {
-    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-  }
-  for (auto& buffer : live_array_buffers_for_scavenge_) {
-    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-  }
-  live_array_buffers_.clear();
-  live_array_buffers_for_scavenge_.clear();
-  not_yet_discovered_array_buffers_.clear();
-  not_yet_discovered_array_buffers_for_scavenge_.clear();
-
-  if (freed_memory > 0) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
-  }
-}
-
-
-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-}
-
-
-void Heap::PromoteArrayBuffer(Object* obj) {
-  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
-  live_array_buffers_for_scavenge_.erase(data);
-  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-}
-
-
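The deleted functions above implement a two-table discovery scheme: every registered buffer sits in a "live" table, a copy of that table is taken before a GC, entries are erased from the copy as the GC rediscovers buffers, and whatever remains in the copy afterwards is unreachable and its backing store can be freed. A self-contained illustration of that pattern in plain C++ (standalone sketch, not V8 code):

#include <cstddef>
#include <cstdio>
#include <map>

int main() {
  // live_array_buffers_: every registered backing store and its length.
  std::map<void*, std::size_t> live;
  // not_yet_discovered_array_buffers_: snapshot that shrinks as the GC
  // visits buffers.
  std::map<void*, std::size_t> not_yet_discovered;

  int a = 0, b = 0;  // stand-ins for two backing stores
  live[&a] = 16;
  live[&b] = 32;

  // PrepareArrayBufferDiscoveryInNewSpace(): copy the live table.
  not_yet_discovered = live;

  // RegisterLiveArrayBuffer(): the GC visited the buffer at &a, so it is no
  // longer a candidate for freeing.
  not_yet_discovered.erase(&a);

  // FreeDeadArrayBuffers(): whatever is still in the snapshot was never
  // visited; free it and drop it from the live table.
  for (auto& buffer : not_yet_discovered) {
    std::printf("would free %zu bytes\n", buffer.second);  // prints 32 (&b)
    live.erase(buffer.first);
  }
  not_yet_discovered = live;  // reset the snapshot for the next cycle
  return 0;
}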
void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
  Object* allocation_site_obj =
      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
  set_allocation_sites_list(allocation_site_obj);
}


void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
  DisallowHeapAllocation no_allocation_scope;
  Object* cur = allocation_sites_list();
(...skipping 510 matching lines...)
  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
                                           HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    Heap* heap = map->GetHeap();
    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+    if (!heap->InNewSpace(target)) {
+      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
+    }
  }


  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }

(...skipping 3352 matching lines...)
    memory_reducer_->TearDown();
    delete memory_reducer_;
    memory_reducer_ = nullptr;
  }

  delete object_stats_;
  object_stats_ = nullptr;

  WaitUntilUnmappingOfFreeChunksCompleted();

-  TearDownArrayBuffers();
+  array_buffer_tracker()->TearDown();

  isolate_->global_handles()->TearDown();

  external_string_table_.TearDown();

  mark_compact_collector()->TearDown();

  delete tracer_;
  tracer_ = nullptr;

(...skipping 883 matching lines...)
    *object_sub_type = "CODE_AGE/" #name; \
    return true;
    CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
#undef COMPARE_AND_RETURN_NAME
  }
  return false;
}

}  // namespace internal
}  // namespace v8