Chromium Code Reviews

Diff: src/heap/heap.cc

Issue 1324023007: [heap] introduce ArrayBufferTracker (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: destructor Created 5 years, 3 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/heap.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
 #include "src/base/utils/random-number-generator.h"
 #include "src/bootstrapper.h"
 #include "src/codegen.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/cpu-profiler.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/global-handles.h"
+#include "src/heap/array-buffer-tracker.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/mark-compact.h"
 #include "src/heap/memory-reducer.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
 #include "src/heap/store-buffer.h"
(...skipping 107 matching lines...)
       configured_(false),
       current_gc_flags_(Heap::kNoGCFlags),
       current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
       external_string_table_(this),
       chunks_queued_for_free_(NULL),
       concurrent_unmapping_tasks_active_(0),
       pending_unmapping_tasks_semaphore_(0),
       gc_callbacks_depth_(0),
       deserialization_complete_(false),
       concurrent_sweeping_enabled_(false),
-      strong_roots_list_(NULL) {
+      strong_roots_list_(NULL),
+      array_buffer_tracker_(NULL) {
 // Allow build-time customization of the max semispace size. Building
 // V8 with snapshots and a non-default max semispace size is much
 // easier if you can define it as part of the build environment.
 #if defined(V8_MAX_SEMISPACE_SIZE)
   max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 #endif

   // Ensure old_generation_size_ is a multiple of kPageSize.
   DCHECK((max_old_generation_size_ & (Page::kPageSize - 1)) == 0);

(...skipping 1382 matching lines...)
   LOG(isolate_, ResourceEvent("scavenge", "begin"));

   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

   SelectScavengingVisitorsTable();

-  PrepareArrayBufferDiscoveryInNewSpace();
+  array_buffer_tracker()->PrepareDiscoveryInNewSpace();

   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
   // objects, we treat the bottom of the to space as a queue. Newly
   // copied and unswept objects lie between a 'front' mark and the
(...skipping 81 matching lines...)
   ProcessYoungWeakReferences(&weak_object_retainer);

   DCHECK(new_space_front == new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

-  FreeDeadArrayBuffers(true);
+  array_buffer_tracker()->FreeDead(true);

   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

   LOG(isolate_, ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }

(...skipping 74 matching lines...)
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
-                                  size_t length) {
-  if (!data) return;
-  if (in_new_space) {
-    live_array_buffers_for_scavenge_[data] = length;
-  } else {
-    live_array_buffers_[data] = length;
-  }
-
-  // We may go over the limit of externally allocated memory here. We call the
-  // api function to trigger a GC in this case.
-  reinterpret_cast<v8::Isolate*>(isolate_)
-      ->AdjustAmountOfExternalAllocatedMemory(length);
-}
-
-
-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
-  if (!data) return;
-
-  std::map<void*, size_t>* live_buffers =
-      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
-  std::map<void*, size_t>* not_yet_discovered_buffers =
-      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
-                   : &not_yet_discovered_array_buffers_;
-
-  DCHECK(live_buffers->count(data) > 0);
-
-  size_t length = (*live_buffers)[data];
-  live_buffers->erase(data);
-  not_yet_discovered_buffers->erase(data);
-
-  amount_of_external_allocated_memory_ -= length;
-}
-
-
-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  if (in_new_space) {
-    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-  } else {
-    not_yet_discovered_array_buffers_.erase(data);
-  }
-}
-
-
-void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
-  size_t freed_memory = 0;
-  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-    live_array_buffers_for_scavenge_.erase(buffer.first);
-  }
-
-  if (!from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_) {
-      isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-      freed_memory += buffer.second;
-      live_array_buffers_.erase(buffer.first);
-    }
-  }
-
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
-
-  // Do not call through the api as this code is triggered while doing a GC.
-  amount_of_external_allocated_memory_ -= freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffers() {
-  size_t freed_memory = 0;
-  for (auto& buffer : live_array_buffers_) {
-    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-  }
-  for (auto& buffer : live_array_buffers_for_scavenge_) {
-    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-  }
-  live_array_buffers_.clear();
-  live_array_buffers_for_scavenge_.clear();
-  not_yet_discovered_array_buffers_.clear();
-  not_yet_discovered_array_buffers_for_scavenge_.clear();
-
-  if (freed_memory > 0) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
-  }
-}
-
-
-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-}
-
-
-void Heap::PromoteArrayBuffer(Object* obj) {
-  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
-  live_array_buffers_for_scavenge_.erase(data);
-  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-}
-
-
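Note: the block removed above is the scavenge-time discovery protocol that used to live directly on Heap, driven by four std::map<void*, size_t> members (live and not-yet-discovered, each split into a new-space and an old-space variant). This CL presumably moves that bookkeeping into ArrayBufferTracker largely unchanged; the sketch below restates the core of the removed FreeDeadArrayBuffers as it might look inside the tracker. The member names are carried over from the removed Heap fields as assumptions, and DecreaseExternalAllocatedMemory is a hypothetical accessor; neither is taken from src/heap/array-buffer-tracker.cc, which is not part of this file's diff.

// Sketch only: how the removed FreeDeadArrayBuffers logic could look inside
// the tracker. Buffers that were never re-discovered during the GC cycle are
// dead; their backing stores go back to the embedder-provided allocator.
void ArrayBufferTracker::FreeDead(bool from_scavenge) {
  size_t freed_memory = 0;
  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
    heap_->isolate()->array_buffer_allocator()->Free(buffer.first,
                                                     buffer.second);
    freed_memory += buffer.second;
    live_array_buffers_for_scavenge_.erase(buffer.first);
  }
  if (!from_scavenge) {
    for (auto& buffer : not_yet_discovered_array_buffers_) {
      heap_->isolate()->array_buffer_allocator()->Free(buffer.first,
                                                       buffer.second);
      freed_memory += buffer.second;
      live_array_buffers_.erase(buffer.first);
    }
  }
  // Reset the candidate sets: everything still alive is undiscovered again.
  not_yet_discovered_array_buffers_for_scavenge_ =
      live_array_buffers_for_scavenge_;
  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
  // Do not go through the v8::Isolate API here; this runs during a GC.
  // Hypothetical accessor standing in for the direct member update the old
  // Heap code did (amount_of_external_allocated_memory_ -= freed_memory).
  heap_->DecreaseExternalAllocatedMemory(freed_memory);
}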
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
(...skipping 209 matching lines...)
   CreateFillerObjectAt(object->address() + object_size, filler_size);
   return object;
 }


 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
   return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned);
 }


+void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
+  return array_buffer_tracker()->RegisterNew(buffer);
+}
+
+
+void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
+  return array_buffer_tracker()->Unregister(buffer);
+}
+
+
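Note: the new Heap entry points above just forward to the tracker. For orientation while reviewing heap.cc in isolation, here is the public surface of ArrayBufferTracker as it can be inferred from the call sites in this file alone; the real declaration lives in src/heap/array-buffer-tracker.h (not shown in this diff) and may differ.

// Interface sketch inferred from call sites in heap.cc; not the actual header.
class ArrayBufferTracker {
 public:
  explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
  // Presumably replaces Heap::TearDownArrayBuffers (patch set "destructor").
  ~ArrayBufferTracker();

  // Called via Heap::RegisterNewArrayBuffer / Heap::UnregisterArrayBuffer when
  // a JSArrayBuffer acquires or releases an external backing store.
  void RegisterNew(JSArrayBuffer* buffer);
  void Unregister(JSArrayBuffer* buffer);

  // Scavenge support: snapshot the new-space candidates before the semispaces
  // flip, move buffers whose owners were promoted to old space, and free
  // everything that was not re-discovered.
  void PrepareDiscoveryInNewSpace();
  void Promote(JSArrayBuffer* buffer);
  void FreeDead(bool from_scavenge);

 private:
  Heap* heap_;
  // Presumably the same live/not-yet-discovered std::map<void*, size_t>
  // bookkeeping that this CL removes from Heap.
};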
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
 };


 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };


 template <MarksHandling marks_handling,
(...skipping 281 matching lines...)


 static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
                                          HeapObject* object) {
   ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

   Heap* heap = map->GetHeap();
   MapWord map_word = object->map_word();
   DCHECK(map_word.IsForwardingAddress());
   HeapObject* target = map_word.ToForwardingAddress();
-  if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+  if (!heap->InNewSpace(target)) {
+    heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
+  }
 }


 static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                      HeapObject* object) {
   int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
   EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
 }


(...skipping 3237 matching lines...)
     deferred_counters_[i] = 0;
   }

   tracer_ = new GCTracer(this);

   memory_reducer_ = new MemoryReducer(this);

   object_stats_ = new ObjectStats(this);
   object_stats_->ClearObjectStats(true);

+  array_buffer_tracker_ = new ArrayBufferTracker(this);
+
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));

   store_buffer()->SetUp();

   mark_compact_collector()->SetUp();

   return true;
 }

(...skipping 95 matching lines...)
     memory_reducer_->TearDown();
     delete memory_reducer_;
     memory_reducer_ = nullptr;
   }

   delete object_stats_;
   object_stats_ = nullptr;

   WaitUntilUnmappingOfFreeChunksCompleted();

-  TearDownArrayBuffers();
+  delete array_buffer_tracker_;
+  array_buffer_tracker_ = nullptr;

   isolate_->global_handles()->TearDown();

   external_string_table_.TearDown();

   mark_compact_collector()->TearDown();

   delete tracer_;
   tracer_ = nullptr;

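Note: ownership is now symmetric: SetUp() allocates the tracker and TearDown() simply deletes it, so the cleanup previously done by Heap::TearDownArrayBuffers presumably moves into ~ArrayBufferTracker, which would explain the patch set name. A minimal sketch under that assumption, reusing the map names from the removed Heap code:

// Sketch only, assuming the destructor took over Heap::TearDownArrayBuffers.
ArrayBufferTracker::~ArrayBufferTracker() {
  Isolate* isolate = heap_->isolate();
  size_t freed_memory = 0;
  for (auto& buffer : live_array_buffers_) {
    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
    freed_memory += buffer.second;
  }
  for (auto& buffer : live_array_buffers_for_scavenge_) {
    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
    freed_memory += buffer.second;
  }
  live_array_buffers_.clear();
  live_array_buffers_for_scavenge_.clear();
  not_yet_discovered_array_buffers_.clear();
  not_yet_discovered_array_buffers_for_scavenge_.clear();

  // Unlike FreeDead(), this does not run inside a GC, so going through the
  // public API (as the removed Heap::TearDownArrayBuffers did) is fine.
  if (freed_memory > 0) {
    reinterpret_cast<v8::Isolate*>(isolate)
        ->AdjustAmountOfExternalAllocatedMemory(
            -static_cast<int64_t>(freed_memory));
  }
}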
(...skipping 883 matching lines...)
     *object_sub_type = "CODE_AGE/" #name; \
     return true;
       CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
 #undef COMPARE_AND_RETURN_NAME
   }
   return false;
 }

 }  // namespace internal
 }  // namespace v8
