Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1177083003: Reland "Keep track of array buffers in new space separately" (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: fix (created 5 years, 6 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 1605 matching lines...)
   LOG(isolate_, ResourceEvent("scavenge", "begin"));

   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

   SelectScavengingVisitorsTable();

+  PrepareArrayBufferDiscoveryInNewSpace();
+
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
   // objects, we treat the bottom of the to space as a queue. Newly
   // copied and unswept objects lie between a 'front' mark and the
   // allocation pointer.
(...skipping 61 matching lines...)
   ProcessYoungWeakReferences(&weak_object_retainer);

   DCHECK(new_space_front == new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

+  FreeDeadArrayBuffers(true);
+
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

   LOG(isolate_, ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }


(...skipping 73 matching lines...)
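The two added calls bracket the whole scavenge: PrepareArrayBufferDiscoveryInNewSpace() snapshots every live new-space backing store as "not yet discovered" before the semispaces flip, and FreeDeadArrayBuffers(true) releases whatever the scavenge never visited. Below is a minimal, self-contained sketch of that mark-then-sweep protocol; MockAllocator and the driver code are illustrative stand-ins, not V8 API.

    // Illustrative sketch only: mimics the discovery protocol with plain malloc.
    #include <cstdio>
    #include <cstdlib>
    #include <map>

    struct MockAllocator {  // stand-in for v8::ArrayBuffer::Allocator
      void* Allocate(size_t n) { return std::malloc(n); }
      void Free(void* p, size_t) { std::free(p); }
    };

    int main() {
      MockAllocator allocator;
      std::map<void*, size_t> live_new, not_yet_discovered_new;

      // RegisterNewArrayBuffer(true, ...): two buffers allocated in new space.
      void* a = allocator.Allocate(16);
      void* b = allocator.Allocate(32);
      live_new[a] = 16;
      live_new[b] = 32;

      // PrepareArrayBufferDiscoveryInNewSpace(): presume everything is dead.
      not_yet_discovered_new = live_new;

      // During scavenge the visitor reaches buffer 'a' and reports it live.
      not_yet_discovered_new.erase(a);  // RegisterLiveArrayBuffer(true, a)

      // FreeDeadArrayBuffers(true): anything still undiscovered is garbage.
      size_t freed_memory = 0;
      for (const auto& buffer : not_yet_discovered_new) {
        allocator.Free(buffer.first, buffer.second);
        freed_memory += buffer.second;
        live_new.erase(buffer.first);
      }
      not_yet_discovered_new = live_new;  // reset for the next GC cycle
      std::printf("freed %zu bytes\n", freed_memory);  // prints: freed 32 bytes

      allocator.Free(a, 16);  // tear-down of the surviving buffer
      return 0;
    }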
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                        void* data, size_t length) {
+  live_buffers[data] = length;
+}
+
+
+void Heap::UnregisterArrayBufferHelper(
+    std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  DCHECK(live_buffers.count(data) > 0);
+  live_buffers.erase(data);
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+void Heap::RegisterLiveArrayBufferHelper(
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+size_t Heap::FreeDeadArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  size_t freed_memory = 0;
+  for (auto buffer = not_yet_discovered_buffers.begin();
+       buffer != not_yet_discovered_buffers.end(); ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    freed_memory += buffer->second;
+    live_buffers.erase(buffer->first);
+  }
+  not_yet_discovered_buffers = live_buffers;
+  return freed_memory;
+}
+
+
+void Heap::TearDownArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
+       ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_buffers.clear();
+  not_yet_discovered_buffers.clear();
+}
+
+
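All five helpers take the affected map pair by reference, so identical logic serves both the old-generation maps and the new new-space maps; the public entry points below only select the pair. A condensed, hypothetical model of that factoring (BufferTracker and all member names are invented):

    #include <cassert>
    #include <map>

    using BufferMap = std::map<void*, size_t>;

    // Shared logic, parameterized on whichever map pair the caller owns.
    static void UnregisterHelper(BufferMap& live, BufferMap& not_yet_discovered,
                                 void* data) {
      assert(live.count(data) > 0);
      live.erase(data);
      not_yet_discovered.erase(data);
    }

    struct BufferTracker {
      BufferMap live_old, not_yet_old;  // buffers owned by the old generation
      BufferMap live_new, not_yet_new;  // buffers owned by new space

      void Unregister(bool in_new_space, void* data) {
        // The entry point only picks the pair; the helper is space-agnostic.
        UnregisterHelper(in_new_space ? live_new : live_old,
                         in_new_space ? not_yet_new : not_yet_old, data);
      }
    };

    int main() {
      BufferTracker tracker;
      int dummy;
      tracker.live_new[&dummy] = 8;
      tracker.not_yet_new[&dummy] = 8;
      tracker.Unregister(true, &dummy);
      assert(tracker.live_new.empty() && tracker.not_yet_new.empty());
      return 0;
    }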
+void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
+                                  size_t length) {
   if (!data) return;
-  live_array_buffers_[data] = length;
+  RegisterNewArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
+      length);
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }


-void Heap::UnregisterArrayBuffer(void* data) {
+void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
+  UnregisterArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_,
+      data);
 }


-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  RegisterLiveArrayBufferHelper(in_new_space
+                                    ? not_yet_discovered_new_array_buffers_
+                                    : not_yet_discovered_array_buffers_,
+                                data);
 }


-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
-  }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
+void Heap::FreeDeadArrayBuffers(bool in_new_space) {
+  size_t freed_memory = FreeDeadArrayBuffersHelper(
+      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_);
+  if (freed_memory) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
+  }
 }


 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
+                             not_yet_discovered_array_buffers_);
+  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
+                             not_yet_discovered_new_array_buffers_);
 }

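One detail worth noting in the new FreeDeadArrayBuffers: freed_memory is a size_t, so it is cast to int64_t before negation. On a 32-bit target, negating the unsigned value first would wrap around and report a huge positive delta to AdjustAmountOfExternalAllocatedMemory instead of a negative one. Also visible in this hunk: the reland reports the decrease through the API method rather than subtracting from amount_of_external_allocated_memory_ directly as the old code did. A stand-alone illustration of the cast (not V8 code):

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Pretend size_t is 32 bits wide, as on V8's 32-bit targets.
      uint32_t freed_memory = 4096;

      int64_t wrong = -freed_memory;                        // wraps to 4294963200
      int64_t right = -static_cast<int64_t>(freed_memory);  // -4096 as intended

      std::printf("%lld vs %lld\n", (long long)wrong, (long long)right);
      return 0;
    }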
+void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
+  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
+}
+
+
+void Heap::PromoteArrayBuffer(Object* obj) {
+  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  DCHECK(live_new_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_new_array_buffers_[data];
+  live_new_array_buffers_.erase(data);
+  not_yet_discovered_new_array_buffers_.erase(data);
+}
+
+
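PromoteArrayBuffer re-homes a backing store's bookkeeping once the scavenger has copied its JSArrayBuffer out of new space: the entry moves from the new-space maps to the old-generation map and stops being a candidate for the new-space sweep. A flattened, hypothetical version of just that map shuffle:

    #include <cassert>
    #include <map>

    int main() {
      std::map<void*, size_t> live_new, not_yet_new, live_old;
      int backing_store;  // stands in for a malloc'd backing store
      void* data = &backing_store;

      live_new[data] = 64;     // registered at allocation with in_new_space
      not_yet_new[data] = 64;  // pending discovery in this scavenge

      // The scavenger copied the JSArrayBuffer out of new space:
      live_old[data] = live_new[data];  // now tracked as an old-space buffer
      live_new.erase(data);
      not_yet_new.erase(data);  // no longer swept by FreeDeadArrayBuffers(true)

      assert(live_old.count(data) == 1 && live_new.empty() && not_yet_new.empty());
      return 0;
    }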
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
(...skipping 229 matching lines...)
  public:
   static void Initialize() {
     table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
     table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
+    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

     table_.Register(
         kVisitNativeContext,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Context::kSize>);

     table_.Register(
         kVisitConsString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             ConsString::kSize>);

     table_.Register(
         kVisitSlicedString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SlicedString::kSize>);

     table_.Register(
         kVisitSymbol,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Symbol::kSize>);

     table_.Register(
         kVisitSharedFunctionInfo,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SharedFunctionInfo::kSize>);

     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

-    table_.Register(kVisitJSArrayBuffer,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSDataView,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSRegExp,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     if (marks_handling == IGNORE_MARKS) {
(...skipping 207 matching lines...)
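The registration change above swaps the generic POINTER_OBJECT visit for the dedicated EvacuateJSArrayBuffer callback (defined further down). The scavenger's static table is, in essence, an array of function pointers indexed by visitor id, so registering a different callback changes dispatch for every JSArrayBuffer the scavenger encounters. A toy model of that dispatch, with invented names:

    #include <cstdio>

    using Callback = void (*)(const char*);
    enum VisitorId { kVisitByteArray, kVisitJSArrayBuffer, kVisitorIdCount };

    static Callback table[kVisitorIdCount];  // one slot per visitor id

    static void EvacuateByteArray(const char* n) {
      std::printf("plain data copy: %s\n", n);
    }
    static void EvacuateJSArrayBuffer(const char* n) {
      std::printf("pointer visit + promotion bookkeeping: %s\n", n);
    }

    int main() {
      table[kVisitByteArray] = &EvacuateByteArray;
      table[kVisitJSArrayBuffer] = &EvacuateJSArrayBuffer;  // this patch's swap
      table[kVisitJSArrayBuffer]("buffer");  // dispatch by visitor id
      return 0;
    }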
 }


 static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                              HeapObject* object) {
   int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
   EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
 }


+static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
+                                         HeapObject* object) {
+  ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
+
+  Heap* heap = map->GetHeap();
+  MapWord map_word = object->map_word();
+  DCHECK(map_word.IsForwardingAddress());
+  HeapObject* target = map_word.ToForwardingAddress();
+  if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+}
+
+
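After the generic visit has copied the object, the stale copy's map word holds a forwarding pointer, and where that pointer lands tells EvacuateJSArrayBuffer whether the buffer was promoted. A toy model of the forwarding-address check, under an invented, simplified object layout (real V8 packs this into MapWord):

    #include <cassert>
    #include <cstdio>

    struct Object {
      Object* forwarding = nullptr;  // stands in for the map word after copy
      bool in_new_space = true;
    };

    // Stand-in for EvacuateObject: copies 'from' and leaves a forwarding pointer.
    static void Evacuate(Object* from, Object* to, bool promote) {
      to->in_new_space = !promote;
      from->forwarding = to;
    }

    int main() {
      Object from, to;
      Evacuate(&from, &to, /*promote=*/true);

      Object* target = from.forwarding;  // map_word.ToForwardingAddress()
      assert(target != nullptr);         // DCHECK(map_word.IsForwardingAddress())
      if (!target->in_new_space) {
        // heap->PromoteArrayBuffer(target): re-home the tracking entry.
        std::printf("buffer promoted; move bookkeeping to old-space maps\n");
      }
      return 0;
    }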
 static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                      HeapObject* object) {
   int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
   EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
 }


 static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                             HeapObject* object) {
   int object_size = SeqOneByteString::cast(object)
(...skipping 4255 matching lines...)
     *object_type = "CODE_TYPE";           \
     *object_sub_type = "CODE_AGE/" #name; \
     return true;
     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
 #undef COMPARE_AND_RETURN_NAME
   }
   return false;
 }
 }  // namespace internal
 }  // namespace v8