Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1186613007: Revert of Reland "Keep track of array buffers in new space separately" (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 6 months ago
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 1605 matching lines...)
@@ -1616,22 +1616,20 @@
   LOG(isolate_, ResourceEvent("scavenge", "begin"));

   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

   SelectScavengingVisitorsTable();

-  PrepareArrayBufferDiscoveryInNewSpace();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
   // objects, we treat the bottom of the to space as a queue. Newly
   // copied and unswept objects lie between a 'front' mark and the
   // allocation pointer.
(...skipping 61 matching lines...)
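Note: the comment in the hunk above describes V8's Cheney-style scavenge, where to-space itself serves as the scan queue. A minimal, self-contained sketch of that idea (toy object layout and names, not V8's actual code):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Toy object: a forwarding pointer (set once evacuated) and two children.
    struct Obj {
      Obj* forward = nullptr;
      Obj* child[2] = {nullptr, nullptr};
    };

    // Cheney-style scavenge: objects between 'front' and 'alloc' in to-space
    // are copied but not yet scanned, forming an implicit FIFO queue.
    void Scavenge(std::vector<Obj*>& roots, Obj* to_space, size_t capacity) {
      size_t front = 0;  // queue head: next copied object to scan
      size_t alloc = 0;  // queue tail: to-space allocation pointer

      auto evacuate = [&](Obj* o) -> Obj* {
        if (o == nullptr) return nullptr;
        if (o->forward != nullptr) return o->forward;  // already copied
        assert(alloc < capacity);
        Obj* copy = &to_space[alloc++];  // bumping 'alloc' enqueues the copy
        *copy = *o;
        copy->forward = nullptr;
        o->forward = copy;  // leave forwarding address in from-space
        return copy;
      };

      for (Obj*& root : roots) root = evacuate(root);
      while (front < alloc) {            // drain the implicit queue
        Obj* o = &to_space[front++];     // advancing 'front' dequeues
        o->child[0] = evacuate(o->child[0]);
        o->child[1] = evacuate(o->child[1]);
      }
    }

The key property is that no separate worklist is allocated: every evacuation that bumps the allocation pointer enqueues, and scanning proceeds by advancing 'front' until it catches up.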
@@ -1699,22 +1697,20 @@
   ProcessYoungWeakReferences(&weak_object_retainer);

   DCHECK(new_space_front == new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

-  FreeDeadArrayBuffers(true);
-
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

   LOG(isolate_, ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }

(...skipping 73 matching lines...)
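Note: the survivor count fed to IncrementYoungSurvivorsCounter above combines both destinations of surviving objects: bytes promoted into old space during this scavenge (PromotedSpaceSizeOfObjects() minus the watermark taken at the start of Scavenge) plus everything still resident in new space (new_space_.Size()). For example, if promoted-space size grew from 10 MB to 12 MB and 3 MB of objects remain in to-space after the flip, the counter is incremented by (12 - 10) + 3 = 5 MB.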
@@ -1794,139 +1790,63 @@
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
-
-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
-                                  size_t length) {
+void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
-      length);
+  live_array_buffers_[data] = length;
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }


-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
+void Heap::UnregisterArrayBuffer(void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_,
-      data);
+  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_.erase(data);
+  not_yet_discovered_array_buffers_.erase(data);
 }


-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  RegisterLiveArrayBufferHelper(in_new_space
-                                    ? not_yet_discovered_new_array_buffers_
-                                    : not_yet_discovered_array_buffers_,
-                                data);
+void Heap::RegisterLiveArrayBuffer(void* data) {
+  not_yet_discovered_array_buffers_.erase(data);
 }


-void Heap::FreeDeadArrayBuffers(bool in_new_space) {
-  size_t freed_memory = FreeDeadArrayBuffersHelper(
-      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_);
-  if (freed_memory) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
-  }
+void Heap::FreeDeadArrayBuffers() {
+  for (auto buffer = not_yet_discovered_array_buffers_.begin();
+       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    // Don't use the API method here since this could trigger another GC.
+    amount_of_external_allocated_memory_ -= buffer->second;
+    live_array_buffers_.erase(buffer->first);
+  }
+  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }


 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
-  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
-                             not_yet_discovered_new_array_buffers_);
+  for (auto buffer = live_array_buffers_.begin();
+       buffer != live_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_array_buffers_.clear();
+  not_yet_discovered_array_buffers_.clear();
 }


-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
-  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
-}
-
-
-void Heap::PromoteArrayBuffer(Object* obj) {
-  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  DCHECK(live_new_array_buffers_.count(data) > 0);
-  live_array_buffers_[data] = live_new_array_buffers_[data];
-  live_new_array_buffers_.erase(data);
-  not_yet_discovered_new_array_buffers_.erase(data);
-}
-
-
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
(...skipping 229 matching lines...)
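Note: the added (right-hand) side of the hunk above restores the single-map tracking scheme: live_array_buffers_ maps every registered backing store to its length, each GC works from a snapshot of it in not_yet_discovered_array_buffers_, entries are erased as buffers are discovered live, and whatever remains is freed. A minimal standalone sketch of that lifecycle, with plain std::map and std::free standing in for the Heap members and the array buffer allocator (the real code takes the snapshot at the tail of FreeDeadArrayBuffers rather than in a separate GCStart):

    #include <cstdlib>
    #include <map>

    // One map of all live buffers, one map of buffers not yet proven live
    // during the current collection.
    static std::map<void*, size_t> live_buffers;
    static std::map<void*, size_t> not_yet_discovered;

    void RegisterNewBuffer(void* data, size_t length) {
      if (data == nullptr) return;
      live_buffers[data] = length;
    }

    void GCStart() {
      // Pessimistically assume every buffer is dead until discovered.
      not_yet_discovered = live_buffers;
    }

    void DiscoveredLive(void* data) { not_yet_discovered.erase(data); }

    void FreeDeadBuffers() {
      // Anything never discovered during marking is unreachable: free it.
      for (const auto& buffer : not_yet_discovered) {
        std::free(buffer.first);  // stand-in for array_buffer_allocator()->Free
        live_buffers.erase(buffer.first);
      }
      not_yet_discovered.clear();
    }

Usage mirrors the heap's calls: RegisterNewBuffer on allocation, GCStart before marking, DiscoveredLive from the JSArrayBuffer visitor, FreeDeadBuffers at the end of the cycle. Buffers registered between collections are absent from the snapshot, so they survive by default.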
@@ -2162,50 +2082,52 @@
  public:
   static void Initialize() {
     table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
     table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

     table_.Register(
         kVisitNativeContext,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Context::kSize>);

     table_.Register(
         kVisitConsString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             ConsString::kSize>);

     table_.Register(
         kVisitSlicedString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SlicedString::kSize>);

     table_.Register(
         kVisitSymbol,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Symbol::kSize>);

     table_.Register(
         kVisitSharedFunctionInfo,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SharedFunctionInfo::kSize>);

     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

+    table_.Register(kVisitJSArrayBuffer,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
+
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSDataView,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSRegExp,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     if (marks_handling == IGNORE_MARKS) {
(...skipping 207 matching lines...)
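Note: table_.Register above populates a static dispatch table indexed by visitor id, so the scavenger selects an evacuation routine with one indexed load and an indirect call instead of a switch over instance types; after this revert, kVisitJSArrayBuffer simply shares the generic pointer-object path. A rough sketch of the pattern, with hypothetical ids and handlers rather than V8's VisitorDispatchTable:

    #include <cstdio>

    // Hypothetical visitor ids; V8's real enumeration is much larger.
    enum VisitorId {
      kVisitByteArray,
      kVisitFixedArray,
      kVisitJSArrayBuffer,
      kVisitorIdCount
    };

    struct HeapObject;  // opaque for this sketch
    using VisitFn = void (*)(HeapObject* object);

    static VisitFn table[kVisitorIdCount];

    static void EvacuateByteArray(HeapObject*) { std::puts("data object"); }
    static void EvacuatePointerObject(HeapObject*) { std::puts("pointer object"); }

    static void Initialize() {
      table[kVisitByteArray] = &EvacuateByteArray;
      table[kVisitFixedArray] = &EvacuatePointerObject;
      // After this CL, JSArrayBuffer is visited like any other pointer
      // object instead of getting a dedicated EvacuateJSArrayBuffer handler.
      table[kVisitJSArrayBuffer] = &EvacuatePointerObject;
    }

    // Dispatch: one table load, no switch over instance types.
    static void Visit(VisitorId id, HeapObject* object) { table[id](object); }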
@@ -2419,32 +2341,20 @@
 }


 static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                              HeapObject* object) {
   int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
   EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
 }


-static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                         HeapObject* object) {
-  ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-  Heap* heap = map->GetHeap();
-  MapWord map_word = object->map_word();
-  DCHECK(map_word.IsForwardingAddress());
-  HeapObject* target = map_word.ToForwardingAddress();
-  if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
-}
-
-
 static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                      HeapObject* object) {
   int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
   EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
 }


 static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                             HeapObject* object) {
   int object_size = SeqOneByteString::cast(object)
(...skipping 4255 matching lines...)
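Note: the deleted EvacuateJSArrayBuffer above used the forwarding address that the copying collector leaves in the evacuated object's map word to detect promotion: once the target is no longer in new space, its backing store was moved from the new-space map to the old-space map via PromoteArrayBuffer. A simplified sketch of that bookkeeping, with two plain std::maps standing in for the removed live_new_array_buffers_ / live_array_buffers_ fields:

    #include <cassert>
    #include <cstddef>
    #include <map>

    // Two-map variant this CL removes: new-space buffers tracked separately.
    static std::map<void*, size_t> live_buffers;      // old-space backing stores
    static std::map<void*, size_t> live_new_buffers;  // new-space backing stores

    // Called when the scavenger moves a JSArrayBuffer out of new space.
    void PromoteBuffer(void* data) {
      auto it = live_new_buffers.find(data);
      assert(it != live_new_buffers.end());  // DCHECK in the original code
      live_buffers[data] = it->second;       // carry the byte count across
      live_new_buffers.erase(it);
    }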
@@ -6706,10 +6616,10 @@
       *object_type = "CODE_TYPE";           \
       *object_sub_type = "CODE_AGE/" #name; \
       return true;
     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
 #undef COMPARE_AND_RETURN_NAME
   }
   return false;
 }
 }  // namespace internal
 }  // namespace v8