Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1178403003: Revert of "Keep track of array buffers in new space separately" (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 6 months ago
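
Context for the diff below: the patch reverts the two-map-pair scheme for tracking array buffer backing stores. Before this revert, new-space buffers lived in their own live_new_array_buffers_ / not_yet_discovered_new_array_buffers_ maps beside the old-space pair, and every public entry point took a bool in_new_space to pick the pair. A minimal sketch of the pattern being removed, using standalone stand-in types (TwoSpaceSketch and BufferMap are hypothetical names, not V8's):

#include <cstddef>
#include <map>

using BufferMap = std::map<void*, std::size_t>;

struct TwoSpaceSketch {
  BufferMap live_old_space, live_new_space;  // the two map pairs, abridged
  void RegisterNew(bool in_new_space, void* data, std::size_t length) {
    if (!data) return;
    // Select the map by space, then run the space-agnostic helper body.
    BufferMap& live = in_new_space ? live_new_space : live_old_space;
    live[data] = length;
  }
};

The revert collapses this back to a single pair of maps and drops the bool parameter from RegisterNewArrayBuffer, UnregisterArrayBuffer, RegisterLiveArrayBuffer, and FreeDeadArrayBuffers.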
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 1605 matching lines...)
@@ -1616,22 +1616,20 @@
   LOG(isolate_, ResourceEvent("scavenge", "begin"));

   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

   SelectScavengingVisitorsTable();

-  PrepareArrayBufferDiscoveryInNewSpace();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
   // objects, we treat the bottom of the to space as a queue. Newly
   // copied and unswept objects lie between a 'front' mark and the
   // allocation pointer.
(...skipping 61 matching lines...)
@@ -1699,22 +1697,20 @@
   ProcessYoungWeakReferences(&weak_object_retainer);

   DCHECK(new_space_front == new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

-  FreeDeadArrayBuffers(true);
-
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

   LOG(isolate_, ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }

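The deletions in the two Scavenge hunks above take the array-buffer hooks out of the scavenger: PrepareArrayBufferDiscoveryInNewSpace() snapshotted the live new-space buffers before evacuation, and FreeDeadArrayBuffers(true) released every buffer the scavenge never visited. A compressed model of that per-scavenge cycle (ScavengeBuffers, the survivors array, and the Free callback are illustrative stand-ins, not V8 API):

#include <cstddef>
#include <map>

using BufferMap = std::map<void*, std::size_t>;

std::size_t ScavengeBuffers(BufferMap& live_new, void* const* survivors,
                            std::size_t n, void (*Free)(void*, std::size_t)) {
  // PrepareArrayBufferDiscoveryInNewSpace(): assume everything is dead.
  BufferMap not_yet_discovered = live_new;
  // RegisterLiveArrayBuffer(): each buffer the GC visits is proven live.
  for (std::size_t i = 0; i < n; ++i) not_yet_discovered.erase(survivors[i]);
  // FreeDeadArrayBuffers(true): whatever was never visited gets freed.
  std::size_t freed = 0;
  for (auto& dead : not_yet_discovered) {
    Free(dead.first, dead.second);
    freed += dead.second;
    live_new.erase(dead.first);
  }
  return freed;
}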
(...skipping 73 matching lines...)
@@ -1794,135 +1790,63 @@
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
-
-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
-                                  size_t length) {
+void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
-      length);
+  live_array_buffers_[data] = length;
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }


-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
+void Heap::UnregisterArrayBuffer(void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_,
-      data);
+  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_.erase(data);
+  not_yet_discovered_array_buffers_.erase(data);
 }


-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
-  RegisterLiveArrayBufferHelper(in_new_space
-                                    ? not_yet_discovered_new_array_buffers_
-                                    : not_yet_discovered_array_buffers_,
-                                data);
+void Heap::RegisterLiveArrayBuffer(void* data) {
+  not_yet_discovered_array_buffers_.erase(data);
 }


-void Heap::FreeDeadArrayBuffers(bool in_new_space) {
-  size_t freed_memory = FreeDeadArrayBuffersHelper(
-      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_);
-  if (freed_memory) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
-  }
+void Heap::FreeDeadArrayBuffers() {
+  for (auto buffer = not_yet_discovered_array_buffers_.begin();
+       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    // Don't use the API method here since this could trigger another GC.
+    amount_of_external_allocated_memory_ -= buffer->second;
+    live_array_buffers_.erase(buffer->first);
+  }
+  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }


 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
-  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
-                             not_yet_discovered_new_array_buffers_);
+  for (auto buffer = live_array_buffers_.begin();
+       buffer != live_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_array_buffers_.clear();
+  not_yet_discovered_array_buffers_.clear();
 }


-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
-  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
-}
-
-
-void Heap::PromoteArrayBuffer(Object* obj) {
-  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  DCHECK(live_new_array_buffers_.count(data) > 0);
-  live_array_buffers_[data] = live_new_array_buffers_[data];
-  live_new_array_buffers_.erase(data);
-  not_yet_discovered_new_array_buffers_.erase(data);
-}
-
-
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
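The added (+) side of the hunk above is the post-revert scheme in full: one live_array_buffers_ map, one not_yet_discovered_array_buffers_ map, and no promotion step. Folded into a self-contained class for illustration (BufferTrackerSketch is a hypothetical name; std::free stands in for isolate_->array_buffer_allocator()->Free, and the external-memory accounting is omitted):

#include <cstddef>
#include <cstdlib>
#include <map>

class BufferTrackerSketch {
 public:
  // RegisterNewArrayBuffer: remember the backing store and its size.
  void RegisterNew(void* data, std::size_t length) {
    if (!data) return;
    live_[data] = length;
  }
  // UnregisterArrayBuffer: the embedder took ownership; stop tracking.
  void Unregister(void* data) {
    if (!data) return;
    live_.erase(data);
    not_yet_discovered_.erase(data);
  }
  // RegisterLiveArrayBuffer: the GC visited this buffer, so it survives.
  void RegisterLive(void* data) { not_yet_discovered_.erase(data); }
  // FreeDeadArrayBuffers: free everything the GC never visited, then
  // reset the "not yet discovered" set to all remaining live buffers.
  std::size_t FreeDead() {
    std::size_t freed = 0;
    for (auto& buffer : not_yet_discovered_) {
      std::free(buffer.first);
      freed += buffer.second;
      live_.erase(buffer.first);
    }
    not_yet_discovered_ = live_;
    return freed;
  }

 private:
  std::map<void*, std::size_t> live_;
  std::map<void*, std::size_t> not_yet_discovered_;
};

Note the comment carried into the new FreeDeadArrayBuffers: the real code adjusts amount_of_external_allocated_memory_ directly rather than calling AdjustAmountOfExternalAllocatedMemory, because the API method could trigger another GC while one is already in progress.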
(...skipping 229 matching lines...)
@@ -2158,50 +2082,52 @@
  public:
   static void Initialize() {
     table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
     table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

     table_.Register(
         kVisitNativeContext,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Context::kSize>);

     table_.Register(
         kVisitConsString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             ConsString::kSize>);

     table_.Register(
         kVisitSlicedString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SlicedString::kSize>);

     table_.Register(
         kVisitSymbol,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Symbol::kSize>);

     table_.Register(
         kVisitSharedFunctionInfo,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SharedFunctionInfo::kSize>);

     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

+    table_.Register(kVisitJSArrayBuffer,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
+
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSDataView,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSRegExp,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     if (marks_handling == IGNORE_MARKS) {
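The table above is a static dispatch table indexed by visitor id; the revert swaps the kVisitJSArrayBuffer entry from the specialized EvacuateJSArrayBuffer to the generic pointer-object visit. A minimal sketch of the mechanism, with illustrative enum values and callback type (not V8's actual VisitorId or signatures):

#include <cstdio>

enum VisitorId { kVisitByteArray, kVisitJSArrayBuffer, kNumVisitorIds };
using Callback = void (*)(const char* what);

struct VisitorTable {
  Callback callbacks_[kNumVisitorIds] = {};
  void Register(VisitorId id, Callback fn) { callbacks_[id] = fn; }
  void Visit(VisitorId id, const char* what) { callbacks_[id](what); }
};

static void GenericVisit(const char* what) {
  std::printf("generic visit: %s\n", what);
}

int main() {
  VisitorTable table;
  table.Register(kVisitByteArray, &GenericVisit);
  table.Register(kVisitJSArrayBuffer, &GenericVisit);  // post-revert: generic
  table.Visit(kVisitJSArrayBuffer, "JSArrayBuffer");
  return 0;
}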
(...skipping 207 matching lines...)
@@ -2415,32 +2341,20 @@
   }


   static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                                HeapObject* object) {
     int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
     EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
   }


-  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                           HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-    Heap* heap = map->GetHeap();
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
-  }
-
-
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
     EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
   }


   static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                               HeapObject* object) {
     int object_size = SeqOneByteString::cast(object)
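The deleted EvacuateJSArrayBuffer was the glue between evacuation and the two-map scheme: after the generic POINTER_OBJECT visit copied the buffer, it read the forwarding address out of the map word and, if the copy landed outside new space, moved the backing-store entry from the new-space map to the old-space map via PromoteArrayBuffer. A toy model of that post-copy step (ToyObject and its fields are stand-ins, not V8's object layout):

#include <cassert>
#include <cstddef>
#include <map>

struct ToyObject {
  ToyObject* forwarding = nullptr;  // where evacuation copied the object
  bool in_new_space = true;         // which space the copy landed in
  void* backing_store = nullptr;
};

void PromoteIfLeftNewSpace(ToyObject* obj,
                           std::map<void*, std::size_t>& live_new,
                           std::map<void*, std::size_t>& live_old) {
  ToyObject* target = obj->forwarding;
  assert(target != nullptr);  // mirrors DCHECK(map_word.IsForwardingAddress())
  if (target->in_new_space || target->backing_store == nullptr) return;
  auto it = live_new.find(target->backing_store);
  if (it == live_new.end()) return;
  live_old[it->first] = it->second;  // PromoteArrayBuffer's bookkeeping
  live_new.erase(it);
}

With the revert, kVisitJSArrayBuffer is registered as a plain POINTER_OBJECT visit (see the table hunk above), so no promotion bookkeeping remains.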
(...skipping 4255 matching lines...)
@@ -6702,10 +6616,10 @@
       *object_type = "CODE_TYPE";           \
       *object_sub_type = "CODE_AGE/" #name; \
       return true;
     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
 #undef COMPARE_AND_RETURN_NAME
   }
   return false;
 }
 }  // namespace internal
 }  // namespace v8
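
For completeness, a usage sketch exercising the single-map protocol end to end (reuses the hypothetical BufferTrackerSketch from the note above, assumed to be in scope; sizes are arbitrary):

#include <cstdio>
#include <cstdlib>

int main() {
  BufferTrackerSketch tracker;
  void* a = std::malloc(16);
  void* b = std::malloc(32);
  tracker.RegisterNew(a, 16);
  tracker.RegisterNew(b, 32);
  tracker.FreeDead();       // first cycle only seeds not_yet_discovered_
  tracker.RegisterLive(a);  // the GC visited a but never b
  std::size_t freed = tracker.FreeDead();
  std::printf("freed %zu bytes\n", freed);  // prints: freed 32 bytes
  tracker.Unregister(a);    // embedder takes ownership of a
  std::free(a);
  return 0;
}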
