Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1188313006: Version 4.4.63.8 (cherry-pick) (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@4.4
Patch Set: Created 5 years, 6 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 1543 matching lines...)
   // Clear descriptor cache.
   isolate_->descriptor_lookup_cache()->Clear();

   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

   SelectScavengingVisitorsTable();

   incremental_marking()->PrepareForScavenge();

+  PrepareArrayBufferDiscoveryInNewSpace();
+
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
   new_space_.ResetAllocationInfo();

   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation. For to-space
   // objects, we treat the bottom of the to space as a queue. Newly
   // copied and unswept objects lie between a 'front' mark and the
   // allocation pointer.
(...skipping 61 matching lines...)
   ProcessYoungWeakReferences(&weak_object_retainer);

   DCHECK(new_space_front == new_space_.top());

   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());

+  FreeDeadArrayBuffers(true);
+
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));

   LOG(isolate_, ResourceEvent("scavenge", "end"));

   gc_state_ = NOT_IN_GC;
 }
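The two calls added to Scavenge() above bracket a discovery pass over ArrayBuffer backing stores in new space: PrepareArrayBufferDiscoveryInNewSpace() pessimistically marks every registered new-space buffer as not yet discovered before the semispaces flip, the evacuation visitors then report each JSArrayBuffer they reach, and FreeDeadArrayBuffers(true) releases whatever was never reached. A standalone sketch of that protocol (illustrative names, not V8 code):

    #include <cassert>
    #include <cstdlib>
    #include <map>

    static std::map<void*, size_t> live;                // all registered buffers
    static std::map<void*, size_t> not_yet_discovered;  // candidates for freeing

    // Before the scavenge: pessimistically treat every tracked buffer as dead.
    static void PrepareDiscovery() { not_yet_discovered = live; }

    // During the scavenge: a visitor reached the buffer, so it survives.
    static void RegisterLive(void* data) { not_yet_discovered.erase(data); }

    // After the scavenge: free everything that was never discovered.
    static size_t FreeDead() {
      size_t freed = 0;
      for (auto& buffer : not_yet_discovered) {
        free(buffer.first);
        freed += buffer.second;
        live.erase(buffer.first);
      }
      not_yet_discovered = live;  // reset candidates, as the helper in this CL does
      return freed;
    }

    int main() {
      void* a = malloc(16);
      void* b = malloc(32);
      live[a] = 16;
      live[b] = 32;
      PrepareDiscovery();
      RegisterLive(a);           // only `a` is reached by the visitors
      assert(FreeDead() == 32);  // `b` was dead and gets freed
      assert(live.size() == 1 && live.count(a) == 1);
      return 0;
    }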
(...skipping 73 matching lines...)
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }
-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                        void* data, size_t length) {
+  live_buffers[data] = length;
+}
+
+
+void Heap::UnregisterArrayBufferHelper(
+    std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  DCHECK(live_buffers.count(data) > 0);
+  live_buffers.erase(data);
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+void Heap::RegisterLiveArrayBufferHelper(
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+size_t Heap::FreeDeadArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  size_t freed_memory = 0;
+  for (auto buffer = not_yet_discovered_buffers.begin();
+       buffer != not_yet_discovered_buffers.end(); ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    freed_memory += buffer->second;
+    live_buffers.erase(buffer->first);
+  }
+  not_yet_discovered_buffers = live_buffers;
+  return freed_memory;
+}
+
+
+void Heap::TearDownArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
+       ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_buffers.clear();
+  not_yet_discovered_buffers.clear();
+}
+
+
+void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
+                                  size_t length) {
   if (!data) return;
-  live_array_buffers_[data] = length;
+  RegisterNewArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
+      length);
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }


-void Heap::UnregisterArrayBuffer(void* data) {
+void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
+  UnregisterArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_,
+      data);
 }


-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  RegisterLiveArrayBufferHelper(in_new_space
+                                    ? not_yet_discovered_new_array_buffers_
+                                    : not_yet_discovered_array_buffers_,
+                                data);
 }


-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
-  }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
+void Heap::FreeDeadArrayBuffers(bool in_new_space) {
+  size_t freed_memory = FreeDeadArrayBuffersHelper(
+      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_);
+  if (freed_memory) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
+  }
 }
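One detail of the rewritten FreeDeadArrayBuffers worth noting: the old code subtracted each buffer's size from amount_of_external_allocated_memory_ directly (its comment warned that the public API could trigger another GC), while the new code reports one batched, signed delta through AdjustAmountOfExternalAllocatedMemory. The cast in -static_cast<int64_t>(freed_memory) matters because freed_memory is an unsigned size_t, and negating it without widening to a signed type wraps to a huge positive value. A minimal standalone demonstration (not V8 code):

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main() {
      size_t freed_memory = 4096;
      // Wrong: unary minus on an unsigned type wraps around.
      printf("%zu\n", -freed_memory);  // 18446744073709547520 on a 64-bit build
      // Right: widen to a signed 64-bit type first, as the new code does.
      int64_t delta = -static_cast<int64_t>(freed_memory);
      printf("%" PRId64 "\n", delta);  // -4096
      return 0;
    }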
 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
+                             not_yet_discovered_array_buffers_);
+  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
+                             not_yet_discovered_new_array_buffers_);
 }
+void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
+  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
+}
+
+
+void Heap::PromoteArrayBuffer(Object* obj) {
+  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  DCHECK(live_new_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_new_array_buffers_[data];
+  live_new_array_buffers_.erase(data);
+  not_yet_discovered_new_array_buffers_.erase(data);
+}
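PromoteArrayBuffer is the reason the registry is split in two. A buffer owned by a new-space object lives in live_new_array_buffers_ and is reconsidered at every scavenge; once the owning JSArrayBuffer is promoted, the entry must move to the old-generation map, and it must also leave not_yet_discovered_new_array_buffers_, or the very scavenge that promoted it would free its backing store as "dead". A minimal sketch of that move, with illustrative names (not V8 code):

    #include <cassert>
    #include <cstddef>
    #include <map>

    static std::map<void*, size_t> live_old;          // owned by old-space objects
    static std::map<void*, size_t> live_new;          // owned by new-space objects
    static std::map<void*, size_t> undiscovered_new;  // this scavenge's free candidates

    static void Promote(void* data) {
      assert(live_new.count(data) > 0);
      // Move the entry to the old-generation registry...
      live_old[data] = live_new[data];
      live_new.erase(data);
      // ...and pull it out of the candidate set, otherwise the scavenge that
      // promoted the buffer would free its backing store as dead.
      undiscovered_new.erase(data);
    }

    int main() {
      int backing = 0;
      live_new[&backing] = sizeof(backing);
      undiscovered_new = live_new;  // PrepareArrayBufferDiscoveryInNewSpace()
      Promote(&backing);            // the owning object left new space
      assert(undiscovered_new.empty() && live_old.count(&backing) == 1);
      return 0;
    }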
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
(...skipping 188 matching lines...)
  public:
   static void Initialize() {
     table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
     table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
+    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

     table_.Register(
         kVisitNativeContext,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Context::kSize>);

     table_.Register(
         kVisitConsString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             ConsString::kSize>);

     table_.Register(
         kVisitSlicedString,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SlicedString::kSize>);

     table_.Register(
         kVisitSymbol,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             Symbol::kSize>);

     table_.Register(
         kVisitSharedFunctionInfo,
         &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
             SharedFunctionInfo::kSize>);

     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

-    table_.Register(kVisitJSArrayBuffer,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSDataView,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(kVisitJSRegExp,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     if (marks_handling == IGNORE_MARKS) {
(...skipping 226 matching lines...)
   static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                                HeapObject* object) {
     int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
     EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object,
                                                   object_size);
   }


+  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
+                                           HeapObject* object) {
+    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
+
+    Heap* heap = map->GetHeap();
+    MapWord map_word = object->map_word();
+    DCHECK(map_word.IsForwardingAddress());
+    HeapObject* target = map_word.ToForwardingAddress();
+    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+  }
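EvacuateJSArrayBuffer first evacuates the buffer like any other pointer object, then reads the old copy's map word: the copying pass overwrites it with a forwarding address pointing at the new copy, so the visitor can tell where the object ended up and call PromoteArrayBuffer when the target is no longer in new space. A toy model of forwarding pointers (this is not V8's actual MapWord encoding):

    #include <cassert>
    #include <cstdint>

    struct Object {
      uintptr_t header;  // either a type descriptor or a forwarding address
    };

    const uintptr_t kForwardedBit = 1;

    // The copying pass overwrites the old copy's header with the new location.
    void Forward(Object* from, Object* to) {
      from->header = reinterpret_cast<uintptr_t>(to) | kForwardedBit;
    }

    Object* ForwardingAddress(const Object* from) {
      assert(from->header & kForwardedBit);
      return reinterpret_cast<Object*>(from->header & ~kForwardedBit);
    }

    int main() {
      Object old_copy{0x42};
      Object new_copy{0x42};
      Forward(&old_copy, &new_copy);
      assert(ForwardingAddress(&old_copy) == &new_copy);
      return 0;
    }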
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
     EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,
                                                   object_size);
   }


   static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                               HeapObject* object) {
(...skipping 4208 matching lines...)
       }
       delete list;
     } else {
       prev = list;
     }
     list = next;
   }
 }
 }
 }  // namespace v8::internal