OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1718 matching lines...)
1729 } | 1729 } |
1730 | 1730 |
1731 | 1731 |
1732 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { | 1732 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { |
1733 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer); | 1733 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer); |
1734 // Update the head of the list of contexts. | 1734 // Update the head of the list of contexts. |
1735 set_native_contexts_list(head); | 1735 set_native_contexts_list(head); |
1736 } | 1736 } |
1737 | 1737 |
1738 | 1738 |
1739 void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers, | |
1740 void* data, size_t length) { | |
1741 live_buffers[data] = length; | |
1742 } | |
1743 | |
1744 | |
1745 void Heap::UnregisterArrayBufferHelper( | |
1746 std::map<void*, size_t>& live_buffers, | |
1747 std::map<void*, size_t>& not_yet_discovered_buffers, void* data) { | |
1748 DCHECK(live_buffers.count(data) > 0); | |
1749 live_buffers.erase(data); | |
1750 not_yet_discovered_buffers.erase(data); | |
1751 } | |
1752 | |
1753 | |
1754 void Heap::RegisterLiveArrayBufferHelper( | |
1755 std::map<void*, size_t>& not_yet_discovered_buffers, void* data) { | |
1756 not_yet_discovered_buffers.erase(data); | |
1757 } | |
1758 | |
1759 | |
1760 size_t Heap::FreeDeadArrayBuffersHelper( | |
1761 Isolate* isolate, std::map<void*, size_t>& live_buffers, | |
1762 std::map<void*, size_t>& not_yet_discovered_buffers) { | |
1763 size_t freed_memory = 0; | |
1764 for (auto buffer = not_yet_discovered_buffers.begin(); | |
1765 buffer != not_yet_discovered_buffers.end(); ++buffer) { | |
1766 isolate->array_buffer_allocator()->Free(buffer->first, buffer->second); | |
1767 freed_memory += buffer->second; | |
1768 live_buffers.erase(buffer->first); | |
1769 } | |
1770 not_yet_discovered_buffers = live_buffers; | |
1771 return freed_memory; | |
1772 } | |
1773 | |
1774 | |
1775 void Heap::TearDownArrayBuffersHelper( | |
1776 Isolate* isolate, std::map<void*, size_t>& live_buffers, | |
1777 std::map<void*, size_t>& not_yet_discovered_buffers) { | |
1778 for (auto buffer = live_buffers.begin(); buffer != live_buffers.end(); | |
1779 ++buffer) { | |
1780 isolate->array_buffer_allocator()->Free(buffer->first, buffer->second); | |
1781 } | |
1782 live_buffers.clear(); | |
1783 not_yet_discovered_buffers.clear(); | |
1784 } | |
1785 | |
1786 | |
1787 void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data, | 1739 void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data, |
1788 size_t length) { | 1740 size_t length) { |
1789 if (!data) return; | 1741 if (!data) return; |
1790 RegisterNewArrayBufferHelper(live_array_buffers_, data, length); | |
1791 if (in_new_space) { | 1742 if (in_new_space) { |
1792 RegisterNewArrayBufferHelper(live_array_buffers_for_scavenge_, data, | 1743 live_array_buffers_for_scavenge_[data] = length; |
1793 length); | 1744 } else { |
1745 live_array_buffers_[data] = length; | |
1794 } | 1746 } |
1795 reinterpret_cast<v8::Isolate*>(isolate_) | 1747 reinterpret_cast<v8::Isolate*>(isolate_) |
1796 ->AdjustAmountOfExternalAllocatedMemory(length); | 1748 ->AdjustAmountOfExternalAllocatedMemory(length); |
1797 } | 1749 } |
1798 | 1750 |
1799 | 1751 |
1800 void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) { | 1752 void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) { |
Michael Lippautz (2015/08/28 14:25:30): We can simplify the main part a lot: std::map<voi
fedor.indutny (2015/08/28 20:49:41): Acknowledged.
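The suggestion above is cut off in this view; judging from the new version of Heap::UnregisterArrayBuffer shown below, it appears to be about selecting the target maps through pointers once rather than duplicating the erase logic in a per-space helper. A minimal, self-contained sketch of that pattern, assuming that reading (the static maps stand in for the Heap members and are illustrative only, not V8 code):

    #include <cstddef>
    #include <map>

    // Stand-ins for the Heap members referenced in the diff; illustrative only.
    static std::map<void*, std::size_t> live_array_buffers_;
    static std::map<void*, std::size_t> live_array_buffers_for_scavenge_;
    static std::map<void*, std::size_t> not_yet_discovered_array_buffers_;
    static std::map<void*, std::size_t> not_yet_discovered_array_buffers_for_scavenge_;

    void UnregisterArrayBuffer(bool in_new_space, void* data) {
      if (!data) return;
      // Pick the two target maps once, then run a single erase sequence.
      std::map<void*, std::size_t>* live_buffers =
          in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
      std::map<void*, std::size_t>* not_yet_discovered_buffers =
          in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
                       : &not_yet_discovered_array_buffers_;
      live_buffers->erase(data);
      not_yet_discovered_buffers->erase(data);
    }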
1801 if (!data) return; | 1753 if (!data) return; |
1802 UnregisterArrayBufferHelper(live_array_buffers_, | 1754 |
1803 not_yet_discovered_array_buffers_, data); | 1755 std::map<void*, size_t>* live_buffers; |
1756 std::map<void*, size_t>* not_yet_discovered_buffers; | |
1804 if (in_new_space) { | 1757 if (in_new_space) { |
1805 UnregisterArrayBufferHelper(live_array_buffers_for_scavenge_, | 1758 live_buffers = &live_array_buffers_for_scavenge_; |
1806 not_yet_discovered_array_buffers_for_scavenge_, | 1759 not_yet_discovered_buffers = |
1807 data); | 1760 &not_yet_discovered_array_buffers_for_scavenge_; |
1761 not_yet_discovered_array_buffers_.erase(data); | |
Michael Lippautz (2015/08/28 14:25:30): This .erase() will not be part of the method as w
fedor.indutny (2015/08/28 20:49:41): Acknowledged.
1762 } else { | |
1763 live_buffers = &live_array_buffers_; | |
1764 not_yet_discovered_buffers = &not_yet_discovered_array_buffers_; | |
1808 } | 1765 } |
1766 | |
1767 DCHECK(live_buffers->count(data) > 0); | |
1768 live_buffers->erase(data); | |
1769 not_yet_discovered_buffers->erase(data); | |
1809 } | 1770 } |
1810 | 1771 |
1811 | 1772 |
1812 void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) { | 1773 void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) { |
1813 // ArrayBuffer might be in the middle of being constructed. | 1774 // ArrayBuffer might be in the middle of being constructed. |
1814 if (data == undefined_value()) return; | 1775 if (data == undefined_value()) return; |
1815 RegisterLiveArrayBufferHelper( | 1776 if (from_scavenge) { |
fedor.indutny (2015/08/28 20:49:41): This one will need to become unconditional.
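The remark above is brief, so the following is only one plausible reading: making the erase in RegisterLiveArrayBuffer unconditional would mean removing a discovered backing store from both not-yet-discovered sets regardless of the from_scavenge flag. A hedged sketch under that assumption (stand-in maps, not V8 code):

    #include <cstddef>
    #include <map>

    // Stand-ins for the Heap members; illustrative only.
    static std::map<void*, std::size_t> not_yet_discovered_array_buffers_;
    static std::map<void*, std::size_t> not_yet_discovered_array_buffers_for_scavenge_;

    // Assumed "unconditional" variant: whichever GC pass discovers the buffer,
    // it is erased from both not-yet-discovered sets, so from_scavenge no
    // longer selects a map here.
    void RegisterLiveArrayBuffer(void* data) {
      not_yet_discovered_array_buffers_for_scavenge_.erase(data);
      not_yet_discovered_array_buffers_.erase(data);
    }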
1816 from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_ | 1777 not_yet_discovered_array_buffers_for_scavenge_.erase(data); |
1817 : not_yet_discovered_array_buffers_, | 1778 } else { |
1818 data); | 1779 not_yet_discovered_array_buffers_.erase(data); |
1780 } | |
1819 } | 1781 } |
1820 | 1782 |
1821 | 1783 |
1822 void Heap::FreeDeadArrayBuffers(bool from_scavenge) { | 1784 void Heap::FreeDeadArrayBuffers(bool from_scavenge) { |
Michael Lippautz (2015/08/28 14:25:30): Since we keep the sets disjoint, we only need to v
fedor.indutny (2015/08/28 20:49:41): Acknowledged.
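The comment above is truncated, but it appears to concern the invariant that live_array_buffers_ and live_array_buffers_for_scavenge_ stay disjoint, which is why FreeDeadArrayBuffers only has to walk a single not-yet-discovered set per pass. A small, self-contained toy model of that invariant (assumed reading; the names echo the Heap members, but the program is illustrative, not V8 code):

    #include <cstddef>
    #include <cstdio>
    #include <map>

    // Toy model: a backing store is tracked either in the new-space map or in
    // the old-space map, never in both.
    static std::map<void*, std::size_t> live_buffers;               // old space
    static std::map<void*, std::size_t> live_buffers_for_scavenge;  // new space

    void RegisterNew(bool in_new_space, void* data, std::size_t length) {
      if (in_new_space) {
        live_buffers_for_scavenge[data] = length;
      } else {
        live_buffers[data] = length;
      }
    }

    void Promote(void* data) {
      // Promotion moves the entry between the maps, preserving disjointness.
      live_buffers[data] = live_buffers_for_scavenge[data];
      live_buffers_for_scavenge.erase(data);
    }

    int main() {
      char a, b;
      RegisterNew(true, &a, 16);
      RegisterNew(false, &b, 32);
      Promote(&a);
      // After promotion, &a is tracked only in the old-space map, so a scavenge
      // pass that walks live_buffers_for_scavenge will not see (or free) it.
      std::printf("scavenge set: %zu entries, old set: %zu entries\n",
                  live_buffers_for_scavenge.size(), live_buffers.size());
      return 0;
    }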
1785 std::map<void*, size_t> not_yet_discovered_buffers; | |
1823 if (from_scavenge) { | 1786 if (from_scavenge) { |
1824 for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) { | 1787 not_yet_discovered_buffers = not_yet_discovered_array_buffers_for_scavenge_; |
1788 } else { | |
1789 not_yet_discovered_buffers = not_yet_discovered_array_buffers_; | |
1790 } | |
1791 | |
1792 size_t freed_memory = 0; | |
1793 for (auto& buffer : not_yet_discovered_buffers) { | |
1794 isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second); | |
1795 freed_memory += buffer.second; | |
1796 | |
1797 // Scavenge can't happen during evacuation, so we only need to update | |
1798 // live_array_buffers_for_scavenge_. | |
1799 // not_yet_discovered_array_buffers_for_scavenge_ will be reset before | |
1800 // the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace. | |
1801 live_array_buffers_for_scavenge_.erase(buffer.first); | |
1802 | |
1803 if (from_scavenge) { | |
1825 not_yet_discovered_array_buffers_.erase(buffer.first); | 1804 not_yet_discovered_array_buffers_.erase(buffer.first); |
1805 } else { | |
1826 live_array_buffers_.erase(buffer.first); | 1806 live_array_buffers_.erase(buffer.first); |
1827 } | 1807 } |
1808 } | |
1809 | |
1810 if (from_scavenge) { | |
1811 not_yet_discovered_array_buffers_for_scavenge_ = | |
1812 live_array_buffers_for_scavenge_; | |
1828 } else { | 1813 } else { |
1829 for (auto& buffer : not_yet_discovered_array_buffers_) { | 1814 not_yet_discovered_array_buffers_ = live_array_buffers_; |
1830 // Scavenge can't happen during evacuation, so we only need to update | 1815 not_yet_discovered_array_buffers_.insert( |
1831 // live_array_buffers_for_scavenge_. | 1816 live_array_buffers_for_scavenge_.begin(), |
1832 // not_yet_discovered_array_buffers_for_scavenge_ will be reset before | 1817 live_array_buffers_for_scavenge_.end()); |
1833 // the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace. | |
1834 live_array_buffers_for_scavenge_.erase(buffer.first); | |
1835 } | |
1836 } | 1818 } |
1837 size_t freed_memory = FreeDeadArrayBuffersHelper( | 1819 |
1838 isolate_, | |
1839 from_scavenge ? live_array_buffers_for_scavenge_ : live_array_buffers_, | |
1840 from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_ | |
1841 : not_yet_discovered_array_buffers_); | |
1842 if (freed_memory) { | 1820 if (freed_memory) { |
1843 reinterpret_cast<v8::Isolate*>(isolate_) | 1821 reinterpret_cast<v8::Isolate*>(isolate_) |
1844 ->AdjustAmountOfExternalAllocatedMemory( | 1822 ->AdjustAmountOfExternalAllocatedMemory( |
1845 -static_cast<int64_t>(freed_memory)); | 1823 -static_cast<int64_t>(freed_memory)); |
1846 } | 1824 } |
1847 } | 1825 } |
1848 | 1826 |
1849 | 1827 |
1850 void Heap::TearDownArrayBuffers() { | 1828 void Heap::TearDownArrayBuffers() { |
Michael Lippautz (2015/08/28 14:25:30): nit: While practically not relevant, we should sti
fedor.indutny (2015/08/28 20:49:41): Acknowledged.
1851 TearDownArrayBuffersHelper(isolate_, live_array_buffers_, | 1829 for (auto& buffer : live_array_buffers_) { |
1852 not_yet_discovered_array_buffers_); | 1830 isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second); |
1831 } | |
1832 for (auto& buffer : live_array_buffers_for_scavenge_) { | |
1833 isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second); | |
1834 } | |
1835 live_array_buffers_.clear(); | |
1836 live_array_buffers_for_scavenge_.clear(); | |
1837 not_yet_discovered_array_buffers_.clear(); | |
1838 not_yet_discovered_array_buffers_for_scavenge_.clear(); | |
1853 } | 1839 } |
1854 | 1840 |
1855 | 1841 |
1856 void Heap::PrepareArrayBufferDiscoveryInNewSpace() { | 1842 void Heap::PrepareArrayBufferDiscoveryInNewSpace() { |
1857 not_yet_discovered_array_buffers_for_scavenge_ = | 1843 not_yet_discovered_array_buffers_for_scavenge_ = |
1858 live_array_buffers_for_scavenge_; | 1844 live_array_buffers_for_scavenge_; |
1859 } | 1845 } |
1860 | 1846 |
1861 | 1847 |
1862 void Heap::PromoteArrayBuffer(Object* obj) { | 1848 void Heap::PromoteArrayBuffer(Object* obj) { |
1863 JSArrayBuffer* buffer = JSArrayBuffer::cast(obj); | 1849 JSArrayBuffer* buffer = JSArrayBuffer::cast(obj); |
1864 if (buffer->is_external()) return; | 1850 if (buffer->is_external()) return; |
1865 void* data = buffer->backing_store(); | 1851 void* data = buffer->backing_store(); |
1866 if (!data) return; | 1852 if (!data) return; |
1867 // ArrayBuffer might be in the middle of being constructed. | 1853 // ArrayBuffer might be in the middle of being constructed. |
1868 if (data == undefined_value()) return; | 1854 if (data == undefined_value()) return; |
1869 DCHECK(live_array_buffers_for_scavenge_.count(data) > 0); | 1855 DCHECK(live_array_buffers_for_scavenge_.count(data) > 0); |
1870 DCHECK(live_array_buffers_.count(data) > 0); | 1856 live_array_buffers_[data] = live_array_buffers_for_scavenge_[data]; |
1871 live_array_buffers_for_scavenge_.erase(data); | 1857 live_array_buffers_for_scavenge_.erase(data); |
1872 not_yet_discovered_array_buffers_for_scavenge_.erase(data); | 1858 not_yet_discovered_array_buffers_for_scavenge_.erase(data); |
1873 } | 1859 } |
1874 | 1860 |
1875 | 1861 |
1876 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { | 1862 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { |
1877 Object* allocation_site_obj = | 1863 Object* allocation_site_obj = |
1878 VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer); | 1864 VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer); |
1879 set_allocation_sites_list(allocation_site_obj); | 1865 set_allocation_sites_list(allocation_site_obj); |
1880 } | 1866 } |
(...skipping 4894 matching lines...)
6775 *object_type = "CODE_TYPE"; \ | 6761 *object_type = "CODE_TYPE"; \ |
6776 *object_sub_type = "CODE_AGE/" #name; \ | 6762 *object_sub_type = "CODE_AGE/" #name; \ |
6777 return true; | 6763 return true; |
6778 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6764 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) |
6779 #undef COMPARE_AND_RETURN_NAME | 6765 #undef COMPARE_AND_RETURN_NAME |
6780 } | 6766 } |
6781 return false; | 6767 return false; |
6782 } | 6768 } |
6783 } // namespace internal | 6769 } // namespace internal |
6784 } // namespace v8 | 6770 } // namespace v8 |