| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1692 matching lines...) |
| 1703 if (external_string_table_.old_space_strings_.length() > 0) { | 1703 if (external_string_table_.old_space_strings_.length() > 0) { |
| 1704 Object** start = &external_string_table_.old_space_strings_[0]; | 1704 Object** start = &external_string_table_.old_space_strings_[0]; |
| 1705 Object** end = start + external_string_table_.old_space_strings_.length(); | 1705 Object** end = start + external_string_table_.old_space_strings_.length(); |
| 1706 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1706 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
| 1707 } | 1707 } |
| 1708 | 1708 |
| 1709 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1709 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
| 1710 } | 1710 } |
| 1711 | 1711 |
| 1712 | 1712 |
| 1713 template <class T> | |
| 1714 struct WeakListVisitor; | |
| 1715 | |
| 1716 | |
| 1717 template <class T> | |
| 1718 static Object* VisitWeakList(Heap* heap, | |
| 1719 Object* list, | |
| 1720 WeakObjectRetainer* retainer, | |
| 1721 bool record_slots) { | |
| 1722 Object* undefined = heap->undefined_value(); | |
| 1723 Object* head = undefined; | |
| 1724 T* tail = NULL; | |
| 1725 MarkCompactCollector* collector = heap->mark_compact_collector(); | |
| 1726 while (list != undefined) { | |
| 1727 // Check whether to keep the candidate in the list. | |
| 1728 T* candidate = reinterpret_cast<T*>(list); | |
| 1729 Object* retained = retainer->RetainAs(list); | |
| 1730 if (retained != NULL) { | |
| 1731 if (head == undefined) { | |
| 1732 // First element in the list. | |
| 1733 head = retained; | |
| 1734 } else { | |
| 1735 // Subsequent elements in the list. | |
| 1736 ASSERT(tail != NULL); | |
| 1737 WeakListVisitor<T>::SetWeakNext(tail, retained); | |
| 1738 if (record_slots) { | |
| 1739 Object** next_slot = | |
| 1740 HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset()); | |
| 1741 collector->RecordSlot(next_slot, next_slot, retained); | |
| 1742 } | |
| 1743 } | |
| 1744 // Retained object is new tail. | |
| 1745 ASSERT(!retained->IsUndefined()); | |
| 1746 candidate = reinterpret_cast<T*>(retained); | |
| 1747 tail = candidate; | |
| 1748 | |
| 1749 | |
| 1750 // tail is a live object, visit it. | |
| 1751 WeakListVisitor<T>::VisitLiveObject( | |
| 1752 heap, tail, retainer, record_slots); | |
| 1753 } else { | |
| 1754 WeakListVisitor<T>::VisitPhantomObject(heap, candidate); | |
| 1755 } | |
| 1756 | |
| 1757 // Move to next element in the list. | |
| 1758 list = WeakListVisitor<T>::WeakNext(candidate); | |
| 1759 } | |
| 1760 | |
| 1761 // Terminate the list if there is one or more elements. | |
| 1762 if (tail != NULL) { | |
| 1763 WeakListVisitor<T>::SetWeakNext(tail, undefined); | |
| 1764 } | |
| 1765 return head; | |
| 1766 } | |
| 1767 | |
| 1768 | |
| 1769 template <class T> | |
| 1770 static void ClearWeakList(Heap* heap, | |
| 1771 Object* list) { | |
| 1772 Object* undefined = heap->undefined_value(); | |
| 1773 while (list != undefined) { | |
| 1774 T* candidate = reinterpret_cast<T*>(list); | |
| 1775 list = WeakListVisitor<T>::WeakNext(candidate); | |
| 1776 WeakListVisitor<T>::SetWeakNext(candidate, undefined); | |
| 1777 } | |
| 1778 } | |
| 1779 | |
| 1780 | |
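For reviewers who have not worked in this part of heap.cc before: VisitWeakList above walks a weak list that is threaded through the objects themselves, asks the retainer whether each element should survive, splices dead elements out of the chain, gives survivors a chance to prune lists of their own, and finally re-terminates the list with undefined. Below is a minimal standalone sketch of the same trait-based pattern, using plain pointers, nullptr as the terminator, and no slot recording; every name in it is illustrative and is not V8 API.

#include <cassert>
#include <cstdio>

// Common base so a single retainer can judge any element type.
struct Weakly { bool alive; };

// Stand-in for V8's WeakObjectRetainer: decides whether an element survives.
struct Retainer {
  // Returns the object if it should stay on the list, nullptr to drop it.
  virtual Weakly* RetainAs(Weakly* object) = 0;
  virtual ~Retainer() {}
};

// Per-type traits, analogous to the WeakListVisitor<T> specializations:
// how to read and write the weak "next" link, plus hooks for live and
// dead elements encountered during pruning.
template <class T> struct WeakListTraits;

template <class T>
T* VisitWeakList(T* list, Retainer* retainer) {
  T* head = nullptr;
  T* tail = nullptr;
  while (list != nullptr) {
    T* retained = static_cast<T*>(retainer->RetainAs(list));
    T* next = WeakListTraits<T>::WeakNext(list);
    if (retained != nullptr) {
      if (head == nullptr) {
        head = retained;                                 // first survivor
      } else {
        assert(tail != nullptr);
        WeakListTraits<T>::SetWeakNext(tail, retained);  // splice over the dead
      }
      tail = retained;
      WeakListTraits<T>::VisitLiveObject(tail, retainer);
    } else {
      WeakListTraits<T>::VisitPhantomObject(list);       // last chance to clean up
    }
    list = next;
  }
  if (tail != nullptr) WeakListTraits<T>::SetWeakNext(tail, nullptr);
  return head;  // possibly a new head if the old one died
}

// A trivial element type with an intrusive weak link.
struct Node : Weakly {
  int id;
  Node* weak_next;
};

template <> struct WeakListTraits<Node> {
  static void SetWeakNext(Node* n, Node* next) { n->weak_next = next; }
  static Node* WeakNext(Node* n) { return n->weak_next; }
  static void VisitLiveObject(Node*, Retainer*) {}
  static void VisitPhantomObject(Node*) {}
};

struct LivenessRetainer : Retainer {
  Weakly* RetainAs(Weakly* object) override {
    return object->alive ? object : nullptr;
  }
};

int main() {
  Node a, b, c;
  a.alive = true;  a.id = 1; a.weak_next = &b;
  b.alive = false; b.id = 2; b.weak_next = &c;
  c.alive = true;  c.id = 3; c.weak_next = nullptr;
  LivenessRetainer retainer;
  for (Node* n = VisitWeakList<Node>(&a, &retainer); n != nullptr; n = n->weak_next)
    std::printf("%d\n", n->id);  // prints 1 then 3; the dead node 2 was spliced out
  return 0;
}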
| 1781 template<> | |
| 1782 struct WeakListVisitor<JSFunction> { | |
| 1783 static void SetWeakNext(JSFunction* function, Object* next) { | |
| 1784 function->set_next_function_link(next); | |
| 1785 } | |
| 1786 | |
| 1787 static Object* WeakNext(JSFunction* function) { | |
| 1788 return function->next_function_link(); | |
| 1789 } | |
| 1790 | |
| 1791 static int WeakNextOffset() { | |
| 1792 return JSFunction::kNextFunctionLinkOffset; | |
| 1793 } | |
| 1794 | |
| 1795 static void VisitLiveObject(Heap*, JSFunction*, | |
| 1796 WeakObjectRetainer*, bool) { | |
| 1797 } | |
| 1798 | |
| 1799 static void VisitPhantomObject(Heap*, JSFunction*) { | |
| 1800 } | |
| 1801 }; | |
| 1802 | |
| 1803 | |
| 1804 template<> | |
| 1805 struct WeakListVisitor<Code> { | |
| 1806 static void SetWeakNext(Code* code, Object* next) { | |
| 1807 code->set_next_code_link(next); | |
| 1808 } | |
| 1809 | |
| 1810 static Object* WeakNext(Code* code) { | |
| 1811 return code->next_code_link(); | |
| 1812 } | |
| 1813 | |
| 1814 static int WeakNextOffset() { | |
| 1815 return Code::kNextCodeLinkOffset; | |
| 1816 } | |
| 1817 | |
| 1818 static void VisitLiveObject(Heap*, Code*, | |
| 1819 WeakObjectRetainer*, bool) { | |
| 1820 } | |
| 1821 | |
| 1822 static void VisitPhantomObject(Heap*, Code*) { | |
| 1823 } | |
| 1824 }; | |
| 1825 | |
| 1826 | |
| 1827 template<> | |
| 1828 struct WeakListVisitor<Context> { | |
| 1829 static void SetWeakNext(Context* context, Object* next) { | |
| 1830 context->set(Context::NEXT_CONTEXT_LINK, | |
| 1831 next, | |
| 1832 UPDATE_WRITE_BARRIER); | |
| 1833 } | |
| 1834 | |
| 1835 static Object* WeakNext(Context* context) { | |
| 1836 return context->get(Context::NEXT_CONTEXT_LINK); | |
| 1837 } | |
| 1838 | |
| 1839 static void VisitLiveObject(Heap* heap, | |
| 1840 Context* context, | |
| 1841 WeakObjectRetainer* retainer, | |
| 1842 bool record_slots) { | |
| 1843 // Process the three weak lists linked off the context. | |
| 1844 DoWeakList<JSFunction>(heap, context, retainer, record_slots, | |
| 1845 Context::OPTIMIZED_FUNCTIONS_LIST); | |
| 1846 DoWeakList<Code>(heap, context, retainer, record_slots, | |
| 1847 Context::OPTIMIZED_CODE_LIST); | |
| 1848 DoWeakList<Code>(heap, context, retainer, record_slots, | |
| 1849 Context::DEOPTIMIZED_CODE_LIST); | |
| 1850 } | |
| 1851 | |
| 1852 template<class T> | |
| 1853 static void DoWeakList(Heap* heap, | |
| 1854 Context* context, | |
| 1855 WeakObjectRetainer* retainer, | |
| 1856 bool record_slots, | |
| 1857 int index) { | |
| 1858 // Visit the weak list, removing dead intermediate elements. | |
| 1859 Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer, | |
| 1860 record_slots); | |
| 1861 | |
| 1862 // Update the list head. | |
| 1863 context->set(index, list_head, UPDATE_WRITE_BARRIER); | |
| 1864 | |
| 1865 if (record_slots) { | |
| 1866 // Record the updated slot if necessary. | |
| 1867 Object** head_slot = HeapObject::RawField( | |
| 1868 context, FixedArray::SizeFor(index)); | |
| 1869 heap->mark_compact_collector()->RecordSlot( | |
| 1870 head_slot, head_slot, list_head); | |
| 1871 } | |
| 1872 } | |
| 1873 | |
| 1874 static void VisitPhantomObject(Heap* heap, Context* context) { | |
| 1875 ClearWeakList<JSFunction>(heap, | |
| 1876 context->get(Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 1877 ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST)); | |
| 1878 ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST)); | |
| 1879 } | |
| 1880 | |
| 1881 static int WeakNextOffset() { | |
| 1882 return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK); | |
| 1883 } | |
| 1884 }; | |
| 1885 | |
| 1886 | |
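The Context specialization above is where the per-type hooks really matter: a context that survives prunes the three further weak lists hanging off of it (and records the updated head slots when compacting), while a context that died gets its lists unlinked so nothing follows stale links out of it. The JSArrayBuffer specialization below does the same for its list of views. Continuing the illustrative sketch from earlier (same caveat: these names are not V8 API), the nesting looks roughly like this:

// An element that both sits on an outer weak list and owns an inner one,
// loosely analogous to a Context owning its optimized-code lists or a
// JSArrayBuffer owning its list of views.
struct Owner : Weakly {
  Owner* weak_next;   // link in the outer weak list
  Node* first_child;  // head of the owned inner weak list
};

template <> struct WeakListTraits<Owner> {
  static void SetWeakNext(Owner* o, Owner* next) { o->weak_next = next; }
  static Owner* WeakNext(Owner* o) { return o->weak_next; }

  // A surviving owner prunes its inner list with the same machinery.
  static void VisitLiveObject(Owner* o, Retainer* retainer) {
    o->first_child = VisitWeakList<Node>(o->first_child, retainer);
  }

  // A dead owner unlinks its inner list (the ClearWeakList analogue).
  static void VisitPhantomObject(Owner* o) {
    for (Node* n = o->first_child; n != nullptr;) {
      Node* next = n->weak_next;
      n->weak_next = nullptr;
      n = next;
    }
    o->first_child = nullptr;
  }
};

// Usage: prune the outer list; each surviving owner prunes its inner list
// as a side effect of being visited.
//   owners_head = VisitWeakList<Owner>(owners_head, &retainer);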
| 1887 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { | 1713 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { |
| 1888 // We don't record weak slots during marking or scavenges. | 1714 // We don't record weak slots during marking or scavenges. |
| 1889 // Instead we do it once when we complete mark-compact cycle. | 1715 // Instead we do it once when we complete mark-compact cycle. |
| 1890 // Note that write barrier has no effect if we are already in the middle of | 1716 // Note that write barrier has no effect if we are already in the middle of |
| 1891 // compacting mark-sweep cycle and we have to record slots manually. | 1717 // compacting mark-sweep cycle and we have to record slots manually. |
| 1892 bool record_slots = | 1718 bool record_slots = |
| 1893 gc_state() == MARK_COMPACT && | 1719 gc_state() == MARK_COMPACT && |
| 1894 mark_compact_collector()->is_compacting(); | 1720 mark_compact_collector()->is_compacting(); |
| 1895 ProcessArrayBuffers(retainer, record_slots); | 1721 ProcessArrayBuffers(retainer, record_slots); |
| 1896 ProcessNativeContexts(retainer, record_slots); | 1722 ProcessNativeContexts(retainer, record_slots); |
| 1897 // TODO(mvstanton): AllocationSites only need to be processed during | 1723 // TODO(mvstanton): AllocationSites only need to be processed during |
| 1898 // MARK_COMPACT, as they live in old space. Verify and address. | 1724 // MARK_COMPACT, as they live in old space. Verify and address. |
| 1899 ProcessAllocationSites(retainer, record_slots); | 1725 ProcessAllocationSites(retainer, record_slots); |
| 1900 } | 1726 } |
| 1901 | 1727 |
| 1902 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, | 1728 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, |
| 1903 bool record_slots) { | 1729 bool record_slots) { |
| 1904 Object* head = | 1730 Object* head = |
| 1905 VisitWeakList<Context>( | 1731 VisitWeakList<Context>( |
| 1906 this, native_contexts_list(), retainer, record_slots); | 1732 this, native_contexts_list(), retainer, record_slots); |
| 1907 // Update the head of the list of contexts. | 1733 // Update the head of the list of contexts. |
| 1908 native_contexts_list_ = head; | 1734 native_contexts_list_ = head; |
| 1909 } | 1735 } |
| 1910 | 1736 |
| 1911 | 1737 |
| 1912 template<> | |
| 1913 struct WeakListVisitor<JSArrayBufferView> { | |
| 1914 static void SetWeakNext(JSArrayBufferView* obj, Object* next) { | |
| 1915 obj->set_weak_next(next); | |
| 1916 } | |
| 1917 | |
| 1918 static Object* WeakNext(JSArrayBufferView* obj) { | |
| 1919 return obj->weak_next(); | |
| 1920 } | |
| 1921 | |
| 1922 static void VisitLiveObject(Heap*, | |
| 1923 JSArrayBufferView* obj, | |
| 1924 WeakObjectRetainer* retainer, | |
| 1925 bool record_slots) {} | |
| 1926 | |
| 1927 static void VisitPhantomObject(Heap*, JSArrayBufferView*) {} | |
| 1928 | |
| 1929 static int WeakNextOffset() { | |
| 1930 return JSArrayBufferView::kWeakNextOffset; | |
| 1931 } | |
| 1932 }; | |
| 1933 | |
| 1934 | |
| 1935 template<> | |
| 1936 struct WeakListVisitor<JSArrayBuffer> { | |
| 1937 static void SetWeakNext(JSArrayBuffer* obj, Object* next) { | |
| 1938 obj->set_weak_next(next); | |
| 1939 } | |
| 1940 | |
| 1941 static Object* WeakNext(JSArrayBuffer* obj) { | |
| 1942 return obj->weak_next(); | |
| 1943 } | |
| 1944 | |
| 1945 static void VisitLiveObject(Heap* heap, | |
| 1946 JSArrayBuffer* array_buffer, | |
| 1947 WeakObjectRetainer* retainer, | |
| 1948 bool record_slots) { | |
| 1949 Object* typed_array_obj = | |
| 1950 VisitWeakList<JSArrayBufferView>( | |
| 1951 heap, | |
| 1952 array_buffer->weak_first_view(), | |
| 1953 retainer, record_slots); | |
| 1954 array_buffer->set_weak_first_view(typed_array_obj); | |
| 1955 if (typed_array_obj != heap->undefined_value() && record_slots) { | |
| 1956 Object** slot = HeapObject::RawField( | |
| 1957 array_buffer, JSArrayBuffer::kWeakFirstViewOffset); | |
| 1958 heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj); | |
| 1959 } | |
| 1960 } | |
| 1961 | |
| 1962 static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) { | |
| 1963 Runtime::FreeArrayBuffer(heap->isolate(), phantom); | |
| 1964 } | |
| 1965 | |
| 1966 static int WeakNextOffset() { | |
| 1967 return JSArrayBuffer::kWeakNextOffset; | |
| 1968 } | |
| 1969 }; | |
| 1970 | |
| 1971 | |
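One detail worth noting in the JSArrayBuffer specialization above: VisitPhantomObject is where resources living outside the GC heap are released (Runtime::FreeArrayBuffer drops the external backing store), and it runs exactly once, when the buffer falls off the weak list. In the illustrative sketch, an element owning out-of-heap memory would release it from the same hook; ExternalOwner and its malloc'ed backing store below are hypothetical and exist only for illustration.

#include <cstdlib>

// Hypothetical element that owns memory the collector knows nothing about.
struct ExternalOwner : Weakly {
  ExternalOwner* weak_next;
  void* backing_store;  // allocated with std::malloc elsewhere
};

template <> struct WeakListTraits<ExternalOwner> {
  static void SetWeakNext(ExternalOwner* o, ExternalOwner* next) {
    o->weak_next = next;
  }
  static ExternalOwner* WeakNext(ExternalOwner* o) { return o->weak_next; }
  static void VisitLiveObject(ExternalOwner*, Retainer*) {}
  // Runs once, when the element drops off the list: free what the
  // collector cannot reclaim on its own.
  static void VisitPhantomObject(ExternalOwner* o) {
    std::free(o->backing_store);
    o->backing_store = nullptr;
  }
};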
| 1972 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1738 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
| 1973 bool record_slots) { | 1739 bool record_slots) { |
| 1974 Object* array_buffer_obj = | 1740 Object* array_buffer_obj = |
| 1975 VisitWeakList<JSArrayBuffer>(this, | 1741 VisitWeakList<JSArrayBuffer>(this, |
| 1976 array_buffers_list(), | 1742 array_buffers_list(), |
| 1977 retainer, record_slots); | 1743 retainer, record_slots); |
| 1978 set_array_buffers_list(array_buffer_obj); | 1744 set_array_buffers_list(array_buffer_obj); |
| 1979 } | 1745 } |
| 1980 | 1746 |
| 1981 | 1747 |
| 1982 void Heap::TearDownArrayBuffers() { | 1748 void Heap::TearDownArrayBuffers() { |
| 1983 Object* undefined = undefined_value(); | 1749 Object* undefined = undefined_value(); |
| 1984 for (Object* o = array_buffers_list(); o != undefined;) { | 1750 for (Object* o = array_buffers_list(); o != undefined;) { |
| 1985 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); | 1751 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); |
| 1986 Runtime::FreeArrayBuffer(isolate(), buffer); | 1752 Runtime::FreeArrayBuffer(isolate(), buffer); |
| 1987 o = buffer->weak_next(); | 1753 o = buffer->weak_next(); |
| 1988 } | 1754 } |
| 1989 array_buffers_list_ = undefined; | 1755 array_buffers_list_ = undefined; |
| 1990 } | 1756 } |
| 1991 | 1757 |
| 1992 | 1758 |
| 1993 template<> | |
| 1994 struct WeakListVisitor<AllocationSite> { | |
| 1995 static void SetWeakNext(AllocationSite* obj, Object* next) { | |
| 1996 obj->set_weak_next(next); | |
| 1997 } | |
| 1998 | |
| 1999 static Object* WeakNext(AllocationSite* obj) { | |
| 2000 return obj->weak_next(); | |
| 2001 } | |
| 2002 | |
| 2003 static void VisitLiveObject(Heap* heap, | |
| 2004 AllocationSite* site, | |
| 2005 WeakObjectRetainer* retainer, | |
| 2006 bool record_slots) {} | |
| 2007 | |
| 2008 static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {} | |
| 2009 | |
| 2010 static int WeakNextOffset() { | |
| 2011 return AllocationSite::kWeakNextOffset; | |
| 2012 } | |
| 2013 }; | |
| 2014 | |
| 2015 | |
| 2016 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer, | 1759 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer, |
| 2017 bool record_slots) { | 1760 bool record_slots) { |
| 2018 Object* allocation_site_obj = | 1761 Object* allocation_site_obj = |
| 2019 VisitWeakList<AllocationSite>(this, | 1762 VisitWeakList<AllocationSite>(this, |
| 2020 allocation_sites_list(), | 1763 allocation_sites_list(), |
| 2021 retainer, record_slots); | 1764 retainer, record_slots); |
| 2022 set_allocation_sites_list(allocation_site_obj); | 1765 set_allocation_sites_list(allocation_site_obj); |
| 2023 } | 1766 } |
| 2024 | 1767 |
| 2025 | 1768 |
| (...skipping 2130 matching lines...) |
| 4156 code->set_raw_kind_specific_flags2(0); | 3899 code->set_raw_kind_specific_flags2(0); |
| 4157 code->set_is_crankshafted(crankshafted); | 3900 code->set_is_crankshafted(crankshafted); |
| 4158 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); | 3901 code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 4159 code->set_raw_type_feedback_info(undefined_value()); | 3902 code->set_raw_type_feedback_info(undefined_value()); |
| 4160 code->set_next_code_link(undefined_value()); | 3903 code->set_next_code_link(undefined_value()); |
| 4161 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); | 3904 code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER); |
| 4162 code->set_gc_metadata(Smi::FromInt(0)); | 3905 code->set_gc_metadata(Smi::FromInt(0)); |
| 4163 code->set_ic_age(global_ic_age_); | 3906 code->set_ic_age(global_ic_age_); |
| 4164 code->set_prologue_offset(prologue_offset); | 3907 code->set_prologue_offset(prologue_offset); |
| 4165 if (code->kind() == Code::OPTIMIZED_FUNCTION) { | 3908 if (code->kind() == Code::OPTIMIZED_FUNCTION) { |
| 4166 code->set_marked_for_deoptimization(false); | 3909 ASSERT(!code->marked_for_deoptimization()); |
| 3910 } |
| 3911 if (code->is_inline_cache_stub()) { |
| 3912 ASSERT(!code->is_weak_stub()); |
| 3913 ASSERT(!code->is_invalidated_weak_stub()); |
| 4167 } | 3914 } |
| 4168 | 3915 |
| 4169 if (FLAG_enable_ool_constant_pool) { | 3916 if (FLAG_enable_ool_constant_pool) { |
| 4170 desc.origin->PopulateConstantPool(constant_pool); | 3917 desc.origin->PopulateConstantPool(constant_pool); |
| 4171 } | 3918 } |
| 4172 code->set_constant_pool(constant_pool); | 3919 code->set_constant_pool(constant_pool); |
| 4173 | 3920 |
| 4174 #ifdef ENABLE_DEBUGGER_SUPPORT | 3921 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 4175 if (code->kind() == Code::FUNCTION) { | 3922 if (code->kind() == Code::FUNCTION) { |
| 4176 code->set_has_debug_break_slots( | 3923 code->set_has_debug_break_slots( |
| (...skipping 3432 matching lines...) |
| 7609 static_cast<int>(object_sizes_last_time_[index])); | 7356 static_cast<int>(object_sizes_last_time_[index])); |
| 7610 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7357 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7611 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7358 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7612 | 7359 |
| 7613 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7360 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7614 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7361 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7615 ClearObjectStats(); | 7362 ClearObjectStats(); |
| 7616 } | 7363 } |
| 7617 | 7364 |
| 7618 } } // namespace v8::internal | 7365 } } // namespace v8::internal |