OLD | NEW |
---|---|
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
103 | 103 |
104 SweepLargeObjectSpace(); | 104 SweepLargeObjectSpace(); |
105 | 105 |
106 SweepSpaces(); | 106 SweepSpaces(); |
107 PcToCodeCache::FlushPcToCodeCache(); | 107 PcToCodeCache::FlushPcToCodeCache(); |
108 | 108 |
109 Finish(); | 109 Finish(); |
110 | 110 |
111 // Check that we swept all marked objects and | 111 // Check that we swept all marked objects and |
112 // null out the GC tracer. | 112 // null out the GC tracer. |
113 ASSERT(tracer_->marked_count() == 0); | 113 // TODO(gc): does not work with conservative sweeping. |
114 // ASSERT(tracer_->marked_count() == 0); | |
114 tracer_ = NULL; | 115 tracer_ = NULL; |
115 } | 116 } |
116 | 117 |
117 | 118 |
118 #ifdef DEBUG | 119 #ifdef DEBUG |
119 static void VerifyMarkbitsAreClean(PagedSpace* space) { | 120 static void VerifyMarkbitsAreClean(PagedSpace* space) { |
120 PageIterator it(space, PageIterator::PAGES_IN_USE); | 121 PageIterator it(space, PageIterator::PAGES_IN_USE); |
121 | 122 |
122 while (it.has_next()) { | 123 while (it.has_next()) { |
123 Page* p = it.next(); | 124 Page* p = it.next(); |
124 ASSERT(p->markbits()->IsClean()); | 125 ASSERT(p->markbits()->IsClean()); |
125 } | 126 } |
126 } | 127 } |
127 | 128 |
128 static void VerifyMarkbitsAreClean() { | 129 static void VerifyMarkbitsAreClean() { |
129 VerifyMarkbitsAreClean(Heap::old_pointer_space()); | 130 VerifyMarkbitsAreClean(Heap::old_pointer_space()); |
130 VerifyMarkbitsAreClean(Heap::old_data_space()); | 131 VerifyMarkbitsAreClean(Heap::old_data_space()); |
131 VerifyMarkbitsAreClean(Heap::code_space()); | 132 VerifyMarkbitsAreClean(Heap::code_space()); |
132 VerifyMarkbitsAreClean(Heap::cell_space()); | 133 VerifyMarkbitsAreClean(Heap::cell_space()); |
133 VerifyMarkbitsAreClean(Heap::map_space()); | 134 VerifyMarkbitsAreClean(Heap::map_space()); |
134 } | 135 } |
135 #endif | 136 #endif |
136 | 137 |
137 | 138 |
139 static void ClearMarkbits(PagedSpace* space) { | |
140 PageIterator it(space, PageIterator::PAGES_IN_USE); | |
141 | |
142 while (it.has_next()) { | |
143 Page* p = it.next(); | |
144 p->markbits()->Clear(); | |
145 } | |
146 } | |
147 | |
148 static void ClearMarkbits() { | |
149 // We are sweeping code and map spaces precisely so clearing is not required. |
Erik Corry
2011/01/19 13:46:48
presisely -> precisely,
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
| |
150 ClearMarkbits(Heap::old_pointer_space()); | |
151 ClearMarkbits(Heap::old_data_space()); | |
152 ClearMarkbits(Heap::cell_space()); | |
153 } | |
154 | |
155 | |
138 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 156 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
139 FLAG_flush_code = false; | 157 FLAG_flush_code = false; |
140 FLAG_always_compact = false; | 158 FLAG_always_compact = false; |
141 FLAG_never_compact = true; | 159 FLAG_never_compact = true; |
142 | 160 |
143 // Rather than passing the tracer around we stash it in a static member | 161 // Rather than passing the tracer around we stash it in a static member |
144 // variable. | 162 // variable. |
145 tracer_ = tracer; | 163 tracer_ = tracer; |
146 | 164 |
147 #ifdef DEBUG | 165 #ifdef DEBUG |
(...skipping 16 matching lines...) Expand all Loading... | |
164 space != NULL; space = spaces.next()) { | 182 space != NULL; space = spaces.next()) { |
165 space->PrepareForMarkCompact(compacting_collection_); | 183 space->PrepareForMarkCompact(compacting_collection_); |
166 } | 184 } |
167 | 185 |
168 Address new_space_top = Heap::new_space()->top(); | 186 Address new_space_top = Heap::new_space()->top(); |
169 Address new_space_bottom = Heap::new_space()->bottom(); | 187 Address new_space_bottom = Heap::new_space()->bottom(); |
170 | 188 |
171 Marking::ClearRange(new_space_bottom, | 189 Marking::ClearRange(new_space_bottom, |
172 static_cast<int>(new_space_top - new_space_bottom)); | 190 static_cast<int>(new_space_top - new_space_bottom)); |
173 | 191 |
192 ClearMarkbits(); | |
193 | |
174 #ifdef DEBUG | 194 #ifdef DEBUG |
175 VerifyMarkbitsAreClean(); | 195 VerifyMarkbitsAreClean(); |
176 | 196 |
177 live_bytes_ = 0; | 197 live_bytes_ = 0; |
178 live_young_objects_size_ = 0; | 198 live_young_objects_size_ = 0; |
179 live_old_pointer_objects_size_ = 0; | 199 live_old_pointer_objects_size_ = 0; |
180 live_old_data_objects_size_ = 0; | 200 live_old_data_objects_size_ = 0; |
181 live_code_objects_size_ = 0; | 201 live_code_objects_size_ = 0; |
182 live_map_objects_size_ = 0; | 202 live_map_objects_size_ = 0; |
183 live_cell_objects_size_ = 0; | 203 live_cell_objects_size_ = 0; |
(...skipping 1527 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1711 // Update pointers from external string table. | 1731 // Update pointers from external string table. |
1712 Heap::UpdateNewSpaceReferencesInExternalStringTable( | 1732 Heap::UpdateNewSpaceReferencesInExternalStringTable( |
1713 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1733 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
1714 | 1734 |
1715 // All pointers were updated. Update auxiliary allocation info. | 1735 // All pointers were updated. Update auxiliary allocation info. |
1716 Heap::IncrementYoungSurvivorsCounter(survivors_size); | 1736 Heap::IncrementYoungSurvivorsCounter(survivors_size); |
1717 space->set_age_mark(space->top()); | 1737 space->set_age_mark(space->top()); |
1718 } | 1738 } |
1719 | 1739 |
1720 | 1740 |
1721 void MarkCompactCollector::SweepSpace(PagedSpace* space) { | 1741 INLINE(static uint32_t SweepFree(PagedSpace* space, |
1742 Page* p, | |
1743 uint32_t free_start, | |
1744 uint32_t region_end, | |
1745 uint32_t* cells)); | |
1746 | |
1747 | |
1748 static uint32_t SweepFree(PagedSpace* space, | |
1749 Page* p, | |
1750 uint32_t free_start, | |
1751 uint32_t region_end, | |
1752 uint32_t* cells) { | |
1753 uint32_t free_cell = Page::MarkbitsBitmap::Index2Cell(free_start); | |
1754 ASSERT(cells[free_cell] == 0); | |
1755 while (free_cell < region_end && cells[free_cell] == 0) { | |
1756 free_cell++; | |
1757 } | |
1758 | |
1759 if (free_cell >= region_end) { | |
1760 return free_cell; | |
1761 } | |
1762 | |
1763 uint32_t free_end = Page::MarkbitsBitmap::Cell2Index(free_cell); | |
1764 space->DeallocateBlock(p->Markbit2Address(free_start), | |
1765 (free_end - free_start) << kPointerSizeLog2, | |
1766 true); | |
1767 | |
1768 return free_cell; | |
1769 } | |
1770 | |
1771 | |
1772 INLINE(static uint32_t NextCandidate(uint32_t cell, | |
1773 uint32_t last_cell, | |
1774 uint32_t* cells)); | |
1775 | |
1776 | |
1777 static uint32_t NextCandidate(uint32_t cell, | |
1778 uint32_t last_cell, | |
1779 uint32_t* cells) { | |
1780 do { | |
1781 cell++; | |
1782 } while (cell < last_cell && cells[cell] != 0); | |
1783 return cell; | |
1784 } | |
1785 | |
1786 | |
1787 INLINE(static int SizeOfPreviousObject(Page* p, | |
1788 uint32_t cell, | |
1789 uint32_t* cells)); | |
1790 | |
1791 | |
1792 static int SizeOfPreviousObject(Page* p, | |
1793 uint32_t cell, | |
1794 uint32_t* cells) { | |
1795 ASSERT(cells[cell] == 0); | |
1796 if (cells[cell - 1] == 0) return 0; | |
1797 | |
1798 int clz = __builtin_clz(cells[cell - 1]) + 1; | |
Erik Corry
2011/01/19 13:46:48
Variable should be called leading_zeros
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
| |
1799 Address addr = | |
1800 p->Markbit2Address(Page::MarkbitsBitmap::Cell2Index(cell) - clz); | |
1801 HeapObject* obj = HeapObject::FromAddress(addr); | |
1802 ASSERT(obj->map()->IsMap()); | |
1803 return (obj->Size() >> kPointerSizeLog2) - clz; | |
1804 } | |
1805 | |
1806 | |
1807 static void SweepConservatively(PagedSpace* space, | |
1808 Page* p, | |
1809 Address* last_free_start) { | |
1810 typedef Page::MarkbitsBitmap::CellType CellType; | |
1811 Page::MarkbitsBitmap* markbits = p->markbits(); | |
1812 CellType* cells = markbits->cells(); | |
1813 | |
1814 uint32_t last_cell = | |
1815 Page::MarkbitsBitmap::Index2Cell( | |
1816 Page::MarkbitsBitmap::CellAlignIndex( | |
1817 p->Address2Markbit(p->AllocationTop()))); | |
1818 | |
1819 uint32_t cell = Page::kFirstUsedCell; | |
Erik Corry
2011/01/19 13:46:48
See above.
| |
1820 uint32_t poluted_cell = Page::kFirstUsedCell; | |
Erik Corry
2011/01/19 13:46:48
poluted -> polluted
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
| |
1821 if (cells[cell] == 0) { | |
1822 poluted_cell = SweepFree(space, | |
1823 p, | |
1824 p->Address2Markbit(p->ObjectAreaStart()), | |
1825 last_cell, | |
1826 cells); | |
1827 | |
1828 if (poluted_cell >= last_cell) { | |
1829 // All cells are free. | |
1830 *last_free_start = p->ObjectAreaStart(); | |
1831 return; | |
1832 } | |
1833 } | |
1834 | |
1835 p->ClearFlag(Page::IS_CONTINIOUS); | |
1836 | |
1837 ASSERT(cells[poluted_cell] != 0); | |
1838 for (cell = NextCandidate(poluted_cell, last_cell, cells); | |
1839 cell < last_cell; | |
1840 cell = NextCandidate(poluted_cell, last_cell, cells)) { | |
1841 ASSERT(cells[cell] == 0); | |
1842 | |
1843 int size = SizeOfPreviousObject(p, cell, cells); | |
1844 if (size <= 0) { | |
1845 poluted_cell = SweepFree(space, | |
1846 p, | |
1847 Page::MarkbitsBitmap::Cell2Index(cell), | |
1848 last_cell, | |
1849 cells); | |
1850 if (poluted_cell >= last_cell) { | |
1851 // This free region is the last on the page. | |
1852 *last_free_start = p->Markbit2Address( | |
1853 Page::MarkbitsBitmap::Cell2Index(cell)); | |
1854 return; | |
1855 } | |
1856 } else { | |
1857 // Skip cells covered by this object. | |
1858 poluted_cell = cell + | |
1859 Page::MarkbitsBitmap::Index2Cell(size - 1); | |
1860 } | |
1861 } | |
1862 } | |
1863 | |
1864 | |
1865 static void SweepPrecisely(PagedSpace* space, Page* p, Address* last_free_start) { | |
Erik Corry
2011/01/19 13:46:48
Lint?
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
| |
1866 bool is_previous_alive = true; | |
1867 Address free_start = NULL; | |
1868 HeapObject* object; | |
1869 | |
1870 for (Address current = p->ObjectAreaStart(); | |
1871 current < p->AllocationTop(); | |
1872 current += object->Size()) { | |
1873 object = HeapObject::FromAddress(current); | |
1874 if (Marking::IsMarked(object)) { | |
1875 Marking::ClearMark(object); | |
1876 MarkCompactCollector::tracer()->decrement_marked_count(); | |
1877 | |
1878 if (!is_previous_alive) { // Transition from free to live. | |
1879 space->DeallocateBlock(free_start, | |
1880 static_cast<int>(current - free_start), | |
1881 true); | |
1882 is_previous_alive = true; | |
1883 } | |
1884 } else { | |
1885 MarkCompactCollector::ReportDeleteIfNeeded(object); | |
1886 if (is_previous_alive) { // Transition from live to free. | |
1887 free_start = current; | |
1888 is_previous_alive = false; | |
1889 } | |
1890 } | |
1891 } | |
1892 | |
1893 if (!is_previous_alive) *last_free_start = free_start; | |
1894 } | |
1895 | |
1896 | |
1897 void MarkCompactCollector::SweepSpace(PagedSpace* space, | |
1898 SweeperType sweeper) { | |
1722 PageIterator it(space, PageIterator::PAGES_IN_USE); | 1899 PageIterator it(space, PageIterator::PAGES_IN_USE); |
1723 | 1900 |
1724 // During sweeping of paged space we are trying to find longest sequences | 1901 // During sweeping of paged space we are trying to find longest sequences |
1725 // of pages without live objects and free them (instead of putting them on | 1902 // of pages without live objects and free them (instead of putting them on |
1726 // the free list). | 1903 // the free list). |
1727 | 1904 |
1728 // Page preceding current. | 1905 // Page preceding current. |
1729 Page* prev = Page::FromAddress(NULL); | 1906 Page* prev = Page::FromAddress(NULL); |
1730 | 1907 |
1731 // First empty page in a sequence. | 1908 // First empty page in a sequence. |
1732 Page* first_empty_page = Page::FromAddress(NULL); | 1909 Page* first_empty_page = Page::FromAddress(NULL); |
1733 | 1910 |
1734 // Page preceding first empty page. | 1911 // Page preceding first empty page. |
1735 Page* prec_first_empty_page = Page::FromAddress(NULL); | 1912 Page* prec_first_empty_page = Page::FromAddress(NULL); |
1736 | 1913 |
1737 // If last used page of space ends with a sequence of dead objects | 1914 // If last used page of space ends with a sequence of dead objects |
1738 // we can adjust allocation top instead of putting this free area into | 1915 // we can adjust allocation top instead of putting this free area into |
1739 // the free list. Thus during sweeping we keep track of such areas | 1916 // the free list. Thus during sweeping we keep track of such areas |
1740 // and defer their deallocation until the sweeping of the next page | 1917 // and defer their deallocation until the sweeping of the next page |
1741 // is done: if one of the next pages contains live objects we have | 1918 // is done: if one of the next pages contains live objects we have |
1742 // to put such area into the free list. | 1919 // to put such area into the free list. |
1743 Address last_free_start = NULL; | 1920 Address last_free_start = NULL; |
1744 int last_free_size = 0; | 1921 int last_free_size = 0; |
1745 | 1922 |
1746 while (it.has_next()) { | 1923 while (it.has_next()) { |
1747 Page* p = it.next(); | 1924 Page* p = it.next(); |
1748 | 1925 |
1749 bool is_previous_alive = true; | 1926 Address free_start = p->AllocationTop(); |
1750 Address free_start = NULL; | |
1751 HeapObject* object; | |
1752 | 1927 |
1753 for (Address current = p->ObjectAreaStart(); | 1928 if (sweeper == CONSERVATIVE) { |
1754 current < p->AllocationTop(); | 1929 SweepConservatively(space, p, &free_start); |
1755 current += object->Size()) { | 1930 p->set_linearity_boundary(free_start); |
1756 object = HeapObject::FromAddress(current); | 1931 } else { |
1757 if (Marking::IsMarked(object)) { | 1932 ASSERT(sweeper == PRECISE); |
1758 Marking::ClearMark(object); | 1933 SweepPrecisely(space, p, &free_start); |
1759 MarkCompactCollector::tracer()->decrement_marked_count(); | |
1760 | |
1761 if (!is_previous_alive) { // Transition from free to live. | |
1762 space->DeallocateBlock(free_start, | |
1763 static_cast<int>(current - free_start), | |
1764 true); | |
1765 is_previous_alive = true; | |
1766 } | |
1767 } else { | |
1768 MarkCompactCollector::ReportDeleteIfNeeded(object); | |
1769 if (is_previous_alive) { // Transition from live to free. | |
1770 free_start = current; | |
1771 is_previous_alive = false; | |
1772 } | |
1773 } | |
1774 // The object is now unmarked for the call to Size() at the top of the | |
1775 // loop. | |
1776 } | 1934 } |
1777 | 1935 |
1778 bool page_is_empty = (p->ObjectAreaStart() == p->AllocationTop()) | 1936 bool page_is_empty = (p->ObjectAreaStart() == free_start); |
1779 || (!is_previous_alive && free_start == p->ObjectAreaStart()); | 1937 bool is_previous_alive = (free_start == p->AllocationTop()); |
1938 | |
1939 ASSERT(free_start <= p->AllocationTop()); | |
1780 | 1940 |
1781 if (page_is_empty) { | 1941 if (page_is_empty) { |
1782 // This page is empty. Check whether we are in the middle of | 1942 // This page is empty. Check whether we are in the middle of |
1783 // sequence of empty pages and start one if not. | 1943 // sequence of empty pages and start one if not. |
1784 if (!first_empty_page->is_valid()) { | 1944 if (!first_empty_page->is_valid()) { |
1785 first_empty_page = p; | 1945 first_empty_page = p; |
1786 prec_first_empty_page = prev; | 1946 prec_first_empty_page = prev; |
1787 } | 1947 } |
1788 | 1948 |
1789 if (!is_previous_alive) { | 1949 if (!is_previous_alive) { |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1823 prev = p; | 1983 prev = p; |
1824 } | 1984 } |
1825 | 1985 |
1826 // We reached end of space. See if we need to adjust allocation top. | 1986 // We reached end of space. See if we need to adjust allocation top. |
1827 Address new_allocation_top = NULL; | 1987 Address new_allocation_top = NULL; |
1828 | 1988 |
1829 if (first_empty_page->is_valid()) { | 1989 if (first_empty_page->is_valid()) { |
1830 // Last used pages in space are empty. We can move allocation top backwards | 1990 // Last used pages in space are empty. We can move allocation top backwards |
1831 // to the beginning of first empty page. | 1991 // to the beginning of first empty page. |
1832 ASSERT(prev == space->AllocationTopPage()); | 1992 ASSERT(prev == space->AllocationTopPage()); |
1833 | 1993 space->FreePages(prec_first_empty_page, prev); |
1834 new_allocation_top = first_empty_page->ObjectAreaStart(); | 1994 new_allocation_top = first_empty_page->ObjectAreaStart(); |
1835 } | 1995 } |
1836 | 1996 |
1837 if (last_free_size > 0) { | 1997 if (last_free_size > 0) { |
1838 // There was a free ending area on the previous page. | 1998 // There was a free ending area on the previous page. |
1839 // Deallocate it without putting it into freelist and move allocation | 1999 // Deallocate it without putting it into freelist and move allocation |
1840 // top to the beginning of this free area. | 2000 // top to the beginning of this free area. |
1841 space->DeallocateBlock(last_free_start, last_free_size, false); | 2001 space->DeallocateBlock(last_free_start, last_free_size, false); |
1842 new_allocation_top = last_free_start; | 2002 new_allocation_top = last_free_start; |
1843 } | 2003 } |
(...skipping 18 matching lines...) Expand all Loading... | |
1862 void MarkCompactCollector::SweepSpaces() { | 2022 void MarkCompactCollector::SweepSpaces() { |
1863 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 2023 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); |
1864 | 2024 |
1865 ASSERT(state_ == SWEEP_SPACES); | 2025 ASSERT(state_ == SWEEP_SPACES); |
1866 ASSERT(!IsCompacting()); | 2026 ASSERT(!IsCompacting()); |
1867 // Noncompacting collections simply sweep the spaces to clear the mark | 2027 // Noncompacting collections simply sweep the spaces to clear the mark |
1868 // bits and free the nonlive blocks (for old and map spaces). We sweep | 2028 // bits and free the nonlive blocks (for old and map spaces). We sweep |
1869 // the map space last because freeing non-live maps overwrites them and | 2029 // the map space last because freeing non-live maps overwrites them and |
1870 // the other spaces rely on possibly non-live maps to get the sizes for | 2030 // the other spaces rely on possibly non-live maps to get the sizes for |
1871 // non-live objects. | 2031 // non-live objects. |
1872 SweepSpace(Heap::old_pointer_space()); | 2032 SweepSpace(Heap::old_pointer_space(), CONSERVATIVE); |
1873 SweepSpace(Heap::old_data_space()); | 2033 SweepSpace(Heap::old_data_space(), CONSERVATIVE); |
1874 SweepSpace(Heap::code_space()); | 2034 SweepSpace(Heap::code_space(), PRECISE); |
1875 SweepSpace(Heap::cell_space()); | 2035 // TODO(gc): implement specialized sweeper for cell space. |
2036 SweepSpace(Heap::cell_space(), CONSERVATIVE); | |
1876 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); | 2037 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); |
1877 SweepNewSpace(Heap::new_space()); | 2038 SweepNewSpace(Heap::new_space()); |
1878 } | 2039 } |
1879 SweepSpace(Heap::map_space()); | 2040 // TODO(gc): ClearNonLiveTransitions depends on precise sweeping of |
2041 // map space to detect whether unmarked map became dead in this | |
2042 // collection or in one of the previous ones. | |
Erik Corry
2011/01/19 13:46:48
on -> or
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
| |
2043 // TODO(gc): Implement specialized sweeper for map space. | |
Erik Corry
2011/01/19 13:46:48
Missing TODO?
Vyacheslav Egorov (Chromium)
2011/01/20 16:40:21
Done.
| |
2044 SweepSpace(Heap::map_space(), PRECISE); | |
1880 | 2045 |
1881 Heap::IterateDirtyRegions(Heap::map_space(), | 2046 Heap::IterateDirtyRegions(Heap::map_space(), |
1882 &Heap::IteratePointersInDirtyMapsRegion, | 2047 &Heap::IteratePointersInDirtyMapsRegion, |
1883 &UpdatePointerToNewGen, | 2048 &UpdatePointerToNewGen, |
1884 Heap::WATERMARK_SHOULD_BE_VALID); | 2049 Heap::WATERMARK_SHOULD_BE_VALID); |
1885 | 2050 |
1886 ASSERT(live_map_objects_size_ == Heap::map_space()->Size()); | 2051 ASSERT(live_map_objects_size_ <= Heap::map_space()->Size()); |
1887 } | 2052 } |
1888 | 2053 |
1889 | 2054 |
1890 // Iterate the live objects in a range of addresses (eg, a page or a | 2055 // Iterate the live objects in a range of addresses (eg, a page or a |
1891 // semispace). The live regions of the range have been linked into a list. | 2056 // semispace). The live regions of the range have been linked into a list. |
1892 // The first live region is [first_live_start, first_live_end), and the last | 2057 // The first live region is [first_live_start, first_live_end), and the last |
1893 // address in the range is top. The callback function is used to get the | 2058 // address in the range is top. The callback function is used to get the |
1894 // size of each live object. | 2059 // size of each live object. |
1895 int MarkCompactCollector::IterateLiveObjectsInRange( | 2060 int MarkCompactCollector::IterateLiveObjectsInRange( |
1896 Address start, | 2061 Address start, |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1934 } | 2099 } |
1935 return total; | 2100 return total; |
1936 } | 2101 } |
1937 | 2102 |
1938 | 2103 |
1939 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { | 2104 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { |
1940 #ifdef ENABLE_LOGGING_AND_PROFILING | 2105 #ifdef ENABLE_LOGGING_AND_PROFILING |
1941 if (obj->IsCode()) { | 2106 if (obj->IsCode()) { |
1942 PROFILE(CodeDeleteEvent(obj->address())); | 2107 PROFILE(CodeDeleteEvent(obj->address())); |
1943 } else if (obj->IsJSFunction()) { | 2108 } else if (obj->IsJSFunction()) { |
2109 // TODO(gc): we are sweeping old pointer space conservatively thus | |
2110 // we can't notify attached profiler about death of functions. | |
2111 // Consider disabling conservative sweeping when profiler | |
2112 // is enabled. | |
1944 PROFILE(FunctionDeleteEvent(obj->address())); | 2113 PROFILE(FunctionDeleteEvent(obj->address())); |
1945 } | 2114 } |
1946 #endif | 2115 #endif |
1947 } | 2116 } |
1948 | 2117 |
1949 | 2118 |
1950 void MarkCompactCollector::Initialize() { | 2119 void MarkCompactCollector::Initialize() { |
1951 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 2120 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
1952 StaticMarkingVisitor::Initialize(); | 2121 StaticMarkingVisitor::Initialize(); |
1953 } | 2122 } |
1954 | 2123 |
1955 | 2124 |
1956 } } // namespace v8::internal | 2125 } } // namespace v8::internal |
OLD | NEW |