Chromium Code Reviews

Diff: src/heap/heap.cc

Issue 929933002: Revert of "Just visit young array buffers during scavenge." (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 126 matching lines...)
       gcs_since_last_deopt_(0),
 #ifdef VERIFY_HEAP
       no_weak_object_verification_scope_depth_(0),
 #endif
       allocation_sites_scratchpad_length_(0),
       promotion_queue_(this),
       configured_(false),
       external_string_table_(this),
       chunks_queued_for_free_(NULL),
       gc_callbacks_depth_(0),
-      deserialization_complete_(false),
-      promotion_failure_(false) {
+      deserialization_complete_(false) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
 #if defined(V8_MAX_SEMISPACE_SIZE)
   max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 #endif

   // Ensure old_generation_size_ is a multiple of kPageSize.
   DCHECK(MB >= Page::kPageSize);

(...skipping 566 matching lines...)
 #undef UPDATE_FRAGMENTATION_FOR_SPACE
 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE

 #ifdef DEBUG
   ReportStatisticsAfterGC();
 #endif  // DEBUG

   // Remember the last top pointer so that we can later find out
   // whether we allocated in new space since the last GC.
   new_space_top_after_last_gc_ = new_space()->top();
-  set_promotion_failure(false);
 }


 void Heap::HandleGCRequest() {
   if (incremental_marking()->request_type() ==
       IncrementalMarking::COMPLETE_MARKING) {
     CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt");
     return;
   }
   DCHECK(FLAG_overapproximate_weak_closure);
(...skipping 981 matching lines...)
     Object** start = &external_string_table_.old_space_strings_[0];
     Object** end = start + external_string_table_.old_space_strings_.length();
     for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
   }

   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
 }


 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer, false);
-  ProcessNewArrayBufferViews(retainer);
+  ProcessArrayBuffers(retainer);
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
 }


 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer, true);
-  ProcessNewArrayBufferViews(retainer);
+  ProcessArrayBuffers(retainer);
   ProcessNativeContexts(retainer);
 }

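The two hunks above are the heart of the revert: ProcessArrayBuffers loses its stop_after_young flag and the separate pass over new-space views disappears, so both full GCs and scavenges now walk the complete weak array-buffer list. As a rough illustration of the pattern VisitWeakList implements, here is a minimal, self-contained sketch, not V8's actual code (Node, RetainAs, and VisitWeakListSketch are illustrative names): walk an intrusive singly linked list, ask a retainer which elements survive, and relink the survivors.

#include <cstdio>

// Illustrative stand-in for a heap object with an intrusive weak link.
struct Node {
  int id;
  bool alive;  // stands in for "the retainer keeps this object"
  Node* weak_next;
};

// Returns the node itself if it survives, nullptr otherwise -- the same
// contract as WeakObjectRetainer::RetainAs, in miniature.
Node* RetainAs(Node* n) { return n->alive ? n : nullptr; }

// Walk the list, drop dead elements, relink survivors, return the new head.
Node* VisitWeakListSketch(Node* head) {
  Node* new_head = nullptr;
  Node* tail = nullptr;
  for (Node* cur = head; cur != nullptr; cur = cur->weak_next) {
    Node* retained = RetainAs(cur);
    if (retained == nullptr) continue;  // element died; splice it out
    if (tail == nullptr) {
      new_head = retained;
    } else {
      tail->weak_next = retained;
    }
    tail = retained;
  }
  if (tail != nullptr) tail->weak_next = nullptr;
  return new_head;
}

int main() {
  Node c{3, true, nullptr}, b{2, false, &c}, a{1, true, &b};
  for (Node* n = VisitWeakListSketch(&a); n != nullptr; n = n->weak_next)
    std::printf("kept node %d\n", n->id);  // prints: kept node 1, kept node 3
}
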
 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
-  Object* head =
-      VisitWeakList<Context>(this, native_contexts_list(), retainer, false);
+  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }

-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
-                               bool stop_after_young) {
-  Object* array_buffer_obj = VisitWeakList<JSArrayBuffer>(
-      this, array_buffers_list(), retainer, stop_after_young);
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
+  Object* array_buffer_obj =
+      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
   set_array_buffers_list(array_buffer_obj);
-
-#ifdef DEBUG
-  // Verify invariant that young array buffers come before old array buffers
-  // in array buffers list if there was no promotion failure.
-  Object* undefined = undefined_value();
-  Object* next = array_buffers_list();
-  bool old_objects_recorded = false;
-  if (promotion_failure()) return;
-  while (next != undefined) {
-    if (!old_objects_recorded) {
-      old_objects_recorded = !InNewSpace(next);
-    }
-    DCHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
-    next = JSArrayBuffer::cast(next)->weak_next();
-  }
-#endif
 }

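The #ifdef DEBUG block deleted above enforced the ordering invariant that the young-buffer optimization depended on: unless a promotion failure had occurred, every new-space array buffer had to precede the first old-space buffer in the list, so a scavenge could stop walking at the first old entry. A minimal sketch of that kind of partition check, with InNewSpace stood in for by a boolean (illustrative names, not V8's API):

#include <cassert>
#include <vector>

// true = "in new space"; the invariant requires all true entries first.
bool YoungBuffersFirst(const std::vector<bool>& in_new_space) {
  bool old_seen = false;
  for (bool young : in_new_space) {
    if (!young) old_seen = true;      // entered the old-space suffix
    else if (old_seen) return false;  // a young buffer after an old one
  }
  return true;
}

int main() {
  assert(YoungBuffersFirst({true, true, false, false}));
  assert(!YoungBuffersFirst({true, false, true}));
}
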
-void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) {
-  // Retain the list of new space views.
-  Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
-      this, new_array_buffer_views_list_, retainer, false);
-  set_new_array_buffer_views_list(typed_array_obj);
-
-  // Some objects in the list may be in old space now. Find them
-  // and move them to the corresponding array buffer.
-  Object* undefined = undefined_value();
-  Object* previous = undefined;
-  Object* head = undefined;
-  Object* next;
-  for (Object* o = new_array_buffer_views_list(); o != undefined;) {
-    JSArrayBufferView* view = JSArrayBufferView::cast(o);
-    next = view->weak_next();
-    if (!InNewSpace(view)) {
-      if (previous != undefined) {
-        // We are in the middle of the list, skip the old space element.
-        JSArrayBufferView::cast(previous)->set_weak_next(next);
-      }
-      JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
-      view->set_weak_next(buffer->weak_first_view());
-      buffer->set_weak_first_view(view);
-    } else {
-      // We found a valid new space view, remember it.
-      previous = view;
-      if (head == undefined) {
-        // We are at the list head.
-        head = view;
-      }
-    }
-    o = next;
-  }
-  set_new_array_buffer_views_list(head);
-}
-
-
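The deleted ProcessNewArrayBufferViews maintained two intrusive lists: a global list of new-space views and, per buffer, a list reachable through weak_first_view. A view promoted to old space was spliced out of the global list and pushed onto its buffer's list. A compact, self-contained sketch of that move under simplified types (View, Buffer, and MovePromotedViews are illustrative, not V8's):

struct View;

struct Buffer {
  View* first_view = nullptr;  // stands in for weak_first_view
};

struct View {
  Buffer* buffer;
  bool in_new_space;
  View* weak_next = nullptr;
};

// Unlink old-space views from the global new-space list and prepend each
// to its owning buffer's view list; returns the new head of the global list.
View* MovePromotedViews(View* head) {
  View* new_head = nullptr;
  View* prev = nullptr;
  for (View* v = head; v != nullptr;) {
    View* next = v->weak_next;  // capture before relinking
    if (!v->in_new_space) {
      if (prev != nullptr) prev->weak_next = next;  // splice out of the list
      v->weak_next = v->buffer->first_view;  // push onto the buffer's list
      v->buffer->first_view = v;
    } else {
      prev = v;
      if (new_head == nullptr) new_head = v;  // first surviving element
    }
    v = next;
  }
  return new_head;
}

int main() {
  Buffer buf;
  View promoted{&buf, false};         // was promoted to old space
  View young{&buf, true, &promoted};  // still in new space; list head
  View* head = MovePromotedViews(&young);
  return (head == &young && buf.first_view == &promoted) ? 0 : 1;
}
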
 void Heap::TearDownArrayBuffers() {
   Object* undefined = undefined_value();
   for (Object* o = array_buffers_list(); o != undefined;) {
     JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
     Runtime::FreeArrayBuffer(isolate(), buffer);
     o = buffer->weak_next();
   }
   set_array_buffers_list(undefined);
 }

 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
-  Object* allocation_site_obj = VisitWeakList<AllocationSite>(
-      this, allocation_sites_list(), retainer, false);
+  Object* allocation_site_obj =
+      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }


 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
   DisallowHeapAllocation no_allocation_scope;
   Object* cur = allocation_sites_list();
   bool marked = false;
   while (cur->IsAllocationSite()) {
     AllocationSite* casted = AllocationSite::cast(cur);
(...skipping 412 matching lines...)
       if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
         return;
       }
     }

     if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                   object_size)) {
       return;
     }

-    heap->set_promotion_failure(true);
     // If promotion failed, we try to copy the object to the other semi-space
     if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

     UNREACHABLE();
   }

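The only change in this hunk is dropping the promotion-failure bookkeeping; the evacuation strategy itself is unchanged: try a semispace copy for objects that should not be promoted, then promotion into old space, then the semispace copy as a last resort. A minimal sketch of that fallback chain, assuming toy Space and Evacuate types (illustrative, not V8's):

#include <cstdio>

// Illustrative bump allocator; "fails" by returning false when full.
struct Space {
  int used;
  int capacity;
  bool Allocate(int size) {
    if (used + size > capacity) return false;
    used += size;
    return true;
  }
};

// Mirrors the control flow of EvacuateObject above: objects not marked for
// promotion first try a semispace copy; then promotion into old space; if
// promotion fails, the semispace copy is the last resort, and falling past
// it corresponds to UNREACHABLE() in the real code.
void Evacuate(bool should_be_promoted, Space& to_space, Space& old_space,
              int size) {
  if (!should_be_promoted && to_space.Allocate(size)) return;
  if (old_space.Allocate(size)) return;
  if (to_space.Allocate(size)) return;  // fallback after promotion failure
  std::printf("unreachable\n");
}

int main() {
  Space to_space{0, 64};
  Space old_space{0, 0};  // a full old space forces the fallback path
  Evacuate(/*should_be_promoted=*/true, to_space, old_space, 16);
  std::printf("to_space used: %d\n", to_space.used);  // prints 16
}
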
   static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
                                         HeapObject* object) {
     ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
(...skipping 3320 matching lines...)
   // Create initial maps.
   if (!CreateInitialMaps()) return false;
   CreateApiObjects();

   // Create initial objects
   CreateInitialObjects();
   CHECK_EQ(0u, gc_count_);

   set_native_contexts_list(undefined_value());
   set_array_buffers_list(undefined_value());
-  set_new_array_buffer_views_list(undefined_value());
   set_allocation_sites_list(undefined_value());
   weak_object_to_code_table_ = undefined_value();
   return true;
 }


 void Heap::SetStackLimits() {
   DCHECK(isolate_ != NULL);
   DCHECK(isolate_ == isolate());
   // On 64 bit machines, pointers are generally out of range of Smis. We write
(...skipping 975 matching lines...)
                     static_cast<int>(object_sizes_last_time_[index]));
   CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }
 }
 }  // namespace v8::internal