Chromium Code Reviews

Diff: src/heap/heap.cc

Issue 1053203007: Revert of Reland "Remove the weak list of views from array buffers" (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
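For context: this revert restores, among other things, the heap's new_array_buffer_views_list_ and the migration-failure bookkeeping that the reverted change had dropped. Both pieces build on weak lists: singly linked chains threaded through the heap objects themselves via a weak_next field, terminated by the undefined sentinel, and pruned after each GC by VisitWeakList together with a WeakObjectRetainer. The standalone sketch below shows the general shape of such a retainer-driven walk; the types Obj and Retainer are illustrative stand-ins, not V8 code.

// Simplified illustration of a weak list: a chain threaded through the
// objects via "weak_next" (V8 terminates it with undefined; nullptr here).
#include <cstdio>

struct Obj {
  int id;
  bool alive;      // stand-in for "still reachable after this GC"
  Obj* weak_next;  // next list element, or nullptr (the sentinel)
};

// Stand-in for V8's WeakObjectRetainer: returns the object to keep,
// or nullptr if the entry should be dropped from the list.
struct Retainer {
  Obj* RetainAs(Obj* o) const { return o->alive ? o : nullptr; }
};

// Rebuilds the list, keeping only retained entries, and returns the new
// head. This mirrors the shape of the VisitWeakList<T>() calls in the diff,
// minus write barriers, the stop_after_young mode, and other heap details.
Obj* VisitWeakList(Obj* head, const Retainer& retainer) {
  Obj* new_head = nullptr;
  Obj** link = &new_head;  // where the next survivor gets linked in
  for (Obj* cur = head; cur != nullptr; cur = cur->weak_next) {
    if (Obj* retained = retainer.RetainAs(cur)) {
      *link = retained;
      link = &retained->weak_next;
    }
  }
  *link = nullptr;  // terminate the rebuilt chain
  return new_head;
}

int main() {
  Obj c{3, true, nullptr}, b{2, false, &c}, a{1, true, &b};
  Retainer retainer;
  for (Obj* o = VisitWeakList(&a, retainer); o != nullptr; o = o->weak_next)
    std::printf("retained %d\n", o->id);  // prints 1, then 3; 2 is dropped
}

In the patch itself this kind of walk is applied to array_buffers_list(), new_array_buffer_views_list_, the native contexts list, and the allocation sites list.
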
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/base/bits.h"
#include "src/base/once.h"
(...skipping 123 matching lines...)
      full_codegen_bytes_generated_(0),
      crankshaft_codegen_bytes_generated_(0),
      gcs_since_last_deopt_(0),
      allocation_sites_scratchpad_length_(0),
      promotion_queue_(this),
      configured_(false),
      external_string_table_(this),
      chunks_queued_for_free_(NULL),
      gc_callbacks_depth_(0),
      deserialization_complete_(false),
-      concurrent_sweeping_enabled_(false) {
+      concurrent_sweeping_enabled_(false),
+      migration_failure_(false),
+      previous_migration_failure_(false) {
  // Allow build-time customization of the max semispace size. Building
  // V8 with snapshots and a non-default max semispace size is much
  // easier if you can define it as part of the build environment.
#if defined(V8_MAX_SEMISPACE_SIZE)
  max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
#endif

  // Ensure old_generation_size_ is a multiple of kPageSize.
  DCHECK(MB >= Page::kPageSize);

(...skipping 541 matching lines...)
#undef UPDATE_FRAGMENTATION_FOR_SPACE
#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE

#ifdef DEBUG
  ReportStatisticsAfterGC();
#endif  // DEBUG

  // Remember the last top pointer so that we can later find out
  // whether we allocated in new space since the last GC.
  new_space_top_after_last_gc_ = new_space()->top();
+
+  if (migration_failure_) {
+    set_previous_migration_failure(true);
+  } else {
+    set_previous_migration_failure(false);
+  }
+  set_migration_failure(false);
}


void Heap::HandleGCRequest() {
  if (incremental_marking()->request_type() ==
      IncrementalMarking::COMPLETE_MARKING) {
    CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt");
    return;
  }
  DCHECK(FLAG_overapproximate_weak_closure);
(...skipping 955 matching lines...)
    Object** end = start + external_string_table_.old_space_strings_.length();
    for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
  }

  UpdateNewSpaceReferencesInExternalStringTable(updater_func);
}


void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
  ProcessArrayBuffers(retainer, false);
+  ProcessNewArrayBufferViews(retainer);
  ProcessNativeContexts(retainer);
  ProcessAllocationSites(retainer);
}


void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
  ProcessArrayBuffers(retainer, true);
+  ProcessNewArrayBufferViews(retainer);
  ProcessNativeContexts(retainer);
}


void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer,
                                        false, NULL);
  // Update the head of the list of contexts.
  set_native_contexts_list(head);
}


void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
                               bool stop_after_young) {
  Object* last_array_buffer = undefined_value();
  Object* array_buffer_obj =
      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer,
                                   stop_after_young, &last_array_buffer);
  set_array_buffers_list(array_buffer_obj);
  set_last_array_buffer_in_list(last_array_buffer);

  // Verify invariant that young array buffers come before old array buffers
  // in array buffers list if there was no promotion failure.
  Object* undefined = undefined_value();
  Object* next = array_buffers_list();
  bool old_objects_recorded = false;
+  if (migration_failure()) return;
  while (next != undefined) {
    if (!old_objects_recorded) {
      old_objects_recorded = !InNewSpace(next);
    }
    CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
    next = JSArrayBuffer::cast(next)->weak_next();
  }
}


+void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) {
+  // Retain the list of new space views.
+  Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
+      this, new_array_buffer_views_list_, retainer, false, NULL);
+  set_new_array_buffer_views_list(typed_array_obj);
+
+  // Some objects in the list may be in old space now. Find them
+  // and move them to the corresponding array buffer.
+  Object* view = VisitNewArrayBufferViewsWeakList(
+      this, new_array_buffer_views_list_, retainer);
+  set_new_array_buffer_views_list(view);
+}
+
+
void Heap::TearDownArrayBuffers() {
  Object* undefined = undefined_value();
  for (Object* o = array_buffers_list(); o != undefined;) {
    JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
    Runtime::FreeArrayBuffer(isolate(), buffer);
    o = buffer->weak_next();
  }
  set_array_buffers_list(undefined);
}

(...skipping 411 matching lines...)
    SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
+      heap->set_migration_failure(true);
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

(...skipping 3204 matching lines...)
  if (!CreateInitialMaps()) return false;
  CreateApiObjects();

  // Create initial objects
  CreateInitialObjects();
  CHECK_EQ(0u, gc_count_);

  set_native_contexts_list(undefined_value());
  set_array_buffers_list(undefined_value());
  set_last_array_buffer_in_list(undefined_value());
+  set_new_array_buffer_views_list(undefined_value());
  set_allocation_sites_list(undefined_value());
  return true;
}


void Heap::SetStackLimits() {
  DCHECK(isolate_ != NULL);
  DCHECK(isolate_ == isolate());
  // On 64 bit machines, pointers are generally out of range of Smis. We write
  // something that looks like an out of range Smi to the GC.
(...skipping 992 matching lines...)
      static_cast<int>(object_sizes_last_time_[index]));
  CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
#undef ADJUST_LAST_TIME_OBJECT_COUNT

  MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
  ClearObjectStats();
}
}
}  // namespace v8::internal
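
The other recurring addition is the migration_failure_ / previous_migration_failure_ pair: the evacuation hunk (SemiSpaceCopyObject / PromoteObject) sets migration_failure_ when a semi-space copy fails and the object has to be promoted instead, the GC-epilogue hunk rolls that value into previous_migration_failure_ and clears it for the next cycle, and ProcessArrayBuffers skips its young-before-old ordering check whenever a failure occurred, since the list order is no longer guaranteed. A minimal sketch of that lifecycle follows; the HeapFlags class, EpilogueUpdate, and VerifyListOrdering are assumptions for illustration, not V8 APIs.

// Illustrative model of the flag lifecycle added by this patch.
#include <cassert>

class HeapFlags {
 public:
  void set_migration_failure(bool v) { migration_failure_ = v; }
  bool migration_failure() const { return migration_failure_; }
  bool previous_migration_failure() const { return previous_migration_failure_; }

  // Mirrors the new lines at the end of the GC epilogue: remember whether
  // this cycle saw a failure, then reset the flag for the next cycle.
  void EpilogueUpdate() {
    previous_migration_failure_ = migration_failure_;
    migration_failure_ = false;
  }

  // Mirrors the new early-out in ProcessArrayBuffers(): only check the
  // young-before-old invariant when no migration failure happened.
  void VerifyListOrdering(bool list_is_ordered) const {
    if (migration_failure()) return;  // ordering not reliable this cycle
    assert(list_is_ordered);
  }

 private:
  bool migration_failure_ = false;
  bool previous_migration_failure_ = false;
};

int main() {
  HeapFlags heap;
  heap.set_migration_failure(true);  // as on a failed semi-space copy
  heap.VerifyListOrdering(false);    // check is skipped, no assert fires
  heap.EpilogueUpdate();
  assert(heap.previous_migration_failure());
  assert(!heap.migration_failure());
}

Note that previous_migration_failure_ is only set in this file, never read; its consumer lives elsewhere in the patch.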
