Chromium Code Reviews

Side by Side Diff: src/heap/heap.cc

Issue 1061753008: Revert of Remove the weak list of views from array buffers (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 123 matching lines...)
       full_codegen_bytes_generated_(0),
       crankshaft_codegen_bytes_generated_(0),
       gcs_since_last_deopt_(0),
       allocation_sites_scratchpad_length_(0),
       promotion_queue_(this),
       configured_(false),
       external_string_table_(this),
       chunks_queued_for_free_(NULL),
       gc_callbacks_depth_(0),
       deserialization_complete_(false),
-      concurrent_sweeping_enabled_(false) {
+      concurrent_sweeping_enabled_(false),
+      migration_failure_(false),
+      previous_migration_failure_(false) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
 #if defined(V8_MAX_SEMISPACE_SIZE)
   max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 #endif

   // Ensure old_generation_size_ is a multiple of kPageSize.
   DCHECK(MB >= Page::kPageSize);

(...skipping 541 matching lines...)
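Note: the two new initializers above rely on matching member declarations in src/heap/heap.h (the preceding file in this change). A minimal sketch of what those declarations and trivial accessors would look like; the field names come from this patch, but their exact placement and accessor form in heap.h are assumptions here:

    // Assumed declarations in class Heap (src/heap/heap.h):
    bool migration_failure_;           // a semi-space copy failed during the current GC
    bool previous_migration_failure_;  // value carried over from the previous GC

    void set_migration_failure(bool value) { migration_failure_ = value; }
    void set_previous_migration_failure(bool value) {
      previous_migration_failure_ = value;
    }
    bool migration_failure() { return migration_failure_; }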
 #undef UPDATE_FRAGMENTATION_FOR_SPACE
 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE

 #ifdef DEBUG
   ReportStatisticsAfterGC();
 #endif // DEBUG

   // Remember the last top pointer so that we can later find out
   // whether we allocated in new space since the last GC.
   new_space_top_after_last_gc_ = new_space()->top();
+
+  if (migration_failure_) {
+    set_previous_migration_failure(true);
+  } else {
+    set_previous_migration_failure(false);
+  }
+  set_migration_failure(false);
 }
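Note: the added epilogue block records whether the GC that just finished saw a migration failure and then clears the flag for the next cycle; the if/else is equivalent to the shorter form below (same setters as used in the patch):

    set_previous_migration_failure(migration_failure_);
    set_migration_failure(false);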


 void Heap::HandleGCRequest() {
   if (incremental_marking()->request_type() ==
       IncrementalMarking::COMPLETE_MARKING) {
     CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt");
     return;
   }
   DCHECK(FLAG_overapproximate_weak_closure);
(...skipping 954 matching lines...)
     Object** end = start + external_string_table_.old_space_strings_.length();
     for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
   }

   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
 }


 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
   ProcessArrayBuffers(retainer, false);
+  ProcessNewArrayBufferViews(retainer);
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
 }


 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
   ProcessArrayBuffers(retainer, true);
+  ProcessNewArrayBufferViews(retainer);
   ProcessNativeContexts(retainer);
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer,
                                         false, NULL);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
                                bool stop_after_young) {
   Object* last_array_buffer = undefined_value();
   Object* array_buffer_obj =
       VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer,
                                    stop_after_young, &last_array_buffer);
   set_array_buffers_list(array_buffer_obj);
   set_last_array_buffer_in_list(last_array_buffer);

   // Verify invariant that young array buffers come before old array buffers
   // in array buffers list if there was no promotion failure.
   Object* undefined = undefined_value();
   Object* next = array_buffers_list();
   bool old_objects_recorded = false;
+  if (migration_failure()) return;
   while (next != undefined) {
     if (!old_objects_recorded) {
       old_objects_recorded = !InNewSpace(next);
     }
     CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
     next = JSArrayBuffer::cast(next)->weak_next();
   }
 }

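Note: because old_objects_recorded stays true once an old-space buffer has been seen, the CHECK condition is logically equivalent to the simpler assertion below, i.e. no new-space buffer may appear after an old-space one. The added early return skips this verification when a migration failure during scavenge may have legitimately broken that ordering:

    CHECK(!old_objects_recorded || !InNewSpace(next));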
+void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) {
+  // Retain the list of new space views.
+  Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
+      this, new_array_buffer_views_list_, retainer, false, NULL);
+  set_new_array_buffer_views_list(typed_array_obj);
+
+  // Some objects in the list may be in old space now. Find them
+  // and move them to the corresponding array buffer.
+  Object* view = VisitNewArrayBufferViewsWeakList(
+      this, new_array_buffer_views_list_, retainer);
+  set_new_array_buffer_views_list(view);
+}
+
+
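Note: VisitWeakList itself is defined in src/heap/objects-visiting.cc (the next file in this change). The general pattern it implements is a sweep over a singly linked weak list, asking the retainer for each element's surviving copy and unlinking the dead ones. A self-contained sketch of that pattern with hypothetical types (Node, Retainer, SweepWeakList), not V8's actual template:

    struct Node {
      Node* weak_next = nullptr;
      bool alive = false;  // stand-in for "the retainer says this object survived GC"
    };

    struct Retainer {
      // Returns the surviving copy of the object, or nullptr if it is dead.
      Node* RetainAs(Node* n) { return n->alive ? n : nullptr; }
    };

    // Rebuilds the weak list from its survivors and returns the new head,
    // roughly what VisitWeakList does here for the JSArrayBufferView list.
    Node* SweepWeakList(Node* head, Retainer* retainer) {
      Node* new_head = nullptr;
      Node** tail_slot = &new_head;
      for (Node* cur = head; cur != nullptr; cur = cur->weak_next) {
        Node* retained = retainer->RetainAs(cur);
        if (retained != nullptr) {
          *tail_slot = retained;             // link the survivor into the rebuilt list
          tail_slot = &retained->weak_next;  // the next survivor will follow it
        }
      }
      *tail_slot = nullptr;  // terminate the rebuilt list
      return new_head;
    }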
 void Heap::TearDownArrayBuffers() {
   Object* undefined = undefined_value();
   for (Object* o = array_buffers_list(); o != undefined;) {
     JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
     Runtime::FreeArrayBuffer(isolate(), buffer);
     o = buffer->weak_next();
   }
   set_array_buffers_list(undefined);
 }

(...skipping 411 matching lines...)
   SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
   SLOW_DCHECK(object->Size() == object_size);
   Heap* heap = map->GetHeap();

   if (!heap->ShouldBePromoted(object->address(), object_size)) {
     // A semi-space copy may fail due to fragmentation. In that case, we
     // try to promote the object.
     if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
       return;
     }
+    heap->set_migration_failure(true);
   }

   if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                 object_size)) {
     return;
   }

   // If promotion failed, we try to copy the object to the other semi-space
   if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

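Note: the fallback order in this evacuation path is: for an object that should not yet be promoted, try a semi-space copy first and record a migration failure if it does not succeed (e.g. due to fragmentation), then attempt promotion to old space, and finally retry the semi-space copy as a last resort. The recorded flag is what ProcessArrayBuffers consults above to decide whether the young-before-old ordering of the array buffer list can still be trusted.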
(...skipping 3200 matching lines...)
   if (!CreateInitialMaps()) return false;
   CreateApiObjects();

   // Create initial objects
   CreateInitialObjects();
   CHECK_EQ(0u, gc_count_);

   set_native_contexts_list(undefined_value());
   set_array_buffers_list(undefined_value());
   set_last_array_buffer_in_list(undefined_value());
+  set_new_array_buffer_views_list(undefined_value());
   set_allocation_sites_list(undefined_value());
   return true;
 }


 void Heap::SetStackLimits() {
   DCHECK(isolate_ != NULL);
   DCHECK(isolate_ == isolate());
   // On 64 bit machines, pointers are generally out of range of Smis. We write
   // something that looks like an out of range Smi to the GC.
(...skipping 992 matching lines...)
       static_cast<int>(object_sizes_last_time_[index]));
   CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }
 }
 }  // namespace v8::internal