Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(88)

Side by Side Diff: src/heap/heap.cc

Issue 1114563002: Remove the weak list of array buffers (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: updates Created 5 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/heap.h ('k') | src/heap/objects-visiting.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/accessors.h" 7 #include "src/accessors.h"
8 #include "src/api.h" 8 #include "src/api.h"
9 #include "src/base/bits.h" 9 #include "src/base/bits.h"
10 #include "src/base/once.h" 10 #include "src/base/once.h"
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
147 // easier if you can define it as part of the build environment. 147 // easier if you can define it as part of the build environment.
148 #if defined(V8_MAX_SEMISPACE_SIZE) 148 #if defined(V8_MAX_SEMISPACE_SIZE)
149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; 149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
150 #endif 150 #endif
151 151
152 // Ensure old_generation_size_ is a multiple of kPageSize. 152 // Ensure old_generation_size_ is a multiple of kPageSize.
153 DCHECK(MB >= Page::kPageSize); 153 DCHECK(MB >= Page::kPageSize);
154 154
155 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); 155 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
156 set_native_contexts_list(NULL); 156 set_native_contexts_list(NULL);
157 set_array_buffers_list(Smi::FromInt(0));
158 set_last_array_buffer_in_list(Smi::FromInt(0));
159 set_allocation_sites_list(Smi::FromInt(0)); 157 set_allocation_sites_list(Smi::FromInt(0));
160 set_encountered_weak_collections(Smi::FromInt(0)); 158 set_encountered_weak_collections(Smi::FromInt(0));
161 set_encountered_weak_cells(Smi::FromInt(0)); 159 set_encountered_weak_cells(Smi::FromInt(0));
162 // Put a dummy entry in the remembered pages so we can find the list in the 160 // Put a dummy entry in the remembered pages so we can find the list in the
163 // minidump even if there are no real unmapped pages. 161 // minidump even if there are no real unmapped pages.
164 RememberUnmappedPage(NULL, false); 162 RememberUnmappedPage(NULL, false);
165 163
166 ClearObjectStats(true); 164 ClearObjectStats(true);
167 } 165 }
168 166
(...skipping 1119 matching lines...) Expand 10 before | Expand all | Expand 10 after
1288 1286
1289 if (FLAG_allocation_site_pretenuring) { 1287 if (FLAG_allocation_site_pretenuring) {
1290 EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc); 1288 EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
1291 } 1289 }
1292 } 1290 }
1293 1291
1294 1292
1295 void Heap::MarkCompactEpilogue() { 1293 void Heap::MarkCompactEpilogue() {
1296 gc_state_ = NOT_IN_GC; 1294 gc_state_ = NOT_IN_GC;
1297 1295
1296 ReapDeadArrayBuffers();
1297
1298 isolate_->counters()->objs_since_last_full()->Set(0); 1298 isolate_->counters()->objs_since_last_full()->Set(0);
1299 1299
1300 incremental_marking()->Epilogue(); 1300 incremental_marking()->Epilogue();
1301 1301
1302 PreprocessStackTraces(); 1302 PreprocessStackTraces();
1303 } 1303 }
1304 1304
1305 1305
1306 void Heap::MarkCompactPrologue() { 1306 void Heap::MarkCompactPrologue() {
1307 // At any old GC clear the keyed lookup cache to enable collection of unused 1307 // At any old GC clear the keyed lookup cache to enable collection of unused
1308 // maps. 1308 // maps.
1309 isolate_->keyed_lookup_cache()->Clear(); 1309 isolate_->keyed_lookup_cache()->Clear();
1310 isolate_->context_slot_cache()->Clear(); 1310 isolate_->context_slot_cache()->Clear();
1311 isolate_->descriptor_lookup_cache()->Clear(); 1311 isolate_->descriptor_lookup_cache()->Clear();
1312 RegExpResultsCache::Clear(string_split_cache()); 1312 RegExpResultsCache::Clear(string_split_cache());
1313 RegExpResultsCache::Clear(regexp_multiple_cache()); 1313 RegExpResultsCache::Clear(regexp_multiple_cache());
1314 1314
1315 isolate_->compilation_cache()->MarkCompactPrologue(); 1315 isolate_->compilation_cache()->MarkCompactPrologue();
1316 1316
1317 CompletelyClearInstanceofCache(); 1317 CompletelyClearInstanceofCache();
1318 1318
1319 FlushNumberStringCache(); 1319 FlushNumberStringCache();
1320 if (FLAG_cleanup_code_caches_at_gc) { 1320 if (FLAG_cleanup_code_caches_at_gc) {
1321 polymorphic_code_cache()->set_cache(undefined_value()); 1321 polymorphic_code_cache()->set_cache(undefined_value());
1322 } 1322 }
1323 1323
1324 ClearNormalizedMapCaches(); 1324 ClearNormalizedMapCaches();
1325
1326 not_yet_discovered_array_buffers_ = live_array_buffers_;
jochen (gone - plz use gerrit) 2015/04/28 19:46:44 so the bug is here... Heap::MarkCompactPrologue is
1325 } 1327 }
1326 1328
1327 1329
1328 // Helper class for copying HeapObjects 1330 // Helper class for copying HeapObjects
1329 class ScavengeVisitor : public ObjectVisitor { 1331 class ScavengeVisitor : public ObjectVisitor {
1330 public: 1332 public:
1331 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {} 1333 explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
1332 1334
1333 void VisitPointer(Object** p) { ScavengePointer(p); } 1335 void VisitPointer(Object** p) { ScavengePointer(p); }
1334 1336
(...skipping 363 matching lines...) Expand 10 before | Expand all | Expand 10 after
1698 Object** start = &external_string_table_.old_space_strings_[0]; 1700 Object** start = &external_string_table_.old_space_strings_[0];
1699 Object** end = start + external_string_table_.old_space_strings_.length(); 1701 Object** end = start + external_string_table_.old_space_strings_.length();
1700 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); 1702 for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
1701 } 1703 }
1702 1704
1703 UpdateNewSpaceReferencesInExternalStringTable(updater_func); 1705 UpdateNewSpaceReferencesInExternalStringTable(updater_func);
1704 } 1706 }
1705 1707
1706 1708
1707 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { 1709 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
1708 ProcessArrayBuffers(retainer, false);
1709 ProcessNativeContexts(retainer); 1710 ProcessNativeContexts(retainer);
1710 ProcessAllocationSites(retainer); 1711 ProcessAllocationSites(retainer);
1711 } 1712 }
1712 1713
1713 1714
1714 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { 1715 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
1715 ProcessArrayBuffers(retainer, true);
1716 ProcessNativeContexts(retainer); 1716 ProcessNativeContexts(retainer);
1717 } 1717 }
1718 1718
1719 1719
1720 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { 1720 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
1721 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer, 1721 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
1722 false, NULL);
1723 // Update the head of the list of contexts. 1722 // Update the head of the list of contexts.
1724 set_native_contexts_list(head); 1723 set_native_contexts_list(head);
1725 } 1724 }
1726 1725
1727 1726
1728 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, 1727 void Heap::RegisterLiveArrayBuffer(void* data, size_t length) {
1729 bool stop_after_young) { 1728 live_array_buffers_[data] = length;
1730 Object* last_array_buffer = undefined_value(); 1729 reinterpret_cast<v8::Isolate*>(isolate_)
1731 Object* array_buffer_obj = 1730 ->AdjustAmountOfExternalAllocatedMemory(length);
1732 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer, 1731 }
1733 stop_after_young, &last_array_buffer);
1734 set_array_buffers_list(array_buffer_obj);
1735 set_last_array_buffer_in_list(last_array_buffer);
1736 1732
1737 // Verify invariant that young array buffers come before old array buffers 1733
1738 // in array buffers list if there was no promotion failure. 1734 void Heap::UnregisterArrayBuffer(void* data) {
1739 Object* undefined = undefined_value(); 1735 DCHECK(live_array_buffers_.count(data) > 0);
1740 Object* next = array_buffers_list(); 1736 live_array_buffers_.erase(data);
1741 bool old_objects_recorded = false; 1737 not_yet_discovered_array_buffers_.erase(data);
1742 while (next != undefined) { 1738 }
1743 if (!old_objects_recorded) { 1739
1744 old_objects_recorded = !InNewSpace(next); 1740
1745 } 1741 void Heap::AddDiscoveredArrayBuffer(void* data) {
1746 CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next)); 1742 not_yet_discovered_array_buffers_.erase(data);
1747 next = JSArrayBuffer::cast(next)->weak_next(); 1743 }
1744
1745
1746 void Heap::ReapDeadArrayBuffers() {
1747 for (auto buffer = not_yet_discovered_array_buffers_.begin();
1748 buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
1749 V8::ArrayBufferAllocator()->Free(buffer->first, buffer->second);
1750 // Don't use the API method here since this could trigger another GC.
1751 amount_of_external_allocated_memory_ -= buffer->second;
1752 live_array_buffers_.erase(buffer->first);
1748 } 1753 }
1754 not_yet_discovered_array_buffers_.clear();
1749 } 1755 }
1750 1756
1751 1757
1752 void Heap::TearDownArrayBuffers() { 1758 void Heap::TearDownArrayBuffers() {
1753 Object* undefined = undefined_value(); 1759 for (auto buffer = live_array_buffers_.begin();
1754 for (Object* o = array_buffers_list(); o != undefined;) { 1760 buffer != live_array_buffers_.end(); ++buffer) {
1755 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); 1761 V8::ArrayBufferAllocator()->Free(buffer->first, buffer->second);
1756 Runtime::FreeArrayBuffer(isolate(), buffer);
1757 o = buffer->weak_next();
1758 } 1762 }
1759 set_array_buffers_list(undefined); 1763 live_array_buffers_.clear();
1764 not_yet_discovered_array_buffers_.clear();
1760 } 1765 }
1761 1766
1762 1767
1763 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { 1768 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
1764 Object* allocation_site_obj = VisitWeakList<AllocationSite>( 1769 Object* allocation_site_obj =
1765 this, allocation_sites_list(), retainer, false, NULL); 1770 VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
1766 set_allocation_sites_list(allocation_site_obj); 1771 set_allocation_sites_list(allocation_site_obj);
1767 } 1772 }
1768 1773
1769 1774
1770 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { 1775 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
1771 DisallowHeapAllocation no_allocation_scope; 1776 DisallowHeapAllocation no_allocation_scope;
1772 Object* cur = allocation_sites_list(); 1777 Object* cur = allocation_sites_list();
1773 bool marked = false; 1778 bool marked = false;
1774 while (cur->IsAllocationSite()) { 1779 while (cur->IsAllocationSite()) {
1775 AllocationSite* casted = AllocationSite::cast(cur); 1780 AllocationSite* casted = AllocationSite::cast(cur);
(...skipping 3624 matching lines...) Expand 10 before | Expand all | Expand 10 after
5400 bool Heap::CreateHeapObjects() { 5405 bool Heap::CreateHeapObjects() {
5401 // Create initial maps. 5406 // Create initial maps.
5402 if (!CreateInitialMaps()) return false; 5407 if (!CreateInitialMaps()) return false;
5403 CreateApiObjects(); 5408 CreateApiObjects();
5404 5409
5405 // Create initial objects 5410 // Create initial objects
5406 CreateInitialObjects(); 5411 CreateInitialObjects();
5407 CHECK_EQ(0u, gc_count_); 5412 CHECK_EQ(0u, gc_count_);
5408 5413
5409 set_native_contexts_list(undefined_value()); 5414 set_native_contexts_list(undefined_value());
5410 set_array_buffers_list(undefined_value());
5411 set_last_array_buffer_in_list(undefined_value());
5412 set_allocation_sites_list(undefined_value()); 5415 set_allocation_sites_list(undefined_value());
5413 return true; 5416 return true;
5414 } 5417 }
5415 5418
5416 5419
5417 void Heap::SetStackLimits() { 5420 void Heap::SetStackLimits() {
5418 DCHECK(isolate_ != NULL); 5421 DCHECK(isolate_ != NULL);
5419 DCHECK(isolate_ == isolate()); 5422 DCHECK(isolate_ == isolate());
5420 // On 64 bit machines, pointers are generally out of range of Smis. We write 5423 // On 64 bit machines, pointers are generally out of range of Smis. We write
5421 // something that looks like an out of range Smi to the GC. 5424 // something that looks like an out of range Smi to the GC.
(...skipping 992 matching lines...) Expand 10 before | Expand all | Expand 10 after
6414 static_cast<int>(object_sizes_last_time_[index])); 6417 static_cast<int>(object_sizes_last_time_[index]));
6415 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) 6418 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
6416 #undef ADJUST_LAST_TIME_OBJECT_COUNT 6419 #undef ADJUST_LAST_TIME_OBJECT_COUNT
6417 6420
6418 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 6421 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
6419 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 6422 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
6420 ClearObjectStats(); 6423 ClearObjectStats();
6421 } 6424 }
6422 } 6425 }
6423 } // namespace v8::internal 6426 } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap/heap.h ('k') | src/heap/objects-visiting.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698