Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 136 matching lines...) | |
| 147 // easier if you can define it as part of the build environment. | 147 // easier if you can define it as part of the build environment. |
| 148 #if defined(V8_MAX_SEMISPACE_SIZE) | 148 #if defined(V8_MAX_SEMISPACE_SIZE) |
| 149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
| 150 #endif | 150 #endif |
| 151 | 151 |
| 152 // Ensure old_generation_size_ is a multiple of kPageSize. | 152 // Ensure old_generation_size_ is a multiple of kPageSize. |
| 153 DCHECK(MB >= Page::kPageSize); | 153 DCHECK(MB >= Page::kPageSize); |
| 154 | 154 |
| 155 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); | 155 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); |
| 156 set_native_contexts_list(NULL); | 156 set_native_contexts_list(NULL); |
| 157 set_array_buffers_list(Smi::FromInt(0)); | |
| 158 set_last_array_buffer_in_list(Smi::FromInt(0)); | |
| 159 set_allocation_sites_list(Smi::FromInt(0)); | 157 set_allocation_sites_list(Smi::FromInt(0)); |
| 160 set_encountered_weak_collections(Smi::FromInt(0)); | 158 set_encountered_weak_collections(Smi::FromInt(0)); |
| 161 set_encountered_weak_cells(Smi::FromInt(0)); | 159 set_encountered_weak_cells(Smi::FromInt(0)); |
| 162 // Put a dummy entry in the remembered pages so we can find the list in the | 160 // Put a dummy entry in the remembered pages so we can find the list in the |
| 163 // minidump even if there are no real unmapped pages. | 161 // minidump even if there are no real unmapped pages. |
| 164 RememberUnmappedPage(NULL, false); | 162 RememberUnmappedPage(NULL, false); |
| 165 | 163 |
| 166 ClearObjectStats(true); | 164 ClearObjectStats(true); |
| 167 } | 165 } |
| 168 | 166 |
| (...skipping 1531 matching lines...) | |
| 1700 Object** start = &external_string_table_.old_space_strings_[0]; | 1698 Object** start = &external_string_table_.old_space_strings_[0]; |
| 1701 Object** end = start + external_string_table_.old_space_strings_.length(); | 1699 Object** end = start + external_string_table_.old_space_strings_.length(); |
| 1702 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1700 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
| 1703 } | 1701 } |
| 1704 | 1702 |
| 1705 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1703 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
| 1706 } | 1704 } |
| 1707 | 1705 |
| 1708 | 1706 |
| 1709 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { | 1707 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { |
| 1710 ProcessArrayBuffers(retainer, false); | |
| 1711 ProcessNativeContexts(retainer); | 1708 ProcessNativeContexts(retainer); |
| 1712 ProcessAllocationSites(retainer); | 1709 ProcessAllocationSites(retainer); |
| 1713 } | 1710 } |
| 1714 | 1711 |
| 1715 | 1712 |
| 1716 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { | 1713 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { |
| 1717 ProcessArrayBuffers(retainer, true); | |
| 1718 ProcessNativeContexts(retainer); | 1714 ProcessNativeContexts(retainer); |
| 1719 } | 1715 } |
| 1720 | 1716 |
| 1721 | 1717 |
| 1722 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { | 1718 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { |
| 1723 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer, | 1719 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer); |
| 1724 false, NULL); | |
| 1725 // Update the head of the list of contexts. | 1720 // Update the head of the list of contexts. |
| 1726 set_native_contexts_list(head); | 1721 set_native_contexts_list(head); |
| 1727 } | 1722 } |
| 1728 | 1723 |
| 1729 | 1724 |
| 1730 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1725 void Heap::RegisterNewArrayBuffer(void* data, size_t length) { |
| 1731 bool stop_after_young) { | 1726 if (!data) return; |
Hannes Payer (out of office), 2015/04/30 10:35:29:
How can this one be null?

jochen (gone - plz use gerrit), 2015/04/30 12:01:08:
malloc(0) returns 0 as well.
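As background for the null check discussed above: the C standard allows malloc(0) to return either a null pointer or a unique, freeable pointer, so a zero-length backing store can legitimately be registered with a null `data` pointer. A minimal standalone illustration of that behavior (plain C++, not V8 API):

```cpp
#include <cstdio>
#include <cstdlib>

// malloc(0) may return nullptr or a unique pointer; both are conforming.
// Either way, passing the result to free() is safe (free(nullptr) is a no-op).
int main() {
  void* p = std::malloc(0);
  std::printf("malloc(0) -> %p\n", p);  // may print a null address
  std::free(p);
  return 0;
}
```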
| 1732 Object* last_array_buffer = undefined_value(); | 1727 live_array_buffers_[data] = length; |
| 1733 Object* array_buffer_obj = | 1728 reinterpret_cast<v8::Isolate*>(isolate_) |
| 1734 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer, | 1729 ->AdjustAmountOfExternalAllocatedMemory(length); |
| 1735 stop_after_young, &last_array_buffer); | 1730 } |
| 1736 set_array_buffers_list(array_buffer_obj); | |
| 1737 set_last_array_buffer_in_list(last_array_buffer); | |
| 1738 | 1731 |
| 1739 // Verify invariant that young array buffers come before old array buffers | 1732 |
| 1740 // in array buffers list if there was no promotion failure. | 1733 void Heap::UnregisterArrayBuffer(void* data) { |
| 1741 Object* undefined = undefined_value(); | 1734 if (!data) return; |
Hannes Payer (out of office), 2015/04/30 10:35:29:
This one can only be null in the neuter case, right?

jochen (gone - plz use gerrit), 2015/04/30 12:01:08:
Also for on-heap typed arrays.
| 1742 Object* next = array_buffers_list(); | 1735 DCHECK(live_array_buffers_.count(data) > 0); |
| 1743 bool old_objects_recorded = false; | 1736 live_array_buffers_.erase(data); |
| 1744 while (next != undefined) { | 1737 not_yet_discovered_array_buffers_.erase(data); |
| 1745 if (!old_objects_recorded) { | 1738 } |
| 1746 old_objects_recorded = !InNewSpace(next); | 1739 |
| 1747 } | 1740 |
| 1748 CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next)); | 1741 void Heap::RegisterLiveArrayBuffer(void* data) { |
| 1749 next = JSArrayBuffer::cast(next)->weak_next(); | 1742 not_yet_discovered_array_buffers_.erase(data); |
| | 1743 } |
| | 1744 |
| | 1745 |
| | 1746 void Heap::FreeDeadArrayBuffers() { |
| | 1747 for (auto buffer = not_yet_discovered_array_buffers_.begin(); |
| | 1748 buffer != not_yet_discovered_array_buffers_.end(); ++buffer) { |
| | 1749 isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second); |
| | 1750 // Don't use the API method here since this could trigger another GC. |
| | 1751 amount_of_external_allocated_memory_ -= buffer->second; |
| | 1752 live_array_buffers_.erase(buffer->first); |
| 1750 } | 1753 } |
| | 1754 not_yet_discovered_array_buffers_ = live_array_buffers_; |
| 1751 } | 1755 } |
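The rows above replace the old weak-list walk with two maps: live_array_buffers_ records every registered backing store, RegisterLiveArrayBuffer removes the entries the collector actually reaches from not_yet_discovered_array_buffers_, and FreeDeadArrayBuffers frees whatever is left in that map, then resets it to a copy of live_array_buffers_ for the next cycle. A minimal standalone sketch of that bookkeeping, with hypothetical names (BufferTracker, Register, MarkLive, SweepDead, TearDown are illustrative, not V8 API):

```cpp
#include <cstddef>
#include <cstdlib>
#include <map>

// Hypothetical model of the two-map scheme: live_ mirrors
// live_array_buffers_, undiscovered_ mirrors not_yet_discovered_array_buffers_.
class BufferTracker {
 public:
  void Register(void* data, size_t length) {  // cf. RegisterNewArrayBuffer
    if (!data) return;                        // malloc(0) may hand out nullptr
    live_[data] = length;
  }
  void Unregister(void* data) {               // cf. UnregisterArrayBuffer
    if (!data) return;
    live_.erase(data);
    undiscovered_.erase(data);
  }
  void MarkLive(void* data) {                 // cf. RegisterLiveArrayBuffer
    undiscovered_.erase(data);                // the collector reached this one
  }
  void SweepDead() {                          // cf. FreeDeadArrayBuffers
    for (const auto& entry : undiscovered_) { // anything not marked is dead
      std::free(entry.first);
      live_.erase(entry.first);
    }
    undiscovered_ = live_;                    // arm the next cycle
  }
  void TearDown() {                           // cf. TearDownArrayBuffers below
    for (const auto& entry : live_) std::free(entry.first);
    live_.clear();
    undiscovered_.clear();
  }
 private:
  std::map<void*, size_t> live_;
  std::map<void*, size_t> undiscovered_;
};
```

The patch itself additionally adjusts the isolate's external-memory accounting on registration and subtracts it directly inside FreeDeadArrayBuffers to avoid triggering another GC from within the sweep; the sketch leaves that accounting out.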
| 1752 | 1756 |
| 1753 | 1757 |
| 1754 void Heap::TearDownArrayBuffers() { | 1758 void Heap::TearDownArrayBuffers() { |
| 1755 Object* undefined = undefined_value(); | 1759 for (auto buffer = live_array_buffers_.begin(); |
| 1756 for (Object* o = array_buffers_list(); o != undefined;) { | 1760 buffer != live_array_buffers_.end(); ++buffer) { |
| 1757 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); | 1761 isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second); |
| 1758 Runtime::FreeArrayBuffer(isolate(), buffer); | |
| 1759 o = buffer->weak_next(); | |
| 1760 } | 1762 } |
| 1761 set_array_buffers_list(undefined); | 1763 live_array_buffers_.clear(); |
| | 1764 not_yet_discovered_array_buffers_.clear(); |
| 1762 } | 1765 } |
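TearDownArrayBuffers, by contrast, unconditionally frees every still-registered backing store when the heap goes away. A short usage walk-through of the hypothetical BufferTracker sketch above (assuming that class is in scope); it also shows why a freshly registered buffer survives one sweep before it can be reclaimed:

```cpp
#include <cstdlib>

int main() {
  BufferTracker tracker;       // hypothetical class from the sketch above
  void* a = std::malloc(16);
  void* b = std::malloc(32);
  tracker.Register(a, 16);     // live = {a, b}, undiscovered = {}
  tracker.Register(b, 32);

  tracker.SweepDead();         // nothing was armed yet: frees nothing,
                               // undiscovered becomes {a, b}

  tracker.MarkLive(a);         // next GC cycle reaches only `a`
  tracker.SweepDead();         // frees b; undiscovered reset to {a}

  tracker.TearDown();          // heap teardown: frees `a` as well
  return 0;
}
```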
| 1763 | 1766 |
| 1764 | 1767 |
| 1765 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { | 1768 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { |
| 1766 Object* allocation_site_obj = VisitWeakList<AllocationSite>( | 1769 Object* allocation_site_obj = |
| 1767 this, allocation_sites_list(), retainer, false, NULL); | 1770 VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer); |
| 1768 set_allocation_sites_list(allocation_site_obj); | 1771 set_allocation_sites_list(allocation_site_obj); |
| 1769 } | 1772 } |
| 1770 | 1773 |
| 1771 | 1774 |
| 1772 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { | 1775 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { |
| 1773 DisallowHeapAllocation no_allocation_scope; | 1776 DisallowHeapAllocation no_allocation_scope; |
| 1774 Object* cur = allocation_sites_list(); | 1777 Object* cur = allocation_sites_list(); |
| 1775 bool marked = false; | 1778 bool marked = false; |
| 1776 while (cur->IsAllocationSite()) { | 1779 while (cur->IsAllocationSite()) { |
| 1777 AllocationSite* casted = AllocationSite::cast(cur); | 1780 AllocationSite* casted = AllocationSite::cast(cur); |
| (...skipping 3625 matching lines...) | |
| 5403 bool Heap::CreateHeapObjects() { | 5406 bool Heap::CreateHeapObjects() { |
| 5404 // Create initial maps. | 5407 // Create initial maps. |
| 5405 if (!CreateInitialMaps()) return false; | 5408 if (!CreateInitialMaps()) return false; |
| 5406 CreateApiObjects(); | 5409 CreateApiObjects(); |
| 5407 | 5410 |
| 5408 // Create initial objects | 5411 // Create initial objects |
| 5409 CreateInitialObjects(); | 5412 CreateInitialObjects(); |
| 5410 CHECK_EQ(0u, gc_count_); | 5413 CHECK_EQ(0u, gc_count_); |
| 5411 | 5414 |
| 5412 set_native_contexts_list(undefined_value()); | 5415 set_native_contexts_list(undefined_value()); |
| 5413 set_array_buffers_list(undefined_value()); | |
| 5414 set_last_array_buffer_in_list(undefined_value()); | |
| 5415 set_allocation_sites_list(undefined_value()); | 5416 set_allocation_sites_list(undefined_value()); |
| 5416 return true; | 5417 return true; |
| 5417 } | 5418 } |
| 5418 | 5419 |
| 5419 | 5420 |
| 5420 void Heap::SetStackLimits() { | 5421 void Heap::SetStackLimits() { |
| 5421 DCHECK(isolate_ != NULL); | 5422 DCHECK(isolate_ != NULL); |
| 5422 DCHECK(isolate_ == isolate()); | 5423 DCHECK(isolate_ == isolate()); |
| 5423 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5424 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 5424 // something that looks like an out of range Smi to the GC. | 5425 // something that looks like an out of range Smi to the GC. |
| (...skipping 993 matching lines...) | |
| 6418 static_cast<int>(object_sizes_last_time_[index])); | 6419 static_cast<int>(object_sizes_last_time_[index])); |
| 6419 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6420 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6420 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6421 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6421 | 6422 |
| 6422 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6423 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6423 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6424 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6424 ClearObjectStats(); | 6425 ClearObjectStats(); |
| 6425 } | 6426 } |
| 6426 } | 6427 } |
| 6427 } // namespace v8::internal | 6428 } // namespace v8::internal |