OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 126 matching lines...) | |
137 gcs_since_last_deopt_(0), | 137 gcs_since_last_deopt_(0), |
138 #ifdef VERIFY_HEAP | 138 #ifdef VERIFY_HEAP |
139 no_weak_object_verification_scope_depth_(0), | 139 no_weak_object_verification_scope_depth_(0), |
140 #endif | 140 #endif |
141 allocation_sites_scratchpad_length_(0), | 141 allocation_sites_scratchpad_length_(0), |
142 promotion_queue_(this), | 142 promotion_queue_(this), |
143 configured_(false), | 143 configured_(false), |
144 external_string_table_(this), | 144 external_string_table_(this), |
145 chunks_queued_for_free_(NULL), | 145 chunks_queued_for_free_(NULL), |
146 gc_callbacks_depth_(0), | 146 gc_callbacks_depth_(0), |
147 deserialization_complete_(false) { | 147 deserialization_complete_(false), |
148 promotion_failure_(false) { | |
148 // Allow build-time customization of the max semispace size. Building | 149 // Allow build-time customization of the max semispace size. Building |
149 // V8 with snapshots and a non-default max semispace size is much | 150 // V8 with snapshots and a non-default max semispace size is much |
150 // easier if you can define it as part of the build environment. | 151 // easier if you can define it as part of the build environment. |
151 #if defined(V8_MAX_SEMISPACE_SIZE) | 152 #if defined(V8_MAX_SEMISPACE_SIZE) |
152 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 153 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
153 #endif | 154 #endif |
154 | 155 |
155 // Ensure old_generation_size_ is a multiple of kPageSize. | 156 // Ensure old_generation_size_ is a multiple of kPageSize. |
156 DCHECK(MB >= Page::kPageSize); | 157 DCHECK(MB >= Page::kPageSize); |
157 | 158 |
(...skipping 566 matching lines...) | |
724 #undef UPDATE_FRAGMENTATION_FOR_SPACE | 725 #undef UPDATE_FRAGMENTATION_FOR_SPACE |
725 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE | 726 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE |
726 | 727 |
727 #ifdef DEBUG | 728 #ifdef DEBUG |
728 ReportStatisticsAfterGC(); | 729 ReportStatisticsAfterGC(); |
729 #endif // DEBUG | 730 #endif // DEBUG |
730 | 731 |
731 // Remember the last top pointer so that we can later find out | 732 // Remember the last top pointer so that we can later find out |
732 // whether we allocated in new space since the last GC. | 733 // whether we allocated in new space since the last GC. |
733 new_space_top_after_last_gc_ = new_space()->top(); | 734 new_space_top_after_last_gc_ = new_space()->top(); |
735 set_promotion_failure(false); | |
734 } | 736 } |
735 | 737 |
736 | 738 |
737 void Heap::CollectAllGarbage(int flags, const char* gc_reason, | 739 void Heap::CollectAllGarbage(int flags, const char* gc_reason, |
738 const v8::GCCallbackFlags gc_callback_flags) { | 740 const v8::GCCallbackFlags gc_callback_flags) { |
739 // Since we are ignoring the return value, the exact choice of space does | 741 // Since we are ignoring the return value, the exact choice of space does |
740 // not matter, so long as we do not specify NEW_SPACE, which would not | 742 // not matter, so long as we do not specify NEW_SPACE, which would not |
741 // cause a full GC. | 743 // cause a full GC. |
742 mark_compact_collector_.SetFlags(flags); | 744 mark_compact_collector_.SetFlags(flags); |
743 CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags); | 745 CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags); |
(...skipping 938 matching lines...) | |
1682 Object** start = &external_string_table_.old_space_strings_[0]; | 1684 Object** start = &external_string_table_.old_space_strings_[0]; |
1683 Object** end = start + external_string_table_.old_space_strings_.length(); | 1685 Object** end = start + external_string_table_.old_space_strings_.length(); |
1684 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1686 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
1685 } | 1687 } |
1686 | 1688 |
1687 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1689 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
1688 } | 1690 } |
1689 | 1691 |
1690 | 1692 |
1691 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { | 1693 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { |
1692 ProcessArrayBuffers(retainer); | 1694 ProcessArrayBuffers(retainer, false); |
1695 ProcessNewArrayBufferViews(retainer); | |
1693 ProcessNativeContexts(retainer); | 1696 ProcessNativeContexts(retainer); |
1694 ProcessAllocationSites(retainer); | 1697 ProcessAllocationSites(retainer); |
1695 } | 1698 } |
1696 | 1699 |
1697 | 1700 |
1698 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { | 1701 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { |
1699 ProcessArrayBuffers(retainer); | 1702 ProcessArrayBuffers(retainer, true); |
1703 ProcessNewArrayBufferViews(retainer); | |
1700 ProcessNativeContexts(retainer); | 1704 ProcessNativeContexts(retainer); |
1701 } | 1705 } |
1702 | 1706 |
1703 | 1707 |
1704 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { | 1708 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { |
1705 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer); | 1709 Object* head = |
1710 VisitWeakList<Context>(this, native_contexts_list(), retainer, false); | |
1706 // Update the head of the list of contexts. | 1711 // Update the head of the list of contexts. |
1707 set_native_contexts_list(head); | 1712 set_native_contexts_list(head); |
1708 } | 1713 } |
1709 | 1714 |
1710 | 1715 |
1711 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) { | 1716 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
1712 Object* array_buffer_obj = | 1717 bool stop_after_young) { |
1713 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer); | 1718 Object* array_buffer_obj = VisitWeakList<JSArrayBuffer>( |
1719 this, array_buffers_list(), retainer, stop_after_young); | |
1714 set_array_buffers_list(array_buffer_obj); | 1720 set_array_buffers_list(array_buffer_obj); |
1715 } | 1721 } |
1716 | 1722 |
1717 | 1723 |
1724 void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) { | |
1725 // Retain the list of new space views. | |
1726 Object* typed_array_obj = VisitWeakList<JSArrayBufferView>( | |
1727 this, new_array_buffer_views_list_, retainer, false); | |
1728 set_new_array_buffer_views_list(typed_array_obj); | |
1729 | |
1730 // Some objects in the list may be in old space now. Find them | |
1731 // and move them to the corresponding array buffer. | |
Hannes Payer (out of office) 2015/02/06 12:21:10
We could do this in one pass with a specialized Visitor.
| |
1732 Object* undefined = undefined_value(); | |
1733 Object* previous = undefined; | |
1734 Object* next; | |
1735 for (Object* o = new_array_buffer_views_list(); o != undefined;) { | |
1736 JSArrayBufferView* view = JSArrayBufferView::cast(o); | |
1737 next = view->weak_next(); | |
1738 if (!InNewSpace(view)) { | |
1739 if (previous == undefined) { | |
1740 // We are at the list head. | |
1741 set_new_array_buffer_views_list(next); | |
1742 } else { | |
1743 // We are in the middle of the list, skip the old space element. | |
1744 JSArrayBufferView::cast(previous)->set_weak_next(next); | |
1745 } | |
1746 JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer()); | |
1747 view->set_weak_next(buffer->weak_first_view()); | |
1748 buffer->set_weak_first_view(view); | |
1749 } else { | |
1750 // We found a valid new space view, remember it. | |
1751 previous = view; | |
1752 } | |
1753 o = next; | |
1754 } | |
1755 } | |
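The reviewer's note above suggests folding the two phases (retaining the live new-space views via VisitWeakList, then re-walking the list to relocate promoted views) into a single pass. A minimal sketch of that idea, assuming WeakObjectRetainer::RetainAs() returns the possibly-moved object or NULL for a dead one (as VisitWeakList relies on); the function name is illustrative and not part of this patch:

static void ProcessNewArrayBufferViewsInOnePass(Heap* heap,
                                                WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  JSArrayBufferView* tail = NULL;
  for (Object* o = heap->new_array_buffer_views_list(); o != undefined;) {
    // Read the next link before retaining; the object may move.
    Object* next = JSArrayBufferView::cast(o)->weak_next();
    Object* retained = retainer->RetainAs(o);
    if (retained != NULL) {
      JSArrayBufferView* view = JSArrayBufferView::cast(retained);
      if (heap->InNewSpace(view)) {
        // Still a new-space view: append it to the rebuilt list.
        if (tail == NULL) {
          head = view;
        } else {
          tail->set_weak_next(view);
        }
        tail = view;
      } else {
        // Promoted to old space: move it onto its buffer's view list.
        JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
        view->set_weak_next(buffer->weak_first_view());
        buffer->set_weak_first_view(view);
      }
    }
    o = next;
  }
  if (tail != NULL) tail->set_weak_next(undefined);
  heap->set_new_array_buffer_views_list(head);
}

A production version would also need the slot-recording that VisitWeakList performs during mark-compact; that bookkeeping is omitted from this sketch.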
1756 | |
1757 | |
1718 void Heap::TearDownArrayBuffers() { | 1758 void Heap::TearDownArrayBuffers() { |
1719 Object* undefined = undefined_value(); | 1759 Object* undefined = undefined_value(); |
1720 for (Object* o = array_buffers_list(); o != undefined;) { | 1760 for (Object* o = array_buffers_list(); o != undefined;) { |
1721 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); | 1761 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); |
1722 Runtime::FreeArrayBuffer(isolate(), buffer); | 1762 Runtime::FreeArrayBuffer(isolate(), buffer); |
1723 o = buffer->weak_next(); | 1763 o = buffer->weak_next(); |
1724 } | 1764 } |
1725 set_array_buffers_list(undefined); | 1765 set_array_buffers_list(undefined); |
1726 } | 1766 } |
1727 | 1767 |
1728 | 1768 |
1729 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { | 1769 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) { |
1730 Object* allocation_site_obj = | 1770 Object* allocation_site_obj = VisitWeakList<AllocationSite>( |
1731 VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer); | 1771 this, allocation_sites_list(), retainer, false); |
1732 set_allocation_sites_list(allocation_site_obj); | 1772 set_allocation_sites_list(allocation_site_obj); |
1733 } | 1773 } |
1734 | 1774 |
1735 | 1775 |
1736 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { | 1776 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { |
1737 DisallowHeapAllocation no_allocation_scope; | 1777 DisallowHeapAllocation no_allocation_scope; |
1738 Object* cur = allocation_sites_list(); | 1778 Object* cur = allocation_sites_list(); |
1739 bool marked = false; | 1779 bool marked = false; |
1740 while (cur->IsAllocationSite()) { | 1780 while (cur->IsAllocationSite()) { |
1741 AllocationSite* casted = AllocationSite::cast(cur); | 1781 AllocationSite* casted = AllocationSite::cast(cur); |
(...skipping 412 matching lines...) | |
2154 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 2194 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { |
2155 return; | 2195 return; |
2156 } | 2196 } |
2157 } | 2197 } |
2158 | 2198 |
2159 if (PromoteObject<object_contents, alignment>(map, slot, object, | 2199 if (PromoteObject<object_contents, alignment>(map, slot, object, |
2160 object_size)) { | 2200 object_size)) { |
2161 return; | 2201 return; |
2162 } | 2202 } |
2163 | 2203 |
2204 heap->set_promotion_failure(true); | |
2164 // If promotion failed, we try to copy the object to the other semi-space | 2205 // If promotion failed, we try to copy the object to the other semi-space |
2165 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; | 2206 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; |
2166 | 2207 |
2167 UNREACHABLE(); | 2208 UNREACHABLE(); |
2168 } | 2209 } |
2169 | 2210 |
2170 | 2211 |
2171 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, | 2212 static inline void EvacuateJSFunction(Map* map, HeapObject** slot, |
2172 HeapObject* object) { | 2213 HeapObject* object) { |
2173 ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< | 2214 ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized< |
(...skipping 3319 matching lines...) | |
5493 // Create initial maps. | 5534 // Create initial maps. |
5494 if (!CreateInitialMaps()) return false; | 5535 if (!CreateInitialMaps()) return false; |
5495 CreateApiObjects(); | 5536 CreateApiObjects(); |
5496 | 5537 |
5497 // Create initial objects | 5538 // Create initial objects |
5498 CreateInitialObjects(); | 5539 CreateInitialObjects(); |
5499 CHECK_EQ(0u, gc_count_); | 5540 CHECK_EQ(0u, gc_count_); |
5500 | 5541 |
5501 set_native_contexts_list(undefined_value()); | 5542 set_native_contexts_list(undefined_value()); |
5502 set_array_buffers_list(undefined_value()); | 5543 set_array_buffers_list(undefined_value()); |
5544 set_new_array_buffer_views_list(undefined_value()); | |
5503 set_allocation_sites_list(undefined_value()); | 5545 set_allocation_sites_list(undefined_value()); |
5504 weak_object_to_code_table_ = undefined_value(); | 5546 weak_object_to_code_table_ = undefined_value(); |
5505 return true; | 5547 return true; |
5506 } | 5548 } |
5507 | 5549 |
5508 | 5550 |
5509 void Heap::SetStackLimits() { | 5551 void Heap::SetStackLimits() { |
5510 DCHECK(isolate_ != NULL); | 5552 DCHECK(isolate_ != NULL); |
5511 DCHECK(isolate_ == isolate()); | 5553 DCHECK(isolate_ == isolate()); |
5512 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5554 // On 64 bit machines, pointers are generally out of range of Smis. We write |
(...skipping 975 matching lines...) | |
6488 static_cast<int>(object_sizes_last_time_[index])); | 6530 static_cast<int>(object_sizes_last_time_[index])); |
6489 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6531 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
6490 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6532 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
6491 | 6533 |
6492 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6534 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
6493 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6535 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
6494 ClearObjectStats(); | 6536 ClearObjectStats(); |
6495 } | 6537 } |
6496 } | 6538 } |
6497 } // namespace v8::internal | 6539 } // namespace v8::internal |