| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 123 matching lines...) | |
| 134 full_codegen_bytes_generated_(0), | 134 full_codegen_bytes_generated_(0), |
| 135 crankshaft_codegen_bytes_generated_(0), | 135 crankshaft_codegen_bytes_generated_(0), |
| 136 gcs_since_last_deopt_(0), | 136 gcs_since_last_deopt_(0), |
| 137 allocation_sites_scratchpad_length_(0), | 137 allocation_sites_scratchpad_length_(0), |
| 138 promotion_queue_(this), | 138 promotion_queue_(this), |
| 139 configured_(false), | 139 configured_(false), |
| 140 external_string_table_(this), | 140 external_string_table_(this), |
| 141 chunks_queued_for_free_(NULL), | 141 chunks_queued_for_free_(NULL), |
| 142 gc_callbacks_depth_(0), | 142 gc_callbacks_depth_(0), |
| 143 deserialization_complete_(false), | 143 deserialization_complete_(false), |
| 144 concurrent_sweeping_enabled_(false), | 144 concurrent_sweeping_enabled_(false) { |
| 145 migration_failure_(false), | |
| 146 previous_migration_failure_(false) { | |
| 147 // Allow build-time customization of the max semispace size. Building | 145 // Allow build-time customization of the max semispace size. Building |
| 148 // V8 with snapshots and a non-default max semispace size is much | 146 // V8 with snapshots and a non-default max semispace size is much |
| 149 // easier if you can define it as part of the build environment. | 147 // easier if you can define it as part of the build environment. |
| 150 #if defined(V8_MAX_SEMISPACE_SIZE) | 148 #if defined(V8_MAX_SEMISPACE_SIZE) |
| 151 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
| 152 #endif | 150 #endif |
| 153 | 151 |
| 154 // Ensure old_generation_size_ is a multiple of kPageSize. | 152 // Ensure old_generation_size_ is a multiple of kPageSize. |
| 155 DCHECK(MB >= Page::kPageSize); | 153 DCHECK(MB >= Page::kPageSize); |
| 156 | 154 |
| (...skipping 541 matching lines...) | |
| 698 #undef UPDATE_FRAGMENTATION_FOR_SPACE | 696 #undef UPDATE_FRAGMENTATION_FOR_SPACE |
| 699 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE | 697 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE |
| 700 | 698 |
| 701 #ifdef DEBUG | 699 #ifdef DEBUG |
| 702 ReportStatisticsAfterGC(); | 700 ReportStatisticsAfterGC(); |
| 703 #endif // DEBUG | 701 #endif // DEBUG |
| 704 | 702 |
| 705 // Remember the last top pointer so that we can later find out | 703 // Remember the last top pointer so that we can later find out |
| 706 // whether we allocated in new space since the last GC. | 704 // whether we allocated in new space since the last GC. |
| 707 new_space_top_after_last_gc_ = new_space()->top(); | 705 new_space_top_after_last_gc_ = new_space()->top(); |
| 708 | |
| 709 if (migration_failure_) { | |
| 710 set_previous_migration_failure(true); | |
| 711 } else { | |
| 712 set_previous_migration_failure(false); | |
| 713 } | |
| 714 set_migration_failure(false); | |
| 715 } | 706 } |
| 716 | 707 |
| 717 | 708 |
| 718 void Heap::HandleGCRequest() { | 709 void Heap::HandleGCRequest() { |
| 719 if (incremental_marking()->request_type() == | 710 if (incremental_marking()->request_type() == |
| 720 IncrementalMarking::COMPLETE_MARKING) { | 711 IncrementalMarking::COMPLETE_MARKING) { |
| 721 CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt"); | 712 CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt"); |
| 722 return; | 713 return; |
| 723 } | 714 } |
| 724 DCHECK(FLAG_overapproximate_weak_closure); | 715 DCHECK(FLAG_overapproximate_weak_closure); |
| (...skipping 954 matching lines...) | |
| 1679 Object** end = start + external_string_table_.old_space_strings_.length(); | 1670 Object** end = start + external_string_table_.old_space_strings_.length(); |
| 1680 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1671 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
| 1681 } | 1672 } |
| 1682 | 1673 |
| 1683 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1674 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
| 1684 } | 1675 } |
| 1685 | 1676 |
| 1686 | 1677 |
| 1687 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { | 1678 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { |
| 1688 ProcessArrayBuffers(retainer, false); | 1679 ProcessArrayBuffers(retainer, false); |
| 1689 ProcessNewArrayBufferViews(retainer); | |
| 1690 ProcessNativeContexts(retainer); | 1680 ProcessNativeContexts(retainer); |
| 1691 ProcessAllocationSites(retainer); | 1681 ProcessAllocationSites(retainer); |
| 1692 } | 1682 } |
| 1693 | 1683 |
| 1694 | 1684 |
| 1695 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { | 1685 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { |
| 1696 ProcessArrayBuffers(retainer, true); | 1686 ProcessArrayBuffers(retainer, true); |
| 1697 ProcessNewArrayBufferViews(retainer); | |
| 1698 ProcessNativeContexts(retainer); | 1687 ProcessNativeContexts(retainer); |
| 1699 } | 1688 } |
| 1700 | 1689 |
| 1701 | 1690 |
| 1702 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { | 1691 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { |
| 1703 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer, | 1692 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer, |
| 1704 false, NULL); | 1693 false, NULL); |
| 1705 // Update the head of the list of contexts. | 1694 // Update the head of the list of contexts. |
| 1706 set_native_contexts_list(head); | 1695 set_native_contexts_list(head); |
| 1707 } | 1696 } |
| 1708 | 1697 |
| 1709 | 1698 |
| 1710 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1699 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
| 1711 bool stop_after_young) { | 1700 bool stop_after_young) { |
| 1712 Object* last_array_buffer = undefined_value(); | 1701 Object* last_array_buffer = undefined_value(); |
| 1713 Object* array_buffer_obj = | 1702 Object* array_buffer_obj = |
| 1714 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer, | 1703 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer, |
| 1715 stop_after_young, &last_array_buffer); | 1704 stop_after_young, &last_array_buffer); |
| 1716 set_array_buffers_list(array_buffer_obj); | 1705 set_array_buffers_list(array_buffer_obj); |
| 1717 set_last_array_buffer_in_list(last_array_buffer); | 1706 set_last_array_buffer_in_list(last_array_buffer); |
| 1718 | 1707 |
| 1719 // Verify invariant that young array buffers come before old array buffers | 1708 // Verify invariant that young array buffers come before old array buffers |
| 1720 // in array buffers list if there was no promotion failure. | 1709 // in array buffers list if there was no promotion failure. |
| 1721 Object* undefined = undefined_value(); | 1710 Object* undefined = undefined_value(); |
| 1722 Object* next = array_buffers_list(); | 1711 Object* next = array_buffers_list(); |
| 1723 bool old_objects_recorded = false; | 1712 bool old_objects_recorded = false; |
| 1724 if (migration_failure()) return; | |
| 1725 while (next != undefined) { | 1713 while (next != undefined) { |
| 1726 if (!old_objects_recorded) { | 1714 if (!old_objects_recorded) { |
| 1727 old_objects_recorded = !InNewSpace(next); | 1715 old_objects_recorded = !InNewSpace(next); |
| 1728 } | 1716 } |
| 1729 CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next)); | 1717 CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next)); |
| 1730 next = JSArrayBuffer::cast(next)->weak_next(); | 1718 next = JSArrayBuffer::cast(next)->weak_next(); |
| 1731 } | 1719 } |
| 1732 } | 1720 } |
| 1733 | 1721 |
| 1734 | 1722 |
| 1735 void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) { | |
| 1736 // Retain the list of new space views. | |
| 1737 Object* typed_array_obj = VisitWeakList<JSArrayBufferView>( | |
| 1738 this, new_array_buffer_views_list_, retainer, false, NULL); | |
| 1739 set_new_array_buffer_views_list(typed_array_obj); | |
| 1740 | |
| 1741 // Some objects in the list may be in old space now. Find them | |
| 1742 // and move them to the corresponding array buffer. | |
| 1743 Object* view = VisitNewArrayBufferViewsWeakList( | |
| 1744 this, new_array_buffer_views_list_, retainer); | |
| 1745 set_new_array_buffer_views_list(view); | |
| 1746 } | |
| 1747 | |
| 1748 | |
| 1749 void Heap::TearDownArrayBuffers() { | 1723 void Heap::TearDownArrayBuffers() { |
| 1750 Object* undefined = undefined_value(); | 1724 Object* undefined = undefined_value(); |
| 1751 for (Object* o = array_buffers_list(); o != undefined;) { | 1725 for (Object* o = array_buffers_list(); o != undefined;) { |
| 1752 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); | 1726 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); |
| 1753 Runtime::FreeArrayBuffer(isolate(), buffer); | 1727 Runtime::FreeArrayBuffer(isolate(), buffer); |
| 1754 o = buffer->weak_next(); | 1728 o = buffer->weak_next(); |
| 1755 } | 1729 } |
| 1756 set_array_buffers_list(undefined); | 1730 set_array_buffers_list(undefined); |
| 1757 } | 1731 } |
| 1758 | 1732 |
| (...skipping 411 matching lines...) | |
| 2170 SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 2144 SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
| 2171 SLOW_DCHECK(object->Size() == object_size); | 2145 SLOW_DCHECK(object->Size() == object_size); |
| 2172 Heap* heap = map->GetHeap(); | 2146 Heap* heap = map->GetHeap(); |
| 2173 | 2147 |
| 2174 if (!heap->ShouldBePromoted(object->address(), object_size)) { | 2148 if (!heap->ShouldBePromoted(object->address(), object_size)) { |
| 2175 // A semi-space copy may fail due to fragmentation. In that case, we | 2149 // A semi-space copy may fail due to fragmentation. In that case, we |
| 2176 // try to promote the object. | 2150 // try to promote the object. |
| 2177 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 2151 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { |
| 2178 return; | 2152 return; |
| 2179 } | 2153 } |
| 2180 heap->set_migration_failure(true); | |
| 2181 } | 2154 } |
| 2182 | 2155 |
| 2183 if (PromoteObject<object_contents, alignment>(map, slot, object, | 2156 if (PromoteObject<object_contents, alignment>(map, slot, object, |
| 2184 object_size)) { | 2157 object_size)) { |
| 2185 return; | 2158 return; |
| 2186 } | 2159 } |
| 2187 | 2160 |
| 2188 // If promotion failed, we try to copy the object to the other semi-space | 2161 // If promotion failed, we try to copy the object to the other semi-space |
| 2189 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; | 2162 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; |
| 2190 | 2163 |
| (...skipping 3200 matching lines...) | |
| 5391 if (!CreateInitialMaps()) return false; | 5364 if (!CreateInitialMaps()) return false; |
| 5392 CreateApiObjects(); | 5365 CreateApiObjects(); |
| 5393 | 5366 |
| 5394 // Create initial objects | 5367 // Create initial objects |
| 5395 CreateInitialObjects(); | 5368 CreateInitialObjects(); |
| 5396 CHECK_EQ(0u, gc_count_); | 5369 CHECK_EQ(0u, gc_count_); |
| 5397 | 5370 |
| 5398 set_native_contexts_list(undefined_value()); | 5371 set_native_contexts_list(undefined_value()); |
| 5399 set_array_buffers_list(undefined_value()); | 5372 set_array_buffers_list(undefined_value()); |
| 5400 set_last_array_buffer_in_list(undefined_value()); | 5373 set_last_array_buffer_in_list(undefined_value()); |
| 5401 set_new_array_buffer_views_list(undefined_value()); | |
| 5402 set_allocation_sites_list(undefined_value()); | 5374 set_allocation_sites_list(undefined_value()); |
| 5403 return true; | 5375 return true; |
| 5404 } | 5376 } |
| 5405 | 5377 |
| 5406 | 5378 |
| 5407 void Heap::SetStackLimits() { | 5379 void Heap::SetStackLimits() { |
| 5408 DCHECK(isolate_ != NULL); | 5380 DCHECK(isolate_ != NULL); |
| 5409 DCHECK(isolate_ == isolate()); | 5381 DCHECK(isolate_ == isolate()); |
| 5410 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5382 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 5411 // something that looks like an out of range Smi to the GC. | 5383 // something that looks like an out of range Smi to the GC. |
| (...skipping 992 matching lines...) | |
| 6404 static_cast<int>(object_sizes_last_time_[index])); | 6376 static_cast<int>(object_sizes_last_time_[index])); |
| 6405 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6377 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6406 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6378 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6407 | 6379 |
| 6408 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6380 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6409 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6381 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6410 ClearObjectStats(); | 6382 ClearObjectStats(); |
| 6411 } | 6383 } |
| 6412 } | 6384 } |
| 6413 } // namespace v8::internal | 6385 } // namespace v8::internal |