| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 123 matching lines...) |
| 134 full_codegen_bytes_generated_(0), | 134 full_codegen_bytes_generated_(0), |
| 135 crankshaft_codegen_bytes_generated_(0), | 135 crankshaft_codegen_bytes_generated_(0), |
| 136 gcs_since_last_deopt_(0), | 136 gcs_since_last_deopt_(0), |
| 137 allocation_sites_scratchpad_length_(0), | 137 allocation_sites_scratchpad_length_(0), |
| 138 promotion_queue_(this), | 138 promotion_queue_(this), |
| 139 configured_(false), | 139 configured_(false), |
| 140 external_string_table_(this), | 140 external_string_table_(this), |
| 141 chunks_queued_for_free_(NULL), | 141 chunks_queued_for_free_(NULL), |
| 142 gc_callbacks_depth_(0), | 142 gc_callbacks_depth_(0), |
| 143 deserialization_complete_(false), | 143 deserialization_complete_(false), |
| 144 concurrent_sweeping_enabled_(false), | 144 concurrent_sweeping_enabled_(false) { |
| 145 migration_failure_(false), | |
| 146 previous_migration_failure_(false) { | |
| 147 // Allow build-time customization of the max semispace size. Building | 145 // Allow build-time customization of the max semispace size. Building |
| 148 // V8 with snapshots and a non-default max semispace size is much | 146 // V8 with snapshots and a non-default max semispace size is much |
| 149 // easier if you can define it as part of the build environment. | 147 // easier if you can define it as part of the build environment. |
| 150 #if defined(V8_MAX_SEMISPACE_SIZE) | 148 #if defined(V8_MAX_SEMISPACE_SIZE) |
| 151 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
| 152 #endif | 150 #endif |
| 153 | 151 |
| 154 // Ensure old_generation_size_ is a multiple of kPageSize. | 152 // Ensure old_generation_size_ is a multiple of kPageSize. |
| 155 DCHECK(MB >= Page::kPageSize); | 153 DCHECK(MB >= Page::kPageSize); |
| 156 | 154 |
| (...skipping 541 matching lines...) |
| 698 #undef UPDATE_FRAGMENTATION_FOR_SPACE | 696 #undef UPDATE_FRAGMENTATION_FOR_SPACE |
| 699 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE | 697 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE |
| 700 | 698 |
| 701 #ifdef DEBUG | 699 #ifdef DEBUG |
| 702 ReportStatisticsAfterGC(); | 700 ReportStatisticsAfterGC(); |
| 703 #endif // DEBUG | 701 #endif // DEBUG |
| 704 | 702 |
| 705 // Remember the last top pointer so that we can later find out | 703 // Remember the last top pointer so that we can later find out |
| 706 // whether we allocated in new space since the last GC. | 704 // whether we allocated in new space since the last GC. |
| 707 new_space_top_after_last_gc_ = new_space()->top(); | 705 new_space_top_after_last_gc_ = new_space()->top(); |
| 708 | |
| 709 if (migration_failure_) { | |
| 710 set_previous_migration_failure(true); | |
| 711 } else { | |
| 712 set_previous_migration_failure(false); | |
| 713 } | |
| 714 set_migration_failure(false); | |
| 715 } | 706 } |
| 716 | 707 |
| 717 | 708 |
| 718 void Heap::HandleGCRequest() { | 709 void Heap::HandleGCRequest() { |
| 719 if (incremental_marking()->request_type() == | 710 if (incremental_marking()->request_type() == |
| 720 IncrementalMarking::COMPLETE_MARKING) { | 711 IncrementalMarking::COMPLETE_MARKING) { |
| 721 CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt"); | 712 CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt"); |
| 722 return; | 713 return; |
| 723 } | 714 } |
| 724 DCHECK(FLAG_overapproximate_weak_closure); | 715 DCHECK(FLAG_overapproximate_weak_closure); |
| (...skipping 955 matching lines...) |
| 1680 Object** end = start + external_string_table_.old_space_strings_.length(); | 1671 Object** end = start + external_string_table_.old_space_strings_.length(); |
| 1681 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); | 1672 for (Object** p = start; p < end; ++p) *p = updater_func(this, p); |
| 1682 } | 1673 } |
| 1683 | 1674 |
| 1684 UpdateNewSpaceReferencesInExternalStringTable(updater_func); | 1675 UpdateNewSpaceReferencesInExternalStringTable(updater_func); |
| 1685 } | 1676 } |
| 1686 | 1677 |
| 1687 | 1678 |
| 1688 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { | 1679 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) { |
| 1689 ProcessArrayBuffers(retainer, false); | 1680 ProcessArrayBuffers(retainer, false); |
| 1690 ProcessNewArrayBufferViews(retainer); | |
| 1691 ProcessNativeContexts(retainer); | 1681 ProcessNativeContexts(retainer); |
| 1692 ProcessAllocationSites(retainer); | 1682 ProcessAllocationSites(retainer); |
| 1693 } | 1683 } |
| 1694 | 1684 |
| 1695 | 1685 |
| 1696 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { | 1686 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) { |
| 1697 ProcessArrayBuffers(retainer, true); | 1687 ProcessArrayBuffers(retainer, true); |
| 1698 ProcessNewArrayBufferViews(retainer); | |
| 1699 ProcessNativeContexts(retainer); | 1688 ProcessNativeContexts(retainer); |
| 1700 } | 1689 } |
| 1701 | 1690 |
| 1702 | 1691 |
| 1703 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { | 1692 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) { |
| 1704 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer, | 1693 Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer, |
| 1705 false, NULL); | 1694 false, NULL); |
| 1706 // Update the head of the list of contexts. | 1695 // Update the head of the list of contexts. |
| 1707 set_native_contexts_list(head); | 1696 set_native_contexts_list(head); |
| 1708 } | 1697 } |
| 1709 | 1698 |
| 1710 | 1699 |
| 1711 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1700 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
| 1712 bool stop_after_young) { | 1701 bool stop_after_young) { |
| 1713 Object* last_array_buffer = undefined_value(); | 1702 Object* last_array_buffer = undefined_value(); |
| 1714 Object* array_buffer_obj = | 1703 Object* array_buffer_obj = |
| 1715 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer, | 1704 VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer, |
| 1716 stop_after_young, &last_array_buffer); | 1705 stop_after_young, &last_array_buffer); |
| 1717 set_array_buffers_list(array_buffer_obj); | 1706 set_array_buffers_list(array_buffer_obj); |
| 1718 set_last_array_buffer_in_list(last_array_buffer); | 1707 set_last_array_buffer_in_list(last_array_buffer); |
| 1719 | 1708 |
| 1720 // Verify invariant that young array buffers come before old array buffers | 1709 // Verify invariant that young array buffers come before old array buffers |
| 1721 // in array buffers list if there was no promotion failure. | 1710 // in array buffers list if there was no promotion failure. |
| 1722 Object* undefined = undefined_value(); | 1711 Object* undefined = undefined_value(); |
| 1723 Object* next = array_buffers_list(); | 1712 Object* next = array_buffers_list(); |
| 1724 bool old_objects_recorded = false; | 1713 bool old_objects_recorded = false; |
| 1725 if (migration_failure()) return; | |
| 1726 while (next != undefined) { | 1714 while (next != undefined) { |
| 1727 if (!old_objects_recorded) { | 1715 if (!old_objects_recorded) { |
| 1728 old_objects_recorded = !InNewSpace(next); | 1716 old_objects_recorded = !InNewSpace(next); |
| 1729 } | 1717 } |
| 1730 CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next)); | 1718 CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next)); |
| 1731 next = JSArrayBuffer::cast(next)->weak_next(); | 1719 next = JSArrayBuffer::cast(next)->weak_next(); |
| 1732 } | 1720 } |
| 1733 } | 1721 } |
| 1734 | 1722 |
| 1735 | 1723 |
| 1736 void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) { | |
| 1737 // Retain the list of new space views. | |
| 1738 Object* typed_array_obj = VisitWeakList<JSArrayBufferView>( | |
| 1739 this, new_array_buffer_views_list_, retainer, false, NULL); | |
| 1740 set_new_array_buffer_views_list(typed_array_obj); | |
| 1741 | |
| 1742 // Some objects in the list may be in old space now. Find them | |
| 1743 // and move them to the corresponding array buffer. | |
| 1744 Object* view = VisitNewArrayBufferViewsWeakList( | |
| 1745 this, new_array_buffer_views_list_, retainer); | |
| 1746 set_new_array_buffer_views_list(view); | |
| 1747 } | |
| 1748 | |
| 1749 | |
| 1750 void Heap::TearDownArrayBuffers() { | 1724 void Heap::TearDownArrayBuffers() { |
| 1751 Object* undefined = undefined_value(); | 1725 Object* undefined = undefined_value(); |
| 1752 for (Object* o = array_buffers_list(); o != undefined;) { | 1726 for (Object* o = array_buffers_list(); o != undefined;) { |
| 1753 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); | 1727 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); |
| 1754 Runtime::FreeArrayBuffer(isolate(), buffer); | 1728 Runtime::FreeArrayBuffer(isolate(), buffer); |
| 1755 o = buffer->weak_next(); | 1729 o = buffer->weak_next(); |
| 1756 } | 1730 } |
| 1757 set_array_buffers_list(undefined); | 1731 set_array_buffers_list(undefined); |
| 1758 } | 1732 } |
| 1759 | 1733 |
| (...skipping 411 matching lines...) |
| 2171 SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 2145 SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
| 2172 SLOW_DCHECK(object->Size() == object_size); | 2146 SLOW_DCHECK(object->Size() == object_size); |
| 2173 Heap* heap = map->GetHeap(); | 2147 Heap* heap = map->GetHeap(); |
| 2174 | 2148 |
| 2175 if (!heap->ShouldBePromoted(object->address(), object_size)) { | 2149 if (!heap->ShouldBePromoted(object->address(), object_size)) { |
| 2176 // A semi-space copy may fail due to fragmentation. In that case, we | 2150 // A semi-space copy may fail due to fragmentation. In that case, we |
| 2177 // try to promote the object. | 2151 // try to promote the object. |
| 2178 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 2152 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { |
| 2179 return; | 2153 return; |
| 2180 } | 2154 } |
| 2181 heap->set_migration_failure(true); | |
| 2182 } | 2155 } |
| 2183 | 2156 |
| 2184 if (PromoteObject<object_contents, alignment>(map, slot, object, | 2157 if (PromoteObject<object_contents, alignment>(map, slot, object, |
| 2185 object_size)) { | 2158 object_size)) { |
| 2186 return; | 2159 return; |
| 2187 } | 2160 } |
| 2188 | 2161 |
| 2189 // If promotion failed, we try to copy the object to the other semi-space | 2162 // If promotion failed, we try to copy the object to the other semi-space |
| 2190 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; | 2163 if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return; |
| 2191 | 2164 |
| (...skipping 3204 matching lines...) |
| 5396 if (!CreateInitialMaps()) return false; | 5369 if (!CreateInitialMaps()) return false; |
| 5397 CreateApiObjects(); | 5370 CreateApiObjects(); |
| 5398 | 5371 |
| 5399 // Create initial objects | 5372 // Create initial objects |
| 5400 CreateInitialObjects(); | 5373 CreateInitialObjects(); |
| 5401 CHECK_EQ(0u, gc_count_); | 5374 CHECK_EQ(0u, gc_count_); |
| 5402 | 5375 |
| 5403 set_native_contexts_list(undefined_value()); | 5376 set_native_contexts_list(undefined_value()); |
| 5404 set_array_buffers_list(undefined_value()); | 5377 set_array_buffers_list(undefined_value()); |
| 5405 set_last_array_buffer_in_list(undefined_value()); | 5378 set_last_array_buffer_in_list(undefined_value()); |
| 5406 set_new_array_buffer_views_list(undefined_value()); | |
| 5407 set_allocation_sites_list(undefined_value()); | 5379 set_allocation_sites_list(undefined_value()); |
| 5408 return true; | 5380 return true; |
| 5409 } | 5381 } |
| 5410 | 5382 |
| 5411 | 5383 |
| 5412 void Heap::SetStackLimits() { | 5384 void Heap::SetStackLimits() { |
| 5413 DCHECK(isolate_ != NULL); | 5385 DCHECK(isolate_ != NULL); |
| 5414 DCHECK(isolate_ == isolate()); | 5386 DCHECK(isolate_ == isolate()); |
| 5415 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5387 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 5416 // something that looks like an out of range Smi to the GC. | 5388 // something that looks like an out of range Smi to the GC. |
| (...skipping 992 matching lines...) |
| 6409 static_cast<int>(object_sizes_last_time_[index])); | 6381 static_cast<int>(object_sizes_last_time_[index])); |
| 6410 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6382 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6411 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6383 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6412 | 6384 |
| 6413 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6385 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6414 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6386 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6415 ClearObjectStats(); | 6387 ClearObjectStats(); |
| 6416 } | 6388 } |
| 6417 } | 6389 } |
| 6418 } // namespace v8::internal | 6390 } // namespace v8::internal |