OLD | NEW |
(Empty) | |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #ifndef HEAP_UTILS_H_ |
| 6 #define HEAP_UTILS_H_ |
| 7 |
| 8 #include "src/factory.h" |
| 9 #include "src/heap/heap-inl.h" |
| 10 #include "src/heap/incremental-marking.h" |
| 11 #include "src/heap/mark-compact.h" |
| 12 #include "src/isolate.h" |
| 13 |
| 14 |
| 15 namespace v8 { |
| 16 namespace internal { |
| 17 |
| 18 static int LenFromSize(int size) { |
| 19 return (size - i::FixedArray::kHeaderSize) / i::kPointerSize; |
| 20 } |
| 21 |
| 22 |
| 23 static inline void CreatePadding(i::Heap* heap, int padding_size, |
| 24 i::PretenureFlag tenure) { |
| 25 const int max_number_of_objects = 20; |
| 26 v8::internal::Handle<v8::internal::FixedArray> |
| 27 big_objects[max_number_of_objects]; |
| 28 i::Isolate* isolate = heap->isolate(); |
| 29 int allocate_memory; |
| 30 int length; |
| 31 int free_memory = padding_size; |
| 32 if (tenure == i::TENURED) { |
| 33 int current_free_memory = |
| 34 static_cast<int>(*heap->old_space()->allocation_limit_address() - |
| 35 *heap->old_space()->allocation_top_address()); |
| 36 CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
| 37 } else { |
| 38 heap->new_space()->DisableInlineAllocationSteps(); |
| 39 int current_free_memory = |
| 40 static_cast<int>(*heap->new_space()->allocation_limit_address() - |
| 41 *heap->new_space()->allocation_top_address()); |
| 42 CHECK(padding_size <= current_free_memory || current_free_memory == 0); |
| 43 } |
| 44 for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { |
| 45 if (free_memory > i::Page::kMaxRegularHeapObjectSize) { |
| 46 allocate_memory = i::Page::kMaxRegularHeapObjectSize; |
| 47 length = LenFromSize(allocate_memory); |
| 48 } else { |
| 49 allocate_memory = free_memory; |
| 50 length = LenFromSize(allocate_memory); |
| 51 if (length <= 0) { |
| 52 // Not enough room to create another fixed array. Let's create a filler. |
| 53 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), |
| 54 free_memory); |
| 55 break; |
| 56 } |
| 57 } |
| 58 big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); |
| 59 CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || |
| 60 (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); |
| 61 free_memory -= allocate_memory; |
| 62 } |
| 63 } |
| 64 |
| 65 |
| 66 // Helper function that fills up one page of the new space with padding. |
| 67 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { |
| 68 space->DisableInlineAllocationSteps(); |
| 69 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
| 70 *space->allocation_top_address()); |
| 71 if (space_remaining == 0) return false; |
| 72 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); |
| 73 return true; |
| 74 } |
| 75 |
| 76 |
| 77 // Helper function that fills the new-space linear allocation area, leaving |
| 78 // extra_bytes of it free. |
| 79 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, |
| 80 int extra_bytes) { |
| 81 space->DisableInlineAllocationSteps(); |
| 82 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
| 83 *space->allocation_top_address()); |
| 84 CHECK(space_remaining >= extra_bytes); |
| 85 int new_linear_size = space_remaining - extra_bytes; |
| 86 if (new_linear_size == 0) return; |
| 87 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); |
| 88 } |
| 88 |
| 89 |
| 90 static inline void FillCurrentPage(v8::internal::NewSpace* space) { |
| 91 AllocateAllButNBytes(space, 0); |
| 92 } |
| 93 |
| 94 |
| 95 static inline void SimulateFullSpace(v8::internal::NewSpace* space) { |
| 96 FillCurrentPage(space); |
| 97 while (FillUpOnePage(space)) { |
| 98 } |
| 99 } |
| 100 |
| 101 |
| 102 // Helper function that simulates a full old-space in the heap. |
| 103 static inline void SimulateFullSpace(v8::internal::PagedSpace* space) { |
| 104 space->EmptyAllocationInfo(); |
| 105 space->ResetFreeList(); |
| 106 space->ClearStats(); |
| 107 } |
| 108 |
| 109 |
| 110 // Helper function that simulates many incremental marking steps until |
| 111 // marking is completed. |
| 112 static inline void SimulateIncrementalMarking(i::Heap* heap, |
| 113 bool force_completion = true) { |
| 114 i::MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 115 i::IncrementalMarking* marking = heap->incremental_marking(); |
| 116 if (collector->sweeping_in_progress()) { |
| 117 collector->EnsureSweepingCompleted(); |
| 118 } |
| 119 CHECK(marking->IsMarking() || marking->IsStopped()); |
| 120 if (marking->IsStopped()) { |
| 121 heap->StartIncrementalMarking(); |
| 122 } |
| 123 CHECK(marking->IsMarking()); |
| 124 if (!force_completion) return; |
| 125 |
| 126 while (!marking->IsComplete()) { |
| 127 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD); |
| 128 if (marking->IsReadyToOverApproximateWeakClosure()) { |
| 129 marking->FinalizeIncrementally(); |
| 130 } |
| 131 } |
| 132 CHECK(marking->IsComplete()); |
| 133 } |
| 134 |
| 135 } // namespace internal |
| 136 } // namespace v8 |
| 137 |
| 138 #endif // HEAP_UTILS_H_ |
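
For reference, a minimal usage sketch of how a heap cctest might call these helpers, assuming the standard cctest harness (CcTest::InitializeVM(), CcTest::heap(), the TEST macro, and the "namespace i = v8::internal" alias from test/cctest/cctest.h). The include path for this header and the test name are assumptions; the body is illustrative only, not part of this change:

// Sketch only. Assumes the cctest harness is available and that it is
// included before this header so the "i" namespace alias resolves.
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"  // include path assumed for this header

TEST(HeapUtilsUsageSketch) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  i::Heap* heap = CcTest::heap();

  // Exhaust the new-space linear allocation area so the next new-space
  // allocation has to trigger a scavenge.
  i::SimulateFullSpace(heap->new_space());

  // Pretend the old space is full as well, then drive incremental marking
  // all the way to completion.
  i::SimulateFullSpace(heap->old_space());
  i::SimulateIncrementalMarking(heap, true);
}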