OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef HEAP_UTILS_H_ | 5 #include "test/cctest/heap/heap-utils.h" |
6 #define HEAP_UTILS_H_ | |
7 | 6 |
8 #include "src/factory.h" | 7 #include "src/factory.h" |
9 #include "src/heap/heap-inl.h" | 8 #include "src/heap/heap-inl.h" |
10 #include "src/heap/incremental-marking.h" | 9 #include "src/heap/incremental-marking.h" |
11 #include "src/heap/mark-compact.h" | 10 #include "src/heap/mark-compact.h" |
12 #include "src/isolate.h" | 11 #include "src/isolate.h" |
13 | 12 |
14 | |
15 namespace v8 { | 13 namespace v8 { |
16 namespace internal { | 14 namespace internal { |
| 15 namespace heap { |
17 | 16 |
18 static int LenFromSize(int size) { | 17 void SealCurrentObjects(Heap* heap) { |
| 18 heap->CollectAllGarbage(); |
| 19 heap->CollectAllGarbage(); |
| 20 heap->mark_compact_collector()->EnsureSweepingCompleted(); |
| 21 PageIterator it(heap->old_space()); |
| 22 heap->old_space()->EmptyAllocationInfo(); |
| 23 while (it.has_next()) { |
| 24 Page* page = it.next(); |
| 25 page->MarkNeverAllocateForTesting(); |
| 26 } |
| 27 } |
| 28 |
| 29 int FixedArrayLenFromSize(int size) { |
19 return (size - FixedArray::kHeaderSize) / kPointerSize; | 30 return (size - FixedArray::kHeaderSize) / kPointerSize; |
20 } | 31 } |
21 | 32 |
22 | 33 std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size, |
23 static inline std::vector<Handle<FixedArray>> CreatePadding( | 34 PretenureFlag tenure, |
24 Heap* heap, int padding_size, PretenureFlag tenure, | 35 int object_size) { |
25 int object_size = Page::kMaxRegularHeapObjectSize) { | |
26 std::vector<Handle<FixedArray>> handles; | 36 std::vector<Handle<FixedArray>> handles; |
27 Isolate* isolate = heap->isolate(); | 37 Isolate* isolate = heap->isolate(); |
28 int allocate_memory; | 38 int allocate_memory; |
29 int length; | 39 int length; |
30 int free_memory = padding_size; | 40 int free_memory = padding_size; |
31 if (tenure == i::TENURED) { | 41 if (tenure == i::TENURED) { |
32 heap->old_space()->EmptyAllocationInfo(); | 42 heap->old_space()->EmptyAllocationInfo(); |
33 int overall_free_memory = static_cast<int>(heap->old_space()->Available()); | 43 int overall_free_memory = static_cast<int>(heap->old_space()->Available()); |
34 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0); | 44 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0); |
35 } else { | 45 } else { |
36 heap->new_space()->DisableInlineAllocationSteps(); | 46 heap->new_space()->DisableInlineAllocationSteps(); |
37 int overall_free_memory = | 47 int overall_free_memory = |
38 static_cast<int>(*heap->new_space()->allocation_limit_address() - | 48 static_cast<int>(*heap->new_space()->allocation_limit_address() - |
39 *heap->new_space()->allocation_top_address()); | 49 *heap->new_space()->allocation_top_address()); |
40 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0); | 50 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0); |
41 } | 51 } |
42 while (free_memory > 0) { | 52 while (free_memory > 0) { |
43 if (free_memory > object_size) { | 53 if (free_memory > object_size) { |
44 allocate_memory = object_size; | 54 allocate_memory = object_size; |
45 length = LenFromSize(allocate_memory); | 55 length = FixedArrayLenFromSize(allocate_memory); |
46 } else { | 56 } else { |
47 allocate_memory = free_memory; | 57 allocate_memory = free_memory; |
48 length = LenFromSize(allocate_memory); | 58 length = FixedArrayLenFromSize(allocate_memory); |
49 if (length <= 0) { | 59 if (length <= 0) { |
50 // Not enough room to create another fixed array. Let's create a filler. | 60 // Not enough room to create another fixed array. Let's create a filler. |
51 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), | 61 if (free_memory > (2 * kPointerSize)) { |
52 free_memory, ClearRecordedSlots::kNo); | 62 heap->CreateFillerObjectAt( |
| 63 *heap->old_space()->allocation_top_address(), free_memory, |
| 64 ClearRecordedSlots::kNo); |
| 65 } |
53 break; | 66 break; |
54 } | 67 } |
55 } | 68 } |
56 handles.push_back(isolate->factory()->NewFixedArray(length, tenure)); | 69 handles.push_back(isolate->factory()->NewFixedArray(length, tenure)); |
57 CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) || | 70 CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) || |
58 (tenure == TENURED && heap->InOldSpace(*handles.back()))); | 71 (tenure == TENURED && heap->InOldSpace(*handles.back()))); |
59 free_memory -= allocate_memory; | 72 free_memory -= allocate_memory; |
60 } | 73 } |
61 return handles; | 74 return handles; |
62 } | 75 } |
63 | 76 |
64 | 77 void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes, |
65 // Helper function that simulates a full new-space in the heap. | 78 std::vector<Handle<FixedArray>>* out_handles) { |
66 static inline bool FillUpOnePage( | |
67 v8::internal::NewSpace* space, | |
68 std::vector<Handle<FixedArray>>* out_handles = nullptr) { | |
69 space->DisableInlineAllocationSteps(); | |
70 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | |
71 *space->allocation_top_address()); | |
72 if (space_remaining == 0) return false; | |
73 std::vector<Handle<FixedArray>> handles = | |
74 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); | |
75 if (out_handles != nullptr) | |
76 out_handles->insert(out_handles->end(), handles.begin(), handles.end()); | |
77 return true; | |
78 } | |
79 | |
80 | |
81 // Helper function that simulates a full new-space in the heap. | 
82 static inline void AllocateAllButNBytes( | |
83 v8::internal::NewSpace* space, int extra_bytes, | |
84 std::vector<Handle<FixedArray>>* out_handles = nullptr) { | |
85 space->DisableInlineAllocationSteps(); | 79 space->DisableInlineAllocationSteps(); |
86 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | 80 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
87 *space->allocation_top_address()); | 81 *space->allocation_top_address()); |
88 CHECK(space_remaining >= extra_bytes); | 82 CHECK(space_remaining >= extra_bytes); |
89 int new_linear_size = space_remaining - extra_bytes; | 83 int new_linear_size = space_remaining - extra_bytes; |
90 if (new_linear_size == 0) return; | 84 if (new_linear_size == 0) return; |
91 std::vector<Handle<FixedArray>> handles = | 85 std::vector<Handle<FixedArray>> handles = |
92 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); | 86 heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); |
93 if (out_handles != nullptr) | 87 if (out_handles != nullptr) |
94 out_handles->insert(out_handles->end(), handles.begin(), handles.end()); | 88 out_handles->insert(out_handles->end(), handles.begin(), handles.end()); |
95 } | 89 } |
96 | 90 |
97 static inline void FillCurrentPage( | 91 void FillCurrentPage(v8::internal::NewSpace* space, |
98 v8::internal::NewSpace* space, | 92 std::vector<Handle<FixedArray>>* out_handles) { |
99 std::vector<Handle<FixedArray>>* out_handles = nullptr) { | 93 heap::AllocateAllButNBytes(space, 0, out_handles); |
100 AllocateAllButNBytes(space, 0, out_handles); | |
101 } | 94 } |
102 | 95 |
103 static inline void SimulateFullSpace( | 96 bool FillUpOnePage(v8::internal::NewSpace* space, |
104 v8::internal::NewSpace* space, | 97 std::vector<Handle<FixedArray>>* out_handles) { |
105 std::vector<Handle<FixedArray>>* out_handles = nullptr) { | 98 space->DisableInlineAllocationSteps(); |
106 FillCurrentPage(space, out_handles); | 99 int space_remaining = static_cast<int>(*space->allocation_limit_address() - |
107 while (FillUpOnePage(space, out_handles) || space->AddFreshPage()) { | 100 *space->allocation_top_address()); |
| 101 if (space_remaining == 0) return false; |
| 102 std::vector<Handle<FixedArray>> handles = |
| 103 heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); |
| 104 if (out_handles != nullptr) |
| 105 out_handles->insert(out_handles->end(), handles.begin(), handles.end()); |
| 106 return true; |
| 107 } |
| 108 |
| 109 void SimulateFullSpace(v8::internal::NewSpace* space, |
| 110 std::vector<Handle<FixedArray>>* out_handles) { |
| 111 heap::FillCurrentPage(space, out_handles); |
| 112 while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) { |
108 } | 113 } |
109 } | 114 } |
110 | 115 |
111 | 116 void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) { |
112 // Helper function that simulates a full old-space in the heap. | |
113 static inline void SimulateFullSpace(v8::internal::PagedSpace* space) { | |
114 space->EmptyAllocationInfo(); | |
115 space->ResetFreeList(); | |
116 space->ClearStats(); | |
117 } | |
118 | |
119 | |
120 // Helper function that simulates many incremental marking steps until | |
121 // marking is completed. | |
122 static inline void SimulateIncrementalMarking(i::Heap* heap, | |
123 bool force_completion = true) { | |
124 i::MarkCompactCollector* collector = heap->mark_compact_collector(); | 117 i::MarkCompactCollector* collector = heap->mark_compact_collector(); |
125 i::IncrementalMarking* marking = heap->incremental_marking(); | 118 i::IncrementalMarking* marking = heap->incremental_marking(); |
126 if (collector->sweeping_in_progress()) { | 119 if (collector->sweeping_in_progress()) { |
127 collector->EnsureSweepingCompleted(); | 120 collector->EnsureSweepingCompleted(); |
128 } | 121 } |
129 CHECK(marking->IsMarking() || marking->IsStopped()); | 122 CHECK(marking->IsMarking() || marking->IsStopped()); |
130 if (marking->IsStopped()) { | 123 if (marking->IsStopped()) { |
131 heap->StartIncrementalMarking(); | 124 heap->StartIncrementalMarking(); |
132 } | 125 } |
133 CHECK(marking->IsMarking()); | 126 CHECK(marking->IsMarking()); |
134 if (!force_completion) return; | 127 if (!force_completion) return; |
135 | 128 |
136 while (!marking->IsComplete()) { | 129 while (!marking->IsComplete()) { |
137 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD); | 130 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD); |
138 if (marking->IsReadyToOverApproximateWeakClosure()) { | 131 if (marking->IsReadyToOverApproximateWeakClosure()) { |
139 marking->FinalizeIncrementally(); | 132 marking->FinalizeIncrementally(); |
140 } | 133 } |
141 } | 134 } |
142 CHECK(marking->IsComplete()); | 135 CHECK(marking->IsComplete()); |
143 } | 136 } |
144 | 137 |
| 138 void SimulateFullSpace(v8::internal::PagedSpace* space) { |
| 139 space->EmptyAllocationInfo(); |
| 140 space->ResetFreeList(); |
| 141 space->ClearStats(); |
| 142 } |
| 143 |
| 144 } // namespace heap |
145 } // namespace internal | 145 } // namespace internal |
146 } // namespace v8 | 146 } // namespace v8 |
147 | |
148 #endif // HEAP_UTILS_H_ | |
OLD | NEW |