// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef HEAP_UTILS_H_
#define HEAP_UTILS_H_

#include "src/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"


namespace v8 {
namespace internal {

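// Converts an object size in bytes into the corresponding FixedArray length,
// i.e. the number of pointer-sized element slots after the header.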
static int LenFromSize(int size) {
  return (size - FixedArray::kHeaderSize) / kPointerSize;
}


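// Allocates FixedArrays (each at most |object_size| bytes) until
// |padding_size| bytes of the space have been consumed, and returns handles
// to them. With TENURED the arrays go to old space, otherwise to new space.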
static inline std::vector<Handle<FixedArray>> CreatePadding(
    Heap* heap, int padding_size, PretenureFlag tenure,
    int object_size = Page::kMaxRegularHeapObjectSize) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (tenure == i::TENURED) {
    heap->old_space()->EmptyAllocationInfo();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    heap->new_space()->DisableInlineAllocationSteps();
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
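  // Allocate FixedArrays of at most |object_size| bytes until the requested
  // padding has been consumed; fall back to a filler object if the remaining
  // space is too small for another FixedArray.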
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = LenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = LenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a
        // filler.
        heap->CreateFillerObjectAt(
            *heap->old_space()->allocation_top_address(), free_memory,
            ClearRecordedSlots::kNo);
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
    CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) ||
          (tenure == TENURED && heap->InOldSpace(*handles.back())));
    free_memory -= allocate_memory;
  }
  return handles;
}


// Helper function that fills up the current new-space page with FixedArrays.
// Returns false if the page has no linear allocation space left.
static inline bool FillUpOnePage(
    v8::internal::NewSpace* space,
    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
  space->DisableInlineAllocationSteps();
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  if (space_remaining == 0) return false;
  std::vector<Handle<FixedArray>> handles =
      CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
  return true;
}


// Helper function that fills the current new-space page, leaving
// |extra_bytes| of the linear allocation area unallocated.
static inline void AllocateAllButNBytes(
    v8::internal::NewSpace* space, int extra_bytes,
    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
  space->DisableInlineAllocationSteps();
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  if (new_linear_size == 0) return;
  std::vector<Handle<FixedArray>> handles =
      CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
}

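// Fills the remaining linear allocation area of the current new-space page.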
static inline void FillCurrentPage(
    v8::internal::NewSpace* space,
    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
  AllocateAllButNBytes(space, 0, out_handles);
}

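// Helper function that simulates a full new-space in the heap by filling the
// current page and every fresh page that can still be added to the space.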
static inline void SimulateFullSpace(
    v8::internal::NewSpace* space,
    std::vector<Handle<FixedArray>>* out_handles = nullptr) {
  FillCurrentPage(space, out_handles);
  while (FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
  }
}


// Helper function that simulates a full old-space in the heap.
static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
  space->EmptyAllocationInfo();
  space->ResetFreeList();
  space->ClearStats();
}


// Helper function that simulates many incremental marking steps until
// marking is completed.
static inline void SimulateIncrementalMarking(i::Heap* heap,
                                              bool force_completion = true) {
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking();
  }
  CHECK(marking->IsMarking());
  if (!force_completion) return;

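  // Perform 1 MB marking steps until marking completes, finalizing the
  // incremental marker once the weak closure can be over-approximated.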
  while (!marking->IsComplete()) {
    marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}

}  // namespace internal
}  // namespace v8

#endif  // HEAP_UTILS_H_