OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_INCREMENTAL_MARKING_H_ | 5 #ifndef V8_HEAP_INCREMENTAL_MARKING_H_ |
6 #define V8_HEAP_INCREMENTAL_MARKING_H_ | 6 #define V8_HEAP_INCREMENTAL_MARKING_H_ |
7 | 7 |
8 #include "src/cancelable-task.h" | 8 #include "src/cancelable-task.h" |
9 #include "src/execution.h" | 9 #include "src/execution.h" |
10 #include "src/heap/heap.h" | 10 #include "src/heap/heap.h" |
(...skipping 14 matching lines...) |
25 class V8_EXPORT_PRIVATE IncrementalMarking { | 25 class V8_EXPORT_PRIVATE IncrementalMarking { |
26 public: | 26 public: |
27 enum State { STOPPED, SWEEPING, MARKING, COMPLETE }; | 27 enum State { STOPPED, SWEEPING, MARKING, COMPLETE }; |
28 | 28 |
29 enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD }; | 29 enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD }; |
30 | 30 |
31 enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION }; | 31 enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION }; |
32 | 32 |
33 enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION }; | 33 enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION }; |
34 | 34 |
| 35 static void MarkGrey(Heap* heap, HeapObject* object); |
| 36 |
| 37 static void MarkBlack(HeapObject* object, int size); |
| 38 |
| 39 // Transfers mark bits without requiring proper object headers. |
| 40 static void TransferMark(Heap* heap, HeapObject* from, HeapObject* to); |
| 41 |
| 42 // Transfers color including live byte count, requiring properly set up |
| 43 // objects. |
| 44 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> |
| 45 V8_INLINE static void TransferColor(HeapObject* from, HeapObject* to) { |
| 46 if (ObjectMarking::IsBlack<access_mode>(to, MarkingState::Internal(to))) { |
| 47 DCHECK(to->GetHeap()->incremental_marking()->black_allocation()); |
| 48 return; |
| 49 } |
| 50 |
| 51 DCHECK(ObjectMarking::IsWhite<access_mode>(to, MarkingState::Internal(to))); |
| 52 if (ObjectMarking::IsGrey<access_mode>(from, |
| 53 MarkingState::Internal(from))) { |
| 54 ObjectMarking::WhiteToGrey<access_mode>(to, MarkingState::Internal(to)); |
| 55 } else if (ObjectMarking::IsBlack<access_mode>( |
| 56 from, MarkingState::Internal(from))) { |
| 57 ObjectMarking::WhiteToBlack<access_mode>(to, MarkingState::Internal(to)); |
| 58 } |
| 59 } |
| 60 |
35 explicit IncrementalMarking(Heap* heap); | 61 explicit IncrementalMarking(Heap* heap); |
36 | 62 |
37 static void Initialize(); | 63 static void Initialize(); |
38 | 64 |
39 State state() { | 65 State state() { |
40 DCHECK(state_ == STOPPED || FLAG_incremental_marking); | 66 DCHECK(state_ == STOPPED || FLAG_incremental_marking); |
41 return state_; | 67 return state_; |
42 } | 68 } |
43 | 69 |
44 bool should_hurry() { return should_hurry_; } | 70 bool should_hurry() { return should_hurry_; } |
(...skipping 127 matching lines...) |
172 void ActivateGeneratedStub(Code* stub); | 198 void ActivateGeneratedStub(Code* stub); |
173 | 199 |
174 void NotifyIncompleteScanOfObject(int unscanned_bytes) { | 200 void NotifyIncompleteScanOfObject(int unscanned_bytes) { |
175 unscanned_bytes_of_large_object_ = unscanned_bytes; | 201 unscanned_bytes_of_large_object_ = unscanned_bytes; |
176 } | 202 } |
177 | 203 |
178 void ClearIdleMarkingDelayCounter(); | 204 void ClearIdleMarkingDelayCounter(); |
179 | 205 |
180 bool IsIdleMarkingDelayCounterLimitReached(); | 206 bool IsIdleMarkingDelayCounterLimitReached(); |
181 | 207 |
182 static void MarkGrey(Heap* heap, HeapObject* object); | |
183 | |
184 static void MarkBlack(HeapObject* object, int size); | |
185 | |
186 static void TransferMark(Heap* heap, HeapObject* from, HeapObject* to); | |
187 | |
188 V8_INLINE static void TransferColor(HeapObject* from, HeapObject* to) { | |
189 if (ObjectMarking::IsBlack(to, MarkingState::Internal(to))) { | |
190 DCHECK(to->GetHeap()->incremental_marking()->black_allocation()); | |
191 return; | |
192 } | |
193 | |
194 DCHECK(ObjectMarking::IsWhite(to, MarkingState::Internal(to))); | |
195 if (ObjectMarking::IsGrey(from, MarkingState::Internal(from))) { | |
196 ObjectMarking::WhiteToGrey(to, MarkingState::Internal(to)); | |
197 } else if (ObjectMarking::IsBlack(from, MarkingState::Internal(from))) { | |
198 ObjectMarking::WhiteToBlack(to, MarkingState::Internal(to)); | |
199 } | |
200 } | |
201 | |
202 void IterateBlackObject(HeapObject* object); | 208 void IterateBlackObject(HeapObject* object); |
203 | 209 |
204 Heap* heap() const { return heap_; } | 210 Heap* heap() const { return heap_; } |
205 | 211 |
206 IncrementalMarkingJob* incremental_marking_job() { | 212 IncrementalMarkingJob* incremental_marking_job() { |
207 return &incremental_marking_job_; | 213 return &incremental_marking_job_; |
208 } | 214 } |
209 | 215 |
210 bool black_allocation() { return black_allocation_; } | 216 bool black_allocation() { return black_allocation_; } |
211 | 217 |
(...skipping 83 matching lines...) |
295 | 301 |
296 Observer new_generation_observer_; | 302 Observer new_generation_observer_; |
297 Observer old_generation_observer_; | 303 Observer old_generation_observer_; |
298 | 304 |
299 DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking); | 305 DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking); |
300 }; | 306 }; |
301 } // namespace internal | 307 } // namespace internal |
302 } // namespace v8 | 308 } // namespace v8 |
303 | 309 |
304 #endif // V8_HEAP_INCREMENTAL_MARKING_H_ | 310 #endif // V8_HEAP_INCREMENTAL_MARKING_H_ |
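
Note on the new template parameter (illustrative, not part of the patch): moving TransferColor into the public section and templating it on MarkBit::AccessMode, with NON_ATOMIC as the default, lets existing call sites keep their current behavior while callers that mark concurrently can opt into atomic mark-bit updates at compile time. The self-contained sketch below shows that dispatch pattern in isolation; AccessMode and MarkCell are simplified stand-ins for illustration, not V8's actual MarkBit/ObjectMarking API.

#include <atomic>
#include <cassert>
#include <cstdint>

// Illustrative stand-ins only; not V8's real MarkBit/ObjectMarking types.
enum class AccessMode { NON_ATOMIC, ATOMIC };

struct MarkCell {
  std::atomic<uint32_t> bits{0};

  // Compile-time choice between a non-atomic read-modify-write (single
  // marking thread) and an atomic fetch_or (concurrent markers), mirroring
  // how the patch threads MarkBit::AccessMode through TransferColor.
  template <AccessMode mode = AccessMode::NON_ATOMIC>
  void Set(uint32_t mask) {
    if (mode == AccessMode::ATOMIC) {
      bits.fetch_or(mask, std::memory_order_relaxed);
    } else {
      bits.store(bits.load(std::memory_order_relaxed) | mask,
                 std::memory_order_relaxed);
    }
  }
};

int main() {
  MarkCell cell;
  cell.Set(0x1);                      // default NON_ATOMIC, like existing callers
  cell.Set<AccessMode::ATOMIC>(0x2);  // explicit atomic mode for concurrent use
  assert(cell.bits.load(std::memory_order_relaxed) == 0x3u);
  return 0;
}

Resolving the access mode at compile time avoids a per-call runtime branch on the hot marking path, which is presumably why the patch uses a template parameter rather than a function argument.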