OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
10 #include "src/heap/concurrent-marking.h" | 10 #include "src/heap/concurrent-marking.h" |
(...skipping 30 matching lines...)
41 new_generation_observer_(*this, kAllocatedThreshold), | 41 new_generation_observer_(*this, kAllocatedThreshold), |
42 old_generation_observer_(*this, kAllocatedThreshold) {} | 42 old_generation_observer_(*this, kAllocatedThreshold) {} |
43 | 43 |
44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
45 HeapObject* value_heap_obj = HeapObject::cast(value); | 45 HeapObject* value_heap_obj = HeapObject::cast(value); |
46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, | 46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, |
47 marking_state(value_heap_obj))); | 47 marking_state(value_heap_obj))); |
48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); | 48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); |
49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); | 49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); |
50 | 50 |
51 if (is_black && | 51 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { |
52 ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) { | |
53 WhiteToGreyAndPush(value_heap_obj); | |
54 RestartIfNotMarking(); | 52 RestartIfNotMarking(); |
55 } | 53 } |
56 return is_compacting_ && is_black; | 54 return is_compacting_ && is_black; |
57 } | 55 } |
58 | 56 |
59 | 57 |
60 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 58 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
61 Object* value) { | 59 Object* value) { |
62 if (BaseRecordWrite(obj, value) && slot != NULL) { | 60 if (BaseRecordWrite(obj, value) && slot != NULL) { |
63 // Object is not going to be rescanned. We need to record the slot. | 61 // Object is not going to be rescanned. We need to record the slot. |
(...skipping 50 matching lines...)
114 } | 112 } |
115 | 113 |
116 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 114 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
117 Object* value) { | 115 Object* value) { |
118 if (BaseRecordWrite(host, value)) { | 116 if (BaseRecordWrite(host, value)) { |
119 // Object is not going to be rescanned. We need to record the slot. | 117 // Object is not going to be rescanned. We need to record the slot. |
120 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 118 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
121 } | 119 } |
122 } | 120 } |
123 | 121 |
124 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 122 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
125 ObjectMarking::WhiteToGrey(obj, marking_state(obj)); | 123 if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { |
126 marking_deque()->Push(obj); | 124 marking_deque()->Push(obj); |
| 125 return true; |
| 126 } |
| 127 return false; |
127 } | 128 } |
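The hunk above changes WhiteToGreyAndPush to report whether the white-to-grey transition actually happened, so a caller such as BaseRecordWrite (earlier in this file) no longer needs a separate IsWhite check and cannot push the same object onto the marking deque twice. The following standalone sketch only illustrates that contract; the Color enum, Object struct, and Marker class are simplified stand-ins, not V8's real ObjectMarking/MarkingDeque API.

#include <cassert>
#include <vector>

// Simplified stand-ins; V8's real ObjectMarking/MarkingDeque types differ.
enum class Color { kWhite, kGrey, kBlack };
struct Object { Color color = Color::kWhite; };

class Marker {
 public:
  // Mirrors the new contract: the colour transition and the push happen as
  // one step, and the return value says whether anything was done.
  bool WhiteToGreyAndPush(Object* obj) {
    if (obj->color != Color::kWhite) return false;  // already grey or black
    obj->color = Color::kGrey;
    worklist_.push_back(obj);  // pushed at most once per object
    return true;
  }
  std::vector<Object*> worklist_;
};

int main() {
  Marker marker;
  Object o;
  assert(marker.WhiteToGreyAndPush(&o));   // white -> grey, pushed
  assert(!marker.WhiteToGreyAndPush(&o));  // no-op, not pushed again
  assert(marker.worklist_.size() == 1);
  return 0;
}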
128 | 129 |
129 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 130 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
130 HeapObject* to) { | 131 HeapObject* to) { |
131 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | 132 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
132 // This is only used when resizing an object. | 133 // This is only used when resizing an object. |
133 DCHECK(MemoryChunk::FromAddress(from->address()) == | 134 DCHECK(MemoryChunk::FromAddress(from->address()) == |
134 MemoryChunk::FromAddress(to->address())); | 135 MemoryChunk::FromAddress(to->address())); |
135 | 136 |
136 if (!IsMarking()) return; | 137 if (!IsMarking()) return; |
137 | 138 |
138 // If the mark doesn't move, we don't check the color of the object. | 139 // If the mark doesn't move, we don't check the color of the object. |
139 // It doesn't matter whether the object is black, since it hasn't changed | 140 // It doesn't matter whether the object is black, since it hasn't changed |
140 // size, so the adjustment to the live data count will be zero anyway. | 141 // size, so the adjustment to the live data count will be zero anyway. |
141 if (from == to) return; | 142 if (from == to) return; |
142 | 143 |
143 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); | 144 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); |
144 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); | 145 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); |
145 | 146 |
146 if (Marking::IsBlack(old_mark_bit)) { | 147 if (Marking::IsBlack(old_mark_bit)) { |
147 Marking::MarkBlack(new_mark_bit); | 148 if (from->address() + kPointerSize == to->address()) { |
| 149 // The old and the new markbits overlap. The |to| object has the |
| 150 // grey color. To make it black, we need to set the second bit. |
| 151 DCHECK(new_mark_bit.Get()); |
| 152 new_mark_bit.Next().Set(); |
| 153 } else { |
| 154 bool success = Marking::WhiteToBlack(new_mark_bit); |
| 155 DCHECK(success); |
| 156 USE(success); |
| 157 } |
148 } else if (Marking::IsGrey(old_mark_bit)) { | 158 } else if (Marking::IsGrey(old_mark_bit)) { |
149 Marking::WhiteToGrey(new_mark_bit); | 159 if (from->address() + kPointerSize == to->address()) { |
150 marking_deque()->Push(to); | 160 // The old and the new markbits overlap. The |to| object has the |
151 RestartIfNotMarking(); | 161 // white color. To make it black, we need to set both bits. |
| 162 // Note that Marking::WhiteToGrey does not work here because |
| 163 // old_mark_bit.Next() can be set by the concurrent marker at any time. |
| 164 new_mark_bit.Set(); |
| 165 new_mark_bit.Next().Set(); |
| 166 } else { |
| 167 bool success = Marking::WhiteToGrey(new_mark_bit); |
| 168 DCHECK(success); |
| 169 USE(success); |
| 170 marking_deque()->Push(to); |
| 171 RestartIfNotMarking(); |
| 172 } |
152 } | 173 } |
153 } | 174 } |
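The new TransferMark branches on whether |to| starts exactly one word after |from| (left trimming). The marking bitmap keeps one bit per pointer-sized word, and an object's colour is the pair of bits starting at its first word, so in that case the bit pairs of |from| and |to| overlap by one bit: a black |from| already leaves |to| grey, and a grey |from| leaves |to| white. The sketch below models this with a plain bitset, assuming the usual 00 = white, 10 = grey, 11 = black encoding of the pair; V8's actual Bitmap and MarkBit classes are more involved.

#include <bitset>
#include <cassert>

// Conceptual model only: one mark bit per word, and an object's colour is the
// pair of bits starting at its first word (00 = white, 10 = grey, 11 = black).
int main() {
  std::bitset<16> bits;     // mark bits for 16 words of a page

  const int from = 4;       // |from| starts at word 4
  const int to = from + 1;  // |to| starts one word later (left trim)

  // Mark |from| black: both bits of its pair are set.
  bits[from] = true;
  bits[from + 1] = true;

  // The pairs overlap: the first bit of |to| is the second bit of |from|,
  // so |to| is already grey. One more bit makes it black, which is what the
  // overlap branch in TransferMark does via new_mark_bit.Next().Set().
  assert(bits[to]);
  bits[to + 1] = true;      // |to| is now black
  return 0;
}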
154 | 175 |
155 class IncrementalMarkingMarkingVisitor | 176 class IncrementalMarkingMarkingVisitor |
156 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { | 177 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { |
157 public: | 178 public: |
158 static void Initialize() { | 179 static void Initialize() { |
159 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); | 180 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); |
160 table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental); | 181 table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental); |
161 table_.Register(kVisitNativeContext, &VisitNativeContextIncremental); | 182 table_.Register(kVisitNativeContext, &VisitNativeContextIncremental); |
(...skipping 49 matching lines...)
211 // We will mark cache black with a separate pass when we finish marking. | 232 // We will mark cache black with a separate pass when we finish marking. |
212 // Note that GC can happen when the context is not fully initialized, | 233 // Note that GC can happen when the context is not fully initialized, |
213 // so the cache can be undefined. | 234 // so the cache can be undefined. |
214 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 235 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
215 if (!cache->IsUndefined(map->GetIsolate())) { | 236 if (!cache->IsUndefined(map->GetIsolate())) { |
216 if (cache->IsHeapObject()) { | 237 if (cache->IsHeapObject()) { |
217 HeapObject* heap_obj = HeapObject::cast(cache); | 238 HeapObject* heap_obj = HeapObject::cast(cache); |
218 // Mark the object grey if it is white, do not enqueue it into the marking | 239 // Mark the object grey if it is white, do not enqueue it into the marking |
219 // deque. | 240 // deque. |
220 Heap* heap = map->GetHeap(); | 241 Heap* heap = map->GetHeap(); |
221 if (ObjectMarking::IsWhite( | 242 bool ignored = ObjectMarking::WhiteToGrey( |
222 heap_obj, | 243 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
223 heap->incremental_marking()->marking_state(heap_obj))) { | 244 USE(ignored); |
224 ObjectMarking::WhiteToGrey( | |
225 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); | |
226 } | |
227 } | 245 } |
228 } | 246 } |
229 VisitNativeContext(map, context); | 247 VisitNativeContext(map, context); |
230 } | 248 } |
231 | 249 |
232 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 250 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
233 Object* target = *p; | 251 Object* target = *p; |
234 if (target->IsHeapObject()) { | 252 if (target->IsHeapObject()) { |
235 heap->mark_compact_collector()->RecordSlot(object, p, target); | 253 heap->mark_compact_collector()->RecordSlot(object, p, target); |
236 MarkObject(heap, target); | 254 MarkObject(heap, target); |
237 } | 255 } |
238 } | 256 } |
239 | 257 |
240 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, | 258 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, |
241 Object** start, Object** end)) { | 259 Object** start, Object** end)) { |
242 for (Object** p = start; p < end; p++) { | 260 for (Object** p = start; p < end; p++) { |
243 Object* target = *p; | 261 Object* target = *p; |
244 if (target->IsHeapObject()) { | 262 if (target->IsHeapObject()) { |
245 heap->mark_compact_collector()->RecordSlot(object, p, target); | 263 heap->mark_compact_collector()->RecordSlot(object, p, target); |
246 MarkObject(heap, target); | 264 MarkObject(heap, target); |
247 } | 265 } |
248 } | 266 } |
249 } | 267 } |
250 | 268 |
251 // Marks the object grey and pushes it on the marking stack. | 269 // Marks the object grey and pushes it on the marking stack. |
252 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 270 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
253 heap->incremental_marking()->MarkGrey(HeapObject::cast(obj)); | 271 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
254 } | 272 } |
255 | 273 |
256 // Marks the object black without pushing it on the marking stack. | 274 // Marks the object black without pushing it on the marking stack. |
257 // Returns true if object needed marking and false otherwise. | 275 // Returns true if object needed marking and false otherwise. |
258 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 276 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
259 HeapObject* heap_object = HeapObject::cast(obj); | 277 HeapObject* heap_object = HeapObject::cast(obj); |
260 if (ObjectMarking::IsWhite( | 278 return ObjectMarking::WhiteToBlack( |
261 heap_object, | 279 heap_object, heap->incremental_marking()->marking_state(heap_object)); |
262 heap->incremental_marking()->marking_state(heap_object))) { | |
263 ObjectMarking::WhiteToBlack( | |
264 heap_object, heap->incremental_marking()->marking_state(heap_object)); | |
265 return true; | |
266 } | |
267 return false; | |
268 } | 280 } |
269 }; | 281 }; |
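MarkObjectWithoutPush now simply returns the result of ObjectMarking::WhiteToBlack instead of checking IsWhite first and then flipping the bits. With the concurrent marker mentioned in the TransferMark comment above, a separate check-then-transition pair can race; a single transition that reports success cannot. Below is a rough analogue using std::atomic, illustrative only: the real transitions operate on mark bits in a shared bitmap, not on a per-object field.

#include <atomic>
#include <cassert>

enum Color : int { kWhite = 0, kGrey = 1, kBlack = 2 };

struct MarkWord {
  std::atomic<int> color{kWhite};

  // One atomic transition that reports success: if two markers race, exactly
  // one of them sees true, so the object is processed exactly once.
  bool WhiteToBlack() {
    int expected = kWhite;
    return color.compare_exchange_strong(expected, kBlack);
  }
};

int main() {
  MarkWord m;
  assert(m.WhiteToBlack());    // first transition succeeds
  assert(!m.WhiteToBlack());   // object is no longer white
  return 0;
}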
270 | 282 |
271 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 283 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
272 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { | 284 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { |
273 Page* page = Page::FromAddress(object->address()); | 285 Page* page = Page::FromAddress(object->address()); |
274 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 286 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
275 // IterateBlackObject requires us to visit the whole object. | 287 // IterateBlackObject requires us to visit the whole object. |
276 page->ResetProgressBar(); | 288 page->ResetProgressBar(); |
277 } | 289 } |
278 Map* map = object->map(); | 290 Map* map = object->map(); |
279 MarkGrey(map); | 291 WhiteToGreyAndPush(map); |
280 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 292 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
281 } | 293 } |
282 } | 294 } |
283 | 295 |
284 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { | 296 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { |
285 public: | 297 public: |
286 explicit IncrementalMarkingRootMarkingVisitor( | 298 explicit IncrementalMarkingRootMarkingVisitor( |
287 IncrementalMarking* incremental_marking) | 299 IncrementalMarking* incremental_marking) |
288 : heap_(incremental_marking->heap()) {} | 300 : heap_(incremental_marking->heap()) {} |
289 | 301 |
290 void VisitRootPointer(Root root, Object** p) override { | 302 void VisitRootPointer(Root root, Object** p) override { |
291 MarkObjectByPointer(p); | 303 MarkObjectByPointer(p); |
292 } | 304 } |
293 | 305 |
294 void VisitRootPointers(Root root, Object** start, Object** end) override { | 306 void VisitRootPointers(Root root, Object** start, Object** end) override { |
295 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 307 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
296 } | 308 } |
297 | 309 |
298 private: | 310 private: |
299 void MarkObjectByPointer(Object** p) { | 311 void MarkObjectByPointer(Object** p) { |
300 Object* obj = *p; | 312 Object* obj = *p; |
301 if (!obj->IsHeapObject()) return; | 313 if (!obj->IsHeapObject()) return; |
302 | 314 |
303 heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj)); | 315 heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
304 } | 316 } |
305 | 317 |
306 Heap* heap_; | 318 Heap* heap_; |
307 }; | 319 }; |
308 | 320 |
309 | 321 |
310 void IncrementalMarking::Initialize() { | 322 void IncrementalMarking::Initialize() { |
311 IncrementalMarkingMarkingVisitor::Initialize(); | 323 IncrementalMarkingMarkingVisitor::Initialize(); |
312 } | 324 } |
313 | 325 |
(...skipping 370 matching lines...)
684 for (int i = 0; i < length; i += 2) { | 696 for (int i = 0; i < length; i += 2) { |
685 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 697 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
686 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 698 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
687 if (cell->cleared()) continue; | 699 if (cell->cleared()) continue; |
688 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 700 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
689 int new_age; | 701 int new_age; |
690 Map* map = Map::cast(cell->value()); | 702 Map* map = Map::cast(cell->value()); |
691 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 703 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
692 ObjectMarking::IsWhite(map, marking_state(map))) { | 704 ObjectMarking::IsWhite(map, marking_state(map))) { |
693 if (ShouldRetainMap(map, age)) { | 705 if (ShouldRetainMap(map, age)) { |
694 MarkGrey(map); | 706 WhiteToGreyAndPush(map); |
695 } | 707 } |
696 Object* prototype = map->prototype(); | 708 Object* prototype = map->prototype(); |
697 if (age > 0 && prototype->IsHeapObject() && | 709 if (age > 0 && prototype->IsHeapObject() && |
698 ObjectMarking::IsWhite(HeapObject::cast(prototype), | 710 ObjectMarking::IsWhite(HeapObject::cast(prototype), |
699 marking_state(HeapObject::cast(prototype)))) { | 711 marking_state(HeapObject::cast(prototype)))) { |
700 // The prototype is not marked, age the map. | 712 // The prototype is not marked, age the map. |
701 new_age = age - 1; | 713 new_age = age - 1; |
702 } else { | 714 } else { |
703 // The prototype and the constructor are marked, this map keeps only | 715 // The prototype and the constructor are marked, this map keeps only |
704 // transition tree alive, not JSObjects. Do not age the map. | 716 // transition tree alive, not JSObjects. Do not age the map. |
(...skipping 99 matching lines...)
804 ObjectMarking::IsBlack(obj, marking_state(obj)))); | 816 ObjectMarking::IsBlack(obj, marking_state(obj)))); |
805 // Skip one word filler objects that appear on the | 817 // Skip one word filler objects that appear on the |
806 // stack when we perform in place array shift. | 818 // stack when we perform in place array shift. |
807 return (obj->map() == filler_map) ? nullptr : obj; | 819 return (obj->map() == filler_map) ? nullptr : obj; |
808 } | 820 } |
809 }); | 821 }); |
810 } | 822 } |
811 | 823 |
812 | 824 |
813 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 825 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
814 MarkGrey(map); | 826 WhiteToGreyAndPush(map); |
815 | 827 |
816 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 828 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
817 | 829 |
818 #if ENABLE_SLOW_DCHECKS | 830 #if ENABLE_SLOW_DCHECKS |
819 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); | 831 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); |
820 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 832 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
821 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 833 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
822 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 834 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
823 Marking::IsBlack(mark_bit))); | 835 Marking::IsBlack(mark_bit))); |
824 #endif | 836 #endif |
825 MarkBlack(obj, size); | |
826 } | |
827 | |
828 void IncrementalMarking::MarkGrey(HeapObject* object) { | |
829 if (ObjectMarking::IsWhite(object, marking_state(object))) { | |
830 WhiteToGreyAndPush(object); | |
831 } | |
832 } | |
833 | |
834 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { | |
835 if (ObjectMarking::IsBlack(obj, marking_state(obj))) return; | |
836 ObjectMarking::GreyToBlack(obj, marking_state(obj)); | 837 ObjectMarking::GreyToBlack(obj, marking_state(obj)); |
837 } | 838 } |
838 | 839 |
839 intptr_t IncrementalMarking::ProcessMarkingDeque( | 840 intptr_t IncrementalMarking::ProcessMarkingDeque( |
840 intptr_t bytes_to_process, ForceCompletionAction completion) { | 841 intptr_t bytes_to_process, ForceCompletionAction completion) { |
841 intptr_t bytes_processed = 0; | 842 intptr_t bytes_processed = 0; |
842 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || | 843 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || |
843 completion == FORCE_COMPLETION)) { | 844 completion == FORCE_COMPLETION)) { |
844 HeapObject* obj = marking_deque()->Pop(); | 845 HeapObject* obj = marking_deque()->Pop(); |
845 | 846 |
(...skipping 48 matching lines...)
894 } | 895 } |
895 } | 896 } |
896 | 897 |
897 Object* context = heap_->native_contexts_list(); | 898 Object* context = heap_->native_contexts_list(); |
898 while (!context->IsUndefined(heap_->isolate())) { | 899 while (!context->IsUndefined(heap_->isolate())) { |
899 // GC can happen when the context is not fully initialized, | 900 // GC can happen when the context is not fully initialized, |
900 // so the cache can be undefined. | 901 // so the cache can be undefined. |
901 HeapObject* cache = HeapObject::cast( | 902 HeapObject* cache = HeapObject::cast( |
902 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 903 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
903 if (!cache->IsUndefined(heap_->isolate())) { | 904 if (!cache->IsUndefined(heap_->isolate())) { |
904 if (ObjectMarking::IsGrey(cache, marking_state(cache))) { | 905 // Mark the cache black if it is grey. |
905 ObjectMarking::GreyToBlack(cache, marking_state(cache)); | 906 bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); |
906 } | 907 USE(ignored); |
907 } | 908 } |
908 context = Context::cast(context)->next_context_link(); | 909 context = Context::cast(context)->next_context_link(); |
909 } | 910 } |
910 } | 911 } |
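Because the transition helpers now return a bool, callers in this patch fall into two groups: those that must succeed check the result with DCHECK(success) and then pass it to USE(success), while those that are indifferent (as with the normalized map cache above, which may already be black) bind the result to a variable named ignored and pass it to USE() so the compiler does not warn about an unused variable. A minimal sketch of that idiom, with a toy Use() standing in for V8's actual USE() macro from its base headers:

#include <cassert>

template <typename T>
void Use(T&&) {}  // toy stand-in for V8's USE(): marks a value as deliberately unused

// Toy stand-in for ObjectMarking::GreyToBlack acting on a single flag.
bool GreyToBlack(bool* grey) {
  if (!*grey) return false;  // already black in this toy model: nothing to do
  *grey = false;
  return true;
}

int main() {
  bool grey = true;

  bool success = GreyToBlack(&grey);  // caller that requires the transition
  assert(success);
  Use(success);

  bool ignored = GreyToBlack(&grey);  // caller that does not care either way
  Use(ignored);
  return 0;
}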
911 | 912 |
912 | 913 |
913 void IncrementalMarking::Stop() { | 914 void IncrementalMarking::Stop() { |
914 if (IsStopped()) return; | 915 if (IsStopped()) return; |
915 if (FLAG_trace_incremental_marking) { | 916 if (FLAG_trace_incremental_marking) { |
916 int old_generation_size_mb = | 917 int old_generation_size_mb = |
(...skipping 259 matching lines...)
1176 idle_marking_delay_counter_++; | 1177 idle_marking_delay_counter_++; |
1177 } | 1178 } |
1178 | 1179 |
1179 | 1180 |
1180 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1181 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
1181 idle_marking_delay_counter_ = 0; | 1182 idle_marking_delay_counter_ = 0; |
1182 } | 1183 } |
1183 | 1184 |
1184 } // namespace internal | 1185 } // namespace internal |
1185 } // namespace v8 | 1186 } // namespace v8 |