OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
10 #include "src/heap/concurrent-marking.h" | 10 #include "src/heap/concurrent-marking.h" |
(...skipping 30 matching lines...) Expand all Loading... |
41 new_generation_observer_(*this, kAllocatedThreshold), | 41 new_generation_observer_(*this, kAllocatedThreshold), |
42 old_generation_observer_(*this, kAllocatedThreshold) {} | 42 old_generation_observer_(*this, kAllocatedThreshold) {} |
43 | 43 |
44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
45 HeapObject* value_heap_obj = HeapObject::cast(value); | 45 HeapObject* value_heap_obj = HeapObject::cast(value); |
46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, | 46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, |
47 marking_state(value_heap_obj))); | 47 marking_state(value_heap_obj))); |
48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); | 48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); |
49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); | 49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); |
50 | 50 |
51 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { | 51 if (is_black && |
| 52 ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) { |
| 53 WhiteToGreyAndPush(value_heap_obj); |
52 RestartIfNotMarking(); | 54 RestartIfNotMarking(); |
53 } | 55 } |
54 return is_compacting_ && is_black; | 56 return is_compacting_ && is_black; |
55 } | 57 } |
56 | 58 |
57 | 59 |
58 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 60 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
59 Object* value) { | 61 Object* value) { |
60 if (BaseRecordWrite(obj, value) && slot != NULL) { | 62 if (BaseRecordWrite(obj, value) && slot != NULL) { |
61 // Object is not going to be rescanned. We need to record the slot. | 63 // Object is not going to be rescanned. We need to record the slot. |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
112 } | 114 } |
113 | 115 |
114 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 116 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
115 Object* value) { | 117 Object* value) { |
116 if (BaseRecordWrite(host, value)) { | 118 if (BaseRecordWrite(host, value)) { |
117 // Object is not going to be rescanned. We need to record the slot. | 119 // Object is not going to be rescanned. We need to record the slot. |
118 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 120 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
119 } | 121 } |
120 } | 122 } |
121 | 123 |
122 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 124 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
123 if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { | 125 ObjectMarking::WhiteToGrey(obj, marking_state(obj)); |
124 marking_deque()->Push(obj); | 126 marking_deque()->Push(obj); |
125 return true; | |
126 } | |
127 return false; | |
128 } | 127 } |
129 | 128 |
130 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 129 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
131 HeapObject* to) { | 130 HeapObject* to) { |
132 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | 131 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
133 // This is only used when resizing an object. | 132 // This is only used when resizing an object. |
134 DCHECK(MemoryChunk::FromAddress(from->address()) == | 133 DCHECK(MemoryChunk::FromAddress(from->address()) == |
135 MemoryChunk::FromAddress(to->address())); | 134 MemoryChunk::FromAddress(to->address())); |
136 | 135 |
137 if (!IsMarking()) return; | 136 if (!IsMarking()) return; |
138 | 137 |
139 // If the mark doesn't move, we don't check the color of the object. | 138 // If the mark doesn't move, we don't check the color of the object. |
140 // It doesn't matter whether the object is black, since it hasn't changed | 139 // It doesn't matter whether the object is black, since it hasn't changed |
141 // size, so the adjustment to the live data count will be zero anyway. | 140 // size, so the adjustment to the live data count will be zero anyway. |
142 if (from == to) return; | 141 if (from == to) return; |
143 | 142 |
144 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); | 143 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); |
145 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); | 144 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); |
146 | 145 |
147 if (Marking::IsBlack(old_mark_bit)) { | 146 if (Marking::IsBlack(old_mark_bit)) { |
148 if (from->address() + kPointerSize == to->address()) { | 147 Marking::MarkBlack(new_mark_bit); |
149 // The old and the new markbits overlap. The |to| object has the | |
150 // grey color. To make it black, we need to set second bit. | |
151 DCHECK(new_mark_bit.Get()); | |
152 new_mark_bit.Next().Set(); | |
153 } else { | |
154 bool success = Marking::WhiteToBlack(new_mark_bit); | |
155 DCHECK(success); | |
156 USE(success); | |
157 } | |
158 } else if (Marking::IsGrey(old_mark_bit)) { | 148 } else if (Marking::IsGrey(old_mark_bit)) { |
159 if (from->address() + kPointerSize == to->address()) { | 149 Marking::WhiteToGrey(new_mark_bit); |
160 // The old and the new markbits overlap. The |to| object has the | 150 marking_deque()->Push(to); |
161 // white color. To make it black, we need to set both bits. | 151 RestartIfNotMarking(); |
162 // Note that Marking::WhiteToGrey does not work here because | |
163 // old_mark_bit.Next() can be set by the concurrent marker at any time. | |
164 new_mark_bit.Set(); | |
165 new_mark_bit.Next().Set(); | |
166 } else { | |
167 bool success = Marking::WhiteToGrey(new_mark_bit); | |
168 DCHECK(success); | |
169 USE(success); | |
170 marking_deque()->Push(to); | |
171 RestartIfNotMarking(); | |
172 } | |
173 } | 152 } |
174 } | 153 } |
175 | 154 |
176 class IncrementalMarkingMarkingVisitor | 155 class IncrementalMarkingMarkingVisitor |
177 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { | 156 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { |
178 public: | 157 public: |
179 static void Initialize() { | 158 static void Initialize() { |
180 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); | 159 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); |
181 table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental); | 160 table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental); |
182 table_.Register(kVisitNativeContext, &VisitNativeContextIncremental); | 161 table_.Register(kVisitNativeContext, &VisitNativeContextIncremental); |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
232 // We will mark cache black with a separate pass when we finish marking. | 211 // We will mark cache black with a separate pass when we finish marking. |
233 // Note that GC can happen when the context is not fully initialized, | 212 // Note that GC can happen when the context is not fully initialized, |
234 // so the cache can be undefined. | 213 // so the cache can be undefined. |
235 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 214 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
236 if (!cache->IsUndefined(map->GetIsolate())) { | 215 if (!cache->IsUndefined(map->GetIsolate())) { |
237 if (cache->IsHeapObject()) { | 216 if (cache->IsHeapObject()) { |
238 HeapObject* heap_obj = HeapObject::cast(cache); | 217 HeapObject* heap_obj = HeapObject::cast(cache); |
239 // Mark the object grey if it is white, do not enqueue it into the marking | 218 // Mark the object grey if it is white, do not enqueue it into the marking |
240 // deque. | 219 // deque. |
241 Heap* heap = map->GetHeap(); | 220 Heap* heap = map->GetHeap(); |
242 bool ignored = ObjectMarking::WhiteToGrey( | 221 if (ObjectMarking::IsWhite( |
243 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); | 222 heap_obj, |
244 USE(ignored); | 223 heap->incremental_marking()->marking_state(heap_obj))) { |
| 224 ObjectMarking::WhiteToGrey( |
| 225 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
| 226 } |
245 } | 227 } |
246 } | 228 } |
247 VisitNativeContext(map, context); | 229 VisitNativeContext(map, context); |
248 } | 230 } |
249 | 231 |
250 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 232 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
251 Object* target = *p; | 233 Object* target = *p; |
252 if (target->IsHeapObject()) { | 234 if (target->IsHeapObject()) { |
253 heap->mark_compact_collector()->RecordSlot(object, p, target); | 235 heap->mark_compact_collector()->RecordSlot(object, p, target); |
254 MarkObject(heap, target); | 236 MarkObject(heap, target); |
255 } | 237 } |
256 } | 238 } |
257 | 239 |
258 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, | 240 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, |
259 Object** start, Object** end)) { | 241 Object** start, Object** end)) { |
260 for (Object** p = start; p < end; p++) { | 242 for (Object** p = start; p < end; p++) { |
261 Object* target = *p; | 243 Object* target = *p; |
262 if (target->IsHeapObject()) { | 244 if (target->IsHeapObject()) { |
263 heap->mark_compact_collector()->RecordSlot(object, p, target); | 245 heap->mark_compact_collector()->RecordSlot(object, p, target); |
264 MarkObject(heap, target); | 246 MarkObject(heap, target); |
265 } | 247 } |
266 } | 248 } |
267 } | 249 } |
268 | 250 |
269 // Marks the object grey and pushes it on the marking stack. | 251 // Marks the object grey and pushes it on the marking stack. |
270 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 252 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
271 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); | 253 heap->incremental_marking()->MarkGrey(HeapObject::cast(obj)); |
272 } | 254 } |
273 | 255 |
274 // Marks the object black without pushing it on the marking stack. | 256 // Marks the object black without pushing it on the marking stack. |
275 // Returns true if object needed marking and false otherwise. | 257 // Returns true if object needed marking and false otherwise. |
276 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 258 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
277 HeapObject* heap_object = HeapObject::cast(obj); | 259 HeapObject* heap_object = HeapObject::cast(obj); |
278 return ObjectMarking::WhiteToBlack( | 260 if (ObjectMarking::IsWhite( |
279 heap_object, heap->incremental_marking()->marking_state(heap_object)); | 261 heap_object, |
| 262 heap->incremental_marking()->marking_state(heap_object))) { |
| 263 ObjectMarking::WhiteToBlack( |
| 264 heap_object, heap->incremental_marking()->marking_state(heap_object)); |
| 265 return true; |
| 266 } |
| 267 return false; |
280 } | 268 } |
281 }; | 269 }; |
282 | 270 |
283 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 271 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
284 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { | 272 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { |
285 Page* page = Page::FromAddress(object->address()); | 273 Page* page = Page::FromAddress(object->address()); |
286 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 274 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
287 // IterateBlackObject requires us to visit the whole object. | 275 // IterateBlackObject requires us to visit the whole object. |
288 page->ResetProgressBar(); | 276 page->ResetProgressBar(); |
289 } | 277 } |
290 Map* map = object->map(); | 278 Map* map = object->map(); |
291 WhiteToGreyAndPush(map); | 279 MarkGrey(map); |
292 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 280 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
293 } | 281 } |
294 } | 282 } |
295 | 283 |
296 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { | 284 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { |
297 public: | 285 public: |
298 explicit IncrementalMarkingRootMarkingVisitor( | 286 explicit IncrementalMarkingRootMarkingVisitor( |
299 IncrementalMarking* incremental_marking) | 287 IncrementalMarking* incremental_marking) |
300 : heap_(incremental_marking->heap()) {} | 288 : heap_(incremental_marking->heap()) {} |
301 | 289 |
302 void VisitRootPointer(Root root, Object** p) override { | 290 void VisitRootPointer(Root root, Object** p) override { |
303 MarkObjectByPointer(p); | 291 MarkObjectByPointer(p); |
304 } | 292 } |
305 | 293 |
306 void VisitRootPointers(Root root, Object** start, Object** end) override { | 294 void VisitRootPointers(Root root, Object** start, Object** end) override { |
307 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 295 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
308 } | 296 } |
309 | 297 |
310 private: | 298 private: |
311 void MarkObjectByPointer(Object** p) { | 299 void MarkObjectByPointer(Object** p) { |
312 Object* obj = *p; | 300 Object* obj = *p; |
313 if (!obj->IsHeapObject()) return; | 301 if (!obj->IsHeapObject()) return; |
314 | 302 |
315 heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); | 303 heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj)); |
316 } | 304 } |
317 | 305 |
318 Heap* heap_; | 306 Heap* heap_; |
319 }; | 307 }; |
320 | 308 |
321 | 309 |
322 void IncrementalMarking::Initialize() { | 310 void IncrementalMarking::Initialize() { |
323 IncrementalMarkingMarkingVisitor::Initialize(); | 311 IncrementalMarkingMarkingVisitor::Initialize(); |
324 } | 312 } |
325 | 313 |
(...skipping 370 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
696 for (int i = 0; i < length; i += 2) { | 684 for (int i = 0; i < length; i += 2) { |
697 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 685 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
698 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 686 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
699 if (cell->cleared()) continue; | 687 if (cell->cleared()) continue; |
700 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 688 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
701 int new_age; | 689 int new_age; |
702 Map* map = Map::cast(cell->value()); | 690 Map* map = Map::cast(cell->value()); |
703 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 691 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
704 ObjectMarking::IsWhite(map, marking_state(map))) { | 692 ObjectMarking::IsWhite(map, marking_state(map))) { |
705 if (ShouldRetainMap(map, age)) { | 693 if (ShouldRetainMap(map, age)) { |
706 WhiteToGreyAndPush(map); | 694 MarkGrey(map); |
707 } | 695 } |
708 Object* prototype = map->prototype(); | 696 Object* prototype = map->prototype(); |
709 if (age > 0 && prototype->IsHeapObject() && | 697 if (age > 0 && prototype->IsHeapObject() && |
710 ObjectMarking::IsWhite(HeapObject::cast(prototype), | 698 ObjectMarking::IsWhite(HeapObject::cast(prototype), |
711 marking_state(HeapObject::cast(prototype)))) { | 699 marking_state(HeapObject::cast(prototype)))) { |
712 // The prototype is not marked, age the map. | 700 // The prototype is not marked, age the map. |
713 new_age = age - 1; | 701 new_age = age - 1; |
714 } else { | 702 } else { |
715 // The prototype and the constructor are marked, this map keeps only | 703 // The prototype and the constructor are marked, this map keeps only |
716 // transition tree alive, not JSObjects. Do not age the map. | 704 // transition tree alive, not JSObjects. Do not age the map. |
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
816 ObjectMarking::IsBlack(obj, marking_state(obj)))); | 804 ObjectMarking::IsBlack(obj, marking_state(obj)))); |
817 // Skip one word filler objects that appear on the | 805 // Skip one word filler objects that appear on the |
818 // stack when we perform in place array shift. | 806 // stack when we perform in place array shift. |
819 return (obj->map() == filler_map) ? nullptr : obj; | 807 return (obj->map() == filler_map) ? nullptr : obj; |
820 } | 808 } |
821 }); | 809 }); |
822 } | 810 } |
823 | 811 |
824 | 812 |
825 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 813 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
826 WhiteToGreyAndPush(map); | 814 MarkGrey(map); |
827 | 815 |
828 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 816 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
829 | 817 |
830 #if ENABLE_SLOW_DCHECKS | 818 #if ENABLE_SLOW_DCHECKS |
831 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); | 819 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); |
832 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 820 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
833 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 821 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
834 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 822 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
835 Marking::IsBlack(mark_bit))); | 823 Marking::IsBlack(mark_bit))); |
836 #endif | 824 #endif |
| 825 MarkBlack(obj, size); |
| 826 } |
| 827 |
| 828 void IncrementalMarking::MarkGrey(HeapObject* object) { |
| 829 if (ObjectMarking::IsWhite(object, marking_state(object))) { |
| 830 WhiteToGreyAndPush(object); |
| 831 } |
| 832 } |
| 833 |
| 834 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { |
| 835 if (ObjectMarking::IsBlack(obj, marking_state(obj))) return; |
837 ObjectMarking::GreyToBlack(obj, marking_state(obj)); | 836 ObjectMarking::GreyToBlack(obj, marking_state(obj)); |
838 } | 837 } |
839 | 838 |
840 intptr_t IncrementalMarking::ProcessMarkingDeque( | 839 intptr_t IncrementalMarking::ProcessMarkingDeque( |
841 intptr_t bytes_to_process, ForceCompletionAction completion) { | 840 intptr_t bytes_to_process, ForceCompletionAction completion) { |
842 intptr_t bytes_processed = 0; | 841 intptr_t bytes_processed = 0; |
843 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || | 842 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || |
844 completion == FORCE_COMPLETION)) { | 843 completion == FORCE_COMPLETION)) { |
845 HeapObject* obj = marking_deque()->Pop(); | 844 HeapObject* obj = marking_deque()->Pop(); |
846 | 845 |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
895 } | 894 } |
896 } | 895 } |
897 | 896 |
898 Object* context = heap_->native_contexts_list(); | 897 Object* context = heap_->native_contexts_list(); |
899 while (!context->IsUndefined(heap_->isolate())) { | 898 while (!context->IsUndefined(heap_->isolate())) { |
900 // GC can happen when the context is not fully initialized, | 899 // GC can happen when the context is not fully initialized, |
901 // so the cache can be undefined. | 900 // so the cache can be undefined. |
902 HeapObject* cache = HeapObject::cast( | 901 HeapObject* cache = HeapObject::cast( |
903 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 902 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
904 if (!cache->IsUndefined(heap_->isolate())) { | 903 if (!cache->IsUndefined(heap_->isolate())) { |
905 // Mark the cache black if it is grey. | 904 if (ObjectMarking::IsGrey(cache, marking_state(cache))) { |
906 bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); | 905 ObjectMarking::GreyToBlack(cache, marking_state(cache)); |
907 USE(ignored); | 906 } |
908 } | 907 } |
909 context = Context::cast(context)->next_context_link(); | 908 context = Context::cast(context)->next_context_link(); |
910 } | 909 } |
911 } | 910 } |
912 | 911 |
913 | 912 |
914 void IncrementalMarking::Stop() { | 913 void IncrementalMarking::Stop() { |
915 if (IsStopped()) return; | 914 if (IsStopped()) return; |
916 if (FLAG_trace_incremental_marking) { | 915 if (FLAG_trace_incremental_marking) { |
917 int old_generation_size_mb = | 916 int old_generation_size_mb = |
(...skipping 259 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1177 idle_marking_delay_counter_++; | 1176 idle_marking_delay_counter_++; |
1178 } | 1177 } |
1179 | 1178 |
1180 | 1179 |
1181 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1180 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
1182 idle_marking_delay_counter_ = 0; | 1181 idle_marking_delay_counter_ = 0; |
1183 } | 1182 } |
1184 | 1183 |
1185 } // namespace internal | 1184 } // namespace internal |
1186 } // namespace v8 | 1185 } // namespace v8 |
OLD | NEW |