Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
| 9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
| 10 #include "src/heap/concurrent-marking.h" | 10 #include "src/heap/concurrent-marking.h" |
| (...skipping 30 matching lines...) | |
| 41 new_generation_observer_(*this, kAllocatedThreshold), | 41 new_generation_observer_(*this, kAllocatedThreshold), |
| 42 old_generation_observer_(*this, kAllocatedThreshold) {} | 42 old_generation_observer_(*this, kAllocatedThreshold) {} |
| 43 | 43 |
| 44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
| 45 HeapObject* value_heap_obj = HeapObject::cast(value); | 45 HeapObject* value_heap_obj = HeapObject::cast(value); |
| 46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, | 46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, |
| 47 marking_state(value_heap_obj))); | 47 marking_state(value_heap_obj))); |
| 48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); | 48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); |
| 49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); | 49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); |
| 50 | 50 |
| 51 if (is_black && | 51 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { |
| 52 ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) { | |
| 53 WhiteToGreyAndPush(value_heap_obj); | |
| 54 RestartIfNotMarking(); | 52 RestartIfNotMarking(); |
| 55 } | 53 } |
| 56 return is_compacting_ && is_black; | 54 return is_compacting_ && is_black; |
| 57 } | 55 } |
| 58 | 56 |
| 59 | 57 |
| 60 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 58 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
| 61 Object* value) { | 59 Object* value) { |
| 62 if (BaseRecordWrite(obj, value) && slot != NULL) { | 60 if (BaseRecordWrite(obj, value) && slot != NULL) { |
| 63 // Object is not going to be rescanned. We need to record the slot. | 61 // Object is not going to be rescanned. We need to record the slot. |
| (...skipping 50 matching lines...) | |
| 114 } | 112 } |
| 115 | 113 |
| 116 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 114 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
| 117 Object* value) { | 115 Object* value) { |
| 118 if (BaseRecordWrite(host, value)) { | 116 if (BaseRecordWrite(host, value)) { |
| 119 // Object is not going to be rescanned. We need to record the slot. | 117 // Object is not going to be rescanned. We need to record the slot. |
| 120 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 118 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
| 121 } | 119 } |
| 122 } | 120 } |
| 123 | 121 |
| 124 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 122 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
| 125 ObjectMarking::WhiteToGrey(obj, marking_state(obj)); | 123 if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { |
| 126 marking_deque()->Push(obj); | 124 marking_deque()->Push(obj); |
| 125 return true; | |
| 126 } | |
| 127 return false; | |
| 127 } | 128 } |
| 128 | 129 |
| 129 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 130 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
| 130 HeapObject* to) { | 131 HeapObject* to) { |
| 131 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | 132 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
| 132 // This is only used when resizing an object. | 133 // This is only used when resizing an object. |
| 133 DCHECK(MemoryChunk::FromAddress(from->address()) == | 134 DCHECK(MemoryChunk::FromAddress(from->address()) == |
| 134 MemoryChunk::FromAddress(to->address())); | 135 MemoryChunk::FromAddress(to->address())); |
| 135 | 136 |
| 136 if (!IsMarking()) return; | 137 if (!IsMarking()) return; |
| 137 | 138 |
| 138 // If the mark doesn't move, we don't check the color of the object. | 139 // If the mark doesn't move, we don't check the color of the object. |
| 139 // It doesn't matter whether the object is black, since it hasn't changed | 140 // It doesn't matter whether the object is black, since it hasn't changed |
| 140 // size, so the adjustment to the live data count will be zero anyway. | 141 // size, so the adjustment to the live data count will be zero anyway. |
| 141 if (from == to) return; | 142 if (from == to) return; |
| 142 | 143 |
| 143 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); | 144 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); |
| 144 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); | 145 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); |
| 145 | 146 |
| 146 if (Marking::IsBlack(old_mark_bit)) { | 147 if (Marking::IsBlack(old_mark_bit)) { |
| 147 Marking::MarkBlack(new_mark_bit); | 148 bool success = Marking::WhiteToBlack(new_mark_bit); |
| 149 DCHECK(success); | |
| 150 USE(success); | |
| 148 } else if (Marking::IsGrey(old_mark_bit)) { | 151 } else if (Marking::IsGrey(old_mark_bit)) { |
| 149 Marking::WhiteToGrey(new_mark_bit); | 152 bool success = Marking::WhiteToGrey(new_mark_bit); |
| 153 DCHECK(success); | |
| 154 USE(success); | |
| 150 marking_deque()->Push(to); | 155 marking_deque()->Push(to); |
| 151 RestartIfNotMarking(); | 156 RestartIfNotMarking(); |
| 152 } | 157 } |
| 153 } | 158 } |
| 154 | 159 |
| 155 class IncrementalMarkingMarkingVisitor | 160 class IncrementalMarkingMarkingVisitor |
| 156 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { | 161 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { |
| 157 public: | 162 public: |
| 158 static void Initialize() { | 163 static void Initialize() { |
| 159 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); | 164 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); |
| (...skipping 51 matching lines...) | |
| 211 // We will mark cache black with a separate pass when we finish marking. | 216 // We will mark cache black with a separate pass when we finish marking. |
| 212 // Note that GC can happen when the context is not fully initialized, | 217 // Note that GC can happen when the context is not fully initialized, |
| 213 // so the cache can be undefined. | 218 // so the cache can be undefined. |
| 214 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 219 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 215 if (!cache->IsUndefined(map->GetIsolate())) { | 220 if (!cache->IsUndefined(map->GetIsolate())) { |
| 216 if (cache->IsHeapObject()) { | 221 if (cache->IsHeapObject()) { |
| 217 HeapObject* heap_obj = HeapObject::cast(cache); | 222 HeapObject* heap_obj = HeapObject::cast(cache); |
| 218 // Mark the object grey if it is white, do not enqueue it into the marking | 223 // Mark the object grey if it is white, do not enqueue it into the marking |
| 219 // deque. | 224 // deque. |
| 220 Heap* heap = map->GetHeap(); | 225 Heap* heap = map->GetHeap(); |
| 221 if (ObjectMarking::IsWhite( | 226 bool ignored = ObjectMarking::WhiteToGrey( |
| 222 heap_obj, | 227 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
| 223 heap->incremental_marking()->marking_state(heap_obj))) { | 228 USE(ignored); |
| 224 ObjectMarking::WhiteToGrey( | |
| 225 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); | |
| 226 } | |
| 227 } | 229 } |
| 228 } | 230 } |
| 229 VisitNativeContext(map, context); | 231 VisitNativeContext(map, context); |
| 230 } | 232 } |
| 231 | 233 |
| 232 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 234 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
| 233 Object* target = *p; | 235 Object* target = *p; |
| 234 if (target->IsHeapObject()) { | 236 if (target->IsHeapObject()) { |
| 235 heap->mark_compact_collector()->RecordSlot(object, p, target); | 237 heap->mark_compact_collector()->RecordSlot(object, p, target); |
| 236 MarkObject(heap, target); | 238 MarkObject(heap, target); |
| 237 } | 239 } |
| 238 } | 240 } |
| 239 | 241 |
| 240 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, | 242 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, |
| 241 Object** start, Object** end)) { | 243 Object** start, Object** end)) { |
| 242 for (Object** p = start; p < end; p++) { | 244 for (Object** p = start; p < end; p++) { |
| 243 Object* target = *p; | 245 Object* target = *p; |
| 244 if (target->IsHeapObject()) { | 246 if (target->IsHeapObject()) { |
| 245 heap->mark_compact_collector()->RecordSlot(object, p, target); | 247 heap->mark_compact_collector()->RecordSlot(object, p, target); |
| 246 MarkObject(heap, target); | 248 MarkObject(heap, target); |
| 247 } | 249 } |
| 248 } | 250 } |
| 249 } | 251 } |
| 250 | 252 |
| 251 // Marks the object grey and pushes it on the marking stack. | 253 // Marks the object grey and pushes it on the marking stack. |
| 252 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 254 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
| 253 heap->incremental_marking()->MarkGrey(HeapObject::cast(obj)); | 255 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
| 254 } | 256 } |
| 255 | 257 |
| 256 // Marks the object black without pushing it on the marking stack. | 258 // Marks the object black without pushing it on the marking stack. |
| 257 // Returns true if object needed marking and false otherwise. | 259 // Returns true if object needed marking and false otherwise. |
| 258 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 260 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
| 259 HeapObject* heap_object = HeapObject::cast(obj); | 261 HeapObject* heap_object = HeapObject::cast(obj); |
| 260 if (ObjectMarking::IsWhite( | 262 return ObjectMarking::WhiteToBlack( |
| 261 heap_object, | 263 heap_object, heap->incremental_marking()->marking_state(heap_object)); |
| 262 heap->incremental_marking()->marking_state(heap_object))) { | |
| 263 ObjectMarking::WhiteToBlack( | |
| 264 heap_object, heap->incremental_marking()->marking_state(heap_object)); | |
| 265 return true; | |
| 266 } | |
| 267 return false; | |
| 268 } | 264 } |
| 269 }; | 265 }; |
| 270 | 266 |
| 271 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 267 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
| 272 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { | 268 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { |
| 273 Page* page = Page::FromAddress(object->address()); | 269 Page* page = Page::FromAddress(object->address()); |
| 274 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 270 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
| 275 // IterateBlackObject requires us to visit the whole object. | 271 // IterateBlackObject requires us to visit the whole object. |
| 276 page->ResetProgressBar(); | 272 page->ResetProgressBar(); |
| 277 } | 273 } |
| 278 Map* map = object->map(); | 274 Map* map = object->map(); |
| 279 MarkGrey(map); | 275 WhiteToGreyAndPush(map); |
| 280 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 276 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
| 281 } | 277 } |
| 282 } | 278 } |
| 283 | 279 |
| 284 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { | 280 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { |
| 285 public: | 281 public: |
| 286 explicit IncrementalMarkingRootMarkingVisitor( | 282 explicit IncrementalMarkingRootMarkingVisitor( |
| 287 IncrementalMarking* incremental_marking) | 283 IncrementalMarking* incremental_marking) |
| 288 : heap_(incremental_marking->heap()) {} | 284 : heap_(incremental_marking->heap()) {} |
| 289 | 285 |
| 290 void VisitRootPointer(Root root, Object** p) override { | 286 void VisitRootPointer(Root root, Object** p) override { |
| 291 MarkObjectByPointer(p); | 287 MarkObjectByPointer(p); |
| 292 } | 288 } |
| 293 | 289 |
| 294 void VisitRootPointers(Root root, Object** start, Object** end) override { | 290 void VisitRootPointers(Root root, Object** start, Object** end) override { |
| 295 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 291 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| 296 } | 292 } |
| 297 | 293 |
| 298 private: | 294 private: |
| 299 void MarkObjectByPointer(Object** p) { | 295 void MarkObjectByPointer(Object** p) { |
| 300 Object* obj = *p; | 296 Object* obj = *p; |
| 301 if (!obj->IsHeapObject()) return; | 297 if (!obj->IsHeapObject()) return; |
| 302 | 298 |
| 303 heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj)); | 299 heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
| 304 } | 300 } |
| 305 | 301 |
| 306 Heap* heap_; | 302 Heap* heap_; |
| 307 }; | 303 }; |
| 308 | 304 |
| 309 | 305 |
| 310 void IncrementalMarking::Initialize() { | 306 void IncrementalMarking::Initialize() { |
| 311 IncrementalMarkingMarkingVisitor::Initialize(); | 307 IncrementalMarkingMarkingVisitor::Initialize(); |
| 312 } | 308 } |
| 313 | 309 |
| (...skipping 357 matching lines...) | |
| 671 for (int i = 0; i < length; i += 2) { | 667 for (int i = 0; i < length; i += 2) { |
| 672 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 668 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
| 673 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 669 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
| 674 if (cell->cleared()) continue; | 670 if (cell->cleared()) continue; |
| 675 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 671 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
| 676 int new_age; | 672 int new_age; |
| 677 Map* map = Map::cast(cell->value()); | 673 Map* map = Map::cast(cell->value()); |
| 678 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 674 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
| 679 ObjectMarking::IsWhite(map, marking_state(map))) { | 675 ObjectMarking::IsWhite(map, marking_state(map))) { |
| 680 if (ShouldRetainMap(map, age)) { | 676 if (ShouldRetainMap(map, age)) { |
| 681 MarkGrey(map); | 677 WhiteToGreyAndPush(map); |
| 682 } | 678 } |
| 683 Object* prototype = map->prototype(); | 679 Object* prototype = map->prototype(); |
| 684 if (age > 0 && prototype->IsHeapObject() && | 680 if (age > 0 && prototype->IsHeapObject() && |
| 685 ObjectMarking::IsWhite(HeapObject::cast(prototype), | 681 ObjectMarking::IsWhite(HeapObject::cast(prototype), |
| 686 marking_state(HeapObject::cast(prototype)))) { | 682 marking_state(HeapObject::cast(prototype)))) { |
| 687 // The prototype is not marked, age the map. | 683 // The prototype is not marked, age the map. |
| 688 new_age = age - 1; | 684 new_age = age - 1; |
| 689 } else { | 685 } else { |
| 690 // The prototype and the constructor are marked, this map keeps only | 686 // The prototype and the constructor are marked, this map keeps only |
| 691 // transition tree alive, not JSObjects. Do not age the map. | 687 // transition tree alive, not JSObjects. Do not age the map. |
| (...skipping 99 matching lines...) | |
| 791 ObjectMarking::IsBlack(obj, marking_state(obj)))); | 787 ObjectMarking::IsBlack(obj, marking_state(obj)))); |
| 792 // Skip one word filler objects that appear on the | 788 // Skip one word filler objects that appear on the |
| 793 // stack when we perform in place array shift. | 789 // stack when we perform in place array shift. |
| 794 return (obj->map() == filler_map) ? nullptr : obj; | 790 return (obj->map() == filler_map) ? nullptr : obj; |
| 795 } | 791 } |
| 796 }); | 792 }); |
| 797 } | 793 } |
| 798 | 794 |
| 799 | 795 |
| 800 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 796 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
| 801 MarkGrey(map); | 797 WhiteToGreyAndPush(map); |
| 802 | 798 |
| 803 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 799 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
| 804 | 800 |
| 805 #if ENABLE_SLOW_DCHECKS | 801 #if ENABLE_SLOW_DCHECKS |
| 806 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); | 802 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); |
| 807 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 803 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 808 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 804 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
| 809 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 805 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| 810 Marking::IsBlack(mark_bit))); | 806 Marking::IsBlack(mark_bit))); |
| 811 #endif | 807 #endif |
| 812 MarkBlack(obj, size); | |
| 813 } | |
| 814 | |
| 815 void IncrementalMarking::MarkGrey(HeapObject* object) { | |
| 816 if (ObjectMarking::IsWhite(object, marking_state(object))) { | |
| 817 WhiteToGreyAndPush(object); | |
| 818 } | |
| 819 } | |
| 820 | |
| 821 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { | |
| 822 if (ObjectMarking::IsBlack(obj, marking_state(obj))) return; | |
| 823 ObjectMarking::GreyToBlack(obj, marking_state(obj)); | 808 ObjectMarking::GreyToBlack(obj, marking_state(obj)); |
| 824 } | 809 } |
| 825 | 810 |
| 826 intptr_t IncrementalMarking::ProcessMarkingDeque( | 811 intptr_t IncrementalMarking::ProcessMarkingDeque( |
| 827 intptr_t bytes_to_process, ForceCompletionAction completion) { | 812 intptr_t bytes_to_process, ForceCompletionAction completion) { |
| 828 intptr_t bytes_processed = 0; | 813 intptr_t bytes_processed = 0; |
| 829 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || | 814 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || |
| 830 completion == FORCE_COMPLETION)) { | 815 completion == FORCE_COMPLETION)) { |
| 831 HeapObject* obj = marking_deque()->Pop(); | 816 HeapObject* obj = marking_deque()->Pop(); |
| 832 | 817 |
| (...skipping 48 matching lines...) | |
| 881 } | 866 } |
| 882 } | 867 } |
| 883 | 868 |
| 884 Object* context = heap_->native_contexts_list(); | 869 Object* context = heap_->native_contexts_list(); |
| 885 while (!context->IsUndefined(heap_->isolate())) { | 870 while (!context->IsUndefined(heap_->isolate())) { |
| 886 // GC can happen when the context is not fully initialized, | 871 // GC can happen when the context is not fully initialized, |
| 887 // so the cache can be undefined. | 872 // so the cache can be undefined. |
| 888 HeapObject* cache = HeapObject::cast( | 873 HeapObject* cache = HeapObject::cast( |
| 889 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 874 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
| 890 if (!cache->IsUndefined(heap_->isolate())) { | 875 if (!cache->IsUndefined(heap_->isolate())) { |
| 891 if (ObjectMarking::IsGrey(cache, marking_state(cache))) { | 876 // Mark the cache black if it is grey. |
| 892 ObjectMarking::GreyToBlack(cache, marking_state(cache)); | 877 bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); |
| 893 } | 878 USE(ignored); |
| Hannes Payer (out of office), 2017/05/03 08:29:05: Do you want to DCHECK ignored? | |
| 894 } | 879 } |
| 895 context = Context::cast(context)->next_context_link(); | 880 context = Context::cast(context)->next_context_link(); |
| 896 } | 881 } |
| 897 } | 882 } |
| 898 | 883 |
| 899 | 884 |
| 900 void IncrementalMarking::Stop() { | 885 void IncrementalMarking::Stop() { |
| 901 if (IsStopped()) return; | 886 if (IsStopped()) return; |
| 902 if (FLAG_trace_incremental_marking) { | 887 if (FLAG_trace_incremental_marking) { |
| 903 int old_generation_size_mb = | 888 int old_generation_size_mb = |
| (...skipping 259 matching lines...) | |
| 1163 idle_marking_delay_counter_++; | 1148 idle_marking_delay_counter_++; |
| 1164 } | 1149 } |
| 1165 | 1150 |
| 1166 | 1151 |
| 1167 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1152 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1168 idle_marking_delay_counter_ = 0; | 1153 idle_marking_delay_counter_ = 0; |
| 1169 } | 1154 } |
| 1170 | 1155 |
| 1171 } // namespace internal | 1156 } // namespace internal |
| 1172 } // namespace v8 | 1157 } // namespace v8 |
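Taken together, the changes above fold the separate IsWhite check and the WhiteToGrey transition into single bool-returning calls, so callers can branch on whether an object actually needed marking. A minimal, self-contained sketch of that contract, using simplified placeholder types rather than the real V8 classes (HeapObject, the marking deque, and the color enum below are stand-ins for illustration only):

```cpp
#include <deque>

// Simplified tri-color state; stands in for V8's mark bits.
enum class Color { kWhite, kGrey, kBlack };

struct HeapObject {
  Color color = Color::kWhite;
};

class IncrementalMarking {
 public:
  // Returns true only if the object made the white -> grey transition,
  // in which case it is also pushed onto the marking deque; grey and
  // black objects are left untouched and the call reports false.
  bool WhiteToGreyAndPush(HeapObject* obj) {
    if (obj->color != Color::kWhite) return false;
    obj->color = Color::kGrey;
    marking_deque_.push_back(obj);
    return true;
  }

  // Mirrors the rewritten BaseRecordWrite: marking is only restarted when
  // the host is already black and the value really was newly pushed.
  bool BaseRecordWrite(HeapObject* host, HeapObject* value) {
    const bool is_black = host->color == Color::kBlack;
    if (is_black && WhiteToGreyAndPush(value)) {
      RestartIfNotMarking();
    }
    return is_compacting_ && is_black;
  }

 private:
  void RestartIfNotMarking() { /* restart incremental marking if stopped */ }

  std::deque<HeapObject*> marking_deque_;
  bool is_compacting_ = false;
};
```

The same pattern is what lets the old MarkGrey helper disappear: former MarkGrey call sites now call WhiteToGreyAndPush directly and simply ignore the result when they do not care whether a push happened.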
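On the inline question about DCHECK'ing `ignored`: a small, self-contained illustration of the two options under discussion. The enum and helper below are hypothetical stand-ins, not V8 code, and whether the assertion would actually hold depends on whether the normalized map cache is guaranteed to be grey at that point.

```cpp
#include <cassert>

enum class Color { kWhite, kGrey, kBlack };

// Stand-in for the bool-returning ObjectMarking::GreyToBlack used in this CL:
// returns true only when the object really was grey.
bool GreyToBlack(Color* c) {
  if (*c != Color::kGrey) return false;
  *c = Color::kBlack;
  return true;
}

int main() {
  // As landed in the CL: the result is discarded, because the cache may
  // already be black (or still white) when this code runs.
  Color cache = Color::kGrey;
  bool ignored = GreyToBlack(&cache);
  (void)ignored;

  // The variant the reviewer asks about: assert that the transition
  // happened. Only correct if a grey cache is an invariant here.
  Color other_cache = Color::kGrey;
  bool success = GreyToBlack(&other_cache);
  assert(success);
  (void)success;
  return 0;
}
```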