| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
| 9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
| 10 #include "src/heap/concurrent-marking.h" | 10 #include "src/heap/concurrent-marking.h" |
| (...skipping 39 matching lines...) |
| 50 new_generation_observer_(*this, kAllocatedThreshold), | 50 new_generation_observer_(*this, kAllocatedThreshold), |
| 51 old_generation_observer_(*this, kAllocatedThreshold) {} | 51 old_generation_observer_(*this, kAllocatedThreshold) {} |
| 52 | 52 |
| 53 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 53 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
| 54 HeapObject* value_heap_obj = HeapObject::cast(value); | 54 HeapObject* value_heap_obj = HeapObject::cast(value); |
| 55 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, | 55 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, |
| 56 marking_state(value_heap_obj))); | 56 marking_state(value_heap_obj))); |
| 57 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); | 57 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); |
| 58 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); | 58 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); |
| 59 | 59 |
| 60 if (is_black && | 60 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { |
| 61 ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) { | |
| 62 WhiteToGreyAndPush(value_heap_obj); | |
| 63 RestartIfNotMarking(); | 61 RestartIfNotMarking(); |
| 64 } | 62 } |
| 65 return is_compacting_ && is_black; | 63 return is_compacting_ && is_black; |
| 66 } | 64 } |
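
This hunk folds the old `ObjectMarking::IsWhite` check into the new bool-returning `WhiteToGreyAndPush`, so shading the value and deciding whether to restart marking happen in one step. For orientation, a minimal sketch of the Dijkstra-style insertion barrier that `BaseRecordWrite` implements (the `Obj` type and field names are illustrative, not V8's API):

```cpp
// Hypothetical mini-model of the write barrier, not V8 code.
struct Obj {
  bool black = false;  // Fully scanned by the marker.
  bool grey = false;   // Discovered, body not yet scanned.
};

// Called after "host->slot = value": if a scanned (black) host now points
// at an undiscovered (white) value, shade the value grey so the marker
// revisits it; otherwise incremental marking could miss the new edge.
bool RecordWrite(Obj* host, Obj* value, bool compacting) {
  if (host->black && !value->black && !value->grey) {
    value->grey = true;  // V8: WhiteToGreyAndPush + RestartIfNotMarking.
  }
  // Slots in black hosts must additionally be recorded when compacting.
  return compacting && host->black;
}
```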
| 67 | 65 |
| 68 | 66 |
| 69 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 67 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
| 70 Object* value) { | 68 Object* value) { |
| 71 if (BaseRecordWrite(obj, value) && slot != NULL) { | 69 if (BaseRecordWrite(obj, value) && slot != NULL) { |
| 72 // Object is not going to be rescanned. We need to record the slot. | 70 // Object is not going to be rescanned. We need to record the slot. |
| (...skipping 50 matching lines...) |
| 123 } | 121 } |
| 124 | 122 |
| 125 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 123 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
| 126 Object* value) { | 124 Object* value) { |
| 127 if (BaseRecordWrite(host, value)) { | 125 if (BaseRecordWrite(host, value)) { |
| 128 // Object is not going to be rescanned. We need to record the slot. | 126 // Object is not going to be rescanned. We need to record the slot. |
| 129 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 127 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
| 130 } | 128 } |
| 131 } | 129 } |
| 132 | 130 |
| 133 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 131 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
| 134 ObjectMarking::WhiteToGrey(obj, marking_state(obj)); | 132 if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { |
| 135 marking_deque()->Push(obj); | 133 marking_deque()->Push(obj); |
| 134 return true; |
| 135 } |
| 136 return false; |
| 136 } | 137 } |
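
This signature change is the core of the CL: `WhiteToGreyAndPush` now reports whether the caller actually performed the white-to-grey transition, so an object is claimed and pushed exactly once even when the main thread races with the concurrent marker. A sketch of how a bool-returning transition can be built on compare-and-swap (the `cell`/`mask` bitmap layout is an assumption for illustration, not V8's `Bitmap` API):

```cpp
#include <atomic>
#include <cstdint>

// Returns true only for the thread that flips the mark bit from 0 to 1,
// i.e. the thread that "claims" the object; all racing callers see false
// and must not push the object a second time.
bool WhiteToGrey(std::atomic<uint32_t>* cell, uint32_t mask) {
  uint32_t old_value = cell->load(std::memory_order_relaxed);
  do {
    if (old_value & mask) return false;  // Already grey (or black).
  } while (!cell->compare_exchange_weak(old_value, old_value | mask,
                                        std::memory_order_relaxed));
  return true;
}
```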
| 137 | 138 |
| 138 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 139 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
| 139 HeapObject* to) { | 140 HeapObject* to) { |
| 140 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | 141 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
| 141 // This is only used when resizing an object. | 142 // This is only used when resizing an object. |
| 142 DCHECK(MemoryChunk::FromAddress(from->address()) == | 143 DCHECK(MemoryChunk::FromAddress(from->address()) == |
| 143 MemoryChunk::FromAddress(to->address())); | 144 MemoryChunk::FromAddress(to->address())); |
| 144 | 145 |
| 145 if (!IsMarking()) return; | 146 if (!IsMarking()) return; |
| 146 | 147 |
| 147 // If the mark doesn't move, we don't check the color of the object. | 148 // If the mark doesn't move, we don't check the color of the object. |
| 148 // It doesn't matter whether the object is black, since it hasn't changed | 149 // It doesn't matter whether the object is black, since it hasn't changed |
| 149 // size, so the adjustment to the live data count will be zero anyway. | 150 // size, so the adjustment to the live data count will be zero anyway. |
| 150 if (from == to) return; | 151 if (from == to) return; |
| 151 | 152 |
| 152 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); | 153 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); |
| 153 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); | 154 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); |
| 154 | 155 |
| 155 if (Marking::IsBlack(old_mark_bit)) { | 156 if (Marking::IsBlack(old_mark_bit)) { |
| 156 Marking::MarkBlack(new_mark_bit); | 157 if (from->address() + kPointerSize == to->address()) { |
| 158 // The old and the new markbits overlap. The |to| object has the |
| 159 // grey color. To make it black, we need to set the second bit. |
| 160 DCHECK(new_mark_bit.Get()); |
| 161 new_mark_bit.Next().Set(); |
| 162 } else { |
| 163 bool success = Marking::WhiteToBlack(new_mark_bit); |
| 164 DCHECK(success); |
| 165 USE(success); |
| 166 } |
| 157 } else if (Marking::IsGrey(old_mark_bit)) { | 167 } else if (Marking::IsGrey(old_mark_bit)) { |
| 158 Marking::WhiteToGrey(new_mark_bit); | 168 if (from->address() + kPointerSize == to->address()) { |
| 169 // The old and the new markbits overlap. The |to| object has the |
| 170 // white color. To make it black, we need to set both bits. |
| 171 // Note that Marking::WhiteToGrey does not work here because |
| 172 // old_mark_bit.Next() can be set by the concurrent marker at any time. |
| 173 new_mark_bit.Set(); |
| 174 new_mark_bit.Next().Set(); |
| 175 } else { |
| 176 bool success = Marking::WhiteToGrey(new_mark_bit); |
| 177 DCHECK(success); |
| 178 USE(success); |
| 179 } |
| 159 marking_deque()->Push(to); | 180 marking_deque()->Push(to); |
| 160 RestartIfNotMarking(); | 181 RestartIfNotMarking(); |
| 161 } | 182 } |
| 162 } | 183 } |
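
The new branches handle resizing by exactly one word (`from + kPointerSize == to`). With one mark bit per pointer-size word and a two-bit color encoding (white = 00, grey = 10, black = 11), the bit pairs of `from` and `to` then overlap: `to`'s first bit is `from`'s second bit. That is why the plain `WhiteToBlack`/`WhiteToGrey` transitions cannot be used here: the concurrent marker may blacken `from`, flipping `old_mark_bit.Next()` underneath a CAS on `to`'s bits. The index arithmetic, as a sketch (assumed helper, not V8's `MarkBitFrom`):

```cpp
#include <cstdint>

constexpr uintptr_t kPointerSize = 8;  // 64-bit build assumed.

// One mark bit per word; an object's color is the two consecutive bits
// starting at its own index.
uintptr_t MarkBitIndex(uintptr_t addr, uintptr_t chunk_start) {
  return (addr - chunk_start) / kPointerSize;
}

// If to == from + kPointerSize, then
//   MarkBitIndex(to, start) == MarkBitIndex(from, start) + 1,
// so |to|'s first color bit is the same bit as |from|'s second one, and
// the code above manipulates new_mark_bit / new_mark_bit.Next() directly.
```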
| 163 | 184 |
| 164 class IncrementalMarkingMarkingVisitor | 185 class IncrementalMarkingMarkingVisitor |
| 165 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { | 186 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { |
| 166 public: | 187 public: |
| 167 static void Initialize() { | 188 static void Initialize() { |
| 168 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); | 189 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); |
| (...skipping 51 matching lines...) |
| 220 // We will mark the cache black with a separate pass when we finish marking. | 241 // We will mark the cache black with a separate pass when we finish marking. |
| 221 // Note that GC can happen when the context is not fully initialized, | 242 // Note that GC can happen when the context is not fully initialized, |
| 222 // so the cache can be undefined. | 243 // so the cache can be undefined. |
| 223 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 244 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 224 if (!cache->IsUndefined(map->GetIsolate())) { | 245 if (!cache->IsUndefined(map->GetIsolate())) { |
| 225 if (cache->IsHeapObject()) { | 246 if (cache->IsHeapObject()) { |
| 226 HeapObject* heap_obj = HeapObject::cast(cache); | 247 HeapObject* heap_obj = HeapObject::cast(cache); |
| 227 // Mark the object grey if it is white; do not enqueue it into the marking | 248 // Mark the object grey if it is white; do not enqueue it into the marking |
| 228 // deque. | 249 // deque. |
| 229 Heap* heap = map->GetHeap(); | 250 Heap* heap = map->GetHeap(); |
| 230 if (ObjectMarking::IsWhite( | 251 bool ignored = ObjectMarking::WhiteToGrey( |
| 231 heap_obj, | 252 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
| 232 heap->incremental_marking()->marking_state(heap_obj))) { | 253 USE(ignored); |
| 233 ObjectMarking::WhiteToGrey( | |
| 234 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); | |
| 235 } | |
| 236 } | 254 } |
| 237 } | 255 } |
| 238 VisitNativeContext(map, context); | 256 VisitNativeContext(map, context); |
| 239 } | 257 } |
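
The result of the now bool-returning `WhiteToGrey` is deliberately discarded here: the cache is shaded grey but intentionally not pushed onto the deque (a separate pass blackens it later), so it does not matter which thread won the transition. `USE` merely consumes the value to keep unused-variable warnings quiet; roughly (simplified from V8's base/macros.h):

```cpp
// Simplified stand-in for V8's USE helper: swallow a value so that
// "bool ignored = ...;" compiles cleanly under -Wunused-variable.
template <typename T>
inline void USE(T&&) {}
```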
| 240 | 258 |
| 241 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 259 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
| 242 Object* target = *p; | 260 Object* target = *p; |
| 243 if (target->IsHeapObject()) { | 261 if (target->IsHeapObject()) { |
| 244 heap->mark_compact_collector()->RecordSlot(object, p, target); | 262 heap->mark_compact_collector()->RecordSlot(object, p, target); |
| 245 MarkObject(heap, target); | 263 MarkObject(heap, target); |
| 246 } | 264 } |
| 247 } | 265 } |
| 248 | 266 |
| 249 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, | 267 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, |
| 250 Object** start, Object** end)) { | 268 Object** start, Object** end)) { |
| 251 for (Object** p = start; p < end; p++) { | 269 for (Object** p = start; p < end; p++) { |
| 252 Object* target = *p; | 270 Object* target = *p; |
| 253 if (target->IsHeapObject()) { | 271 if (target->IsHeapObject()) { |
| 254 heap->mark_compact_collector()->RecordSlot(object, p, target); | 272 heap->mark_compact_collector()->RecordSlot(object, p, target); |
| 255 MarkObject(heap, target); | 273 MarkObject(heap, target); |
| 256 } | 274 } |
| 257 } | 275 } |
| 258 } | 276 } |
| 259 | 277 |
| 260 // Marks the object grey and pushes it on the marking stack. | 278 // Marks the object grey and pushes it on the marking stack. |
| 261 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 279 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
| 262 heap->incremental_marking()->MarkGrey(HeapObject::cast(obj)); | 280 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
| 263 } | 281 } |
| 264 | 282 |
| 265 // Marks the object black without pushing it on the marking stack. | 283 // Marks the object black without pushing it on the marking stack. |
| 266 // Returns true if object needed marking and false otherwise. | 284 // Returns true if object needed marking and false otherwise. |
| 267 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 285 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
| 268 HeapObject* heap_object = HeapObject::cast(obj); | 286 HeapObject* heap_object = HeapObject::cast(obj); |
| 269 if (ObjectMarking::IsWhite( | 287 return ObjectMarking::WhiteToBlack( |
| 270 heap_object, | 288 heap_object, heap->incremental_marking()->marking_state(heap_object)); |
| 271 heap->incremental_marking()->marking_state(heap_object))) { | |
| 272 ObjectMarking::WhiteToBlack( | |
| 273 heap_object, heap->incremental_marking()->marking_state(heap_object)); | |
| 274 return true; | |
| 275 } | |
| 276 return false; | |
| 277 } | 289 } |
| 278 }; | 290 }; |
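
`MarkObjectWithoutPush` shows the same collapse as the earlier hunks: the old `IsWhite` test followed by `WhiteToBlack` left a window in which another thread could mark the object between the check and the transition, and the hand-written true/false is exactly what the atomic call now returns. In miniature (a hypothetical three-color atomic, not V8's bitmap encoding):

```cpp
#include <atomic>
#include <cstdint>

enum Color : uint8_t { kWhite, kGrey, kBlack };

// Succeeds, and returns true, only for the single caller that saw white;
// a concurrent marker that got there first makes this return false.
bool WhiteToBlack(std::atomic<uint8_t>& color) {
  uint8_t expected = kWhite;
  return color.compare_exchange_strong(expected, kBlack);
}
```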
| 279 | 291 |
| 280 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 292 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
| 281 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { | 293 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { |
| 282 Page* page = Page::FromAddress(object->address()); | 294 Page* page = Page::FromAddress(object->address()); |
| 283 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 295 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
| 284 // IterateBlackObject requires us to visit the whole object. | 296 // IterateBlackObject requires us to visit the whole object. |
| 285 page->ResetProgressBar(); | 297 page->ResetProgressBar(); |
| 286 } | 298 } |
| 287 Map* map = object->map(); | 299 Map* map = object->map(); |
| 288 MarkGrey(map); | 300 WhiteToGreyAndPush(map); |
| 289 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 301 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
| 290 } | 302 } |
| 291 } | 303 } |
| 292 | 304 |
| 293 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { | 305 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { |
| 294 public: | 306 public: |
| 295 explicit IncrementalMarkingRootMarkingVisitor( | 307 explicit IncrementalMarkingRootMarkingVisitor( |
| 296 IncrementalMarking* incremental_marking) | 308 IncrementalMarking* incremental_marking) |
| 297 : heap_(incremental_marking->heap()) {} | 309 : heap_(incremental_marking->heap()) {} |
| 298 | 310 |
| 299 void VisitRootPointer(Root root, Object** p) override { | 311 void VisitRootPointer(Root root, Object** p) override { |
| 300 MarkObjectByPointer(p); | 312 MarkObjectByPointer(p); |
| 301 } | 313 } |
| 302 | 314 |
| 303 void VisitRootPointers(Root root, Object** start, Object** end) override { | 315 void VisitRootPointers(Root root, Object** start, Object** end) override { |
| 304 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 316 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| 305 } | 317 } |
| 306 | 318 |
| 307 private: | 319 private: |
| 308 void MarkObjectByPointer(Object** p) { | 320 void MarkObjectByPointer(Object** p) { |
| 309 Object* obj = *p; | 321 Object* obj = *p; |
| 310 if (!obj->IsHeapObject()) return; | 322 if (!obj->IsHeapObject()) return; |
| 311 | 323 |
| 312 heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj)); | 324 heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
| 313 } | 325 } |
| 314 | 326 |
| 315 Heap* heap_; | 327 Heap* heap_; |
| 316 }; | 328 }; |
| 317 | 329 |
| 318 | 330 |
| 319 void IncrementalMarking::Initialize() { | 331 void IncrementalMarking::Initialize() { |
| 320 IncrementalMarkingMarkingVisitor::Initialize(); | 332 IncrementalMarkingMarkingVisitor::Initialize(); |
| 321 } | 333 } |
| 322 | 334 |
| (...skipping 370 matching lines...) |
| 693 for (int i = 0; i < length; i += 2) { | 705 for (int i = 0; i < length; i += 2) { |
| 694 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 706 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
| 695 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 707 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
| 696 if (cell->cleared()) continue; | 708 if (cell->cleared()) continue; |
| 697 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 709 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
| 698 int new_age; | 710 int new_age; |
| 699 Map* map = Map::cast(cell->value()); | 711 Map* map = Map::cast(cell->value()); |
| 700 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 712 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
| 701 ObjectMarking::IsWhite(map, marking_state(map))) { | 713 ObjectMarking::IsWhite(map, marking_state(map))) { |
| 702 if (ShouldRetainMap(map, age)) { | 714 if (ShouldRetainMap(map, age)) { |
| 703 MarkGrey(map); | 715 WhiteToGreyAndPush(map); |
| 704 } | 716 } |
| 705 Object* prototype = map->prototype(); | 717 Object* prototype = map->prototype(); |
| 706 if (age > 0 && prototype->IsHeapObject() && | 718 if (age > 0 && prototype->IsHeapObject() && |
| 707 ObjectMarking::IsWhite(HeapObject::cast(prototype), | 719 ObjectMarking::IsWhite(HeapObject::cast(prototype), |
| 708 marking_state(HeapObject::cast(prototype)))) { | 720 marking_state(HeapObject::cast(prototype)))) { |
| 709 // The prototype is not marked, age the map. | 721 // The prototype is not marked, age the map. |
| 710 new_age = age - 1; | 722 new_age = age - 1; |
| 711 } else { | 723 } else { |
| 712 // The prototype and the constructor are marked; this map keeps only the | 724 // The prototype and the constructor are marked; this map keeps only the |
| 713 // transition tree alive, not JSObjects. Do not age the map. | 725 // transition tree alive, not JSObjects. Do not age the map. |
| (...skipping 99 matching lines...) |
| 813 ObjectMarking::IsBlack(obj, marking_state(obj)))); | 825 ObjectMarking::IsBlack(obj, marking_state(obj)))); |
| 814 // Skip one-word filler objects that appear on the | 826 // Skip one-word filler objects that appear on the |
| 815 // stack when we perform an in-place array shift. | 827 // stack when we perform an in-place array shift. |
| 816 return (obj->map() == filler_map) ? nullptr : obj; | 828 return (obj->map() == filler_map) ? nullptr : obj; |
| 817 } | 829 } |
| 818 }); | 830 }); |
| 819 } | 831 } |
| 820 | 832 |
| 821 | 833 |
| 822 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 834 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
| 823 MarkGrey(map); | 835 WhiteToGreyAndPush(map); |
| 824 | 836 |
| 825 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 837 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
| 826 | 838 |
| 827 #if ENABLE_SLOW_DCHECKS | 839 #if ENABLE_SLOW_DCHECKS |
| 828 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); | 840 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); |
| 829 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 841 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 830 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 842 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
| 831 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 843 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| 832 Marking::IsBlack(mark_bit))); | 844 Marking::IsBlack(mark_bit))); |
| 833 #endif | 845 #endif |
| 834 MarkBlack(obj, size); | |
| 835 } | |
| 836 | |
| 837 void IncrementalMarking::MarkGrey(HeapObject* object) { | |
| 838 if (ObjectMarking::IsWhite(object, marking_state(object))) { | |
| 839 WhiteToGreyAndPush(object); | |
| 840 } | |
| 841 } | |
| 842 | |
| 843 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { | |
| 844 if (ObjectMarking::IsBlack(obj, marking_state(obj))) return; | |
| 845 ObjectMarking::GreyToBlack(obj, marking_state(obj)); | 846 ObjectMarking::GreyToBlack(obj, marking_state(obj)); |
| 846 } | 847 } |
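
Note that the `IsBlack` early return that used to guard `MarkBlack` is gone. Under the two-bit encoding, grey-to-black only sets the second bit, so applying it to an already-black object is a harmless no-op, and the bool result (checked elsewhere in this CL) identifies the one thread that performed the blackening. A sketch of that idempotence (assumed cell/mask layout, not V8's `Bitmap` API):

```cpp
#include <atomic>
#include <cstdint>

// Precondition: the object is at least grey. fetch_or makes the update
// atomic and idempotent: black stays black, and exactly one caller
// observes the 0 -> 1 flip of the second bit.
bool GreyToBlack(std::atomic<uint32_t>* cell, uint32_t second_bit_mask) {
  uint32_t old_value =
      cell->fetch_or(second_bit_mask, std::memory_order_relaxed);
  return (old_value & second_bit_mask) == 0;
}
```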
| 847 | 848 |
| 848 intptr_t IncrementalMarking::ProcessMarkingDeque( | 849 intptr_t IncrementalMarking::ProcessMarkingDeque( |
| 849 intptr_t bytes_to_process, ForceCompletionAction completion) { | 850 intptr_t bytes_to_process, ForceCompletionAction completion) { |
| 850 intptr_t bytes_processed = 0; | 851 intptr_t bytes_processed = 0; |
| 851 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || | 852 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || |
| 852 completion == FORCE_COMPLETION)) { | 853 completion == FORCE_COMPLETION)) { |
| 853 HeapObject* obj = marking_deque()->Pop(); | 854 HeapObject* obj = marking_deque()->Pop(); |
| 854 | 855 |
| (...skipping 48 matching lines...) |
| 903 } | 904 } |
| 904 } | 905 } |
| 905 | 906 |
| 906 Object* context = heap_->native_contexts_list(); | 907 Object* context = heap_->native_contexts_list(); |
| 907 while (!context->IsUndefined(heap_->isolate())) { | 908 while (!context->IsUndefined(heap_->isolate())) { |
| 908 // GC can happen when the context is not fully initialized, | 909 // GC can happen when the context is not fully initialized, |
| 909 // so the cache can be undefined. | 910 // so the cache can be undefined. |
| 910 HeapObject* cache = HeapObject::cast( | 911 HeapObject* cache = HeapObject::cast( |
| 911 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 912 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
| 912 if (!cache->IsUndefined(heap_->isolate())) { | 913 if (!cache->IsUndefined(heap_->isolate())) { |
| 913 if (ObjectMarking::IsGrey(cache, marking_state(cache))) { | 914 // Mark the cache black if it is grey. |
| 914 ObjectMarking::GreyToBlack(cache, marking_state(cache)); | 915 bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); |
| 915 } | 916 USE(ignored); |
| 916 } | 917 } |
| 917 context = Context::cast(context)->next_context_link(); | 918 context = Context::cast(context)->next_context_link(); |
| 918 } | 919 } |
| 919 } | 920 } |
| 920 | 921 |
| 921 | 922 |
| 922 void IncrementalMarking::Stop() { | 923 void IncrementalMarking::Stop() { |
| 923 if (IsStopped()) return; | 924 if (IsStopped()) return; |
| 924 if (FLAG_trace_incremental_marking) { | 925 if (FLAG_trace_incremental_marking) { |
| 925 int old_generation_size_mb = | 926 int old_generation_size_mb = |
| (...skipping 259 matching lines...) |
| 1185 idle_marking_delay_counter_++; | 1186 idle_marking_delay_counter_++; |
| 1186 } | 1187 } |
| 1187 | 1188 |
| 1188 | 1189 |
| 1189 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1190 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1190 idle_marking_delay_counter_ = 0; | 1191 idle_marking_delay_counter_ = 0; |
| 1191 } | 1192 } |
| 1192 | 1193 |
| 1193 } // namespace internal | 1194 } // namespace internal |
| 1194 } // namespace v8 | 1195 } // namespace v8 |