Chromium Code Reviews
| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
| 9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
| 10 #include "src/heap/concurrent-marking.h" | 10 #include "src/heap/concurrent-marking.h" |
| (...skipping 25 matching lines...) | |
| 36 was_activated_(false), | 36 was_activated_(false), |
| 37 black_allocation_(false), | 37 black_allocation_(false), |
| 38 finalize_marking_completed_(false), | 38 finalize_marking_completed_(false), |
| 39 trace_wrappers_toggle_(false), | 39 trace_wrappers_toggle_(false), |
| 40 request_type_(NONE), | 40 request_type_(NONE), |
| 41 new_generation_observer_(*this, kAllocatedThreshold), | 41 new_generation_observer_(*this, kAllocatedThreshold), |
| 42 old_generation_observer_(*this, kAllocatedThreshold) {} | 42 old_generation_observer_(*this, kAllocatedThreshold) {} |
| 43 | 43 |
| 44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 44 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
| 45 HeapObject* value_heap_obj = HeapObject::cast(value); | 45 HeapObject* value_heap_obj = HeapObject::cast(value); |
| 46 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, | 46 DCHECK(!ObjectMarking::IsImpossible<kAtomicity>( |
| 47 marking_state(value_heap_obj))); | 47 value_heap_obj, marking_state(value_heap_obj))); |
| 48 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); | 48 DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj))); |
Michael Lippautz
2017/05/03 07:49:51
Is IsImpossible safe for concurrent marking? I rem…
ulan
2017/05/03 09:00:28
It is not safe indeed. I will remove a bunch of DCHECKs…
Hannes Payer (out of office)
2017/05/03 10:08:53
Is the current implementation of the check not correct?
ulan
2017/05/03 12:09:30
In general IsImpossible can produce false positives…
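The thread above is about why an IsImpossible-style check becomes unsafe once marking runs concurrently. A minimal, hypothetical sketch of the failure mode (the two-bit color encoding, names, and helpers below are invented for illustration and are not the real V8 ObjectMarking code): a predicate built from two separate loads has no atomic snapshot of both mark bits, so it can transiently observe a bit pattern that no valid color ever has.

```cpp
// Illustrative sketch only -- not the real V8 ObjectMarking code. Assumes a
// hypothetical two-bit color encoding: 00 = white, 01 = grey, 11 = black,
// 10 = "impossible". A concurrent marker moves an object white -> grey ->
// black by setting the low bit, then the high bit.
#include <atomic>
#include <cstdio>
#include <thread>

std::atomic<unsigned> low_bit{0};   // set when the object becomes grey
std::atomic<unsigned> high_bit{0};  // additionally set when it becomes black

bool IsImpossibleNonAtomic() {
  // Two independent loads rather than one atomic snapshot of both bits:
  // the marker thread can complete its transitions in between them.
  unsigned lo = low_bit.load(std::memory_order_relaxed);
  unsigned hi = high_bit.load(std::memory_order_relaxed);
  return hi == 1 && lo == 0;  // "10" is not a valid color in this encoding
}

int main() {
  std::thread marker([] {
    low_bit.store(1, std::memory_order_relaxed);   // white -> grey
    high_bit.store(1, std::memory_order_relaxed);  // grey  -> black
  });
  // A DCHECK built on this predicate can fire spuriously: if the reader
  // loads low_bit while the object is still white, then the marker runs,
  // then the reader loads high_bit, it sees lo == 0 and hi == 1.
  if (IsImpossibleNonAtomic()) {
    std::printf("transient 'impossible' state observed\n");
  }
  marker.join();
  return 0;
}
```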
| 49 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); | 49 const bool is_black = |
| | 50 ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)); |
| 50 | 51 |
| 51 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { | 52 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { |
| 52 RestartIfNotMarking(); | 53 RestartIfNotMarking(); |
| 53 } | 54 } |
| 54 return is_compacting_ && is_black; | 55 return is_compacting_ && is_black; |
| 55 } | 56 } |
| 56 | 57 |
| 57 | 58 |
| 58 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 59 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
| 59 Object* value) { | 60 Object* value) { |
| (...skipping 53 matching lines...) | |
| 113 | 114 |
| 114 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 115 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
| 115 Object* value) { | 116 Object* value) { |
| 116 if (BaseRecordWrite(host, value)) { | 117 if (BaseRecordWrite(host, value)) { |
| 117 // Object is not going to be rescanned. We need to record the slot. | 118 // Object is not going to be rescanned. We need to record the slot. |
| 118 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 119 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
| 119 } | 120 } |
| 120 } | 121 } |
| 121 | 122 |
| 122 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 123 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
| 123 if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { | 124 if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) { |
| 124 marking_deque()->Push(obj); | 125 marking_deque()->Push(obj); |
| 125 return true; | 126 return true; |
| 126 } | 127 } |
| 127 return false; | 128 return false; |
| 128 } | 129 } |
| 129 | 130 |
| 130 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 131 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
| 131 HeapObject* to) { | 132 HeapObject* to) { |
| 132 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | 133 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
| 133 // This is only used when resizing an object. | 134 // This is only used when resizing an object. |
| 134 DCHECK(MemoryChunk::FromAddress(from->address()) == | 135 DCHECK(MemoryChunk::FromAddress(from->address()) == |
| 135 MemoryChunk::FromAddress(to->address())); | 136 MemoryChunk::FromAddress(to->address())); |
| 136 | 137 |
| 137 if (!IsMarking()) return; | 138 if (!IsMarking()) return; |
| 138 | 139 |
| 139 // If the mark doesn't move, we don't check the color of the object. | 140 // If the mark doesn't move, we don't check the color of the object. |
| 140 // It doesn't matter whether the object is black, since it hasn't changed | 141 // It doesn't matter whether the object is black, since it hasn't changed |
| 141 // size, so the adjustment to the live data count will be zero anyway. | 142 // size, so the adjustment to the live data count will be zero anyway. |
| 142 if (from == to) return; | 143 if (from == to) return; |
| 143 | 144 |
| 144 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); | 145 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); |
| 145 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); | 146 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); |
| 146 | 147 |
| 147 if (Marking::IsBlack(old_mark_bit)) { | 148 if (Marking::IsBlack<kAtomicity>(old_mark_bit)) { |
| 148 bool success = Marking::WhiteToBlack(new_mark_bit); | 149 bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit); |
| 149 DCHECK(success); | 150 DCHECK(success); |
| 150 USE(success); | 151 USE(success); |
| 151 } else if (Marking::IsGrey(old_mark_bit)) { | 152 } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) { |
| 152 bool success = Marking::WhiteToGrey(new_mark_bit); | 153 bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit); |
| 153 DCHECK(success); | 154 DCHECK(success); |
| 154 USE(success); | 155 USE(success); |
| 155 marking_deque()->Push(to); | 156 marking_deque()->Push(to); |
| 156 RestartIfNotMarking(); | 157 RestartIfNotMarking(); |
| 157 } | 158 } |
| 158 } | 159 } |
| 159 | 160 |
| 160 class IncrementalMarkingMarkingVisitor | 161 class IncrementalMarkingMarkingVisitor |
| 161 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { | 162 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { |
| 162 public: | 163 public: |
| (...skipping 24 matching lines...) | |
| 187 bool scan_until_end = false; | 188 bool scan_until_end = false; |
| 188 do { | 189 do { |
| 189 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), | 190 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), |
| 190 HeapObject::RawField(object, end_offset)); | 191 HeapObject::RawField(object, end_offset)); |
| 191 start_offset = end_offset; | 192 start_offset = end_offset; |
| 192 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); | 193 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); |
| 193 scan_until_end = heap->incremental_marking()->marking_deque()->IsFull(); | 194 scan_until_end = heap->incremental_marking()->marking_deque()->IsFull(); |
| 194 } while (scan_until_end && start_offset < object_size); | 195 } while (scan_until_end && start_offset < object_size); |
| 195 chunk->set_progress_bar(start_offset); | 196 chunk->set_progress_bar(start_offset); |
| 196 if (start_offset < object_size) { | 197 if (start_offset < object_size) { |
| 197 if (ObjectMarking::IsGrey( | 198 if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>( |
| 198 object, heap->incremental_marking()->marking_state(object))) { | 199 object, heap->incremental_marking()->marking_state(object))) { |
| 199 heap->incremental_marking()->marking_deque()->Unshift(object); | 200 heap->incremental_marking()->marking_deque()->Unshift(object); |
| 200 } else { | 201 } else { |
| 201 DCHECK(ObjectMarking::IsBlack( | 202 DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>( |
| 202 object, heap->incremental_marking()->marking_state(object))); | 203 object, heap->incremental_marking()->marking_state(object))); |
| 203 heap->mark_compact_collector()->UnshiftBlack(object); | 204 heap->mark_compact_collector()->UnshiftBlack(object); |
| 204 } | 205 } |
| 205 heap->incremental_marking()->NotifyIncompleteScanOfObject( | 206 heap->incremental_marking()->NotifyIncompleteScanOfObject( |
| 206 object_size - (start_offset - already_scanned_offset)); | 207 object_size - (start_offset - already_scanned_offset)); |
| 207 } | 208 } |
| 208 } else { | 209 } else { |
| 209 FixedArrayVisitor::Visit(map, object); | 210 FixedArrayVisitor::Visit(map, object); |
| 210 } | 211 } |
| 211 } | 212 } |
| 212 | 213 |
| 213 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { | 214 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { |
| 214 Context* context = Context::cast(object); | 215 Context* context = Context::cast(object); |
| 215 | 216 |
| 216 // We will mark cache black with a separate pass when we finish marking. | 217 // We will mark cache black with a separate pass when we finish marking. |
| 217 // Note that GC can happen when the context is not fully initialized, | 218 // Note that GC can happen when the context is not fully initialized, |
| 218 // so the cache can be undefined. | 219 // so the cache can be undefined. |
| 219 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 220 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 220 if (!cache->IsUndefined(map->GetIsolate())) { | 221 if (!cache->IsUndefined(map->GetIsolate())) { |
| 221 if (cache->IsHeapObject()) { | 222 if (cache->IsHeapObject()) { |
| 222 HeapObject* heap_obj = HeapObject::cast(cache); | 223 HeapObject* heap_obj = HeapObject::cast(cache); |
| 223 // Mark the object grey if it is white, do not enqueue it into the marking | 224 // Mark the object grey if it is white, do not enqueue it into the marking |
| 224 // deque. | 225 // deque. |
| 225 Heap* heap = map->GetHeap(); | 226 Heap* heap = map->GetHeap(); |
| 226 bool ignored = ObjectMarking::WhiteToGrey( | 227 bool ignored = |
| 227 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); | 228 ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>( |
| | 229 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); |
| 228 USE(ignored); | 230 USE(ignored); |
| 229 } | 231 } |
| 230 } | 232 } |
| 231 VisitNativeContext(map, context); | 233 VisitNativeContext(map, context); |
| 232 } | 234 } |
| 233 | 235 |
| 234 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 236 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
| 235 Object* target = *p; | 237 Object* target = *p; |
| 236 if (target->IsHeapObject()) { | 238 if (target->IsHeapObject()) { |
| 237 heap->mark_compact_collector()->RecordSlot(object, p, target); | 239 heap->mark_compact_collector()->RecordSlot(object, p, target); |
| (...skipping 14 matching lines...) | |
| 252 | 254 |
| 253 // Marks the object grey and pushes it on the marking stack. | 255 // Marks the object grey and pushes it on the marking stack. |
| 254 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 256 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
| 255 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); | 257 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); |
| 256 } | 258 } |
| 257 | 259 |
| 258 // Marks the object black without pushing it on the marking stack. | 260 // Marks the object black without pushing it on the marking stack. |
| 259 // Returns true if object needed marking and false otherwise. | 261 // Returns true if object needed marking and false otherwise. |
| 260 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 262 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
| 261 HeapObject* heap_object = HeapObject::cast(obj); | 263 HeapObject* heap_object = HeapObject::cast(obj); |
| 262 return ObjectMarking::WhiteToBlack( | 264 return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>( |
| 263 heap_object, heap->incremental_marking()->marking_state(heap_object)); | 265 heap_object, heap->incremental_marking()->marking_state(heap_object)); |
| 264 } | 266 } |
| 265 }; | 267 }; |
| 266 | 268 |
| 267 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 269 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
| 268 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { | 270 if (IsMarking() && |
| | 271 ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) { |
| 269 Page* page = Page::FromAddress(object->address()); | 272 Page* page = Page::FromAddress(object->address()); |
| 270 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 273 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
| 271 // IterateBlackObject requires us to visit the whole object. | 274 // IterateBlackObject requires us to visit the whole object. |
| 272 page->ResetProgressBar(); | 275 page->ResetProgressBar(); |
| 273 } | 276 } |
| 274 Map* map = object->map(); | 277 Map* map = object->map(); |
| 275 WhiteToGreyAndPush(map); | 278 WhiteToGreyAndPush(map); |
| 276 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 279 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
| 277 } | 280 } |
| 278 } | 281 } |
| (...skipping 323 matching lines...) | |
| 602 Object* weak_cell_obj = heap()->encountered_weak_cells(); | 605 Object* weak_cell_obj = heap()->encountered_weak_cells(); |
| 603 Object* weak_cell_head = Smi::kZero; | 606 Object* weak_cell_head = Smi::kZero; |
| 604 WeakCell* prev_weak_cell_obj = NULL; | 607 WeakCell* prev_weak_cell_obj = NULL; |
| 605 while (weak_cell_obj != Smi::kZero) { | 608 while (weak_cell_obj != Smi::kZero) { |
| 606 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 609 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
| 607 // We do not insert cleared weak cells into the list, so the value | 610 // We do not insert cleared weak cells into the list, so the value |
| 608 // cannot be a Smi here. | 611 // cannot be a Smi here. |
| 609 HeapObject* value = HeapObject::cast(weak_cell->value()); | 612 HeapObject* value = HeapObject::cast(weak_cell->value()); |
| 610 // Remove weak cells with live objects from the list, they do not need | 613 // Remove weak cells with live objects from the list, they do not need |
| 611 // clearing. | 614 // clearing. |
| 612 if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) { | 615 if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) { |
| 613 // Record slot, if value is pointing to an evacuation candidate. | 616 // Record slot, if value is pointing to an evacuation candidate. |
| 614 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 617 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
| 615 heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); | 618 heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); |
| 616 // Remove entry somewhere after top. | 619 // Remove entry somewhere after top. |
| 617 if (prev_weak_cell_obj != NULL) { | 620 if (prev_weak_cell_obj != NULL) { |
| 618 prev_weak_cell_obj->set_next(weak_cell->next()); | 621 prev_weak_cell_obj->set_next(weak_cell->next()); |
| 619 } | 622 } |
| 620 weak_cell_obj = weak_cell->next(); | 623 weak_cell_obj = weak_cell->next(); |
| 621 weak_cell->clear_next(the_hole_value); | 624 weak_cell->clear_next(the_hole_value); |
| 622 } else { | 625 } else { |
| (...skipping 10 matching lines...) | |
| 633 | 636 |
| 634 | 637 |
| 635 bool ShouldRetainMap(Map* map, int age) { | 638 bool ShouldRetainMap(Map* map, int age) { |
| 636 if (age == 0) { | 639 if (age == 0) { |
| 637 // The map has aged. Do not retain this map. | 640 // The map has aged. Do not retain this map. |
| 638 return false; | 641 return false; |
| 639 } | 642 } |
| 640 Object* constructor = map->GetConstructor(); | 643 Object* constructor = map->GetConstructor(); |
| 641 Heap* heap = map->GetHeap(); | 644 Heap* heap = map->GetHeap(); |
| 642 if (!constructor->IsHeapObject() || | 645 if (!constructor->IsHeapObject() || |
| 643 ObjectMarking::IsWhite(HeapObject::cast(constructor), | 646 ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>( |
| 644 heap->incremental_marking()->marking_state( | 647 HeapObject::cast(constructor), |
| 645 HeapObject::cast(constructor)))) { | 648 heap->incremental_marking()->marking_state( |
| | 649 HeapObject::cast(constructor)))) { |
| 646 // The constructor is dead, no new objects with this map can | 650 // The constructor is dead, no new objects with this map can |
| 647 // be created. Do not retain this map. | 651 // be created. Do not retain this map. |
| 648 return false; | 652 return false; |
| 649 } | 653 } |
| 650 return true; | 654 return true; |
| 651 } | 655 } |
| 652 | 656 |
| 653 | 657 |
| 654 void IncrementalMarking::RetainMaps() { | 658 void IncrementalMarking::RetainMaps() { |
| 655 // Do not retain dead maps if flag disables it or there is | 659 // Do not retain dead maps if flag disables it or there is |
| 656 // - memory pressure (reduce_memory_footprint_), | 660 // - memory pressure (reduce_memory_footprint_), |
| 657 // - GC is requested by tests or dev-tools (abort_incremental_marking_). | 661 // - GC is requested by tests or dev-tools (abort_incremental_marking_). |
| 658 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || | 662 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || |
| 659 heap()->ShouldAbortIncrementalMarking() || | 663 heap()->ShouldAbortIncrementalMarking() || |
| 660 FLAG_retain_maps_for_n_gc == 0; | 664 FLAG_retain_maps_for_n_gc == 0; |
| 661 ArrayList* retained_maps = heap()->retained_maps(); | 665 ArrayList* retained_maps = heap()->retained_maps(); |
| 662 int length = retained_maps->Length(); | 666 int length = retained_maps->Length(); |
| 663 // The number_of_disposed_maps separates maps in the retained_maps | 667 // The number_of_disposed_maps separates maps in the retained_maps |
| 664 // array that were created before and after context disposal. | 668 // array that were created before and after context disposal. |
| 665 // We do not age and retain disposed maps to avoid memory leaks. | 669 // We do not age and retain disposed maps to avoid memory leaks. |
| 666 int number_of_disposed_maps = heap()->number_of_disposed_maps_; | 670 int number_of_disposed_maps = heap()->number_of_disposed_maps_; |
| 667 for (int i = 0; i < length; i += 2) { | 671 for (int i = 0; i < length; i += 2) { |
| 668 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 672 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
| 669 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 673 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
| 670 if (cell->cleared()) continue; | 674 if (cell->cleared()) continue; |
| 671 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 675 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
| 672 int new_age; | 676 int new_age; |
| 673 Map* map = Map::cast(cell->value()); | 677 Map* map = Map::cast(cell->value()); |
| 674 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 678 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
| 675 ObjectMarking::IsWhite(map, marking_state(map))) { | 679 ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) { |
| 676 if (ShouldRetainMap(map, age)) { | 680 if (ShouldRetainMap(map, age)) { |
| 677 WhiteToGreyAndPush(map); | 681 WhiteToGreyAndPush(map); |
| 678 } | 682 } |
| 679 Object* prototype = map->prototype(); | 683 Object* prototype = map->prototype(); |
| 680 if (age > 0 && prototype->IsHeapObject() && | 684 if (age > 0 && prototype->IsHeapObject() && |
| 681 ObjectMarking::IsWhite(HeapObject::cast(prototype), | 685 ObjectMarking::IsWhite<kAtomicity>( |
| 682 marking_state(HeapObject::cast(prototype)))) { | 686 HeapObject::cast(prototype), |
| | 687 marking_state(HeapObject::cast(prototype)))) { |
| 683 // The prototype is not marked, age the map. | 688 // The prototype is not marked, age the map. |
| 684 new_age = age - 1; | 689 new_age = age - 1; |
| 685 } else { | 690 } else { |
| 686 // The prototype and the constructor are marked, this map keeps only | 691 // The prototype and the constructor are marked, this map keeps only |
| 687 // transition tree alive, not JSObjects. Do not age the map. | 692 // transition tree alive, not JSObjects. Do not age the map. |
| 688 new_age = age; | 693 new_age = age; |
| 689 } | 694 } |
| 690 } else { | 695 } else { |
| 691 new_age = FLAG_retain_maps_for_n_gc; | 696 new_age = FLAG_retain_maps_for_n_gc; |
| 692 } | 697 } |
| (...skipping 70 matching lines...) | |
| 763 MapWord map_word = obj->map_word(); | 768 MapWord map_word = obj->map_word(); |
| 764 if (!map_word.IsForwardingAddress()) { | 769 if (!map_word.IsForwardingAddress()) { |
| 765 // There may be objects on the marking deque that do not exist anymore, | 770 // There may be objects on the marking deque that do not exist anymore, |
| 766 // e.g. left trimmed objects or objects from the root set (frames). | 771 // e.g. left trimmed objects or objects from the root set (frames). |
| 767 // If these object are dead at scavenging time, their marking deque | 772 // If these object are dead at scavenging time, their marking deque |
| 768 // entries will not point to forwarding addresses. Hence, we can discard | 773 // entries will not point to forwarding addresses. Hence, we can discard |
| 769 // them. | 774 // them. |
| 770 return nullptr; | 775 return nullptr; |
| 771 } | 776 } |
| 772 HeapObject* dest = map_word.ToForwardingAddress(); | 777 HeapObject* dest = map_word.ToForwardingAddress(); |
| 773 if (ObjectMarking::IsBlack(dest, marking_state(dest))) { | 778 if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) { |
| 774 // The object is already processed by the marker. | 779 // The object is already processed by the marker. |
| 775 return nullptr; | 780 return nullptr; |
| 776 } | 781 } |
| 777 DCHECK( | 782 DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) || |
| 778 ObjectMarking::IsGrey(obj, marking_state(obj)) || | 783 (obj->IsFiller() && |
| 779 (obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj)))); | 784 ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj)))); |
| 780 return dest; | 785 return dest; |
| 781 } else { | 786 } else { |
| 782 DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) || | 787 DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) || |
| 783 (obj->IsFiller() && | 788 (obj->IsFiller() && |
| 784 ObjectMarking::IsWhite(obj, marking_state(obj))) || | 789 ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) || |
| 785 (MemoryChunk::FromAddress(obj->address()) | 790 (MemoryChunk::FromAddress(obj->address()) |
| 786 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 791 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| 787 ObjectMarking::IsBlack(obj, marking_state(obj)))); | 792 ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)))); |
| 788 // Skip one word filler objects that appear on the | 793 // Skip one word filler objects that appear on the |
| 789 // stack when we perform in place array shift. | 794 // stack when we perform in place array shift. |
| 790 return (obj->map() == filler_map) ? nullptr : obj; | 795 return (obj->map() == filler_map) ? nullptr : obj; |
| 791 } | 796 } |
| 792 }); | 797 }); |
| 793 } | 798 } |
| 794 | 799 |
| 795 | 800 |
| 796 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 801 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
| 797 WhiteToGreyAndPush(map); | 802 WhiteToGreyAndPush(map); |
| 798 | 803 |
| 799 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 804 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
| 800 | 805 |
| 801 #if ENABLE_SLOW_DCHECKS | 806 #if ENABLE_SLOW_DCHECKS |
| 802 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); | 807 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); |
| 803 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 808 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 804 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 809 SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) || |
| 805 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 810 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| 806 Marking::IsBlack(mark_bit))); | 811 Marking::IsBlack<kAtomicity>(mark_bit))); |
| 807 #endif | 812 #endif |
| 808 ObjectMarking::GreyToBlack(obj, marking_state(obj)); | 813 ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj)); |
| 809 } | 814 } |
| 810 | 815 |
| 811 intptr_t IncrementalMarking::ProcessMarkingDeque( | 816 intptr_t IncrementalMarking::ProcessMarkingDeque( |
| 812 intptr_t bytes_to_process, ForceCompletionAction completion) { | 817 intptr_t bytes_to_process, ForceCompletionAction completion) { |
| 813 intptr_t bytes_processed = 0; | 818 intptr_t bytes_processed = 0; |
| 814 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || | 819 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || |
| 815 completion == FORCE_COMPLETION)) { | 820 completion == FORCE_COMPLETION)) { |
| 816 HeapObject* obj = marking_deque()->Pop(); | 821 HeapObject* obj = marking_deque()->Pop(); |
| 817 | 822 |
| 818 // Left trimming may result in white, grey, or black filler objects on the | 823 // Left trimming may result in white, grey, or black filler objects on the |
| 819 // marking deque. Ignore these objects. | 824 // marking deque. Ignore these objects. |
| 820 if (obj->IsFiller()) { | 825 if (obj->IsFiller()) { |
| 821 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); | 826 DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj))); |
| 822 continue; | 827 continue; |
| 823 } | 828 } |
| 824 | 829 |
| 825 Map* map = obj->map(); | 830 Map* map = obj->map(); |
| 826 int size = obj->SizeFromMap(map); | 831 int size = obj->SizeFromMap(map); |
| 827 unscanned_bytes_of_large_object_ = 0; | 832 unscanned_bytes_of_large_object_ = 0; |
| 828 VisitObject(map, obj, size); | 833 VisitObject(map, obj, size); |
| 829 bytes_processed += size - unscanned_bytes_of_large_object_; | 834 bytes_processed += size - unscanned_bytes_of_large_object_; |
| 830 } | 835 } |
| 831 // Report all found wrappers to the embedder. This is necessary as the | 836 // Report all found wrappers to the embedder. This is necessary as the |
| (...skipping 35 matching lines...) | |
| 867 } | 872 } |
| 868 | 873 |
| 869 Object* context = heap_->native_contexts_list(); | 874 Object* context = heap_->native_contexts_list(); |
| 870 while (!context->IsUndefined(heap_->isolate())) { | 875 while (!context->IsUndefined(heap_->isolate())) { |
| 871 // GC can happen when the context is not fully initialized, | 876 // GC can happen when the context is not fully initialized, |
| 872 // so the cache can be undefined. | 877 // so the cache can be undefined. |
| 873 HeapObject* cache = HeapObject::cast( | 878 HeapObject* cache = HeapObject::cast( |
| 874 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 879 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
| 875 if (!cache->IsUndefined(heap_->isolate())) { | 880 if (!cache->IsUndefined(heap_->isolate())) { |
| 876 // Mark the cache black if it is grey. | 881 // Mark the cache black if it is grey. |
| 877 bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); | 882 bool ignored = |
| | 883 ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache)); |
| 878 USE(ignored); | 884 USE(ignored); |
| 879 } | 885 } |
| 880 context = Context::cast(context)->next_context_link(); | 886 context = Context::cast(context)->next_context_link(); |
| 881 } | 887 } |
| 882 } | 888 } |
| 883 | 889 |
| 884 | 890 |
| 885 void IncrementalMarking::Stop() { | 891 void IncrementalMarking::Stop() { |
| 886 if (IsStopped()) return; | 892 if (IsStopped()) return; |
| 887 if (FLAG_trace_incremental_marking) { | 893 if (FLAG_trace_incremental_marking) { |
| (...skipping 260 matching lines...) | |
| 1148 idle_marking_delay_counter_++; | 1154 idle_marking_delay_counter_++; |
| 1149 } | 1155 } |
| 1150 | 1156 |
| 1151 | 1157 |
| 1152 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1158 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1153 idle_marking_delay_counter_ = 0; | 1159 idle_marking_delay_counter_ = 0; |
| 1154 } | 1160 } |
| 1155 | 1161 |
| 1156 } // namespace internal | 1162 } // namespace internal |
| 1157 } // namespace v8 | 1163 } // namespace v8 |
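The mechanical change throughout this file is threading a kAtomicity template argument through the marking predicates and transitions (IsBlack, IsGrey, WhiteToGrey, GreyToBlack, ...). A minimal sketch of how such an access-mode parameter typically selects between plain and atomic mark-bit updates; the enum, function name, and mask layout below are invented stand-ins, not the real V8 declarations.

```cpp
// Hedged sketch of an access-mode-templated marking helper; names are
// hypothetical, not the actual V8 API.
#include <atomic>
#include <cstdint>

enum class AccessMode { NON_ATOMIC, ATOMIC };

// Claims the white -> grey transition on a mark-bit word. Returns true if
// this caller performed the transition, false if the bit was already set.
template <AccessMode mode>
bool WhiteToGrey(std::atomic<uint32_t>* cell, uint32_t grey_mask) {
  if (mode == AccessMode::NON_ATOMIC) {
    // Plain read-modify-write; only valid while a single thread (e.g. the
    // main thread with concurrent marking disabled) touches the bitmap.
    uint32_t v = cell->load(std::memory_order_relaxed);
    if (v & grey_mask) return false;
    cell->store(v | grey_mask, std::memory_order_relaxed);
    return true;
  }
  // One atomic read-modify-write, so two racing markers cannot both claim
  // the same transition.
  uint32_t old = cell->fetch_or(grey_mask, std::memory_order_relaxed);
  return (old & grey_mask) == 0;
}
```

Call sites then pick the mode once, e.g. `WhiteToGrey<AccessMode::ATOMIC>(cell, mask)`, which mirrors how this CL pins each marking call to kAtomicity instead of duplicating atomic and non-atomic call sites.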