Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
| 9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
| 10 #include "src/heap/concurrent-marking.h" | 10 #include "src/heap/concurrent-marking.h" |
| (...skipping 533 matching lines...) | |
| 544 | 544 |
| 545 heap_->CompletelyClearInstanceofCache(); | 545 heap_->CompletelyClearInstanceofCache(); |
| 546 heap_->isolate()->compilation_cache()->MarkCompactPrologue(); | 546 heap_->isolate()->compilation_cache()->MarkCompactPrologue(); |
| 547 | 547 |
| 548 // Mark strong roots grey. | 548 // Mark strong roots grey. |
| 549 IncrementalMarkingRootMarkingVisitor visitor(this); | 549 IncrementalMarkingRootMarkingVisitor visitor(this); |
| 550 heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); | 550 heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG); |
| 551 | 551 |
| 552 if (FLAG_concurrent_marking) { | 552 if (FLAG_concurrent_marking) { |
| 553 ConcurrentMarking* concurrent_marking = heap_->concurrent_marking(); | 553 ConcurrentMarking* concurrent_marking = heap_->concurrent_marking(); |
| 554 heap_->mark_compact_collector()->marking_deque()->Iterate( | |
| 555 [concurrent_marking](HeapObject* obj) { | |
| 556 concurrent_marking->AddRoot(obj); | |
| 557 }); | |
| 558 concurrent_marking->StartTask(); | 554 concurrent_marking->StartTask(); |
| 559 } | 555 } |
| 560 | 556 |
| 561 // Ready to start incremental marking. | 557 // Ready to start incremental marking. |
| 562 if (FLAG_trace_incremental_marking) { | 558 if (FLAG_trace_incremental_marking) { |
| 563 heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n"); | 559 heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n"); |
| 564 } | 560 } |
| 565 } | 561 } |
| 566 | 562 |
| 567 void IncrementalMarking::StartBlackAllocation() { | 563 void IncrementalMarking::StartBlackAllocation() { |
| (...skipping 137 matching lines...) | |
| 705 } | 701 } |
| 706 } | 702 } |
| 707 | 703 |
| 708 void IncrementalMarking::FinalizeIncrementally() { | 704 void IncrementalMarking::FinalizeIncrementally() { |
| 709 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY); | 705 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY); |
| 710 DCHECK(!finalize_marking_completed_); | 706 DCHECK(!finalize_marking_completed_); |
| 711 DCHECK(IsMarking()); | 707 DCHECK(IsMarking()); |
| 712 | 708 |
| 713 double start = heap_->MonotonicallyIncreasingTimeInMs(); | 709 double start = heap_->MonotonicallyIncreasingTimeInMs(); |
| 714 | 710 |
| 715 int old_marking_deque_top = | |
| 716 heap_->mark_compact_collector()->marking_deque()->top(); | |
| 717 | |
| 718 // After finishing incremental marking, we try to discover all unmarked | 711 // After finishing incremental marking, we try to discover all unmarked |
| 719 // objects to reduce the marking load in the final pause. | 712 // objects to reduce the marking load in the final pause. |
| 720 // 1) We scan and mark the roots again to find all changes to the root set. | 713 // 1) We scan and mark the roots again to find all changes to the root set. |
| 721 // 2) Age and retain maps embedded in optimized code. | 714 // 2) Age and retain maps embedded in optimized code. |
| 722 // 3) Remove weak cells with live values from the list of weak cells; they | 715 // 3) Remove weak cells with live values from the list of weak cells; they |
| 723 // do not need processing during GC. | 716 // do not need processing during GC. |
| 724 MarkRoots(); | 717 MarkRoots(); |
| 725 | 718 |
| 726 if (incremental_marking_finalization_rounds_ == 0) { | 719 if (incremental_marking_finalization_rounds_ == 0) { |
| 727 // Map retaining is needed for performance, not correctness, | 720 // Map retaining is needed for performance, not correctness, |
| 728 // so we can do it only once at the beginning of the finalization. | 721 // so we can do it only once at the beginning of the finalization. |
| 729 RetainMaps(); | 722 RetainMaps(); |
| 730 } | 723 } |
| 731 ProcessWeakCells(); | 724 ProcessWeakCells(); |
| 732 | 725 |
| 733 int marking_progress = | 726 int marking_progress = |
| 734 abs(old_marking_deque_top - | 727 heap_->mark_compact_collector()->marking_deque()->Size() + |

ulan, 2017/04/27 17:49:39: This changes behavior.

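For readers following this thread: the OLD code measured progress as the absolute difference between two snapshots of the deque's top() index, while the NEW code reports the deque's current Size() plus the number of cached wrappers still to trace. On a ring buffer those are not the same quantity, which is presumably the behavior change being flagged. Below is a minimal sketch of the arithmetic only, using a toy deque with the bottom()/top()/mask() shape of the old interface, not V8's actual MarkingDeque:

```cpp
#include <cassert>

// Toy ring buffer indexed like the old MarkingDeque interface: entries live
// between bottom (inclusive) and top (exclusive), and indices wrap modulo a
// power-of-two capacity (mask = capacity - 1). Illustration only.
struct ToyDeque {
  int bottom = 0;
  int top = 0;
  int mask = 255;  // capacity 256

  // Current occupancy -- what a Size() accessor would report.
  int Size() const { return (top - bottom + mask + 1) & mask; }

  void Push() { top = (top + 1) & mask; }
};

int main() {
  ToyDeque d;
  for (int i = 0; i < 5; i++) d.Push();   // entries already queued earlier
  int old_top = d.top;                    // snapshot taken before finalization
  for (int i = 0; i < 10; i++) d.Push();  // entries pushed afterwards
  // The delta between top() snapshots only sees the 10 later pushes...
  assert(((d.top - old_top + d.mask + 1) & d.mask) == 10);
  // ...while Size() also counts the 5 entries that were already waiting.
  assert(d.Size() == 15);
  return 0;
}
```

In other words, the old metric approximated "objects newly pushed since finalization started", whereas the new one is "objects still outstanding on the deque", plus the wrapper count.
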
| 735 heap_->mark_compact_collector()->marking_deque()->top()); | 728 static_cast<int>( |
| 736 | 729 heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace()); |
| 737 marking_progress += static_cast<int>( | |
| 738 heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace()); | |
| 739 | 730 |
| 740 double end = heap_->MonotonicallyIncreasingTimeInMs(); | 731 double end = heap_->MonotonicallyIncreasingTimeInMs(); |
| 741 double delta = end - start; | 732 double delta = end - start; |
| 742 if (FLAG_trace_incremental_marking) { | 733 if (FLAG_trace_incremental_marking) { |
| 743 heap()->isolate()->PrintWithTimestamp( | 734 heap()->isolate()->PrintWithTimestamp( |
| 744 "[IncrementalMarking] Finalize incrementally round %d, " | 735 "[IncrementalMarking] Finalize incrementally round %d, " |
| 745 "spent %d ms, marking progress %d.\n", | 736 "spent %d ms, marking progress %d.\n", |
| 746 static_cast<int>(delta), incremental_marking_finalization_rounds_, | 737 static_cast<int>(delta), incremental_marking_finalization_rounds_, |
| 747 marking_progress); | 738 marking_progress); |
| 748 } | 739 } |
| (...skipping 13 matching lines...) | |
| 762 StartBlackAllocation(); | 753 StartBlackAllocation(); |
| 763 } | 754 } |
| 764 } | 755 } |
| 765 | 756 |
| 766 | 757 |
| 767 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { | 758 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { |
| 768 if (!IsMarking()) return; | 759 if (!IsMarking()) return; |
| 769 | 760 |
| 770 MarkingDeque* marking_deque = | 761 MarkingDeque* marking_deque = |
| 771 heap_->mark_compact_collector()->marking_deque(); | 762 heap_->mark_compact_collector()->marking_deque(); |
| 772 int current = marking_deque->bottom(); | |

ulan, 2017/04/27 17:49:39: We no longer can assume contiguous structure of the marking deque.

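The rewrite replaces direct walking of array()/bottom()/top()/mask() with a MarkingDeque::Update() callback, so callers stop depending on the deque being one contiguous ring, per the comment above. As a rough illustration of the callback contract only (the real MarkingDeque is not part of this CL and its storage need not be contiguous), Update() hands each queued object to the callback and keeps whatever non-null object comes back:

```cpp
#include <cstddef>
#include <vector>

class HeapObject;  // opaque here; only pointers are shuffled around

// Toy stand-in for the callback contract assumed by the NEW code;
// not V8's actual MarkingDeque.
class ToyMarkingDeque {
 public:
  void Push(HeapObject* obj) { entries_.push_back(obj); }

  // For each queued entry the callback returns either a replacement object
  // (e.g. a forwarding address) to keep, or nullptr to drop the entry.
  template <typename Callback>
  void Update(Callback callback) {
    std::size_t kept = 0;
    for (HeapObject* obj : entries_) {
      HeapObject* replacement = callback(obj);
      if (replacement != nullptr) entries_[kept++] = replacement;
    }
    entries_.resize(kept);
  }

 private:
  std::vector<HeapObject*> entries_;
};
```

UpdateMarkingDequeAfterScavenge() then only has to express the per-object decision (forwarded, dead, filler, or unchanged) in the lambda, as the NEW column shows.
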
| 773 int mask = marking_deque->mask(); | |
| 774 int limit = marking_deque->top(); | |
| 775 HeapObject** array = marking_deque->array(); | |
| 776 int new_top = current; | |
| 777 | |
| 778 Map* filler_map = heap_->one_pointer_filler_map(); | 763 Map* filler_map = heap_->one_pointer_filler_map(); |
| 779 | 764 Heap* heap = heap_; |
| 780 while (current != limit) { | 765 marking_deque->Update([heap, filler_map](HeapObject* obj) -> HeapObject* { |
| 781 HeapObject* obj = array[current]; | |
| 782 DCHECK(obj->IsHeapObject()); | 766 DCHECK(obj->IsHeapObject()); |
| 783 current = ((current + 1) & mask); | |
| 784 // Only pointers to from space have to be updated. | 767 // Only pointers to from space have to be updated. |
| 785 if (heap_->InFromSpace(obj)) { | 768 if (heap->InFromSpace(obj)) { |
| 786 MapWord map_word = obj->map_word(); | 769 MapWord map_word = obj->map_word(); |
| 787 // There may be objects on the marking deque that do not exist anymore, | 770 // There may be objects on the marking deque that do not exist anymore, |
| 788 // e.g. left trimmed objects or objects from the root set (frames). | 771 // e.g. left trimmed objects or objects from the root set (frames). |
| 789 // If these object are dead at scavenging time, their marking deque | 772 // If these object are dead at scavenging time, their marking deque |
| 790 // entries will not point to forwarding addresses. Hence, we can discard | 773 // entries will not point to forwarding addresses. Hence, we can discard |
| 791 // them. | 774 // them. |
| 792 if (map_word.IsForwardingAddress()) { | 775 if (!map_word.IsForwardingAddress()) return nullptr; |
| 793 HeapObject* dest = map_word.ToForwardingAddress(); | 776 HeapObject* dest = map_word.ToForwardingAddress(); |
| 794 if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest))) | 777 if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest))) { |
| 795 continue; | 778 // The object is already processed by the marker. |
| 796 array[new_top] = dest; | 779 return nullptr; |
| 797 new_top = ((new_top + 1) & mask); | |
| 798 DCHECK(new_top != marking_deque->bottom()); | |
| 799 DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) || | |
| 800 (obj->IsFiller() && | |
| 801 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)))); | |
| 802 } | 780 } |
| 803 } else if (obj->map() != filler_map) { | 781 DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) || |
| | 782 (obj->IsFiller() && |
| | 783 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)))); |
| | 784 return dest; |
| | 785 } else { |
| 804 // Skip one word filler objects that appear on the | 786 // Skip one word filler objects that appear on the |
| 805 // stack when we perform in place array shift. | 787 // stack when we perform in place array shift. |
| 806 array[new_top] = obj; | |
| 807 new_top = ((new_top + 1) & mask); | |
| 808 DCHECK(new_top != marking_deque->bottom()); | |
| 809 DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) || | 788 DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) || |
| 810 (obj->IsFiller() && | 789 (obj->IsFiller() && |
| 811 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) || | 790 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) || |
| 812 (MemoryChunk::FromAddress(obj->address()) | 791 (MemoryChunk::FromAddress(obj->address()) |
| 813 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 792 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| 814 ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)))); | 793 ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)))); |
| | 794 return (obj->map() == filler_map) ? nullptr : obj; |
| 815 } | 795 } |
| 816 } | 796 }); |
| 817 marking_deque->set_top(new_top); | |
| 818 } | 797 } |
| 819 | 798 |
| 820 | 799 |
| 821 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 800 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
| 822 MarkGrey(heap_, map); | 801 MarkGrey(heap_, map); |
| 823 | 802 |
| 824 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 803 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
| 825 | 804 |
| 826 #if ENABLE_SLOW_DCHECKS | 805 #if ENABLE_SLOW_DCHECKS |
| 827 MarkBit mark_bit = | 806 MarkBit mark_bit = |
| (...skipping 359 matching lines...) | |
| 1187 idle_marking_delay_counter_++; | 1166 idle_marking_delay_counter_++; |
| 1188 } | 1167 } |
| 1189 | 1168 |
| 1190 | 1169 |
| 1191 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1170 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1192 idle_marking_delay_counter_ = 0; | 1171 idle_marking_delay_counter_ = 0; |
| 1193 } | 1172 } |
| 1194 | 1173 |
| 1195 } // namespace internal | 1174 } // namespace internal |
| 1196 } // namespace v8 | 1175 } // namespace v8 |