| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
| 9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
| 10 #include "src/heap/gc-idle-time-handler.h" | 10 #include "src/heap/gc-idle-time-handler.h" |
| (...skipping 528 matching lines...) |
| 539 Object* e = stubs->ValueAt(i); | 539 Object* e = stubs->ValueAt(i); |
| 540 if (e->IsCode()) { | 540 if (e->IsCode()) { |
| 541 RecordWriteStub::Patch(Code::cast(e), mode); | 541 RecordWriteStub::Patch(Code::cast(e), mode); |
| 542 } | 542 } |
| 543 } | 543 } |
| 544 } | 544 } |
| 545 } | 545 } |
| 546 } | 546 } |
| 547 | 547 |
| 548 | 548 |
| 549 static void IncrementalMarkingStepCallback(int bytes_allocated, void* arg) { |
| 550 auto im = static_cast<IncrementalMarking*>(arg); |
| 551 im->Step(bytes_allocated, IncrementalMarking::GC_VIA_STACK_GUARD); |
| 552 } |
| 553 |
| 554 |
| 549 void IncrementalMarking::Start(const char* reason) { | 555 void IncrementalMarking::Start(const char* reason) { |
| 550 if (FLAG_trace_incremental_marking) { | 556 if (FLAG_trace_incremental_marking) { |
| 551 PrintF("[IncrementalMarking] Start (%s)\n", | 557 PrintF("[IncrementalMarking] Start (%s)\n", |
| 552 (reason == nullptr) ? "unknown reason" : reason); | 558 (reason == nullptr) ? "unknown reason" : reason); |
| 553 } | 559 } |
| 554 DCHECK(FLAG_incremental_marking); | 560 DCHECK(FLAG_incremental_marking); |
| 555 DCHECK(state_ == STOPPED); | 561 DCHECK(state_ == STOPPED); |
| 556 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); | 562 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); |
| 557 DCHECK(!heap_->isolate()->serializer_enabled()); | 563 DCHECK(!heap_->isolate()->serializer_enabled()); |
| 558 | 564 |
| 559 ResetStepCounters(); | 565 ResetStepCounters(); |
| 560 | 566 |
| 561 was_activated_ = true; | 567 was_activated_ = true; |
| 562 | 568 |
| 563 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 569 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
| 564 StartMarking(); | 570 StartMarking(); |
| 565 } else { | 571 } else { |
| 566 if (FLAG_trace_incremental_marking) { | 572 if (FLAG_trace_incremental_marking) { |
| 567 PrintF("[IncrementalMarking] Start sweeping.\n"); | 573 PrintF("[IncrementalMarking] Start sweeping.\n"); |
| 568 } | 574 } |
| 569 state_ = SWEEPING; | 575 state_ = SWEEPING; |
| 570 } | 576 } |
| 571 | 577 |
| 572 heap_->LowerInlineAllocationLimit(kAllocatedThreshold); | 578 heap_->new_space()->AddInlineAllocationObserver( |
| 579 kAllocatedThreshold, IncrementalMarkingStepCallback, this); |
| 580 |
| 573 incremental_marking_job()->Start(heap_); | 581 incremental_marking_job()->Start(heap_); |
| 574 } | 582 } |
| 575 | 583 |
| 576 | 584 |
| 577 void IncrementalMarking::StartMarking() { | 585 void IncrementalMarking::StartMarking() { |
| 578 if (FLAG_trace_incremental_marking) { | 586 if (FLAG_trace_incremental_marking) { |
| 579 PrintF("[IncrementalMarking] Start marking\n"); | 587 PrintF("[IncrementalMarking] Start marking\n"); |
| 580 } | 588 } |
| 581 | 589 |
| 582 is_compacting_ = !FLAG_never_compact && | 590 is_compacting_ = !FLAG_never_compact && |
| (...skipping 231 matching lines...) |
| 814 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 822 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 815 } | 823 } |
| 816 } | 824 } |
| 817 | 825 |
| 818 | 826 |
| 819 void IncrementalMarking::Stop() { | 827 void IncrementalMarking::Stop() { |
| 820 if (IsStopped()) return; | 828 if (IsStopped()) return; |
| 821 if (FLAG_trace_incremental_marking) { | 829 if (FLAG_trace_incremental_marking) { |
| 822 PrintF("[IncrementalMarking] Stopping.\n"); | 830 PrintF("[IncrementalMarking] Stopping.\n"); |
| 823 } | 831 } |
| 824 heap_->ResetInlineAllocationLimit(); | 832 heap_->new_space()->RemoveInlineAllocationObserver( |
| 833 IncrementalMarkingStepCallback); |
| 834 |
| 825 IncrementalMarking::set_should_hurry(false); | 835 IncrementalMarking::set_should_hurry(false); |
| 826 ResetStepCounters(); | 836 ResetStepCounters(); |
| 827 if (IsMarking()) { | 837 if (IsMarking()) { |
| 828 PatchIncrementalMarkingRecordWriteStubs(heap_, | 838 PatchIncrementalMarkingRecordWriteStubs(heap_, |
| 829 RecordWriteStub::STORE_BUFFER_ONLY); | 839 RecordWriteStub::STORE_BUFFER_ONLY); |
| 830 DeactivateIncrementalWriteBarrier(); | 840 DeactivateIncrementalWriteBarrier(); |
| 831 | 841 |
| 832 if (is_compacting_) { | 842 if (is_compacting_) { |
| 833 LargeObjectIterator it(heap_->lo_space()); | 843 LargeObjectIterator it(heap_->lo_space()); |
| 834 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 844 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 835 Page* p = Page::FromAddress(obj->address()); | 845 Page* p = Page::FromAddress(obj->address()); |
| 836 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | 846 if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| 837 p->ClearFlag(Page::RESCAN_ON_EVACUATION); | 847 p->ClearFlag(Page::RESCAN_ON_EVACUATION); |
| 838 } | 848 } |
| 839 } | 849 } |
| 840 } | 850 } |
| 841 } | 851 } |
| 842 heap_->isolate()->stack_guard()->ClearGC(); | 852 heap_->isolate()->stack_guard()->ClearGC(); |
| 843 state_ = STOPPED; | 853 state_ = STOPPED; |
| 844 is_compacting_ = false; | 854 is_compacting_ = false; |
| 845 } | 855 } |
| 846 | 856 |
| 847 | 857 |
| 848 void IncrementalMarking::Finalize() { | 858 void IncrementalMarking::Finalize() { |
| 849 Hurry(); | 859 Hurry(); |
| 850 state_ = STOPPED; | 860 state_ = STOPPED; |
| 851 is_compacting_ = false; | 861 is_compacting_ = false; |
| 852 heap_->ResetInlineAllocationLimit(); | 862 heap_->new_space()->RemoveInlineAllocationObserver( |
| 863 IncrementalMarkingStepCallback); |
| 853 IncrementalMarking::set_should_hurry(false); | 864 IncrementalMarking::set_should_hurry(false); |
| 854 ResetStepCounters(); | 865 ResetStepCounters(); |
| 855 PatchIncrementalMarkingRecordWriteStubs(heap_, | 866 PatchIncrementalMarkingRecordWriteStubs(heap_, |
| 856 RecordWriteStub::STORE_BUFFER_ONLY); | 867 RecordWriteStub::STORE_BUFFER_ONLY); |
| 857 DeactivateIncrementalWriteBarrier(); | 868 DeactivateIncrementalWriteBarrier(); |
| 858 DCHECK(heap_->mark_compact_collector()->marking_deque()->IsEmpty()); | 869 DCHECK(heap_->mark_compact_collector()->marking_deque()->IsEmpty()); |
| 859 heap_->isolate()->stack_guard()->ClearGC(); | 870 heap_->isolate()->stack_guard()->ClearGC(); |
| 860 } | 871 } |
| 861 | 872 |
| 862 | 873 |
| (...skipping 264 matching lines...) |
| 1127 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { | 1138 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { |
| 1128 idle_marking_delay_counter_++; | 1139 idle_marking_delay_counter_++; |
| 1129 } | 1140 } |
| 1130 | 1141 |
| 1131 | 1142 |
| 1132 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1143 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1133 idle_marking_delay_counter_ = 0; | 1144 idle_marking_delay_counter_ = 0; |
| 1134 } | 1145 } |
| 1135 } // namespace internal | 1146 } // namespace internal |
| 1136 } // namespace v8 | 1147 } // namespace v8 |
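
The change above replaces the old heap_->LowerInlineAllocationLimit / ResetInlineAllocationLimit calls with an inline allocation observer on the new space: Start() registers IncrementalMarkingStepCallback with kAllocatedThreshold, and Stop()/Finalize() unregister it. The standalone C++ sketch below illustrates that register/step/unregister lifecycle under simplified assumptions; SimpleNewSpace, SimpleMarker, and the threshold value are illustrative stand-ins, not the real V8 NewSpace or IncrementalMarking interfaces.

// A minimal sketch of the inline-allocation-observer pattern this CL switches
// to. All names here (SimpleNewSpace, SimpleMarker, the threshold constant)
// are hypothetical stand-ins; the actual V8 classes are richer.
#include <cstdio>
#include <vector>

using AllocationObserverCallback = void (*)(int bytes_allocated, void* arg);

class SimpleNewSpace {
 public:
  // Register a callback that fires once at least |threshold| bytes have been
  // allocated since the last notification to that observer.
  void AddInlineAllocationObserver(int threshold,
                                   AllocationObserverCallback callback,
                                   void* arg) {
    observers_.push_back({threshold, 0, callback, arg});
  }

  // Remove every observer registered with |callback|.
  void RemoveInlineAllocationObserver(AllocationObserverCallback callback) {
    std::vector<Observer> kept;
    for (const Observer& o : observers_) {
      if (o.callback != callback) kept.push_back(o);
    }
    observers_.swap(kept);
  }

  // Called on each allocation; accumulates bytes per observer and notifies
  // those whose threshold has been crossed, then resets their counters.
  void Allocate(int bytes) {
    for (Observer& o : observers_) {
      o.accumulated += bytes;
      if (o.accumulated >= o.threshold) {
        o.callback(o.accumulated, o.arg);
        o.accumulated = 0;
      }
    }
  }

 private:
  struct Observer {
    int threshold;
    int accumulated;
    AllocationObserverCallback callback;
    void* arg;
  };
  std::vector<Observer> observers_;
};

// Stand-in for IncrementalMarking: advances marking work in proportion to
// allocation, mirroring IncrementalMarkingStepCallback in the diff above.
class SimpleMarker {
 public:
  static void StepCallback(int bytes_allocated, void* arg) {
    static_cast<SimpleMarker*>(arg)->Step(bytes_allocated);
  }
  void Step(int bytes_allocated) {
    std::printf("marking step after %d bytes allocated\n", bytes_allocated);
  }
};

int main() {
  const int kThreshold = 64 * 1024;  // illustrative value only
  SimpleNewSpace new_space;
  SimpleMarker marker;

  // Start(): register the observer so allocation drives marking steps.
  new_space.AddInlineAllocationObserver(kThreshold, SimpleMarker::StepCallback,
                                        &marker);
  new_space.Allocate(40 * 1024);   // below threshold: no step yet
  new_space.Allocate(40 * 1024);   // crosses threshold: one step fires

  // Stop()/Finalize(): unregister so allocations no longer trigger steps.
  new_space.RemoveInlineAllocationObserver(SimpleMarker::StepCallback);
  new_space.Allocate(128 * 1024);  // no further steps
  return 0;
}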