Index: src/heap/incremental-marking.cc
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index 7f87512e9abf876438de4b9eab6ca05a75c833fc..d99b56be49a98ac5f69915141dacae8e9f08d3af 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -19,12 +19,6 @@
 namespace v8 {
 namespace internal {
 
-IncrementalMarking::StepActions IncrementalMarking::IdleStepActions() {
-  return StepActions(IncrementalMarking::NO_GC_VIA_STACK_GUARD,
-                     IncrementalMarking::FORCE_MARKING,
-                     IncrementalMarking::DO_NOT_FORCE_COMPLETION);
-}
-
 IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
       observer_(*this, kAllocatedThreshold),
@@ -1057,38 +1051,25 @@ void IncrementalMarking::Epilogue() {
 }
 
 double IncrementalMarking::AdvanceIncrementalMarking(
-    double deadline_in_ms, IncrementalMarking::StepActions step_actions) {
+    double deadline_in_ms, CompletionAction completion_action,
+    ForceCompletionAction force_completion) {
   DCHECK(!IsStopped());
+  double remaining_time_in_ms = 0.0;
   intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
-      GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs,
+      kStepSizeInMs,
       heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
-  double remaining_time_in_ms = 0.0;
   do {
-    Step(step_size_in_bytes, step_actions.completion_action,
-         step_actions.force_marking, step_actions.force_completion);
+    Step(step_size_in_bytes, completion_action, force_completion);
     remaining_time_in_ms =
        deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
-  } while (!heap_->mark_compact_collector()->marking_deque()->IsEmpty() &&
-           remaining_time_in_ms >=
-               2.0 * GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs &&
-           !IsComplete() &&
+  } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
            !heap()->mark_compact_collector()->marking_deque()->IsEmpty());
   return remaining_time_in_ms;
 }
 
-void IncrementalMarking::OldSpaceStep(intptr_t allocated) {
-  if (IsStopped() && ShouldActivateEvenWithoutIdleNotification()) {
-    heap()->StartIncrementalMarking(Heap::kNoGCFlags, kNoGCCallbackFlags,
-                                    "old space step");
-  } else {
-    Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD);
-  }
-}
-
-
 void IncrementalMarking::SpeedUp() {
   bool speed_up = false;
@@ -1178,39 +1159,17 @@ void IncrementalMarking::FinalizeSweeping() {
   }
 }
 
-intptr_t IncrementalMarking::Step(intptr_t allocated_bytes,
-                                  CompletionAction action,
-                                  ForceMarkingAction marking,
-                                  ForceCompletionAction completion) {
-  DCHECK(allocated_bytes >= 0);
-
+void IncrementalMarking::NotifyAllocatedBytes(intptr_t allocated_bytes) {
   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
       (state_ != SWEEPING && state_ != MARKING)) {
-    return 0;
+    return;
   }
 
   allocated_ += allocated_bytes;
 
-  if (marking == DO_NOT_FORCE_MARKING && allocated_ < kAllocatedThreshold &&
-      write_barriers_invoked_since_last_step_ <
+  if (allocated_ >= kAllocatedThreshold ||
+      write_barriers_invoked_since_last_step_ >=
          kWriteBarriersInvokedThreshold) {
-    return 0;
-  }
-
-  // If an idle notification happened recently, we delay marking steps.
-  if (marking == DO_NOT_FORCE_MARKING &&
-      heap_->RecentIdleNotificationHappened()) {
-    return 0;
-  }
-
-  intptr_t bytes_processed = 0;
-  {
-    HistogramTimerScope incremental_marking_scope(
-        heap_->isolate()->counters()->gc_incremental_marking());
-    TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
-    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
-    double start = heap_->MonotonicallyIncreasingTimeInMs();
-
     // The marking speed is driven either by the allocation rate or by the rate
     // at which we are having to check the color of objects in the write
     // barrier.
@@ -1222,73 +1181,83 @@ intptr_t IncrementalMarking::Step(intptr_t allocated_bytes,
     intptr_t bytes_to_process =
         marking_speed_ *
         Max(allocated_, write_barriers_invoked_since_last_step_);
-    allocated_ = 0;
-    write_barriers_invoked_since_last_step_ = 0;
+    Step(bytes_to_process, GC_VIA_STACK_GUARD, FORCE_COMPLETION);
+  }
+}
-    bytes_scanned_ += bytes_to_process;
+void IncrementalMarking::Step(intptr_t bytes_to_process,
+                              CompletionAction action,
+                              ForceCompletionAction completion) {
+  HistogramTimerScope incremental_marking_scope(
+      heap_->isolate()->counters()->gc_incremental_marking());
+  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
+  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
+  double start = heap_->MonotonicallyIncreasingTimeInMs();
-    if (state_ == SWEEPING) {
-      TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
-      FinalizeSweeping();
-    }
+  bytes_scanned_ += bytes_to_process;
-    if (state_ == MARKING) {
-      const bool incremental_wrapper_tracing =
-          FLAG_incremental_marking_wrappers && heap_->UsingEmbedderHeapTracer();
-      const bool process_wrappers =
-          incremental_wrapper_tracing &&
-          (heap_->mark_compact_collector()
-               ->RequiresImmediateWrapperProcessing() ||
-           heap_->mark_compact_collector()->marking_deque()->IsEmpty());
-      bool wrapper_work_left = incremental_wrapper_tracing;
-      if (!process_wrappers) {
-        bytes_processed = ProcessMarkingDeque(bytes_to_process);
-      } else {
-        const double kWrapperTracngStepMs = 1.0;
-        const double wrapper_deadline =
-            heap_->MonotonicallyIncreasingTimeInMs() + kWrapperTracngStepMs;
-        TRACE_GC(heap()->tracer(),
-                 GCTracer::Scope::MC_INCREMENTAL_WRAPPER_TRACING);
-        heap_->mark_compact_collector()
-            ->RegisterWrappersWithEmbedderHeapTracer();
-        wrapper_work_left =
-            heap_->mark_compact_collector()
-                ->embedder_heap_tracer()
-                ->AdvanceTracing(wrapper_deadline,
-                                 EmbedderHeapTracer::AdvanceTracingActions(
-                                     EmbedderHeapTracer::ForceCompletionAction::
-                                         DO_NOT_FORCE_COMPLETION));
-      }
+  allocated_ = 0;
+  write_barriers_invoked_since_last_step_ = 0;
+
+  if (state_ == SWEEPING) {

ulan  2016/09/02 12:59:40
All changes below are just moves except for kStepSizeInMs.

Michael Lippautz  2016/09/02 19:05:19
Acknowledged. Awesome that we can reuse the constant.

+    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
+    FinalizeSweeping();
+  }
-      if (heap_->mark_compact_collector()->marking_deque()->IsEmpty() &&
-          !wrapper_work_left) {
-        if (completion == FORCE_COMPLETION ||
-            IsIdleMarkingDelayCounterLimitReached()) {
-          if (!finalize_marking_completed_) {
-            FinalizeMarking(action);
-          } else {
-            MarkingComplete(action);
-          }
+  intptr_t bytes_processed = 0;
+  if (state_ == MARKING) {
+    const bool incremental_wrapper_tracing =
+        FLAG_incremental_marking_wrappers && heap_->UsingEmbedderHeapTracer();
+    const bool process_wrappers =
+        incremental_wrapper_tracing &&
+        (heap_->mark_compact_collector()
+             ->RequiresImmediateWrapperProcessing() ||
+         heap_->mark_compact_collector()->marking_deque()->IsEmpty());
+    bool wrapper_work_left = incremental_wrapper_tracing;
+    if (!process_wrappers) {
+      bytes_processed = ProcessMarkingDeque(bytes_to_process);
+    } else {
+      const double wrapper_deadline =
+          heap_->MonotonicallyIncreasingTimeInMs() + kStepSizeInMs;
+      TRACE_GC(heap()->tracer(),
+               GCTracer::Scope::MC_INCREMENTAL_WRAPPER_TRACING);
+      heap_->mark_compact_collector()->RegisterWrappersWithEmbedderHeapTracer();
+      wrapper_work_left =
+          heap_->mark_compact_collector()
+              ->embedder_heap_tracer()
+              ->AdvanceTracing(wrapper_deadline,
+                               EmbedderHeapTracer::AdvanceTracingActions(
+                                   EmbedderHeapTracer::ForceCompletionAction::
+                                       DO_NOT_FORCE_COMPLETION));
+    }
+
+    if (heap_->mark_compact_collector()->marking_deque()->IsEmpty() &&
+        !wrapper_work_left) {
+      if (completion == FORCE_COMPLETION ||
+          IsIdleMarkingDelayCounterLimitReached()) {
+        if (!finalize_marking_completed_) {
+          FinalizeMarking(action);
         } else {
-          IncrementIdleMarkingDelayCounter();
+          MarkingComplete(action);
         }
+      } else {
+        IncrementIdleMarkingDelayCounter();
       }
     }
+  }
-    steps_count_++;
+  steps_count_++;
-    // Speed up marking if we are marking too slow or if we are almost done
-    // with marking.
-    SpeedUp();
+  // Speed up marking if we are marking too slow or if we are almost done
+  // with marking.
+  SpeedUp();
-    double end = heap_->MonotonicallyIncreasingTimeInMs();
-    double duration = (end - start);
-    // Note that we report zero bytes here when sweeping was in progress or
-    // when we just started incremental marking. In these cases we did not
-    // process the marking deque.
-    heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
-  }
-  return bytes_processed;
+  double end = heap_->MonotonicallyIncreasingTimeInMs();
+  double duration = (end - start);
+  // Note that we report zero bytes here when sweeping was in progress or
+  // when we just started incremental marking. In these cases we did not
+  // process the marking deque.
+  heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
 }
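
For reference, the patch boils down to two things: AdvanceIncrementalMarking() now takes CompletionAction/ForceCompletionAction directly and reuses kStepSizeInMs for both its loop bound and the wrapper-tracing deadline, and the old Step(allocated_bytes, ...) entry point is split into NotifyAllocatedBytes(), which only accumulates allocation and write-barrier counters and decides whether a step is due, and Step(), which performs one bounded chunk of marking work. The standalone sketch below models that split; it is not V8 code, and the class name, thresholds, and printed output are invented purely for illustration.

// Toy model of the NotifyAllocatedBytes()/Step() split above (not V8 code).
// It keeps only the pacing shape: cheap bookkeeping on every allocation, a
// bounded marking step once enough work has accumulated.
#include <cstdint>
#include <iostream>

class ToyIncrementalMarker {
 public:
  // Illustrative thresholds, loosely analogous to kAllocatedThreshold and
  // kWriteBarriersInvokedThreshold in the patch; the values are made up.
  static constexpr std::intptr_t kAllocatedThreshold = 64 * 1024;
  static constexpr std::intptr_t kWriteBarrierThreshold = 256;

  // Analogous to NotifyAllocatedBytes(): accumulate counters on every
  // allocation and trigger a step only when a threshold is crossed.
  void NotifyAllocatedBytes(std::intptr_t allocated_bytes) {
    allocated_ += allocated_bytes;
    if (allocated_ >= kAllocatedThreshold ||
        write_barriers_ >= kWriteBarrierThreshold) {
      Step(allocated_);
    }
  }

  // Analogous to the write-barrier counter that also drives the pacing.
  void NotifyWriteBarrier() { ++write_barriers_; }

 private:
  // Analogous to Step(): reset the counters and do one chunk of marking work.
  void Step(std::intptr_t bytes_to_process) {
    allocated_ = 0;
    write_barriers_ = 0;
    std::cout << "marking step over ~" << bytes_to_process << " bytes\n";
  }

  std::intptr_t allocated_ = 0;
  std::intptr_t write_barriers_ = 0;
};

int main() {
  ToyIncrementalMarker marker;
  // Simulate a mutator allocating in 8 KB chunks: every eighth allocation
  // crosses the 64 KB threshold and triggers one marking step.
  for (int i = 0; i < 32; i++) marker.NotifyAllocatedBytes(8 * 1024);
  return 0;
}

In the real patch the amount of work handed to Step() is derived from marking_speed_ and the larger of the two counters, and idle-time GC drives the same Step() through AdvanceIncrementalMarking() against a deadline; the toy keeps only the threshold-and-reset control flow.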