| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/heap/incremental-marking.h" | 7 #include "src/heap/incremental-marking.h" |
| 8 | 8 |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 441 matching lines...) |
| 452 Object* e = stubs->ValueAt(i); | 452 Object* e = stubs->ValueAt(i); |
| 453 if (e->IsCode()) { | 453 if (e->IsCode()) { |
| 454 RecordWriteStub::Patch(Code::cast(e), mode); | 454 RecordWriteStub::Patch(Code::cast(e), mode); |
| 455 } | 455 } |
| 456 } | 456 } |
| 457 } | 457 } |
| 458 } | 458 } |
| 459 } | 459 } |
| 460 | 460 |
| 461 | 461 |
| 462 void IncrementalMarking::Start() { | 462 void IncrementalMarking::Start(CompactionFlag flag) { |
| 463 if (FLAG_trace_incremental_marking) { | 463 if (FLAG_trace_incremental_marking) { |
| 464 PrintF("[IncrementalMarking] Start\n"); | 464 PrintF("[IncrementalMarking] Start\n"); |
| 465 } | 465 } |
| 466 DCHECK(FLAG_incremental_marking); | 466 DCHECK(FLAG_incremental_marking); |
| 467 DCHECK(FLAG_incremental_marking_steps); | 467 DCHECK(FLAG_incremental_marking_steps); |
| 468 DCHECK(state_ == STOPPED); | 468 DCHECK(state_ == STOPPED); |
| 469 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); | 469 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); |
| 470 DCHECK(!heap_->isolate()->serializer_enabled()); | 470 DCHECK(!heap_->isolate()->serializer_enabled()); |
| 471 | 471 |
| 472 ResetStepCounters(); | 472 ResetStepCounters(); |
| 473 | 473 |
| 474 was_activated_ = true; | 474 was_activated_ = true; |
| 475 | 475 |
| 476 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 476 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
| 477 StartMarking(); | 477 StartMarking(flag); |
| 478 } else { | 478 } else { |
| 479 if (FLAG_trace_incremental_marking) { | 479 if (FLAG_trace_incremental_marking) { |
| 480 PrintF("[IncrementalMarking] Start sweeping.\n"); | 480 PrintF("[IncrementalMarking] Start sweeping.\n"); |
| 481 } | 481 } |
| 482 state_ = SWEEPING; | 482 state_ = SWEEPING; |
| 483 } | 483 } |
| 484 | 484 |
| 485 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); | 485 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); |
| 486 } | 486 } |
| 487 | 487 |
| 488 | 488 |
| 489 void IncrementalMarking::StartMarking() { | 489 void IncrementalMarking::StartMarking(CompactionFlag flag) { |
| 490 if (FLAG_trace_incremental_marking) { | 490 if (FLAG_trace_incremental_marking) { |
| 491 PrintF("[IncrementalMarking] Start marking\n"); | 491 PrintF("[IncrementalMarking] Start marking\n"); |
| 492 } | 492 } |
| 493 | 493 |
| 494 is_compacting_ = !FLAG_never_compact && | 494 is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) && |
| 495 heap_->mark_compact_collector()->StartCompaction( | 495 heap_->mark_compact_collector()->StartCompaction( |
| 496 MarkCompactCollector::INCREMENTAL_COMPACTION); | 496 MarkCompactCollector::INCREMENTAL_COMPACTION); |
| 497 | 497 |
| 498 state_ = MARKING; | 498 state_ = MARKING; |
| 499 | 499 |
| 500 RecordWriteStub::Mode mode = is_compacting_ | 500 RecordWriteStub::Mode mode = is_compacting_ |
| 501 ? RecordWriteStub::INCREMENTAL_COMPACTION | 501 ? RecordWriteStub::INCREMENTAL_COMPACTION |
| 502 : RecordWriteStub::INCREMENTAL; | 502 : RecordWriteStub::INCREMENTAL; |
| 503 | 503 |
| 504 PatchIncrementalMarkingRecordWriteStubs(heap_, mode); | 504 PatchIncrementalMarkingRecordWriteStubs(heap_, mode); |
| (...skipping 311 matching lines...) |
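Header-side note: the hunk above changes the signatures of Start() and StartMarking(), but the matching declarations in src/heap/incremental-marking.h are not part of this excerpt. A minimal sketch of what that header change presumably looks like, inferred only from the .cc lines shown; the default argument and the access specifiers are assumptions, not something this diff confirms:

class IncrementalMarking {
 public:
  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };

  // Assumed default so call sites not touched by this CL would keep their
  // old, compaction-allowing behaviour (not confirmed by the diff).
  void Start(CompactionFlag flag = ALLOW_COMPACTION);

 private:
  // In the .cc above this is reached from Start() and from the SWEEPING
  // branch of Step().
  void StartMarking(CompactionFlag flag);
};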
| 816 | 816 |
| 817 void IncrementalMarking::Epilogue() { | 817 void IncrementalMarking::Epilogue() { |
| 818 was_activated_ = false; | 818 was_activated_ = false; |
| 819 weak_closure_was_overapproximated_ = false; | 819 weak_closure_was_overapproximated_ = false; |
| 820 weak_closure_approximation_rounds_ = 0; | 820 weak_closure_approximation_rounds_ = 0; |
| 821 } | 821 } |
| 822 | 822 |
| 823 | 823 |
| 824 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { | 824 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { |
| 825 if (IsStopped() && ShouldActivate()) { | 825 if (IsStopped() && ShouldActivate()) { |
| 826 Start(); | 826 // TODO(hpayer): Let's play safe for now, but compaction should be |
| 827 // in principle possible. |
| 828 Start(PREVENT_COMPACTION); |
| 827 } else { | 829 } else { |
| 828 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); | 830 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); |
| 829 } | 831 } |
| 830 } | 832 } |
| 831 | 833 |
| 832 | 834 |
| 833 void IncrementalMarking::SpeedUp() { | 835 void IncrementalMarking::SpeedUp() { |
| 834 bool speed_up = false; | 836 bool speed_up = false; |
| 835 | 837 |
| 836 if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) { | 838 if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) { |
| (...skipping 108 matching lines...) |
| 945 bytes_scanned_ += bytes_to_process; | 947 bytes_scanned_ += bytes_to_process; |
| 946 | 948 |
| 947 if (state_ == SWEEPING) { | 949 if (state_ == SWEEPING) { |
| 948 if (heap_->mark_compact_collector()->sweeping_in_progress() && | 950 if (heap_->mark_compact_collector()->sweeping_in_progress() && |
| 949 (heap_->mark_compact_collector()->IsSweepingCompleted() || | 951 (heap_->mark_compact_collector()->IsSweepingCompleted() || |
| 950 !heap_->concurrent_sweeping_enabled())) { | 952 !heap_->concurrent_sweeping_enabled())) { |
| 951 heap_->mark_compact_collector()->EnsureSweepingCompleted(); | 953 heap_->mark_compact_collector()->EnsureSweepingCompleted(); |
| 952 } | 954 } |
| 953 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 955 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
| 954 bytes_scanned_ = 0; | 956 bytes_scanned_ = 0; |
| 955 StartMarking(); | 957 StartMarking(PREVENT_COMPACTION); |
| 956 } | 958 } |
| 957 } else if (state_ == MARKING) { | 959 } else if (state_ == MARKING) { |
| 958 bytes_processed = ProcessMarkingDeque(bytes_to_process); | 960 bytes_processed = ProcessMarkingDeque(bytes_to_process); |
| 959 if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) { | 961 if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) { |
| 960 if (completion == FORCE_COMPLETION || | 962 if (completion == FORCE_COMPLETION || |
| 961 IsIdleMarkingDelayCounterLimitReached()) { | 963 IsIdleMarkingDelayCounterLimitReached()) { |
| 962 if (FLAG_overapproximate_weak_closure && | 964 if (FLAG_overapproximate_weak_closure && |
| 963 !weak_closure_was_overapproximated_) { | 965 !weak_closure_was_overapproximated_) { |
| 964 OverApproximateWeakClosure(action); | 966 OverApproximateWeakClosure(action); |
| 965 } else { | 967 } else { |
| (...skipping 48 matching lines...) |
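For orientation, the two flag values as they would look from a caller's side. The wrappers below are hypothetical illustrations only; the real call sites outside this file are not shown in the diff:

// Hypothetical wrappers, for illustration only.
void StartWithCompaction(IncrementalMarking* marking) {
  // Compaction stays permitted: is_compacting_ becomes true if
  // StartCompaction(INCREMENTAL_COMPACTION) succeeds in StartMarking().
  marking->Start(IncrementalMarking::ALLOW_COMPACTION);
}

void StartWithoutCompaction(IncrementalMarking* marking) {
  // Same choice OldSpaceStep() makes in this CL (and the SWEEPING branch
  // of Step() makes via StartMarking(PREVENT_COMPACTION) directly):
  // marking starts, but is_compacting_ stays false.
  marking->Start(IncrementalMarking::PREVENT_COMPACTION);
}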
| 1014 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { | 1016 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { |
| 1015 idle_marking_delay_counter_++; | 1017 idle_marking_delay_counter_++; |
| 1016 } | 1018 } |
| 1017 | 1019 |
| 1018 | 1020 |
| 1019 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1021 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1020 idle_marking_delay_counter_ = 0; | 1022 idle_marking_delay_counter_ = 0; |
| 1021 } | 1023 } |
| 1022 } | 1024 } |
| 1023 } // namespace v8::internal | 1025 } // namespace v8::internal |