| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/heap/incremental-marking.h" | 7 #include "src/heap/incremental-marking.h" |
| 8 | 8 |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 450 matching lines...) |
| 461 Object* e = stubs->ValueAt(i); | 461 Object* e = stubs->ValueAt(i); |
| 462 if (e->IsCode()) { | 462 if (e->IsCode()) { |
| 463 RecordWriteStub::Patch(Code::cast(e), mode); | 463 RecordWriteStub::Patch(Code::cast(e), mode); |
| 464 } | 464 } |
| 465 } | 465 } |
| 466 } | 466 } |
| 467 } | 467 } |
| 468 } | 468 } |
| 469 | 469 |
| 470 | 470 |
| 471 void IncrementalMarking::Start() { | 471 void IncrementalMarking::Start(CompactionFlag flag) { |
| 472 if (FLAG_trace_incremental_marking) { | 472 if (FLAG_trace_incremental_marking) { |
| 473 PrintF("[IncrementalMarking] Start\n"); | 473 PrintF("[IncrementalMarking] Start\n"); |
| 474 } | 474 } |
| 475 DCHECK(FLAG_incremental_marking); | 475 DCHECK(FLAG_incremental_marking); |
| 476 DCHECK(FLAG_incremental_marking_steps); | 476 DCHECK(FLAG_incremental_marking_steps); |
| 477 DCHECK(state_ == STOPPED); | 477 DCHECK(state_ == STOPPED); |
| 478 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); | 478 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); |
| 479 DCHECK(!heap_->isolate()->serializer_enabled()); | 479 DCHECK(!heap_->isolate()->serializer_enabled()); |
| 480 | 480 |
| 481 ResetStepCounters(); | 481 ResetStepCounters(); |
| 482 | 482 |
| 483 was_activated_ = true; | 483 was_activated_ = true; |
| 484 | 484 |
| 485 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 485 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
| 486 StartMarking(); | 486 StartMarking(flag); |
| 487 } else { | 487 } else { |
| 488 if (FLAG_trace_incremental_marking) { | 488 if (FLAG_trace_incremental_marking) { |
| 489 PrintF("[IncrementalMarking] Start sweeping.\n"); | 489 PrintF("[IncrementalMarking] Start sweeping.\n"); |
| 490 } | 490 } |
| 491 state_ = SWEEPING; | 491 state_ = SWEEPING; |
| 492 } | 492 } |
| 493 | 493 |
| 494 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); | 494 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); |
| 495 } | 495 } |
| 496 | 496 |
| 497 | 497 |
| 498 void IncrementalMarking::StartMarking() { | 498 void IncrementalMarking::StartMarking(CompactionFlag flag) { |
| 499 if (FLAG_trace_incremental_marking) { | 499 if (FLAG_trace_incremental_marking) { |
| 500 PrintF("[IncrementalMarking] Start marking\n"); | 500 PrintF("[IncrementalMarking] Start marking\n"); |
| 501 } | 501 } |
| 502 | 502 |
| 503 is_compacting_ = !FLAG_never_compact && | 503 is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) && |
| 504 heap_->mark_compact_collector()->StartCompaction( | 504 heap_->mark_compact_collector()->StartCompaction( |
| 505 MarkCompactCollector::INCREMENTAL_COMPACTION); | 505 MarkCompactCollector::INCREMENTAL_COMPACTION); |
| 506 | 506 |
| 507 state_ = MARKING; | 507 state_ = MARKING; |
| 508 | 508 |
| 509 RecordWriteStub::Mode mode = is_compacting_ | 509 RecordWriteStub::Mode mode = is_compacting_ |
| 510 ? RecordWriteStub::INCREMENTAL_COMPACTION | 510 ? RecordWriteStub::INCREMENTAL_COMPACTION |
| 511 : RecordWriteStub::INCREMENTAL; | 511 : RecordWriteStub::INCREMENTAL; |
| 512 | 512 |
| 513 PatchIncrementalMarkingRecordWriteStubs(heap_, mode); | 513 PatchIncrementalMarkingRecordWriteStubs(heap_, mode); |
| (...skipping 313 matching lines...) |
| 827 | 827 |
| 828 void IncrementalMarking::Epilogue() { | 828 void IncrementalMarking::Epilogue() { |
| 829 was_activated_ = false; | 829 was_activated_ = false; |
| 830 weak_closure_was_overapproximated_ = false; | 830 weak_closure_was_overapproximated_ = false; |
| 831 weak_closure_approximation_rounds_ = 0; | 831 weak_closure_approximation_rounds_ = 0; |
| 832 } | 832 } |
| 833 | 833 |
| 834 | 834 |
| 835 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { | 835 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { |
| 836 if (IsStopped() && ShouldActivate()) { | 836 if (IsStopped() && ShouldActivate()) { |
| 837 Start(); | 837 // TODO(hpayer): Let's play safe for now, but compaction should be |
| | 838 // in principle possible. |
| | 839 Start(PREVENT_COMPACTION); |
| 838 } else { | 840 } else { |
| 839 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); | 841 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); |
| 840 } | 842 } |
| 841 } | 843 } |
| 842 | 844 |
| 843 | 845 |
| 844 void IncrementalMarking::SpeedUp() { | 846 void IncrementalMarking::SpeedUp() { |
| 845 bool speed_up = false; | 847 bool speed_up = false; |
| 846 | 848 |
| 847 if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) { | 849 if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) { |
| (...skipping 108 matching lines...) |
| 956 bytes_scanned_ += bytes_to_process; | 958 bytes_scanned_ += bytes_to_process; |
| 957 | 959 |
| 958 if (state_ == SWEEPING) { | 960 if (state_ == SWEEPING) { |
| 959 if (heap_->mark_compact_collector()->sweeping_in_progress() && | 961 if (heap_->mark_compact_collector()->sweeping_in_progress() && |
| 960 (heap_->mark_compact_collector()->IsSweepingCompleted() || | 962 (heap_->mark_compact_collector()->IsSweepingCompleted() || |
| 961 !heap_->concurrent_sweeping_enabled())) { | 963 !heap_->concurrent_sweeping_enabled())) { |
| 962 heap_->mark_compact_collector()->EnsureSweepingCompleted(); | 964 heap_->mark_compact_collector()->EnsureSweepingCompleted(); |
| 963 } | 965 } |
| 964 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 966 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
| 965 bytes_scanned_ = 0; | 967 bytes_scanned_ = 0; |
| 966 StartMarking(); | 968 StartMarking(PREVENT_COMPACTION); |
| 967 } | 969 } |
| 968 } else if (state_ == MARKING) { | 970 } else if (state_ == MARKING) { |
| 969 bytes_processed = ProcessMarkingDeque(bytes_to_process); | 971 bytes_processed = ProcessMarkingDeque(bytes_to_process); |
| 970 if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) { | 972 if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) { |
| 971 if (completion == FORCE_COMPLETION || | 973 if (completion == FORCE_COMPLETION || |
| 972 IsIdleMarkingDelayCounterLimitReached()) { | 974 IsIdleMarkingDelayCounterLimitReached()) { |
| 973 if (FLAG_overapproximate_weak_closure && | 975 if (FLAG_overapproximate_weak_closure && |
| 974 !weak_closure_was_overapproximated_) { | 976 !weak_closure_was_overapproximated_) { |
| 975 OverApproximateWeakClosure(action); | 977 OverApproximateWeakClosure(action); |
| 976 } else { | 978 } else { |
| (...skipping 48 matching lines...) |
| 1025 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { | 1027 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { |
| 1026 idle_marking_delay_counter_++; | 1028 idle_marking_delay_counter_++; |
| 1027 } | 1029 } |
| 1028 | 1030 |
| 1029 | 1031 |
| 1030 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1032 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1031 idle_marking_delay_counter_ = 0; | 1033 idle_marking_delay_counter_ = 0; |
| 1032 } | 1034 } |
| 1033 } | 1035 } |
| 1034 } // namespace v8::internal | 1036 } // namespace v8::internal |
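Note: the `CompactionFlag` type threaded through `Start()` and `StartMarking()` is declared in `src/heap/incremental-marking.h`, which is outside the hunks shown here. A minimal sketch of the assumed header-side shape follows; the enum values match the call sites above, but the default argument and exact member layout are assumptions, not part of this diff.

```cpp
// Sketch only: assumed declaration in src/heap/incremental-marking.h
// (not shown in this diff; the default argument is an assumption).
class IncrementalMarking {
 public:
  // PREVENT_COMPACTION lets a caller such as OldSpaceStep() start
  // incremental marking without also starting incremental compaction.
  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };

  // Callers that do not pass a flag would keep the old behavior.
  void Start(CompactionFlag flag = ALLOW_COMPACTION);

 private:
  void StartMarking(CompactionFlag flag);
};
```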