OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/heap/incremental-marking.h" | 7 #include "src/heap/incremental-marking.h" |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
11 #include "src/conversions.h" | 11 #include "src/conversions.h" |
12 #include "src/heap/objects-visiting.h" | 12 #include "src/heap/objects-visiting.h" |
13 #include "src/heap/objects-visiting-inl.h" | 13 #include "src/heap/objects-visiting-inl.h" |
14 | 14 |
15 namespace v8 { | 15 namespace v8 { |
16 namespace internal { | 16 namespace internal { |
17 | 17 |
18 | 18 |
19 IncrementalMarking::IncrementalMarking(Heap* heap) | 19 IncrementalMarking::IncrementalMarking(Heap* heap) |
20 : heap_(heap), | 20 : heap_(heap), |
21 state_(STOPPED), | 21 state_(STOPPED), |
22 steps_count_(0), | 22 steps_count_(0), |
23 old_generation_space_available_at_start_of_incremental_(0), | 23 old_generation_space_available_at_start_of_incremental_(0), |
24 old_generation_space_used_at_start_of_incremental_(0), | 24 old_generation_space_used_at_start_of_incremental_(0), |
25 should_hurry_(false), | 25 should_hurry_(false), |
26 marking_speed_(0), | 26 marking_speed_(0), |
27 allocated_(0), | 27 allocated_(0), |
28 idle_marking_delay_counter_(0), | 28 idle_marking_delay_counter_(0), |
29 no_marking_scope_depth_(0), | 29 no_marking_scope_depth_(0), |
30 unscanned_bytes_of_large_object_(0) {} | 30 unscanned_bytes_of_large_object_(0), |
| 31 was_activated_(false) {} |
31 | 32 |
32 | 33 |
33 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 34 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
34 Object* value) { | 35 Object* value) { |
35 if (BaseRecordWrite(obj, slot, value) && slot != NULL) { | 36 if (BaseRecordWrite(obj, slot, value) && slot != NULL) { |
36 MarkBit obj_bit = Marking::MarkBitFrom(obj); | 37 MarkBit obj_bit = Marking::MarkBitFrom(obj); |
37 if (Marking::IsBlack(obj_bit)) { | 38 if (Marking::IsBlack(obj_bit)) { |
38 // Object is not going to be rescanned, so we need to record the slot. | 39 // Object is not going to be rescanned, so we need to record the slot. |
39 heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0), | 40 heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0), |
40 slot, value); | 41 slot, value); |
(...skipping 375 matching lines...) |
416 lop = lop->next_page(); | 417 lop = lop->next_page(); |
417 } | 418 } |
418 } | 419 } |
419 | 420 |
420 | 421 |
421 bool IncrementalMarking::ShouldActivate() { | 422 bool IncrementalMarking::ShouldActivate() { |
422 return WorthActivating() && heap_->NextGCIsLikelyToBeFull(); | 423 return WorthActivating() && heap_->NextGCIsLikelyToBeFull(); |
423 } | 424 } |
424 | 425 |
425 | 426 |
| 427 bool IncrementalMarking::WasActivated() { return was_activated_; } |
| 428 |
| 429 |
426 bool IncrementalMarking::WorthActivating() { | 430 bool IncrementalMarking::WorthActivating() { |
427 #ifndef DEBUG | 431 #ifndef DEBUG |
428 static const intptr_t kActivationThreshold = 8 * MB; | 432 static const intptr_t kActivationThreshold = 8 * MB; |
429 #else | 433 #else |
430 // TODO(gc) consider setting this to some low level so that some | 434 // TODO(gc) consider setting this to some low level so that some |
431 // debug tests run with incremental marking and some without. | 435 // debug tests run with incremental marking and some without. |
432 static const intptr_t kActivationThreshold = 0; | 436 static const intptr_t kActivationThreshold = 0; |
433 #endif | 437 #endif |
434 // Only start incremental marking in a safe state: 1) when incremental | 438 // Only start incremental marking in a safe state: 1) when incremental |
435 // marking is turned on, 2) when we are currently not in a GC, and | 439 // marking is turned on, 2) when we are currently not in a GC, and |
(...skipping 47 matching lines...) |
483 PrintF("[IncrementalMarking] Start\n"); | 487 PrintF("[IncrementalMarking] Start\n"); |
484 } | 488 } |
485 DCHECK(FLAG_incremental_marking); | 489 DCHECK(FLAG_incremental_marking); |
486 DCHECK(FLAG_incremental_marking_steps); | 490 DCHECK(FLAG_incremental_marking_steps); |
487 DCHECK(state_ == STOPPED); | 491 DCHECK(state_ == STOPPED); |
488 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); | 492 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); |
489 DCHECK(!heap_->isolate()->serializer_enabled()); | 493 DCHECK(!heap_->isolate()->serializer_enabled()); |
490 | 494 |
491 ResetStepCounters(); | 495 ResetStepCounters(); |
492 | 496 |
| 497 was_activated_ = true; |
| 498 |
493 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 499 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
494 StartMarking(flag); | 500 StartMarking(flag); |
495 } else { | 501 } else { |
496 if (FLAG_trace_incremental_marking) { | 502 if (FLAG_trace_incremental_marking) { |
497 PrintF("[IncrementalMarking] Start sweeping.\n"); | 503 PrintF("[IncrementalMarking] Start sweeping.\n"); |
498 } | 504 } |
499 state_ = SWEEPING; | 505 state_ = SWEEPING; |
500 } | 506 } |
501 | 507 |
502 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); | 508 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); |
(...skipping 275 matching lines...) |
778 set_should_hurry(true); | 784 set_should_hurry(true); |
779 if (FLAG_trace_incremental_marking) { | 785 if (FLAG_trace_incremental_marking) { |
780 PrintF("[IncrementalMarking] Complete (normal).\n"); | 786 PrintF("[IncrementalMarking] Complete (normal).\n"); |
781 } | 787 } |
782 if (action == GC_VIA_STACK_GUARD) { | 788 if (action == GC_VIA_STACK_GUARD) { |
783 heap_->isolate()->stack_guard()->RequestGC(); | 789 heap_->isolate()->stack_guard()->RequestGC(); |
784 } | 790 } |
785 } | 791 } |
786 | 792 |
787 | 793 |
| 794 void IncrementalMarking::Epilogue() { was_activated_ = false; } |
| 795 |
| 796 |
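
Note: taken together, the hunks above introduce a "sticky" activation flag: the constructor initializes was_activated_ to false, Start() sets it, WasActivated() reports it, and the new Epilogue() clears it once the GC cycle is over. Below is a minimal, self-contained sketch of that lifecycle; the class name and the main() driver are illustrative only and do not reflect how V8's heap actually consults the flag.

    #include <cstdio>

    // Illustrative stand-in for the flag lifecycle added in this change.
    class IncrementalMarkingSketch {
     public:
      void Start() { was_activated_ = true; }      // mirrors Start() above
      bool WasActivated() const { return was_activated_; }
      void Epilogue() { was_activated_ = false; }  // mirrors Epilogue() above

     private:
      bool was_activated_ = false;  // initialized false, as in the constructor hunk
    };

    int main() {
      IncrementalMarkingSketch marking;
      std::printf("%d\n", marking.WasActivated());  // 0: nothing started yet
      marking.Start();
      std::printf("%d\n", marking.WasActivated());  // 1: stays set across steps
      marking.Epilogue();
      std::printf("%d\n", marking.WasActivated());  // 0: reset for the next cycle
      return 0;
    }
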
788 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { | 797 void IncrementalMarking::OldSpaceStep(intptr_t allocated) { |
789 if (IsStopped() && ShouldActivate()) { | 798 if (IsStopped() && ShouldActivate()) { |
790 // TODO(hpayer): Let's play safe for now, but compaction should be | 799 // TODO(hpayer): Let's play safe for now, but compaction should be |
791 // in principle possible. | 800 // in principle possible. |
792 Start(PREVENT_COMPACTION); | 801 Start(PREVENT_COMPACTION); |
793 } else { | 802 } else { |
794 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); | 803 Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD); |
795 } | 804 } |
796 } | 805 } |
797 | 806 |
(...skipping 177 matching lines...) |
975 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { | 984 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { |
976 idle_marking_delay_counter_++; | 985 idle_marking_delay_counter_++; |
977 } | 986 } |
978 | 987 |
979 | 988 |
980 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 989 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
981 idle_marking_delay_counter_ = 0; | 990 idle_marking_delay_counter_ = 0; |
982 } | 991 } |
983 } | 992 } |
984 } // namespace v8::internal | 993 } // namespace v8::internal |