OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/heap/incremental-marking.h" | 7 #include "src/heap/incremental-marking.h" |
8 | 8 |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
11 #include "src/conversions.h" | 11 #include "src/conversions.h" |
12 #include "src/heap/objects-visiting.h" | 12 #include "src/heap/objects-visiting.h" |
13 #include "src/heap/objects-visiting-inl.h" | 13 #include "src/heap/objects-visiting-inl.h" |
14 | 14 |
15 namespace v8 { | 15 namespace v8 { |
16 namespace internal { | 16 namespace internal { |
17 | 17 |
18 | 18 |
19 IncrementalMarking::IncrementalMarking(Heap* heap) | 19 IncrementalMarking::IncrementalMarking(Heap* heap) |
20 : heap_(heap), | 20 : heap_(heap), |
21 state_(STOPPED), | 21 state_(STOPPED), |
22 marking_deque_memory_(NULL), | |
23 marking_deque_memory_committed_(false), | |
24 steps_count_(0), | 22 steps_count_(0), |
25 old_generation_space_available_at_start_of_incremental_(0), | 23 old_generation_space_available_at_start_of_incremental_(0), |
26 old_generation_space_used_at_start_of_incremental_(0), | 24 old_generation_space_used_at_start_of_incremental_(0), |
27 should_hurry_(false), | 25 should_hurry_(false), |
28 marking_speed_(0), | 26 marking_speed_(0), |
29 allocated_(0), | 27 allocated_(0), |
30 idle_marking_delay_counter_(0), | 28 idle_marking_delay_counter_(0), |
31 no_marking_scope_depth_(0), | 29 no_marking_scope_depth_(0), |
32 unscanned_bytes_of_large_object_(0) {} | 30 unscanned_bytes_of_large_object_(0) {} |
33 | 31 |
34 | 32 |
35 void IncrementalMarking::TearDown() { delete marking_deque_memory_; } | |
36 | |
37 | |
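[Editor's note] The deleted constructor members and TearDown above are the visible edge of this refactor: ownership of the marking deque moves from IncrementalMarking into MarkCompactCollector. A minimal sketch of the resulting shape; names are hypothetical except those that appear elsewhere in this diff:

```cpp
struct MarkingDeque { /* ring buffer of grey object pointers */ };

class MarkCompactCollector {
 public:
  MarkingDeque* marking_deque() { return &marking_deque_; }
  void EnsureMarkingDequeIsCommittedAndInitialize();  // commits backing pages

 private:
  MarkingDeque marking_deque_;  // formerly owned by IncrementalMarking
};

class IncrementalMarking {
  // No deque state of its own any more; every access goes through
  // heap_->mark_compact_collector()->marking_deque().
};
```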
38 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 33 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
39 Object* value) { | 34 Object* value) { |
40 if (BaseRecordWrite(obj, slot, value) && slot != NULL) { | 35 if (BaseRecordWrite(obj, slot, value) && slot != NULL) { |
41 MarkBit obj_bit = Marking::MarkBitFrom(obj); | 36 MarkBit obj_bit = Marking::MarkBitFrom(obj); |
42 if (Marking::IsBlack(obj_bit)) { | 37 if (Marking::IsBlack(obj_bit)) { |
43 // Object is not going to be rescanned, so we need to record the slot. | 38 // Object is not going to be rescanned, so we need to record the slot. |
44 heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0), | 39 heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0), |
45 slot, value); | 40 slot, value); |
46 } | 41 } |
47 } | 42 } |
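[Editor's note] For context, RecordWriteSlow upholds the tri-color invariant: a black (already scanned) object must never hold an unrecorded pointer to a white object, or the collector would never revisit that slot. A standalone sketch of that barrier logic, with illustrative types rather than V8's real API:

```cpp
#include <unordered_set>
#include <vector>

enum class Color { White, Grey, Black };

struct Obj {
  Color color = Color::White;
};

struct Marker {
  std::vector<Obj*> deque;          // grey worklist
  std::unordered_set<Obj**> slots;  // slots recorded for the compaction phase

  // Write barrier: runs on pointer stores while incremental marking is on.
  void RecordWrite(Obj* host, Obj** slot, Obj* value) {
    *slot = value;
    if (value != nullptr && value->color == Color::White) {
      value->color = Color::Grey;  // re-grey the target so it gets scanned
      deque.push_back(value);
    }
    if (host->color == Color::Black) {
      // Host will not be rescanned, so remember the slot (as above).
      slots.insert(slot);
    }
  }
};
```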
(...skipping 140 matching lines...)
188 int end_offset = | 183 int end_offset = |
189 Min(object_size, start_offset + kProgressBarScanningChunk); | 184 Min(object_size, start_offset + kProgressBarScanningChunk); |
190 int already_scanned_offset = start_offset; | 185 int already_scanned_offset = start_offset; |
191 bool scan_until_end = false; | 186 bool scan_until_end = false; |
192 do { | 187 do { |
193 VisitPointersWithAnchor(heap, HeapObject::RawField(object, 0), | 188 VisitPointersWithAnchor(heap, HeapObject::RawField(object, 0), |
194 HeapObject::RawField(object, start_offset), | 189 HeapObject::RawField(object, start_offset), |
195 HeapObject::RawField(object, end_offset)); | 190 HeapObject::RawField(object, end_offset)); |
196 start_offset = end_offset; | 191 start_offset = end_offset; |
197 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); | 192 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); |
198 scan_until_end = heap->incremental_marking()->marking_deque()->IsFull(); | 193 scan_until_end = |
| 194 heap->mark_compact_collector()->marking_deque()->IsFull(); |
199 } while (scan_until_end && start_offset < object_size); | 195 } while (scan_until_end && start_offset < object_size); |
200 chunk->set_progress_bar(start_offset); | 196 chunk->set_progress_bar(start_offset); |
201 if (start_offset < object_size) { | 197 if (start_offset < object_size) { |
202 heap->incremental_marking()->marking_deque()->UnshiftGrey(object); | 198 heap->mark_compact_collector()->marking_deque()->UnshiftGrey(object); |
203 heap->incremental_marking()->NotifyIncompleteScanOfObject( | 199 heap->incremental_marking()->NotifyIncompleteScanOfObject( |
204 object_size - (start_offset - already_scanned_offset)); | 200 object_size - (start_offset - already_scanned_offset)); |
205 } | 201 } |
206 } else { | 202 } else { |
207 FixedArrayVisitor::Visit(map, object); | 203 FixedArrayVisitor::Visit(map, object); |
208 } | 204 } |
209 } | 205 } |
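[Editor's note] The progress-bar path above scans a large FixedArray in bounded chunks and parks the object back on the deque if it is not finished; when the deque is full the object cannot be parked, so scanning continues to the end. A simplified sketch, with kChunk and the callback as stand-ins:

```cpp
#include <algorithm>
#include <cstddef>

constexpr size_t kChunk = 32 * 1024;  // bytes visited per marking step

struct LargeArray {
  size_t size_in_bytes = 0;
  size_t progress_bar = 0;  // offset already scanned, persisted on the page
};

// Returns true once the whole array has been scanned.
bool ScanStep(LargeArray* a, bool (*deque_is_full)()) {
  size_t start = a->progress_bar;
  size_t end = std::min(a->size_in_bytes, start + kChunk);
  bool scan_until_end = false;
  do {
    // VisitPointers(a, start, end) would mark the chunk's children here.
    start = end;
    end = std::min(a->size_in_bytes, end + kChunk);
    // If the deque is full, the object cannot be pushed back for a later
    // step, so it must be finished now.
    scan_until_end = deque_is_full();
  } while (scan_until_end && start < a->size_in_bytes);
  a->progress_bar = start;
  return start >= a->size_in_bytes;  // else the caller re-queues the object
}
```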
210 | 206 |
211 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { | 207 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { |
212 Context* context = Context::cast(object); | 208 Context* context = Context::cast(object); |
(...skipping 262 matching lines...)
475 Object* e = stubs->ValueAt(i); | 471 Object* e = stubs->ValueAt(i); |
476 if (e->IsCode()) { | 472 if (e->IsCode()) { |
477 RecordWriteStub::Patch(Code::cast(e), mode); | 473 RecordWriteStub::Patch(Code::cast(e), mode); |
478 } | 474 } |
479 } | 475 } |
480 } | 476 } |
481 } | 477 } |
482 } | 478 } |
483 | 479 |
484 | 480 |
485 void IncrementalMarking::EnsureMarkingDequeIsCommitted() { | |
486 if (marking_deque_memory_ == NULL) { | |
487 marking_deque_memory_ = new base::VirtualMemory(4 * MB); | |
488 } | |
489 if (!marking_deque_memory_committed_) { | |
490 bool success = marking_deque_memory_->Commit( | |
491 reinterpret_cast<Address>(marking_deque_memory_->address()), | |
492 marking_deque_memory_->size(), | |
493 false); // Not executable. | |
494 CHECK(success); | |
495 marking_deque_memory_committed_ = true; | |
496 } | |
497 } | |
498 | |
499 | |
500 void IncrementalMarking::UncommitMarkingDeque() { | |
501 if (state_ == STOPPED && marking_deque_memory_committed_) { | |
502 bool success = marking_deque_memory_->Uncommit( | |
503 reinterpret_cast<Address>(marking_deque_memory_->address()), | |
504 marking_deque_memory_->size()); | |
505 CHECK(success); | |
506 marking_deque_memory_committed_ = false; | |
507 } | |
508 } | |
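[Editor's note] The two deleted functions above lazily commit a fixed 4 MB address-space reservation and release it again when marking is stopped; on the NEW side this logic lives in MarkCompactCollector::EnsureMarkingDequeIsCommittedAndInitialize(). A POSIX-flavoured sketch of the same reserve-then-commit pattern, with mmap/mprotect standing in for base::VirtualMemory:

```cpp
#include <sys/mman.h>
#include <cassert>
#include <cstddef>

constexpr size_t kDequeBytes = 4 * 1024 * 1024;

struct DequeBacking {
  void* reservation = nullptr;
  bool committed = false;

  void EnsureCommitted() {
    if (reservation == nullptr) {
      // Reserve address space only; no accessible backing pages yet.
      reservation = mmap(nullptr, kDequeBytes, PROT_NONE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      assert(reservation != MAP_FAILED);
    }
    if (!committed) {
      // Commit: make the pages readable/writable (not executable).
      int rc = mprotect(reservation, kDequeBytes, PROT_READ | PROT_WRITE);
      assert(rc == 0);
      committed = true;
    }
  }

  void Uncommit() {
    if (!committed) return;
    int rc = mprotect(reservation, kDequeBytes, PROT_NONE);
    assert(rc == 0);
    // madvise(MADV_DONTNEED) would additionally drop the backing pages.
    committed = false;
  }
};
```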
509 | |
510 | |
511 void IncrementalMarking::Start(CompactionFlag flag) { | 481 void IncrementalMarking::Start(CompactionFlag flag) { |
512 if (FLAG_trace_incremental_marking) { | 482 if (FLAG_trace_incremental_marking) { |
513 PrintF("[IncrementalMarking] Start\n"); | 483 PrintF("[IncrementalMarking] Start\n"); |
514 } | 484 } |
515 DCHECK(FLAG_incremental_marking); | 485 DCHECK(FLAG_incremental_marking); |
516 DCHECK(FLAG_incremental_marking_steps); | 486 DCHECK(FLAG_incremental_marking_steps); |
517 DCHECK(state_ == STOPPED); | 487 DCHECK(state_ == STOPPED); |
518 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); | 488 DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); |
519 DCHECK(!heap_->isolate()->serializer_enabled()); | 489 DCHECK(!heap_->isolate()->serializer_enabled()); |
520 | 490 |
(...skipping 22 matching lines...)
543 MarkCompactCollector::INCREMENTAL_COMPACTION); | 513 MarkCompactCollector::INCREMENTAL_COMPACTION); |
544 | 514 |
545 state_ = MARKING; | 515 state_ = MARKING; |
546 | 516 |
547 RecordWriteStub::Mode mode = is_compacting_ | 517 RecordWriteStub::Mode mode = is_compacting_ |
548 ? RecordWriteStub::INCREMENTAL_COMPACTION | 518 ? RecordWriteStub::INCREMENTAL_COMPACTION |
549 : RecordWriteStub::INCREMENTAL; | 519 : RecordWriteStub::INCREMENTAL; |
550 | 520 |
551 PatchIncrementalMarkingRecordWriteStubs(heap_, mode); | 521 PatchIncrementalMarkingRecordWriteStubs(heap_, mode); |
552 | 522 |
553 EnsureMarkingDequeIsCommitted(); | 523 heap_->mark_compact_collector()->EnsureMarkingDequeIsCommittedAndInitialize(); |
554 | |
555 // Initialize marking stack. | |
556 Address addr = static_cast<Address>(marking_deque_memory_->address()); | |
557 size_t size = marking_deque_memory_->size(); | |
558 if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize; | |
559 marking_deque_.Initialize(addr, addr + size); | |
560 | 524 |
561 ActivateIncrementalWriteBarrier(); | 525 ActivateIncrementalWriteBarrier(); |
562 | 526 |
563 // Marking bits are cleared by the sweeper. | 527 // Marking bits are cleared by the sweeper. |
564 #ifdef VERIFY_HEAP | 528 #ifdef VERIFY_HEAP |
565 if (FLAG_verify_heap) { | 529 if (FLAG_verify_heap) { |
566 heap_->mark_compact_collector()->VerifyMarkbitsAreClean(); | 530 heap_->mark_compact_collector()->VerifyMarkbitsAreClean(); |
567 } | 531 } |
568 #endif | 532 #endif |
569 | 533 |
(...skipping 25 matching lines...)
595 heap_->new_space()->FromSpaceEnd()); | 559 heap_->new_space()->FromSpaceEnd()); |
596 while (it.has_next()) { | 560 while (it.has_next()) { |
597 Bitmap::Clear(it.next()); | 561 Bitmap::Clear(it.next()); |
598 } | 562 } |
599 } | 563 } |
600 | 564 |
601 | 565 |
602 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { | 566 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { |
603 if (!IsMarking()) return; | 567 if (!IsMarking()) return; |
604 | 568 |
605 int current = marking_deque_.bottom(); | 569 MarkingDeque* marking_deque = |
606 int mask = marking_deque_.mask(); | 570 heap_->mark_compact_collector()->marking_deque(); |
607 int limit = marking_deque_.top(); | 571 int current = marking_deque->bottom(); |
608 HeapObject** array = marking_deque_.array(); | 572 int mask = marking_deque->mask(); |
| 573 int limit = marking_deque->top(); |
| 574 HeapObject** array = marking_deque->array(); |
609 int new_top = current; | 575 int new_top = current; |
610 | 576 |
611 Map* filler_map = heap_->one_pointer_filler_map(); | 577 Map* filler_map = heap_->one_pointer_filler_map(); |
612 | 578 |
613 while (current != limit) { | 579 while (current != limit) { |
614 HeapObject* obj = array[current]; | 580 HeapObject* obj = array[current]; |
615 DCHECK(obj->IsHeapObject()); | 581 DCHECK(obj->IsHeapObject()); |
616 current = ((current + 1) & mask); | 582 current = ((current + 1) & mask); |
617 if (heap_->InNewSpace(obj)) { | 583 if (heap_->InNewSpace(obj)) { |
618 MapWord map_word = obj->map_word(); | 584 MapWord map_word = obj->map_word(); |
619 if (map_word.IsForwardingAddress()) { | 585 if (map_word.IsForwardingAddress()) { |
620 HeapObject* dest = map_word.ToForwardingAddress(); | 586 HeapObject* dest = map_word.ToForwardingAddress(); |
621 array[new_top] = dest; | 587 array[new_top] = dest; |
622 new_top = ((new_top + 1) & mask); | 588 new_top = ((new_top + 1) & mask); |
623 DCHECK(new_top != marking_deque_.bottom()); | 589 DCHECK(new_top != marking_deque->bottom()); |
624 #ifdef DEBUG | 590 #ifdef DEBUG |
625 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 591 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
626 DCHECK(Marking::IsGrey(mark_bit) || | 592 DCHECK(Marking::IsGrey(mark_bit) || |
627 (obj->IsFiller() && Marking::IsWhite(mark_bit))); | 593 (obj->IsFiller() && Marking::IsWhite(mark_bit))); |
628 #endif | 594 #endif |
629 } | 595 } |
630 } else if (obj->map() != filler_map) { | 596 } else if (obj->map() != filler_map) { |
631 // Skip one-word filler objects that appear on the | 597 // Skip one-word filler objects that appear on the |
632 // stack when we perform an in-place array shift. | 598 // stack when we perform an in-place array shift. |
633 array[new_top] = obj; | 599 array[new_top] = obj; |
634 new_top = ((new_top + 1) & mask); | 600 new_top = ((new_top + 1) & mask); |
635 DCHECK(new_top != marking_deque_.bottom()); | 601 DCHECK(new_top != marking_deque->bottom()); |
636 #ifdef DEBUG | 602 #ifdef DEBUG |
637 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 603 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
638 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 604 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
639 DCHECK(Marking::IsGrey(mark_bit) || | 605 DCHECK(Marking::IsGrey(mark_bit) || |
640 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || | 606 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || |
641 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 607 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
642 Marking::IsBlack(mark_bit))); | 608 Marking::IsBlack(mark_bit))); |
643 #endif | 609 #endif |
644 } | 610 } |
645 } | 611 } |
646 marking_deque_.set_top(new_top); | 612 marking_deque->set_top(new_top); |
647 } | 613 } |
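[Editor's note] UpdateMarkingDequeAfterScavenge compacts the ring buffer in place: entries for objects the scavenger moved are rewritten through their forwarding addresses, and entries whose new-space objects died are dropped (the one-word-filler check is elided here). A standalone sketch of the two-index walk, which is safe because new_top can never overtake current:

```cpp
#include <cstddef>
#include <vector>

struct Entry {
  bool in_new_space = false;
  Entry* forwarding = nullptr;  // set by the scavenger if the object moved
};

// ring.size() must be a power of two; returns the deque's new top index.
size_t CompactAfterScavenge(std::vector<Entry*>& ring, size_t bottom,
                            size_t top) {
  const size_t mask = ring.size() - 1;
  size_t current = bottom, new_top = bottom;
  while (current != top) {
    Entry* e = ring[current];
    current = (current + 1) & mask;
    if (e->in_new_space) {
      if (e->forwarding == nullptr) continue;  // object died; drop entry
      e = e->forwarding;                       // object moved; follow it
    }
    ring[new_top] = e;
    new_top = (new_top + 1) & mask;
  }
  return new_top;  // caller stores this as the deque's new top
}
```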
648 | 614 |
649 | 615 |
650 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 616 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
651 MarkBit map_mark_bit = Marking::MarkBitFrom(map); | 617 MarkBit map_mark_bit = Marking::MarkBitFrom(map); |
652 if (Marking::IsWhite(map_mark_bit)) { | 618 if (Marking::IsWhite(map_mark_bit)) { |
653 WhiteToGreyAndPush(map, map_mark_bit); | 619 WhiteToGreyAndPush(map, map_mark_bit); |
654 } | 620 } |
655 | 621 |
656 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 622 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
657 | 623 |
658 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 624 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
659 #if ENABLE_SLOW_DCHECKS | 625 #if ENABLE_SLOW_DCHECKS |
660 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 626 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
661 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 627 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
662 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || | 628 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || |
663 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 629 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
664 Marking::IsBlack(mark_bit))); | 630 Marking::IsBlack(mark_bit))); |
665 #endif | 631 #endif |
666 MarkBlackOrKeepBlack(obj, mark_bit, size); | 632 MarkBlackOrKeepBlack(obj, mark_bit, size); |
667 } | 633 } |
668 | 634 |
669 | 635 |
670 intptr_t IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) { | 636 intptr_t IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) { |
671 intptr_t bytes_processed = 0; | 637 intptr_t bytes_processed = 0; |
672 Map* filler_map = heap_->one_pointer_filler_map(); | 638 Map* filler_map = heap_->one_pointer_filler_map(); |
673 while (!marking_deque_.IsEmpty() && bytes_processed < bytes_to_process) { | 639 MarkingDeque* marking_deque = |
674 HeapObject* obj = marking_deque_.Pop(); | 640 heap_->mark_compact_collector()->marking_deque(); |
| 641 while (!marking_deque->IsEmpty() && bytes_processed < bytes_to_process) { |
| 642 HeapObject* obj = marking_deque->Pop(); |
675 | 643 |
676 // Explicitly skip one-word fillers. Incremental markbit patterns are | 644 // Explicitly skip one-word fillers. Incremental markbit patterns are |
677 // correct only for objects that occupy at least two words. | 645 // correct only for objects that occupy at least two words. |
678 Map* map = obj->map(); | 646 Map* map = obj->map(); |
679 if (map == filler_map) continue; | 647 if (map == filler_map) continue; |
680 | 648 |
681 int size = obj->SizeFromMap(map); | 649 int size = obj->SizeFromMap(map); |
682 unscanned_bytes_of_large_object_ = 0; | 650 unscanned_bytes_of_large_object_ = 0; |
683 VisitObject(map, obj, size); | 651 VisitObject(map, obj, size); |
684 int delta = (size - unscanned_bytes_of_large_object_); | 652 int delta = (size - unscanned_bytes_of_large_object_); |
685 // TODO(jochen): remove after http://crbug.com/381820 is resolved. | 653 // TODO(jochen): remove after http://crbug.com/381820 is resolved. |
686 CHECK_LT(0, delta); | 654 CHECK_LT(0, delta); |
687 bytes_processed += delta; | 655 bytes_processed += delta; |
688 } | 656 } |
689 return bytes_processed; | 657 return bytes_processed; |
690 } | 658 } |
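[Editor's note] ProcessMarkingDeque(bytes_to_process) keeps each increment bounded: it pops grey objects and stops once roughly the requested number of bytes has been visited, crediting large progress-bar objects only for the bytes actually scanned. A minimal sketch with illustrative types:

```cpp
#include <cstdint>
#include <deque>

struct GreyObject {
  int size = 0;
  int unscanned = 0;  // bytes a progress-bar object deferred to later steps
};

int64_t DrainWithBudget(std::deque<GreyObject*>& grey, int64_t budget) {
  int64_t processed = 0;
  while (!grey.empty() && processed < budget) {
    GreyObject* obj = grey.front();
    grey.pop_front();
    // Visit(obj) would mark its children grey here.
    processed += obj->size - obj->unscanned;  // count only scanned bytes
  }
  return processed;
}
```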
691 | 659 |
692 | 660 |
693 void IncrementalMarking::ProcessMarkingDeque() { | 661 void IncrementalMarking::ProcessMarkingDeque() { |
694 Map* filler_map = heap_->one_pointer_filler_map(); | 662 Map* filler_map = heap_->one_pointer_filler_map(); |
695 while (!marking_deque_.IsEmpty()) { | 663 MarkingDeque* marking_deque = |
696 HeapObject* obj = marking_deque_.Pop(); | 664 heap_->mark_compact_collector()->marking_deque(); |
| 665 while (!marking_deque->IsEmpty()) { |
| 666 HeapObject* obj = marking_deque->Pop(); |
697 | 667 |
698 // Explicitly skip one-word fillers. Incremental markbit patterns are | 668 // Explicitly skip one-word fillers. Incremental markbit patterns are |
699 // correct only for objects that occupy at least two words. | 669 // correct only for objects that occupy at least two words. |
700 Map* map = obj->map(); | 670 Map* map = obj->map(); |
701 if (map == filler_map) continue; | 671 if (map == filler_map) continue; |
702 | 672 |
703 VisitObject(map, obj, obj->SizeFromMap(map)); | 673 VisitObject(map, obj, obj->SizeFromMap(map)); |
704 } | 674 } |
705 } | 675 } |
706 | 676 |
(...skipping 79 matching lines...)
786 void IncrementalMarking::Finalize() { | 756 void IncrementalMarking::Finalize() { |
787 Hurry(); | 757 Hurry(); |
788 state_ = STOPPED; | 758 state_ = STOPPED; |
789 is_compacting_ = false; | 759 is_compacting_ = false; |
790 heap_->new_space()->LowerInlineAllocationLimit(0); | 760 heap_->new_space()->LowerInlineAllocationLimit(0); |
791 IncrementalMarking::set_should_hurry(false); | 761 IncrementalMarking::set_should_hurry(false); |
792 ResetStepCounters(); | 762 ResetStepCounters(); |
793 PatchIncrementalMarkingRecordWriteStubs(heap_, | 763 PatchIncrementalMarkingRecordWriteStubs(heap_, |
794 RecordWriteStub::STORE_BUFFER_ONLY); | 764 RecordWriteStub::STORE_BUFFER_ONLY); |
795 DeactivateIncrementalWriteBarrier(); | 765 DeactivateIncrementalWriteBarrier(); |
796 DCHECK(marking_deque_.IsEmpty()); | 766 DCHECK(heap_->mark_compact_collector()->marking_deque()->IsEmpty()); |
797 heap_->isolate()->stack_guard()->ClearGC(); | 767 heap_->isolate()->stack_guard()->ClearGC(); |
798 } | 768 } |
799 | 769 |
800 | 770 |
801 void IncrementalMarking::MarkingComplete(CompletionAction action) { | 771 void IncrementalMarking::MarkingComplete(CompletionAction action) { |
802 state_ = COMPLETE; | 772 state_ = COMPLETE; |
803 // We will set the stack guard to request a GC now. This will mean the rest | 773 // We will set the stack guard to request a GC now. This will mean the rest |
804 // of the GC gets performed as soon as possible (we can't do a GC here in a | 774 // of the GC gets performed as soon as possible (we can't do a GC here in a |
805 // record-write context). If a few things get allocated between now and then | 775 // record-write context). If a few things get allocated between now and then |
806 // that shouldn't make us do a scavenge and keep being incremental, so we set | 776 // that shouldn't make us do a scavenge and keep being incremental, so we set |
(...skipping 132 matching lines...)
939 (heap_->mark_compact_collector()->IsSweepingCompleted() || | 909 (heap_->mark_compact_collector()->IsSweepingCompleted() || |
940 !FLAG_concurrent_sweeping)) { | 910 !FLAG_concurrent_sweeping)) { |
941 heap_->mark_compact_collector()->EnsureSweepingCompleted(); | 911 heap_->mark_compact_collector()->EnsureSweepingCompleted(); |
942 } | 912 } |
943 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { | 913 if (!heap_->mark_compact_collector()->sweeping_in_progress()) { |
944 bytes_scanned_ = 0; | 914 bytes_scanned_ = 0; |
945 StartMarking(PREVENT_COMPACTION); | 915 StartMarking(PREVENT_COMPACTION); |
946 } | 916 } |
947 } else if (state_ == MARKING) { | 917 } else if (state_ == MARKING) { |
948 bytes_processed = ProcessMarkingDeque(bytes_to_process); | 918 bytes_processed = ProcessMarkingDeque(bytes_to_process); |
949 if (marking_deque_.IsEmpty()) { | 919 if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) { |
950 if (completion == FORCE_COMPLETION || | 920 if (completion == FORCE_COMPLETION || |
951 IsIdleMarkingDelayCounterLimitReached()) { | 921 IsIdleMarkingDelayCounterLimitReached()) { |
952 MarkingComplete(action); | 922 MarkingComplete(action); |
953 } else { | 923 } else { |
954 IncrementIdleMarkingDelayCounter(); | 924 IncrementIdleMarkingDelayCounter(); |
955 } | 925 } |
956 } | 926 } |
957 } | 927 } |
958 | 928 |
959 steps_count_++; | 929 steps_count_++; |
(...skipping 39 matching lines...)
999 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { | 969 void IncrementalMarking::IncrementIdleMarkingDelayCounter() { |
1000 idle_marking_delay_counter_++; | 970 idle_marking_delay_counter_++; |
1001 } | 971 } |
1002 | 972 |
1003 | 973 |
1004 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 974 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
1005 idle_marking_delay_counter_ = 0; | 975 idle_marking_delay_counter_ = 0; |
1006 } | 976 } |
1007 } | 977 } |
1008 } // namespace v8::internal | 978 } // namespace v8::internal |