Chromium Code Reviews

Side by Side Diff: src/incremental-marking.cc

Issue 11368137: Refactoring incremental marking (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: Created 8 years, 1 month ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 157 matching lines...)
168 MarkBit obj_bit = Marking::MarkBitFrom(obj); 168 MarkBit obj_bit = Marking::MarkBitFrom(obj);
169 if (Marking::IsBlack(obj_bit)) { 169 if (Marking::IsBlack(obj_bit)) {
170 // Object is not going to be rescanned. We need to record the slot. 170 // Object is not going to be rescanned. We need to record the slot.
171 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, 171 heap_->mark_compact_collector()->RecordRelocSlot(rinfo,
172 Code::cast(value)); 172 Code::cast(value));
173 } 173 }
174 } 174 }
175 } 175 }
176 176
177 177
178 static void MarkObjectGreyDoNotEnqueue(Object* obj) {
179 if (obj->IsHeapObject()) {
180 HeapObject* heap_obj = HeapObject::cast(obj);
181 MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
182 if (Marking::IsBlack(mark_bit)) {
183 MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
184 -heap_obj->Size());
185 }
186 Marking::AnyToGrey(mark_bit);
187 }
188 }
189
190
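The helper above encodes the live-bytes accounting rule that the rest of the patch relies on: pages only count bytes for black (fully scanned) objects, so demoting a black object back to grey must subtract its size, otherwise it would be counted twice when it is blackened again (as VisitObject below does). A minimal sketch of that rule, using invented names rather than V8's Marking and MemoryChunk APIs:

```cpp
// Toy model of the accounting assumed above (not V8 code).
// White = unmarked, grey = marked but not yet scanned, black = scanned;
// only black objects contribute to the page's live-byte counter.
#include <cstdio>

enum class Color { kWhite, kGrey, kBlack };

struct ToyObject {
  Color color;
  int size_in_bytes;
};

// Stand-in for the per-page live-byte counter kept on a MemoryChunk.
static int g_live_bytes = 0;

// Mirrors MarkObjectGreyDoNotEnqueue: demote to grey without pushing the
// object onto any marking deque, undoing the accounting if it was black.
void AnyToGreyDoNotEnqueue(ToyObject* obj) {
  if (obj->color == Color::kBlack) g_live_bytes -= obj->size_in_bytes;
  obj->color = Color::kGrey;
}

// Mirrors the blackening step in VisitObject: credit the size exactly once.
void MarkBlack(ToyObject* obj) {
  obj->color = Color::kBlack;
  g_live_bytes += obj->size_in_bytes;
}

int main() {
  ToyObject cache{Color::kWhite, 128};
  MarkBlack(&cache);              // live bytes: 128
  AnyToGreyDoNotEnqueue(&cache);  // live bytes: 0 again
  MarkBlack(&cache);              // re-blackened later: 128, not 256
  std::printf("live bytes: %d\n", g_live_bytes);
}
```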
178 class IncrementalMarkingMarkingVisitor 191 class IncrementalMarkingMarkingVisitor
179 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { 192 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
180 public: 193 public:
181 static void Initialize() { 194 static void Initialize() {
182 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); 195 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
183 196
197 table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
184 table_.Register(kVisitJSRegExp, &VisitJSRegExp); 198 table_.Register(kVisitJSRegExp, &VisitJSRegExp);
185 } 199 }
186 200
201 static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
202 Context* context = Context::cast(object);
203
204 // We will mark cache black with a separate pass
205 // when we finish marking.
206 MarkObjectGreyDoNotEnqueue(context->normalized_map_cache());
207 VisitNativeContext(map, context);
208 }
209
187 static void VisitJSWeakMap(Map* map, HeapObject* object) { 210 static void VisitJSWeakMap(Map* map, HeapObject* object) {
188 Heap* heap = map->GetHeap(); 211 Heap* heap = map->GetHeap();
189 VisitPointers(heap, 212 VisitPointers(heap,
190 HeapObject::RawField(object, JSWeakMap::kPropertiesOffset), 213 HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
191 HeapObject::RawField(object, JSWeakMap::kSize)); 214 HeapObject::RawField(object, JSWeakMap::kSize));
192 } 215 }
193 216
194 static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {} 217 static void BeforeVisitingSharedFunctionInfo(HeapObject* object) {}
195 218
196 INLINE(static void VisitPointer(Heap* heap, Object** p)) { 219 INLINE(static void VisitPointer(Heap* heap, Object** p)) {
(...skipping 290 matching lines...)
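The table_.Register calls in Initialize() above use StaticMarkingVisitor's dispatch table: each object kind maps to a callback, and the incremental visitor only overrides the entries it needs (native contexts, whose special handling previously lived inline in Hurry() and Step(), and JSRegExp). A generic sketch of that pattern, with invented names rather than V8's actual visitor machinery:

```cpp
// Generic sketch of a visitor dispatch table (invented names, not V8 code):
// callbacks are stored per object-kind id, so a specialized visitor can
// override the entry for one kind while inheriting defaults for the rest.
#include <cstdio>

enum VisitorId { kVisitPlain, kVisitContextLike, kVisitorIdCount };

struct FakeObject { VisitorId id; };

using Callback = void (*)(FakeObject* object);

struct VisitorTable {
  Callback callbacks[kVisitorIdCount] = {};
  void Register(VisitorId id, Callback callback) { callbacks[id] = callback; }
  void Visit(FakeObject* object) { callbacks[object->id](object); }
};

static void VisitPlain(FakeObject*) { std::puts("default body scan"); }
static void VisitContextLikeIncremental(FakeObject*) {
  std::puts("context scan that keeps one cache field grey");
}

int main() {
  VisitorTable table;
  table.Register(kVisitPlain, &VisitPlain);
  // Analogous to registering VisitNativeContextIncremental for
  // kVisitNativeContext: only the context-like entry is specialized.
  table.Register(kVisitContextLike, &VisitContextLikeIncremental);

  FakeObject context{kVisitContextLike};
  table.Visit(&context);
}
```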
487 if (FLAG_trace_incremental_marking) { 510 if (FLAG_trace_incremental_marking) {
488 PrintF("[IncrementalMarking] Start sweeping.\n"); 511 PrintF("[IncrementalMarking] Start sweeping.\n");
489 } 512 }
490 state_ = SWEEPING; 513 state_ = SWEEPING;
491 } 514 }
492 515
493 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold); 516 heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold);
494 } 517 }
495 518
496 519
497 static void MarkObjectGreyDoNotEnqueue(Object* obj) {
498 if (obj->IsHeapObject()) {
499 HeapObject* heap_obj = HeapObject::cast(obj);
500 MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
501 if (Marking::IsBlack(mark_bit)) {
502 MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
503 -heap_obj->Size());
504 }
505 Marking::AnyToGrey(mark_bit);
506 }
507 }
508
509
510 void IncrementalMarking::StartMarking(CompactionFlag flag) { 520 void IncrementalMarking::StartMarking(CompactionFlag flag) {
511 if (FLAG_trace_incremental_marking) { 521 if (FLAG_trace_incremental_marking) {
512 PrintF("[IncrementalMarking] Start marking\n"); 522 PrintF("[IncrementalMarking] Start marking\n");
513 } 523 }
514 524
515 is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) && 525 is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
516 heap_->mark_compact_collector()->StartCompaction( 526 heap_->mark_compact_collector()->StartCompaction(
517 MarkCompactCollector::INCREMENTAL_COMPACTION); 527 MarkCompactCollector::INCREMENTAL_COMPACTION);
518 528
519 state_ = MARKING; 529 state_ = MARKING;
(...skipping 92 matching lines...)
612 } 622 }
613 } 623 }
614 marking_deque_.set_top(new_top); 624 marking_deque_.set_top(new_top);
615 625
616 steps_took_since_last_gc_ = 0; 626 steps_took_since_last_gc_ = 0;
617 steps_count_since_last_gc_ = 0; 627 steps_count_since_last_gc_ = 0;
618 longest_step_ = 0.0; 628 longest_step_ = 0.0;
619 } 629 }
620 630
621 631
632 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
633 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
634 if (Marking::IsWhite(map_mark_bit)) {
635 WhiteToGreyAndPush(map, map_mark_bit);
636 }
637
638 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
639
640 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
641 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
642 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
643 Marking::MarkBlack(obj_mark_bit);
644 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
645 }
646
647
648 void IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) {
649 Map* filler_map = heap_->one_pointer_filler_map();
650 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
651 HeapObject* obj = marking_deque_.Pop();
652
653 // Explicitly skip one word fillers. Incremental markbit patterns are
654 // correct only for objects that occupy at least two words.
655 Map* map = obj->map();
656 if (map == filler_map) continue;
657
658 int size = obj->SizeFromMap(map);
659 bytes_to_process -= size;
660 VisitObject(map, obj, size);
661 }
662 }
663
664
665 void IncrementalMarking::ProcessMarkingDeque() {
666 Map* filler_map = heap_->one_pointer_filler_map();
667 while (!marking_deque_.IsEmpty()) {
668 HeapObject* obj = marking_deque_.Pop();
669
670 // Explicitly skip one word fillers. Incremental markbit patterns are
671 // correct only for objects that occupy at least two words.
672 Map* map = obj->map();
673 if (map == filler_map) continue;
674
675 VisitObject(map, obj, obj->SizeFromMap(map));
676 }
677 }
678
679
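These two ProcessMarkingDeque overloads are the core of the refactoring: the budgeted overload replaces the inline loop in Step() and the unbounded overload replaces the one in Hurry(), so the per-object work now lives in a single place (VisitObject). A stripped-down sketch of the same bounded/unbounded drain pattern over a generic work list, with invented names and no V8 types:

```cpp
// Minimal sketch of the shared-drain pattern (invented names, not V8 code):
// one work list, a budgeted drain for incremental steps and an unbounded
// drain for finalization, both delegating to the same Visit().
#include <vector>

struct Item {
  int size;   // cost charged against the step budget
  bool skip;  // stands in for the one-word filler check
};

class Worklist {
 public:
  void Push(const Item& item) { items_.push_back(item); }

  // Analogous to ProcessMarkingDeque(bytes_to_process): stop once the
  // budget runs out; whatever remains is handled by a later step.
  void Process(long budget) {
    while (!items_.empty() && budget > 0) {
      Item item = Pop();
      if (item.skip) continue;  // like skipping one-word fillers
      budget -= item.size;
      Visit(item);
    }
  }

  // Analogous to the no-argument ProcessMarkingDeque(): drain everything,
  // used when marking must finish now (Hurry).
  void Process() {
    while (!items_.empty()) {
      Item item = Pop();
      if (item.skip) continue;
      Visit(item);
    }
  }

 private:
  Item Pop() {
    Item item = items_.back();
    items_.pop_back();
    return item;
  }

  void Visit(const Item&) { /* shared per-item work, like VisitObject */ }

  std::vector<Item> items_;
};
```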
622 void IncrementalMarking::Hurry() { 680 void IncrementalMarking::Hurry() {
623 if (state() == MARKING) { 681 if (state() == MARKING) {
624 double start = 0.0; 682 double start = 0.0;
625 if (FLAG_trace_incremental_marking) { 683 if (FLAG_trace_incremental_marking) {
626 PrintF("[IncrementalMarking] Hurry\n"); 684 PrintF("[IncrementalMarking] Hurry\n");
627 start = OS::TimeCurrentMillis(); 685 start = OS::TimeCurrentMillis();
628 } 686 }
629 // TODO(gc) hurry can mark objects it encounters black as mutator 687 // TODO(gc) hurry can mark objects it encounters black as mutator
630 // was stopped. 688 // was stopped.
631 Map* filler_map = heap_->one_pointer_filler_map(); 689 ProcessMarkingDeque();
632 Map* native_context_map = heap_->native_context_map();
633 while (!marking_deque_.IsEmpty()) {
634 HeapObject* obj = marking_deque_.Pop();
635
636 // Explicitly skip one word fillers. Incremental markbit patterns are
637 // correct only for objects that occupy at least two words.
638 Map* map = obj->map();
639 if (map == filler_map) {
640 continue;
641 } else if (map == native_context_map) {
642 // Native contexts have weak fields.
643 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
644 } else {
645 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
646 if (Marking::IsWhite(map_mark_bit)) {
647 WhiteToGreyAndPush(map, map_mark_bit);
648 }
649 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
650 }
651
652 MarkBit mark_bit = Marking::MarkBitFrom(obj);
653 ASSERT(!Marking::IsBlack(mark_bit));
654 Marking::MarkBlack(mark_bit);
655 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
656 }
657 state_ = COMPLETE; 690 state_ = COMPLETE;
658 if (FLAG_trace_incremental_marking) { 691 if (FLAG_trace_incremental_marking) {
659 double end = OS::TimeCurrentMillis(); 692 double end = OS::TimeCurrentMillis();
660 PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n", 693 PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
661 static_cast<int>(end - start)); 694 static_cast<int>(end - start));
662 } 695 }
663 } 696 }
664 697
665 if (FLAG_cleanup_code_caches_at_gc) { 698 if (FLAG_cleanup_code_caches_at_gc) {
666 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache(); 699 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
(...skipping 118 matching lines...)
785 if (FLAG_trace_incremental_marking || FLAG_trace_gc) { 818 if (FLAG_trace_incremental_marking || FLAG_trace_gc) {
786 start = OS::TimeCurrentMillis(); 819 start = OS::TimeCurrentMillis();
787 } 820 }
788 821
789 if (state_ == SWEEPING) { 822 if (state_ == SWEEPING) {
790 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) { 823 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
791 bytes_scanned_ = 0; 824 bytes_scanned_ = 0;
792 StartMarking(PREVENT_COMPACTION); 825 StartMarking(PREVENT_COMPACTION);
793 } 826 }
794 } else if (state_ == MARKING) { 827 } else if (state_ == MARKING) {
795 Map* filler_map = heap_->one_pointer_filler_map(); 828 ProcessMarkingDeque(bytes_to_process);
796 Map* native_context_map = heap_->native_context_map();
797 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
798 HeapObject* obj = marking_deque_.Pop();
799
800 // Explicitly skip one word fillers. Incremental markbit patterns are
801 // correct only for objects that occupy at least two words.
802 Map* map = obj->map();
803 if (map == filler_map) continue;
804
805 int size = obj->SizeFromMap(map);
806 bytes_to_process -= size;
807 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
808 if (Marking::IsWhite(map_mark_bit)) {
809 WhiteToGreyAndPush(map, map_mark_bit);
810 }
811
812 // TODO(gc) switch to static visitor instead of normal visitor.
813 if (map == native_context_map) {
814 // Native contexts have weak fields.
815 Context* ctx = Context::cast(obj);
816
817 // We will mark cache black with a separate pass
818 // when we finish marking.
819 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
820
821 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
822 } else {
823 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
824 }
825
826 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
827 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
828 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
829 Marking::MarkBlack(obj_mark_bit);
830 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
831 }
832 if (marking_deque_.IsEmpty()) MarkingComplete(action); 829 if (marking_deque_.IsEmpty()) MarkingComplete(action);
833 } 830 }
834 831
835 steps_count_++; 832 steps_count_++;
836 steps_count_since_last_gc_++; 833 steps_count_since_last_gc_++;
837 834
838 bool speed_up = false; 835 bool speed_up = false;
839 836
840 if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) { 837 if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) {
841 if (FLAG_trace_gc) { 838 if (FLAG_trace_gc) {
(...skipping 81 matching lines...)
923 bytes_scanned_ = 0; 920 bytes_scanned_ = 0;
924 write_barriers_invoked_since_last_step_ = 0; 921 write_barriers_invoked_since_last_step_ = 0;
925 } 922 }
926 923
927 924
928 int64_t IncrementalMarking::SpaceLeftInOldSpace() { 925 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
929 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); 926 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
930 } 927 }
931 928
932 } } // namespace v8::internal 929 } } // namespace v8::internal