Chromium Code Reviews

Unified diff: src/incremental-marking.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes Created 6 years, 4 months ago
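
The change itself is mechanical: every debug-only ASSERT* becomes the corresponding DCHECK*, SLOW_ASSERT becomes SLOW_DCHECK, and the ENABLE_SLOW_ASSERTS guard becomes ENABLE_SLOW_DCHECKS, while always-on CHECKs (such as the CHECK(success) further down) are left alone. As a minimal sketch of the distinction the new names align with Chromium on, assuming conventional semantics rather than V8's exact macro definitions (the real ones add failure messages and abort hooks):

// Sketch only, not V8's actual macros: DCHECK verifies in debug builds
// and compiles away elsewhere; CHECK verifies in every build.
#include <cstdlib>

#ifdef DEBUG
#define DCHECK(condition)               \
  do {                                  \
    if (!(condition)) std::abort();     \
  } while (false)
#else
#define DCHECK(condition) ((void)0)  // release: condition not evaluated
#endif

#define CHECK(condition)                \
  do {                                  \
    if (!(condition)) std::abort();     \
  } while (false)

Because the release branch of this sketch never evaluates the condition, side-effecting expressions do not belong inside a DCHECK.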
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/incremental-marking.h"

 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
(...skipping 35 matching lines...)
       heap_->mark_compact_collector()->RecordSlot(
           HeapObject::RawField(obj, 0), slot, value);
     }
   }
 }


 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
                                              Object** slot,
                                              Isolate* isolate) {
-  ASSERT(obj->IsHeapObject());
+  DCHECK(obj->IsHeapObject());
   IncrementalMarking* marking = isolate->heap()->incremental_marking();

   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
   int counter = chunk->write_barrier_counter();
   if (counter < (MemoryChunk::kWriteBarrierCounterGranularity / 2)) {
     marking->write_barriers_invoked_since_last_step_ +=
         MemoryChunk::kWriteBarrierCounterGranularity -
         chunk->write_barrier_counter();
     chunk->set_write_barrier_counter(
         MemoryChunk::kWriteBarrierCounterGranularity);
(...skipping 20 matching lines...)
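
A note on the hunk above: the half-granularity test amortizes write-barrier bookkeeping. Generated code decrements each MemoryChunk's counter on every barrier hit; only once the counter drops below kWriteBarrierCounterGranularity / 2 does this slow path fold the consumed count into write_barriers_invoked_since_last_step_ and refill the counter. A standalone sketch of that accounting, with illustrative names (Chunk, kGranularity) standing in for the V8 types:

// Sketch of granularity-based write-barrier accounting; all names here
// are illustrative, not V8's API.
#include <cstdint>

constexpr int kGranularity = 1024;  // hypothetical refill value

struct Chunk {
  int write_barrier_counter = kGranularity;  // decremented per barrier hit
};

int64_t barriers_since_last_step = 0;

void OnWriteBarrierSlowPath(Chunk* chunk) {
  if (chunk->write_barrier_counter < kGranularity / 2) {
    // Fold the barriers consumed so far into the global tally, then
    // refill the per-chunk counter so the fast path stays cheap.
    barriers_since_last_step += kGranularity - chunk->write_barrier_counter;
    chunk->write_barrier_counter = kGranularity;
  }
}

The point of the design is that the hot path touches only a per-chunk int, and the shared tally is updated at most once per kGranularity / 2 barrier hits.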
     RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
     RecordWriteIntoCode(host, &rinfo, value);
   }
 }


 void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
                                                     Object** slot,
                                                     Code* value) {
   if (BaseRecordWrite(host, slot, value)) {
-    ASSERT(slot != NULL);
+    DCHECK(slot != NULL);
     heap_->mark_compact_collector()->
         RecordCodeEntrySlot(reinterpret_cast<Address>(slot), value);
   }
 }


 void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj,
                                                  RelocInfo* rinfo,
                                                  Object* value) {
   MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value));
(...skipping 27 matching lines...)
                                           -heap_obj->Size());
     }
     Marking::AnyToGrey(mark_bit);
   }
 }


 static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
                                        MarkBit mark_bit,
                                        int size) {
-  ASSERT(!Marking::IsImpossible(mark_bit));
+  DCHECK(!Marking::IsImpossible(mark_bit));
   if (mark_bit.Get()) return;
   mark_bit.Set();
   MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
-  ASSERT(Marking::IsBlack(mark_bit));
+  DCHECK(Marking::IsBlack(mark_bit));
 }


 static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
                                         MarkBit mark_bit,
                                         int size) {
-  ASSERT(!Marking::IsImpossible(mark_bit));
+  DCHECK(!Marking::IsImpossible(mark_bit));
   if (Marking::IsBlack(mark_bit)) return;
   Marking::MarkBlack(mark_bit);
   MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
-  ASSERT(Marking::IsBlack(mark_bit));
+  DCHECK(Marking::IsBlack(mark_bit));
 }


 class IncrementalMarkingMarkingVisitor
     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
  public:
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
     table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
     table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
(...skipping 277 matching lines...)
   return FLAG_incremental_marking &&
       FLAG_incremental_marking_steps &&
       heap_->gc_state() == Heap::NOT_IN_GC &&
       !heap_->isolate()->serializer_enabled() &&
       heap_->isolate()->IsInitialized() &&
       heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold;
 }


 void IncrementalMarking::ActivateGeneratedStub(Code* stub) {
-  ASSERT(RecordWriteStub::GetMode(stub) ==
+  DCHECK(RecordWriteStub::GetMode(stub) ==
          RecordWriteStub::STORE_BUFFER_ONLY);

   if (!IsMarking()) {
     // Initially stub is generated in STORE_BUFFER_ONLY mode thus
     // we don't need to do anything if incremental marking is
     // not active.
   } else if (IsCompacting()) {
     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL_COMPACTION);
   } else {
     RecordWriteStub::Patch(stub, RecordWriteStub::INCREMENTAL);
(...skipping 46 matching lines...)
     CHECK(success);
     marking_deque_memory_committed_ = false;
   }
 }


 void IncrementalMarking::Start(CompactionFlag flag) {
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Start\n");
   }
-  ASSERT(FLAG_incremental_marking);
-  ASSERT(FLAG_incremental_marking_steps);
-  ASSERT(state_ == STOPPED);
-  ASSERT(heap_->gc_state() == Heap::NOT_IN_GC);
-  ASSERT(!heap_->isolate()->serializer_enabled());
-  ASSERT(heap_->isolate()->IsInitialized());
+  DCHECK(FLAG_incremental_marking);
+  DCHECK(FLAG_incremental_marking_steps);
+  DCHECK(state_ == STOPPED);
+  DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
+  DCHECK(!heap_->isolate()->serializer_enabled());
+  DCHECK(heap_->isolate()->IsInitialized());

   ResetStepCounters();

   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     StartMarking(flag);
   } else {
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Start sweeping.\n");
     }
     state_ = SWEEPING;
(...skipping 74 matching lines...)
   int current = marking_deque_.bottom();
   int mask = marking_deque_.mask();
   int limit = marking_deque_.top();
   HeapObject** array = marking_deque_.array();
   int new_top = current;

   Map* filler_map = heap_->one_pointer_filler_map();

   while (current != limit) {
     HeapObject* obj = array[current];
-    ASSERT(obj->IsHeapObject());
+    DCHECK(obj->IsHeapObject());
     current = ((current + 1) & mask);
     if (heap_->InNewSpace(obj)) {
       MapWord map_word = obj->map_word();
       if (map_word.IsForwardingAddress()) {
         HeapObject* dest = map_word.ToForwardingAddress();
         array[new_top] = dest;
         new_top = ((new_top + 1) & mask);
-        ASSERT(new_top != marking_deque_.bottom());
+        DCHECK(new_top != marking_deque_.bottom());
 #ifdef DEBUG
         MarkBit mark_bit = Marking::MarkBitFrom(obj);
-        ASSERT(Marking::IsGrey(mark_bit) ||
+        DCHECK(Marking::IsGrey(mark_bit) ||
                (obj->IsFiller() && Marking::IsWhite(mark_bit)));
 #endif
       }
     } else if (obj->map() != filler_map) {
       // Skip one word filler objects that appear on the
       // stack when we perform in place array shift.
       array[new_top] = obj;
       new_top = ((new_top + 1) & mask);
-      ASSERT(new_top != marking_deque_.bottom());
+      DCHECK(new_top != marking_deque_.bottom());
 #ifdef DEBUG
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
       MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-      ASSERT(Marking::IsGrey(mark_bit) ||
+      DCHECK(Marking::IsGrey(mark_bit) ||
              (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
              (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
               Marking::IsBlack(mark_bit)));
 #endif
     }
   }
   marking_deque_.set_top(new_top);
 }
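
The function ending above treats the marking deque as a power-of-two ring buffer: indices advance with (index + 1) & mask, and surviving entries are compacted in place by rewriting them at new_top (following forwarding addresses for objects the scavenger moved, dropping fillers). A self-contained sketch of that in-place filtering pattern, with invented names (Deque, keep, forward) rather than V8's marking deque API:

// In-place filtering of a power-of-two ring buffer, the same pattern as
// the loop above. All names are illustrative.
struct Deque {
  void** array;
  int mask;    // capacity - 1, capacity a power of two
  int bottom;  // oldest entry
  int top;     // one past the newest entry
};

template <typename Keep, typename Forward>
void FilterInPlace(Deque* d, Keep keep, Forward forward) {
  int current = d->bottom;
  int new_top = current;
  while (current != d->top) {
    void* entry = d->array[current];
    current = (current + 1) & d->mask;   // ring-buffer advance
    if (!keep(entry)) continue;          // drop dead entries (e.g. fillers)
    d->array[new_top] = forward(entry);  // rewrite survivor, possibly moved
    new_top = (new_top + 1) & d->mask;   // compaction cursor
  }
  d->top = new_top;  // entries in [bottom, new_top) survived
}

Since new_top can never overtake current, the rewrite only touches slots the loop has already read, so it is safe to do in the same array.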


 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
   MarkBit map_mark_bit = Marking::MarkBitFrom(map);
   if (Marking::IsWhite(map_mark_bit)) {
     WhiteToGreyAndPush(map, map_mark_bit);
   }

   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);

   MarkBit mark_bit = Marking::MarkBitFrom(obj);
-#if ENABLE_SLOW_ASSERTS
+#if ENABLE_SLOW_DCHECKS
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-  SLOW_ASSERT(Marking::IsGrey(mark_bit) ||
+  SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
               (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
                Marking::IsBlack(mark_bit)));
 #endif
   MarkBlackOrKeepBlack(obj, mark_bit, size);
 }


 intptr_t IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) {
   intptr_t bytes_processed = 0;
(...skipping 114 matching lines...)
 void IncrementalMarking::Finalize() {
   Hurry();
   state_ = STOPPED;
   is_compacting_ = false;
   heap_->new_space()->LowerInlineAllocationLimit(0);
   IncrementalMarking::set_should_hurry(false);
   ResetStepCounters();
   PatchIncrementalMarkingRecordWriteStubs(heap_,
                                           RecordWriteStub::STORE_BUFFER_ONLY);
   DeactivateIncrementalWriteBarrier();
-  ASSERT(marking_deque_.IsEmpty());
+  DCHECK(marking_deque_.IsEmpty());
   heap_->isolate()->stack_guard()->ClearGC();
 }


 void IncrementalMarking::MarkingComplete(CompletionAction action) {
   state_ = COMPLETE;
   // We will set the stack guard to request a GC now. This will mean the rest
   // of the GC gets performed as soon as possible (we can't do a GC here in a
   // record-write context). If a few things get allocated between now and then
   // that shouldn't make us do a scavenge and keep being incremental, so we set
(...skipping 161 matching lines...)
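
The comment in MarkingComplete describes a deferred-request pattern: completion can be detected from a record-write context where actually collecting is unsafe, so the function only raises a flag on the stack guard, and the runtime performs the GC at its next interrupt check. A generic sketch of that pattern (names invented; this is not V8's stack guard API):

// Unsafe context sets a flag; a safe interrupt-check point does the work.
#include <atomic>

std::atomic<bool> gc_requested{false};

void RequestGC() {  // callable even from a record-write context
  gc_requested.store(true, std::memory_order_release);
}

void HandleInterrupts() {  // called at safe points in execution
  if (gc_requested.exchange(false, std::memory_order_acq_rel)) {
    // ... perform the collection here, where it is safe ...
  }
}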
   bytes_scanned_ = 0;
   write_barriers_invoked_since_last_step_ = 0;
 }


 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
 }

 } }  // namespace v8::internal