OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
10 #include "src/heap/gc-idle-time-handler.h" | 10 #include "src/heap/gc-idle-time-handler.h" |
(...skipping 21 matching lines...) |
32 was_activated_(false), | 32 was_activated_(false), |
33 black_allocation_(false), | 33 black_allocation_(false), |
34 finalize_marking_completed_(false), | 34 finalize_marking_completed_(false), |
35 trace_wrappers_toggle_(false), | 35 trace_wrappers_toggle_(false), |
36 request_type_(NONE), | 36 request_type_(NONE), |
37 new_generation_observer_(*this, kAllocatedThreshold), | 37 new_generation_observer_(*this, kAllocatedThreshold), |
38 old_generation_observer_(*this, kAllocatedThreshold) {} | 38 old_generation_observer_(*this, kAllocatedThreshold) {} |
39 | 39 |
40 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 40 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
41 HeapObject* value_heap_obj = HeapObject::cast(value); | 41 HeapObject* value_heap_obj = HeapObject::cast(value); |
42 MarkBit value_bit = ObjectMarking::MarkBitFrom(value_heap_obj); | 42 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj)); |
43 DCHECK(!Marking::IsImpossible(value_bit)); | 43 DCHECK(!ObjectMarking::IsImpossible(obj)); |
| 44 const bool is_black = ObjectMarking::IsBlack(obj); |
44 | 45 |
45 MarkBit obj_bit = ObjectMarking::MarkBitFrom(obj); | 46 if (is_black && ObjectMarking::IsWhite(value_heap_obj)) { |
46 DCHECK(!Marking::IsImpossible(obj_bit)); | 47 WhiteToGreyAndPush(value_heap_obj); |
47 bool is_black = Marking::IsBlack(obj_bit); | |
48 | |
49 if (is_black && Marking::IsWhite(value_bit)) { | |
50 WhiteToGreyAndPush(value_heap_obj, value_bit); | |
51 RestartIfNotMarking(); | 48 RestartIfNotMarking(); |
52 } | 49 } |
53 return is_compacting_ && is_black; | 50 return is_compacting_ && is_black; |
54 } | 51 } |
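Note: the barrier above preserves the tri-color invariant during incremental marking: a black host that gets a pointer to a white value greys the value and re-queues it, and the return value tells the caller whether the slot still has to be recorded for compaction. A condensed sketch of that control flow, using simplified stand-in types rather than V8's real MarkBit/deque machinery (RestartIfNotMarking() is omitted):

#include <deque>

enum class Color { kWhite, kGrey, kBlack };

struct Obj { Color color = Color::kWhite; };

struct BarrierSketch {
  std::deque<Obj*> marking_deque;  // stand-in for the collector's marking deque
  bool is_compacting = false;

  void WhiteToGreyAndPush(Obj* value) {
    value->color = Color::kGrey;
    marking_deque.push_back(value);
  }

  // Mirrors the `is_compacting_ && is_black` result of BaseRecordWrite().
  bool BaseRecordWrite(Obj* host, Obj* value) {
    const bool is_black = host->color == Color::kBlack;
    if (is_black && value->color == Color::kWhite) {
      // A black->white edge would otherwise be missed by marking.
      WhiteToGreyAndPush(value);
    }
    return is_compacting && is_black;
  }
};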
55 | 52 |
56 | 53 |
57 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 54 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
58 Object* value) { | 55 Object* value) { |
59 if (BaseRecordWrite(obj, value) && slot != NULL) { | 56 if (BaseRecordWrite(obj, value) && slot != NULL) { |
60 // Object is not going to be rescanned. We need to record the slot. | 57 // Object is not going to be rescanned. We need to record the slot. |
(...skipping 50 matching lines...) |
111 } | 108 } |
112 | 109 |
113 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 110 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
114 Object* value) { | 111 Object* value) { |
115 if (BaseRecordWrite(host, value)) { | 112 if (BaseRecordWrite(host, value)) { |
116 // Object is not going to be rescanned. We need to record the slot. | 113 // Object is not going to be rescanned. We need to record the slot. |
117 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 114 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
118 } | 115 } |
119 } | 116 } |
120 | 117 |
121 | 118 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
122 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) { | 119 ObjectMarking::WhiteToGrey(obj); |
123 Marking::WhiteToGrey(mark_bit); | |
124 heap_->mark_compact_collector()->marking_deque()->Push(obj); | 120 heap_->mark_compact_collector()->marking_deque()->Push(obj); |
125 } | 121 } |
126 | 122 |
127 | 123 |
128 static void MarkObjectGreyDoNotEnqueue(Object* obj) { | 124 static void MarkObjectGreyDoNotEnqueue(Object* obj) { |
129 if (obj->IsHeapObject()) { | 125 if (obj->IsHeapObject()) { |
130 HeapObject* heap_obj = HeapObject::cast(obj); | 126 HeapObject* heap_obj = HeapObject::cast(obj); |
131 MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(obj)); | 127 ObjectMarking::AnyToGrey(heap_obj); |
132 if (Marking::IsBlack(mark_bit)) { | |
133 MemoryChunk::IncrementLiveBytes(heap_obj, -heap_obj->Size()); | |
134 } | |
135 Marking::AnyToGrey(mark_bit); | |
136 } | 128 } |
137 } | 129 } |
138 | 130 |
139 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 131 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
140 HeapObject* to) { | 132 HeapObject* to) { |
| 133 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
141 // This is only used when resizing an object. | 134 // This is only used when resizing an object. |
142 DCHECK(MemoryChunk::FromAddress(from->address()) == | 135 DCHECK(MemoryChunk::FromAddress(from->address()) == |
143 MemoryChunk::FromAddress(to->address())); | 136 MemoryChunk::FromAddress(to->address())); |
144 | 137 |
145 if (!heap->incremental_marking()->IsMarking()) return; | 138 if (!heap->incremental_marking()->IsMarking()) return; |
146 | 139 |
147 // If the mark doesn't move, we don't check the color of the object. | 140 // If the mark doesn't move, we don't check the color of the object. |
148 // It doesn't matter whether the object is black, since it hasn't changed | 141 // It doesn't matter whether the object is black, since it hasn't changed |
149 // size, so the adjustment to the live data count will be zero anyway. | 142 // size, so the adjustment to the live data count will be zero anyway. |
150 if (from == to) return; | 143 if (from == to) return; |
151 | 144 |
152 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to); | 145 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to); |
153 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from); | 146 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from); |
154 | 147 |
155 #ifdef DEBUG | 148 #ifdef DEBUG |
156 Marking::ObjectColor old_color = Marking::Color(old_mark_bit); | 149 Marking::ObjectColor old_color = Marking::Color(old_mark_bit); |
157 #endif | 150 #endif |
158 | 151 |
159 if (Marking::IsBlack(old_mark_bit)) { | 152 if (Marking::IsBlack(old_mark_bit)) { |
160 Marking::BlackToWhite(old_mark_bit); | 153 Marking::BlackToWhite(old_mark_bit); |
161 Marking::MarkBlack(new_mark_bit); | 154 Marking::WhiteToBlack(new_mark_bit); |
162 return; | 155 return; |
163 } else if (Marking::IsGrey(old_mark_bit)) { | 156 } else if (Marking::IsGrey(old_mark_bit)) { |
164 Marking::GreyToWhite(old_mark_bit); | 157 Marking::GreyToWhite(old_mark_bit); |
165 heap->incremental_marking()->WhiteToGreyAndPush(to, new_mark_bit); | 158 Marking::WhiteToGrey(new_mark_bit); |
| 159 heap->mark_compact_collector()->marking_deque()->Push(to); |
166 heap->incremental_marking()->RestartIfNotMarking(); | 160 heap->incremental_marking()->RestartIfNotMarking(); |
167 } | 161 } |
168 | 162 |
169 #ifdef DEBUG | 163 #ifdef DEBUG |
170 Marking::ObjectColor new_color = Marking::Color(new_mark_bit); | 164 Marking::ObjectColor new_color = Marking::Color(new_mark_bit); |
171 DCHECK(new_color == old_color); | 165 DCHECK(new_color == old_color); |
172 #endif | 166 #endif |
173 } | 167 } |
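Note: TransferMark() now moves the color by hand in both branches; in the grey case it greys the new location and pushes it directly instead of going through WhiteToGreyAndPush(), which no longer takes a MarkBit. A simplified sketch of the transfer logic with stand-in types (the IsMarking() and same-page checks are left out):

#include <deque>

enum class Color { kWhite, kGrey, kBlack };
struct Obj { Color color = Color::kWhite; };

struct TransferSketch {
  std::deque<Obj*> marking_deque;

  // Used when an object is resized in place and its mark has to move.
  void TransferMark(Obj* from, Obj* to) {
    if (from == to) return;
    if (from->color == Color::kBlack) {
      from->color = Color::kWhite;  // clear the old location...
      to->color = Color::kBlack;    // ...and keep the object black
    } else if (from->color == Color::kGrey) {
      from->color = Color::kWhite;
      to->color = Color::kGrey;
      marking_deque.push_back(to);  // grey objects still need scanning
    }
    // A white `from` leaves `to` white; nothing to do.
  }
};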
174 | 168 |
175 class IncrementalMarkingMarkingVisitor | 169 class IncrementalMarkingMarkingVisitor |
(...skipping 27 matching lines...) |
203 do { | 197 do { |
204 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), | 198 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), |
205 HeapObject::RawField(object, end_offset)); | 199 HeapObject::RawField(object, end_offset)); |
206 start_offset = end_offset; | 200 start_offset = end_offset; |
207 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); | 201 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); |
208 scan_until_end = | 202 scan_until_end = |
209 heap->mark_compact_collector()->marking_deque()->IsFull(); | 203 heap->mark_compact_collector()->marking_deque()->IsFull(); |
210 } while (scan_until_end && start_offset < object_size); | 204 } while (scan_until_end && start_offset < object_size); |
211 chunk->set_progress_bar(start_offset); | 205 chunk->set_progress_bar(start_offset); |
212 if (start_offset < object_size) { | 206 if (start_offset < object_size) { |
213 if (Marking::IsGrey(ObjectMarking::MarkBitFrom(object))) { | 207 if (ObjectMarking::IsGrey(object)) { |
214 heap->mark_compact_collector()->marking_deque()->Unshift(object); | 208 heap->mark_compact_collector()->marking_deque()->Unshift(object); |
215 } else { | 209 } else { |
216 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); | 210 DCHECK(ObjectMarking::IsBlack(object)); |
217 heap->mark_compact_collector()->UnshiftBlack(object); | 211 heap->mark_compact_collector()->UnshiftBlack(object); |
218 } | 212 } |
219 heap->incremental_marking()->NotifyIncompleteScanOfObject( | 213 heap->incremental_marking()->NotifyIncompleteScanOfObject( |
220 object_size - (start_offset - already_scanned_offset)); | 214 object_size - (start_offset - already_scanned_offset)); |
221 } | 215 } |
222 } else { | 216 } else { |
223 FixedArrayVisitor::Visit(map, object); | 217 FixedArrayVisitor::Visit(map, object); |
224 } | 218 } |
225 } | 219 } |
226 | 220 |
(...skipping 31 matching lines...) |
258 | 252 |
259 // Marks the object grey and pushes it on the marking stack. | 253 // Marks the object grey and pushes it on the marking stack. |
260 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 254 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
261 IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj)); | 255 IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj)); |
262 } | 256 } |
263 | 257 |
264 // Marks the object black without pushing it on the marking stack. | 258 // Marks the object black without pushing it on the marking stack. |
265 // Returns true if object needed marking and false otherwise. | 259 // Returns true if object needed marking and false otherwise. |
266 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 260 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
267 HeapObject* heap_object = HeapObject::cast(obj); | 261 HeapObject* heap_object = HeapObject::cast(obj); |
268 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); | 262 if (ObjectMarking::IsWhite(heap_object)) { |
269 if (Marking::IsWhite(mark_bit)) { | 263 ObjectMarking::WhiteToBlack(heap_object); |
270 Marking::MarkBlack(mark_bit); | |
271 MemoryChunk::IncrementLiveBytes(heap_object, heap_object->Size()); | |
272 return true; | 264 return true; |
273 } | 265 } |
274 return false; | 266 return false; |
275 } | 267 } |
276 }; | 268 }; |
277 | 269 |
278 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 270 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
279 if (IsMarking() && Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) { | 271 if (IsMarking() && ObjectMarking::IsBlack(object)) { |
280 Page* page = Page::FromAddress(object->address()); | 272 Page* page = Page::FromAddress(object->address()); |
281 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 273 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
282 // IterateBlackObject requires us to visit the whole object. | 274 // IterateBlackObject requires us to visit the whole object. |
283 page->ResetProgressBar(); | 275 page->ResetProgressBar(); |
284 } | 276 } |
285 Map* map = object->map(); | 277 Map* map = object->map(); |
286 MarkGrey(heap_, map); | 278 MarkGrey(heap_, map); |
287 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 279 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
288 } | 280 } |
289 } | 281 } |
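Note: this is the recurring shape of the CL. Call sites stop fetching a MarkBit via ObjectMarking::MarkBitFrom() and flipping it through Marking::*, and instead use the object-based ObjectMarking::* helpers; the explicit MemoryChunk::IncrementLiveBytes() calls disappear from the call sites as well, the live-byte accounting presumably being handled inside the new helpers. A standalone sketch of what MarkObjectWithoutPush() reduces to, with stand-in types rather than the real ObjectMarking:

enum class Color { kWhite, kGrey, kBlack };

struct Chunk { int live_bytes = 0; };

struct Obj {
  Color color = Color::kWhite;
  int size = 0;
  Chunk* chunk = nullptr;  // assumed non-null: the page owning the object
};

// Object-based helpers: color transition and live-byte accounting in one place.
namespace object_marking_sketch {
inline bool IsWhite(const Obj* o) { return o->color == Color::kWhite; }
inline void WhiteToBlack(Obj* o) {
  o->color = Color::kBlack;
  o->chunk->live_bytes += o->size;
}
}  // namespace object_marking_sketch

// Shape of MarkObjectWithoutPush() after the change: mark black, no push.
inline bool MarkObjectWithoutPushSketch(Obj* o) {
  if (object_marking_sketch::IsWhite(o)) {
    object_marking_sketch::WhiteToBlack(o);
    return true;  // object needed marking
  }
  return false;
}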
(...skipping 364 matching lines...) |
654 } | 646 } |
655 | 647 |
656 | 648 |
657 bool ShouldRetainMap(Map* map, int age) { | 649 bool ShouldRetainMap(Map* map, int age) { |
658 if (age == 0) { | 650 if (age == 0) { |
659 // The map has aged. Do not retain this map. | 651 // The map has aged. Do not retain this map. |
660 return false; | 652 return false; |
661 } | 653 } |
662 Object* constructor = map->GetConstructor(); | 654 Object* constructor = map->GetConstructor(); |
663 if (!constructor->IsHeapObject() || | 655 if (!constructor->IsHeapObject() || |
664 Marking::IsWhite( | 656 ObjectMarking::IsWhite(HeapObject::cast(constructor))) { |
665 ObjectMarking::MarkBitFrom(HeapObject::cast(constructor)))) { | |
666 // The constructor is dead, no new objects with this map can | 657 // The constructor is dead, no new objects with this map can |
667 // be created. Do not retain this map. | 658 // be created. Do not retain this map. |
668 return false; | 659 return false; |
669 } | 660 } |
670 return true; | 661 return true; |
671 } | 662 } |
672 | 663 |
673 | 664 |
674 void IncrementalMarking::RetainMaps() { | 665 void IncrementalMarking::RetainMaps() { |
675 // Do not retain dead maps if flag disables it or there is | 666 // Do not retain dead maps if flag disables it or there is |
676 // - memory pressure (reduce_memory_footprint_), | 667 // - memory pressure (reduce_memory_footprint_), |
677 // - GC is requested by tests or dev-tools (abort_incremental_marking_). | 668 // - GC is requested by tests or dev-tools (abort_incremental_marking_). |
678 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || | 669 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || |
679 heap()->ShouldAbortIncrementalMarking() || | 670 heap()->ShouldAbortIncrementalMarking() || |
680 FLAG_retain_maps_for_n_gc == 0; | 671 FLAG_retain_maps_for_n_gc == 0; |
681 ArrayList* retained_maps = heap()->retained_maps(); | 672 ArrayList* retained_maps = heap()->retained_maps(); |
682 int length = retained_maps->Length(); | 673 int length = retained_maps->Length(); |
683 // The number_of_disposed_maps separates maps in the retained_maps | 674 // The number_of_disposed_maps separates maps in the retained_maps |
684 // array that were created before and after context disposal. | 675 // array that were created before and after context disposal. |
685 // We do not age and retain disposed maps to avoid memory leaks. | 676 // We do not age and retain disposed maps to avoid memory leaks. |
686 int number_of_disposed_maps = heap()->number_of_disposed_maps_; | 677 int number_of_disposed_maps = heap()->number_of_disposed_maps_; |
687 for (int i = 0; i < length; i += 2) { | 678 for (int i = 0; i < length; i += 2) { |
688 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 679 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
689 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 680 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
690 if (cell->cleared()) continue; | 681 if (cell->cleared()) continue; |
691 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 682 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
692 int new_age; | 683 int new_age; |
693 Map* map = Map::cast(cell->value()); | 684 Map* map = Map::cast(cell->value()); |
694 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); | |
695 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 685 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
696 Marking::IsWhite(map_mark)) { | 686 ObjectMarking::IsWhite(map)) { |
697 if (ShouldRetainMap(map, age)) { | 687 if (ShouldRetainMap(map, age)) { |
698 MarkGrey(heap(), map); | 688 MarkGrey(heap(), map); |
699 } | 689 } |
700 Object* prototype = map->prototype(); | 690 Object* prototype = map->prototype(); |
701 if (age > 0 && prototype->IsHeapObject() && | 691 if (age > 0 && prototype->IsHeapObject() && |
702 Marking::IsWhite( | 692 ObjectMarking::IsWhite(HeapObject::cast(prototype))) { |
703 ObjectMarking::MarkBitFrom(HeapObject::cast(prototype)))) { | |
704 // The prototype is not marked, age the map. | 693 // The prototype is not marked, age the map. |
705 new_age = age - 1; | 694 new_age = age - 1; |
706 } else { | 695 } else { |
707 // The prototype and the constructor are marked, this map keeps only | 696 // The prototype and the constructor are marked, this map keeps only |
708 // transition tree alive, not JSObjects. Do not age the map. | 697 // transition tree alive, not JSObjects. Do not age the map. |
709 new_age = age; | 698 new_age = age; |
710 } | 699 } |
711 } else { | 700 } else { |
712 new_age = FLAG_retain_maps_for_n_gc; | 701 new_age = FLAG_retain_maps_for_n_gc; |
713 } | 702 } |
(...skipping 86 matching lines...) |
800 // Only pointers to from space have to be updated. | 789 // Only pointers to from space have to be updated. |
801 if (heap_->InFromSpace(obj)) { | 790 if (heap_->InFromSpace(obj)) { |
802 MapWord map_word = obj->map_word(); | 791 MapWord map_word = obj->map_word(); |
803 // There may be objects on the marking deque that do not exist anymore, | 792 // There may be objects on the marking deque that do not exist anymore, |
804 // e.g. left trimmed objects or objects from the root set (frames). | 793 // e.g. left trimmed objects or objects from the root set (frames). |
805 // If these objects are dead at scavenging time, their marking deque | 794 // If these objects are dead at scavenging time, their marking deque |
806 // entries will not point to forwarding addresses. Hence, we can discard | 795 // entries will not point to forwarding addresses. Hence, we can discard |
807 // them. | 796 // them. |
808 if (map_word.IsForwardingAddress()) { | 797 if (map_word.IsForwardingAddress()) { |
809 HeapObject* dest = map_word.ToForwardingAddress(); | 798 HeapObject* dest = map_word.ToForwardingAddress(); |
810 if (Marking::IsBlack(ObjectMarking::MarkBitFrom(dest))) continue; | 799 if (ObjectMarking::IsBlack(dest)) continue; |
811 array[new_top] = dest; | 800 array[new_top] = dest; |
812 new_top = ((new_top + 1) & mask); | 801 new_top = ((new_top + 1) & mask); |
813 DCHECK(new_top != marking_deque->bottom()); | 802 DCHECK(new_top != marking_deque->bottom()); |
814 #ifdef DEBUG | 803 DCHECK(ObjectMarking::IsGrey(obj) || |
815 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 804 (obj->IsFiller() && ObjectMarking::IsWhite(obj))); |
816 DCHECK(Marking::IsGrey(mark_bit) || | |
817 (obj->IsFiller() && Marking::IsWhite(mark_bit))); | |
818 #endif | |
819 } | 805 } |
820 } else if (obj->map() != filler_map) { | 806 } else if (obj->map() != filler_map) { |
821 // Skip one word filler objects that appear on the | 807 // Skip one word filler objects that appear on the |
822 // stack when we perform in place array shift. | 808 // stack when we perform in place array shift. |
823 array[new_top] = obj; | 809 array[new_top] = obj; |
824 new_top = ((new_top + 1) & mask); | 810 new_top = ((new_top + 1) & mask); |
825 DCHECK(new_top != marking_deque->bottom()); | 811 DCHECK(new_top != marking_deque->bottom()); |
826 #ifdef DEBUG | 812 DCHECK(ObjectMarking::IsGrey(obj) || |
827 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 813 (obj->IsFiller() && ObjectMarking::IsWhite(obj)) || |
828 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 814 (MemoryChunk::FromAddress(obj->address()) |
829 DCHECK(Marking::IsGrey(mark_bit) || | 815 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
830 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || | 816 ObjectMarking::IsBlack(obj))); |
831 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | |
832 Marking::IsBlack(mark_bit))); | |
833 #endif | |
834 } | 817 } |
835 } | 818 } |
836 marking_deque->set_top(new_top); | 819 marking_deque->set_top(new_top); |
837 } | 820 } |
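Note: the two DEBUG-only blocks above become plain DCHECKs on the new ObjectMarking predicates, presumably because no temporary MarkBit needs to be named any more. For the surrounding logic, a simplified sketch of how the deque is filtered after a scavenge, with stand-in types (the forwarding pointer and the boolean flags stand in for map_word / InFromSpace / filler-map checks):

#include <vector>

enum class Color { kWhite, kGrey, kBlack };

struct Obj {
  Color color = Color::kWhite;
  bool in_from_space = false;
  bool is_filler = false;
  Obj* forwarding = nullptr;  // set once the scavenger has moved the object
};

// Entries for evacuated objects are rewritten to their forwarding address
// (unless that copy is already black); dead entries and one-word fillers
// left by in-place array shifts are dropped.
inline void UpdateDequeAfterScavengeSketch(std::vector<Obj*>* deque) {
  std::vector<Obj*> kept;
  for (Obj* obj : *deque) {
    if (obj->in_from_space) {
      if (obj->forwarding == nullptr) continue;    // object died in the scavenge
      Obj* dest = obj->forwarding;
      if (dest->color == Color::kBlack) continue;  // already fully scanned
      kept.push_back(dest);
    } else if (!obj->is_filler) {
      kept.push_back(obj);  // old-space entry, kept as-is
    }
  }
  deque->swap(kept);
}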
838 | 821 |
839 | 822 |
840 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 823 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
841 MarkGrey(heap_, map); | 824 MarkGrey(heap_, map); |
842 | 825 |
843 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 826 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
844 | 827 |
845 #if ENABLE_SLOW_DCHECKS | 828 #if ENABLE_SLOW_DCHECKS |
846 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 829 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
847 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 830 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
848 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 831 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
849 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || | 832 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || |
850 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 833 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
851 Marking::IsBlack(mark_bit))); | 834 Marking::IsBlack(mark_bit))); |
852 #endif | 835 #endif |
853 MarkBlack(obj, size); | 836 MarkBlack(obj, size); |
854 } | 837 } |
855 | 838 |
856 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { | 839 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { |
857 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); | 840 if (ObjectMarking::IsWhite(object)) { |
858 if (Marking::IsWhite(mark_bit)) { | 841 heap->incremental_marking()->WhiteToGreyAndPush(object); |
859 heap->incremental_marking()->WhiteToGreyAndPush(object, mark_bit); | |
860 } | 842 } |
861 } | 843 } |
862 | 844 |
863 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { | 845 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { |
864 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 846 if (ObjectMarking::IsBlack(obj)) return; |
865 if (Marking::IsBlack(mark_bit)) return; | 847 ObjectMarking::GreyToBlack(obj); |
866 Marking::GreyToBlack(mark_bit); | |
867 MemoryChunk::IncrementLiveBytes(obj, size); | |
868 } | 848 } |
869 | 849 |
870 intptr_t IncrementalMarking::ProcessMarkingDeque( | 850 intptr_t IncrementalMarking::ProcessMarkingDeque( |
871 intptr_t bytes_to_process, ForceCompletionAction completion) { | 851 intptr_t bytes_to_process, ForceCompletionAction completion) { |
872 intptr_t bytes_processed = 0; | 852 intptr_t bytes_processed = 0; |
873 MarkingDeque* marking_deque = | 853 MarkingDeque* marking_deque = |
874 heap_->mark_compact_collector()->marking_deque(); | 854 heap_->mark_compact_collector()->marking_deque(); |
875 while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process || | 855 while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process || |
876 completion == FORCE_COMPLETION)) { | 856 completion == FORCE_COMPLETION)) { |
877 HeapObject* obj = marking_deque->Pop(); | 857 HeapObject* obj = marking_deque->Pop(); |
878 | 858 |
879 // Left trimming may result in white filler objects on the marking deque. | 859 // Left trimming may result in white filler objects on the marking deque. |
880 // Ignore these objects. | 860 // Ignore these objects. |
881 if (obj->IsFiller()) { | 861 if (obj->IsFiller()) { |
882 DCHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(obj)) || | 862 DCHECK(ObjectMarking::IsImpossible(obj) || ObjectMarking::IsWhite(obj)); |
883 Marking::IsWhite(ObjectMarking::MarkBitFrom(obj))); | |
884 continue; | 863 continue; |
885 } | 864 } |
886 | 865 |
887 Map* map = obj->map(); | 866 Map* map = obj->map(); |
888 int size = obj->SizeFromMap(map); | 867 int size = obj->SizeFromMap(map); |
889 unscanned_bytes_of_large_object_ = 0; | 868 unscanned_bytes_of_large_object_ = 0; |
890 VisitObject(map, obj, size); | 869 VisitObject(map, obj, size); |
891 bytes_processed += size - unscanned_bytes_of_large_object_; | 870 bytes_processed += size - unscanned_bytes_of_large_object_; |
892 } | 871 } |
893 // Report all found wrappers to the embedder. This is necessary as the | 872 // Report all found wrappers to the embedder. This is necessary as the |
(...skipping 34 matching lines...) |
928 } | 907 } |
929 } | 908 } |
930 | 909 |
931 Object* context = heap_->native_contexts_list(); | 910 Object* context = heap_->native_contexts_list(); |
932 while (!context->IsUndefined(heap_->isolate())) { | 911 while (!context->IsUndefined(heap_->isolate())) { |
933 // GC can happen when the context is not fully initialized, | 912 // GC can happen when the context is not fully initialized, |
934 // so the cache can be undefined. | 913 // so the cache can be undefined. |
935 HeapObject* cache = HeapObject::cast( | 914 HeapObject* cache = HeapObject::cast( |
936 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 915 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
937 if (!cache->IsUndefined(heap_->isolate())) { | 916 if (!cache->IsUndefined(heap_->isolate())) { |
938 MarkBit mark_bit = ObjectMarking::MarkBitFrom(cache); | 917 if (ObjectMarking::IsGrey(cache)) { |
939 if (Marking::IsGrey(mark_bit)) { | 918 ObjectMarking::GreyToBlack(cache); |
940 Marking::GreyToBlack(mark_bit); | |
941 MemoryChunk::IncrementLiveBytes(cache, cache->Size()); | |
942 } | 919 } |
943 } | 920 } |
944 context = Context::cast(context)->next_context_link(); | 921 context = Context::cast(context)->next_context_link(); |
945 } | 922 } |
946 } | 923 } |
947 | 924 |
948 | 925 |
949 void IncrementalMarking::Stop() { | 926 void IncrementalMarking::Stop() { |
950 if (IsStopped()) return; | 927 if (IsStopped()) return; |
951 if (FLAG_trace_incremental_marking) { | 928 if (FLAG_trace_incremental_marking) { |
(...skipping 260 matching lines...) |
1212 idle_marking_delay_counter_++; | 1189 idle_marking_delay_counter_++; |
1213 } | 1190 } |
1214 | 1191 |
1215 | 1192 |
1216 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1193 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
1217 idle_marking_delay_counter_ = 0; | 1194 idle_marking_delay_counter_ = 0; |
1218 } | 1195 } |
1219 | 1196 |
1220 } // namespace internal | 1197 } // namespace internal |
1221 } // namespace v8 | 1198 } // namespace v8 |