OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
10 #include "src/heap/gc-idle-time-handler.h" | 10 #include "src/heap/gc-idle-time-handler.h" |
(...skipping 22 matching lines...) |
33 was_activated_(false), | 33 was_activated_(false), |
34 black_allocation_(false), | 34 black_allocation_(false), |
35 finalize_marking_completed_(false), | 35 finalize_marking_completed_(false), |
36 trace_wrappers_toggle_(false), | 36 trace_wrappers_toggle_(false), |
37 request_type_(NONE), | 37 request_type_(NONE), |
38 new_generation_observer_(*this, kAllocatedThreshold), | 38 new_generation_observer_(*this, kAllocatedThreshold), |
39 old_generation_observer_(*this, kAllocatedThreshold) {} | 39 old_generation_observer_(*this, kAllocatedThreshold) {} |
40 | 40 |
41 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 41 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
42 HeapObject* value_heap_obj = HeapObject::cast(value); | 42 HeapObject* value_heap_obj = HeapObject::cast(value); |
43 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj)); | 43 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, |
44 DCHECK(!ObjectMarking::IsImpossible(obj)); | 44 MarkingState::Internal(value_heap_obj))); |
45 const bool is_black = ObjectMarking::IsBlack(obj); | 45 DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj))); |
| 46 const bool is_black = |
| 47 ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)); |
46 | 48 |
47 if (is_black && ObjectMarking::IsWhite(value_heap_obj)) { | 49 if (is_black && ObjectMarking::IsWhite( |
| 50 value_heap_obj, MarkingState::Internal(value_heap_obj))) { |
48 WhiteToGreyAndPush(value_heap_obj); | 51 WhiteToGreyAndPush(value_heap_obj); |
49 RestartIfNotMarking(); | 52 RestartIfNotMarking(); |
50 } | 53 } |
51 return is_compacting_ && is_black; | 54 return is_compacting_ && is_black; |
52 } | 55 } |
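Note: throughout this patch, the ObjectMarking predicates gain an explicit MarkingState argument (MarkingState::Internal(obj)) instead of deriving the state from the object alone; the barrier logic itself is unchanged. For readers new to the barrier, a minimal standalone model of it follows, using hypothetical Color/Object/Worklist stand-ins rather than V8's real MarkBit and marking-deque types:

#include <deque>

enum class Color { kWhite, kGrey, kBlack };

struct Object {
  Color color = Color::kWhite;
};

struct Worklist {
  std::deque<Object*> grey_objects;
  void Push(Object* obj) { grey_objects.push_back(obj); }
};

// Mirrors BaseRecordWrite: if a black host starts pointing at a white
// value, grey the value and push it so the marker revisits it. The
// return value tells the caller whether the slot still needs recording.
bool BaseRecordWrite(Object* host, Object* value, Worklist* worklist,
                     bool is_compacting) {
  const bool host_is_black = (host->color == Color::kBlack);
  if (host_is_black && value->color == Color::kWhite) {
    value->color = Color::kGrey;  // WhiteToGreyAndPush
    worklist->Push(value);
  }
  return is_compacting && host_is_black;
}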
53 | 56 |
54 | 57 |
55 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 58 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
56 Object* value) { | 59 Object* value) { |
57 if (BaseRecordWrite(obj, value) && slot != NULL) { | 60 if (BaseRecordWrite(obj, value) && slot != NULL) { |
(...skipping 52 matching lines...) |
110 | 113 |
111 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 114 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
112 Object* value) { | 115 Object* value) { |
113 if (BaseRecordWrite(host, value)) { | 116 if (BaseRecordWrite(host, value)) { |
114 // Object is not going to be rescanned. We need to record the slot. | 117 // Object is not going to be rescanned. We need to record the slot. |
115 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 118 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
116 } | 119 } |
117 } | 120 } |
118 | 121 |
119 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 122 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { |
120 ObjectMarking::WhiteToGrey(obj); | 123 ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj)); |
121 heap_->mark_compact_collector()->marking_deque()->Push(obj); | 124 heap_->mark_compact_collector()->marking_deque()->Push(obj); |
122 } | 125 } |
123 | 126 |
124 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 127 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
125 HeapObject* to) { | 128 HeapObject* to) { |
126 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | 129 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); |
127 // This is only used when resizing an object. | 130 // This is only used when resizing an object. |
128 DCHECK(MemoryChunk::FromAddress(from->address()) == | 131 DCHECK(MemoryChunk::FromAddress(from->address()) == |
129 MemoryChunk::FromAddress(to->address())); | 132 MemoryChunk::FromAddress(to->address())); |
130 | 133 |
131 if (!heap->incremental_marking()->IsMarking()) return; | 134 if (!heap->incremental_marking()->IsMarking()) return; |
132 | 135 |
133 // If the mark doesn't move, we don't check the color of the object. | 136 // If the mark doesn't move, we don't check the color of the object. |
134 // It doesn't matter whether the object is black, since it hasn't changed | 137 // It doesn't matter whether the object is black, since it hasn't changed |
135 // size, so the adjustment to the live data count will be zero anyway. | 138 // size, so the adjustment to the live data count will be zero anyway. |
136 if (from == to) return; | 139 if (from == to) return; |
137 | 140 |
138 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to); | 141 MarkBit new_mark_bit = |
139 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from); | 142 ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to)); |
| 143 MarkBit old_mark_bit = |
| 144 ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from)); |
140 | 145 |
141 if (Marking::IsBlack(old_mark_bit)) { | 146 if (Marking::IsBlack(old_mark_bit)) { |
142 Marking::MarkBlack(new_mark_bit); | 147 Marking::MarkBlack(new_mark_bit); |
143 } else if (Marking::IsGrey(old_mark_bit)) { | 148 } else if (Marking::IsGrey(old_mark_bit)) { |
144 Marking::WhiteToGrey(new_mark_bit); | 149 Marking::WhiteToGrey(new_mark_bit); |
145 heap->mark_compact_collector()->marking_deque()->Push(to); | 150 heap->mark_compact_collector()->marking_deque()->Push(to); |
146 heap->incremental_marking()->RestartIfNotMarking(); | 151 heap->incremental_marking()->RestartIfNotMarking(); |
147 } | 152 } |
148 } | 153 } |
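A sketch of the transfer rule above, reusing the simplified Color/Object/Worklist model from the BaseRecordWrite note (hypothetical types, not V8's MarkBit API): black marks are copied, grey marks are copied and the object is re-pushed so its resized body gets scanned.

void TransferMark(Object* from, Object* to, Worklist* worklist) {
  if (from == to) return;  // Mark bit does not move; nothing to adjust.
  if (from->color == Color::kBlack) {
    to->color = Color::kBlack;  // Fully scanned stays fully scanned.
  } else if (from->color == Color::kGrey) {
    to->color = Color::kGrey;   // Still needs scanning: re-push so the
    worklist->Push(to);         // marker visits the object's new layout.
  }
}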
149 | 154 |
(...skipping 28 matching lines...) |
178 do { | 183 do { |
179 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), | 184 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), |
180 HeapObject::RawField(object, end_offset)); | 185 HeapObject::RawField(object, end_offset)); |
181 start_offset = end_offset; | 186 start_offset = end_offset; |
182 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); | 187 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); |
183 scan_until_end = | 188 scan_until_end = |
184 heap->mark_compact_collector()->marking_deque()->IsFull(); | 189 heap->mark_compact_collector()->marking_deque()->IsFull(); |
185 } while (scan_until_end && start_offset < object_size); | 190 } while (scan_until_end && start_offset < object_size); |
186 chunk->set_progress_bar(start_offset); | 191 chunk->set_progress_bar(start_offset); |
187 if (start_offset < object_size) { | 192 if (start_offset < object_size) { |
188 if (ObjectMarking::IsGrey(object)) { | 193 if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) { |
189 heap->mark_compact_collector()->marking_deque()->Unshift(object); | 194 heap->mark_compact_collector()->marking_deque()->Unshift(object); |
190 } else { | 195 } else { |
191 DCHECK(ObjectMarking::IsBlack(object)); | 196 DCHECK( |
| 197 ObjectMarking::IsBlack(object, MarkingState::Internal(object))); |
192 heap->mark_compact_collector()->UnshiftBlack(object); | 198 heap->mark_compact_collector()->UnshiftBlack(object); |
193 } | 199 } |
194 heap->incremental_marking()->NotifyIncompleteScanOfObject( | 200 heap->incremental_marking()->NotifyIncompleteScanOfObject( |
195 object_size - (start_offset - already_scanned_offset)); | 201 object_size - (start_offset - already_scanned_offset)); |
196 } | 202 } |
197 } else { | 203 } else { |
198 FixedArrayVisitor::Visit(map, object); | 204 FixedArrayVisitor::Visit(map, object); |
199 } | 205 } |
200 } | 206 } |
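The progress bar lets one oversized FixedArray be scanned across several marking steps instead of blowing the step budget. A standalone sketch of that resumable loop, with made-up names (kChunk, ScanLargeObjectStep) and the actual pointer visits elided:

#include <algorithm>

constexpr int kChunk = 32 * 1024;  // bytes per increment, playing the
                                   // role of kProgressBarScanningChunk

// 'progress' persists between steps, like MemoryChunk::progress_bar().
// Returns the number of bytes scanned in this step.
int ScanLargeObjectStep(int object_size, int* progress) {
  int start = *progress;
  int end = std::min(object_size, start + kChunk);
  // ... visit pointers in [start, end) here ...
  *progress = end;
  return end - start;
}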
201 | 207 |
202 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { | 208 static void VisitNativeContextIncremental(Map* map, HeapObject* object) { |
203 Context* context = Context::cast(object); | 209 Context* context = Context::cast(object); |
204 | 210 |
205 // We will mark the cache black with a separate pass when we finish marking. | 211 // We will mark the cache black with a separate pass when we finish marking. |
206 // Note that GC can happen when the context is not fully initialized, | 212 // Note that GC can happen when the context is not fully initialized, |
207 // so the cache can be undefined. | 213 // so the cache can be undefined. |
208 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 214 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
209 if (!cache->IsUndefined(map->GetIsolate())) { | 215 if (!cache->IsUndefined(map->GetIsolate())) { |
210 if (cache->IsHeapObject()) { | 216 if (cache->IsHeapObject()) { |
211 HeapObject* heap_obj = HeapObject::cast(cache); | 217 HeapObject* heap_obj = HeapObject::cast(cache); |
212 // Mark the object grey if it is white, do not enqueue it into the marking | 218 // Mark the object grey if it is white, do not enqueue it into the marking |
213 // deque. | 219 // deque. |
214 if (ObjectMarking::IsWhite(heap_obj)) { | 220 if (ObjectMarking::IsWhite(heap_obj, |
215 ObjectMarking::WhiteToGrey(heap_obj); | 221 MarkingState::Internal(heap_obj))) { |
| 222 ObjectMarking::WhiteToGrey(heap_obj, |
| 223 MarkingState::Internal(heap_obj)); |
216 } | 224 } |
217 } | 225 } |
218 } | 226 } |
219 VisitNativeContext(map, context); | 227 VisitNativeContext(map, context); |
220 } | 228 } |
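The NORMALIZED_MAP_CACHE handling above relies on "grey but never pushed": the cache is kept alive without being scanned now, and a separate pass (the GreyToBlack loop over native contexts later in this file) blackens it at finalization. In the Color model from the BaseRecordWrite sketch:

// Reusing the Color/Object definitions from the BaseRecordWrite sketch.
void GreyCacheWithoutPush(Object* cache) {
  if (cache->color == Color::kWhite) cache->color = Color::kGrey;
  // Deliberately not pushed: the cache's contents are never scanned.
}

// Later, at finalization (see the native-context loop further down):
void BlackenCache(Object* cache) {
  if (cache->color == Color::kGrey) cache->color = Color::kBlack;
}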
221 | 229 |
222 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { | 230 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { |
223 Object* target = *p; | 231 Object* target = *p; |
224 if (target->IsHeapObject()) { | 232 if (target->IsHeapObject()) { |
225 heap->mark_compact_collector()->RecordSlot(object, p, target); | 233 heap->mark_compact_collector()->RecordSlot(object, p, target); |
(...skipping 14 matching lines...) |
240 | 248 |
241 // Marks the object grey and pushes it on the marking stack. | 249 // Marks the object grey and pushes it on the marking stack. |
242 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 250 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
243 IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj)); | 251 IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj)); |
244 } | 252 } |
245 | 253 |
246 // Marks the object black without pushing it on the marking stack. | 254 // Marks the object black without pushing it on the marking stack. |
247 // Returns true if object needed marking and false otherwise. | 255 // Returns true if object needed marking and false otherwise. |
248 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 256 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
249 HeapObject* heap_object = HeapObject::cast(obj); | 257 HeapObject* heap_object = HeapObject::cast(obj); |
250 if (ObjectMarking::IsWhite(heap_object)) { | 258 if (ObjectMarking::IsWhite(heap_object, |
251 ObjectMarking::WhiteToBlack(heap_object); | 259 MarkingState::Internal(heap_object))) { |
| 260 ObjectMarking::WhiteToBlack(heap_object, |
| 261 MarkingState::Internal(heap_object)); |
252 return true; | 262 return true; |
253 } | 263 } |
254 return false; | 264 return false; |
255 } | 265 } |
256 }; | 266 }; |
257 | 267 |
258 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 268 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
259 if (IsMarking() && ObjectMarking::IsBlack(object)) { | 269 if (IsMarking() && |
| 270 ObjectMarking::IsBlack(object, MarkingState::Internal(object))) { |
260 Page* page = Page::FromAddress(object->address()); | 271 Page* page = Page::FromAddress(object->address()); |
261 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 272 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
262 // IterateBlackObject requires us to visit the whole object. | 273 // IterateBlackObject requires us to visit the whole object. |
263 page->ResetProgressBar(); | 274 page->ResetProgressBar(); |
264 } | 275 } |
265 Map* map = object->map(); | 276 Map* map = object->map(); |
266 MarkGrey(heap_, map); | 277 MarkGrey(heap_, map); |
267 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 278 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
268 } | 279 } |
269 } | 280 } |
(...skipping 316 matching lines...) |
586 Object* weak_cell_obj = heap()->encountered_weak_cells(); | 597 Object* weak_cell_obj = heap()->encountered_weak_cells(); |
587 Object* weak_cell_head = Smi::kZero; | 598 Object* weak_cell_head = Smi::kZero; |
588 WeakCell* prev_weak_cell_obj = NULL; | 599 WeakCell* prev_weak_cell_obj = NULL; |
589 while (weak_cell_obj != Smi::kZero) { | 600 while (weak_cell_obj != Smi::kZero) { |
590 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 601 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
591 // We do not insert cleared weak cells into the list, so the value | 602 // We do not insert cleared weak cells into the list, so the value |
592 // cannot be a Smi here. | 603 // cannot be a Smi here. |
593 HeapObject* value = HeapObject::cast(weak_cell->value()); | 604 HeapObject* value = HeapObject::cast(weak_cell->value()); |
594 // Remove weak cells with live objects from the list; they do not need | 605 // Remove weak cells with live objects from the list; they do not need |
595 // clearing. | 606 // clearing. |
596 if (ObjectMarking::IsBlackOrGrey(value)) { | 607 if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) { |
597 // Record slot, if value is pointing to an evacuation candidate. | 608 // Record slot, if value is pointing to an evacuation candidate. |
598 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); | 609 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); |
599 heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); | 610 heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); |
600 // Remove entry somewhere after top. | 611 // Remove entry somewhere after top. |
601 if (prev_weak_cell_obj != NULL) { | 612 if (prev_weak_cell_obj != NULL) { |
602 prev_weak_cell_obj->set_next(weak_cell->next()); | 613 prev_weak_cell_obj->set_next(weak_cell->next()); |
603 } | 614 } |
604 weak_cell_obj = weak_cell->next(); | 615 weak_cell_obj = weak_cell->next(); |
605 weak_cell->clear_next(the_hole_value); | 616 weak_cell->clear_next(the_hole_value); |
606 } else { | 617 } else { |
607 if (weak_cell_head == Smi::kZero) { | 618 if (weak_cell_head == Smi::kZero) { |
608 weak_cell_head = weak_cell; | 619 weak_cell_head = weak_cell; |
609 } | 620 } |
610 prev_weak_cell_obj = weak_cell; | 621 prev_weak_cell_obj = weak_cell; |
611 weak_cell_obj = weak_cell->next(); | 622 weak_cell_obj = weak_cell->next(); |
612 } | 623 } |
613 } | 624 } |
614 // Top may have changed. | 625 // Top may have changed. |
615 heap()->set_encountered_weak_cells(weak_cell_head); | 626 heap()->set_encountered_weak_cells(weak_cell_head); |
616 } | 627 } |
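The loop above is an in-place filter over the singly linked weak-cell list: cells whose value is already marked are unlinked (they need no clearing), the rest stay chained. A standalone sketch with hypothetical simplified types:

struct WeakCell {
  bool value_is_marked;  // stands in for IsBlackOrGrey(value)
  WeakCell* next;
};

// Returns the new head. Only cells with still-white values remain
// linked, because only those must be cleared if their value dies.
WeakCell* FilterWeakCells(WeakCell* head) {
  WeakCell* new_head = nullptr;
  WeakCell* prev = nullptr;
  for (WeakCell* cell = head; cell != nullptr;) {
    WeakCell* next = cell->next;
    if (cell->value_is_marked) {
      if (prev != nullptr) prev->next = next;  // unlink live entry
    } else {
      if (new_head == nullptr) new_head = cell;
      prev = cell;
    }
    cell = next;
  }
  return new_head;
}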
617 | 628 |
618 | 629 |
619 bool ShouldRetainMap(Map* map, int age) { | 630 bool ShouldRetainMap(Map* map, int age) { |
620 if (age == 0) { | 631 if (age == 0) { |
621 // The map has aged. Do not retain this map. | 632 // The map has aged. Do not retain this map. |
622 return false; | 633 return false; |
623 } | 634 } |
624 Object* constructor = map->GetConstructor(); | 635 Object* constructor = map->GetConstructor(); |
625 if (!constructor->IsHeapObject() || | 636 if (!constructor->IsHeapObject() || |
626 ObjectMarking::IsWhite(HeapObject::cast(constructor))) { | 637 ObjectMarking::IsWhite( |
| 638 HeapObject::cast(constructor), |
| 639 MarkingState::Internal(HeapObject::cast(constructor)))) { |
627 // The constructor is dead, no new objects with this map can | 640 // The constructor is dead, no new objects with this map can |
628 // be created. Do not retain this map. | 641 // be created. Do not retain this map. |
629 return false; | 642 return false; |
630 } | 643 } |
631 return true; | 644 return true; |
632 } | 645 } |
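Taken together with the loop below, the policy is: a live (marked) map resets its age; a dead map is retained for up to FLAG_retain_maps_for_n_gc cycles while its constructor is live, and ages only while its prototype is unmarked. A hedged sketch of that decision table (MapInfo, AgeRetainedMap, and kRetainForNGC are illustrative stand-ins, and the disabled-retention case is omitted):

constexpr int kRetainForNGC = 16;  // stand-in for FLAG_retain_maps_for_n_gc

struct MapInfo {
  bool is_marked;            // !ObjectMarking::IsWhite(map, ...)
  bool constructor_is_live;
  bool prototype_is_live;
};

// Returns the map's new age and whether to mark it grey this cycle.
int AgeRetainedMap(const MapInfo& map, int age, bool* retain_now) {
  if (map.is_marked) {
    *retain_now = false;
    return kRetainForNGC;  // live map: reset the age
  }
  *retain_now = (age > 0) && map.constructor_is_live;  // ShouldRetainMap
  // Age only while the prototype is dead; otherwise the map merely keeps
  // the transition tree alive and should not decay.
  if (age > 0 && !map.prototype_is_live) return age - 1;
  return age;
}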
633 | 646 |
634 | 647 |
635 void IncrementalMarking::RetainMaps() { | 648 void IncrementalMarking::RetainMaps() { |
636 // Do not retain dead maps if flag disables it or there is | 649 // Do not retain dead maps if flag disables it or there is |
637 // - memory pressure (reduce_memory_footprint_), | 650 // - memory pressure (reduce_memory_footprint_), |
638 // - GC is requested by tests or dev-tools (abort_incremental_marking_). | 651 // - GC is requested by tests or dev-tools (abort_incremental_marking_). |
639 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || | 652 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || |
640 heap()->ShouldAbortIncrementalMarking() || | 653 heap()->ShouldAbortIncrementalMarking() || |
641 FLAG_retain_maps_for_n_gc == 0; | 654 FLAG_retain_maps_for_n_gc == 0; |
642 ArrayList* retained_maps = heap()->retained_maps(); | 655 ArrayList* retained_maps = heap()->retained_maps(); |
643 int length = retained_maps->Length(); | 656 int length = retained_maps->Length(); |
644 // The number_of_disposed_maps separates maps in the retained_maps | 657 // The number_of_disposed_maps separates maps in the retained_maps |
645 // array that were created before and after context disposal. | 658 // array that were created before and after context disposal. |
646 // We do not age and retain disposed maps to avoid memory leaks. | 659 // We do not age and retain disposed maps to avoid memory leaks. |
647 int number_of_disposed_maps = heap()->number_of_disposed_maps_; | 660 int number_of_disposed_maps = heap()->number_of_disposed_maps_; |
648 for (int i = 0; i < length; i += 2) { | 661 for (int i = 0; i < length; i += 2) { |
649 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 662 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
650 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 663 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
651 if (cell->cleared()) continue; | 664 if (cell->cleared()) continue; |
652 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 665 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
653 int new_age; | 666 int new_age; |
654 Map* map = Map::cast(cell->value()); | 667 Map* map = Map::cast(cell->value()); |
655 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 668 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
656 ObjectMarking::IsWhite(map)) { | 669 ObjectMarking::IsWhite(map, MarkingState::Internal(map))) { |
657 if (ShouldRetainMap(map, age)) { | 670 if (ShouldRetainMap(map, age)) { |
658 MarkGrey(heap(), map); | 671 MarkGrey(heap(), map); |
659 } | 672 } |
660 Object* prototype = map->prototype(); | 673 Object* prototype = map->prototype(); |
661 if (age > 0 && prototype->IsHeapObject() && | 674 if (age > 0 && prototype->IsHeapObject() && |
662 ObjectMarking::IsWhite(HeapObject::cast(prototype))) { | 675 ObjectMarking::IsWhite( |
| 676 HeapObject::cast(prototype), |
| 677 MarkingState::Internal(HeapObject::cast(prototype)))) { |
663 // The prototype is not marked, age the map. | 678 // The prototype is not marked, age the map. |
664 new_age = age - 1; | 679 new_age = age - 1; |
665 } else { | 680 } else { |
666 // The prototype and the constructor are marked, this map keeps only | 681 // The prototype and the constructor are marked, this map keeps only |
667 // the transition tree alive, not JSObjects. Do not age the map. | 682 // the transition tree alive, not JSObjects. Do not age the map. |
668 new_age = age; | 683 new_age = age; |
669 } | 684 } |
670 } else { | 685 } else { |
671 new_age = FLAG_retain_maps_for_n_gc; | 686 new_age = FLAG_retain_maps_for_n_gc; |
672 } | 687 } |
(...skipping 83 matching lines...) |
756 // Only pointers to from space have to be updated. | 771 // Only pointers to from space have to be updated. |
757 if (heap_->InFromSpace(obj)) { | 772 if (heap_->InFromSpace(obj)) { |
758 MapWord map_word = obj->map_word(); | 773 MapWord map_word = obj->map_word(); |
759 // There may be objects on the marking deque that do not exist anymore, | 774 // There may be objects on the marking deque that do not exist anymore, |
760 // e.g. left trimmed objects or objects from the root set (frames). | 775 // e.g. left trimmed objects or objects from the root set (frames). |
761 // If these objects are dead at scavenging time, their marking deque | 776 // If these objects are dead at scavenging time, their marking deque |
762 // entries will not point to forwarding addresses. Hence, we can discard | 777 // entries will not point to forwarding addresses. Hence, we can discard |
763 // them. | 778 // them. |
764 if (map_word.IsForwardingAddress()) { | 779 if (map_word.IsForwardingAddress()) { |
765 HeapObject* dest = map_word.ToForwardingAddress(); | 780 HeapObject* dest = map_word.ToForwardingAddress(); |
766 if (ObjectMarking::IsBlack(dest)) continue; | 781 if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest))) |
| 782 continue; |
767 array[new_top] = dest; | 783 array[new_top] = dest; |
768 new_top = ((new_top + 1) & mask); | 784 new_top = ((new_top + 1) & mask); |
769 DCHECK(new_top != marking_deque->bottom()); | 785 DCHECK(new_top != marking_deque->bottom()); |
770 DCHECK(ObjectMarking::IsGrey(obj) || | 786 DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) || |
771 (obj->IsFiller() && ObjectMarking::IsWhite(obj))); | 787 (obj->IsFiller() && |
| 788 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)))); |
772 } | 789 } |
773 } else if (obj->map() != filler_map) { | 790 } else if (obj->map() != filler_map) { |
774 // Skip one-word filler objects that appear on the | 791 // Skip one-word filler objects that appear on the |
775 // stack when we perform an in-place array shift. | 792 // stack when we perform an in-place array shift. |
776 array[new_top] = obj; | 793 array[new_top] = obj; |
777 new_top = ((new_top + 1) & mask); | 794 new_top = ((new_top + 1) & mask); |
778 DCHECK(new_top != marking_deque->bottom()); | 795 DCHECK(new_top != marking_deque->bottom()); |
779 DCHECK(ObjectMarking::IsGrey(obj) || | 796 DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) || |
780 (obj->IsFiller() && ObjectMarking::IsWhite(obj)) || | 797 (obj->IsFiller() && |
| 798 ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) || |
781 (MemoryChunk::FromAddress(obj->address()) | 799 (MemoryChunk::FromAddress(obj->address()) |
782 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 800 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
783 ObjectMarking::IsBlack(obj))); | 801 ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)))); |
784 } | 802 } |
785 } | 803 } |
786 marking_deque->set_top(new_top); | 804 marking_deque->set_top(new_top); |
787 } | 805 } |
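A standalone sketch of the fix-up above: after a scavenge each deque entry is dead (discarded), forwarded (retargeted, unless its copy is already black), or still valid in place. HeapObj and its fields are hypothetical stand-ins for V8's MapWord forwarding protocol, and the one-word-filler case is folded away:

#include <vector>

struct HeapObj {
  HeapObj* forwarded = nullptr;  // non-null: object moved during scavenge
  bool in_from_space = false;
  bool is_black = false;
};

void UpdateDequeAfterScavenge(std::vector<HeapObj*>* deque) {
  std::vector<HeapObj*> kept;
  for (HeapObj* obj : *deque) {
    if (obj->in_from_space) {
      if (obj->forwarded == nullptr) continue;  // dead: drop the entry
      if (obj->forwarded->is_black) continue;   // already scanned: drop
      kept.push_back(obj->forwarded);           // retarget to new address
    } else {
      kept.push_back(obj);  // old-space entries stay valid in place
    }
  }
  deque->swap(kept);
}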
788 | 806 |
789 | 807 |
790 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 808 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
791 MarkGrey(heap_, map); | 809 MarkGrey(heap_, map); |
792 | 810 |
793 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 811 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
794 | 812 |
795 #if ENABLE_SLOW_DCHECKS | 813 #if ENABLE_SLOW_DCHECKS |
796 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 814 MarkBit mark_bit = |
| 815 ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj)); |
797 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 816 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
798 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 817 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
799 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 818 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
800 Marking::IsBlack(mark_bit))); | 819 Marking::IsBlack(mark_bit))); |
801 #endif | 820 #endif |
802 MarkBlack(obj, size); | 821 MarkBlack(obj, size); |
803 } | 822 } |
804 | 823 |
805 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { | 824 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { |
806 if (ObjectMarking::IsWhite(object)) { | 825 if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) { |
807 heap->incremental_marking()->WhiteToGreyAndPush(object); | 826 heap->incremental_marking()->WhiteToGreyAndPush(object); |
808 } | 827 } |
809 } | 828 } |
810 | 829 |
811 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { | 830 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { |
812 if (ObjectMarking::IsBlack(obj)) return; | 831 if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return; |
813 ObjectMarking::GreyToBlack(obj); | 832 ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj)); |
814 } | 833 } |
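These two transitions are the whole per-object state machine of the marker; in the Color model from the BaseRecordWrite sketch:

// Reusing Color/Object/Worklist from the BaseRecordWrite sketch.
void MarkGrey(Object* obj, Worklist* worklist) {
  if (obj->color == Color::kWhite) {  // only white objects are lifted,
    obj->color = Color::kGrey;        // so each object is pushed at
    worklist->Push(obj);              // most once
  }
}

void MarkBlack(Object* obj) {
  if (obj->color == Color::kBlack) return;  // idempotent
  obj->color = Color::kBlack;               // GreyToBlack in the real code
}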
815 | 834 |
816 intptr_t IncrementalMarking::ProcessMarkingDeque( | 835 intptr_t IncrementalMarking::ProcessMarkingDeque( |
817 intptr_t bytes_to_process, ForceCompletionAction completion) { | 836 intptr_t bytes_to_process, ForceCompletionAction completion) { |
818 intptr_t bytes_processed = 0; | 837 intptr_t bytes_processed = 0; |
819 MarkingDeque* marking_deque = | 838 MarkingDeque* marking_deque = |
820 heap_->mark_compact_collector()->marking_deque(); | 839 heap_->mark_compact_collector()->marking_deque(); |
821 while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process || | 840 while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process || |
822 completion == FORCE_COMPLETION)) { | 841 completion == FORCE_COMPLETION)) { |
823 HeapObject* obj = marking_deque->Pop(); | 842 HeapObject* obj = marking_deque->Pop(); |
824 | 843 |
825 // Left trimming may result in white, grey, or black filler objects on the | 844 // Left trimming may result in white, grey, or black filler objects on the |
826 // marking deque. Ignore these objects. | 845 // marking deque. Ignore these objects. |
827 if (obj->IsFiller()) { | 846 if (obj->IsFiller()) { |
828 DCHECK(!ObjectMarking::IsImpossible(obj)); | 847 DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj))); |
829 continue; | 848 continue; |
830 } | 849 } |
831 | 850 |
832 Map* map = obj->map(); | 851 Map* map = obj->map(); |
833 int size = obj->SizeFromMap(map); | 852 int size = obj->SizeFromMap(map); |
834 unscanned_bytes_of_large_object_ = 0; | 853 unscanned_bytes_of_large_object_ = 0; |
835 VisitObject(map, obj, size); | 854 VisitObject(map, obj, size); |
836 bytes_processed += size - unscanned_bytes_of_large_object_; | 855 bytes_processed += size - unscanned_bytes_of_large_object_; |
837 } | 856 } |
838 // Report all found wrappers to the embedder. This is necessary as the | 857 // Report all found wrappers to the embedder. This is necessary as the |
(...skipping 34 matching lines...) |
873 } | 892 } |
874 } | 893 } |
875 | 894 |
876 Object* context = heap_->native_contexts_list(); | 895 Object* context = heap_->native_contexts_list(); |
877 while (!context->IsUndefined(heap_->isolate())) { | 896 while (!context->IsUndefined(heap_->isolate())) { |
878 // GC can happen when the context is not fully initialized, | 897 // GC can happen when the context is not fully initialized, |
879 // so the cache can be undefined. | 898 // so the cache can be undefined. |
880 HeapObject* cache = HeapObject::cast( | 899 HeapObject* cache = HeapObject::cast( |
881 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 900 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
882 if (!cache->IsUndefined(heap_->isolate())) { | 901 if (!cache->IsUndefined(heap_->isolate())) { |
883 if (ObjectMarking::IsGrey(cache)) { | 902 if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) { |
884 ObjectMarking::GreyToBlack(cache); | 903 ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache)); |
885 } | 904 } |
886 } | 905 } |
887 context = Context::cast(context)->next_context_link(); | 906 context = Context::cast(context)->next_context_link(); |
888 } | 907 } |
889 } | 908 } |
890 | 909 |
891 | 910 |
892 void IncrementalMarking::Stop() { | 911 void IncrementalMarking::Stop() { |
893 if (IsStopped()) return; | 912 if (IsStopped()) return; |
894 if (FLAG_trace_incremental_marking) { | 913 if (FLAG_trace_incremental_marking) { |
(...skipping 260 matching lines...) |
1155 idle_marking_delay_counter_++; | 1174 idle_marking_delay_counter_++; |
1156 } | 1175 } |
1157 | 1176 |
1158 | 1177 |
1159 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1178 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
1160 idle_marking_delay_counter_ = 0; | 1179 idle_marking_delay_counter_ = 0; |
1161 } | 1180 } |
1162 | 1181 |
1163 } // namespace internal | 1182 } // namespace internal |
1164 } // namespace v8 | 1183 } // namespace v8 |