| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/incremental-marking.h" | 5 #include "src/heap/incremental-marking.h" |
| 6 | 6 |
| 7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
| 8 #include "src/compilation-cache.h" | 8 #include "src/compilation-cache.h" |
| 9 #include "src/conversions.h" | 9 #include "src/conversions.h" |
| 10 #include "src/heap/gc-idle-time-handler.h" | 10 #include "src/heap/gc-idle-time-handler.h" |
| (...skipping 21 matching lines...) |
| 32 was_activated_(false), | 32 was_activated_(false), |
| 33 black_allocation_(false), | 33 black_allocation_(false), |
| 34 finalize_marking_completed_(false), | 34 finalize_marking_completed_(false), |
| 35 trace_wrappers_toggle_(false), | 35 trace_wrappers_toggle_(false), |
| 36 request_type_(NONE), | 36 request_type_(NONE), |
| 37 new_generation_observer_(*this, kAllocatedThreshold), | 37 new_generation_observer_(*this, kAllocatedThreshold), |
| 38 old_generation_observer_(*this, kAllocatedThreshold) {} | 38 old_generation_observer_(*this, kAllocatedThreshold) {} |
| 39 | 39 |
| 40 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { | 40 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { |
| 41 HeapObject* value_heap_obj = HeapObject::cast(value); | 41 HeapObject* value_heap_obj = HeapObject::cast(value); |
| 42 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj)); | 42 MarkBit value_bit = ObjectMarking::MarkBitFrom(value_heap_obj); |
| 43 DCHECK(!ObjectMarking::IsImpossible(obj)); | 43 DCHECK(!Marking::IsImpossible(value_bit)); |
| 44 const bool is_black = ObjectMarking::IsBlack(obj); | |
| 45 | 44 |
| 46 if (is_black && ObjectMarking::IsWhite(value_heap_obj)) { | 45 MarkBit obj_bit = ObjectMarking::MarkBitFrom(obj); |
| 47 WhiteToGreyAndPush(value_heap_obj); | 46 DCHECK(!Marking::IsImpossible(obj_bit)); |
| | 47 bool is_black = Marking::IsBlack(obj_bit); |
| | 48 |
| | 49 if (is_black && Marking::IsWhite(value_bit)) { |
| | 50 WhiteToGreyAndPush(value_heap_obj, value_bit); |
| 48 RestartIfNotMarking(); | 51 RestartIfNotMarking(); |
| 49 } | 52 } |
| 50 return is_compacting_ && is_black; | 53 return is_compacting_ && is_black; |
| 51 } | 54 } |
| 52 | 55 |
| 53 | 56 |
| 54 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, | 57 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, |
| 55 Object* value) { | 58 Object* value) { |
| 56 if (BaseRecordWrite(obj, value) && slot != NULL) { | 59 if (BaseRecordWrite(obj, value) && slot != NULL) { |
| 57 // Object is not going to be rescanned; we need to record the slot. | 60 // Object is not going to be rescanned; we need to record the slot. |
| (...skipping 50 matching lines...) |
| 108 } | 111 } |
| 109 | 112 |
| 110 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, | 113 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, |
| 111 Object* value) { | 114 Object* value) { |
| 112 if (BaseRecordWrite(host, value)) { | 115 if (BaseRecordWrite(host, value)) { |
| 113 // Object is not going to be rescanned. We need to record the slot. | 116 // Object is not going to be rescanned. We need to record the slot. |
| 114 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); | 117 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); |
| 115 } | 118 } |
| 116 } | 119 } |
| 117 | 120 |
| 118 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { | 121 |
| 119 ObjectMarking::WhiteToGrey(obj); | 122 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) { |
| | 123 Marking::WhiteToGrey(mark_bit); |
| 120 heap_->mark_compact_collector()->marking_deque()->Push(obj); | 124 heap_->mark_compact_collector()->marking_deque()->Push(obj); |
| 121 } | 125 } |
| 122 | 126 |
| 123 | 127 |
| 124 static void MarkObjectGreyDoNotEnqueue(Object* obj) { | 128 static void MarkObjectGreyDoNotEnqueue(Object* obj) { |
| 125 if (obj->IsHeapObject()) { | 129 if (obj->IsHeapObject()) { |
| 126 HeapObject* heap_obj = HeapObject::cast(obj); | 130 HeapObject* heap_obj = HeapObject::cast(obj); |
| 127 ObjectMarking::AnyToGrey(heap_obj); | 131 MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(obj)); |
| | 132 if (Marking::IsBlack(mark_bit)) { |
| | 133 MemoryChunk::IncrementLiveBytes(heap_obj, -heap_obj->Size()); |
| | 134 } |
| | 135 Marking::AnyToGrey(mark_bit); |
| 128 } | 136 } |
| 129 } | 137 } |
| 130 | 138 |
| 131 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, | 139 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, |
| 132 HeapObject* to) { | 140 HeapObject* to) { |
| 133 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); | |
| 134 // This is only used when resizing an object. | 141 // This is only used when resizing an object. |
| 135 DCHECK(MemoryChunk::FromAddress(from->address()) == | 142 DCHECK(MemoryChunk::FromAddress(from->address()) == |
| 136 MemoryChunk::FromAddress(to->address())); | 143 MemoryChunk::FromAddress(to->address())); |
| 137 | 144 |
| 138 if (!heap->incremental_marking()->IsMarking()) return; | 145 if (!heap->incremental_marking()->IsMarking()) return; |
| 139 | 146 |
| 140 // If the mark doesn't move, we don't check the color of the object. | 147 // If the mark doesn't move, we don't check the color of the object. |
| 141 // It doesn't matter whether the object is black, since it hasn't changed | 148 // It doesn't matter whether the object is black, since it hasn't changed |
| 142 // size, so the adjustment to the live data count will be zero anyway. | 149 // size, so the adjustment to the live data count will be zero anyway. |
| 143 if (from == to) return; | 150 if (from == to) return; |
| 144 | 151 |
| 145 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to); | 152 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to); |
| 146 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from); | 153 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from); |
| 147 | 154 |
| 148 #ifdef DEBUG | 155 #ifdef DEBUG |
| 149 Marking::ObjectColor old_color = Marking::Color(old_mark_bit); | 156 Marking::ObjectColor old_color = Marking::Color(old_mark_bit); |
| 150 #endif | 157 #endif |
| 151 | 158 |
| 152 if (Marking::IsBlack(old_mark_bit)) { | 159 if (Marking::IsBlack(old_mark_bit)) { |
| 153 Marking::BlackToWhite(old_mark_bit); | 160 Marking::BlackToWhite(old_mark_bit); |
| 154 Marking::WhiteToBlack(new_mark_bit); | 161 Marking::MarkBlack(new_mark_bit); |
| 155 return; | 162 return; |
| 156 } else if (Marking::IsGrey(old_mark_bit)) { | 163 } else if (Marking::IsGrey(old_mark_bit)) { |
| 157 Marking::GreyToWhite(old_mark_bit); | 164 Marking::GreyToWhite(old_mark_bit); |
| 158 Marking::WhiteToGrey(new_mark_bit); | 165 heap->incremental_marking()->WhiteToGreyAndPush(to, new_mark_bit); |
| 159 heap->mark_compact_collector()->marking_deque()->Push(to); | |
| 160 heap->incremental_marking()->RestartIfNotMarking(); | 166 heap->incremental_marking()->RestartIfNotMarking(); |
| 161 } | 167 } |
| 162 | 168 |
| 163 #ifdef DEBUG | 169 #ifdef DEBUG |
| 164 Marking::ObjectColor new_color = Marking::Color(new_mark_bit); | 170 Marking::ObjectColor new_color = Marking::Color(new_mark_bit); |
| 165 DCHECK(new_color == old_color); | 171 DCHECK(new_color == old_color); |
| 166 #endif | 172 #endif |
| 167 } | 173 } |
| 168 | 174 |
| 169 class IncrementalMarkingMarkingVisitor | 175 class IncrementalMarkingMarkingVisitor |
| (...skipping 27 matching lines...) |
| 197 do { | 203 do { |
| 198 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), | 204 VisitPointers(heap, object, HeapObject::RawField(object, start_offset), |
| 199 HeapObject::RawField(object, end_offset)); | 205 HeapObject::RawField(object, end_offset)); |
| 200 start_offset = end_offset; | 206 start_offset = end_offset; |
| 201 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); | 207 end_offset = Min(object_size, end_offset + kProgressBarScanningChunk); |
| 202 scan_until_end = | 208 scan_until_end = |
| 203 heap->mark_compact_collector()->marking_deque()->IsFull(); | 209 heap->mark_compact_collector()->marking_deque()->IsFull(); |
| 204 } while (scan_until_end && start_offset < object_size); | 210 } while (scan_until_end && start_offset < object_size); |
| 205 chunk->set_progress_bar(start_offset); | 211 chunk->set_progress_bar(start_offset); |
| 206 if (start_offset < object_size) { | 212 if (start_offset < object_size) { |
| 207 if (ObjectMarking::IsGrey(object)) { | 213 if (Marking::IsGrey(ObjectMarking::MarkBitFrom(object))) { |
| 208 heap->mark_compact_collector()->marking_deque()->Unshift(object); | 214 heap->mark_compact_collector()->marking_deque()->Unshift(object); |
| 209 } else { | 215 } else { |
| 210 DCHECK(ObjectMarking::IsBlack(object)); | 216 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); |
| 211 heap->mark_compact_collector()->UnshiftBlack(object); | 217 heap->mark_compact_collector()->UnshiftBlack(object); |
| 212 } | 218 } |
| 213 heap->incremental_marking()->NotifyIncompleteScanOfObject( | 219 heap->incremental_marking()->NotifyIncompleteScanOfObject( |
| 214 object_size - (start_offset - already_scanned_offset)); | 220 object_size - (start_offset - already_scanned_offset)); |
| 215 } | 221 } |
| 216 } else { | 222 } else { |
| 217 FixedArrayVisitor::Visit(map, object); | 223 FixedArrayVisitor::Visit(map, object); |
| 218 } | 224 } |
| 219 } | 225 } |
| 220 | 226 |
| (...skipping 31 matching lines...) |
| 252 | 258 |
| 253 // Marks the object grey and pushes it on the marking stack. | 259 // Marks the object grey and pushes it on the marking stack. |
| 254 INLINE(static void MarkObject(Heap* heap, Object* obj)) { | 260 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
| 255 IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj)); | 261 IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj)); |
| 256 } | 262 } |
| 257 | 263 |
| 258 // Marks the object black without pushing it on the marking stack. | 264 // Marks the object black without pushing it on the marking stack. |
| 259 // Returns true if object needed marking and false otherwise. | 265 // Returns true if object needed marking and false otherwise. |
| 260 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { | 266 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { |
| 261 HeapObject* heap_object = HeapObject::cast(obj); | 267 HeapObject* heap_object = HeapObject::cast(obj); |
| 262 if (ObjectMarking::IsWhite(heap_object)) { | 268 MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object); |
| 263 ObjectMarking::WhiteToBlack(heap_object); | 269 if (Marking::IsWhite(mark_bit)) { |
| | 270 Marking::MarkBlack(mark_bit); |
| | 271 MemoryChunk::IncrementLiveBytes(heap_object, heap_object->Size()); |
| 264 return true; | 272 return true; |
| 265 } | 273 } |
| 266 return false; | 274 return false; |
| 267 } | 275 } |
| 268 }; | 276 }; |
| 269 | 277 |
| 270 void IncrementalMarking::IterateBlackObject(HeapObject* object) { | 278 void IncrementalMarking::IterateBlackObject(HeapObject* object) { |
| 271 if (IsMarking() && ObjectMarking::IsBlack(object)) { | 279 if (IsMarking() && Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) { |
| 272 Page* page = Page::FromAddress(object->address()); | 280 Page* page = Page::FromAddress(object->address()); |
| 273 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { | 281 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { |
| 274 // IterateBlackObject requires us to visit the whole object. | 282 // IterateBlackObject requires us to visit the whole object. |
| 275 page->ResetProgressBar(); | 283 page->ResetProgressBar(); |
| 276 } | 284 } |
| 277 Map* map = object->map(); | 285 Map* map = object->map(); |
| 278 MarkGrey(heap_, map); | 286 MarkGrey(heap_, map); |
| 279 IncrementalMarkingMarkingVisitor::IterateBody(map, object); | 287 IncrementalMarkingMarkingVisitor::IterateBody(map, object); |
| 280 } | 288 } |
| 281 } | 289 } |
| (...skipping 364 matching lines...) |
| 646 } | 654 } |
| 647 | 655 |
| 648 | 656 |
| 649 bool ShouldRetainMap(Map* map, int age) { | 657 bool ShouldRetainMap(Map* map, int age) { |
| 650 if (age == 0) { | 658 if (age == 0) { |
| 651 // The map has aged. Do not retain this map. | 659 // The map has aged. Do not retain this map. |
| 652 return false; | 660 return false; |
| 653 } | 661 } |
| 654 Object* constructor = map->GetConstructor(); | 662 Object* constructor = map->GetConstructor(); |
| 655 if (!constructor->IsHeapObject() || | 663 if (!constructor->IsHeapObject() || |
| 656 ObjectMarking::IsWhite(HeapObject::cast(constructor))) { | 664 Marking::IsWhite( |
| | 665 ObjectMarking::MarkBitFrom(HeapObject::cast(constructor)))) { |
| 657 // The constructor is dead, no new objects with this map can | 666 // The constructor is dead, no new objects with this map can |
| 658 // be created. Do not retain this map. | 667 // be created. Do not retain this map. |
| 659 return false; | 668 return false; |
| 660 } | 669 } |
| 661 return true; | 670 return true; |
| 662 } | 671 } |
| 663 | 672 |
| 664 | 673 |
| 665 void IncrementalMarking::RetainMaps() { | 674 void IncrementalMarking::RetainMaps() { |
| 666 // Do not retain dead maps if flag disables it or there is | 675 // Do not retain dead maps if flag disables it or there is |
| 667 // - memory pressure (reduce_memory_footprint_), | 676 // - memory pressure (reduce_memory_footprint_), |
| 668 // - GC is requested by tests or dev-tools (abort_incremental_marking_). | 677 // - GC is requested by tests or dev-tools (abort_incremental_marking_). |
| 669 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || | 678 bool map_retaining_is_disabled = heap()->ShouldReduceMemory() || |
| 670 heap()->ShouldAbortIncrementalMarking() || | 679 heap()->ShouldAbortIncrementalMarking() || |
| 671 FLAG_retain_maps_for_n_gc == 0; | 680 FLAG_retain_maps_for_n_gc == 0; |
| 672 ArrayList* retained_maps = heap()->retained_maps(); | 681 ArrayList* retained_maps = heap()->retained_maps(); |
| 673 int length = retained_maps->Length(); | 682 int length = retained_maps->Length(); |
| 674 // The number_of_disposed_maps separates maps in the retained_maps | 683 // The number_of_disposed_maps separates maps in the retained_maps |
| 675 // array that were created before and after context disposal. | 684 // array that were created before and after context disposal. |
| 676 // We do not age and retain disposed maps to avoid memory leaks. | 685 // We do not age and retain disposed maps to avoid memory leaks. |
| 677 int number_of_disposed_maps = heap()->number_of_disposed_maps_; | 686 int number_of_disposed_maps = heap()->number_of_disposed_maps_; |
| 678 for (int i = 0; i < length; i += 2) { | 687 for (int i = 0; i < length; i += 2) { |
| 679 DCHECK(retained_maps->Get(i)->IsWeakCell()); | 688 DCHECK(retained_maps->Get(i)->IsWeakCell()); |
| 680 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); | 689 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); |
| 681 if (cell->cleared()) continue; | 690 if (cell->cleared()) continue; |
| 682 int age = Smi::cast(retained_maps->Get(i + 1))->value(); | 691 int age = Smi::cast(retained_maps->Get(i + 1))->value(); |
| 683 int new_age; | 692 int new_age; |
| 684 Map* map = Map::cast(cell->value()); | 693 Map* map = Map::cast(cell->value()); |
| | 694 MarkBit map_mark = ObjectMarking::MarkBitFrom(map); |
| 685 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && | 695 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && |
| 686 ObjectMarking::IsWhite(map)) { | 696 Marking::IsWhite(map_mark)) { |
| 687 if (ShouldRetainMap(map, age)) { | 697 if (ShouldRetainMap(map, age)) { |
| 688 MarkGrey(heap(), map); | 698 MarkGrey(heap(), map); |
| 689 } | 699 } |
| 690 Object* prototype = map->prototype(); | 700 Object* prototype = map->prototype(); |
| 691 if (age > 0 && prototype->IsHeapObject() && | 701 if (age > 0 && prototype->IsHeapObject() && |
| 692 ObjectMarking::IsWhite(HeapObject::cast(prototype))) { | 702 Marking::IsWhite( |
| | 703 ObjectMarking::MarkBitFrom(HeapObject::cast(prototype)))) { |
| 693 // The prototype is not marked, age the map. | 704 // The prototype is not marked, age the map. |
| 694 new_age = age - 1; | 705 new_age = age - 1; |
| 695 } else { | 706 } else { |
| 696 // The prototype and the constructor are marked, this map keeps only | 707 // The prototype and the constructor are marked, this map keeps only |
| 697 // transition tree alive, not JSObjects. Do not age the map. | 708 // transition tree alive, not JSObjects. Do not age the map. |
| 698 new_age = age; | 709 new_age = age; |
| 699 } | 710 } |
| 700 } else { | 711 } else { |
| 701 new_age = FLAG_retain_maps_for_n_gc; | 712 new_age = FLAG_retain_maps_for_n_gc; |
| 702 } | 713 } |
| (...skipping 86 matching lines...) |
| 789 // Only pointers to from space have to be updated. | 800 // Only pointers to from space have to be updated. |
| 790 if (heap_->InFromSpace(obj)) { | 801 if (heap_->InFromSpace(obj)) { |
| 791 MapWord map_word = obj->map_word(); | 802 MapWord map_word = obj->map_word(); |
| 792 // There may be objects on the marking deque that do not exist anymore, | 803 // There may be objects on the marking deque that do not exist anymore, |
| 793 // e.g. left trimmed objects or objects from the root set (frames). | 804 // e.g. left trimmed objects or objects from the root set (frames). |
| 794 // If these object are dead at scavenging time, their marking deque | 805 // If these object are dead at scavenging time, their marking deque |
| 795 // entries will not point to forwarding addresses. Hence, we can discard | 806 // entries will not point to forwarding addresses. Hence, we can discard |
| 796 // them. | 807 // them. |
| 797 if (map_word.IsForwardingAddress()) { | 808 if (map_word.IsForwardingAddress()) { |
| 798 HeapObject* dest = map_word.ToForwardingAddress(); | 809 HeapObject* dest = map_word.ToForwardingAddress(); |
| 799 if (ObjectMarking::IsBlack(dest)) continue; | 810 if (Marking::IsBlack(ObjectMarking::MarkBitFrom(dest))) continue; |
| 800 array[new_top] = dest; | 811 array[new_top] = dest; |
| 801 new_top = ((new_top + 1) & mask); | 812 new_top = ((new_top + 1) & mask); |
| 802 DCHECK(new_top != marking_deque->bottom()); | 813 DCHECK(new_top != marking_deque->bottom()); |
| 803 DCHECK(ObjectMarking::IsGrey(obj) || | 814 #ifdef DEBUG |
| 804 (obj->IsFiller() && ObjectMarking::IsWhite(obj))); | 815 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| | 816 DCHECK(Marking::IsGrey(mark_bit) || |
| | 817 (obj->IsFiller() && Marking::IsWhite(mark_bit))); |
| | 818 #endif |
| 805 } | 819 } |
| 806 } else if (obj->map() != filler_map) { | 820 } else if (obj->map() != filler_map) { |
| 807 // Skip one word filler objects that appear on the | 821 // Skip one word filler objects that appear on the |
| 808 // stack when we perform in place array shift. | 822 // stack when we perform in place array shift. |
| 809 array[new_top] = obj; | 823 array[new_top] = obj; |
| 810 new_top = ((new_top + 1) & mask); | 824 new_top = ((new_top + 1) & mask); |
| 811 DCHECK(new_top != marking_deque->bottom()); | 825 DCHECK(new_top != marking_deque->bottom()); |
| 812 DCHECK(ObjectMarking::IsGrey(obj) || | 826 #ifdef DEBUG |
| 813 (obj->IsFiller() && ObjectMarking::IsWhite(obj)) || | 827 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| 814 (MemoryChunk::FromAddress(obj->address()) | 828 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 815 ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 829 DCHECK(Marking::IsGrey(mark_bit) || |
| 816 ObjectMarking::IsBlack(obj))); | 830 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || |
| | 831 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| | 832 Marking::IsBlack(mark_bit))); |
| | 833 #endif |
| 817 } | 834 } |
| 818 } | 835 } |
| 819 marking_deque->set_top(new_top); | 836 marking_deque->set_top(new_top); |
| 820 } | 837 } |
| 821 | 838 |
| 822 | 839 |
| 823 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { | 840 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { |
| 824 MarkGrey(heap_, map); | 841 MarkGrey(heap_, map); |
| 825 | 842 |
| 826 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | 843 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
| 827 | 844 |
| 828 #if ENABLE_SLOW_DCHECKS | 845 #if ENABLE_SLOW_DCHECKS |
| 829 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 846 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| 830 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 847 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
| 831 SLOW_DCHECK(Marking::IsGrey(mark_bit) || | 848 SLOW_DCHECK(Marking::IsGrey(mark_bit) || |
| 832 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || | 849 (obj->IsFiller() && Marking::IsWhite(mark_bit)) || |
| 833 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && | 850 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && |
| 834 Marking::IsBlack(mark_bit))); | 851 Marking::IsBlack(mark_bit))); |
| 835 #endif | 852 #endif |
| 836 MarkBlack(obj, size); | 853 MarkBlack(obj, size); |
| 837 } | 854 } |
| 838 | 855 |
| 839 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { | 856 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { |
| 840 if (ObjectMarking::IsWhite(object)) { | 857 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
| 841 heap->incremental_marking()->WhiteToGreyAndPush(object); | 858 if (Marking::IsWhite(mark_bit)) { |
| | 859 heap->incremental_marking()->WhiteToGreyAndPush(object, mark_bit); |
| 842 } | 860 } |
| 843 } | 861 } |
| 844 | 862 |
| 845 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { | 863 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { |
| 846 if (ObjectMarking::IsBlack(obj)) return; | 864 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| 847 ObjectMarking::GreyToBlack(obj); | 865 if (Marking::IsBlack(mark_bit)) return; |
| | 866 Marking::GreyToBlack(mark_bit); |
| | 867 MemoryChunk::IncrementLiveBytes(obj, size); |
| 848 } | 868 } |
| 849 | 869 |
| 850 intptr_t IncrementalMarking::ProcessMarkingDeque( | 870 intptr_t IncrementalMarking::ProcessMarkingDeque( |
| 851 intptr_t bytes_to_process, ForceCompletionAction completion) { | 871 intptr_t bytes_to_process, ForceCompletionAction completion) { |
| 852 intptr_t bytes_processed = 0; | 872 intptr_t bytes_processed = 0; |
| 853 MarkingDeque* marking_deque = | 873 MarkingDeque* marking_deque = |
| 854 heap_->mark_compact_collector()->marking_deque(); | 874 heap_->mark_compact_collector()->marking_deque(); |
| 855 while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process || | 875 while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process || |
| 856 completion == FORCE_COMPLETION)) { | 876 completion == FORCE_COMPLETION)) { |
| 857 HeapObject* obj = marking_deque->Pop(); | 877 HeapObject* obj = marking_deque->Pop(); |
| 858 | 878 |
| 859 // Left trimming may result in white filler objects on the marking deque. | 879 // Left trimming may result in white filler objects on the marking deque. |
| 860 // Ignore these objects. | 880 // Ignore these objects. |
| 861 if (obj->IsFiller()) { | 881 if (obj->IsFiller()) { |
| 862 DCHECK(ObjectMarking::IsImpossible(obj) || ObjectMarking::IsWhite(obj)); | 882 DCHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(obj)) || |
| | 883 Marking::IsWhite(ObjectMarking::MarkBitFrom(obj))); |
| 863 continue; | 884 continue; |
| 864 } | 885 } |
| 865 | 886 |
| 866 Map* map = obj->map(); | 887 Map* map = obj->map(); |
| 867 int size = obj->SizeFromMap(map); | 888 int size = obj->SizeFromMap(map); |
| 868 unscanned_bytes_of_large_object_ = 0; | 889 unscanned_bytes_of_large_object_ = 0; |
| 869 VisitObject(map, obj, size); | 890 VisitObject(map, obj, size); |
| 870 bytes_processed += size - unscanned_bytes_of_large_object_; | 891 bytes_processed += size - unscanned_bytes_of_large_object_; |
| 871 } | 892 } |
| 872 // Report all found wrappers to the embedder. This is necessary as the | 893 // Report all found wrappers to the embedder. This is necessary as the |
| (...skipping 34 matching lines...) |
| 907 } | 928 } |
| 908 } | 929 } |
| 909 | 930 |
| 910 Object* context = heap_->native_contexts_list(); | 931 Object* context = heap_->native_contexts_list(); |
| 911 while (!context->IsUndefined(heap_->isolate())) { | 932 while (!context->IsUndefined(heap_->isolate())) { |
| 912 // GC can happen when the context is not fully initialized, | 933 // GC can happen when the context is not fully initialized, |
| 913 // so the cache can be undefined. | 934 // so the cache can be undefined. |
| 914 HeapObject* cache = HeapObject::cast( | 935 HeapObject* cache = HeapObject::cast( |
| 915 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); | 936 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); |
| 916 if (!cache->IsUndefined(heap_->isolate())) { | 937 if (!cache->IsUndefined(heap_->isolate())) { |
| 917 if (ObjectMarking::IsGrey(cache)) { | 938 MarkBit mark_bit = ObjectMarking::MarkBitFrom(cache); |
| 918 ObjectMarking::GreyToBlack(cache); | 939 if (Marking::IsGrey(mark_bit)) { |
| | 940 Marking::GreyToBlack(mark_bit); |
| | 941 MemoryChunk::IncrementLiveBytes(cache, cache->Size()); |
| 919 } | 942 } |
| 920 } | 943 } |
| 921 context = Context::cast(context)->next_context_link(); | 944 context = Context::cast(context)->next_context_link(); |
| 922 } | 945 } |
| 923 } | 946 } |
| 924 | 947 |
| 925 | 948 |
| 926 void IncrementalMarking::Stop() { | 949 void IncrementalMarking::Stop() { |
| 927 if (IsStopped()) return; | 950 if (IsStopped()) return; |
| 928 if (FLAG_trace_incremental_marking) { | 951 if (FLAG_trace_incremental_marking) { |
| (...skipping 260 matching lines...) |
| 1189 idle_marking_delay_counter_++; | 1212 idle_marking_delay_counter_++; |
| 1190 } | 1213 } |
| 1191 | 1214 |
| 1192 | 1215 |
| 1193 void IncrementalMarking::ClearIdleMarkingDelayCounter() { | 1216 void IncrementalMarking::ClearIdleMarkingDelayCounter() { |
| 1194 idle_marking_delay_counter_ = 0; | 1217 idle_marking_delay_counter_ = 0; |
| 1195 } | 1218 } |
| 1196 | 1219 |
| 1197 } // namespace internal | 1220 } // namespace internal |
| 1198 } // namespace v8 | 1221 } // namespace v8 |
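
Taken together, the hunks above replace the HeapObject-based ObjectMarking::IsWhite/IsBlack/WhiteToGrey helpers with explicit MarkBit handling (ObjectMarking::MarkBitFrom plus the Marking:: predicates), thread the MarkBit through WhiteToGreyAndPush, and add explicit MemoryChunk::IncrementLiveBytes accounting wherever an object's colour changes to or from black. For readers unfamiliar with the write barrier being touched, the following is a minimal, stand-alone sketch of the tri-colour invariant that BaseRecordWrite and WhiteToGreyAndPush enforce. IncrementalMarker, SimpleMarkingDeque, Color and RecordWrite are names invented for this example, not V8 classes, and the sketch deliberately ignores live-byte accounting, compaction and the is-marking checks present in the real code.

```cpp
// Minimal sketch of the tri-colour write-barrier invariant, assuming a
// side-table colour map instead of V8's per-page mark bitmaps.
#include <deque>
#include <unordered_map>

struct HeapObject {};  // stand-in for a heap object

enum class Color {
  kWhite,  // not yet seen by the marker
  kGrey,   // seen, queued for scanning
  kBlack   // fully scanned
};

class SimpleMarkingDeque {
 public:
  void Push(HeapObject* obj) { deque_.push_back(obj); }
  bool IsEmpty() const { return deque_.empty(); }
  HeapObject* Pop() {
    HeapObject* obj = deque_.back();
    deque_.pop_back();
    return obj;
  }

 private:
  std::deque<HeapObject*> deque_;
};

class IncrementalMarker {
 public:
  Color ColorOf(HeapObject* obj) const {
    auto it = colors_.find(obj);
    return it == colors_.end() ? Color::kWhite : it->second;
  }

  // Analogue of WhiteToGreyAndPush: grey the object and queue it so a later
  // incremental step scans its fields.
  void WhiteToGreyAndPush(HeapObject* obj) {
    colors_[obj] = Color::kGrey;
    deque_.Push(obj);
  }

  // Analogue of BaseRecordWrite: if a black (already scanned) host is given a
  // pointer to a white (unvisited) value, the value must be re-greyed, or the
  // marker would never see it and would treat it as garbage.
  void RecordWrite(HeapObject* host, HeapObject* value) {
    if (ColorOf(host) == Color::kBlack && ColorOf(value) == Color::kWhite) {
      WhiteToGreyAndPush(value);
    }
  }

  // Analogue of one ProcessMarkingDeque iteration: pop a grey object, scan it
  // (field visiting omitted here) and turn it black.
  void Step() {
    if (deque_.IsEmpty()) return;
    colors_[deque_.Pop()] = Color::kBlack;
  }

 private:
  SimpleMarkingDeque deque_;
  std::unordered_map<HeapObject*, Color> colors_;
};
```

In V8 itself the colour is stored in a per-page mark bitmap (which is what a MarkBit refers to) rather than in a side table, and grey objects are queued on the collector's marking deque exactly as WhiteToGreyAndPush does in the diff.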