Chromium Code Reviews

Unified diff: src/heap/incremental-marking.cc

Issue 2857743002: [heap] Use atomic marking operations in incremental marking if (Closed)
Patch Set: rebase (created 3 years, 7 months ago)
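For context on what the <kAtomicity> template arguments in the diff below are doing, here is a minimal, self-contained sketch of the pattern: marking primitives templated on an access mode, so the same call site can compile to plain bitmap updates when marking is single-threaded and to atomic read-modify-write operations when a concurrent marker may touch the same word. All names in the sketch (AccessMode, MarkCell, SetBit, WhiteToGrey) are hypothetical stand-ins for illustration, not V8's actual Marking/ObjectMarking API.

// Sketch only: an access-mode template parameter selecting plain vs. atomic
// mark-bit updates, in the shape of the WhiteToGrey<kAtomicity>(...) call
// sites in the patch below.
#include <atomic>
#include <cstdint>

enum class AccessMode { NON_ATOMIC, ATOMIC };

struct MarkCell {
  std::atomic<uint32_t> bits{0};
};

// Returns true only if this call transitioned the bit from 0 to 1.
template <AccessMode mode>
bool SetBit(MarkCell* cell, uint32_t mask) {
  if (mode == AccessMode::NON_ATOMIC) {
    // No concurrent marker: a relaxed read followed by a write is enough.
    uint32_t old_bits = cell->bits.load(std::memory_order_relaxed);
    if (old_bits & mask) return false;
    cell->bits.store(old_bits | mask, std::memory_order_relaxed);
    return true;
  }
  // A concurrent marker may race on the same word: exactly one thread
  // observes the 0 -> 1 transition and reports success.
  uint32_t old_bits = cell->bits.fetch_or(mask, std::memory_order_relaxed);
  return (old_bits & mask) == 0;
}

// White -> grey transition in this toy encoding (grey == bit 0 set).
template <AccessMode mode>
bool WhiteToGrey(MarkCell* cell) {
  return SetBit<mode>(cell, 1u);
}

int main() {
  MarkCell cell;
  bool first = WhiteToGrey<AccessMode::ATOMIC>(&cell);   // true: we greyed it
  bool second = WhiteToGrey<AccessMode::ATOMIC>(&cell);  // false: already grey
  return (first && !second) ? 0 : 1;
}

A class that defines something like static const AccessMode kAtomicity = AccessMode::ATOMIC; can then write WhiteToGrey<kAtomicity>(&cell) once and have the access mode chosen at compile time, which appears to be the shape of the call sites in the patch below.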
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/incremental-marking.h"

 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/heap/concurrent-marking.h"
(...skipping 25 matching lines...)
       was_activated_(false),
       black_allocation_(false),
       finalize_marking_completed_(false),
       trace_wrappers_toggle_(false),
       request_type_(NONE),
       new_generation_observer_(*this, kAllocatedThreshold),
       old_generation_observer_(*this, kAllocatedThreshold) {}

 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   HeapObject* value_heap_obj = HeapObject::cast(value);
-  DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
-                                      marking_state(value_heap_obj)));
-  DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
-  const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));
+  DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(
+      value_heap_obj, marking_state(value_heap_obj)));
+  DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
+  const bool is_black =
+      ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj));

   if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
     RestartIfNotMarking();
   }
   return is_compacting_ && is_black;
 }


 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
                                          Object* value) {
(...skipping 53 matching lines...)

 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
                                                  Object* value) {
   if (BaseRecordWrite(host, value)) {
     // Object is not going to be rescanned. We need to record the slot.
     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
   }
 }

 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
-  if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) {
+  if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
     marking_deque()->Push(obj);
     return true;
   }
   return false;
 }

 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
                                       HeapObject* to) {
   DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
   // This is only used when resizing an object.
   DCHECK(MemoryChunk::FromAddress(from->address()) ==
          MemoryChunk::FromAddress(to->address()));

   if (!IsMarking()) return;

   // If the mark doesn't move, we don't check the color of the object.
   // It doesn't matter whether the object is black, since it hasn't changed
   // size, so the adjustment to the live data count will be zero anyway.
   if (from == to) return;

   MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
   MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));

-  if (Marking::IsBlack(old_mark_bit)) {
+  if (Marking::IsBlack<kAtomicity>(old_mark_bit)) {
     if (from->address() + kPointerSize == to->address()) {
       // The old and the new markbits overlap. The |to| object has the
       // grey color. To make it black, we need to set second bit.
-      DCHECK(new_mark_bit.Get());
-      new_mark_bit.Next().Set();
+      DCHECK(new_mark_bit.Get<kAtomicity>());
+      new_mark_bit.Next().Set<kAtomicity>();
     } else {
-      bool success = Marking::WhiteToBlack(new_mark_bit);
+      bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
       DCHECK(success);
       USE(success);
     }
-  } else if (Marking::IsGrey(old_mark_bit)) {
+  } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) {
     if (from->address() + kPointerSize == to->address()) {
       // The old and the new markbits overlap. The |to| object has the
       // white color. To make it black, we need to set both bits.
       // Note that Marking::WhiteToGrey does not work here because
       // old_mark_bit.Next() can be set by the concurrent marker at any time.
-      new_mark_bit.Set();
-      new_mark_bit.Next().Set();
+      new_mark_bit.Set<kAtomicity>();
+      new_mark_bit.Next().Set<kAtomicity>();
     } else {
-      bool success = Marking::WhiteToGrey(new_mark_bit);
+      bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
       DCHECK(success);
       USE(success);
       marking_deque()->Push(to);
       RestartIfNotMarking();
     }
   }
 }

 class IncrementalMarkingMarkingVisitor
     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
(...skipping 25 matching lines...)
       bool scan_until_end = false;
       do {
         VisitPointers(heap, object, HeapObject::RawField(object, start_offset),
                       HeapObject::RawField(object, end_offset));
         start_offset = end_offset;
         end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
         scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
       } while (scan_until_end && start_offset < object_size);
       chunk->set_progress_bar(start_offset);
       if (start_offset < object_size) {
-        if (ObjectMarking::IsGrey(
+        if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
                 object, heap->incremental_marking()->marking_state(object))) {
           heap->incremental_marking()->marking_deque()->Unshift(object);
         } else {
-          DCHECK(ObjectMarking::IsBlack(
+          DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
               object, heap->incremental_marking()->marking_state(object)));
           heap->mark_compact_collector()->UnshiftBlack(object);
         }
         heap->incremental_marking()->NotifyIncompleteScanOfObject(
             object_size - (start_offset - already_scanned_offset));
       }
     } else {
       FixedArrayVisitor::Visit(map, object);
     }
   }

   static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
     Context* context = Context::cast(object);

     // We will mark cache black with a separate pass when we finish marking.
     // Note that GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
     if (!cache->IsUndefined(map->GetIsolate())) {
       if (cache->IsHeapObject()) {
         HeapObject* heap_obj = HeapObject::cast(cache);
         // Mark the object grey if it is white, do not enque it into the marking
         // deque.
         Heap* heap = map->GetHeap();
-        bool ignored = ObjectMarking::WhiteToGrey(
-            heap_obj, heap->incremental_marking()->marking_state(heap_obj));
+        bool ignored =
+            ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
+                heap_obj, heap->incremental_marking()->marking_state(heap_obj));
         USE(ignored);
       }
     }
     VisitNativeContext(map, context);
   }

   INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
     Object* target = *p;
     if (target->IsHeapObject()) {
       heap->mark_compact_collector()->RecordSlot(object, p, target);
(...skipping 14 matching lines...)

   // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
   }

   // Marks the object black without pushing it on the marking stack.
   // Returns true if object needed marking and false otherwise.
   INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
-    return ObjectMarking::WhiteToBlack(
+    return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
         heap_object, heap->incremental_marking()->marking_state(heap_object));
   }
 };

 void IncrementalMarking::IterateBlackObject(HeapObject* object) {
-  if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) {
+  if (IsMarking() &&
+      ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) {
     Page* page = Page::FromAddress(object->address());
     if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
       // IterateBlackObject requires us to visit the whole object.
       page->ResetProgressBar();
     }
     Map* map = object->map();
     WhiteToGreyAndPush(map);
     IncrementalMarkingMarkingVisitor::IterateBody(map, object);
   }
 }
(...skipping 336 matching lines...)
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   Object* weak_cell_head = Smi::kZero;
   WeakCell* prev_weak_cell_obj = NULL;
   while (weak_cell_obj != Smi::kZero) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     // We do not insert cleared weak cells into the list, so the value
     // cannot be a Smi here.
     HeapObject* value = HeapObject::cast(weak_cell->value());
     // Remove weak cells with live objects from the list, they do not need
     // clearing.
-    if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) {
+    if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
       // Record slot, if value is pointing to an evacuation candidate.
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
       heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
       // Remove entry somewhere after top.
       if (prev_weak_cell_obj != NULL) {
         prev_weak_cell_obj->set_next(weak_cell->next());
       }
       weak_cell_obj = weak_cell->next();
       weak_cell->clear_next(the_hole_value);
     } else {
(...skipping 10 matching lines...)


 bool ShouldRetainMap(Map* map, int age) {
   if (age == 0) {
     // The map has aged. Do not retain this map.
     return false;
   }
   Object* constructor = map->GetConstructor();
   Heap* heap = map->GetHeap();
   if (!constructor->IsHeapObject() ||
-      ObjectMarking::IsWhite(HeapObject::cast(constructor),
-                             heap->incremental_marking()->marking_state(
-                                 HeapObject::cast(constructor)))) {
+      ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>(
+          HeapObject::cast(constructor),
+          heap->incremental_marking()->marking_state(
+              HeapObject::cast(constructor)))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
   }
   return true;
 }


 void IncrementalMarking::RetainMaps() {
   // Do not retain dead maps if flag disables it or there is
   // - memory pressure (reduce_memory_footprint_),
   // - GC is requested by tests or dev-tools (abort_incremental_marking_).
   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
                                    heap()->ShouldAbortIncrementalMarking() ||
                                    FLAG_retain_maps_for_n_gc == 0;
   ArrayList* retained_maps = heap()->retained_maps();
   int length = retained_maps->Length();
   // The number_of_disposed_maps separates maps in the retained_maps
   // array that were created before and after context disposal.
   // We do not age and retain disposed maps to avoid memory leaks.
   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
   for (int i = 0; i < length; i += 2) {
     DCHECK(retained_maps->Get(i)->IsWeakCell());
     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
     if (cell->cleared()) continue;
     int age = Smi::cast(retained_maps->Get(i + 1))->value();
     int new_age;
     Map* map = Map::cast(cell->value());
     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
-        ObjectMarking::IsWhite(map, marking_state(map))) {
+        ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) {
       if (ShouldRetainMap(map, age)) {
         WhiteToGreyAndPush(map);
       }
       Object* prototype = map->prototype();
       if (age > 0 && prototype->IsHeapObject() &&
-          ObjectMarking::IsWhite(HeapObject::cast(prototype),
-                                 marking_state(HeapObject::cast(prototype)))) {
+          ObjectMarking::IsWhite<kAtomicity>(
+              HeapObject::cast(prototype),
+              marking_state(HeapObject::cast(prototype)))) {
         // The prototype is not marked, age the map.
         new_age = age - 1;
       } else {
         // The prototype and the constructor are marked, this map keeps only
         // transition tree alive, not JSObjects. Do not age the map.
         new_age = age;
       }
     } else {
       new_age = FLAG_retain_maps_for_n_gc;
     }
(...skipping 70 matching lines...)
       MapWord map_word = obj->map_word();
       if (!map_word.IsForwardingAddress()) {
         // There may be objects on the marking deque that do not exist anymore,
         // e.g. left trimmed objects or objects from the root set (frames).
         // If these object are dead at scavenging time, their marking deque
         // entries will not point to forwarding addresses. Hence, we can discard
         // them.
         return nullptr;
       }
       HeapObject* dest = map_word.ToForwardingAddress();
-      if (ObjectMarking::IsBlack(dest, marking_state(dest))) {
+      if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) {
         // The object is already processed by the marker.
         return nullptr;
       }
-      DCHECK(
-          ObjectMarking::IsGrey(obj, marking_state(obj)) ||
-          (obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj))));
+      DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
+             (obj->IsFiller() &&
+              ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))));
       return dest;
     } else {
-      DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) ||
+      DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
              (obj->IsFiller() &&
-              ObjectMarking::IsWhite(obj, marking_state(obj))) ||
+              ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) ||
              (MemoryChunk::FromAddress(obj->address())
                   ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-              ObjectMarking::IsBlack(obj, marking_state(obj))));
+              ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj))));
       // Skip one word filler objects that appear on the
       // stack when we perform in place array shift.
       return (obj->map() == filler_map) ? nullptr : obj;
     }
   });
 }


 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
   WhiteToGreyAndPush(map);

   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);

 #if ENABLE_SLOW_DCHECKS
   MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-  SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
+  SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) ||
               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-               Marking::IsBlack(mark_bit)));
+               Marking::IsBlack<kAtomicity>(mark_bit)));
 #endif
-  ObjectMarking::GreyToBlack(obj, marking_state(obj));
+  ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj));
 }

 intptr_t IncrementalMarking::ProcessMarkingDeque(
     intptr_t bytes_to_process, ForceCompletionAction completion) {
   intptr_t bytes_processed = 0;
   while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process ||
                                          completion == FORCE_COMPLETION)) {
     HeapObject* obj = marking_deque()->Pop();

     // Left trimming may result in white, grey, or black filler objects on the
     // marking deque. Ignore these objects.
     if (obj->IsFiller()) {
-      DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
+      DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
       continue;
     }

     Map* map = obj->map();
     int size = obj->SizeFromMap(map);
     unscanned_bytes_of_large_object_ = 0;
     VisitObject(map, obj, size);
     bytes_processed += size - unscanned_bytes_of_large_object_;
   }
   // Report all found wrappers to the embedder. This is necessary as the
(...skipping 35 matching lines...)
   }

   Object* context = heap_->native_contexts_list();
   while (!context->IsUndefined(heap_->isolate())) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     HeapObject* cache = HeapObject::cast(
         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
     if (!cache->IsUndefined(heap_->isolate())) {
       // Mark the cache black if it is grey.
-      bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache));
+      bool ignored =
+          ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache));
       USE(ignored);
     }
     context = Context::cast(context)->next_context_link();
   }
 }


 void IncrementalMarking::Stop() {
   if (IsStopped()) return;
   if (FLAG_trace_incremental_marking) {
(...skipping 260 matching lines...)
   idle_marking_delay_counter_++;
 }


 void IncrementalMarking::ClearIdleMarkingDelayCounter() {
   idle_marking_delay_counter_ = 0;
 }

 }  // namespace internal
 }  // namespace v8
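The call sites touched above all feed the same incremental-marking write-barrier shape: BaseRecordWrite greys and pushes a value when a black object starts pointing at it, and WhiteToGreyAndPush reports whether this call performed the white-to-grey transition. As a reading aid, here is a hedged, stand-alone sketch of that shape in the style of a Dijkstra-style insertion barrier; the types (Obj, Worklist) and the tri-colour field are illustrative simplifications, not V8 classes.

// Illustrative only: an insertion write barrier in the shape of
// BaseRecordWrite / WhiteToGreyAndPush from the diff above.
#include <deque>

enum class Color { kWhite, kGrey, kBlack };

struct Obj {
  Color color = Color::kWhite;
};

struct Worklist {
  std::deque<Obj*> objects;
  void Push(Obj* o) { objects.push_back(o); }
};

// Grey a white object and queue it for scanning; report whether this call
// performed the transition (mirrors the bool result used in the patch).
bool WhiteToGreyAndPush(Obj* value, Worklist* worklist) {
  if (value->color != Color::kWhite) return false;
  value->color = Color::kGrey;
  worklist->Push(value);
  return true;
}

// Write barrier: if a black host starts referencing a not-yet-scanned value,
// the value must be greyed so the incremental marker revisits it.
void RecordWrite(Obj* host, Obj* value, Worklist* worklist) {
  if (host->color == Color::kBlack) {
    WhiteToGreyAndPush(value, worklist);
  }
}

int main() {
  Worklist worklist;
  Obj host;
  host.color = Color::kBlack;
  Obj value;  // starts white
  RecordWrite(&host, &value, &worklist);
  return (value.color == Color::kGrey && worklist.objects.size() == 1) ? 0 : 1;
}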
