Chromium Code Reviews

Unified Diff: src/heap/incremental-marking.cc

Issue 2858343003: [heap] Reland Use atomic marking operations in incremental marking if (Closed)
Patch Set: rebase Created 3 years, 7 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/incremental-marking.h"

 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/heap/concurrent-marking.h"
(...skipping 34 matching lines...)
       was_activated_(false),
       black_allocation_(false),
       finalize_marking_completed_(false),
       trace_wrappers_toggle_(false),
       request_type_(NONE),
       new_generation_observer_(*this, kAllocatedThreshold),
       old_generation_observer_(*this, kAllocatedThreshold) {}

 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   HeapObject* value_heap_obj = HeapObject::cast(value);
-  DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
-                                      marking_state(value_heap_obj)));
-  DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
-  const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));
+  DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(
+      value_heap_obj, marking_state(value_heap_obj)));
+  DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
+  const bool is_black =
+      ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj));

   if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
     RestartIfNotMarking();
   }
   return is_compacting_ && is_black;
 }

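Note on the hunk above: the mark-bit predicates (IsImpossible, IsBlack) now take a kAtomicity template argument, presumably declared in src/heap/incremental-marking.h (the header reviewed alongside this file), which selects between plain and atomic accesses to the marking bitmap. The sketch below is only an illustration of why a white-to-grey transition has to be a single atomic read-modify-write once a concurrent marker can touch the same bitmap word; it is not V8's MarkBit code, and all names in it are made up for the example.

// Illustrative sketch only, not V8 code: a race-free white -> grey transition
// on a shared bitmap word. Exactly one thread observes the bit as clear and
// wins; every other thread backs off without pushing the object again.
#include <atomic>
#include <cstdint>

bool WhiteToGreyAtomic(std::atomic<uint32_t>* bitmap_cell, uint32_t grey_mask) {
  uint32_t old_bits = bitmap_cell->load(std::memory_order_relaxed);
  do {
    if (old_bits & grey_mask) return false;  // Already grey or black.
  } while (!bitmap_cell->compare_exchange_weak(old_bits, old_bits | grey_mask,
                                               std::memory_order_relaxed));
  return true;  // This thread performed the transition and should push the object.
}

With a non-atomic load-test-store sequence, two racing threads could both claim the transition and push the object twice, or one update to the shared word could be lost entirely.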
 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
                                          Object* value) {
(...skipping 53 matching lines...)

 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
                                                  Object* value) {
   if (BaseRecordWrite(host, value)) {
     // Object is not going to be rescanned. We need to record the slot.
     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
   }
 }

 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
-  if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) {
+  if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
     marking_deque()->Push(obj);
     return true;
   }
   return false;
 }

 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
                                       HeapObject* to) {
   DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
   // This is only used when resizing an object.
   DCHECK(MemoryChunk::FromAddress(from->address()) ==
          MemoryChunk::FromAddress(to->address()));

   if (!IsMarking()) return;

   // If the mark doesn't move, we don't check the color of the object.
   // It doesn't matter whether the object is black, since it hasn't changed
   // size, so the adjustment to the live data count will be zero anyway.
   if (from == to) return;

   MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
   MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));

-  if (Marking::IsBlack(old_mark_bit)) {
+  if (Marking::IsBlack<kAtomicity>(old_mark_bit)) {
     if (from->address() + kPointerSize == to->address()) {
       // The old and the new markbits overlap. The |to| object has the
       // grey color. To make it black, we need to set the second bit.
-      DCHECK(new_mark_bit.Get());
-      new_mark_bit.Next().Set();
+      DCHECK(new_mark_bit.Get<kAtomicity>());
+      new_mark_bit.Next().Set<kAtomicity>();
     } else {
-      bool success = Marking::WhiteToBlack(new_mark_bit);
+      bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
       DCHECK(success);
       USE(success);
     }
-  } else if (Marking::IsGrey(old_mark_bit)) {
+  } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) {
     if (from->address() + kPointerSize == to->address()) {
       // The old and the new markbits overlap. The |to| object has the
       // white color. To make it grey, we need to set the first bit.
       // Note that Marking::WhiteToGrey does not work here because
       // old_mark_bit.Next() can be set by the concurrent marker at any time.
       new_mark_bit.Set();
       DCHECK(!new_mark_bit.Next().Get());
     } else {
-      bool success = Marking::WhiteToGrey(new_mark_bit);
+      bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
       DCHECK(success);
       USE(success);
     }
     marking_deque()->Push(to);
     RestartIfNotMarking();
   }
 }

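The comments in TransferMark are easier to follow with the two-bit color encoding spelled out. This is an assumption drawn from the comments and from the IsImpossible checks elsewhere in the file, not something visible in this diff: each object owns two consecutive bits in the marking bitmap, white means neither bit is set, grey means only the first bit is set, black means both bits are set, and "second bit only" is the impossible state. Under that reading, a grey object whose mark bits overlap with the new object's bits is turned black by setting just the second bit, which is what the overlapping-markbit branch above does.

// Sketch of the assumed two-bit color encoding; names are illustrative only.
//   first bit | second bit | color
//   ----------+------------+--------------------------------------------
//       0     |     0      | white
//       1     |     0      | grey
//       1     |     1      | black
//       0     |     1      | impossible (the state IsImpossible() rejects)
enum class Color { kWhite, kGrey, kBlack, kImpossible };

Color ColorFromBits(bool first_bit, bool second_bit) {
  if (first_bit) return second_bit ? Color::kBlack : Color::kGrey;
  return second_bit ? Color::kImpossible : Color::kWhite;
}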
 class IncrementalMarkingMarkingVisitor
     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
(...skipping 25 matching lines...)
       bool scan_until_end = false;
       do {
         VisitPointers(heap, object, HeapObject::RawField(object, start_offset),
                       HeapObject::RawField(object, end_offset));
         start_offset = end_offset;
         end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
         scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
       } while (scan_until_end && start_offset < object_size);
       chunk->set_progress_bar(start_offset);
       if (start_offset < object_size) {
-        if (ObjectMarking::IsGrey(
+        if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
                 object, heap->incremental_marking()->marking_state(object))) {
           heap->incremental_marking()->marking_deque()->Unshift(object);
         } else {
-          DCHECK(ObjectMarking::IsBlack(
+          DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
               object, heap->incremental_marking()->marking_state(object)));
           heap->mark_compact_collector()->UnshiftBlack(object);
         }
         heap->incremental_marking()->NotifyIncompleteScanOfObject(
             object_size - (start_offset - already_scanned_offset));
       }
     } else {
       FixedArrayVisitor::Visit(map, object);
     }
   }

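The loop above scans a large FixedArray in fixed-size slices, stores how far it got in the page's progress bar, and pushes the object back while it is still grey so that a later step resumes from the saved offset. Below is a standalone toy walk-through of the offset arithmetic; it assumes kProgressBarScanningChunk is 32 KB, which is not visible in this hunk and is only an assumption for the example.

// Standalone illustration of the chunked scan, not V8 code.
#include <algorithm>
#include <cstdio>

int main() {
  const int kChunk = 32 * 1024;        // assumed kProgressBarScanningChunk
  const int object_size = 100 * 1024;  // a ~100 KB array body
  int start_offset = 0;                // or the saved progress bar when resuming
  int end_offset = std::min(object_size, start_offset + kChunk);
  while (start_offset < object_size) {
    std::printf("visit slots in [%d, %d)\n", start_offset, end_offset);
    start_offset = end_offset;  // the progress bar would be updated here
    end_offset = std::min(object_size, end_offset + kChunk);
  }
  return 0;
}

For a 100 KB array body this prints four slices, the last one truncated to the object's size.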
   static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
     Context* context = Context::cast(object);

     // We will mark cache black with a separate pass when we finish marking.
     // Note that GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
     if (!cache->IsUndefined(map->GetIsolate())) {
       if (cache->IsHeapObject()) {
         HeapObject* heap_obj = HeapObject::cast(cache);
         // Mark the object grey if it is white, do not enqueue it into the
         // marking deque.
         Heap* heap = map->GetHeap();
-        bool ignored = ObjectMarking::WhiteToGrey(
-            heap_obj, heap->incremental_marking()->marking_state(heap_obj));
+        bool ignored =
+            ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
+                heap_obj, heap->incremental_marking()->marking_state(heap_obj));
         USE(ignored);
       }
     }
     VisitNativeContext(map, context);
   }

   INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
     Object* target = *p;
     if (target->IsHeapObject()) {
       heap->mark_compact_collector()->RecordSlot(object, p, target);
(...skipping 14 matching lines...)

   // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
   }

   // Marks the object black without pushing it on the marking stack.
   // Returns true if object needed marking and false otherwise.
   INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
-    return ObjectMarking::WhiteToBlack(
+    return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
         heap_object, heap->incremental_marking()->marking_state(heap_object));
   }
 };

 void IncrementalMarking::IterateBlackObject(HeapObject* object) {
-  if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) {
+  if (IsMarking() &&
+      ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) {
     Page* page = Page::FromAddress(object->address());
     if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
       // IterateBlackObject requires us to visit the whole object.
       page->ResetProgressBar();
     }
     Map* map = object->map();
     WhiteToGreyAndPush(map);
     IncrementalMarkingMarkingVisitor::IterateBody(map, object);
   }
 }
(...skipping 336 matching lines...)
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   Object* weak_cell_head = Smi::kZero;
   WeakCell* prev_weak_cell_obj = NULL;
   while (weak_cell_obj != Smi::kZero) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     // We do not insert cleared weak cells into the list, so the value
     // cannot be a Smi here.
     HeapObject* value = HeapObject::cast(weak_cell->value());
     // Remove weak cells with live objects from the list, they do not need
     // clearing.
-    if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) {
+    if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
       // Record slot, if value is pointing to an evacuation candidate.
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
       heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
       // Remove entry somewhere after top.
       if (prev_weak_cell_obj != NULL) {
         prev_weak_cell_obj->set_next(weak_cell->next());
       }
       weak_cell_obj = weak_cell->next();
       weak_cell->clear_next(the_hole_value);
     } else {
(...skipping 10 matching lines...)


 bool ShouldRetainMap(Map* map, int age) {
   if (age == 0) {
     // The map has aged. Do not retain this map.
     return false;
   }
   Object* constructor = map->GetConstructor();
   Heap* heap = map->GetHeap();
   if (!constructor->IsHeapObject() ||
-      ObjectMarking::IsWhite(HeapObject::cast(constructor),
-                             heap->incremental_marking()->marking_state(
-                                 HeapObject::cast(constructor)))) {
+      ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>(
+          HeapObject::cast(constructor),
+          heap->incremental_marking()->marking_state(
+              HeapObject::cast(constructor)))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
   }
   return true;
 }
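ShouldRetainMap and the RetainMaps loop below implement the map retention heuristic: heap()->retained_maps() stores pairs of a weak cell pointing to a Map and an age, an unmarked map is kept alive artificially for up to FLAG_retain_maps_for_n_gc collections, and its age only counts down while its prototype is also unmarked. The sketch below restates just that aging decision, ignoring the disposed-maps index and the flags that disable retention; the struct and function names are illustrative, not V8 API.

// Sketch of the age update applied per retained map; illustration only.
struct RetainedMap {
  bool map_is_marked;        // i.e. !ObjectMarking::IsWhite(map, ...)
  bool prototype_is_marked;  // i.e. !ObjectMarking::IsWhite(prototype, ...)
  int age;                   // stored next to the weak cell in retained_maps
};

int NextAge(const RetainedMap& m, int retain_for_n_gc) {
  if (m.map_is_marked) {
    // The map is reachable anyway; reset its age for the next cycles.
    return retain_for_n_gc;
  }
  // Unmarked map: RetainMaps greys it while ShouldRetainMap agrees, and the
  // age only decreases while the prototype is unmarked as well.
  return (m.age > 0 && !m.prototype_is_marked) ? m.age - 1 : m.age;
}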


 void IncrementalMarking::RetainMaps() {
   // Do not retain dead maps if flag disables it or there is
   // - memory pressure (reduce_memory_footprint_),
   // - GC is requested by tests or dev-tools (abort_incremental_marking_).
   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
                                    heap()->ShouldAbortIncrementalMarking() ||
                                    FLAG_retain_maps_for_n_gc == 0;
   ArrayList* retained_maps = heap()->retained_maps();
   int length = retained_maps->Length();
   // The number_of_disposed_maps separates maps in the retained_maps
   // array that were created before and after context disposal.
   // We do not age and retain disposed maps to avoid memory leaks.
   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
   for (int i = 0; i < length; i += 2) {
     DCHECK(retained_maps->Get(i)->IsWeakCell());
     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
     if (cell->cleared()) continue;
     int age = Smi::cast(retained_maps->Get(i + 1))->value();
     int new_age;
     Map* map = Map::cast(cell->value());
     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
-        ObjectMarking::IsWhite(map, marking_state(map))) {
+        ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) {
       if (ShouldRetainMap(map, age)) {
         WhiteToGreyAndPush(map);
       }
       Object* prototype = map->prototype();
       if (age > 0 && prototype->IsHeapObject() &&
-          ObjectMarking::IsWhite(HeapObject::cast(prototype),
-                                 marking_state(HeapObject::cast(prototype)))) {
+          ObjectMarking::IsWhite<kAtomicity>(
+              HeapObject::cast(prototype),
+              marking_state(HeapObject::cast(prototype)))) {
         // The prototype is not marked, age the map.
         new_age = age - 1;
       } else {
         // The prototype and the constructor are marked, this map keeps only
         // transition tree alive, not JSObjects. Do not age the map.
         new_age = age;
       }
     } else {
       new_age = FLAG_retain_maps_for_n_gc;
     }
(...skipping 70 matching lines...)
       MapWord map_word = obj->map_word();
       if (!map_word.IsForwardingAddress()) {
         // There may be objects on the marking deque that do not exist anymore,
         // e.g. left trimmed objects or objects from the root set (frames).
         // If these objects are dead at scavenging time, their marking deque
         // entries will not point to forwarding addresses. Hence, we can
         // discard them.
         return nullptr;
       }
       HeapObject* dest = map_word.ToForwardingAddress();
-      if (ObjectMarking::IsBlack(dest, marking_state(dest))) {
+      if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) {
         // The object is already processed by the marker.
         return nullptr;
       }
-      DCHECK(
-          ObjectMarking::IsGrey(obj, marking_state(obj)) ||
-          (obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj))));
+      DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
+             (obj->IsFiller() &&
+              ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))));
       return dest;
     } else {
-      DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) ||
+      DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
              (obj->IsFiller() &&
-              ObjectMarking::IsWhite(obj, marking_state(obj))) ||
+              ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) ||
              (MemoryChunk::FromAddress(obj->address())
                   ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-              ObjectMarking::IsBlack(obj, marking_state(obj))));
+              ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj))));
       // Skip one word filler objects that appear on the
       // stack when we perform in place array shift.
       return (obj->map() == filler_map) ? nullptr : obj;
     }
   });
 }

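The lambda ending above is passed to the marking deque's update routine (the enclosing call sits in the elided lines): for every queued object it returns either the object to keep, possibly replaced by its forwarding address, or nullptr to drop a stale entry. A generic sketch of that update-in-place contract follows, under the assumption that a nullptr return means "discard this entry"; it is not the real MarkingDeque.

// Illustration of the filter-and-forward pattern, not V8's MarkingDeque.
#include <vector>

template <typename Object, typename Callback>
void UpdateWorklist(std::vector<Object*>* worklist, Callback callback) {
  size_t new_size = 0;
  for (Object* entry : *worklist) {
    if (Object* replacement = callback(entry)) {
      (*worklist)[new_size++] = replacement;  // Keep, possibly forwarded.
    }
    // A nullptr result means the entry is dead or already processed: drop it.
  }
  worklist->resize(new_size);
}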
 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
   WhiteToGreyAndPush(map);

   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);

 #if ENABLE_SLOW_DCHECKS
   MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-  SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
+  SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) ||
               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-               Marking::IsBlack(mark_bit)));
+               Marking::IsBlack<kAtomicity>(mark_bit)));
 #endif
-  ObjectMarking::GreyToBlack(obj, marking_state(obj));
+  ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj));
 }

 intptr_t IncrementalMarking::ProcessMarkingDeque(
     intptr_t bytes_to_process, ForceCompletionAction completion) {
   intptr_t bytes_processed = 0;
   while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process ||
                                          completion == FORCE_COMPLETION)) {
     HeapObject* obj = marking_deque()->Pop();

     // Left trimming may result in white, grey, or black filler objects on the
     // marking deque. Ignore these objects.
     if (obj->IsFiller()) {
-      DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
+      DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
       continue;
     }

     Map* map = obj->map();
     int size = obj->SizeFromMap(map);
     unscanned_bytes_of_large_object_ = 0;
     VisitObject(map, obj, size);
     bytes_processed += size - unscanned_bytes_of_large_object_;
   }
   // Report all found wrappers to the embedder. This is necessary as the
(...skipping 35 matching lines...)
   }

   Object* context = heap_->native_contexts_list();
   while (!context->IsUndefined(heap_->isolate())) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     HeapObject* cache = HeapObject::cast(
         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
     if (!cache->IsUndefined(heap_->isolate())) {
       // Mark the cache black if it is grey.
-      bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache));
+      bool ignored =
+          ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache));
       USE(ignored);
     }
     context = Context::cast(context)->next_context_link();
   }
 }


 void IncrementalMarking::Stop() {
   if (IsStopped()) return;
   if (FLAG_trace_incremental_marking) {
(...skipping 260 matching lines...)
   idle_marking_delay_counter_++;
 }


 void IncrementalMarking::ClearIdleMarkingDelayCounter() {
   idle_marking_delay_counter_ = 0;
 }

 }  // namespace internal
 }  // namespace v8
