Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(657)

Side by Side Diff: src/heap/incremental-marking.cc

Issue 2863953002: Revert of [heap] Reland "Make non-atomic markbit operations consistent with atomic ones." (Closed)
Patch Set: Created 3 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/incremental-marking.h ('k') | src/heap/mark-compact.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/incremental-marking.h" 5 #include "src/heap/incremental-marking.h"
6 6
7 #include "src/code-stubs.h" 7 #include "src/code-stubs.h"
8 #include "src/compilation-cache.h" 8 #include "src/compilation-cache.h"
9 #include "src/conversions.h" 9 #include "src/conversions.h"
10 #include "src/heap/concurrent-marking.h" 10 #include "src/heap/concurrent-marking.h"
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
50 new_generation_observer_(*this, kAllocatedThreshold), 50 new_generation_observer_(*this, kAllocatedThreshold),
51 old_generation_observer_(*this, kAllocatedThreshold) {} 51 old_generation_observer_(*this, kAllocatedThreshold) {}
52 52
53 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { 53 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
54 HeapObject* value_heap_obj = HeapObject::cast(value); 54 HeapObject* value_heap_obj = HeapObject::cast(value);
55 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, 55 DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
56 marking_state(value_heap_obj))); 56 marking_state(value_heap_obj)));
57 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); 57 DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
58 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); 58 const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));
59 59
60 if (is_black && WhiteToGreyAndPush(value_heap_obj)) { 60 if (is_black &&
61 ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) {
62 WhiteToGreyAndPush(value_heap_obj);
61 RestartIfNotMarking(); 63 RestartIfNotMarking();
62 } 64 }
63 return is_compacting_ && is_black; 65 return is_compacting_ && is_black;
64 } 66 }
65 67
66 68
67 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot, 69 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
68 Object* value) { 70 Object* value) {
69 if (BaseRecordWrite(obj, value) && slot != NULL) { 71 if (BaseRecordWrite(obj, value) && slot != NULL) {
70 // Object is not going to be rescanned; we need to record the slot. 72 // Object is not going to be rescanned; we need to record the slot.
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
121 } 123 }
122 124
123 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, 125 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
124 Object* value) { 126 Object* value) {
125 if (BaseRecordWrite(host, value)) { 127 if (BaseRecordWrite(host, value)) {
126 // Object is not going to be rescanned. We need to record the slot. 128 // Object is not going to be rescanned. We need to record the slot.
127 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value); 129 heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
128 } 130 }
129 } 131 }
130 132
131 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { 133 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
132 if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { 134 ObjectMarking::WhiteToGrey(obj, marking_state(obj));
133 marking_deque()->Push(obj); 135 marking_deque()->Push(obj);
134 return true;
135 }
136 return false;
137 } 136 }
138 137
139 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, 138 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
140 HeapObject* to) { 139 HeapObject* to) {
141 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); 140 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
142 // This is only used when resizing an object. 141 // This is only used when resizing an object.
143 DCHECK(MemoryChunk::FromAddress(from->address()) == 142 DCHECK(MemoryChunk::FromAddress(from->address()) ==
144 MemoryChunk::FromAddress(to->address())); 143 MemoryChunk::FromAddress(to->address()));
145 144
146 if (!IsMarking()) return; 145 if (!IsMarking()) return;
147 146
148 // If the mark doesn't move, we don't check the color of the object. 147 // If the mark doesn't move, we don't check the color of the object.
149 // It doesn't matter whether the object is black, since it hasn't changed 148 // It doesn't matter whether the object is black, since it hasn't changed
150 // size, so the adjustment to the live data count will be zero anyway. 149 // size, so the adjustment to the live data count will be zero anyway.
151 if (from == to) return; 150 if (from == to) return;
152 151
153 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); 152 MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
154 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); 153 MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));
155 154
156 if (Marking::IsBlack(old_mark_bit)) { 155 if (Marking::IsBlack(old_mark_bit)) {
157 if (from->address() + kPointerSize == to->address()) { 156 Marking::MarkBlack(new_mark_bit);
158 // The old and the new markbits overlap. The |to| object has the
159 // grey color. To make it black, we need to set second bit.
160 DCHECK(new_mark_bit.Get());
161 new_mark_bit.Next().Set();
162 } else {
163 bool success = Marking::WhiteToBlack(new_mark_bit);
164 DCHECK(success);
165 USE(success);
166 }
167 } else if (Marking::IsGrey(old_mark_bit)) { 157 } else if (Marking::IsGrey(old_mark_bit)) {
168 if (from->address() + kPointerSize == to->address()) { 158 Marking::WhiteToGrey(new_mark_bit);
169 // The old and the new markbits overlap. The |to| object has the
170 // white color. To make it black, we need to set both bits.
171 // Note that Marking::WhiteToGrey does not work here because
172 // old_mark_bit.Next() can be set by the concurrent marker at any time.
173 new_mark_bit.Set();
174 new_mark_bit.Next().Set();
175 } else {
176 bool success = Marking::WhiteToGrey(new_mark_bit);
177 DCHECK(success);
178 USE(success);
179 }
180 marking_deque()->Push(to); 159 marking_deque()->Push(to);
181 RestartIfNotMarking(); 160 RestartIfNotMarking();
182 } 161 }
183 } 162 }
184 163
185 class IncrementalMarkingMarkingVisitor 164 class IncrementalMarkingMarkingVisitor
186 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { 165 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
187 public: 166 public:
188 static void Initialize() { 167 static void Initialize() {
189 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); 168 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
241 // We will mark cache black with a separate pass when we finish marking. 220 // We will mark cache black with a separate pass when we finish marking.
242 // Note that GC can happen when the context is not fully initialized, 221 // Note that GC can happen when the context is not fully initialized,
243 // so the cache can be undefined. 222 // so the cache can be undefined.
244 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX); 223 Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
245 if (!cache->IsUndefined(map->GetIsolate())) { 224 if (!cache->IsUndefined(map->GetIsolate())) {
246 if (cache->IsHeapObject()) { 225 if (cache->IsHeapObject()) {
247 HeapObject* heap_obj = HeapObject::cast(cache); 226 HeapObject* heap_obj = HeapObject::cast(cache);
248 // Mark the object grey if it is white, do not enqueue it into the marking 227 // Mark the object grey if it is white, do not enqueue it into the marking
249 // deque. 228 // deque.
250 Heap* heap = map->GetHeap(); 229 Heap* heap = map->GetHeap();
251 bool ignored = ObjectMarking::WhiteToGrey( 230 if (ObjectMarking::IsWhite(
252 heap_obj, heap->incremental_marking()->marking_state(heap_obj)); 231 heap_obj,
253 USE(ignored); 232 heap->incremental_marking()->marking_state(heap_obj))) {
233 ObjectMarking::WhiteToGrey(
234 heap_obj, heap->incremental_marking()->marking_state(heap_obj));
235 }
254 } 236 }
255 } 237 }
256 VisitNativeContext(map, context); 238 VisitNativeContext(map, context);
257 } 239 }
258 240
259 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) { 241 INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
260 Object* target = *p; 242 Object* target = *p;
261 if (target->IsHeapObject()) { 243 if (target->IsHeapObject()) {
262 heap->mark_compact_collector()->RecordSlot(object, p, target); 244 heap->mark_compact_collector()->RecordSlot(object, p, target);
263 MarkObject(heap, target); 245 MarkObject(heap, target);
264 } 246 }
265 } 247 }
266 248
267 INLINE(static void VisitPointers(Heap* heap, HeapObject* object, 249 INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
268 Object** start, Object** end)) { 250 Object** start, Object** end)) {
269 for (Object** p = start; p < end; p++) { 251 for (Object** p = start; p < end; p++) {
270 Object* target = *p; 252 Object* target = *p;
271 if (target->IsHeapObject()) { 253 if (target->IsHeapObject()) {
272 heap->mark_compact_collector()->RecordSlot(object, p, target); 254 heap->mark_compact_collector()->RecordSlot(object, p, target);
273 MarkObject(heap, target); 255 MarkObject(heap, target);
274 } 256 }
275 } 257 }
276 } 258 }
277 259
278 // Marks the object grey and pushes it on the marking stack. 260 // Marks the object grey and pushes it on the marking stack.
279 INLINE(static void MarkObject(Heap* heap, Object* obj)) { 261 INLINE(static void MarkObject(Heap* heap, Object* obj)) {
280 heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); 262 heap->incremental_marking()->MarkGrey(HeapObject::cast(obj));
281 } 263 }
282 264
283 // Marks the object black without pushing it on the marking stack. 265 // Marks the object black without pushing it on the marking stack.
284 // Returns true if object needed marking and false otherwise. 266 // Returns true if object needed marking and false otherwise.
285 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { 267 INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
286 HeapObject* heap_object = HeapObject::cast(obj); 268 HeapObject* heap_object = HeapObject::cast(obj);
287 return ObjectMarking::WhiteToBlack( 269 if (ObjectMarking::IsWhite(
288 heap_object, heap->incremental_marking()->marking_state(heap_object)); 270 heap_object,
271 heap->incremental_marking()->marking_state(heap_object))) {
272 ObjectMarking::WhiteToBlack(
273 heap_object, heap->incremental_marking()->marking_state(heap_object));
274 return true;
275 }
276 return false;
289 } 277 }
290 }; 278 };
291 279
292 void IncrementalMarking::IterateBlackObject(HeapObject* object) { 280 void IncrementalMarking::IterateBlackObject(HeapObject* object) {
293 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { 281 if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) {
294 Page* page = Page::FromAddress(object->address()); 282 Page* page = Page::FromAddress(object->address());
295 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { 283 if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
296 // IterateBlackObject requires us to visit the whole object. 284 // IterateBlackObject requires us to visit the whole object.
297 page->ResetProgressBar(); 285 page->ResetProgressBar();
298 } 286 }
299 Map* map = object->map(); 287 Map* map = object->map();
300 WhiteToGreyAndPush(map); 288 MarkGrey(map);
301 IncrementalMarkingMarkingVisitor::IterateBody(map, object); 289 IncrementalMarkingMarkingVisitor::IterateBody(map, object);
302 } 290 }
303 } 291 }
304 292
305 class IncrementalMarkingRootMarkingVisitor : public RootVisitor { 293 class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
306 public: 294 public:
307 explicit IncrementalMarkingRootMarkingVisitor( 295 explicit IncrementalMarkingRootMarkingVisitor(
308 IncrementalMarking* incremental_marking) 296 IncrementalMarking* incremental_marking)
309 : heap_(incremental_marking->heap()) {} 297 : heap_(incremental_marking->heap()) {}
310 298
311 void VisitRootPointer(Root root, Object** p) override { 299 void VisitRootPointer(Root root, Object** p) override {
312 MarkObjectByPointer(p); 300 MarkObjectByPointer(p);
313 } 301 }
314 302
315 void VisitRootPointers(Root root, Object** start, Object** end) override { 303 void VisitRootPointers(Root root, Object** start, Object** end) override {
316 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); 304 for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
317 } 305 }
318 306
319 private: 307 private:
320 void MarkObjectByPointer(Object** p) { 308 void MarkObjectByPointer(Object** p) {
321 Object* obj = *p; 309 Object* obj = *p;
322 if (!obj->IsHeapObject()) return; 310 if (!obj->IsHeapObject()) return;
323 311
324 heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj)); 312 heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj));
325 } 313 }
326 314
327 Heap* heap_; 315 Heap* heap_;
328 }; 316 };
329 317
330 318
331 void IncrementalMarking::Initialize() { 319 void IncrementalMarking::Initialize() {
332 IncrementalMarkingMarkingVisitor::Initialize(); 320 IncrementalMarkingMarkingVisitor::Initialize();
333 } 321 }
334 322
(...skipping 370 matching lines...) Expand 10 before | Expand all | Expand 10 after
705 for (int i = 0; i < length; i += 2) { 693 for (int i = 0; i < length; i += 2) {
706 DCHECK(retained_maps->Get(i)->IsWeakCell()); 694 DCHECK(retained_maps->Get(i)->IsWeakCell());
707 WeakCell* cell = WeakCell::cast(retained_maps->Get(i)); 695 WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
708 if (cell->cleared()) continue; 696 if (cell->cleared()) continue;
709 int age = Smi::cast(retained_maps->Get(i + 1))->value(); 697 int age = Smi::cast(retained_maps->Get(i + 1))->value();
710 int new_age; 698 int new_age;
711 Map* map = Map::cast(cell->value()); 699 Map* map = Map::cast(cell->value());
712 if (i >= number_of_disposed_maps && !map_retaining_is_disabled && 700 if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
713 ObjectMarking::IsWhite(map, marking_state(map))) { 701 ObjectMarking::IsWhite(map, marking_state(map))) {
714 if (ShouldRetainMap(map, age)) { 702 if (ShouldRetainMap(map, age)) {
715 WhiteToGreyAndPush(map); 703 MarkGrey(map);
716 } 704 }
717 Object* prototype = map->prototype(); 705 Object* prototype = map->prototype();
718 if (age > 0 && prototype->IsHeapObject() && 706 if (age > 0 && prototype->IsHeapObject() &&
719 ObjectMarking::IsWhite(HeapObject::cast(prototype), 707 ObjectMarking::IsWhite(HeapObject::cast(prototype),
720 marking_state(HeapObject::cast(prototype)))) { 708 marking_state(HeapObject::cast(prototype)))) {
721 // The prototype is not marked, age the map. 709 // The prototype is not marked, age the map.
722 new_age = age - 1; 710 new_age = age - 1;
723 } else { 711 } else {
724 // The prototype and the constructor are marked, this map keeps only 712 // The prototype and the constructor are marked, this map keeps only
725 // transition tree alive, not JSObjects. Do not age the map. 713 // transition tree alive, not JSObjects. Do not age the map.
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after
825 ObjectMarking::IsBlack(obj, marking_state(obj)))); 813 ObjectMarking::IsBlack(obj, marking_state(obj))));
826 // Skip one word filler objects that appear on the 814 // Skip one word filler objects that appear on the
827 // stack when we perform in place array shift. 815 // stack when we perform in place array shift.
828 return (obj->map() == filler_map) ? nullptr : obj; 816 return (obj->map() == filler_map) ? nullptr : obj;
829 } 817 }
830 }); 818 });
831 } 819 }
832 820
833 821
834 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { 822 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
835 WhiteToGreyAndPush(map); 823 MarkGrey(map);
836 824
837 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); 825 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
838 826
839 #if ENABLE_SLOW_DCHECKS 827 #if ENABLE_SLOW_DCHECKS
840 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); 828 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
841 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); 829 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
842 SLOW_DCHECK(Marking::IsGrey(mark_bit) || 830 SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
843 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && 831 (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
844 Marking::IsBlack(mark_bit))); 832 Marking::IsBlack(mark_bit)));
845 #endif 833 #endif
834 MarkBlack(obj, size);
835 }
836
837 void IncrementalMarking::MarkGrey(HeapObject* object) {
838 if (ObjectMarking::IsWhite(object, marking_state(object))) {
839 WhiteToGreyAndPush(object);
840 }
841 }
842
843 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
844 if (ObjectMarking::IsBlack(obj, marking_state(obj))) return;
846 ObjectMarking::GreyToBlack(obj, marking_state(obj)); 845 ObjectMarking::GreyToBlack(obj, marking_state(obj));
847 } 846 }
848 847
849 intptr_t IncrementalMarking::ProcessMarkingDeque( 848 intptr_t IncrementalMarking::ProcessMarkingDeque(
850 intptr_t bytes_to_process, ForceCompletionAction completion) { 849 intptr_t bytes_to_process, ForceCompletionAction completion) {
851 intptr_t bytes_processed = 0; 850 intptr_t bytes_processed = 0;
852 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process || 851 while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process ||
853 completion == FORCE_COMPLETION)) { 852 completion == FORCE_COMPLETION)) {
854 HeapObject* obj = marking_deque()->Pop(); 853 HeapObject* obj = marking_deque()->Pop();
855 854
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
904 } 903 }
905 } 904 }
906 905
907 Object* context = heap_->native_contexts_list(); 906 Object* context = heap_->native_contexts_list();
908 while (!context->IsUndefined(heap_->isolate())) { 907 while (!context->IsUndefined(heap_->isolate())) {
909 // GC can happen when the context is not fully initialized, 908 // GC can happen when the context is not fully initialized,
910 // so the cache can be undefined. 909 // so the cache can be undefined.
911 HeapObject* cache = HeapObject::cast( 910 HeapObject* cache = HeapObject::cast(
912 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); 911 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
913 if (!cache->IsUndefined(heap_->isolate())) { 912 if (!cache->IsUndefined(heap_->isolate())) {
914 // Mark the cache black if it is grey. 913 if (ObjectMarking::IsGrey(cache, marking_state(cache))) {
915 bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); 914 ObjectMarking::GreyToBlack(cache, marking_state(cache));
916 USE(ignored); 915 }
917 } 916 }
918 context = Context::cast(context)->next_context_link(); 917 context = Context::cast(context)->next_context_link();
919 } 918 }
920 } 919 }
921 920
922 921
923 void IncrementalMarking::Stop() { 922 void IncrementalMarking::Stop() {
924 if (IsStopped()) return; 923 if (IsStopped()) return;
925 if (FLAG_trace_incremental_marking) { 924 if (FLAG_trace_incremental_marking) {
926 int old_generation_size_mb = 925 int old_generation_size_mb =
(...skipping 259 matching lines...) Expand 10 before | Expand all | Expand 10 after
1186 idle_marking_delay_counter_++; 1185 idle_marking_delay_counter_++;
1187 } 1186 }
1188 1187
1189 1188
1190 void IncrementalMarking::ClearIdleMarkingDelayCounter() { 1189 void IncrementalMarking::ClearIdleMarkingDelayCounter() {
1191 idle_marking_delay_counter_ = 0; 1190 idle_marking_delay_counter_ = 0;
1192 } 1191 }
1193 1192
1194 } // namespace internal 1193 } // namespace internal
1195 } // namespace v8 1194 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/incremental-marking.h ('k') | src/heap/mark-compact.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698