Chromium Code Reviews

Unified Diff: src/heap/incremental-marking.cc

Issue 2847953002: [heap] Cleanup: Untangle marking state and deque in incremental marking (Closed)
Patch Set: Remove unused parameter (created 3 years, 7 months ago)
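
This change moves ownership of the marking deque and the marking-state lookup from MarkCompactCollector into IncrementalMarking: call sites below stop reaching through heap_->mark_compact_collector() and instead use marking_deque() and marking_state() accessors on the incremental marker itself, with a marking_deque_ member (initialized to nullptr) added to the constructor. The header side of the patch (src/heap/incremental-marking.h) is not shown on this page, so the following is only a sketch of the shape those accessors presumably take, inferred from the call sites in the diff; the member names and the delegation to MarkingState::Internal are assumptions, not confirmed by this review.

// Hypothetical sketch, inferred from call sites below; not the actual header.
class IncrementalMarking {
 public:
  // Grey-object worklist, now reached via the incremental marker rather than
  // via heap_->mark_compact_collector()->marking_deque().
  MarkingDeque* marking_deque() { return marking_deque_; }

  // Marking state for |object|; assumed to delegate to the old static helper.
  MarkingState marking_state(HeapObject* object) const {
    return MarkingState::Internal(object);  // assumption
  }

 private:
  MarkingDeque* marking_deque_;  // nullptr until wired up elsewhere
};

One visible knock-on effect in the diff: MarkGrey(Heap* heap, HeapObject* object) becomes the instance method MarkGrey(HeapObject* object), which is presumably the "unused parameter" this patch set's title refers to.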
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/incremental-marking.h"

 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/heap/concurrent-marking.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/heap-inl.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
 #include "src/tracing/trace-event.h"
 #include "src/v8.h"
 #include "src/visitors.h"

 namespace v8 {
 namespace internal {

 IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
+      marking_deque_(nullptr),
       initial_old_generation_size_(0),
       bytes_marked_ahead_of_schedule_(0),
       unscanned_bytes_of_large_object_(0),
       state_(STOPPED),
       idle_marking_delay_counter_(0),
       incremental_marking_finalization_rounds_(0),
       is_compacting_(false),
       should_hurry_(false),
       was_activated_(false),
       black_allocation_(false),
       finalize_marking_completed_(false),
       trace_wrappers_toggle_(false),
       request_type_(NONE),
       new_generation_observer_(*this, kAllocatedThreshold),
       old_generation_observer_(*this, kAllocatedThreshold) {}

 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   HeapObject* value_heap_obj = HeapObject::cast(value);
   DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
-                                      MarkingState::Internal(value_heap_obj)));
-  DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
-  const bool is_black =
-      ObjectMarking::IsBlack(obj, MarkingState::Internal(obj));
+                                      marking_state(value_heap_obj)));
+  DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
+  const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));

-  if (is_black && ObjectMarking::IsWhite(
-                      value_heap_obj, MarkingState::Internal(value_heap_obj))) {
+  if (is_black &&
+      ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) {
     WhiteToGreyAndPush(value_heap_obj);
     RestartIfNotMarking();
   }
   return is_compacting_ && is_black;
 }


 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
                                          Object* value) {
   if (BaseRecordWrite(obj, value) && slot != NULL) {
(...skipping 52 matching lines...)

 void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
                                                  Object* value) {
   if (BaseRecordWrite(host, value)) {
     // Object is not going to be rescanned. We need to record the slot.
     heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, value);
   }
 }

 void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
-  ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));
-  heap_->mark_compact_collector()->marking_deque()->Push(obj);
+  ObjectMarking::WhiteToGrey(obj, marking_state(obj));
+  marking_deque()->Push(obj);
 }

 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
                                       HeapObject* to) {
   DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
   // This is only used when resizing an object.
   DCHECK(MemoryChunk::FromAddress(from->address()) ==
          MemoryChunk::FromAddress(to->address()));

-  if (!heap->incremental_marking()->IsMarking()) return;
+  if (!IsMarking()) return;

   // If the mark doesn't move, we don't check the color of the object.
   // It doesn't matter whether the object is black, since it hasn't changed
   // size, so the adjustment to the live data count will be zero anyway.
   if (from == to) return;

-  MarkBit new_mark_bit =
-      ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to));
-  MarkBit old_mark_bit =
-      ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from));
+  MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
+  MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));

   if (Marking::IsBlack(old_mark_bit)) {
     Marking::MarkBlack(new_mark_bit);
   } else if (Marking::IsGrey(old_mark_bit)) {
     Marking::WhiteToGrey(new_mark_bit);
-    heap->mark_compact_collector()->marking_deque()->Push(to);
-    heap->incremental_marking()->RestartIfNotMarking();
+    marking_deque()->Push(to);
+    RestartIfNotMarking();
   }
 }

 class IncrementalMarkingMarkingVisitor
     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
  public:
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
     table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
     table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
(...skipping 16 matching lines...)
           Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
       int end_offset =
           Min(object_size, start_offset + kProgressBarScanningChunk);
       int already_scanned_offset = start_offset;
       bool scan_until_end = false;
       do {
         VisitPointers(heap, object, HeapObject::RawField(object, start_offset),
                       HeapObject::RawField(object, end_offset));
         start_offset = end_offset;
         end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
-        scan_until_end =
-            heap->mark_compact_collector()->marking_deque()->IsFull();
+        scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
       } while (scan_until_end && start_offset < object_size);
       chunk->set_progress_bar(start_offset);
       if (start_offset < object_size) {
-        if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
-          heap->mark_compact_collector()->marking_deque()->Unshift(object);
+        if (ObjectMarking::IsGrey(
+                object, heap->incremental_marking()->marking_state(object))) {
+          heap->incremental_marking()->marking_deque()->Unshift(object);
         } else {
-          DCHECK(
-              ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
+          DCHECK(ObjectMarking::IsBlack(
+              object, heap->incremental_marking()->marking_state(object)));
           heap->mark_compact_collector()->UnshiftBlack(object);
         }
         heap->incremental_marking()->NotifyIncompleteScanOfObject(
             object_size - (start_offset - already_scanned_offset));
       }
     } else {
       FixedArrayVisitor::Visit(map, object);
     }
   }

   static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
     Context* context = Context::cast(object);

     // We will mark cache black with a separate pass when we finish marking.
     // Note that GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
     if (!cache->IsUndefined(map->GetIsolate())) {
       if (cache->IsHeapObject()) {
         HeapObject* heap_obj = HeapObject::cast(cache);
         // Mark the object grey if it is white, do not enque it into the marking
         // deque.
-        if (ObjectMarking::IsWhite(heap_obj,
-                                   MarkingState::Internal(heap_obj))) {
-          ObjectMarking::WhiteToGrey(heap_obj,
-                                     MarkingState::Internal(heap_obj));
+        Heap* heap = map->GetHeap();
+        if (ObjectMarking::IsWhite(
+                heap_obj,
+                heap->incremental_marking()->marking_state(heap_obj))) {
+          ObjectMarking::WhiteToGrey(
+              heap_obj, heap->incremental_marking()->marking_state(heap_obj));
         }
       }
     }
     VisitNativeContext(map, context);
   }

   INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
     Object* target = *p;
     if (target->IsHeapObject()) {
       heap->mark_compact_collector()->RecordSlot(object, p, target);
       MarkObject(heap, target);
     }
   }

   INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
                                    Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) {
       Object* target = *p;
       if (target->IsHeapObject()) {
         heap->mark_compact_collector()->RecordSlot(object, p, target);
         MarkObject(heap, target);
       }
     }
   }

   // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
-    IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj));
+    heap->incremental_marking()->MarkGrey(HeapObject::cast(obj));
   }

   // Marks the object black without pushing it on the marking stack.
   // Returns true if object needed marking and false otherwise.
   INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
-    if (ObjectMarking::IsWhite(heap_object,
-                               MarkingState::Internal(heap_object))) {
-      ObjectMarking::WhiteToBlack(heap_object,
-                                  MarkingState::Internal(heap_object));
+    if (ObjectMarking::IsWhite(
+            heap_object,
+            heap->incremental_marking()->marking_state(heap_object))) {
+      ObjectMarking::WhiteToBlack(
+          heap_object, heap->incremental_marking()->marking_state(heap_object));
       return true;
     }
     return false;
   }
 };

 void IncrementalMarking::IterateBlackObject(HeapObject* object) {
-  if (IsMarking() &&
-      ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
+  if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) {
     Page* page = Page::FromAddress(object->address());
     if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
       // IterateBlackObject requires us to visit the whole object.
       page->ResetProgressBar();
     }
     Map* map = object->map();
-    MarkGrey(heap_, map);
+    MarkGrey(map);
     IncrementalMarkingMarkingVisitor::IterateBody(map, object);
   }
 }

 class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
  public:
   explicit IncrementalMarkingRootMarkingVisitor(
       IncrementalMarking* incremental_marking)
       : heap_(incremental_marking->heap()) {}

   void VisitRootPointer(Root root, Object** p) override {
     MarkObjectByPointer(p);
   }

   void VisitRootPointers(Root root, Object** start, Object** end) override {
     for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
   }

  private:
   void MarkObjectByPointer(Object** p) {
     Object* obj = *p;
     if (!obj->IsHeapObject()) return;

-    IncrementalMarking::MarkGrey(heap_, HeapObject::cast(obj));
+    heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj));
   }

   Heap* heap_;
 };


 void IncrementalMarking::Initialize() {
   IncrementalMarkingMarkingVisitor::Initialize();
 }

(...skipping 210 matching lines...)
                GCTracer::Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE);
     heap_->local_embedder_heap_tracer()->TracePrologue();
   }

   RecordWriteStub::Mode mode = is_compacting_
                                    ? RecordWriteStub::INCREMENTAL_COMPACTION
                                    : RecordWriteStub::INCREMENTAL;

   PatchIncrementalMarkingRecordWriteStubs(heap_, mode);

-  heap_->mark_compact_collector()->marking_deque()->StartUsing();
+  marking_deque()->StartUsing();

   ActivateIncrementalWriteBarrier();

 // Marking bits are cleared by the sweeper.
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
   }
 #endif

   heap_->CompletelyClearInstanceofCache();
   heap_->isolate()->compilation_cache()->MarkCompactPrologue();

   // Mark strong roots grey.
   IncrementalMarkingRootMarkingVisitor visitor(this);
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);

   if (FLAG_concurrent_marking) {
     ConcurrentMarking* concurrent_marking = heap_->concurrent_marking();
-    heap_->mark_compact_collector()->marking_deque()->Iterate(
-        [concurrent_marking](HeapObject* obj) {
-          concurrent_marking->AddRoot(obj);
-        });
+    marking_deque()->Iterate([concurrent_marking](HeapObject* obj) {
+      concurrent_marking->AddRoot(obj);
+    });
     concurrent_marking->StartTask();
   }

   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
     heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
   }
 }

 void IncrementalMarking::StartBlackAllocation() {
(...skipping 42 matching lines...)
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   Object* weak_cell_head = Smi::kZero;
   WeakCell* prev_weak_cell_obj = NULL;
   while (weak_cell_obj != Smi::kZero) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     // We do not insert cleared weak cells into the list, so the value
     // cannot be a Smi here.
     HeapObject* value = HeapObject::cast(weak_cell->value());
     // Remove weak cells with live objects from the list, they do not need
     // clearing.
-    if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
+    if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) {
       // Record slot, if value is pointing to an evacuation candidate.
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
       heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
       // Remove entry somewhere after top.
       if (prev_weak_cell_obj != NULL) {
         prev_weak_cell_obj->set_next(weak_cell->next());
       }
       weak_cell_obj = weak_cell->next();
       weak_cell->clear_next(the_hole_value);
     } else {
       if (weak_cell_head == Smi::kZero) {
         weak_cell_head = weak_cell;
       }
       prev_weak_cell_obj = weak_cell;
       weak_cell_obj = weak_cell->next();
     }
   }
   // Top may have changed.
   heap()->set_encountered_weak_cells(weak_cell_head);
 }


 bool ShouldRetainMap(Map* map, int age) {
   if (age == 0) {
     // The map has aged. Do not retain this map.
     return false;
   }
   Object* constructor = map->GetConstructor();
+  Heap* heap = map->GetHeap();
   if (!constructor->IsHeapObject() ||
-      ObjectMarking::IsWhite(
-          HeapObject::cast(constructor),
-          MarkingState::Internal(HeapObject::cast(constructor)))) {
+      ObjectMarking::IsWhite(HeapObject::cast(constructor),
+                             heap->incremental_marking()->marking_state(
+                                 HeapObject::cast(constructor)))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
   }
   return true;
 }


 void IncrementalMarking::RetainMaps() {
   // Do not retain dead maps if flag disables it or there is
   // - memory pressure (reduce_memory_footprint_),
   // - GC is requested by tests or dev-tools (abort_incremental_marking_).
   bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
                                    heap()->ShouldAbortIncrementalMarking() ||
                                    FLAG_retain_maps_for_n_gc == 0;
   ArrayList* retained_maps = heap()->retained_maps();
   int length = retained_maps->Length();
   // The number_of_disposed_maps separates maps in the retained_maps
   // array that were created before and after context disposal.
   // We do not age and retain disposed maps to avoid memory leaks.
   int number_of_disposed_maps = heap()->number_of_disposed_maps_;
   for (int i = 0; i < length; i += 2) {
     DCHECK(retained_maps->Get(i)->IsWeakCell());
     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
     if (cell->cleared()) continue;
     int age = Smi::cast(retained_maps->Get(i + 1))->value();
     int new_age;
     Map* map = Map::cast(cell->value());
     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
-        ObjectMarking::IsWhite(map, MarkingState::Internal(map))) {
+        ObjectMarking::IsWhite(map, marking_state(map))) {
       if (ShouldRetainMap(map, age)) {
-        MarkGrey(heap(), map);
+        MarkGrey(map);
       }
       Object* prototype = map->prototype();
       if (age > 0 && prototype->IsHeapObject() &&
-          ObjectMarking::IsWhite(
-              HeapObject::cast(prototype),
-              MarkingState::Internal(HeapObject::cast(prototype)))) {
+          ObjectMarking::IsWhite(HeapObject::cast(prototype),
+                                 marking_state(HeapObject::cast(prototype)))) {
         // The prototype is not marked, age the map.
         new_age = age - 1;
       } else {
         // The prototype and the constructor are marked, this map keeps only
         // transition tree alive, not JSObjects. Do not age the map.
         new_age = age;
       }
     } else {
       new_age = FLAG_retain_maps_for_n_gc;
     }
     // Compact the array and update the age.
     if (new_age != age) {
       retained_maps->Set(i + 1, Smi::FromInt(new_age));
     }
   }
 }

 void IncrementalMarking::FinalizeIncrementally() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
   DCHECK(!finalize_marking_completed_);
   DCHECK(IsMarking());

   double start = heap_->MonotonicallyIncreasingTimeInMs();

-  int old_marking_deque_top =
-      heap_->mark_compact_collector()->marking_deque()->top();
+  int old_marking_deque_top = marking_deque()->top();

   // After finishing incremental marking, we try to discover all unmarked
   // objects to reduce the marking load in the final pause.
   // 1) We scan and mark the roots again to find all changes to the root set.
   // 2) Age and retain maps embedded in optimized code.
   // 3) Remove weak cell with live values from the list of weak cells, they
   // do not need processing during GC.
   MarkRoots();

   if (incremental_marking_finalization_rounds_ == 0) {
     // Map retaining is needed for perfromance, not correctness,
     // so we can do it only once at the beginning of the finalization.
     RetainMaps();
   }
   ProcessWeakCells();

-  int marking_progress =
-      abs(old_marking_deque_top -
-          heap_->mark_compact_collector()->marking_deque()->top());
+  int marking_progress = abs(old_marking_deque_top - marking_deque()->top());

   marking_progress += static_cast<int>(
       heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());

   double end = heap_->MonotonicallyIncreasingTimeInMs();
   double delta = end - start;
   if (FLAG_trace_incremental_marking) {
     heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] Finalize incrementally round %d, "
         "spent %d ms, marking progress %d.\n",
(...skipping 14 matching lines...)
     // TODO(hpayer): Move to an earlier point as soon as we make faster marking
     // progress.
     StartBlackAllocation();
   }
 }


 void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
   if (!IsMarking()) return;

-  MarkingDeque* marking_deque =
-      heap_->mark_compact_collector()->marking_deque();
-  int current = marking_deque->bottom();
-  int mask = marking_deque->mask();
-  int limit = marking_deque->top();
-  HeapObject** array = marking_deque->array();
+  int current = marking_deque()->bottom();
+  int mask = marking_deque()->mask();
+  int limit = marking_deque()->top();
+  HeapObject** array = marking_deque()->array();
   int new_top = current;

   Map* filler_map = heap_->one_pointer_filler_map();

   while (current != limit) {
     HeapObject* obj = array[current];
     DCHECK(obj->IsHeapObject());
     current = ((current + 1) & mask);
     // Only pointers to from space have to be updated.
     if (heap_->InFromSpace(obj)) {
       MapWord map_word = obj->map_word();
       // There may be objects on the marking deque that do not exist anymore,
       // e.g. left trimmed objects or objects from the root set (frames).
       // If these object are dead at scavenging time, their marking deque
       // entries will not point to forwarding addresses. Hence, we can discard
       // them.
       if (map_word.IsForwardingAddress()) {
         HeapObject* dest = map_word.ToForwardingAddress();
-        if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest)))
-          continue;
+        if (ObjectMarking::IsBlack(dest, marking_state(dest))) continue;
         array[new_top] = dest;
         new_top = ((new_top + 1) & mask);
-        DCHECK(new_top != marking_deque->bottom());
-        DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
+        DCHECK(new_top != marking_deque()->bottom());
+        DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) ||
                (obj->IsFiller() &&
-                ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))));
+                ObjectMarking::IsWhite(obj, marking_state(obj))));
       }
     } else if (obj->map() != filler_map) {
       // Skip one word filler objects that appear on the
       // stack when we perform in place array shift.
       array[new_top] = obj;
       new_top = ((new_top + 1) & mask);
-      DCHECK(new_top != marking_deque->bottom());
-      DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
+      DCHECK(new_top != marking_deque()->bottom());
+      DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) ||
              (obj->IsFiller() &&
-              ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) ||
+              ObjectMarking::IsWhite(obj, marking_state(obj))) ||
              (MemoryChunk::FromAddress(obj->address())
                   ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-              ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))));
+              ObjectMarking::IsBlack(obj, marking_state(obj))));
     }
   }
-  marking_deque->set_top(new_top);
+  marking_deque()->set_top(new_top);
 }


 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
-  MarkGrey(heap_, map);
+  MarkGrey(map);

   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);

 #if ENABLE_SLOW_DCHECKS
-  MarkBit mark_bit =
-      ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
+  MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
   SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
                Marking::IsBlack(mark_bit)));
 #endif
   MarkBlack(obj, size);
 }

-void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
-  if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
-    heap->incremental_marking()->WhiteToGreyAndPush(object);
+void IncrementalMarking::MarkGrey(HeapObject* object) {
+  if (ObjectMarking::IsWhite(object, marking_state(object))) {
+    WhiteToGreyAndPush(object);
   }
 }

 void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
-  if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return;
-  ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj));
+  if (ObjectMarking::IsBlack(obj, marking_state(obj))) return;
+  ObjectMarking::GreyToBlack(obj, marking_state(obj));
 }

 intptr_t IncrementalMarking::ProcessMarkingDeque(
     intptr_t bytes_to_process, ForceCompletionAction completion) {
   intptr_t bytes_processed = 0;
-  MarkingDeque* marking_deque =
-      heap_->mark_compact_collector()->marking_deque();
-  while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process ||
-                                       completion == FORCE_COMPLETION)) {
-    HeapObject* obj = marking_deque->Pop();
+  while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process ||
+                                         completion == FORCE_COMPLETION)) {
+    HeapObject* obj = marking_deque()->Pop();

     // Left trimming may result in white, grey, or black filler objects on the
     // marking deque. Ignore these objects.
     if (obj->IsFiller()) {
-      DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
+      DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
       continue;
     }

     Map* map = obj->map();
     int size = obj->SizeFromMap(map);
     unscanned_bytes_of_large_object_ = 0;
     VisitObject(map, obj, size);
     bytes_processed += size - unscanned_bytes_of_large_object_;
   }
   // Report all found wrappers to the embedder. This is necessary as the
   // embedder could potentially invalidate wrappers as soon as V8 is done
   // with its incremental marking processing. Any cached wrappers could
   // result in broken pointers at this point.
   heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
   return bytes_processed;
 }


 void IncrementalMarking::Hurry() {
   // A scavenge may have pushed new objects on the marking deque (due to black
   // allocation) even in COMPLETE state. This may happen if scavenges are
   // forced e.g. in tests. It should not happen when COMPLETE was set when
   // incremental marking finished and a regular GC was triggered after that
   // because should_hurry_ will force a full GC.
-  if (!heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
+  if (!marking_deque()->IsEmpty()) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking) {
       start = heap_->MonotonicallyIncreasingTimeInMs();
       if (FLAG_trace_incremental_marking) {
         heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
       }
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
     ProcessMarkingDeque(0, FORCE_COMPLETION);
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking) {
       double end = heap_->MonotonicallyIncreasingTimeInMs();
       double delta = end - start;
       if (FLAG_trace_incremental_marking) {
         heap()->isolate()->PrintWithTimestamp(
             "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
             static_cast<int>(delta));
       }
     }
   }

   Object* context = heap_->native_contexts_list();
   while (!context->IsUndefined(heap_->isolate())) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
     HeapObject* cache = HeapObject::cast(
         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
     if (!cache->IsUndefined(heap_->isolate())) {
-      if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) {
-        ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache));
+      if (ObjectMarking::IsGrey(cache, marking_state(cache))) {
+        ObjectMarking::GreyToBlack(cache, marking_state(cache));
       }
     }
     context = Context::cast(context)->next_context_link();
   }
 }


 void IncrementalMarking::Stop() {
   if (IsStopped()) return;
   if (FLAG_trace_incremental_marking) {
(...skipping 109 matching lines...)
                                      DO_NOT_FORCE_COMPLETION));
       }
     } else {
       Step(step_size_in_bytes, completion_action, force_completion,
            step_origin);
     }
     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
     remaining_time_in_ms =
         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
   } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
-           !heap()->mark_compact_collector()->marking_deque()->IsEmpty());
+           !marking_deque()->IsEmpty());
   return remaining_time_in_ms;
 }


 void IncrementalMarking::FinalizeSweeping() {
   DCHECK(state_ == SWEEPING);
   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
       (!FLAG_concurrent_sweeping ||
        !heap_->mark_compact_collector()->sweeper().AreSweeperTasksRunning())) {
     heap_->mark_compact_collector()->EnsureSweepingCompleted();
(...skipping 79 matching lines...)
     FinalizeSweeping();
   }

   size_t bytes_processed = 0;
   if (state_ == MARKING) {
     bytes_processed = ProcessMarkingDeque(bytes_to_process);
     if (step_origin == StepOrigin::kTask) {
       bytes_marked_ahead_of_schedule_ += bytes_processed;
     }

-    if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
+    if (marking_deque()->IsEmpty()) {
       if (heap_->local_embedder_heap_tracer()
               ->ShouldFinalizeIncrementalMarking()) {
         if (completion == FORCE_COMPLETION ||
             IsIdleMarkingDelayCounterLimitReached()) {
           if (!finalize_marking_completed_) {
             FinalizeMarking(action);
           } else {
             MarkingComplete(action);
           }
         } else {
(...skipping 30 matching lines...)
   idle_marking_delay_counter_++;
 }


 void IncrementalMarking::ClearIdleMarkingDelayCounter() {
   idle_marking_delay_counter_ = 0;
 }

 }  // namespace internal
 }  // namespace v8
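
The net effect at the call sites shown above is purely mechanical: access to the deque and the marking state is uniformly routed through IncrementalMarking. For example (lines taken directly from the hunks in this diff):

// Before this patch:
heap_->mark_compact_collector()->marking_deque()->Push(obj);
ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));

// After this patch:
marking_deque()->Push(obj);
ObjectMarking::WhiteToGrey(obj, marking_state(obj));

Visitors that only have a Heap* in hand reach the same state via heap->incremental_marking()->marking_state(object), so both paths end at a single owner.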