Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 1735523002: Reland "Replace slots buffer with remembered set. (patchset #14 id:250001 of https://codereview.chr… (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: one more int->size_t (created 4 years, 10 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/deoptimizer.h"
 #include "src/execution.h"
 #include "src/frames-inl.h"
 #include "src/gdb-jit.h"
 #include "src/global-handles.h"
 #include "src/heap/array-buffer-tracker.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
-#include "src/heap/slots-buffer.h"
 #include "src/heap/spaces-inl.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"
 #include "src/profiler/cpu-profiler.h"
 #include "src/utils-inl.h"
 #include "src/v8.h"

 namespace v8 {
 namespace internal {

(...skipping 13 matching lines...)
 // MarkCompactCollector

 MarkCompactCollector::MarkCompactCollector(Heap* heap)
     :  // NOLINT
 #ifdef DEBUG
       state_(IDLE),
 #endif
       marking_parity_(ODD_MARKING_PARITY),
       was_marked_incrementally_(false),
       evacuation_(false),
-      slots_buffer_allocator_(nullptr),
-      migration_slots_buffer_(nullptr),
       heap_(heap),
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(0),
       code_flusher_(nullptr),
       have_code_to_deoptimize_(false),
       compacting_(false),
       sweeping_in_progress_(false),
       compaction_in_progress_(false),
       pending_sweeper_tasks_semaphore_(0),
       pending_compaction_tasks_semaphore_(0) {
(...skipping 172 matching lines...)
   DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
   DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
   DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
   DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

   free_list_old_space_.Reset(new FreeList(heap_->old_space()));
   free_list_code_space_.Reset(new FreeList(heap_->code_space()));
   free_list_map_space_.Reset(new FreeList(heap_->map_space()));
   EnsureMarkingDequeIsReserved();
   EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
-  slots_buffer_allocator_ = new SlotsBufferAllocator();

   if (FLAG_flush_code) {
     code_flusher_ = new CodeFlusher(isolate());
     if (FLAG_trace_code_flushing) {
       PrintF("[code-flushing is now on]\n");
     }
   }
 }


 void MarkCompactCollector::TearDown() {
   AbortCompaction();
   delete marking_deque_memory_;
-  delete slots_buffer_allocator_;
   delete code_flusher_;
 }


 void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
   DCHECK(!p->NeverEvacuate());
   p->MarkEvacuationCandidate();
   evacuation_candidates_.Add(p);
 }

(...skipping 26 matching lines...)

     heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea();
     heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea();

     compacting_ = evacuation_candidates_.length() > 0;
   }

   return compacting_;
 }

-
-void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
+void MarkCompactCollector::ClearInvalidRememberedSetSlots() {
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
     RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap());
   }
+  // There is no need to filter the old-to-old set because
+  // it is completely cleared after the mark-compact GC.
+  // The slots that become invalid due to runtime transitions are
+  // cleared eagerly immediately after the transition.

-  {
-    GCTracer::Scope gc_scope(heap()->tracer(),
-                             GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
-    for (Page* p : evacuation_candidates_) {
-      SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
-    }
-  }
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    VerifyValidStoreAndSlotsBufferEntries();
+    RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
+    RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap());
   }
 #endif
 }

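For orientation: the OLD_TO_NEW filtering above amounts to dropping recorded slot addresses that no longer lie inside live objects. A self-contained sketch of that idea, with the liveness predicate standing in for the collector's IsSlotInLiveObject() (the real filter lives in the RememberedSet implementation, not in this file):

#include <cstdint>
#include <set>

using Address = uintptr_t;

// Keep only slots that still point into live objects. Stale entries,
// e.g. slots inside freed or left-trimmed objects, must not be
// blindly updated during the pointer-update phase.
void ClearInvalidSlotsSketch(std::set<Address>* slots,
                             bool (*is_slot_in_live_object)(Address)) {
  for (auto it = slots->begin(); it != slots->end();) {
    if (!is_slot_in_live_object(*it)) {
      it = slots->erase(it);
    } else {
      ++it;
    }
  }
}
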
-#ifdef VERIFY_HEAP
-static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
-  PageIterator it(space);
-  while (it.has_next()) {
-    Page* p = it.next();
-    SlotsBuffer::VerifySlots(heap, p->slots_buffer());
-  }
-}
-
-
-void MarkCompactCollector::VerifyValidStoreAndSlotsBufferEntries() {
-  RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
-
-  VerifyValidSlotsBufferEntries(heap(), heap()->old_space());
-  VerifyValidSlotsBufferEntries(heap(), heap()->code_space());
-  VerifyValidSlotsBufferEntries(heap(), heap()->map_space());
-
-  LargeObjectIterator it(heap()->lo_space());
-  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
-    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
-    SlotsBuffer::VerifySlots(heap(), chunk->slots_buffer());
-  }
-}
-#endif
-
-
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
   DCHECK(state_ == PREPARE_GC);

   MarkLiveObjects();

   DCHECK(heap_->incremental_marking()->IsStopped());

   ClearNonLiveReferences();
(...skipping 329 matching lines...)
     if (p->IsFlagSet(Page::POPULAR_PAGE)) {
       // This page had slots buffer overflow on previous GC, skip it.
       p->ClearFlag(Page::POPULAR_PAGE);
       continue;
     }
     // Invariant: Evacuation candidates are just created when marking is
     // started. This means that sweeping has finished. Furthermore, at the end
     // of a GC all evacuation candidates are cleared and their slot buffers are
     // released.
     CHECK(!p->IsEvacuationCandidate());
-    CHECK(p->slots_buffer() == nullptr);
+    CHECK_NULL(p->old_to_old_slots());
+    CHECK_NULL(p->typed_old_to_old_slots());
     CHECK(p->SweepingDone());
     DCHECK(p->area_size() == area_size);
     pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
   }

   int candidate_count = 0;
   int total_live_bytes = 0;

   const bool reduce_memory = heap()->ShouldReduceMemory();
   if (FLAG_manual_evacuation_candidates_selection) {
(...skipping 85 matching lines...)
         "compaction-selection: space=%s reduce_memory=%d pages=%d "
         "total_live_bytes=%d\n",
         AllocationSpaceName(space->identity()), reduce_memory,
         candidate_count, total_live_bytes / KB);
   }
 }


 void MarkCompactCollector::AbortCompaction() {
   if (compacting_) {
+    RememberedSet<OLD_TO_OLD>::ClearAll(heap());
     for (Page* p : evacuation_candidates_) {
-      slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
       p->ClearEvacuationCandidate();
       p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
     }
     compacting_ = false;
     evacuation_candidates_.Rewind(0);
   }
   DCHECK_EQ(0, evacuation_candidates_.length());
 }


(...skipping 395 matching lines...)
   if (!code->IsSmi() &&
       HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) {
     // Save a copy that can be reinstated if we need the code again.
     re->SetDataAt(JSRegExp::saved_code_index(is_one_byte), code);

     // Saving a copy might create a pointer into compaction candidate
     // that was not observed by marker. This might happen if JSRegExp data
     // was marked through the compilation cache before marker reached JSRegExp
     // object.
     FixedArray* data = FixedArray::cast(re->data());
-    Object** slot =
-        data->data_start() + JSRegExp::saved_code_index(is_one_byte);
-    heap->mark_compact_collector()->RecordSlot(data, slot, code);
+    if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(data))) {
+      Object** slot =
+          data->data_start() + JSRegExp::saved_code_index(is_one_byte);
+      heap->mark_compact_collector()->RecordSlot(data, slot, code);
+    }

     // Set a number in the 0-255 range to guarantee no smi overflow.
     re->SetDataAt(JSRegExp::code_index(is_one_byte),
                   Smi::FromInt(heap->ms_count() & 0xff));
   } else if (code->IsSmi()) {
     int value = Smi::cast(code)->value();
     // The regexp has not been compiled yet or there was a compilation error.
     if (value == JSRegExp::kUninitializedValue ||
         value == JSRegExp::kCompilationErrorValue) {
       return;
(...skipping 276 matching lines...)
  public:
   virtual ~HeapObjectVisitor() {}
   virtual bool Visit(HeapObject* object) = 0;
 };


 class MarkCompactCollector::EvacuateVisitorBase
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
   EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces,
-                      SlotsBuffer** evacuation_slots_buffer,
-                      LocalStoreBuffer* local_store_buffer)
+                      LocalSlotsBuffer* old_to_old_slots,
+                      LocalSlotsBuffer* old_to_new_slots)
       : heap_(heap),
-        evacuation_slots_buffer_(evacuation_slots_buffer),
         compaction_spaces_(compaction_spaces),
-        local_store_buffer_(local_store_buffer) {}
+        old_to_old_slots_(old_to_old_slots),
+        old_to_new_slots_(old_to_new_slots) {}

   bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
                          HeapObject** target_object) {
     int size = object->Size();
     AllocationAlignment alignment = object->RequiredAlignment();
     AllocationResult allocation = target_space->AllocateRaw(size, alignment);
     if (allocation.To(target_object)) {
       heap_->mark_compact_collector()->MigrateObject(
           *target_object, object, size, target_space->identity(),
-          evacuation_slots_buffer_, local_store_buffer_);
+          old_to_old_slots_, old_to_new_slots_);
       return true;
     }
     return false;
   }

  protected:
   Heap* heap_;
-  SlotsBuffer** evacuation_slots_buffer_;
   CompactionSpaceCollection* compaction_spaces_;
-  LocalStoreBuffer* local_store_buffer_;
+  LocalSlotsBuffer* old_to_old_slots_;
+  LocalSlotsBuffer* old_to_new_slots_;
 };


 class MarkCompactCollector::EvacuateNewSpaceVisitor final
     : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   static const intptr_t kLabSize = 4 * KB;
   static const intptr_t kMaxLabObjectSize = 256;

   explicit EvacuateNewSpaceVisitor(Heap* heap,
                                    CompactionSpaceCollection* compaction_spaces,
-                                   SlotsBuffer** evacuation_slots_buffer,
-                                   LocalStoreBuffer* local_store_buffer,
+                                   LocalSlotsBuffer* old_to_old_slots,
+                                   LocalSlotsBuffer* old_to_new_slots,
                                    HashMap* local_pretenuring_feedback)
-      : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer,
-                            local_store_buffer),
+      : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
+                            old_to_new_slots),
         buffer_(LocalAllocationBuffer::InvalidBuffer()),
         space_to_allocate_(NEW_SPACE),
         promoted_size_(0),
         semispace_copied_size_(0),
         local_pretenuring_feedback_(local_pretenuring_feedback) {}

   bool Visit(HeapObject* object) override {
     heap_->UpdateAllocationSite<Heap::kCached>(object,
                                                local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
       // If we end up needing more special cases, we should factor this out.
       if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
         heap_->array_buffer_tracker()->Promote(
             JSArrayBuffer::cast(target_object));
       }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     heap_->mark_compact_collector()->MigrateObject(
         HeapObject::cast(target), object, size, space,
-        (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_,
-        (space == NEW_SPACE) ? nullptr : local_store_buffer_);
+        (space == NEW_SPACE) ? nullptr : old_to_old_slots_,
+        (space == NEW_SPACE) ? nullptr : old_to_new_slots_);
     if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
       heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
     }
     semispace_copied_size_ += size;
     return true;
   }

   intptr_t promoted_size() { return promoted_size_; }
   intptr_t semispace_copied_size() { return semispace_copied_size_; }

(...skipping 99 matching lines...)
   intptr_t semispace_copied_size_;
   HashMap* local_pretenuring_feedback_;
 };


 class MarkCompactCollector::EvacuateOldSpaceVisitor final
     : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   EvacuateOldSpaceVisitor(Heap* heap,
                           CompactionSpaceCollection* compaction_spaces,
-                          SlotsBuffer** evacuation_slots_buffer,
-                          LocalStoreBuffer* local_store_buffer)
-      : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer,
-                            local_store_buffer) {}
+                          LocalSlotsBuffer* old_to_old_slots,
+                          LocalSlotsBuffer* old_to_new_slots)
+      : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
+                            old_to_new_slots) {}

   bool Visit(HeapObject* object) override {
     CompactionSpace* target_space = compaction_spaces_->Get(
         Page::FromAddress(object->address())->owner()->identity());
     HeapObject* target_object = nullptr;
     if (TryEvacuateObject(target_space, object, &target_object)) {
       DCHECK(object->map_word().IsForwardingAddress());
       return true;
     }
     return false;
(...skipping 436 matching lines...)
   {
     GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
     ClearSimpleMapTransitions(non_live_map_list);
     ClearFullMapTransitions();
   }

   MarkDependentCodeForDeoptimization(dependent_code_list);

   ClearWeakCollections();

-  ClearInvalidStoreAndSlotsBufferEntries();
+  ClearInvalidRememberedSetSlots();
 }


 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
     DependentCode* list_head) {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
   Isolate* isolate = this->isolate();
   DependentCode* current = list_head;
   while (current->length() > 0) {
(...skipping 342 matching lines...)
   Object* obj = heap()->encountered_transition_arrays();
   while (obj != Smi::FromInt(0)) {
     TransitionArray* array = TransitionArray::cast(obj);
     obj = array->next_link();
     array->set_next_link(undefined, SKIP_WRITE_BARRIER);
   }
   heap()->set_encountered_transition_arrays(Smi::FromInt(0));
 }

 void MarkCompactCollector::RecordMigratedSlot(
-    Object* value, Address slot, SlotsBuffer** evacuation_slots_buffer,
-    LocalStoreBuffer* local_store_buffer) {
+    Object* value, Address slot, LocalSlotsBuffer* old_to_old_slots,
+    LocalSlotsBuffer* old_to_new_slots) {
   // When parallel compaction is in progress, store and slots buffer entries
   // require synchronization.
   if (heap_->InNewSpace(value)) {
     if (compaction_in_progress_) {
-      local_store_buffer->Record(slot);
+      old_to_new_slots->Record(slot);
     } else {
       Page* page = Page::FromAddress(slot);
       RememberedSet<OLD_TO_NEW>::Insert(page, slot);
     }
   } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
-    SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
-                       reinterpret_cast<Object**>(slot),
-                       SlotsBuffer::IGNORE_OVERFLOW);
+    old_to_old_slots->Record(slot);
   }
 }
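The LocalSlotsBuffer type this patch introduces is used here with two Record() overloads (untyped and typed slots) and, in Evacuator::Finalize() further down, an Iterate() that takes one callback per slot kind. A minimal self-contained stand-in with that interface, purely for illustration (the real class is defined elsewhere in this patch):

#include <cstdint>
#include <functional>
#include <utility>
#include <vector>

using Address = uintptr_t;
enum SlotType { CODE_TARGET_SLOT, CELL_TARGET_SLOT, CODE_ENTRY_SLOT,
                RELOCATED_CODE_OBJECT, DEBUG_TARGET_SLOT,
                EMBEDDED_OBJECT_SLOT, OBJECT_SLOT, NUMBER_OF_SLOT_TYPES };

class LocalSlotsBuffer {
 public:
  // Record an untyped (plain pointer) slot.
  void Record(Address slot) { untyped_.push_back(slot); }
  // Record a typed slot, e.g. a code entry or relocated code object.
  void Record(SlotType type, Address slot) { typed_.emplace_back(type, slot); }
  // Hand every recorded slot to the matching callback; the evacuator
  // uses this to merge thread-local slots into the global sets.
  void Iterate(const std::function<void(Address)>& untyped_cb,
               const std::function<void(SlotType, Address)>& typed_cb) {
    for (Address slot : untyped_) untyped_cb(slot);
    for (const auto& entry : typed_) typed_cb(entry.first, entry.second);
  }

 private:
  std::vector<Address> untyped_;
  std::vector<std::pair<SlotType, Address>> typed_;
};
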


-void MarkCompactCollector::RecordMigratedCodeEntrySlot(
-    Address code_entry, Address code_entry_slot,
-    SlotsBuffer** evacuation_slots_buffer) {
-  if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
-    SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
-                       SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
-                       SlotsBuffer::IGNORE_OVERFLOW);
-  }
-}
-
-
-void MarkCompactCollector::RecordMigratedCodeObjectSlot(
-    Address code_object, SlotsBuffer** evacuation_slots_buffer) {
-  SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
-                     SlotsBuffer::RELOCATED_CODE_OBJECT, code_object,
-                     SlotsBuffer::IGNORE_OVERFLOW);
-}
-
-
-static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
-  if (RelocInfo::IsCodeTarget(rmode)) {
-    return SlotsBuffer::CODE_TARGET_SLOT;
-  } else if (RelocInfo::IsCell(rmode)) {
-    return SlotsBuffer::CELL_TARGET_SLOT;
-  } else if (RelocInfo::IsEmbeddedObject(rmode)) {
-    return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
-  } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
-    return SlotsBuffer::DEBUG_TARGET_SLOT;
-  }
-  UNREACHABLE();
-  return SlotsBuffer::NUMBER_OF_SLOT_TYPES;
-}
-
-
-static inline SlotsBuffer::SlotType DecodeSlotType(
-    SlotsBuffer::ObjectSlot slot) {
-  return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
-}
-
-
-void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
+static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
+  if (RelocInfo::IsCodeTarget(rmode)) {
+    return CODE_TARGET_SLOT;
+  } else if (RelocInfo::IsCell(rmode)) {
+    return CELL_TARGET_SLOT;
+  } else if (RelocInfo::IsEmbeddedObject(rmode)) {
+    return EMBEDDED_OBJECT_SLOT;
+  } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
+    return DEBUG_TARGET_SLOT;
+  }
+  UNREACHABLE();
+  return NUMBER_OF_SLOT_TYPES;
+}
+
+void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
+                                           Object* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
+  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
   RelocInfo::Mode rmode = rinfo->rmode();
   if (target_page->IsEvacuationCandidate() &&
       (rinfo->host() == NULL ||
        !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
     Address addr = rinfo->pc();
-    SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode);
+    SlotType slot_type = SlotTypeForRMode(rmode);
     if (rinfo->IsInConstantPool()) {
       addr = rinfo->constant_pool_entry_address();
       if (RelocInfo::IsCodeTarget(rmode)) {
-        slot_type = SlotsBuffer::CODE_ENTRY_SLOT;
+        slot_type = CODE_ENTRY_SLOT;
       } else {
         DCHECK(RelocInfo::IsEmbeddedObject(rmode));
-        slot_type = SlotsBuffer::OBJECT_SLOT;
+        slot_type = OBJECT_SLOT;
       }
     }
-    bool success = SlotsBuffer::AddTo(
-        slots_buffer_allocator_, target_page->slots_buffer_address(),
-        slot_type, addr, SlotsBuffer::FAIL_ON_OVERFLOW);
-    if (!success) {
-      EvictPopularEvacuationCandidate(target_page);
-    }
+    RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, slot_type, addr);
   }
 }


 class RecordMigratedSlotVisitor final : public ObjectVisitor {
  public:
   RecordMigratedSlotVisitor(MarkCompactCollector* collector,
-                            SlotsBuffer** evacuation_slots_buffer,
-                            LocalStoreBuffer* local_store_buffer)
+                            LocalSlotsBuffer* old_to_old_slots,
+                            LocalSlotsBuffer* old_to_new_slots)
       : collector_(collector),
-        evacuation_slots_buffer_(evacuation_slots_buffer),
-        local_store_buffer_(local_store_buffer) {}
+        old_to_old_slots_(old_to_old_slots),
+        old_to_new_slots_(old_to_new_slots) {}

   V8_INLINE void VisitPointer(Object** p) override {
     collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p),
-                                   evacuation_slots_buffer_,
-                                   local_store_buffer_);
+                                   old_to_old_slots_, old_to_new_slots_);
   }

   V8_INLINE void VisitPointers(Object** start, Object** end) override {
     while (start < end) {
       collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start),
-                                     evacuation_slots_buffer_,
-                                     local_store_buffer_);
+                                     old_to_old_slots_, old_to_new_slots_);
       ++start;
     }
   }

   V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
     if (collector_->compacting_) {
       Address code_entry = Memory::Address_at(code_entry_slot);
-      collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
-                                              evacuation_slots_buffer_);
+      if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
+        old_to_old_slots_->Record(CODE_ENTRY_SLOT, code_entry_slot);
+      }
     }
   }

  private:
   MarkCompactCollector* collector_;
-  SlotsBuffer** evacuation_slots_buffer_;
-  LocalStoreBuffer* local_store_buffer_;
+  LocalSlotsBuffer* old_to_old_slots_;
+  LocalSlotsBuffer* old_to_new_slots_;
 };


 // We scavenge new space simultaneously with sweeping. This is done in two
 // passes.
 //
 // The first pass migrates all alive objects from one semispace to another or
 // promotes them to old space. Forwarding address is written directly into
 // first word of object without any encoding. If object is dead we write
 // NULL as a forwarding address.
 //
 // The second pass updates pointers to new space in all spaces. It is possible
 // to encounter pointers to dead new space objects during traversal of pointers
 // to new space. We should clear them to avoid encountering them during next
 // pointer iteration. This is an issue if the store buffer overflows and we
 // have to scan the entire old space, including dead objects, looking for
 // pointers to new space.
 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
                                          int size, AllocationSpace dest,
-                                         SlotsBuffer** evacuation_slots_buffer,
-                                         LocalStoreBuffer* local_store_buffer) {
+                                         LocalSlotsBuffer* old_to_old_slots,
+                                         LocalSlotsBuffer* old_to_new_slots) {
   Address dst_addr = dst->address();
   Address src_addr = src->address();
   DCHECK(heap()->AllowedToBeMigrated(src, dest));
   DCHECK(dest != LO_SPACE);
   if (dest == OLD_SPACE) {
     DCHECK_OBJECT_SIZE(size);
-    DCHECK(evacuation_slots_buffer != nullptr);
     DCHECK(IsAligned(size, kPointerSize));

     heap()->MoveBlock(dst->address(), src->address(), size);
-    RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer,
-                                      local_store_buffer);
+    RecordMigratedSlotVisitor visitor(this, old_to_old_slots, old_to_new_slots);
     dst->IterateBody(&visitor);
   } else if (dest == CODE_SPACE) {
     DCHECK_CODEOBJECT_SIZE(size, heap()->code_space());
-    DCHECK(evacuation_slots_buffer != nullptr);
     PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
     heap()->MoveBlock(dst_addr, src_addr, size);
-    RecordMigratedCodeObjectSlot(dst_addr, evacuation_slots_buffer);
+    old_to_old_slots->Record(RELOCATED_CODE_OBJECT, dst_addr);
     Code::cast(dst)->Relocate(dst_addr - src_addr);
   } else {
     DCHECK_OBJECT_SIZE(size);
-    DCHECK(evacuation_slots_buffer == nullptr);
+    DCHECK(old_to_old_slots == nullptr);
     DCHECK(dest == NEW_SPACE);
     heap()->MoveBlock(dst_addr, src_addr, size);
   }
   heap()->OnMoveEvent(dst, src, size);
   Memory::Address_at(src_addr) = dst_addr;
 }

-
-static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
-                              SlotsBuffer::SlotType slot_type, Address addr) {
+static inline void UpdateTypedSlot(Isolate* isolate, ObjectVisitor* v,
+                                   SlotType slot_type, Address addr) {
   switch (slot_type) {
-    case SlotsBuffer::CODE_TARGET_SLOT: {
+    case CODE_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::CODE_TARGET, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case SlotsBuffer::CELL_TARGET_SLOT: {
+    case CELL_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::CELL, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case SlotsBuffer::CODE_ENTRY_SLOT: {
+    case CODE_ENTRY_SLOT: {
       v->VisitCodeEntry(addr);
       break;
     }
-    case SlotsBuffer::RELOCATED_CODE_OBJECT: {
+    case RELOCATED_CODE_OBJECT: {
       HeapObject* obj = HeapObject::FromAddress(addr);
       Code::BodyDescriptor::IterateBody(obj, v);
       break;
     }
-    case SlotsBuffer::DEBUG_TARGET_SLOT: {
+    case DEBUG_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0,
                       NULL);
       if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
       break;
     }
-    case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
+    case EMBEDDED_OBJECT_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case SlotsBuffer::OBJECT_SLOT: {
+    case OBJECT_SLOT: {
       v->VisitPointer(reinterpret_cast<Object**>(addr));
       break;
     }
     default:
       UNREACHABLE();
       break;
   }
 }


(...skipping 84 matching lines...)
     }
   }

  private:
   inline void UpdatePointer(Object** p) { UpdateSlot(heap_, p); }

   Heap* heap_;
 };


-void MarkCompactCollector::UpdateSlots(SlotsBuffer* buffer) {
-  PointersUpdatingVisitor v(heap_);
-  size_t buffer_size = buffer->Size();
-
-  for (size_t slot_idx = 0; slot_idx < buffer_size; ++slot_idx) {
-    SlotsBuffer::ObjectSlot slot = buffer->Get(slot_idx);
-    if (!SlotsBuffer::IsTypedSlot(slot)) {
-      PointersUpdatingVisitor::UpdateSlot(heap_, slot);
-    } else {
-      ++slot_idx;
-      DCHECK(slot_idx < buffer_size);
-      UpdateSlot(heap_->isolate(), &v, DecodeSlotType(slot),
-                 reinterpret_cast<Address>(buffer->Get(slot_idx)));
-    }
-  }
-}
-
-
-void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) {
-  while (buffer != NULL) {
-    UpdateSlots(buffer);
-    buffer = buffer->next();
-  }
-}
-
-
 static void UpdatePointer(HeapObject** address, HeapObject* object) {
   MapWord map_word = object->map_word();
   // Since we only filter invalid slots in old space, the store buffer can
   // still contain stale pointers in large object and in map spaces. Ignore
   // these pointers here.
   DCHECK(map_word.IsForwardingAddress() ||
          !object->GetHeap()->old_space()->Contains(
              reinterpret_cast<Address>(address)));
   if (map_word.IsForwardingAddress()) {
     // Update the corresponding slot.
(...skipping 101 matching lines...)
     // If the slot is within the last found object in the cell, the slot is
     // in a live object.
     // Slots pointing to the first word of an object are invalid and removed.
     // This can happen when we move the object header while left trimming.
     *out_object = object;
     return true;
   }
   return false;
 }

-
-bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
+HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
+  Page* p = Page::FromAddress(slot);
   // This function does not support large objects right now.
   Space* owner = p->owner();
-  if (owner == heap_->lo_space() || owner == NULL) {
+  if (owner == heap_->lo_space() || owner == nullptr) {
     Object* large_object = heap_->lo_space()->FindObject(slot);
     // This object has to exist, otherwise we would not have recorded a slot
     // for it.
     CHECK(large_object->IsHeapObject());
     HeapObject* large_heap_object = HeapObject::cast(large_object);
+
     if (IsMarked(large_heap_object)) {
-      return true;
+      return large_heap_object;
     }
-    return false;
+    return nullptr;
   }

   LiveObjectIterator<kBlackObjects> it(p);
-  HeapObject* object = NULL;
-  while ((object = it.Next()) != NULL) {
+  HeapObject* object = nullptr;
+  while ((object = it.Next()) != nullptr) {
     int size = object->Size();
-
-    if (object->address() > slot) return false;
+    if (object->address() > slot) return nullptr;
     if (object->address() <= slot && slot < (object->address() + size)) {
-      return true;
+      return object;
     }
   }
-  return false;
+  return nullptr;
 }


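Note that the rename from IsSlotInBlackObjectSlow() to FindBlackObjectBySlotSlow() also changes the contract: callers now receive the enclosing black object rather than a bool. A hedged usage sketch (CheckSlot is a hypothetical caller, not part of this patch):

// Hypothetical caller inspecting the object that encloses a recorded slot.
void CheckSlot(MarkCompactCollector* collector, Address slot) {
  HeapObject* object = collector->FindBlackObjectBySlotSlow(slot);
  if (object != nullptr) {
    // The slot lies inside a live (black) object and is safe to process.
  } else {
    // The slot points into free space or a dead object and should have
    // been filtered out of the remembered set.
  }
}
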
 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
   HeapObject* object = NULL;
   // The target object is black but we don't know if the source slot is black.
   // The source object could have died and the slot could be part of a free
   // space. Find out based on mark bits if the slot is part of a live object.
   if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
     return false;
   }

   DCHECK(object != NULL);
   int offset = static_cast<int>(slot - object->address());
   return object->IsValidSlot(offset);
 }


-void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
-                                                    HeapObject* object) {
-  // The target object has to be black.
-  CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
-
-  // The target object is black but we don't know if the source slot is black.
-  // The source object could have died and the slot could be part of a free
-  // space. Use the mark bit iterator to find out about liveness of the slot.
-  CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
-}
-
-
 void MarkCompactCollector::EvacuateNewSpacePrologue() {
   NewSpace* new_space = heap()->new_space();
   NewSpacePageIterator it(new_space->bottom(), new_space->top());
   // Append the list of new space pages to be processed.
   while (it.has_next()) {
     newspace_evacuation_candidates_.Add(it.next());
   }
   new_space->Flip();
   new_space->ResetAllocationInfo();
 }

 void MarkCompactCollector::EvacuateNewSpaceEpilogue() {
   newspace_evacuation_candidates_.Rewind(0);
 }


-void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized(
-    SlotsBuffer* evacuation_slots_buffer) {
-  base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_);
-  evacuation_slots_buffers_.Add(evacuation_slots_buffer);
-}
-
 class MarkCompactCollector::Evacuator : public Malloced {
  public:
   Evacuator(MarkCompactCollector* collector,
             const List<Page*>& evacuation_candidates,
             const List<NewSpacePage*>& newspace_evacuation_candidates)
       : collector_(collector),
         evacuation_candidates_(evacuation_candidates),
         newspace_evacuation_candidates_(newspace_evacuation_candidates),
         compaction_spaces_(collector->heap()),
-        local_slots_buffer_(nullptr),
-        local_store_buffer_(collector->heap()),
         local_pretenuring_feedback_(HashMap::PointersMatch,
                                     kInitialLocalPretenuringFeedbackCapacity),
         new_space_visitor_(collector->heap(), &compaction_spaces_,
-                           &local_slots_buffer_, &local_store_buffer_,
+                           &old_to_old_slots_, &old_to_new_slots_,
                            &local_pretenuring_feedback_),
         old_space_visitor_(collector->heap(), &compaction_spaces_,
-                           &local_slots_buffer_, &local_store_buffer_),
+                           &old_to_old_slots_, &old_to_new_slots_),
         duration_(0.0),
         bytes_compacted_(0),
         task_id_(0) {}

   // Evacuate the configured set of pages in parallel.
   inline void EvacuatePages();

   // Merge back locally cached info sequentially. Note that this method needs
   // to be called from the main thread.
   inline void Finalize();
(...skipping 16 matching lines...)
   inline bool EvacuateSinglePage(MemoryChunk* p, HeapObjectVisitor* visitor);

   MarkCompactCollector* collector_;

   // Pages to process.
   const List<Page*>& evacuation_candidates_;
   const List<NewSpacePage*>& newspace_evacuation_candidates_;

   // Locally cached collector data.
   CompactionSpaceCollection compaction_spaces_;
-  SlotsBuffer* local_slots_buffer_;
-  LocalStoreBuffer local_store_buffer_;
+  LocalSlotsBuffer old_to_old_slots_;
+  LocalSlotsBuffer old_to_new_slots_;
   HashMap local_pretenuring_feedback_;

   // Visitors for the corresponding spaces.
   EvacuateNewSpaceVisitor new_space_visitor_;
   EvacuateOldSpaceVisitor old_space_visitor_;

   // Book keeping info.
   double duration_;
   intptr_t bytes_compacted_;

(...skipping 57 matching lines...)
   heap()->code_space()->MergeCompactionSpace(
       compaction_spaces_.Get(CODE_SPACE));
   heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_);
   heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size());
   heap()->IncrementSemiSpaceCopiedObjectSize(
       new_space_visitor_.semispace_copied_size());
   heap()->IncrementYoungSurvivorsCounter(
       new_space_visitor_.promoted_size() +
       new_space_visitor_.semispace_copied_size());
   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
-  local_store_buffer_.Process(heap()->store_buffer());
-  collector_->AddEvacuationSlotsBufferSynchronized(local_slots_buffer_);
+  // Move locally recorded slots to the global remembered sets.
+  old_to_new_slots_.Iterate(
+      [](Address slot) {
+        Page* page = Page::FromAddress(slot);
+        RememberedSet<OLD_TO_NEW>::Insert(page, slot);
+      },
+      [](SlotType type, Address slot) { UNREACHABLE(); });
+  old_to_old_slots_.Iterate(
+      [](Address slot) {
+        Page* page = Page::FromAddress(slot);
+        RememberedSet<OLD_TO_OLD>::Insert(page, slot);
+      },
+      [](SlotType type, Address slot) {
+        Page* page = Page::FromAddress(slot);
+        RememberedSet<OLD_TO_OLD>::InsertTyped(page, type, slot);
+      });
 }

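The merge above is deliberately sequential: each parallel evacuator records into its own LocalSlotsBuffer, and only the main thread, in Finalize(), touches the global remembered sets, so the recording fast path needs no locks. A self-contained sketch of that pattern, reusing the LocalSlotsBuffer stand-in from the earlier note and a std::set standing in for RememberedSet&lt;OLD_TO_NEW&gt;:

#include <functional>
#include <set>
#include <thread>
#include <vector>

std::set<Address> global_old_to_new;  // stand-in for the global remembered set

// Worker threads record into private buffers; no synchronization needed here.
void RecordWork(LocalSlotsBuffer* local, const std::vector<Address>& slots) {
  for (Address slot : slots) local->Record(slot);
}

void RunAndMerge(const std::vector<std::vector<Address>>& work) {
  std::vector<LocalSlotsBuffer> locals(work.size());
  std::vector<std::thread> threads;
  for (size_t i = 0; i < work.size(); i++) {
    threads.emplace_back(RecordWork, &locals[i], std::cref(work[i]));
  }
  for (auto& thread : threads) thread.join();
  // Sequential merge on the "main thread", mirroring Evacuator::Finalize().
  for (auto& local : locals) {
    local.Iterate(
        [](Address slot) { global_old_to_new.insert(slot); },
        [](SlotType, Address) { /* no typed old-to-new slots here */ });
  }
}
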
 class MarkCompactCollector::CompactionTask : public CancelableTask {
  public:
   explicit CompactionTask(Heap* heap, Evacuator* evacuator)
       : CancelableTask(heap->isolate()), heap_(heap), evacuator_(evacuator) {
     evacuator->set_task_id(id());
   }

   virtual ~CompactionTask() {}
(...skipping 286 matching lines...)
       !ShouldSkipEvacuationSlotRecording(code)) {
     DCHECK(compacting_);

     // If the object is white then no slots were recorded on it yet.
     MarkBit mark_bit = Marking::MarkBitFrom(code);
     if (Marking::IsWhite(mark_bit)) return;

     // Ignore all slots that might have been recorded in the body of the
     // deoptimized code object. Assumption: no slots will be recorded for
     // this object after invalidating it.
-    RemoveObjectSlots(code->instruction_start(),
-                      code->address() + code->Size());
+    Page* page = Page::FromAddress(code->address());
+    Address start = code->instruction_start();
+    Address end = code->address() + code->Size();
+    RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
   }
 }


 // Return true if the given code is deoptimized or will be deoptimized.
 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
   return code->is_optimized_code() && code->marked_for_deoptimization();
 }


3536 void MarkCompactCollector::RemoveObjectSlots(Address start_slot,
3537 Address end_slot) {
3538 // Remove entries by replacing them with an old-space slot containing a smi
3539 // that is located in an unmovable page.
3540 for (Page* p : evacuation_candidates_) {
3541 DCHECK(p->IsEvacuationCandidate() ||
3542 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3543 if (p->IsEvacuationCandidate()) {
3544 SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), start_slot,
3545 end_slot);
3546 }
3547 }
3548 }
3549
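For context on the deleted helper: SlotsBuffer entries could not be erased in place, so RemoveObjectSlots overwrote matching entries with a sentinel, an old-space address known to hold a smi on an unmovable page, which the later update pass then skipped harmlessly. A rough standalone model, with kRemovedSlotSentinel as a hypothetical stand-in for that reserved slot:

    // Sketch only: models the old trick of overwriting dead buffer entries
    // with a sentinel instead of compacting the buffer.
    #include <cstdint>
    #include <vector>

    using Address = std::uintptr_t;
    constexpr Address kRemovedSlotSentinel = 0x10;  // hypothetical smi slot

    void RemoveObjectSlots(std::vector<Address>& buffer, Address start,
                           Address end) {
      for (Address& slot : buffer) {
        if (slot >= start && slot < end) slot = kRemovedSlotSentinel;
      }
    }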
3550
3551 #ifdef VERIFY_HEAP 3438 #ifdef VERIFY_HEAP
3552 static void VerifyAllBlackObjects(MemoryChunk* page) { 3439 static void VerifyAllBlackObjects(MemoryChunk* page) {
3553 LiveObjectIterator<kAllLiveObjects> it(page); 3440 LiveObjectIterator<kAllLiveObjects> it(page);
3554 HeapObject* object = NULL; 3441 HeapObject* object = NULL;
3555 while ((object = it.Next()) != NULL) { 3442 while ((object = it.Next()) != NULL) {
3556 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); 3443 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
3557 } 3444 }
3558 } 3445 }
3559 #endif // VERIFY_HEAP 3446 #endif // VERIFY_HEAP
3560 3447
(...skipping 131 matching lines...)
3692 if (FLAG_verify_heap && !sweeping_in_progress_) { 3579 if (FLAG_verify_heap && !sweeping_in_progress_) {
3693 VerifyEvacuation(heap()); 3580 VerifyEvacuation(heap());
3694 } 3581 }
3695 #endif 3582 #endif
3696 } 3583 }
3697 3584
3698 3585
3699 void MarkCompactCollector::UpdatePointersAfterEvacuation() { 3586 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
3700 GCTracer::Scope gc_scope(heap()->tracer(), 3587 GCTracer::Scope gc_scope(heap()->tracer(),
3701 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS); 3588 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
3702 {
3703 GCTracer::Scope gc_scope(
3704 heap()->tracer(),
3705 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3706 UpdateSlotsRecordedIn(migration_slots_buffer_);
3707 if (FLAG_trace_fragmentation_verbose) {
3708 PrintF(" migration slots buffer: %d\n",
3709 SlotsBuffer::SizeOfChain(migration_slots_buffer_));
3710 }
3711 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
3712 DCHECK(migration_slots_buffer_ == NULL);
3713 3589
3714 // TODO(hpayer): Process the slots buffers in parallel. This has to be done
3715 // after evacuation of all pages finishes.
3716 int buffers = evacuation_slots_buffers_.length();
3717 for (int i = 0; i < buffers; i++) {
3718 SlotsBuffer* buffer = evacuation_slots_buffers_[i];
3719 UpdateSlotsRecordedIn(buffer);
3720 slots_buffer_allocator_->DeallocateChain(&buffer);
3721 }
3722 evacuation_slots_buffers_.Rewind(0);
3723 }
3724
3725 // Second pass: find pointers to new space and update them.
3726 PointersUpdatingVisitor updating_visitor(heap()); 3590 PointersUpdatingVisitor updating_visitor(heap());
3727 3591
3728 { 3592 {
3729 GCTracer::Scope gc_scope( 3593 GCTracer::Scope gc_scope(
3730 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW); 3594 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW);
3731 // Update pointers in to space. 3595 // Update pointers in to space.
3732 SemiSpaceIterator to_it(heap()->new_space()); 3596 SemiSpaceIterator to_it(heap()->new_space());
3733 for (HeapObject* object = to_it.Next(); object != NULL; 3597 for (HeapObject* object = to_it.Next(); object != NULL;
3734 object = to_it.Next()) { 3598 object = to_it.Next()) {
3735 Map* map = object->map(); 3599 Map* map = object->map();
3736 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 3600 object->IterateBody(map->instance_type(), object->SizeFromMap(map),
3737 &updating_visitor); 3601 &updating_visitor);
3738 } 3602 }
3739 // Update roots. 3603 // Update roots.
3740 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); 3604 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
3741 3605
3742 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer); 3606 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer);
3743 } 3607 }
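After evacuation, an OLD_TO_NEW slot may still point at an object's old location, so each visited slot has to resolve forwarding before being rewritten. A small standalone sketch of that resolution under an assumed forwarding map; V8 instead reads the forwarding pointer out of the relocated object's map word.

    // Sketch only: rewrite *slot if its target was evacuated; leave it
    // untouched otherwise.
    #include <cstdint>
    #include <map>

    using Address = std::uintptr_t;

    void UpdateOldToNewSlot(Address* slot,
                            const std::map<Address, Address>& forwarding) {
      auto it = forwarding.find(*slot);
      if (it != forwarding.end()) *slot = it->second;  // follow forwarding
    }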
3744 3608
3745 { 3609 {
3610 Heap* heap = this->heap();
3611 GCTracer::Scope gc_scope(
3612 heap->tracer(),
3613 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3614
3615 RememberedSet<OLD_TO_OLD>::Iterate(heap, [heap](Address slot) {
3616 PointersUpdatingVisitor::UpdateSlot(heap,
3617 reinterpret_cast<Object**>(slot));
3618 return REMOVE_SLOT;
3619 });
3620 Isolate* isolate = heap->isolate();
3621 PointersUpdatingVisitor* visitor = &updating_visitor;
3622 RememberedSet<OLD_TO_OLD>::IterateTyped(
3623 heap, [isolate, visitor](SlotType type, Address slot) {
3624 UpdateTypedSlot(isolate, visitor, type, slot);
3625 return REMOVE_SLOT;
3626 });
3627 }
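Both lambdas above return REMOVE_SLOT, so the OLD_TO_OLD set drains itself as it is walked; a callback could equally return KEEP_SLOT to retain an entry. A minimal standalone sketch of that filter-while-iterating contract:

    // Sketch only: an Iterate that keeps or drops each slot based on the
    // callback's verdict, mirroring the SlotCallbackResult convention.
    #include <cstdint>
    #include <functional>
    #include <iterator>
    #include <set>

    using Address = std::uintptr_t;
    enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

    void IterateAndFilter(std::set<Address>& slots,
                          const std::function<SlotCallbackResult(Address)>& cb) {
      for (auto it = slots.begin(); it != slots.end();) {
        it = (cb(*it) == REMOVE_SLOT) ? slots.erase(it) : std::next(it);
      }
    }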
3628
3629 {
3746 GCTracer::Scope gc_scope( 3630 GCTracer::Scope gc_scope(
3747 heap()->tracer(), 3631 heap()->tracer(),
3748 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED); 3632 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED);
3749 for (Page* p : evacuation_candidates_) { 3633 for (Page* p : evacuation_candidates_) {
3750 DCHECK(p->IsEvacuationCandidate() || 3634 DCHECK(p->IsEvacuationCandidate() ||
3751 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); 3635 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3752 3636
3753 if (p->IsEvacuationCandidate()) { 3637 if (p->IsEvacuationCandidate()) {
3754 UpdateSlotsRecordedIn(p->slots_buffer());
3755 if (FLAG_trace_fragmentation_verbose) {
3756 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3757 SlotsBuffer::SizeOfChain(p->slots_buffer()));
3758 }
3759 slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
3760
3761 // Important: skip list should be cleared only after roots were updated 3638 // Important: skip list should be cleared only after roots were updated
3762 // because root iteration traverses the stack and might have to find 3639 // because root iteration traverses the stack and might have to find
3763 // code objects from non-updated pc pointing into evacuation candidate. 3640 // code objects from non-updated pc pointing into evacuation candidate.
3764 SkipList* list = p->skip_list(); 3641 SkipList* list = p->skip_list();
3765 if (list != NULL) list->Clear(); 3642 if (list != NULL) list->Clear();
3766 3643
3767 // First pass on aborted pages, fixing up all live objects. 3644 // First pass on aborted pages, fixing up all live objects.
3768 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { 3645 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
3769 p->ClearEvacuationCandidate(); 3646 p->ClearEvacuationCandidate();
3770 VisitLiveObjectsBody(p, &updating_visitor); 3647 VisitLiveObjectsBody(p, &updating_visitor);
(...skipping 241 matching lines...)
4012 3889
4013 3890
4014 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); } 3891 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); }
4015 3892
4016 3893
4017 void MarkCompactCollector::Initialize() { 3894 void MarkCompactCollector::Initialize() {
4018 MarkCompactMarkingVisitor::Initialize(); 3895 MarkCompactMarkingVisitor::Initialize();
4019 IncrementalMarking::Initialize(); 3896 IncrementalMarking::Initialize();
4020 } 3897 }
4021 3898
4022 3899 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* host, Address slot,
4023 void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
4024 if (FLAG_trace_fragmentation) {
4025 PrintF("Page %p is too popular. Disabling evacuation.\n",
4026 reinterpret_cast<void*>(page));
4027 }
4028
4029 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow);
4030
4031 // TODO(gc) If all evacuation candidates are too popular we
4032 // should stop slots recording entirely.
4033 page->ClearEvacuationCandidate();
4034
4035 DCHECK(!page->IsFlagSet(Page::POPULAR_PAGE));
4036 page->SetFlag(Page::POPULAR_PAGE);
4037
4038 // We were not collecting slots on this page that point
4039 // to other evacuation candidates thus we have to
4040 // rescan the page after evacuation to discover and update all
4041 // pointers to evacuated objects.
4042 page->SetFlag(Page::RESCAN_ON_EVACUATION);
4043 }
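The eviction path deleted here existed only because a SlotsBuffer could hit FAIL_ON_OVERFLOW; remembered sets grow per page, so the whole fallback disappears. A rough standalone model of the old behavior, with PageModel and kCapacity as hypothetical stand-ins:

    // Sketch only: the old overflow contract. When a page's buffer was
    // full, the page stopped being an evacuation candidate and was flagged
    // for a full rescan after evacuation instead.
    struct PageModel {
      bool evacuation_candidate = true;
      bool rescan_on_evacuation = false;
      int slot_count = 0;
      static constexpr int kCapacity = 1024;  // hypothetical buffer limit
    };

    bool RecordSlotOrEvict(PageModel& page) {
      if (page.slot_count < PageModel::kCapacity) {
        ++page.slot_count;
        return true;  // slot recorded
      }
      page.evacuation_candidate = false;  // "too popular": stop compacting it
      page.rescan_on_evacuation = true;   // fix pointers by rescanning later
      return false;
    }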
4044
4045
4046 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
4047 Code* target) { 3900 Code* target) {
4048 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); 3901 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
3902 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
4049 if (target_page->IsEvacuationCandidate() && 3903 if (target_page->IsEvacuationCandidate() &&
4050 !ShouldSkipEvacuationSlotRecording(object)) { 3904 !ShouldSkipEvacuationSlotRecording(host)) {
4051 if (!SlotsBuffer::AddTo(slots_buffer_allocator_, 3905 RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, CODE_ENTRY_SLOT, slot);
4052 target_page->slots_buffer_address(),
4053 SlotsBuffer::CODE_ENTRY_SLOT, slot,
4054 SlotsBuffer::FAIL_ON_OVERFLOW)) {
4055 EvictPopularEvacuationCandidate(target_page);
4056 }
4057 } 3906 }
4058 } 3907 }
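Note the ownership change: the typed CODE_ENTRY_SLOT is now inserted on the source page of the host object, whereas the slots buffer used to hang off the target page. A short sketch of the guard plus insert, reusing the page map from the first sketch; both predicates are stubs, not V8's real page-flag checks.

    // Sketch only: record a CODE_ENTRY_SLOT on the host's page when the
    // target code object sits on an evacuation candidate.
    bool IsEvacuationCandidate(Address /*page*/) { return true; }     // stub
    bool ShouldSkipSlotRecording(Address /*host*/) { return false; }  // stub

    void RecordCodeEntrySlotModel(Address host, Address slot, Address target) {
      if (IsEvacuationCandidate(PageOf(target)) &&
          !ShouldSkipSlotRecording(host)) {
        g_old_to_old[PageOf(slot)].typed.insert(
            {static_cast<int>(SlotType::kCodeEntry), slot});
      }
    }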
4059 3908
4060 3909
4061 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { 3910 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
4062 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); 3911 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
4063 if (is_compacting()) { 3912 if (is_compacting()) {
4064 Code* host = 3913 Code* host =
4065 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( 3914 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
4066 pc); 3915 pc);
4067 MarkBit mark_bit = Marking::MarkBitFrom(host); 3916 MarkBit mark_bit = Marking::MarkBitFrom(host);
4068 if (Marking::IsBlack(mark_bit)) { 3917 if (Marking::IsBlack(mark_bit)) {
4069 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 3918 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
4070 RecordRelocSlot(&rinfo, target); 3919 RecordRelocSlot(host, &rinfo, target);
4071 } 3920 }
4072 } 3921 }
4073 } 3922 }
4074 3923
4075 } // namespace internal 3924 } // namespace internal
4076 } // namespace v8 3925 } // namespace v8