Chromium Code Reviews

Side by Side Diff: src/heap/mark-compact.cc

Issue 1703823002: Replace slots buffer with remembered set. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase Created 4 years, 10 months ago
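
This patch replaces the per-page SlotsBuffer chains (and the thread-local LocalStoreBuffer used during parallel evacuation) with two remembered sets keyed by the page that contains the slot: OLD_TO_NEW for old-space slots pointing into new space, and OLD_TO_OLD for slots pointing into evacuation candidates. Below is a rough sketch of the remembered-set interface as it is exercised by the call sites in this diff; the exact signatures and callback types are assumptions, and the real declarations live in src/heap/remembered-set.h.

// Sketch only: reconstructed from the call sites below, not the actual header.
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };  // assumed enumerator names

template <PointerDirection direction>  // OLD_TO_NEW or OLD_TO_OLD
class RememberedSet : public AllStatic {
 public:
  // Record an untyped slot, i.e. a plain tagged pointer inside an object.
  static void Insert(Page* page, Address slot);
  // Record a typed slot; the SlotType tells the updater how to decode it.
  static void InsertTyped(Page* page, SlotType slot_type, Address addr);
  // Visit every recorded slot; a callback returning REMOVE_SLOT drops the
  // entry, so an update pass can double as a clear.
  template <typename Callback>  // SlotCallbackResult(Address)
  static void Iterate(Heap* heap, Callback callback);
  template <typename Callback>  // SlotCallbackResult(SlotType, Address)
  static void IterateTyped(Heap* heap, Callback callback);
  // Maintenance entry points used by the collector in this patch.
  static void ClearInvalidSlots(Heap* heap);
  static void RemoveRangeTyped(Page* page, Address start, Address end);
  static void ClearAll(Heap* heap);
};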
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
11 #include "src/compilation-cache.h" 11 #include "src/compilation-cache.h"
12 #include "src/deoptimizer.h" 12 #include "src/deoptimizer.h"
13 #include "src/execution.h" 13 #include "src/execution.h"
14 #include "src/frames-inl.h" 14 #include "src/frames-inl.h"
15 #include "src/gdb-jit.h" 15 #include "src/gdb-jit.h"
16 #include "src/global-handles.h" 16 #include "src/global-handles.h"
17 #include "src/heap/array-buffer-tracker.h" 17 #include "src/heap/array-buffer-tracker.h"
18 #include "src/heap/gc-tracer.h" 18 #include "src/heap/gc-tracer.h"
19 #include "src/heap/incremental-marking.h" 19 #include "src/heap/incremental-marking.h"
20 #include "src/heap/mark-compact-inl.h" 20 #include "src/heap/mark-compact-inl.h"
21 #include "src/heap/object-stats.h" 21 #include "src/heap/object-stats.h"
22 #include "src/heap/objects-visiting-inl.h" 22 #include "src/heap/objects-visiting-inl.h"
23 #include "src/heap/objects-visiting.h" 23 #include "src/heap/objects-visiting.h"
24 #include "src/heap/slots-buffer.h"
25 #include "src/heap/spaces-inl.h" 24 #include "src/heap/spaces-inl.h"
26 #include "src/ic/ic.h" 25 #include "src/ic/ic.h"
27 #include "src/ic/stub-cache.h" 26 #include "src/ic/stub-cache.h"
28 #include "src/profiler/cpu-profiler.h" 27 #include "src/profiler/cpu-profiler.h"
29 #include "src/utils-inl.h" 28 #include "src/utils-inl.h"
30 #include "src/v8.h" 29 #include "src/v8.h"
31 30
32 namespace v8 { 31 namespace v8 {
33 namespace internal { 32 namespace internal {
34 33
(...skipping 13 matching lines...)
48 // MarkCompactCollector 47 // MarkCompactCollector
49 48
50 MarkCompactCollector::MarkCompactCollector(Heap* heap) 49 MarkCompactCollector::MarkCompactCollector(Heap* heap)
51 : // NOLINT 50 : // NOLINT
52 #ifdef DEBUG 51 #ifdef DEBUG
53 state_(IDLE), 52 state_(IDLE),
54 #endif 53 #endif
55 marking_parity_(ODD_MARKING_PARITY), 54 marking_parity_(ODD_MARKING_PARITY),
56 was_marked_incrementally_(false), 55 was_marked_incrementally_(false),
57 evacuation_(false), 56 evacuation_(false),
58 slots_buffer_allocator_(nullptr),
59 migration_slots_buffer_(nullptr),
60 heap_(heap), 57 heap_(heap),
61 marking_deque_memory_(NULL), 58 marking_deque_memory_(NULL),
62 marking_deque_memory_committed_(0), 59 marking_deque_memory_committed_(0),
63 code_flusher_(nullptr), 60 code_flusher_(nullptr),
64 have_code_to_deoptimize_(false), 61 have_code_to_deoptimize_(false),
65 compacting_(false), 62 compacting_(false),
66 sweeping_in_progress_(false), 63 sweeping_in_progress_(false),
67 compaction_in_progress_(false), 64 compaction_in_progress_(false),
68 pending_sweeper_tasks_semaphore_(0), 65 pending_sweeper_tasks_semaphore_(0),
69 pending_compaction_tasks_semaphore_(0) { 66 pending_compaction_tasks_semaphore_(0) {
(...skipping 172 matching lines...)
242 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); 239 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
243 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0); 240 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
244 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0); 241 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
245 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0); 242 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
246 243
247 free_list_old_space_.Reset(new FreeList(heap_->old_space())); 244 free_list_old_space_.Reset(new FreeList(heap_->old_space()));
248 free_list_code_space_.Reset(new FreeList(heap_->code_space())); 245 free_list_code_space_.Reset(new FreeList(heap_->code_space()));
249 free_list_map_space_.Reset(new FreeList(heap_->map_space())); 246 free_list_map_space_.Reset(new FreeList(heap_->map_space()));
250 EnsureMarkingDequeIsReserved(); 247 EnsureMarkingDequeIsReserved();
251 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize); 248 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
252 slots_buffer_allocator_ = new SlotsBufferAllocator();
253 249
254 if (FLAG_flush_code) { 250 if (FLAG_flush_code) {
255 code_flusher_ = new CodeFlusher(isolate()); 251 code_flusher_ = new CodeFlusher(isolate());
256 if (FLAG_trace_code_flushing) { 252 if (FLAG_trace_code_flushing) {
257 PrintF("[code-flushing is now on]\n"); 253 PrintF("[code-flushing is now on]\n");
258 } 254 }
259 } 255 }
260 } 256 }
261 257
262 258
263 void MarkCompactCollector::TearDown() { 259 void MarkCompactCollector::TearDown() {
264 AbortCompaction(); 260 AbortCompaction();
265 delete marking_deque_memory_; 261 delete marking_deque_memory_;
266 delete slots_buffer_allocator_;
267 delete code_flusher_; 262 delete code_flusher_;
268 } 263 }
269 264
270 265
271 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { 266 void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
272 DCHECK(!p->NeverEvacuate()); 267 DCHECK(!p->NeverEvacuate());
273 p->MarkEvacuationCandidate(); 268 p->MarkEvacuationCandidate();
274 evacuation_candidates_.Add(p); 269 evacuation_candidates_.Add(p);
275 } 270 }
276 271
(...skipping 26 matching lines...)
303 298
304 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); 299 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea();
305 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); 300 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea();
306 301
307 compacting_ = evacuation_candidates_.length() > 0; 302 compacting_ = evacuation_candidates_.length() > 0;
308 } 303 }
309 304
310 return compacting_; 305 return compacting_;
311 } 306 }
312 307
313 308 void MarkCompactCollector::ClearInvalidRememberedSetSlots() {
314 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
315 { 309 {
316 GCTracer::Scope gc_scope(heap()->tracer(), 310 GCTracer::Scope gc_scope(heap()->tracer(),
317 GCTracer::Scope::MC_CLEAR_STORE_BUFFER); 311 GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
318 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap()); 312 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap());
319 } 313 }
314 // There is no need to filter the old-to-old set because
315 // it is completely cleared after the mark-compact GC.
316 // Slots that become invalid due to runtime transitions are
317 // cleared eagerly, immediately after the transition.
320 318
321 {
322 GCTracer::Scope gc_scope(heap()->tracer(),
323 GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
324 for (Page* p : evacuation_candidates_) {
325 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
326 }
327 }
328 #ifdef VERIFY_HEAP 319 #ifdef VERIFY_HEAP
329 if (FLAG_verify_heap) { 320 if (FLAG_verify_heap) {
330 VerifyValidStoreAndSlotsBufferEntries(); 321 RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
322 RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap());
331 } 323 }
332 #endif 324 #endif
333 } 325 }
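
Only the OLD_TO_NEW set needs this filtering pass: entries pointing into objects that died since they were recorded must not be dereferenced later. A minimal sketch of what the filter amounts to, assuming the KEEP_SLOT/REMOVE_SLOT callback contract sketched above and the IsSlotInLiveObject predicate defined further down in this file; the real ClearInvalidSlots may be organized quite differently.

// Hypothetical filtering pass; illustration only.
void ClearInvalidOldToNewSlots(Heap* heap) {
  RememberedSet<OLD_TO_NEW>::Iterate(heap, [heap](Address slot) {
    // Keep the entry only if the slot still lies inside a live object.
    return heap->mark_compact_collector()->IsSlotInLiveObject(slot)
               ? KEEP_SLOT
               : REMOVE_SLOT;
  });
}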
334 326
335 327
336 #ifdef VERIFY_HEAP
337 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
338 PageIterator it(space);
339 while (it.has_next()) {
340 Page* p = it.next();
341 SlotsBuffer::VerifySlots(heap, p->slots_buffer());
342 }
343 }
344
345
346 void MarkCompactCollector::VerifyValidStoreAndSlotsBufferEntries() {
347 RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
348
349 VerifyValidSlotsBufferEntries(heap(), heap()->old_space());
350 VerifyValidSlotsBufferEntries(heap(), heap()->code_space());
351 VerifyValidSlotsBufferEntries(heap(), heap()->map_space());
352
353 LargeObjectIterator it(heap()->lo_space());
354 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
355 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
356 SlotsBuffer::VerifySlots(heap(), chunk->slots_buffer());
357 }
358 }
359 #endif
360
361
362 void MarkCompactCollector::CollectGarbage() { 328 void MarkCompactCollector::CollectGarbage() {
363 // Make sure that Prepare() has been called. The individual steps below will 329 // Make sure that Prepare() has been called. The individual steps below will
364 // update the state as they proceed. 330 // update the state as they proceed.
365 DCHECK(state_ == PREPARE_GC); 331 DCHECK(state_ == PREPARE_GC);
366 332
367 MarkLiveObjects(); 333 MarkLiveObjects();
368 334
369 DCHECK(heap_->incremental_marking()->IsStopped()); 335 DCHECK(heap_->incremental_marking()->IsStopped());
370 336
371 ClearNonLiveReferences(); 337 ClearNonLiveReferences();
(...skipping 329 matching lines...)
701 if (p->IsFlagSet(Page::POPULAR_PAGE)) { 667 if (p->IsFlagSet(Page::POPULAR_PAGE)) {
702 // This page had a slots buffer overflow on the previous GC; skip it. 668 // This page had a slots buffer overflow on the previous GC; skip it.
703 p->ClearFlag(Page::POPULAR_PAGE); 669 p->ClearFlag(Page::POPULAR_PAGE);
704 continue; 670 continue;
705 } 671 }
706 // Invariant: Evacuation candidates are only created when marking is 672 // Invariant: Evacuation candidates are only created when marking is
707 // started. This means that sweeping has finished. Furthermore, at the end 673 // started. This means that sweeping has finished. Furthermore, at the end
708 // of a GC all evacuation candidates are cleared and their slot buffers are 674 // of a GC all evacuation candidates are cleared and their slot buffers are
709 // released. 675 // released.
710 CHECK(!p->IsEvacuationCandidate()); 676 CHECK(!p->IsEvacuationCandidate());
711 CHECK(p->slots_buffer() == nullptr); 677 CHECK_NULL(p->old_to_old_slots());
678 CHECK_NULL(p->typed_old_to_old_slots());
712 CHECK(p->SweepingDone()); 679 CHECK(p->SweepingDone());
713 DCHECK(p->area_size() == area_size); 680 DCHECK(p->area_size() == area_size);
714 pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p)); 681 pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
715 } 682 }
716 683
717 int candidate_count = 0; 684 int candidate_count = 0;
718 int total_live_bytes = 0; 685 int total_live_bytes = 0;
719 686
720 const bool reduce_memory = heap()->ShouldReduceMemory(); 687 const bool reduce_memory = heap()->ShouldReduceMemory();
721 if (FLAG_manual_evacuation_candidates_selection) { 688 if (FLAG_manual_evacuation_candidates_selection) {
(...skipping 85 matching lines...)
807 "compaction-selection: space=%s reduce_memory=%d pages=%d " 774 "compaction-selection: space=%s reduce_memory=%d pages=%d "
808 "total_live_bytes=%d\n", 775 "total_live_bytes=%d\n",
809 AllocationSpaceName(space->identity()), reduce_memory, 776 AllocationSpaceName(space->identity()), reduce_memory,
810 candidate_count, total_live_bytes / KB); 777 candidate_count, total_live_bytes / KB);
811 } 778 }
812 } 779 }
813 780
814 781
815 void MarkCompactCollector::AbortCompaction() { 782 void MarkCompactCollector::AbortCompaction() {
816 if (compacting_) { 783 if (compacting_) {
784 RememberedSet<OLD_TO_OLD>::ClearAll(heap());
817 for (Page* p : evacuation_candidates_) { 785 for (Page* p : evacuation_candidates_) {
818 slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
819 p->ClearEvacuationCandidate(); 786 p->ClearEvacuationCandidate();
820 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); 787 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
821 } 788 }
822 compacting_ = false; 789 compacting_ = false;
823 evacuation_candidates_.Rewind(0); 790 evacuation_candidates_.Rewind(0);
824 } 791 }
825 DCHECK_EQ(0, evacuation_candidates_.length()); 792 DCHECK_EQ(0, evacuation_candidates_.length());
826 } 793 }
827 794
828 795
(...skipping 694 matching lines...)
1523 public: 1490 public:
1524 virtual ~HeapObjectVisitor() {} 1491 virtual ~HeapObjectVisitor() {}
1525 virtual bool Visit(HeapObject* object) = 0; 1492 virtual bool Visit(HeapObject* object) = 0;
1526 }; 1493 };
1527 1494
1528 1495
1529 class MarkCompactCollector::EvacuateVisitorBase 1496 class MarkCompactCollector::EvacuateVisitorBase
1530 : public MarkCompactCollector::HeapObjectVisitor { 1497 : public MarkCompactCollector::HeapObjectVisitor {
1531 public: 1498 public:
1532 EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces, 1499 EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces,
1533 SlotsBuffer** evacuation_slots_buffer, 1500 LocalSlotsBuffer* old_to_old_slots,
1534 LocalStoreBuffer* local_store_buffer) 1501 LocalSlotsBuffer* old_to_new_slots)
1535 : heap_(heap), 1502 : heap_(heap),
1536 evacuation_slots_buffer_(evacuation_slots_buffer),
1537 compaction_spaces_(compaction_spaces), 1503 compaction_spaces_(compaction_spaces),
1538 local_store_buffer_(local_store_buffer) {} 1504 old_to_old_slots_(old_to_old_slots),
1505 old_to_new_slots_(old_to_new_slots) {}
1539 1506
1540 bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object, 1507 bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
1541 HeapObject** target_object) { 1508 HeapObject** target_object) {
1542 int size = object->Size(); 1509 int size = object->Size();
1543 AllocationAlignment alignment = object->RequiredAlignment(); 1510 AllocationAlignment alignment = object->RequiredAlignment();
1544 AllocationResult allocation = target_space->AllocateRaw(size, alignment); 1511 AllocationResult allocation = target_space->AllocateRaw(size, alignment);
1545 if (allocation.To(target_object)) { 1512 if (allocation.To(target_object)) {
1546 heap_->mark_compact_collector()->MigrateObject( 1513 heap_->mark_compact_collector()->MigrateObject(
1547 *target_object, object, size, target_space->identity(), 1514 *target_object, object, size, target_space->identity(),
1548 evacuation_slots_buffer_, local_store_buffer_); 1515 old_to_old_slots_, old_to_new_slots_);
1549 return true; 1516 return true;
1550 } 1517 }
1551 return false; 1518 return false;
1552 } 1519 }
1553 1520
1554 protected: 1521 protected:
1555 Heap* heap_; 1522 Heap* heap_;
1556 SlotsBuffer** evacuation_slots_buffer_;
1557 CompactionSpaceCollection* compaction_spaces_; 1523 CompactionSpaceCollection* compaction_spaces_;
1558 LocalStoreBuffer* local_store_buffer_; 1524 LocalSlotsBuffer* old_to_old_slots_;
1525 LocalSlotsBuffer* old_to_new_slots_;
1559 }; 1526 };
1560 1527
1561 1528
1562 class MarkCompactCollector::EvacuateNewSpaceVisitor final 1529 class MarkCompactCollector::EvacuateNewSpaceVisitor final
1563 : public MarkCompactCollector::EvacuateVisitorBase { 1530 : public MarkCompactCollector::EvacuateVisitorBase {
1564 public: 1531 public:
1565 static const intptr_t kLabSize = 4 * KB; 1532 static const intptr_t kLabSize = 4 * KB;
1566 static const intptr_t kMaxLabObjectSize = 256; 1533 static const intptr_t kMaxLabObjectSize = 256;
1567 1534
1568 explicit EvacuateNewSpaceVisitor(Heap* heap, 1535 explicit EvacuateNewSpaceVisitor(Heap* heap,
1569 CompactionSpaceCollection* compaction_spaces, 1536 CompactionSpaceCollection* compaction_spaces,
1570 SlotsBuffer** evacuation_slots_buffer, 1537 LocalSlotsBuffer* old_to_old_slots,
1571 LocalStoreBuffer* local_store_buffer, 1538 LocalSlotsBuffer* old_to_new_slots,
1572 HashMap* local_pretenuring_feedback) 1539 HashMap* local_pretenuring_feedback)
1573 : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer, 1540 : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
1574 local_store_buffer), 1541 old_to_new_slots),
1575 buffer_(LocalAllocationBuffer::InvalidBuffer()), 1542 buffer_(LocalAllocationBuffer::InvalidBuffer()),
1576 space_to_allocate_(NEW_SPACE), 1543 space_to_allocate_(NEW_SPACE),
1577 promoted_size_(0), 1544 promoted_size_(0),
1578 semispace_copied_size_(0), 1545 semispace_copied_size_(0),
1579 local_pretenuring_feedback_(local_pretenuring_feedback) {} 1546 local_pretenuring_feedback_(local_pretenuring_feedback) {}
1580 1547
1581 bool Visit(HeapObject* object) override { 1548 bool Visit(HeapObject* object) override {
1582 heap_->UpdateAllocationSite<Heap::kCached>(object, 1549 heap_->UpdateAllocationSite<Heap::kCached>(object,
1583 local_pretenuring_feedback_); 1550 local_pretenuring_feedback_);
1584 int size = object->Size(); 1551 int size = object->Size();
1585 HeapObject* target_object = nullptr; 1552 HeapObject* target_object = nullptr;
1586 if (heap_->ShouldBePromoted(object->address(), size) && 1553 if (heap_->ShouldBePromoted(object->address(), size) &&
1587 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, 1554 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
1588 &target_object)) { 1555 &target_object)) {
1589 // If we end up needing more special cases, we should factor this out. 1556 // If we end up needing more special cases, we should factor this out.
1590 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { 1557 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
1591 heap_->array_buffer_tracker()->Promote( 1558 heap_->array_buffer_tracker()->Promote(
1592 JSArrayBuffer::cast(target_object)); 1559 JSArrayBuffer::cast(target_object));
1593 } 1560 }
1594 promoted_size_ += size; 1561 promoted_size_ += size;
1595 return true; 1562 return true;
1596 } 1563 }
1597 HeapObject* target = nullptr; 1564 HeapObject* target = nullptr;
1598 AllocationSpace space = AllocateTargetObject(object, &target); 1565 AllocationSpace space = AllocateTargetObject(object, &target);
1599 heap_->mark_compact_collector()->MigrateObject( 1566 heap_->mark_compact_collector()->MigrateObject(
1600 HeapObject::cast(target), object, size, space, 1567 HeapObject::cast(target), object, size, space,
1601 (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_, 1568 (space == NEW_SPACE) ? nullptr : old_to_old_slots_,
1602 (space == NEW_SPACE) ? nullptr : local_store_buffer_); 1569 (space == NEW_SPACE) ? nullptr : old_to_new_slots_);
1603 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { 1570 if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
1604 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); 1571 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
1605 } 1572 }
1606 semispace_copied_size_ += size; 1573 semispace_copied_size_ += size;
1607 return true; 1574 return true;
1608 } 1575 }
1609 1576
1610 intptr_t promoted_size() { return promoted_size_; } 1577 intptr_t promoted_size() { return promoted_size_; }
1611 intptr_t semispace_copied_size() { return semispace_copied_size_; } 1578 intptr_t semispace_copied_size() { return semispace_copied_size_; }
1612 1579
(...skipping 99 matching lines...)
1712 intptr_t semispace_copied_size_; 1679 intptr_t semispace_copied_size_;
1713 HashMap* local_pretenuring_feedback_; 1680 HashMap* local_pretenuring_feedback_;
1714 }; 1681 };
1715 1682
1716 1683
1717 class MarkCompactCollector::EvacuateOldSpaceVisitor final 1684 class MarkCompactCollector::EvacuateOldSpaceVisitor final
1718 : public MarkCompactCollector::EvacuateVisitorBase { 1685 : public MarkCompactCollector::EvacuateVisitorBase {
1719 public: 1686 public:
1720 EvacuateOldSpaceVisitor(Heap* heap, 1687 EvacuateOldSpaceVisitor(Heap* heap,
1721 CompactionSpaceCollection* compaction_spaces, 1688 CompactionSpaceCollection* compaction_spaces,
1722 SlotsBuffer** evacuation_slots_buffer, 1689 LocalSlotsBuffer* old_to_old_slots,
1723 LocalStoreBuffer* local_store_buffer) 1690 LocalSlotsBuffer* old_to_new_slots)
1724 : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer, 1691 : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
1725 local_store_buffer) {} 1692 old_to_new_slots) {}
1726 1693
1727 bool Visit(HeapObject* object) override { 1694 bool Visit(HeapObject* object) override {
1728 CompactionSpace* target_space = compaction_spaces_->Get( 1695 CompactionSpace* target_space = compaction_spaces_->Get(
1729 Page::FromAddress(object->address())->owner()->identity()); 1696 Page::FromAddress(object->address())->owner()->identity());
1730 HeapObject* target_object = nullptr; 1697 HeapObject* target_object = nullptr;
1731 if (TryEvacuateObject(target_space, object, &target_object)) { 1698 if (TryEvacuateObject(target_space, object, &target_object)) {
1732 DCHECK(object->map_word().IsForwardingAddress()); 1699 DCHECK(object->map_word().IsForwardingAddress());
1733 return true; 1700 return true;
1734 } 1701 }
1735 return false; 1702 return false;
(...skipping 436 matching lines...)
2172 { 2139 {
2173 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); 2140 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
2174 ClearSimpleMapTransitions(non_live_map_list); 2141 ClearSimpleMapTransitions(non_live_map_list);
2175 ClearFullMapTransitions(); 2142 ClearFullMapTransitions();
2176 } 2143 }
2177 2144
2178 MarkDependentCodeForDeoptimization(dependent_code_list); 2145 MarkDependentCodeForDeoptimization(dependent_code_list);
2179 2146
2180 ClearWeakCollections(); 2147 ClearWeakCollections();
2181 2148
2182 ClearInvalidStoreAndSlotsBufferEntries(); 2149 ClearInvalidRememberedSetSlots();
2183 } 2150 }
2184 2151
2185 2152
2186 void MarkCompactCollector::MarkDependentCodeForDeoptimization( 2153 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
2187 DependentCode* list_head) { 2154 DependentCode* list_head) {
2188 GCTracer::Scope gc_scope(heap()->tracer(), 2155 GCTracer::Scope gc_scope(heap()->tracer(),
2189 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); 2156 GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
2190 Isolate* isolate = this->isolate(); 2157 Isolate* isolate = this->isolate();
2191 DependentCode* current = list_head; 2158 DependentCode* current = list_head;
2192 while (current->length() > 0) { 2159 while (current->length() > 0) {
(...skipping 342 matching lines...)
2535 Object* obj = heap()->encountered_transition_arrays(); 2502 Object* obj = heap()->encountered_transition_arrays();
2536 while (obj != Smi::FromInt(0)) { 2503 while (obj != Smi::FromInt(0)) {
2537 TransitionArray* array = TransitionArray::cast(obj); 2504 TransitionArray* array = TransitionArray::cast(obj);
2538 obj = array->next_link(); 2505 obj = array->next_link();
2539 array->set_next_link(undefined, SKIP_WRITE_BARRIER); 2506 array->set_next_link(undefined, SKIP_WRITE_BARRIER);
2540 } 2507 }
2541 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); 2508 heap()->set_encountered_transition_arrays(Smi::FromInt(0));
2542 } 2509 }
2543 2510
2544 void MarkCompactCollector::RecordMigratedSlot( 2511 void MarkCompactCollector::RecordMigratedSlot(
2545 Object* value, Address slot, SlotsBuffer** evacuation_slots_buffer, 2512 Object* value, Address slot, LocalSlotsBuffer* old_to_old_slots,
2546 LocalStoreBuffer* local_store_buffer) { 2513 LocalSlotsBuffer* old_to_new_slots) {
2547 // When parallel compaction is in progress, store and slots buffer entries 2514 // When parallel compaction is in progress, store and slots buffer entries
2548 // require synchronization. 2515 // require synchronization.
2549 if (heap_->InNewSpace(value)) { 2516 if (heap_->InNewSpace(value)) {
2550 if (compaction_in_progress_) { 2517 if (compaction_in_progress_) {
2551 local_store_buffer->Record(slot); 2518 old_to_new_slots->Record(slot);
2552 } else { 2519 } else {
2553 Page* page = Page::FromAddress(slot); 2520 Page* page = Page::FromAddress(slot);
2554 RememberedSet<OLD_TO_NEW>::Insert(page, slot); 2521 RememberedSet<OLD_TO_NEW>::Insert(page, slot);
2555 } 2522 }
2556 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { 2523 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
2557 SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer, 2524 old_to_old_slots->Record(slot);
2558 reinterpret_cast<Object**>(slot),
2559 SlotsBuffer::IGNORE_OVERFLOW);
2560 } 2525 }
2561 } 2526 }
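
During parallel compaction each evacuator records into thread-local LocalSlotsBuffer instances rather than the global sets, so the hot path needs no synchronization; the buffers are merged back on the main thread in Evacuator::Finalize further down. The interface below is inferred from the uses in this diff (untyped Record, typed Record, and Iterate with two callbacks); the actual class is defined elsewhere in the heap sources.

// Inferred interface of LocalSlotsBuffer; illustration only.
class LocalSlotsBuffer {
 public:
  void Record(Address slot);                      // untyped slot
  void Record(SlotType slot_type, Address addr);  // typed slot
  // Replay every recorded entry: untyped slots go to the first callback,
  // typed slots to the second.
  template <typename UntypedCallback, typename TypedCallback>
  void Iterate(UntypedCallback untyped_callback, TypedCallback typed_callback);
};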
2562 2527
2563 2528 static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
2564 void MarkCompactCollector::RecordMigratedCodeEntrySlot( 2529 if (RelocInfo::IsCodeTarget(rmode)) {
2565 Address code_entry, Address code_entry_slot, 2530 return CODE_TARGET_SLOT;
2566 SlotsBuffer** evacuation_slots_buffer) { 2531 } else if (RelocInfo::IsCell(rmode)) {
2567 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { 2532 return CELL_TARGET_SLOT;
2568 SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer, 2533 } else if (RelocInfo::IsEmbeddedObject(rmode)) {
2569 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, 2534 return EMBEDDED_OBJECT_SLOT;
2570 SlotsBuffer::IGNORE_OVERFLOW); 2535 } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
2536 return DEBUG_TARGET_SLOT;
2571 } 2537 }
2538 UNREACHABLE();
2539 return NUMBER_OF_SLOT_TYPES;
2572 } 2540 }
2573 2541
2574 2542 void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
2575 void MarkCompactCollector::RecordMigratedCodeObjectSlot( 2543 Object* target) {
2576 Address code_object, SlotsBuffer** evacuation_slots_buffer) {
2577 SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
2578 SlotsBuffer::RELOCATED_CODE_OBJECT, code_object,
2579 SlotsBuffer::IGNORE_OVERFLOW);
2580 }
2581
2582
2583 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
2584 if (RelocInfo::IsCodeTarget(rmode)) {
2585 return SlotsBuffer::CODE_TARGET_SLOT;
2586 } else if (RelocInfo::IsCell(rmode)) {
2587 return SlotsBuffer::CELL_TARGET_SLOT;
2588 } else if (RelocInfo::IsEmbeddedObject(rmode)) {
2589 return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
2590 } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
2591 return SlotsBuffer::DEBUG_TARGET_SLOT;
2592 }
2593 UNREACHABLE();
2594 return SlotsBuffer::NUMBER_OF_SLOT_TYPES;
2595 }
2596
2597
2598 static inline SlotsBuffer::SlotType DecodeSlotType(
2599 SlotsBuffer::ObjectSlot slot) {
2600 return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
2601 }
2602
2603
2604 void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
2605 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); 2544 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
2545 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
2606 RelocInfo::Mode rmode = rinfo->rmode(); 2546 RelocInfo::Mode rmode = rinfo->rmode();
2607 if (target_page->IsEvacuationCandidate() && 2547 if (target_page->IsEvacuationCandidate() &&
2608 (rinfo->host() == NULL || 2548 (rinfo->host() == NULL ||
2609 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) { 2549 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
2610 Address addr = rinfo->pc(); 2550 Address addr = rinfo->pc();
2611 SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode); 2551 SlotType slot_type = SlotTypeForRMode(rmode);
2612 if (rinfo->IsInConstantPool()) { 2552 if (rinfo->IsInConstantPool()) {
2613 addr = rinfo->constant_pool_entry_address(); 2553 addr = rinfo->constant_pool_entry_address();
2614 if (RelocInfo::IsCodeTarget(rmode)) { 2554 if (RelocInfo::IsCodeTarget(rmode)) {
2615 slot_type = SlotsBuffer::CODE_ENTRY_SLOT; 2555 slot_type = CODE_ENTRY_SLOT;
2616 } else { 2556 } else {
2617 DCHECK(RelocInfo::IsEmbeddedObject(rmode)); 2557 DCHECK(RelocInfo::IsEmbeddedObject(rmode));
2618 slot_type = SlotsBuffer::OBJECT_SLOT; 2558 slot_type = OBJECT_SLOT;
2619 } 2559 }
2620 } 2560 }
2621 bool success = SlotsBuffer::AddTo( 2561 RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, slot_type, addr);
2622 slots_buffer_allocator_, target_page->slots_buffer_address(), slot_type,
2623 addr, SlotsBuffer::FAIL_ON_OVERFLOW);
2624 if (!success) {
2625 EvictPopularEvacuationCandidate(target_page);
2626 }
2627 } 2562 }
2628 } 2563 }
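
Typed slots carry a SlotType alongside the address because a code target, cell, or embedded object cannot be updated by writing through a plain Object**; the updater must first rebuild a RelocInfo of the matching mode. The sketch below shows how a slot recorded here is consumed, mirroring the OLD_TO_OLD typed pass in UpdatePointersAfterEvacuation near the end of this diff.

// Illustration only: consuming the typed slots recorded by RecordRelocSlot.
void UpdateRecordedTypedSlots(Heap* heap, Isolate* isolate,
                              PointersUpdatingVisitor* visitor) {
  RememberedSet<OLD_TO_OLD>::IterateTyped(
      heap, [isolate, visitor](SlotType type, Address slot) {
        UpdateTypedSlot(isolate, visitor, type, slot);  // rebuilds RelocInfo
        return REMOVE_SLOT;  // the set is emptied as part of the update
      });
}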
2629 2564
2630 2565
2631 class RecordMigratedSlotVisitor final : public ObjectVisitor { 2566 class RecordMigratedSlotVisitor final : public ObjectVisitor {
2632 public: 2567 public:
2633 RecordMigratedSlotVisitor(MarkCompactCollector* collector, 2568 RecordMigratedSlotVisitor(MarkCompactCollector* collector,
2634 SlotsBuffer** evacuation_slots_buffer, 2569 LocalSlotsBuffer* old_to_old_slots,
2635 LocalStoreBuffer* local_store_buffer) 2570 LocalSlotsBuffer* old_to_new_slots)
2636 : collector_(collector), 2571 : collector_(collector),
2637 evacuation_slots_buffer_(evacuation_slots_buffer), 2572 old_to_old_slots_(old_to_old_slots),
2638 local_store_buffer_(local_store_buffer) {} 2573 old_to_new_slots_(old_to_new_slots) {}
2639 2574
2640 V8_INLINE void VisitPointer(Object** p) override { 2575 V8_INLINE void VisitPointer(Object** p) override {
2641 collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p), 2576 collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p),
2642 evacuation_slots_buffer_, 2577 old_to_old_slots_, old_to_new_slots_);
2643 local_store_buffer_);
2644 } 2578 }
2645 2579
2646 V8_INLINE void VisitPointers(Object** start, Object** end) override { 2580 V8_INLINE void VisitPointers(Object** start, Object** end) override {
2647 while (start < end) { 2581 while (start < end) {
2648 collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start), 2582 collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start),
2649 evacuation_slots_buffer_, 2583 old_to_old_slots_, old_to_new_slots_);
2650 local_store_buffer_);
2651 ++start; 2584 ++start;
2652 } 2585 }
2653 } 2586 }
2654 2587
2655 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override { 2588 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
2656 if (collector_->compacting_) { 2589 if (collector_->compacting_) {
2657 Address code_entry = Memory::Address_at(code_entry_slot); 2590 Address code_entry = Memory::Address_at(code_entry_slot);
2658 collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot, 2591 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
2659 evacuation_slots_buffer_); 2592 old_to_old_slots_->Record(CODE_ENTRY_SLOT, code_entry_slot);
2593 }
2660 } 2594 }
2661 } 2595 }
2662 2596
2663 private: 2597 private:
2664 MarkCompactCollector* collector_; 2598 MarkCompactCollector* collector_;
2665 SlotsBuffer** evacuation_slots_buffer_; 2599 LocalSlotsBuffer* old_to_old_slots_;
2666 LocalStoreBuffer* local_store_buffer_; 2600 LocalSlotsBuffer* old_to_new_slots_;
2667 }; 2601 };
2668 2602
2669 2603
2670 // We scavenge new space simultaneously with sweeping. This is done in two 2604 // We scavenge new space simultaneously with sweeping. This is done in two
2671 // passes. 2605 // passes.
2672 // 2606 //
2673 // The first pass migrates all alive objects from one semispace to another or 2607 // The first pass migrates all alive objects from one semispace to another or
2674 // promotes them to old space. The forwarding address is written directly 2608 // promotes them to old space. The forwarding address is written directly
2675 // into the first word of the object without any encoding. If an object is 2609 // into the first word of the object without any encoding. If an object is
2676 // dead we write NULL as a forwarding address. 2610 // dead we write NULL as a forwarding address.
2677 // 2611 //
2678 // The second pass updates pointers to new space in all spaces. It is possible 2612 // The second pass updates pointers to new space in all spaces. It is possible
2679 // to encounter pointers to dead new space objects during traversal of pointers 2613 // to encounter pointers to dead new space objects during traversal of pointers
2680 // to new space. We should clear them to avoid encountering them during next 2614 // to new space. We should clear them to avoid encountering them during next
2681 // pointer iteration. This is an issue if the store buffer overflows and we 2615 // pointer iteration. This is an issue if the store buffer overflows and we
2682 // have to scan the entire old space, including dead objects, looking for 2616 // have to scan the entire old space, including dead objects, looking for
2683 // pointers to new space. 2617 // pointers to new space.
2684 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, 2618 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
2685 int size, AllocationSpace dest, 2619 int size, AllocationSpace dest,
2686 SlotsBuffer** evacuation_slots_buffer, 2620 LocalSlotsBuffer* old_to_old_slots,
2687 LocalStoreBuffer* local_store_buffer) { 2621 LocalSlotsBuffer* old_to_new_slots) {
2688 Address dst_addr = dst->address(); 2622 Address dst_addr = dst->address();
2689 Address src_addr = src->address(); 2623 Address src_addr = src->address();
2690 DCHECK(heap()->AllowedToBeMigrated(src, dest)); 2624 DCHECK(heap()->AllowedToBeMigrated(src, dest));
2691 DCHECK(dest != LO_SPACE); 2625 DCHECK(dest != LO_SPACE);
2692 if (dest == OLD_SPACE) { 2626 if (dest == OLD_SPACE) {
2693 DCHECK_OBJECT_SIZE(size); 2627 DCHECK_OBJECT_SIZE(size);
2694 DCHECK(evacuation_slots_buffer != nullptr);
2695 DCHECK(IsAligned(size, kPointerSize)); 2628 DCHECK(IsAligned(size, kPointerSize));
2696 2629
2697 heap()->MoveBlock(dst->address(), src->address(), size); 2630 heap()->MoveBlock(dst->address(), src->address(), size);
2698 RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer, 2631 RecordMigratedSlotVisitor visitor(this, old_to_old_slots, old_to_new_slots);
2699 local_store_buffer);
2700 dst->IterateBody(&visitor); 2632 dst->IterateBody(&visitor);
2701 } else if (dest == CODE_SPACE) { 2633 } else if (dest == CODE_SPACE) {
2702 DCHECK_CODEOBJECT_SIZE(size, heap()->code_space()); 2634 DCHECK_CODEOBJECT_SIZE(size, heap()->code_space());
2703 DCHECK(evacuation_slots_buffer != nullptr);
2704 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); 2635 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
2705 heap()->MoveBlock(dst_addr, src_addr, size); 2636 heap()->MoveBlock(dst_addr, src_addr, size);
2706 RecordMigratedCodeObjectSlot(dst_addr, evacuation_slots_buffer); 2637 old_to_old_slots->Record(RELOCATED_CODE_OBJECT, dst_addr);
2707 Code::cast(dst)->Relocate(dst_addr - src_addr); 2638 Code::cast(dst)->Relocate(dst_addr - src_addr);
2708 } else { 2639 } else {
2709 DCHECK_OBJECT_SIZE(size); 2640 DCHECK_OBJECT_SIZE(size);
2710 DCHECK(evacuation_slots_buffer == nullptr); 2641 DCHECK(old_to_old_slots == nullptr);
2711 DCHECK(dest == NEW_SPACE); 2642 DCHECK(dest == NEW_SPACE);
2712 heap()->MoveBlock(dst_addr, src_addr, size); 2643 heap()->MoveBlock(dst_addr, src_addr, size);
2713 } 2644 }
2714 heap()->OnMoveEvent(dst, src, size); 2645 heap()->OnMoveEvent(dst, src, size);
2715 Memory::Address_at(src_addr) = dst_addr; 2646 Memory::Address_at(src_addr) = dst_addr;
2716 } 2647 }
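
The last line above is the heart of the two-pass scheme: the destination address is written over the first word of the evacuated object, so any remaining pointer to the old location can be repaired by reading it back. A minimal sketch of that fix-up, assuming the usual MapWord forwarding semantics and mirroring PointersUpdatingVisitor::UpdateSlot.

// Illustration of the forwarding-pointer fix-up applied to recorded slots.
static void FixUpSlot(Object** slot) {
  Object* value = *slot;
  if (value->IsHeapObject()) {
    MapWord map_word = HeapObject::cast(value)->map_word();
    // An evacuated object has its new address stored where its map was.
    if (map_word.IsForwardingAddress()) {
      *slot = map_word.ToForwardingAddress();
    }
  }
}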
2717 2648
2718 2649 static inline void UpdateTypedSlot(Isolate* isolate, ObjectVisitor* v,
2719 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v, 2650 SlotType slot_type, Address addr) {
2720 SlotsBuffer::SlotType slot_type, Address addr) {
2721 switch (slot_type) { 2651 switch (slot_type) {
2722 case SlotsBuffer::CODE_TARGET_SLOT: { 2652 case CODE_TARGET_SLOT: {
2723 RelocInfo rinfo(isolate, addr, RelocInfo::CODE_TARGET, 0, NULL); 2653 RelocInfo rinfo(isolate, addr, RelocInfo::CODE_TARGET, 0, NULL);
2724 rinfo.Visit(isolate, v); 2654 rinfo.Visit(isolate, v);
2725 break; 2655 break;
2726 } 2656 }
2727 case SlotsBuffer::CELL_TARGET_SLOT: { 2657 case CELL_TARGET_SLOT: {
2728 RelocInfo rinfo(isolate, addr, RelocInfo::CELL, 0, NULL); 2658 RelocInfo rinfo(isolate, addr, RelocInfo::CELL, 0, NULL);
2729 rinfo.Visit(isolate, v); 2659 rinfo.Visit(isolate, v);
2730 break; 2660 break;
2731 } 2661 }
2732 case SlotsBuffer::CODE_ENTRY_SLOT: { 2662 case CODE_ENTRY_SLOT: {
2733 v->VisitCodeEntry(addr); 2663 v->VisitCodeEntry(addr);
2734 break; 2664 break;
2735 } 2665 }
2736 case SlotsBuffer::RELOCATED_CODE_OBJECT: { 2666 case RELOCATED_CODE_OBJECT: {
2737 HeapObject* obj = HeapObject::FromAddress(addr); 2667 HeapObject* obj = HeapObject::FromAddress(addr);
2738 Code::BodyDescriptor::IterateBody(obj, v); 2668 Code::BodyDescriptor::IterateBody(obj, v);
2739 break; 2669 break;
2740 } 2670 }
2741 case SlotsBuffer::DEBUG_TARGET_SLOT: { 2671 case DEBUG_TARGET_SLOT: {
2742 RelocInfo rinfo(isolate, addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0, 2672 RelocInfo rinfo(isolate, addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0,
2743 NULL); 2673 NULL);
2744 if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v); 2674 if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
2745 break; 2675 break;
2746 } 2676 }
2747 case SlotsBuffer::EMBEDDED_OBJECT_SLOT: { 2677 case EMBEDDED_OBJECT_SLOT: {
2748 RelocInfo rinfo(isolate, addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL); 2678 RelocInfo rinfo(isolate, addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
2749 rinfo.Visit(isolate, v); 2679 rinfo.Visit(isolate, v);
2750 break; 2680 break;
2751 } 2681 }
2752 case SlotsBuffer::OBJECT_SLOT: { 2682 case OBJECT_SLOT: {
2753 v->VisitPointer(reinterpret_cast<Object**>(addr)); 2683 v->VisitPointer(reinterpret_cast<Object**>(addr));
2754 break; 2684 break;
2755 } 2685 }
2756 default: 2686 default:
2757 UNREACHABLE(); 2687 UNREACHABLE();
2758 break; 2688 break;
2759 } 2689 }
2760 } 2690 }
2761 2691
2762 2692
(...skipping 84 matching lines...)
2847 } 2777 }
2848 } 2778 }
2849 2779
2850 private: 2780 private:
2851 inline void UpdatePointer(Object** p) { UpdateSlot(heap_, p); } 2781 inline void UpdatePointer(Object** p) { UpdateSlot(heap_, p); }
2852 2782
2853 Heap* heap_; 2783 Heap* heap_;
2854 }; 2784 };
2855 2785
2856 2786
2857 void MarkCompactCollector::UpdateSlots(SlotsBuffer* buffer) {
2858 PointersUpdatingVisitor v(heap_);
2859 size_t buffer_size = buffer->Size();
2860
2861 for (size_t slot_idx = 0; slot_idx < buffer_size; ++slot_idx) {
2862 SlotsBuffer::ObjectSlot slot = buffer->Get(slot_idx);
2863 if (!SlotsBuffer::IsTypedSlot(slot)) {
2864 PointersUpdatingVisitor::UpdateSlot(heap_, slot);
2865 } else {
2866 ++slot_idx;
2867 DCHECK(slot_idx < buffer_size);
2868 UpdateSlot(heap_->isolate(), &v, DecodeSlotType(slot),
2869 reinterpret_cast<Address>(buffer->Get(slot_idx)));
2870 }
2871 }
2872 }
2873
2874
2875 void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) {
2876 while (buffer != NULL) {
2877 UpdateSlots(buffer);
2878 buffer = buffer->next();
2879 }
2880 }
2881
2882
2883 static void UpdatePointer(HeapObject** address, HeapObject* object) { 2787 static void UpdatePointer(HeapObject** address, HeapObject* object) {
2884 MapWord map_word = object->map_word(); 2788 MapWord map_word = object->map_word();
2885 // Since we only filter invalid slots in old space, the store buffer can 2789 // Since we only filter invalid slots in old space, the store buffer can
2886 // still contain stale pointers in large object and in map spaces. Ignore 2790 // still contain stale pointers in large object and in map spaces. Ignore
2887 // these pointers here. 2791 // these pointers here.
2888 DCHECK(map_word.IsForwardingAddress() || 2792 DCHECK(map_word.IsForwardingAddress() ||
2889 !object->GetHeap()->old_space()->Contains( 2793 !object->GetHeap()->old_space()->Contains(
2890 reinterpret_cast<Address>(address))); 2794 reinterpret_cast<Address>(address)));
2891 if (map_word.IsForwardingAddress()) { 2795 if (map_word.IsForwardingAddress()) {
2892 // Update the corresponding slot. 2796 // Update the corresponding slot.
(...skipping 101 matching lines...)
2994 // If the slot is within the last found object in the cell, the slot is 2898 // If the slot is within the last found object in the cell, the slot is
2995 // in a live object. 2899 // in a live object.
2996 // Slots pointing to the first word of an object are invalid and removed. 2900 // Slots pointing to the first word of an object are invalid and removed.
2997 // This can happen when we move the object header while left trimming. 2901 // This can happen when we move the object header while left trimming.
2998 *out_object = object; 2902 *out_object = object;
2999 return true; 2903 return true;
3000 } 2904 }
3001 return false; 2905 return false;
3002 } 2906 }
3003 2907
3004 2908 HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
3005 bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) { 2909 Page* p = Page::FromAddress(slot);
3006 // This function does not support large objects right now. 2910 // This function does not support large objects right now.
3007 Space* owner = p->owner(); 2911 Space* owner = p->owner();
3008 if (owner == heap_->lo_space() || owner == NULL) { 2912 if (owner == heap_->lo_space() || owner == nullptr) {
3009 Object* large_object = heap_->lo_space()->FindObject(slot); 2913 Object* large_object = heap_->lo_space()->FindObject(slot);
3010 // This object has to exist, otherwise we would not have recorded a slot 2914 // This object has to exist, otherwise we would not have recorded a slot
3011 // for it. 2915 // for it.
3012 CHECK(large_object->IsHeapObject()); 2916 CHECK(large_object->IsHeapObject());
3013 HeapObject* large_heap_object = HeapObject::cast(large_object); 2917 HeapObject* large_heap_object = HeapObject::cast(large_object);
2918
3014 if (IsMarked(large_heap_object)) { 2919 if (IsMarked(large_heap_object)) {
3015 return true; 2920 return large_heap_object;
3016 } 2921 }
3017 return false; 2922 return nullptr;
3018 } 2923 }
3019 2924
3020 LiveObjectIterator<kBlackObjects> it(p); 2925 LiveObjectIterator<kBlackObjects> it(p);
3021 HeapObject* object = NULL; 2926 HeapObject* object = nullptr;
3022 while ((object = it.Next()) != NULL) { 2927 while ((object = it.Next()) != nullptr) {
3023 int size = object->Size(); 2928 int size = object->Size();
3024 2929 if (object->address() > slot) return nullptr;
3025 if (object->address() > slot) return false;
3026 if (object->address() <= slot && slot < (object->address() + size)) { 2930 if (object->address() <= slot && slot < (object->address() + size)) {
3027 return true; 2931 return object;
3028 } 2932 }
3029 } 2933 }
3030 return false; 2934 return nullptr;
3031 } 2935 }
3032 2936
3033 2937
3034 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) { 2938 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
3035 HeapObject* object = NULL; 2939 HeapObject* object = NULL;
3036 // The target object is black but we don't know if the source slot is black. 2940 // The target object is black but we don't know if the source slot is black.
3037 // The source object could have died and the slot could be part of a free 2941 // The source object could have died and the slot could be part of a free
3038 // space. Find out based on mark bits if the slot is part of a live object. 2942 // space. Find out based on mark bits if the slot is part of a live object.
3039 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) { 2943 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
3040 return false; 2944 return false;
3041 } 2945 }
3042 2946
3043 DCHECK(object != NULL); 2947 DCHECK(object != NULL);
3044 int offset = static_cast<int>(slot - object->address()); 2948 int offset = static_cast<int>(slot - object->address());
3045 return object->IsValidSlot(offset); 2949 return object->IsValidSlot(offset);
3046 } 2950 }
3047 2951
3048 2952
3049 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
3050 HeapObject* object) {
3051 // The target object has to be black.
3052 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
3053
3054 // The target object is black but we don't know if the source slot is black.
3055 // The source object could have died and the slot could be part of a free
3056 // space. Use the mark bit iterator to find out about liveness of the slot.
3057 CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
3058 }
3059
3060
3061 void MarkCompactCollector::EvacuateNewSpacePrologue() { 2953 void MarkCompactCollector::EvacuateNewSpacePrologue() {
3062 NewSpace* new_space = heap()->new_space(); 2954 NewSpace* new_space = heap()->new_space();
3063 NewSpacePageIterator it(new_space->bottom(), new_space->top()); 2955 NewSpacePageIterator it(new_space->bottom(), new_space->top());
3064 // Append the list of new space pages to be processed. 2956 // Append the list of new space pages to be processed.
3065 while (it.has_next()) { 2957 while (it.has_next()) {
3066 newspace_evacuation_candidates_.Add(it.next()); 2958 newspace_evacuation_candidates_.Add(it.next());
3067 } 2959 }
3068 new_space->Flip(); 2960 new_space->Flip();
3069 new_space->ResetAllocationInfo(); 2961 new_space->ResetAllocationInfo();
3070 } 2962 }
3071 2963
3072 void MarkCompactCollector::EvacuateNewSpaceEpilogue() { 2964 void MarkCompactCollector::EvacuateNewSpaceEpilogue() {
3073 newspace_evacuation_candidates_.Rewind(0); 2965 newspace_evacuation_candidates_.Rewind(0);
3074 } 2966 }
3075 2967
3076 2968
3077 void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized(
3078 SlotsBuffer* evacuation_slots_buffer) {
3079 base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_);
3080 evacuation_slots_buffers_.Add(evacuation_slots_buffer);
3081 }
3082
3083 class MarkCompactCollector::Evacuator : public Malloced { 2969 class MarkCompactCollector::Evacuator : public Malloced {
3084 public: 2970 public:
3085 Evacuator(MarkCompactCollector* collector, 2971 Evacuator(MarkCompactCollector* collector,
3086 const List<Page*>& evacuation_candidates, 2972 const List<Page*>& evacuation_candidates,
3087 const List<NewSpacePage*>& newspace_evacuation_candidates) 2973 const List<NewSpacePage*>& newspace_evacuation_candidates)
3088 : collector_(collector), 2974 : collector_(collector),
3089 evacuation_candidates_(evacuation_candidates), 2975 evacuation_candidates_(evacuation_candidates),
3090 newspace_evacuation_candidates_(newspace_evacuation_candidates), 2976 newspace_evacuation_candidates_(newspace_evacuation_candidates),
3091 compaction_spaces_(collector->heap()), 2977 compaction_spaces_(collector->heap()),
3092 local_slots_buffer_(nullptr),
3093 local_store_buffer_(collector->heap()),
3094 local_pretenuring_feedback_(HashMap::PointersMatch, 2978 local_pretenuring_feedback_(HashMap::PointersMatch,
3095 kInitialLocalPretenuringFeedbackCapacity), 2979 kInitialLocalPretenuringFeedbackCapacity),
3096 new_space_visitor_(collector->heap(), &compaction_spaces_, 2980 new_space_visitor_(collector->heap(), &compaction_spaces_,
3097 &local_slots_buffer_, &local_store_buffer_, 2981 &old_to_old_slots_, &old_to_new_slots_,
3098 &local_pretenuring_feedback_), 2982 &local_pretenuring_feedback_),
3099 old_space_visitor_(collector->heap(), &compaction_spaces_, 2983 old_space_visitor_(collector->heap(), &compaction_spaces_,
3100 &local_slots_buffer_, &local_store_buffer_), 2984 &old_to_old_slots_, &old_to_new_slots_),
3101 duration_(0.0), 2985 duration_(0.0),
3102 bytes_compacted_(0), 2986 bytes_compacted_(0),
3103 task_id_(0) {} 2987 task_id_(0) {}
3104 2988
3105 // Evacuate the configured set of pages in parallel. 2989 // Evacuate the configured set of pages in parallel.
3106 inline void EvacuatePages(); 2990 inline void EvacuatePages();
3107 2991
3108 // Merge back locally cached info sequentially. Note that this method needs 2992 // Merge back locally cached info sequentially. Note that this method needs
3109 // to be called from the main thread. 2993 // to be called from the main thread.
3110 inline void Finalize(); 2994 inline void Finalize();
(...skipping 16 matching lines...)
3127 inline bool EvacuateSinglePage(MemoryChunk* p, HeapObjectVisitor* visitor); 3011 inline bool EvacuateSinglePage(MemoryChunk* p, HeapObjectVisitor* visitor);
3128 3012
3129 MarkCompactCollector* collector_; 3013 MarkCompactCollector* collector_;
3130 3014
3131 // Pages to process. 3015 // Pages to process.
3132 const List<Page*>& evacuation_candidates_; 3016 const List<Page*>& evacuation_candidates_;
3133 const List<NewSpacePage*>& newspace_evacuation_candidates_; 3017 const List<NewSpacePage*>& newspace_evacuation_candidates_;
3134 3018
3135 // Locally cached collector data. 3019 // Locally cached collector data.
3136 CompactionSpaceCollection compaction_spaces_; 3020 CompactionSpaceCollection compaction_spaces_;
3137 SlotsBuffer* local_slots_buffer_; 3021 LocalSlotsBuffer old_to_old_slots_;
3138 LocalStoreBuffer local_store_buffer_; 3022 LocalSlotsBuffer old_to_new_slots_;
3139 HashMap local_pretenuring_feedback_; 3023 HashMap local_pretenuring_feedback_;
3140 3024
3141 // Visitors for the corresponding spaces. 3025 // Visitors for the corresponding spaces.
3142 EvacuateNewSpaceVisitor new_space_visitor_; 3026 EvacuateNewSpaceVisitor new_space_visitor_;
3143 EvacuateOldSpaceVisitor old_space_visitor_; 3027 EvacuateOldSpaceVisitor old_space_visitor_;
3144 3028
3145 // Bookkeeping info. 3029 // Bookkeeping info.
3146 double duration_; 3030 double duration_;
3147 intptr_t bytes_compacted_; 3031 intptr_t bytes_compacted_;
3148 3032
(...skipping 57 matching lines...)
3206 heap()->code_space()->MergeCompactionSpace( 3090 heap()->code_space()->MergeCompactionSpace(
3207 compaction_spaces_.Get(CODE_SPACE)); 3091 compaction_spaces_.Get(CODE_SPACE));
3208 heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_); 3092 heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_);
3209 heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size()); 3093 heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size());
3210 heap()->IncrementSemiSpaceCopiedObjectSize( 3094 heap()->IncrementSemiSpaceCopiedObjectSize(
3211 new_space_visitor_.semispace_copied_size()); 3095 new_space_visitor_.semispace_copied_size());
3212 heap()->IncrementYoungSurvivorsCounter( 3096 heap()->IncrementYoungSurvivorsCounter(
3213 new_space_visitor_.promoted_size() + 3097 new_space_visitor_.promoted_size() +
3214 new_space_visitor_.semispace_copied_size()); 3098 new_space_visitor_.semispace_copied_size());
3215 heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_); 3099 heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
3216 local_store_buffer_.Process(heap()->store_buffer()); 3100 // Move locally recorded slots to the global remembered sets.
3217 collector_->AddEvacuationSlotsBufferSynchronized(local_slots_buffer_); 3101 old_to_new_slots_.Iterate(
3102 [](Address slot) {
3103 Page* page = Page::FromAddress(slot);
3104 RememberedSet<OLD_TO_NEW>::Insert(page, slot);
3105 },
3106 [](SlotType type, Address slot) { UNREACHABLE(); });
3107 old_to_old_slots_.Iterate(
3108 [](Address slot) {
3109 Page* page = Page::FromAddress(slot);
3110 RememberedSet<OLD_TO_OLD>::Insert(page, slot);
3111 },
3112 [](SlotType type, Address slot) {
3113 Page* page = Page::FromAddress(slot);
3114 RememberedSet<OLD_TO_OLD>::InsertTyped(page, type, slot);
3115 });
3218 } 3116 }
3219 3117
3220 class MarkCompactCollector::CompactionTask : public CancelableTask { 3118 class MarkCompactCollector::CompactionTask : public CancelableTask {
3221 public: 3119 public:
3222 explicit CompactionTask(Heap* heap, Evacuator* evacuator) 3120 explicit CompactionTask(Heap* heap, Evacuator* evacuator)
3223 : CancelableTask(heap->isolate()), heap_(heap), evacuator_(evacuator) { 3121 : CancelableTask(heap->isolate()), heap_(heap), evacuator_(evacuator) {
3224 evacuator->set_task_id(id()); 3122 evacuator->set_task_id(id());
3225 } 3123 }
3226 3124
3227 virtual ~CompactionTask() {} 3125 virtual ~CompactionTask() {}
(...skipping 286 matching lines...)
3514 !ShouldSkipEvacuationSlotRecording(code)) { 3412 !ShouldSkipEvacuationSlotRecording(code)) {
3515 DCHECK(compacting_); 3413 DCHECK(compacting_);
3516 3414
3517 // If the object is white then no slots were recorded on it yet. 3415 // If the object is white then no slots were recorded on it yet.
3518 MarkBit mark_bit = Marking::MarkBitFrom(code); 3416 MarkBit mark_bit = Marking::MarkBitFrom(code);
3519 if (Marking::IsWhite(mark_bit)) return; 3417 if (Marking::IsWhite(mark_bit)) return;
3520 3418
3521 // Ignore all slots that might have been recorded in the body of the 3419 // Ignore all slots that might have been recorded in the body of the
3522 // deoptimized code object. Assumption: no slots will be recorded for 3420 // deoptimized code object. Assumption: no slots will be recorded for
3523 // this object after invalidating it. 3421 // this object after invalidating it.
3524 RemoveObjectSlots(code->instruction_start(), 3422 Page* page = Page::FromAddress(code->address());
3525 code->address() + code->Size()); 3423 Address start = code->instruction_start();
3424 Address end = code->address() + code->Size();
3425 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
3526 } 3426 }
3527 } 3427 }
3528 3428
3529 3429
3530 // Return true if the given code is deoptimized or will be deoptimized. 3430 // Return true if the given code is deoptimized or will be deoptimized.
3531 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { 3431 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
3532 return code->is_optimized_code() && code->marked_for_deoptimization(); 3432 return code->is_optimized_code() && code->marked_for_deoptimization();
3533 } 3433 }
3534 3434
3535 3435
3536 void MarkCompactCollector::RemoveObjectSlots(Address start_slot,
3537 Address end_slot) {
3538 // Remove entries by replacing them with an old-space slot containing a smi
3539 // that is located in an unmovable page.
3540 for (Page* p : evacuation_candidates_) {
3541 DCHECK(p->IsEvacuationCandidate() ||
3542 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3543 if (p->IsEvacuationCandidate()) {
3544 SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), start_slot,
3545 end_slot);
3546 }
3547 }
3548 }
3549
3550
3551 #ifdef VERIFY_HEAP 3436 #ifdef VERIFY_HEAP
3552 static void VerifyAllBlackObjects(MemoryChunk* page) { 3437 static void VerifyAllBlackObjects(MemoryChunk* page) {
3553 LiveObjectIterator<kAllLiveObjects> it(page); 3438 LiveObjectIterator<kAllLiveObjects> it(page);
3554 HeapObject* object = NULL; 3439 HeapObject* object = NULL;
3555 while ((object = it.Next()) != NULL) { 3440 while ((object = it.Next()) != NULL) {
3556 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); 3441 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
3557 } 3442 }
3558 } 3443 }
3559 #endif // VERIFY_HEAP 3444 #endif // VERIFY_HEAP
3560 3445
(...skipping 131 matching lines...)
3692 if (FLAG_verify_heap && !sweeping_in_progress_) { 3577 if (FLAG_verify_heap && !sweeping_in_progress_) {
3693 VerifyEvacuation(heap()); 3578 VerifyEvacuation(heap());
3694 } 3579 }
3695 #endif 3580 #endif
3696 } 3581 }
3697 3582
3698 3583
3699 void MarkCompactCollector::UpdatePointersAfterEvacuation() { 3584 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
3700 GCTracer::Scope gc_scope(heap()->tracer(), 3585 GCTracer::Scope gc_scope(heap()->tracer(),
3701 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS); 3586 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
3702 {
3703 GCTracer::Scope gc_scope(
3704 heap()->tracer(),
3705 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3706 UpdateSlotsRecordedIn(migration_slots_buffer_);
3707 if (FLAG_trace_fragmentation_verbose) {
3708 PrintF(" migration slots buffer: %d\n",
3709 SlotsBuffer::SizeOfChain(migration_slots_buffer_));
3710 }
3711 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
3712 DCHECK(migration_slots_buffer_ == NULL);
3713 3587
3714 // TODO(hpayer): Process the slots buffers in parallel. This has to be done
3715 // after evacuation of all pages finishes.
3716 int buffers = evacuation_slots_buffers_.length();
3717 for (int i = 0; i < buffers; i++) {
3718 SlotsBuffer* buffer = evacuation_slots_buffers_[i];
3719 UpdateSlotsRecordedIn(buffer);
3720 slots_buffer_allocator_->DeallocateChain(&buffer);
3721 }
3722 evacuation_slots_buffers_.Rewind(0);
3723 }
3724
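The block removed above drained the migration slots buffer and each per-evacuation buffer, then handed the chains back to the allocator. A hypothetical model of that chain processing (a buffer is assumed to be a fixed array plus a next pointer; the names are not V8's):

    #include <cstddef>
    #include <cstdint>

    using Address = std::uintptr_t;

    // Hypothetical chained buffer, mirroring the shape implied by
    // UpdateSlotsRecordedIn + DeallocateChain in the deleted code.
    struct SlotsBufferChain {
      static constexpr std::size_t kCapacity = 1024;
      Address slots[kCapacity];
      std::size_t count = 0;
      SlotsBufferChain* next = nullptr;
    };

    // Drain every buffer in the chain through an update function, then free it.
    template <typename UpdateFn>
    void ProcessAndFreeChain(SlotsBufferChain** head, UpdateFn update) {
      while (SlotsBufferChain* buffer = *head) {
        for (std::size_t i = 0; i < buffer->count; i++) update(buffer->slots[i]);
        *head = buffer->next;
        delete buffer;
      }
    }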
3725 // Second pass: find pointers to new space and update them.
3726 PointersUpdatingVisitor updating_visitor(heap()); 3588 PointersUpdatingVisitor updating_visitor(heap());
3727 3589
3728 { 3590 {
3729 GCTracer::Scope gc_scope( 3591 GCTracer::Scope gc_scope(
3730 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW); 3592 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW);
3731 // Update pointers in to-space. 3593 // Update pointers in to-space.
3732 SemiSpaceIterator to_it(heap()->new_space()); 3594 SemiSpaceIterator to_it(heap()->new_space());
3733 for (HeapObject* object = to_it.Next(); object != NULL; 3595 for (HeapObject* object = to_it.Next(); object != NULL;
3734 object = to_it.Next()) { 3596 object = to_it.Next()) {
3735 Map* map = object->map(); 3597 Map* map = object->map();
3736 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 3598 object->IterateBody(map->instance_type(), object->SizeFromMap(map),
3737 &updating_visitor); 3599 &updating_visitor);
3738 } 3600 }
3739 // Update roots. 3601 // Update roots.
3740 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); 3602 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
3741 3603
3742 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer); 3604 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer);
3743 } 3605 }
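Sketch of the shape of the OLD_TO_NEW pass, under the assumption that IterateWithWrapper reduces to invoking a heap-aware callback such as UpdatePointer once per recorded slot (the real wrapper also resolves the holding object; the types here are stand-ins):

    #include <cstdint>
    #include <vector>

    using Address = std::uintptr_t;
    struct Heap;  // opaque stand-in

    using ObjectSlotCallback = void (*)(Heap* heap, Address slot);

    // Visit every recorded old-to-new slot and hand it, together with the
    // heap, to a callback that rewrites the slot if the new-space object
    // it points to was moved during evacuation.
    void IterateOldToNew(Heap* heap, const std::vector<Address>& slots,
                         ObjectSlotCallback callback) {
      for (Address slot : slots) callback(heap, slot);
    }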
3744 3606
3745 { 3607 {
3608 Heap* heap = this->heap();
3609 GCTracer::Scope gc_scope(
3610 heap->tracer(),
3611 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3612
3613 RememberedSet<OLD_TO_OLD>::Iterate(heap, [heap](Address slot) {
3614 PointersUpdatingVisitor::UpdateSlot(heap,
3615 reinterpret_cast<Object**>(slot));
3616 return REMOVE_SLOT;
3617 });
3618 Isolate* isolate = heap->isolate();
3619 PointersUpdatingVisitor* visitor = &updating_visitor;
3620 RememberedSet<OLD_TO_OLD>::IterateTyped(
3621 heap, [isolate, visitor](SlotType type, Address slot) {
3622 UpdateTypedSlot(isolate, visitor, type, slot);
3623 return REMOVE_SLOT;
3624 });
3625 }
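The interesting part of the new API is the callback's return value: REMOVE_SLOT tells the remembered set to drop the slot once it has been processed, so the unconditional REMOVE_SLOT above empties the OLD_TO_OLD sets as a side effect of updating them. A self-contained sketch of that assumed contract, with a vector standing in for the per-page slot set:

    #include <cstdint>
    #include <vector>

    using Address = std::uintptr_t;
    enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

    // Sketch of the Iterate() contract: the callback processes one slot and
    // says whether the remembered set should keep it.
    template <typename Callback>
    void Iterate(std::vector<Address>* slots, Callback callback) {
      for (auto it = slots->begin(); it != slots->end();) {
        it = (callback(*it) == REMOVE_SLOT) ? slots->erase(it) : it + 1;
      }
    }

    // Usage, mirroring the lambdas in the patch:
    //   Iterate(&old_to_old_slots, [](Address) { return REMOVE_SLOT; });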
3626
3627 {
3746 GCTracer::Scope gc_scope( 3628 GCTracer::Scope gc_scope(
3747 heap()->tracer(), 3629 heap()->tracer(),
3748 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED); 3630 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED);
3749 for (Page* p : evacuation_candidates_) { 3631 for (Page* p : evacuation_candidates_) {
3750 DCHECK(p->IsEvacuationCandidate() || 3632 DCHECK(p->IsEvacuationCandidate() ||
3751 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); 3633 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3752 3634
3753 if (p->IsEvacuationCandidate()) { 3635 if (p->IsEvacuationCandidate()) {
3754 UpdateSlotsRecordedIn(p->slots_buffer());
3755 if (FLAG_trace_fragmentation_verbose) {
3756 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3757 SlotsBuffer::SizeOfChain(p->slots_buffer()));
3758 }
3759 slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
3760
3761 // Important: skip list should be cleared only after roots were updated 3636 // Important: skip list should be cleared only after roots were updated
3762 // because root iteration traverses the stack and might have to find 3637 // because root iteration traverses the stack and might have to find
3763 // code objects from a not-yet-updated pc pointing into an evacuation candidate. 3638 // code objects from a not-yet-updated pc pointing into an evacuation candidate.
3764 SkipList* list = p->skip_list(); 3639 SkipList* list = p->skip_list();
3765 if (list != NULL) list->Clear(); 3640 if (list != NULL) list->Clear();
3766 3641
3767 // First pass on aborted pages, fixing up all live objects. 3642 // First pass on aborted pages, fixing up all live objects.
3768 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { 3643 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
3769 p->ClearEvacuationCandidate(); 3644 p->ClearEvacuationCandidate();
3770 VisitLiveObjectsBody(p, &updating_visitor); 3645 VisitLiveObjectsBody(p, &updating_visitor);
(...skipping 241 matching lines...)
4012 3887
4013 3888
4014 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); } 3889 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); }
4015 3890
4016 3891
4017 void MarkCompactCollector::Initialize() { 3892 void MarkCompactCollector::Initialize() {
4018 MarkCompactMarkingVisitor::Initialize(); 3893 MarkCompactMarkingVisitor::Initialize();
4019 IncrementalMarking::Initialize(); 3894 IncrementalMarking::Initialize();
4020 } 3895 }
4021 3896
4022 3897 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* host, Address slot,
4023 void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
4024 if (FLAG_trace_fragmentation) {
4025 PrintF("Page %p is too popular. Disabling evacuation.\n",
4026 reinterpret_cast<void*>(page));
4027 }
4028
4029 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow);
4030
4031 // TODO(gc) If all evacuation candidates are too popular we
4032 // should stop slots recording entirely.
4033 page->ClearEvacuationCandidate();
4034
4035 DCHECK(!page->IsFlagSet(Page::POPULAR_PAGE));
4036 page->SetFlag(Page::POPULAR_PAGE);
4037
4038 // We were not collecting slots on this page that point
4039 // to other evacuation candidates, thus we have to
4040 // rescan the page after evacuation to discover and update all
4041 // pointers to evacuated objects.
4042 page->SetFlag(Page::RESCAN_ON_EVACUATION);
4043 }
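This whole eviction path existed only because SlotsBuffer::AddTo with FAIL_ON_OVERFLOW can fail; a remembered set that reserves room for every possible slot on the page up front has no overflow, so the new side needs no equivalent. A sketch of the old failure mode, using a hypothetical fixed-capacity buffer:

    #include <cstddef>
    #include <cstdint>

    using Address = std::uintptr_t;

    // Hypothetical bounded buffer: TryAdd() can fail, which is what forced
    // the EvictPopularEvacuationCandidate() path above.
    struct BoundedSlotsBuffer {
      static constexpr std::size_t kCapacity = 1024;
      Address slots[kCapacity];
      std::size_t count = 0;

      bool TryAdd(Address slot) {
        if (count == kCapacity) return false;  // overflow: caller must evict
        slots[count++] = slot;
        return true;
      }
    };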
4044
4045
4046 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
4047 Code* target) { 3898 Code* target) {
4048 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); 3899 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
3900 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
4049 if (target_page->IsEvacuationCandidate() && 3901 if (target_page->IsEvacuationCandidate() &&
4050 !ShouldSkipEvacuationSlotRecording(object)) { 3902 !ShouldSkipEvacuationSlotRecording(host)) {
4051 if (!SlotsBuffer::AddTo(slots_buffer_allocator_, 3903 RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, CODE_ENTRY_SLOT, slot);
4052 target_page->slots_buffer_address(),
4053 SlotsBuffer::CODE_ENTRY_SLOT, slot,
4054 SlotsBuffer::FAIL_ON_OVERFLOW)) {
4055 EvictPopularEvacuationCandidate(target_page);
4056 }
4057 } 3904 }
4058 } 3905 }
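Note the ownership change in this hunk: the typed slot is now recorded on source_page, the page of the host object, instead of in a buffer hanging off target_page. A minimal sketch of that recording rule with stand-in types:

    #include <cstdint>
    #include <map>

    using Address = std::uintptr_t;
    enum SlotType { CODE_ENTRY_SLOT };

    // Hypothetical page owning its typed-slot set: the slot is filed on the
    // page of the host object, keyed by the slot's address, rather than on
    // the page of the target it points to.
    struct Page {
      std::map<Address, SlotType> typed_slots;
      void InsertTyped(SlotType type, Address slot) { typed_slots[slot] = type; }
    };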
4059 3906
4060 3907
4061 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { 3908 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
4062 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); 3909 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
4063 if (is_compacting()) { 3910 if (is_compacting()) {
4064 Code* host = 3911 Code* host =
4065 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( 3912 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
4066 pc); 3913 pc);
4067 MarkBit mark_bit = Marking::MarkBitFrom(host); 3914 MarkBit mark_bit = Marking::MarkBitFrom(host);
4068 if (Marking::IsBlack(mark_bit)) { 3915 if (Marking::IsBlack(mark_bit)) {
4069 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 3916 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
4070 RecordRelocSlot(&rinfo, target); 3917 RecordRelocSlot(host, &rinfo, target);
4071 } 3918 }
4072 } 3919 }
4073 } 3920 }
4074 3921
4075 } // namespace internal 3922 } // namespace internal
4076 } // namespace v8 3923 } // namespace v8