Chromium Code Reviews

Diff: src/heap/mark-compact.cc

Issue 1725073003: Revert of Replace slots buffer with remembered set. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/deoptimizer.h"
 #include "src/execution.h"
 #include "src/frames-inl.h"
 #include "src/gdb-jit.h"
 #include "src/global-handles.h"
 #include "src/heap/array-buffer-tracker.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
+#include "src/heap/slots-buffer.h"
 #include "src/heap/spaces-inl.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"
 #include "src/profiler/cpu-profiler.h"
 #include "src/utils-inl.h"
 #include "src/v8.h"

 namespace v8 {
 namespace internal {

(...skipping 13 matching lines...)
 // MarkCompactCollector

 MarkCompactCollector::MarkCompactCollector(Heap* heap)
     :  // NOLINT
 #ifdef DEBUG
       state_(IDLE),
 #endif
       marking_parity_(ODD_MARKING_PARITY),
       was_marked_incrementally_(false),
       evacuation_(false),
+      slots_buffer_allocator_(nullptr),
+      migration_slots_buffer_(nullptr),
       heap_(heap),
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(0),
       code_flusher_(nullptr),
       have_code_to_deoptimize_(false),
       compacting_(false),
       sweeping_in_progress_(false),
       compaction_in_progress_(false),
       pending_sweeper_tasks_semaphore_(0),
       pending_compaction_tasks_semaphore_(0) {
(...skipping 172 matching lines...)
   DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
   DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
   DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
   DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

   free_list_old_space_.Reset(new FreeList(heap_->old_space()));
   free_list_code_space_.Reset(new FreeList(heap_->code_space()));
   free_list_map_space_.Reset(new FreeList(heap_->map_space()));
   EnsureMarkingDequeIsReserved();
   EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
+  slots_buffer_allocator_ = new SlotsBufferAllocator();

   if (FLAG_flush_code) {
     code_flusher_ = new CodeFlusher(isolate());
     if (FLAG_trace_code_flushing) {
       PrintF("[code-flushing is now on]\n");
     }
   }
 }


 void MarkCompactCollector::TearDown() {
   AbortCompaction();
   delete marking_deque_memory_;
+  delete slots_buffer_allocator_;
   delete code_flusher_;
 }


 void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
   DCHECK(!p->NeverEvacuate());
   p->MarkEvacuationCandidate();
   evacuation_candidates_.Add(p);
 }

(...skipping 26 matching lines...)

   heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea();
   heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea();

     compacting_ = evacuation_candidates_.length() > 0;
   }

   return compacting_;
 }

-void MarkCompactCollector::ClearInvalidRememberedSetSlots() {
+
+void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_CLEAR_STORE_BUFFER);
     RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap());
   }
-  // There is not need to filter the old to old set because
-  // it is completely cleared after the mark-compact GC.
-  // The slots that become invalid due to runtime transitions are
-  // cleared eagerly immediately after the transition.

+  {
+    GCTracer::Scope gc_scope(heap()->tracer(),
+                             GCTracer::Scope::MC_CLEAR_SLOTS_BUFFER);
+    for (Page* p : evacuation_candidates_) {
+      SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer());
+    }
+  }
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
-    RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap());
+    VerifyValidStoreAndSlotsBufferEntries();
   }
 #endif
 }

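The hunk above restores the second clearing step that the remembered-set patch had removed: after the OLD_TO_NEW set is filtered, every evacuation candidate's slots buffer is scrubbed of entries that no longer lie inside live objects. A minimal sketch of that filtering idea, using std::vector and a caller-supplied liveness predicate as stand-ins for SlotsBuffer::RemoveInvalidSlots (the names and container here are illustrative, not V8's):

#include <algorithm>
#include <cstdint>
#include <functional>
#include <vector>

using Address = uintptr_t;

// Drop recorded slots that the liveness check rejects. The real SlotsBuffer
// overwrites invalid entries with a removed-entry sentinel instead of
// shrinking the buffer, but the filtering criterion is the same.
void RemoveInvalidSlots(std::vector<Address>* slots,
                        const std::function<bool(Address)>& in_live_object) {
  slots->erase(
      std::remove_if(slots->begin(), slots->end(),
                     [&](Address slot) { return !in_live_object(slot); }),
      slots->end());
}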

+#ifdef VERIFY_HEAP
+static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
+  PageIterator it(space);
+  while (it.has_next()) {
+    Page* p = it.next();
+    SlotsBuffer::VerifySlots(heap, p->slots_buffer());
+  }
+}
+
+
+void MarkCompactCollector::VerifyValidStoreAndSlotsBufferEntries() {
+  RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap());
+
+  VerifyValidSlotsBufferEntries(heap(), heap()->old_space());
+  VerifyValidSlotsBufferEntries(heap(), heap()->code_space());
+  VerifyValidSlotsBufferEntries(heap(), heap()->map_space());
+
+  LargeObjectIterator it(heap()->lo_space());
+  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
+    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    SlotsBuffer::VerifySlots(heap(), chunk->slots_buffer());
+  }
+}
+#endif
+
+
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
   DCHECK(state_ == PREPARE_GC);

   MarkLiveObjects();

   DCHECK(heap_->incremental_marking()->IsStopped());

   ClearNonLiveReferences();

(...skipping 329 matching lines...)

     if (p->IsFlagSet(Page::POPULAR_PAGE)) {
       // This page had slots buffer overflow on previous GC, skip it.
       p->ClearFlag(Page::POPULAR_PAGE);
       continue;
     }
     // Invariant: Evacuation candidates are just created when marking is
     // started. This means that sweeping has finished. Furthermore, at the end
     // of a GC all evacuation candidates are cleared and their slot buffers are
     // released.
     CHECK(!p->IsEvacuationCandidate());
-    CHECK_NULL(p->old_to_old_slots());
-    CHECK_NULL(p->typed_old_to_old_slots());
+    CHECK(p->slots_buffer() == nullptr);
     CHECK(p->SweepingDone());
     DCHECK(p->area_size() == area_size);
     pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
   }

   int candidate_count = 0;
   int total_live_bytes = 0;

   const bool reduce_memory = heap()->ShouldReduceMemory();
   if (FLAG_manual_evacuation_candidates_selection) {

(...skipping 85 matching lines...)

         "compaction-selection: space=%s reduce_memory=%d pages=%d "
         "total_live_bytes=%d\n",
         AllocationSpaceName(space->identity()), reduce_memory,
         candidate_count, total_live_bytes / KB);
   }
 }


 void MarkCompactCollector::AbortCompaction() {
   if (compacting_) {
-    RememberedSet<OLD_TO_OLD>::ClearAll(heap());
     for (Page* p : evacuation_candidates_) {
+      slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
       p->ClearEvacuationCandidate();
       p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
     }
     compacting_ = false;
     evacuation_candidates_.Rewind(0);
   }
   DCHECK_EQ(0, evacuation_candidates_.length());
 }
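AbortCompaction above releases each candidate page's buffer with DeallocateChain, which hints at the underlying representation: a page's slots buffer is a singly linked chain of fixed-size blocks, and one head pointer on the page reaches them all. A rough self-contained sketch of that shape (the capacity constant and field names are assumptions for illustration, not V8's actual layout):

#include <cstddef>

// One fixed-size block of recorded slots; full blocks are chained together,
// newest first, so a single pointer owns the whole history.
struct SlotsBuffer {
  static const size_t kCapacity = 1021;  // illustrative only
  SlotsBuffer* next;
  size_t idx;
  void* slots[kCapacity];
};

struct SlotsBufferAllocator {
  SlotsBuffer* AllocateBuffer(SlotsBuffer* next) {
    return new SlotsBuffer{next, 0, {}};
  }
  // Walk and free the chain, clearing the owner's head pointer, which is
  // what DeallocateChain(p->slots_buffer_address()) does per page above.
  void DeallocateChain(SlotsBuffer** buffer_address) {
    SlotsBuffer* buffer = *buffer_address;
    while (buffer != nullptr) {
      SlotsBuffer* next = buffer->next;
      delete buffer;
      buffer = next;
    }
    *buffer_address = nullptr;
  }
};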


(...skipping 694 matching lines...)

  public:
   virtual ~HeapObjectVisitor() {}
   virtual bool Visit(HeapObject* object) = 0;
 };


 class MarkCompactCollector::EvacuateVisitorBase
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
   EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces,
-                      LocalSlotsBuffer* old_to_old_slots,
-                      LocalSlotsBuffer* old_to_new_slots)
+                      SlotsBuffer** evacuation_slots_buffer,
+                      LocalStoreBuffer* local_store_buffer)
       : heap_(heap),
+        evacuation_slots_buffer_(evacuation_slots_buffer),
         compaction_spaces_(compaction_spaces),
-        old_to_old_slots_(old_to_old_slots),
-        old_to_new_slots_(old_to_new_slots) {}
+        local_store_buffer_(local_store_buffer) {}

   bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
                          HeapObject** target_object) {
     int size = object->Size();
     AllocationAlignment alignment = object->RequiredAlignment();
     AllocationResult allocation = target_space->AllocateRaw(size, alignment);
     if (allocation.To(target_object)) {
       heap_->mark_compact_collector()->MigrateObject(
           *target_object, object, size, target_space->identity(),
-          old_to_old_slots_, old_to_new_slots_);
+          evacuation_slots_buffer_, local_store_buffer_);
       return true;
     }
     return false;
   }

  protected:
   Heap* heap_;
+  SlotsBuffer** evacuation_slots_buffer_;
   CompactionSpaceCollection* compaction_spaces_;
-  LocalSlotsBuffer* old_to_old_slots_;
-  LocalSlotsBuffer* old_to_new_slots_;
+  LocalStoreBuffer* local_store_buffer_;
 };
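TryEvacuateObject is the allocate-then-migrate core shared by every evacuating visitor: reserve space in the target space first, and only on success copy the object and record its slots; on failure the object simply stays put and the caller picks another strategy. A schematic of that control flow with hypothetical minimal types (bump-pointer space, raw byte copy), not V8's API:

#include <cstddef>
#include <cstring>
#include <optional>
#include <vector>

struct FakeSpace {
  std::vector<char> backing;  // e.g. FakeSpace{std::vector<char>(4096)}
  size_t top = 0;
  // Bump-pointer allocation; nullopt signals the fallback path.
  std::optional<size_t> AllocateRaw(size_t size) {
    if (top + size > backing.size()) return std::nullopt;
    size_t offset = top;
    top += size;
    return offset;
  }
};

// Mirrors the shape of EvacuateVisitorBase::TryEvacuateObject: failure is
// not an error, it just means "try another space or give up on this page".
bool TryEvacuateObject(FakeSpace* target, const char* object, size_t size,
                       size_t* target_offset) {
  std::optional<size_t> allocation = target->AllocateRaw(size);
  if (!allocation) return false;
  std::memcpy(&target->backing[*allocation], object, size);  // "migrate"
  *target_offset = *allocation;
  return true;
}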


 class MarkCompactCollector::EvacuateNewSpaceVisitor final
     : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   static const intptr_t kLabSize = 4 * KB;
   static const intptr_t kMaxLabObjectSize = 256;

   explicit EvacuateNewSpaceVisitor(Heap* heap,
                                    CompactionSpaceCollection* compaction_spaces,
-                                   LocalSlotsBuffer* old_to_old_slots,
-                                   LocalSlotsBuffer* old_to_new_slots,
+                                   SlotsBuffer** evacuation_slots_buffer,
+                                   LocalStoreBuffer* local_store_buffer,
                                    HashMap* local_pretenuring_feedback)
-      : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
-                            old_to_new_slots),
+      : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer,
+                            local_store_buffer),
         buffer_(LocalAllocationBuffer::InvalidBuffer()),
         space_to_allocate_(NEW_SPACE),
         promoted_size_(0),
         semispace_copied_size_(0),
         local_pretenuring_feedback_(local_pretenuring_feedback) {}

   bool Visit(HeapObject* object) override {
     heap_->UpdateAllocationSite<Heap::kCached>(object,
                                                local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
       // If we end up needing more special cases, we should factor this out.
       if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
         heap_->array_buffer_tracker()->Promote(
             JSArrayBuffer::cast(target_object));
       }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     heap_->mark_compact_collector()->MigrateObject(
         HeapObject::cast(target), object, size, space,
-        (space == NEW_SPACE) ? nullptr : old_to_old_slots_,
-        (space == NEW_SPACE) ? nullptr : old_to_new_slots_);
+        (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_,
+        (space == NEW_SPACE) ? nullptr : local_store_buffer_);
     if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
       heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
     }
     semispace_copied_size_ += size;
     return true;
   }

   intptr_t promoted_size() { return promoted_size_; }
   intptr_t semispace_copied_size() { return semispace_copied_size_; }

(...skipping 99 matching lines...)

   intptr_t semispace_copied_size_;
   HashMap* local_pretenuring_feedback_;
 };


 class MarkCompactCollector::EvacuateOldSpaceVisitor final
     : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   EvacuateOldSpaceVisitor(Heap* heap,
                           CompactionSpaceCollection* compaction_spaces,
-                          LocalSlotsBuffer* old_to_old_slots,
-                          LocalSlotsBuffer* old_to_new_slots)
-      : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots,
-                            old_to_new_slots) {}
+                          SlotsBuffer** evacuation_slots_buffer,
+                          LocalStoreBuffer* local_store_buffer)
+      : EvacuateVisitorBase(heap, compaction_spaces, evacuation_slots_buffer,
+                            local_store_buffer) {}

   bool Visit(HeapObject* object) override {
     CompactionSpace* target_space = compaction_spaces_->Get(
         Page::FromAddress(object->address())->owner()->identity());
     HeapObject* target_object = nullptr;
     if (TryEvacuateObject(target_space, object, &target_object)) {
       DCHECK(object->map_word().IsForwardingAddress());
       return true;
     }
     return false;

(...skipping 436 matching lines...)
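The two visitors above split evacuation work by generation: EvacuateNewSpaceVisitor chooses between promoting a young object into old space and copying it within the semispaces, while EvacuateOldSpaceVisitor always moves objects into the compaction space matching their page's owner. A schematic of the new-space decision and its counters; ShouldBePromoted and the CopyTo* calls are hypothetical stand-ins for the real heap operations:

#include <cstdint>

struct NewSpaceEvacuationStats {
  intptr_t promoted_size = 0;
  intptr_t semispace_copied_size = 0;
};

// Sketch of EvacuateNewSpaceVisitor::Visit's two outcomes.
template <typename Heap>
bool VisitNewSpaceObject(Heap* heap, uintptr_t address, int size,
                         NewSpaceEvacuationStats* stats) {
  if (heap->ShouldBePromoted(address, size) &&
      heap->CopyToOldSpace(address, size)) {
    stats->promoted_size += size;  // object left the young generation
    return true;
  }
  heap->CopyToNewSpace(address, size);  // survives in the other semispace
  stats->semispace_copied_size += size;
  return true;
}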
   {
     GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
     ClearSimpleMapTransitions(non_live_map_list);
     ClearFullMapTransitions();
   }

   MarkDependentCodeForDeoptimization(dependent_code_list);

   ClearWeakCollections();

-  ClearInvalidRememberedSetSlots();
+  ClearInvalidStoreAndSlotsBufferEntries();
 }


 void MarkCompactCollector::MarkDependentCodeForDeoptimization(
     DependentCode* list_head) {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
   Isolate* isolate = this->isolate();
   DependentCode* current = list_head;
   while (current->length() > 0) {

(...skipping 342 matching lines...)

   Object* obj = heap()->encountered_transition_arrays();
   while (obj != Smi::FromInt(0)) {
     TransitionArray* array = TransitionArray::cast(obj);
     obj = array->next_link();
     array->set_next_link(undefined, SKIP_WRITE_BARRIER);
   }
   heap()->set_encountered_transition_arrays(Smi::FromInt(0));
 }

 void MarkCompactCollector::RecordMigratedSlot(
-    Object* value, Address slot, LocalSlotsBuffer* old_to_old_slots,
-    LocalSlotsBuffer* old_to_new_slots) {
+    Object* value, Address slot, SlotsBuffer** evacuation_slots_buffer,
+    LocalStoreBuffer* local_store_buffer) {
   // When parallel compaction is in progress, store and slots buffer entries
   // require synchronization.
   if (heap_->InNewSpace(value)) {
     if (compaction_in_progress_) {
-      old_to_new_slots->Record(slot);
+      local_store_buffer->Record(slot);
     } else {
       Page* page = Page::FromAddress(slot);
       RememberedSet<OLD_TO_NEW>::Insert(page, slot);
     }
   } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
-    old_to_old_slots->Record(slot);
+    SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
+                       reinterpret_cast<Object**>(slot),
+                       SlotsBuffer::IGNORE_OVERFLOW);
   }
 }
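RecordMigratedSlot dispatches on where the slot's new value lives: pointers into new space go to the store buffer (a thread-local one while parallel compaction is running), pointers onto evacuation candidates go into the slots buffer, and pointers to immovable old-space objects need no record at all. The same decision, restated compactly over placeholder predicates:

#include <cstdint>
#include <vector>

using Address = uintptr_t;

struct RecordedSlots {
  std::vector<Address> old_to_new;  // store-buffer role
  std::vector<Address> old_to_old;  // slots-buffer role
};

template <typename Heap>
void RecordMigratedSlot(Heap* heap, Address value, Address slot,
                        RecordedSlots* out) {
  if (heap->InNewSpace(value)) {
    // Old-to-new pointer: the next scavenge must revisit this slot.
    out->old_to_new.push_back(slot);
  } else if (heap->IsOnEvacuationCandidate(value)) {
    // Old-to-old pointer into a page that will move: fix up after evacuation.
    out->old_to_old.push_back(slot);
  }
}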

-static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
+
+void MarkCompactCollector::RecordMigratedCodeEntrySlot(
+    Address code_entry, Address code_entry_slot,
+    SlotsBuffer** evacuation_slots_buffer) {
+  if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
+    SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
+                       SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
+                       SlotsBuffer::IGNORE_OVERFLOW);
+  }
+}
+
+
+void MarkCompactCollector::RecordMigratedCodeObjectSlot(
+    Address code_object, SlotsBuffer** evacuation_slots_buffer) {
+  SlotsBuffer::AddTo(slots_buffer_allocator_, evacuation_slots_buffer,
+                     SlotsBuffer::RELOCATED_CODE_OBJECT, code_object,
+                     SlotsBuffer::IGNORE_OVERFLOW);
+}
+
+
+static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
   if (RelocInfo::IsCodeTarget(rmode)) {
-    return CODE_TARGET_SLOT;
+    return SlotsBuffer::CODE_TARGET_SLOT;
   } else if (RelocInfo::IsCell(rmode)) {
-    return CELL_TARGET_SLOT;
+    return SlotsBuffer::CELL_TARGET_SLOT;
   } else if (RelocInfo::IsEmbeddedObject(rmode)) {
-    return EMBEDDED_OBJECT_SLOT;
+    return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
   } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
-    return DEBUG_TARGET_SLOT;
+    return SlotsBuffer::DEBUG_TARGET_SLOT;
   }
   UNREACHABLE();
-  return NUMBER_OF_SLOT_TYPES;
+  return SlotsBuffer::NUMBER_OF_SLOT_TYPES;
 }

-void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
-                                           Object* target) {
+
+static inline SlotsBuffer::SlotType DecodeSlotType(
+    SlotsBuffer::ObjectSlot slot) {
+  return static_cast<SlotsBuffer::SlotType>(reinterpret_cast<intptr_t>(slot));
+}
+
+
+void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
-  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
   RelocInfo::Mode rmode = rinfo->rmode();
   if (target_page->IsEvacuationCandidate() &&
       (rinfo->host() == NULL ||
        !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
     Address addr = rinfo->pc();
-    SlotType slot_type = SlotTypeForRMode(rmode);
+    SlotsBuffer::SlotType slot_type = SlotTypeForRMode(rmode);
     if (rinfo->IsInConstantPool()) {
       addr = rinfo->constant_pool_entry_address();
       if (RelocInfo::IsCodeTarget(rmode)) {
-        slot_type = CODE_ENTRY_SLOT;
+        slot_type = SlotsBuffer::CODE_ENTRY_SLOT;
       } else {
         DCHECK(RelocInfo::IsEmbeddedObject(rmode));
-        slot_type = OBJECT_SLOT;
+        slot_type = SlotsBuffer::OBJECT_SLOT;
       }
     }
-    RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, slot_type, addr);
+    bool success = SlotsBuffer::AddTo(
+        slots_buffer_allocator_, target_page->slots_buffer_address(), slot_type,
+        addr, SlotsBuffer::FAIL_ON_OVERFLOW);
+    if (!success) {
+      EvictPopularEvacuationCandidate(target_page);
+    }
   }
 }
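Note the two overflow policies in the re-added code: migration-time recording passes IGNORE_OVERFLOW and lets the chain grow, while RecordRelocSlot uses FAIL_ON_OVERFLOW and, when the buffer is full, evicts the overly popular target page from the candidate set instead (the POPULAR_PAGE skip earlier in this file is the follow-up at the next GC). A sketch of that fallback under an assumed bounded buffer:

#include <cstddef>
#include <vector>

struct BoundedSlotsBuffer {
  size_t limit = 1024;                   // illustrative bound
  std::vector<void*> slots;
  bool Add(void* slot) {                 // FAIL_ON_OVERFLOW behaviour
    if (slots.size() >= limit) return false;
    slots.push_back(slot);
    return true;
  }
};

struct CandidatePage {
  BoundedSlotsBuffer buffer;
  bool is_evacuation_candidate = true;
};

void RecordSlotOrEvict(CandidatePage* page, void* slot) {
  if (!page->buffer.Add(slot)) {
    // Too many incoming pointers: cheaper to leave the page in place than
    // to track them all, so stop treating it as an evacuation candidate.
    page->is_evacuation_candidate = false;
  }
}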


 class RecordMigratedSlotVisitor final : public ObjectVisitor {
  public:
   RecordMigratedSlotVisitor(MarkCompactCollector* collector,
-                            LocalSlotsBuffer* old_to_old_slots,
-                            LocalSlotsBuffer* old_to_new_slots)
+                            SlotsBuffer** evacuation_slots_buffer,
+                            LocalStoreBuffer* local_store_buffer)
       : collector_(collector),
-        old_to_old_slots_(old_to_old_slots),
-        old_to_new_slots_(old_to_new_slots) {}
+        evacuation_slots_buffer_(evacuation_slots_buffer),
+        local_store_buffer_(local_store_buffer) {}

   V8_INLINE void VisitPointer(Object** p) override {
     collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p),
-                                   old_to_old_slots_, old_to_new_slots_);
+                                   evacuation_slots_buffer_,
+                                   local_store_buffer_);
   }

   V8_INLINE void VisitPointers(Object** start, Object** end) override {
     while (start < end) {
       collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start),
-                                     old_to_old_slots_, old_to_new_slots_);
+                                     evacuation_slots_buffer_,
+                                     local_store_buffer_);
       ++start;
     }
   }

   V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
     if (collector_->compacting_) {
       Address code_entry = Memory::Address_at(code_entry_slot);
-      if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
-        old_to_old_slots_->Record(CODE_ENTRY_SLOT, code_entry_slot);
-      }
+      collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
+                                              evacuation_slots_buffer_);
     }
   }

  private:
   MarkCompactCollector* collector_;
-  LocalSlotsBuffer* old_to_old_slots_;
-  LocalSlotsBuffer* old_to_new_slots_;
+  SlotsBuffer** evacuation_slots_buffer_;
+  LocalStoreBuffer* local_store_buffer_;
 };


 // We scavenge new space simultaneously with sweeping. This is done in two
 // passes.
 //
 // The first pass migrates all alive objects from one semispace to another or
 // promotes them to old space. Forwarding address is written directly into
 // first word of object without any encoding. If object is dead we write
 // NULL as a forwarding address.
 //
 // The second pass updates pointers to new space in all spaces. It is possible
 // to encounter pointers to dead new space objects during traversal of pointers
 // to new space. We should clear them to avoid encountering them during next
 // pointer iteration. This is an issue if the store buffer overflows and we
 // have to scan the entire old space, including dead objects, looking for
 // pointers to new space.
 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
                                          int size, AllocationSpace dest,
-                                         LocalSlotsBuffer* old_to_old_slots,
-                                         LocalSlotsBuffer* old_to_new_slots) {
+                                         SlotsBuffer** evacuation_slots_buffer,
+                                         LocalStoreBuffer* local_store_buffer) {
   Address dst_addr = dst->address();
   Address src_addr = src->address();
   DCHECK(heap()->AllowedToBeMigrated(src, dest));
   DCHECK(dest != LO_SPACE);
   if (dest == OLD_SPACE) {
     DCHECK_OBJECT_SIZE(size);
+    DCHECK(evacuation_slots_buffer != nullptr);
     DCHECK(IsAligned(size, kPointerSize));

     heap()->MoveBlock(dst->address(), src->address(), size);
-    RecordMigratedSlotVisitor visitor(this, old_to_old_slots, old_to_new_slots);
+    RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer,
+                                      local_store_buffer);
     dst->IterateBody(&visitor);
   } else if (dest == CODE_SPACE) {
     DCHECK_CODEOBJECT_SIZE(size, heap()->code_space());
+    DCHECK(evacuation_slots_buffer != nullptr);
     PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
     heap()->MoveBlock(dst_addr, src_addr, size);
-    old_to_old_slots->Record(RELOCATED_CODE_OBJECT, dst_addr);
+    RecordMigratedCodeObjectSlot(dst_addr, evacuation_slots_buffer);
     Code::cast(dst)->Relocate(dst_addr - src_addr);
   } else {
     DCHECK_OBJECT_SIZE(size);
-    DCHECK(old_to_old_slots == nullptr);
+    DCHECK(evacuation_slots_buffer == nullptr);
     DCHECK(dest == NEW_SPACE);
     heap()->MoveBlock(dst_addr, src_addr, size);
   }
   heap()->OnMoveEvent(dst, src, size);
   Memory::Address_at(src_addr) = dst_addr;
 }

-static inline void UpdateTypedSlot(Isolate* isolate, ObjectVisitor* v,
-                                   SlotType slot_type, Address addr) {
+
+static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
+                              SlotsBuffer::SlotType slot_type, Address addr) {
   switch (slot_type) {
-    case CODE_TARGET_SLOT: {
+    case SlotsBuffer::CODE_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::CODE_TARGET, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case CELL_TARGET_SLOT: {
+    case SlotsBuffer::CELL_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::CELL, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case CODE_ENTRY_SLOT: {
+    case SlotsBuffer::CODE_ENTRY_SLOT: {
       v->VisitCodeEntry(addr);
       break;
     }
-    case RELOCATED_CODE_OBJECT: {
+    case SlotsBuffer::RELOCATED_CODE_OBJECT: {
       HeapObject* obj = HeapObject::FromAddress(addr);
       Code::BodyDescriptor::IterateBody(obj, v);
       break;
     }
-    case DEBUG_TARGET_SLOT: {
+    case SlotsBuffer::DEBUG_TARGET_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION, 0,
                       NULL);
       if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
       break;
     }
-    case EMBEDDED_OBJECT_SLOT: {
+    case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
       RelocInfo rinfo(isolate, addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
       rinfo.Visit(isolate, v);
       break;
     }
-    case OBJECT_SLOT: {
+    case SlotsBuffer::OBJECT_SLOT: {
       v->VisitPointer(reinterpret_cast<Object**>(addr));
       break;
     }
     default:
       UNREACHABLE();
       break;
   }
 }


(...skipping 84 matching lines...)

     }
   }

  private:
   inline void UpdatePointer(Object** p) { UpdateSlot(heap_, p); }

   Heap* heap_;
 };


+void MarkCompactCollector::UpdateSlots(SlotsBuffer* buffer) {
+  PointersUpdatingVisitor v(heap_);
+  size_t buffer_size = buffer->Size();
+
+  for (size_t slot_idx = 0; slot_idx < buffer_size; ++slot_idx) {
+    SlotsBuffer::ObjectSlot slot = buffer->Get(slot_idx);
+    if (!SlotsBuffer::IsTypedSlot(slot)) {
+      PointersUpdatingVisitor::UpdateSlot(heap_, slot);
+    } else {
+      ++slot_idx;
+      DCHECK(slot_idx < buffer_size);
+      UpdateSlot(heap_->isolate(), &v, DecodeSlotType(slot),
+                 reinterpret_cast<Address>(buffer->Get(slot_idx)));
+    }
+  }
+}
+
+
+void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) {
+  while (buffer != NULL) {
+    UpdateSlots(buffer);
+    buffer = buffer->next();
+  }
+}
+
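UpdateSlots above reveals the buffer's entry convention: an untyped entry is a bare slot address, while a typed entry occupies two consecutive words, a small SlotType tag stored where a pointer would go (hence DecodeSlotType's cast) followed by the target address. A sketch of encoding and decoding under that assumption; the tag threshold below is invented for the example:

#include <cstdint>
#include <vector>

enum SlotType : intptr_t { CODE_TARGET_SLOT = 1, EMBEDDED_OBJECT_SLOT = 2 };

using Entry = intptr_t;  // either a real address or a small SlotType tag

// Typed records are written as a (type, address) pair of adjacent entries.
void AddTyped(std::vector<Entry>* buf, SlotType type, intptr_t addr) {
  buf->push_back(static_cast<Entry>(type));
  buf->push_back(addr);
}

// A "pointer" this small can only be a tag; real slot addresses are larger.
bool IsTypedSlot(Entry e) { return e > 0 && e < 0x100; }

template <typename UntypedFn, typename TypedFn>
void ForEachSlot(const std::vector<Entry>& buf, UntypedFn untyped,
                 TypedFn typed) {
  for (size_t i = 0; i < buf.size(); ++i) {
    if (!IsTypedSlot(buf[i])) {
      untyped(buf[i]);
    } else {
      ++i;  // consume the paired address, exactly as UpdateSlots does
      typed(static_cast<SlotType>(buf[i - 1]), buf[i]);
    }
  }
}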
+
 static void UpdatePointer(HeapObject** address, HeapObject* object) {
   MapWord map_word = object->map_word();
   // Since we only filter invalid slots in old space, the store buffer can
   // still contain stale pointers in large object and in map spaces. Ignore
   // these pointers here.
   DCHECK(map_word.IsForwardingAddress() ||
          !object->GetHeap()->old_space()->Contains(
              reinterpret_cast<Address>(address)));
   if (map_word.IsForwardingAddress()) {
     // Update the corresponding slot.

(...skipping 101 matching lines...)

     // If the slot is within the last found object in the cell, the slot is
     // in a live object.
     // Slots pointing to the first word of an object are invalid and removed.
     // This can happen when we move the object header while left trimming.
     *out_object = object;
     return true;
   }
   return false;
 }

-HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
-  Page* p = Page::FromAddress(slot);
+
+bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
   // This function does not support large objects right now.
   Space* owner = p->owner();
-  if (owner == heap_->lo_space() || owner == nullptr) {
+  if (owner == heap_->lo_space() || owner == NULL) {
     Object* large_object = heap_->lo_space()->FindObject(slot);
     // This object has to exist, otherwise we would not have recorded a slot
     // for it.
     CHECK(large_object->IsHeapObject());
     HeapObject* large_heap_object = HeapObject::cast(large_object);
-
     if (IsMarked(large_heap_object)) {
-      return large_heap_object;
+      return true;
     }
-    return nullptr;
+    return false;
   }

   LiveObjectIterator<kBlackObjects> it(p);
-  HeapObject* object = nullptr;
-  while ((object = it.Next()) != nullptr) {
+  HeapObject* object = NULL;
+  while ((object = it.Next()) != NULL) {
     int size = object->Size();
-    if (object->address() > slot) return nullptr;
+
+    if (object->address() > slot) return false;
     if (object->address() <= slot && slot < (object->address() + size)) {
-      return object;
+      return true;
     }
   }
-  return nullptr;
+  return false;
 }


 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
   HeapObject* object = NULL;
   // The target object is black but we don't know if the source slot is black.
   // The source object could have died and the slot could be part of a free
   // space. Find out based on mark bits if the slot is part of a live object.
   if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
     return false;
   }

   DCHECK(object != NULL);
   int offset = static_cast<int>(slot - object->address());
   return object->IsValidSlot(offset);
 }
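IsSlotInBlackObjectSlow answers "does this address fall inside a marked object?" by walking black objects in address order and bounds-checking, bailing out as soon as it has walked past the slot. The same early-exit scan over a sorted object list (a sketch; the intervals stand in for mark-bitmap iteration):

#include <cstdint>
#include <vector>

struct BlackObject {
  uintptr_t address;
  int size;
};

// `objects` must be sorted by address, as a LiveObjectIterator walk is.
bool IsSlotInBlackObjectSlow(const std::vector<BlackObject>& objects,
                             uintptr_t slot) {
  for (const BlackObject& object : objects) {
    if (object.address > slot) return false;  // walked past: slot is in a gap
    if (slot < object.address + object.size) return true;
  }
  return false;
}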


+void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
+                                                    HeapObject* object) {
+  // The target object has to be black.
+  CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
+
+  // The target object is black but we don't know if the source slot is black.
+  // The source object could have died and the slot could be part of a free
+  // space. Use the mark bit iterator to find out about liveness of the slot.
+  CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
+}
+
+
 void MarkCompactCollector::EvacuateNewSpacePrologue() {
   NewSpace* new_space = heap()->new_space();
   NewSpacePageIterator it(new_space->bottom(), new_space->top());
   // Append the list of new space pages to be processed.
   while (it.has_next()) {
     newspace_evacuation_candidates_.Add(it.next());
   }
   new_space->Flip();
   new_space->ResetAllocationInfo();
 }

 void MarkCompactCollector::EvacuateNewSpaceEpilogue() {
   newspace_evacuation_candidates_.Rewind(0);
 }


+void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized(
+    SlotsBuffer* evacuation_slots_buffer) {
+  base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_);
+  evacuation_slots_buffers_.Add(evacuation_slots_buffer);
+}
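AddEvacuationSlotsBufferSynchronized is the one place where parallel evacuators touch shared collector state, so it takes a mutex; each task otherwise appends to a private buffer chain and only publishes it once, when finalizing. A minimal sketch of that publish step:

#include <mutex>
#include <vector>

struct SlotsBufferChain;  // opaque here; each evacuator owns one privately

class EvacuationSlotsBuffers {
 public:
  // One append per evacuator; the lock makes it safe regardless of which
  // thread the evacuator finalizes on.
  void AddSynchronized(SlotsBufferChain* buffer) {
    std::lock_guard<std::mutex> guard(mutex_);
    buffers_.push_back(buffer);
  }

 private:
  std::mutex mutex_;
  std::vector<SlotsBufferChain*> buffers_;
};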
+
 class MarkCompactCollector::Evacuator : public Malloced {
  public:
   Evacuator(MarkCompactCollector* collector,
             const List<Page*>& evacuation_candidates,
             const List<NewSpacePage*>& newspace_evacuation_candidates)
       : collector_(collector),
         evacuation_candidates_(evacuation_candidates),
         newspace_evacuation_candidates_(newspace_evacuation_candidates),
         compaction_spaces_(collector->heap()),
+        local_slots_buffer_(nullptr),
+        local_store_buffer_(collector->heap()),
         local_pretenuring_feedback_(HashMap::PointersMatch,
                                     kInitialLocalPretenuringFeedbackCapacity),
         new_space_visitor_(collector->heap(), &compaction_spaces_,
-                           &old_to_old_slots_, &old_to_new_slots_,
+                           &local_slots_buffer_, &local_store_buffer_,
                            &local_pretenuring_feedback_),
         old_space_visitor_(collector->heap(), &compaction_spaces_,
-                           &old_to_old_slots_, &old_to_new_slots_),
+                           &local_slots_buffer_, &local_store_buffer_),
         duration_(0.0),
         bytes_compacted_(0),
         task_id_(0) {}

   // Evacuate the configured set of pages in parallel.
   inline void EvacuatePages();

   // Merge back locally cached info sequentially. Note that this method needs
   // to be called from the main thread.
   inline void Finalize();

(...skipping 16 matching lines...)

   inline bool EvacuateSinglePage(MemoryChunk* p, HeapObjectVisitor* visitor);

   MarkCompactCollector* collector_;

   // Pages to process.
   const List<Page*>& evacuation_candidates_;
   const List<NewSpacePage*>& newspace_evacuation_candidates_;

   // Locally cached collector data.
   CompactionSpaceCollection compaction_spaces_;
-  LocalSlotsBuffer old_to_old_slots_;
-  LocalSlotsBuffer old_to_new_slots_;
+  SlotsBuffer* local_slots_buffer_;
+  LocalStoreBuffer local_store_buffer_;
   HashMap local_pretenuring_feedback_;

   // Vistors for the corresponding spaces.
   EvacuateNewSpaceVisitor new_space_visitor_;
   EvacuateOldSpaceVisitor old_space_visitor_;

   // Book keeping info.
   double duration_;
   intptr_t bytes_compacted_;

(...skipping 57 matching lines...)

   heap()->code_space()->MergeCompactionSpace(
       compaction_spaces_.Get(CODE_SPACE));
   heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_);
   heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size());
   heap()->IncrementSemiSpaceCopiedObjectSize(
       new_space_visitor_.semispace_copied_size());
   heap()->IncrementYoungSurvivorsCounter(
       new_space_visitor_.promoted_size() +
       new_space_visitor_.semispace_copied_size());
   heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
-  // Move locally recorded slots to the global remembered sets.
-  old_to_new_slots_.Iterate(
-      [](Address slot) {
-        Page* page = Page::FromAddress(slot);
-        RememberedSet<OLD_TO_NEW>::Insert(page, slot);
-      },
-      [](SlotType type, Address slot) { UNREACHABLE(); });
-  old_to_old_slots_.Iterate(
-      [](Address slot) {
-        Page* page = Page::FromAddress(slot);
-        RememberedSet<OLD_TO_OLD>::Insert(page, slot);
-      },
-      [](SlotType type, Address slot) {
-        Page* page = Page::FromAddress(slot);
-        RememberedSet<OLD_TO_OLD>::InsertTyped(page, type, slot);
-      });
+  local_store_buffer_.Process(heap()->store_buffer());
+  collector_->AddEvacuationSlotsBufferSynchronized(local_slots_buffer_);
 }

 class MarkCompactCollector::CompactionTask : public CancelableTask {
  public:
   explicit CompactionTask(Heap* heap, Evacuator* evacuator)
       : CancelableTask(heap->isolate()), heap_(heap), evacuator_(evacuator) {
     evacuator->set_task_id(id());
   }

   virtual ~CompactionTask() {}

(...skipping 286 matching lines...)

       !ShouldSkipEvacuationSlotRecording(code)) {
     DCHECK(compacting_);

     // If the object is white than no slots were recorded on it yet.
     MarkBit mark_bit = Marking::MarkBitFrom(code);
     if (Marking::IsWhite(mark_bit)) return;

     // Ignore all slots that might have been recorded in the body of the
     // deoptimized code object. Assumption: no slots will be recorded for
     // this object after invalidating it.
-    Page* page = Page::FromAddress(code->address());
-    Address start = code->instruction_start();
-    Address end = code->address() + code->Size();
-    RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
+    RemoveObjectSlots(code->instruction_start(),
+                      code->address() + code->Size());
   }
 }


 // Return true if the given code is deoptimized or will be deoptimized.
 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
   return code->is_optimized_code() && code->marked_for_deoptimization();
 }


+void MarkCompactCollector::RemoveObjectSlots(Address start_slot,
+                                             Address end_slot) {
+  // Remove entries by replacing them with an old-space slot containing a smi
+  // that is located in an unmovable page.
+  for (Page* p : evacuation_candidates_) {
+    DCHECK(p->IsEvacuationCandidate() ||
+           p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
+    if (p->IsEvacuationCandidate()) {
+      SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), start_slot,
+                                     end_slot);
+    }
+  }
+}
+
+
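RemoveObjectSlots, re-added above, invalidates every recorded slot that falls inside a code object being deoptimized; per its comment, entries are replaced with a pointer to an unmovable smi-holding slot rather than compacted away. A sketch of the range filter itself, with a caller-chosen sentinel standing in for that replacement slot:

#include <cstdint>
#include <vector>

using Address = uintptr_t;

// Overwrite entries inside [start_slot, end_slot) with a sentinel so later
// update passes skip them; the buffer is deliberately not resized.
void RemoveObjectSlots(std::vector<Address>* buffer, Address start_slot,
                       Address end_slot, Address removed_sentinel) {
  for (Address& entry : *buffer) {
    if (entry >= start_slot && entry < end_slot) {
      entry = removed_sentinel;
    }
  }
}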
3436 #ifdef VERIFY_HEAP 3551 #ifdef VERIFY_HEAP
3437 static void VerifyAllBlackObjects(MemoryChunk* page) { 3552 static void VerifyAllBlackObjects(MemoryChunk* page) {
3438 LiveObjectIterator<kAllLiveObjects> it(page); 3553 LiveObjectIterator<kAllLiveObjects> it(page);
3439 HeapObject* object = NULL; 3554 HeapObject* object = NULL;
3440 while ((object = it.Next()) != NULL) { 3555 while ((object = it.Next()) != NULL) {
3441 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); 3556 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
3442 } 3557 }
3443 } 3558 }
3444 #endif // VERIFY_HEAP 3559 #endif // VERIFY_HEAP
3445 3560
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after
3577 if (FLAG_verify_heap && !sweeping_in_progress_) { 3692 if (FLAG_verify_heap && !sweeping_in_progress_) {
3578 VerifyEvacuation(heap()); 3693 VerifyEvacuation(heap());
3579 } 3694 }
3580 #endif 3695 #endif
3581 } 3696 }
3582 3697
3583 3698
3584 void MarkCompactCollector::UpdatePointersAfterEvacuation() { 3699 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
3585 GCTracer::Scope gc_scope(heap()->tracer(), 3700 GCTracer::Scope gc_scope(heap()->tracer(),
3586 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS); 3701 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
3702 {
3703 GCTracer::Scope gc_scope(
3704 heap()->tracer(),
3705 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3706 UpdateSlotsRecordedIn(migration_slots_buffer_);
3707 if (FLAG_trace_fragmentation_verbose) {
3708 PrintF(" migration slots buffer: %d\n",
3709 SlotsBuffer::SizeOfChain(migration_slots_buffer_));
3710 }
3711 slots_buffer_allocator_->DeallocateChain(&migration_slots_buffer_);
3712 DCHECK(migration_slots_buffer_ == NULL);
3587 3713
3714 // TODO(hpayer): Process the slots buffers in parallel. This has to be done
3715 // after evacuation of all pages finishes.
3716 int buffers = evacuation_slots_buffers_.length();
3717 for (int i = 0; i < buffers; i++) {
3718 SlotsBuffer* buffer = evacuation_slots_buffers_[i];
3719 UpdateSlotsRecordedIn(buffer);
3720 slots_buffer_allocator_->DeallocateChain(&buffer);
3721 }
3722 evacuation_slots_buffers_.Rewind(0);
3723 }
3724
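Both calls above treat slots buffers as chains: SizeOfChain counts entries across the links, and DeallocateChain takes the address of the head pointer so it can null it out, which is what the DCHECK on migration_slots_buffer_ verifies. A small sketch of that shape, assuming fixed-size buffer nodes linked through a next pointer (names hypothetical):

#include <cstddef>

// Hypothetical model of a slots-buffer chain node.
struct BufferModel {
  static const size_t kCapacity = 1024;
  size_t used = 0;
  BufferModel* next = nullptr;
};

// Counts entries across the whole chain (analogous to SizeOfChain).
size_t SizeOfChain(const BufferModel* b) {
  size_t n = 0;
  for (; b != nullptr; b = b->next) n += b->used;
  return n;
}

// Frees every buffer in the chain and clears the caller's head pointer
// (analogous to DeallocateChain taking a pointer-to-pointer).
void DeallocateChain(BufferModel** head) {
  BufferModel* b = *head;
  while (b != nullptr) {
    BufferModel* next = b->next;
    delete b;
    b = next;
  }
  *head = nullptr;
}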
3725 // Second pass: find pointers to new space and update them.
3588 PointersUpdatingVisitor updating_visitor(heap()); 3726 PointersUpdatingVisitor updating_visitor(heap());
3589 3727
3590 { 3728 {
3591 GCTracer::Scope gc_scope( 3729 GCTracer::Scope gc_scope(
3592 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW); 3730 heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW);
3593 // Update pointers in to space. 3731 // Update pointers in to space.
3594 SemiSpaceIterator to_it(heap()->new_space()); 3732 SemiSpaceIterator to_it(heap()->new_space());
3595 for (HeapObject* object = to_it.Next(); object != NULL; 3733 for (HeapObject* object = to_it.Next(); object != NULL;
3596 object = to_it.Next()) { 3734 object = to_it.Next()) {
3597 Map* map = object->map(); 3735 Map* map = object->map();
3598 object->IterateBody(map->instance_type(), object->SizeFromMap(map), 3736 object->IterateBody(map->instance_type(), object->SizeFromMap(map),
3599 &updating_visitor); 3737 &updating_visitor);
3600 } 3738 }
3601 // Update roots. 3739 // Update roots.
3602 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); 3740 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
3603 3741
3604 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer); 3742 RememberedSet<OLD_TO_NEW>::IterateWithWrapper(heap_, UpdatePointer);
3605 } 3743 }
3606 3744
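This pass only needs to redirect slots whose targets actually moved. A compact model of the per-slot update, using a hash map as the forwarding table; V8 instead stashes the forwarding address in the first word of the evacuated object, so the map here is purely illustrative:

#include <unordered_map>

// Hypothetical forwarding table from the old location of a moved object
// to its new location.
using ForwardingTable = std::unordered_map<void*, void*>;

// Update one slot: if the object it points at was evacuated, redirect the
// slot to the new copy; otherwise leave it alone.
void UpdateSlot(void** slot, const ForwardingTable& forwarded) {
  auto it = forwarded.find(*slot);
  if (it != forwarded.end()) *slot = it->second;
}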
3607 { 3745 {
3608 Heap* heap = this->heap();
3609 GCTracer::Scope gc_scope(
3610 heap->tracer(),
3611 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED);
3612
3613 RememberedSet<OLD_TO_OLD>::Iterate(heap, [heap](Address slot) {
3614 PointersUpdatingVisitor::UpdateSlot(heap,
3615 reinterpret_cast<Object**>(slot));
3616 return REMOVE_SLOT;
3617 });
3618 Isolate* isolate = heap->isolate();
3619 PointersUpdatingVisitor* visitor = &updating_visitor;
3620 RememberedSet<OLD_TO_OLD>::IterateTyped(
3621 heap, [isolate, visitor](SlotType type, Address slot) {
3622 UpdateTypedSlot(isolate, visitor, type, slot);
3623 return REMOVE_SLOT;
3624 });
3625 }
3626
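The left-hand code, which this revert removes, shows the remembered-set idiom: iteration hands every recorded slot to a callback, and returning REMOVE_SLOT drops the entry, so the set empties itself as the update pass runs. A minimal model of keep-or-remove iteration (hypothetical API, not the real RememberedSet):

#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

// Hypothetical model of callback-driven iteration: the callback sees
// every recorded slot and decides whether it stays recorded. Returning
// REMOVE_SLOT everywhere empties the set as a side effect of the pass.
template <typename Callback>
void IterateSlots(std::vector<void**>* slots, Callback callback) {
  auto out = slots->begin();
  for (void** slot : *slots) {
    if (callback(slot) == KEEP_SLOT) *out++ = slot;
  }
  slots->erase(out, slots->end());
}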
3627 {
3628 GCTracer::Scope gc_scope( 3746 GCTracer::Scope gc_scope(
3629 heap()->tracer(), 3747 heap()->tracer(),
3630 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED); 3748 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED);
3631 for (Page* p : evacuation_candidates_) { 3749 for (Page* p : evacuation_candidates_) {
3632 DCHECK(p->IsEvacuationCandidate() || 3750 DCHECK(p->IsEvacuationCandidate() ||
3633 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); 3751 p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3634 3752
3635 if (p->IsEvacuationCandidate()) { 3753 if (p->IsEvacuationCandidate()) {
3754 UpdateSlotsRecordedIn(p->slots_buffer());
3755 if (FLAG_trace_fragmentation_verbose) {
3756 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3757 SlotsBuffer::SizeOfChain(p->slots_buffer()));
3758 }
3759 slots_buffer_allocator_->DeallocateChain(p->slots_buffer_address());
3760
3636 // Important: skip list should be cleared only after roots were updated 3761 // Important: skip list should be cleared only after roots were updated
3637 // because root iteration traverses the stack and might have to find 3762 // because root iteration traverses the stack and might have to find
3638 // code objects from non-updated pc pointing into evacuation candidate. 3763 // code objects from non-updated pc pointing into evacuation candidate.
3639 SkipList* list = p->skip_list(); 3764 SkipList* list = p->skip_list();
3640 if (list != NULL) list->Clear(); 3765 if (list != NULL) list->Clear();
3641 3766
3642 // First pass on aborted pages, fixing up all live objects. 3767 // First pass on aborted pages, fixing up all live objects.
3643 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { 3768 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
3644 p->ClearEvacuationCandidate(); 3769 p->ClearEvacuationCandidate();
3645 VisitLiveObjectsBody(p, &updating_visitor); 3770 VisitLiveObjectsBody(p, &updating_visitor);
(...skipping 241 matching lines...)
3887 4012
3888 4013
3889 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); } 4014 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); }
3890 4015
3891 4016
3892 void MarkCompactCollector::Initialize() { 4017 void MarkCompactCollector::Initialize() {
3893 MarkCompactMarkingVisitor::Initialize(); 4018 MarkCompactMarkingVisitor::Initialize();
3894 IncrementalMarking::Initialize(); 4019 IncrementalMarking::Initialize();
3895 } 4020 }
3896 4021
3897 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* host, Address slot, 4022
4023 void MarkCompactCollector::EvictPopularEvacuationCandidate(Page* page) {
4024 if (FLAG_trace_fragmentation) {
4025 PrintF("Page %p is too popular. Disabling evacuation.\n",
4026 reinterpret_cast<void*>(page));
4027 }
4028
4029 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow);
4030
 4031 // TODO(gc) If all evacuation candidates are too popular, we
 4032 // should stop recording slots entirely.
4033 page->ClearEvacuationCandidate();
4034
4035 DCHECK(!page->IsFlagSet(Page::POPULAR_PAGE));
4036 page->SetFlag(Page::POPULAR_PAGE);
4037
 4038 // We were not collecting slots on this page that point
 4039 // to other evacuation candidates, so we have to
 4040 // rescan the page after evacuation to discover and update all
 4041 // pointers to evacuated objects.
4042 page->SetFlag(Page::RESCAN_ON_EVACUATION);
4043 }
4044
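Eviction amounts to three flag flips with distinct purposes, sketched below on a hypothetical page type: clearing the candidate bit means slots pointing at the page stop being recorded, the popular bit presumably keeps the next candidate selection from choosing the page again, and the rescan bit arranges the full post-evacuation scan the comment describes.

#include <cstdint>

// Hypothetical page model illustrating the flag changes above.
struct PageModel {
  static const uint32_t kEvacuationCandidate = 1u << 0;
  static const uint32_t kPopularPage = 1u << 1;
  static const uint32_t kRescanOnEvacuation = 1u << 2;
  uint32_t flags = 0;

  void EvictAsPopular() {
    flags &= ~kEvacuationCandidate;  // slots pointing here are not recorded
    flags |= kPopularPage;           // avoid re-selecting it as a candidate
    flags |= kRescanOnEvacuation;    // rescan wholesale to fix stale pointers
  }
};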
4045
4046 void MarkCompactCollector::RecordCodeEntrySlot(HeapObject* object, Address slot,
3898 Code* target) { 4047 Code* target) {
3899 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); 4048 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
3900 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
3901 if (target_page->IsEvacuationCandidate() && 4049 if (target_page->IsEvacuationCandidate() &&
3902 !ShouldSkipEvacuationSlotRecording(host)) { 4050 !ShouldSkipEvacuationSlotRecording(object)) {
3903 RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, CODE_ENTRY_SLOT, slot); 4051 if (!SlotsBuffer::AddTo(slots_buffer_allocator_,
4052 target_page->slots_buffer_address(),
4053 SlotsBuffer::CODE_ENTRY_SLOT, slot,
4054 SlotsBuffer::FAIL_ON_OVERFLOW)) {
4055 EvictPopularEvacuationCandidate(target_page);
4056 }
3904 } 4057 }
3905 } 4058 }
3906 4059
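The restored path makes the overflow policy explicit: SlotsBuffer::AddTo with FAIL_ON_OVERFLOW reports failure instead of growing the chain, and the caller reacts by evicting the too-popular target page. A minimal fixed-capacity buffer showing that contract (illustrative only, not the real SlotsBuffer):

#include <cstddef>

// Hypothetical fixed-capacity buffer illustrating FAIL_ON_OVERFLOW:
// AddTo refuses to grow and reports failure, leaving the eviction
// decision to the caller.
struct FixedSlotsBuffer {
  static const size_t kCapacity = 1024;
  void* slots[kCapacity];
  size_t used = 0;

  bool AddTo(void* slot) {
    if (used == kCapacity) return false;  // overflow: caller must react
    slots[used++] = slot;
    return true;
  }
};

Keeping the failure visible at the call site is what lets the page-level response, eviction, live right next to the recording logic.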
3907 4060
3908 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { 4061 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
3909 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); 4062 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
3910 if (is_compacting()) { 4063 if (is_compacting()) {
3911 Code* host = 4064 Code* host =
3912 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( 4065 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
3913 pc); 4066 pc);
3914 MarkBit mark_bit = Marking::MarkBitFrom(host); 4067 MarkBit mark_bit = Marking::MarkBitFrom(host);
3915 if (Marking::IsBlack(mark_bit)) { 4068 if (Marking::IsBlack(mark_bit)) {
3916 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 4069 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
3917 RecordRelocSlot(host, &rinfo, target); 4070 RecordRelocSlot(&rinfo, target);
3918 } 4071 }
3919 } 4072 }
3920 } 4073 }
3921 4074
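RecordCodeTargetPatch first has to recover the host Code object from a raw pc, which is what GcSafeFindCodeForInnerPointer does. A self-contained sketch of the idea, using a sorted list of code ranges and binary search in place of V8's actual lookup (all types hypothetical):

#include <algorithm>
#include <cstdint>
#include <vector>

// Hypothetical code range: [start, end) covers one code object.
struct CodeRange {
  uintptr_t start;
  uintptr_t end;  // one past the last byte
};

// Given a pc somewhere inside a code object, find the containing range.
// Assumes the ranges are disjoint and sorted by start address.
const CodeRange* FindCodeForInnerPointer(
    const std::vector<CodeRange>& sorted_by_start, uintptr_t pc) {
  auto it = std::upper_bound(
      sorted_by_start.begin(), sorted_by_start.end(), pc,
      [](uintptr_t p, const CodeRange& r) { return p < r.start; });
  if (it == sorted_by_start.begin()) return nullptr;
  --it;  // last range starting at or before pc
  return (pc < it->end) ? &*it : nullptr;
}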
3922 } // namespace internal 4075 } // namespace internal
3923 } // namespace v8 4076 } // namespace v8