| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 1493 matching lines...) |
| 1504 class MarkCompactCollector::HeapObjectVisitor { | 1504 class MarkCompactCollector::HeapObjectVisitor { |
| 1505 public: | 1505 public: |
| 1506 virtual ~HeapObjectVisitor() {} | 1506 virtual ~HeapObjectVisitor() {} |
| 1507 virtual bool Visit(HeapObject* object) = 0; | 1507 virtual bool Visit(HeapObject* object) = 0; |
| 1508 }; | 1508 }; |
| 1509 | 1509 |
| 1510 | 1510 |
| 1511 class MarkCompactCollector::EvacuateVisitorBase | 1511 class MarkCompactCollector::EvacuateVisitorBase |
| 1512 : public MarkCompactCollector::HeapObjectVisitor { | 1512 : public MarkCompactCollector::HeapObjectVisitor { |
| 1513 public: | 1513 public: |
| 1514 EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces, | 1514 EvacuateVisitorBase(Heap* heap, CompactionSpaceCollection* compaction_spaces) |
| 1515 LocalSlotsBuffer* old_to_old_slots, | 1515 : heap_(heap), compaction_spaces_(compaction_spaces) {} |
| 1516 LocalSlotsBuffer* old_to_new_slots) | |
| 1517 : heap_(heap), | |
| 1518 compaction_spaces_(compaction_spaces), | |
| 1519 old_to_old_slots_(old_to_old_slots), | |
| 1520 old_to_new_slots_(old_to_new_slots) {} | |
| 1521 | 1516 |
| 1522 bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object, | 1517 bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object, |
| 1523 HeapObject** target_object) { | 1518 HeapObject** target_object) { |
| 1524 int size = object->Size(); | 1519 int size = object->Size(); |
| 1525 AllocationAlignment alignment = object->RequiredAlignment(); | 1520 AllocationAlignment alignment = object->RequiredAlignment(); |
| 1526 AllocationResult allocation = target_space->AllocateRaw(size, alignment); | 1521 AllocationResult allocation = target_space->AllocateRaw(size, alignment); |
| 1527 if (allocation.To(target_object)) { | 1522 if (allocation.To(target_object)) { |
| 1528 heap_->mark_compact_collector()->MigrateObject( | 1523 heap_->mark_compact_collector()->MigrateObject( |
| 1529 *target_object, object, size, target_space->identity(), | 1524 *target_object, object, size, target_space->identity()); |
| 1530 old_to_old_slots_, old_to_new_slots_); | |
| 1531 return true; | 1525 return true; |
| 1532 } | 1526 } |
| 1533 return false; | 1527 return false; |
| 1534 } | 1528 } |
| 1535 | 1529 |
| 1536 protected: | 1530 protected: |
| 1537 Heap* heap_; | 1531 Heap* heap_; |
| 1538 CompactionSpaceCollection* compaction_spaces_; | 1532 CompactionSpaceCollection* compaction_spaces_; |
| 1539 LocalSlotsBuffer* old_to_old_slots_; | |
| 1540 LocalSlotsBuffer* old_to_new_slots_; | |
| 1541 }; | 1533 }; |
| 1542 | 1534 |
| 1543 | 1535 |
| 1544 class MarkCompactCollector::EvacuateNewSpaceVisitor final | 1536 class MarkCompactCollector::EvacuateNewSpaceVisitor final |
| 1545 : public MarkCompactCollector::EvacuateVisitorBase { | 1537 : public MarkCompactCollector::EvacuateVisitorBase { |
| 1546 public: | 1538 public: |
| 1547 static const intptr_t kLabSize = 4 * KB; | 1539 static const intptr_t kLabSize = 4 * KB; |
| 1548 static const intptr_t kMaxLabObjectSize = 256; | 1540 static const intptr_t kMaxLabObjectSize = 256; |
| 1549 | 1541 |
| 1550 explicit EvacuateNewSpaceVisitor(Heap* heap, | 1542 explicit EvacuateNewSpaceVisitor(Heap* heap, |
| 1551 CompactionSpaceCollection* compaction_spaces, | 1543 CompactionSpaceCollection* compaction_spaces, |
| 1552 LocalSlotsBuffer* old_to_old_slots, | |
| 1553 LocalSlotsBuffer* old_to_new_slots, | |
| 1554 HashMap* local_pretenuring_feedback) | 1544 HashMap* local_pretenuring_feedback) |
| 1555 : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots, | 1545 : EvacuateVisitorBase(heap, compaction_spaces), |
| 1556 old_to_new_slots), | |
| 1557 buffer_(LocalAllocationBuffer::InvalidBuffer()), | 1546 buffer_(LocalAllocationBuffer::InvalidBuffer()), |
| 1558 space_to_allocate_(NEW_SPACE), | 1547 space_to_allocate_(NEW_SPACE), |
| 1559 promoted_size_(0), | 1548 promoted_size_(0), |
| 1560 semispace_copied_size_(0), | 1549 semispace_copied_size_(0), |
| 1561 local_pretenuring_feedback_(local_pretenuring_feedback) {} | 1550 local_pretenuring_feedback_(local_pretenuring_feedback) {} |
| 1562 | 1551 |
| 1563 bool Visit(HeapObject* object) override { | 1552 bool Visit(HeapObject* object) override { |
| 1564 heap_->UpdateAllocationSite<Heap::kCached>(object, | 1553 heap_->UpdateAllocationSite<Heap::kCached>(object, |
| 1565 local_pretenuring_feedback_); | 1554 local_pretenuring_feedback_); |
| 1566 int size = object->Size(); | 1555 int size = object->Size(); |
| 1567 HeapObject* target_object = nullptr; | 1556 HeapObject* target_object = nullptr; |
| 1568 if (heap_->ShouldBePromoted(object->address(), size) && | 1557 if (heap_->ShouldBePromoted(object->address(), size) && |
| 1569 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, | 1558 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, |
| 1570 &target_object)) { | 1559 &target_object)) { |
| 1571 // If we end up needing more special cases, we should factor this out. | 1560 // If we end up needing more special cases, we should factor this out. |
| 1572 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { | 1561 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { |
| 1573 heap_->array_buffer_tracker()->Promote( | 1562 heap_->array_buffer_tracker()->Promote( |
| 1574 JSArrayBuffer::cast(target_object)); | 1563 JSArrayBuffer::cast(target_object)); |
| 1575 } | 1564 } |
| 1576 promoted_size_ += size; | 1565 promoted_size_ += size; |
| 1577 return true; | 1566 return true; |
| 1578 } | 1567 } |
| 1579 HeapObject* target = nullptr; | 1568 HeapObject* target = nullptr; |
| 1580 AllocationSpace space = AllocateTargetObject(object, &target); | 1569 AllocationSpace space = AllocateTargetObject(object, &target); |
| 1581 heap_->mark_compact_collector()->MigrateObject( | 1570 heap_->mark_compact_collector()->MigrateObject(HeapObject::cast(target), |
| 1582 HeapObject::cast(target), object, size, space, | 1571 object, size, space); |
| 1583 (space == NEW_SPACE) ? nullptr : old_to_old_slots_, | |
| 1584 (space == NEW_SPACE) ? nullptr : old_to_new_slots_); | |
| 1585 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | 1572 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { |
| 1586 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); | 1573 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); |
| 1587 } | 1574 } |
| 1588 semispace_copied_size_ += size; | 1575 semispace_copied_size_ += size; |
| 1589 return true; | 1576 return true; |
| 1590 } | 1577 } |
| 1591 | 1578 |
| 1592 intptr_t promoted_size() { return promoted_size_; } | 1579 intptr_t promoted_size() { return promoted_size_; } |
| 1593 intptr_t semispace_copied_size() { return semispace_copied_size_; } | 1580 intptr_t semispace_copied_size() { return semispace_copied_size_; } |
| 1594 | 1581 |
| (...skipping 98 matching lines...) |
| 1693 intptr_t promoted_size_; | 1680 intptr_t promoted_size_; |
| 1694 intptr_t semispace_copied_size_; | 1681 intptr_t semispace_copied_size_; |
| 1695 HashMap* local_pretenuring_feedback_; | 1682 HashMap* local_pretenuring_feedback_; |
| 1696 }; | 1683 }; |
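
The two constants near the top of this class bound the per-task local allocation buffer (LAB): objects up to kMaxLabObjectSize bytes bump-allocate from a 4 KB thread-local buffer so most semispace copies avoid synchronizing on new space. A minimal sketch of that decision, assuming the elided AllocateTargetObject follows this shape (AllocateRawAligned and AllocateRawSynchronized are assumptions about the allocation API, not confirmed by the visible diff):

    // Sketch of the fast path assumed inside the elided AllocateTargetObject:
    // objects up to kMaxLabObjectSize bump-allocate from the thread-local
    // 4 KB LAB; anything larger takes the synchronized new-space path.
    AllocationResult AllocateTargetSketch(int size,
                                          AllocationAlignment alignment) {
      if (size <= kMaxLabObjectSize) {
        AllocationResult result = buffer_.AllocateRawAligned(size, alignment);
        if (!result.IsRetry()) return result;
        // On retry the visitor would grab a fresh kLabSize buffer and
        // allocate again (refill logic omitted here).
      }
      return heap_->new_space()->AllocateRawSynchronized(size, alignment);
    }
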
| 1697 | 1684 |
| 1698 | 1685 |
| 1699 class MarkCompactCollector::EvacuateOldSpaceVisitor final | 1686 class MarkCompactCollector::EvacuateOldSpaceVisitor final |
| 1700 : public MarkCompactCollector::EvacuateVisitorBase { | 1687 : public MarkCompactCollector::EvacuateVisitorBase { |
| 1701 public: | 1688 public: |
| 1702 EvacuateOldSpaceVisitor(Heap* heap, | 1689 EvacuateOldSpaceVisitor(Heap* heap, |
| 1703 CompactionSpaceCollection* compaction_spaces, | 1690 CompactionSpaceCollection* compaction_spaces) |
| 1704 LocalSlotsBuffer* old_to_old_slots, | 1691 : EvacuateVisitorBase(heap, compaction_spaces) {} |
| 1705 LocalSlotsBuffer* old_to_new_slots) | |
| 1706 : EvacuateVisitorBase(heap, compaction_spaces, old_to_old_slots, | |
| 1707 old_to_new_slots) {} | |
| 1708 | 1692 |
| 1709 bool Visit(HeapObject* object) override { | 1693 bool Visit(HeapObject* object) override { |
| 1710 CompactionSpace* target_space = compaction_spaces_->Get( | 1694 CompactionSpace* target_space = compaction_spaces_->Get( |
| 1711 Page::FromAddress(object->address())->owner()->identity()); | 1695 Page::FromAddress(object->address())->owner()->identity()); |
| 1712 HeapObject* target_object = nullptr; | 1696 HeapObject* target_object = nullptr; |
| 1713 if (TryEvacuateObject(target_space, object, &target_object)) { | 1697 if (TryEvacuateObject(target_space, object, &target_object)) { |
| 1714 DCHECK(object->map_word().IsForwardingAddress()); | 1698 DCHECK(object->map_word().IsForwardingAddress()); |
| 1715 return true; | 1699 return true; |
| 1716 } | 1700 } |
| 1717 return false; | 1701 return false; |
| (...skipping 802 matching lines...) |
| 2520 HeapObject* undefined = heap()->undefined_value(); | 2504 HeapObject* undefined = heap()->undefined_value(); |
| 2521 Object* obj = heap()->encountered_transition_arrays(); | 2505 Object* obj = heap()->encountered_transition_arrays(); |
| 2522 while (obj != Smi::FromInt(0)) { | 2506 while (obj != Smi::FromInt(0)) { |
| 2523 TransitionArray* array = TransitionArray::cast(obj); | 2507 TransitionArray* array = TransitionArray::cast(obj); |
| 2524 obj = array->next_link(); | 2508 obj = array->next_link(); |
| 2525 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2509 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
| 2526 } | 2510 } |
| 2527 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); | 2511 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); |
| 2528 } | 2512 } |
| 2529 | 2513 |
| 2530 void MarkCompactCollector::RecordMigratedSlot( | |
| 2531 Object* value, Address slot, LocalSlotsBuffer* old_to_old_slots, | |
| 2532 LocalSlotsBuffer* old_to_new_slots) { | |
| 2533 // When parallel compaction is in progress, store and slots buffer entries | |
| 2534 // require synchronization. | |
| 2535 if (heap_->InNewSpace(value)) { | |
| 2536 old_to_new_slots->Record(slot); | |
| 2537 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { | |
| 2538 old_to_old_slots->Record(slot); | |
| 2539 } | |
| 2540 } | |
| 2541 | |
| 2542 static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { | 2514 static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { |
| 2543 if (RelocInfo::IsCodeTarget(rmode)) { | 2515 if (RelocInfo::IsCodeTarget(rmode)) { |
| 2544 return CODE_TARGET_SLOT; | 2516 return CODE_TARGET_SLOT; |
| 2545 } else if (RelocInfo::IsCell(rmode)) { | 2517 } else if (RelocInfo::IsCell(rmode)) { |
| 2546 return CELL_TARGET_SLOT; | 2518 return CELL_TARGET_SLOT; |
| 2547 } else if (RelocInfo::IsEmbeddedObject(rmode)) { | 2519 } else if (RelocInfo::IsEmbeddedObject(rmode)) { |
| 2548 return EMBEDDED_OBJECT_SLOT; | 2520 return EMBEDDED_OBJECT_SLOT; |
| 2549 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { | 2521 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { |
| 2550 return DEBUG_TARGET_SLOT; | 2522 return DEBUG_TARGET_SLOT; |
| 2551 } | 2523 } |
| (...skipping 20 matching lines...) |
| 2572 slot_type = OBJECT_SLOT; | 2544 slot_type = OBJECT_SLOT; |
| 2573 } | 2545 } |
| 2574 } | 2546 } |
| 2575 RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, slot_type, addr); | 2547 RememberedSet<OLD_TO_OLD>::InsertTyped(source_page, slot_type, addr); |
| 2576 } | 2548 } |
| 2577 } | 2549 } |
| 2578 | 2550 |
| 2579 | 2551 |
| 2580 class RecordMigratedSlotVisitor final : public ObjectVisitor { | 2552 class RecordMigratedSlotVisitor final : public ObjectVisitor { |
| 2581 public: | 2553 public: |
| 2582 RecordMigratedSlotVisitor(MarkCompactCollector* collector, | 2554 explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector) |
| 2583 LocalSlotsBuffer* old_to_old_slots, | 2555 : collector_(collector) {} |
| 2584 LocalSlotsBuffer* old_to_new_slots) | |
| 2585 : collector_(collector), | |
| 2586 old_to_old_slots_(old_to_old_slots), | |
| 2587 old_to_new_slots_(old_to_new_slots) {} | |
| 2588 | 2556 |
| 2589 V8_INLINE void VisitPointer(Object** p) override { | 2557 V8_INLINE void VisitPointer(Object** p) override { |
| 2590 collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p), | 2558 RecordMigratedSlot(*p, reinterpret_cast<Address>(p)); |
| 2591 old_to_old_slots_, old_to_new_slots_); | |
| 2592 } | 2559 } |
| 2593 | 2560 |
| 2594 V8_INLINE void VisitPointers(Object** start, Object** end) override { | 2561 V8_INLINE void VisitPointers(Object** start, Object** end) override { |
| 2595 while (start < end) { | 2562 while (start < end) { |
| 2596 collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start), | 2563 RecordMigratedSlot(*start, reinterpret_cast<Address>(start)); |
| 2597 old_to_old_slots_, old_to_new_slots_); | |
| 2598 ++start; | 2564 ++start; |
| 2599 } | 2565 } |
| 2600 } | 2566 } |
| 2601 | 2567 |
| 2602 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override { | 2568 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override { |
| 2603 if (collector_->compacting_) { | 2569 if (collector_->compacting_) { |
| 2604 Address code_entry = Memory::Address_at(code_entry_slot); | 2570 Address code_entry = Memory::Address_at(code_entry_slot); |
| 2605 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { | 2571 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { |
| 2606 old_to_old_slots_->Record(CODE_ENTRY_SLOT, code_entry_slot); | 2572 RememberedSet<OLD_TO_OLD>::InsertTyped( |
| 2573 Page::FromAddress(code_entry_slot), CODE_ENTRY_SLOT, |
| 2574 code_entry_slot); |
| 2607 } | 2575 } |
| 2608 } | 2576 } |
| 2609 } | 2577 } |
| 2610 | 2578 |
| 2611 private: | 2579 private: |
| 2580 inline void RecordMigratedSlot(Object* value, Address slot) { |
| 2581 if (collector_->heap()->InNewSpace(value)) { |
| 2582 RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot), slot); |
| 2583 } else if (value->IsHeapObject() && |
| 2584 Page::FromAddress(reinterpret_cast<Address>(value)) |
| 2585 ->IsEvacuationCandidate()) { |
| 2586 RememberedSet<OLD_TO_OLD>::Insert(Page::FromAddress(slot), slot); |
| 2587 } |
| 2588 } |
| 2589 |
| 2612 MarkCompactCollector* collector_; | 2590 MarkCompactCollector* collector_; |
| 2613 LocalSlotsBuffer* old_to_old_slots_; | |
| 2614 LocalSlotsBuffer* old_to_new_slots_; | |
| 2615 }; | 2591 }; |
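
This visitor is the core of the change: migrated slots now go straight into the per-page remembered sets instead of into thread-local LocalSlotsBuffers, which only works if the remembered-set inserts tolerate concurrent writers from parallel evacuation tasks. A self-contained sketch of that idea, using an atomic bitmap keyed by slot offset (an illustration of the technique only, not V8's actual SlotSet layout; the page size is an assumption):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Hypothetical concurrent slot set: one bit per pointer-sized slot on a
    // page. fetch_or makes Insert safe to call from several evacuation tasks
    // at once, which is what lets the visitor skip thread-local buffering.
    class ConcurrentSlotSet {
     public:
      static const size_t kPageSize = size_t{1} << 20;  // assumed page size
      static const size_t kSlots = kPageSize / sizeof(void*);

      void Insert(size_t slot_offset) {
        size_t slot = slot_offset / sizeof(void*);
        bitmap_[slot / 64].fetch_or(uint64_t{1} << (slot % 64),
                                    std::memory_order_relaxed);
      }

      bool Contains(size_t slot_offset) {
        size_t slot = slot_offset / sizeof(void*);
        return (bitmap_[slot / 64].load(std::memory_order_relaxed) >>
                (slot % 64)) & 1;
      }

     private:
      std::atomic<uint64_t> bitmap_[kSlots / 64] = {};
    };
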
| 2616 | 2592 |
| 2617 | 2593 |
| 2618 // We scavenge new space simultaneously with sweeping. This is done in two | 2594 // We scavenge new space simultaneously with sweeping. This is done in two |
| 2619 // passes. | 2595 // passes. |
| 2620 // | 2596 // |
| 2621 // The first pass migrates all alive objects from one semispace to another or | 2597 // The first pass migrates all alive objects from one semispace to another or |
| 2622 // promotes them to old space. Forwarding address is written directly into | 2598 // promotes them to old space. Forwarding address is written directly into |
| 2623 // first word of object without any encoding. If object is dead we write | 2599 // first word of object without any encoding. If object is dead we write |
| 2624 // NULL as a forwarding address. | 2600 // NULL as a forwarding address. |
| 2625 // | 2601 // |
| 2626 // The second pass updates pointers to new space in all spaces. It is possible | 2602 // The second pass updates pointers to new space in all spaces. It is possible |
| 2627 // to encounter pointers to dead new space objects during traversal of pointers | 2603 // to encounter pointers to dead new space objects during traversal of pointers |
| 2628 // to new space. We should clear them to avoid encountering them during next | 2604 // to new space. We should clear them to avoid encountering them during next |
| 2629 // pointer iteration. This is an issue if the store buffer overflows and we | 2605 // pointer iteration. This is an issue if the store buffer overflows and we |
| 2630 // have to scan the entire old space, including dead objects, looking for | 2606 // have to scan the entire old space, including dead objects, looking for |
| 2631 // pointers to new space. | 2607 // pointers to new space. |
| 2632 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, | 2608 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, |
| 2633 int size, AllocationSpace dest, | 2609 int size, AllocationSpace dest) { |
| 2634 LocalSlotsBuffer* old_to_old_slots, | |
| 2635 LocalSlotsBuffer* old_to_new_slots) { | |
| 2636 Address dst_addr = dst->address(); | 2610 Address dst_addr = dst->address(); |
| 2637 Address src_addr = src->address(); | 2611 Address src_addr = src->address(); |
| 2638 DCHECK(heap()->AllowedToBeMigrated(src, dest)); | 2612 DCHECK(heap()->AllowedToBeMigrated(src, dest)); |
| 2639 DCHECK(dest != LO_SPACE); | 2613 DCHECK(dest != LO_SPACE); |
| 2640 if (dest == OLD_SPACE) { | 2614 if (dest == OLD_SPACE) { |
| 2641 DCHECK_OBJECT_SIZE(size); | 2615 DCHECK_OBJECT_SIZE(size); |
| 2642 DCHECK(IsAligned(size, kPointerSize)); | 2616 DCHECK(IsAligned(size, kPointerSize)); |
| 2643 | 2617 |
| 2644 heap()->MoveBlock(dst->address(), src->address(), size); | 2618 heap()->MoveBlock(dst->address(), src->address(), size); |
| 2645 if (FLAG_ignition && dst->IsBytecodeArray()) { | 2619 if (FLAG_ignition && dst->IsBytecodeArray()) { |
| 2646 PROFILE(isolate(), CodeMoveEvent(AbstractCode::cast(src), dst_addr)); | 2620 PROFILE(isolate(), CodeMoveEvent(AbstractCode::cast(src), dst_addr)); |
| 2647 } | 2621 } |
| 2648 RecordMigratedSlotVisitor visitor(this, old_to_old_slots, old_to_new_slots); | 2622 RecordMigratedSlotVisitor visitor(this); |
| 2649 dst->IterateBody(&visitor); | 2623 dst->IterateBody(&visitor); |
| 2650 } else if (dest == CODE_SPACE) { | 2624 } else if (dest == CODE_SPACE) { |
| 2651 DCHECK_CODEOBJECT_SIZE(size, heap()->code_space()); | 2625 DCHECK_CODEOBJECT_SIZE(size, heap()->code_space()); |
| 2652 PROFILE(isolate(), CodeMoveEvent(AbstractCode::cast(src), dst_addr)); | 2626 PROFILE(isolate(), CodeMoveEvent(AbstractCode::cast(src), dst_addr)); |
| 2653 heap()->MoveBlock(dst_addr, src_addr, size); | 2627 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2654 old_to_old_slots->Record(RELOCATED_CODE_OBJECT, dst_addr); | 2628 RememberedSet<OLD_TO_OLD>::InsertTyped(Page::FromAddress(dst_addr), |
| 2629 RELOCATED_CODE_OBJECT, dst_addr); |
| 2655 Code::cast(dst)->Relocate(dst_addr - src_addr); | 2630 Code::cast(dst)->Relocate(dst_addr - src_addr); |
| 2656 } else { | 2631 } else { |
| 2657 DCHECK_OBJECT_SIZE(size); | 2632 DCHECK_OBJECT_SIZE(size); |
| 2658 DCHECK(old_to_old_slots == nullptr); | |
| 2659 DCHECK(dest == NEW_SPACE); | 2633 DCHECK(dest == NEW_SPACE); |
| 2660 heap()->MoveBlock(dst_addr, src_addr, size); | 2634 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2661 } | 2635 } |
| 2662 heap()->OnMoveEvent(dst, src, size); | 2636 heap()->OnMoveEvent(dst, src, size); |
| 2663 Memory::Address_at(src_addr) = dst_addr; | 2637 Memory::Address_at(src_addr) = dst_addr; |
| 2664 } | 2638 } |
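
MigrateObject's final store plants the forwarding address in the first word of the evacuated object, exactly as the comment block above describes: unencoded, with NULL marking a dead new-space object. A hedged sketch of the matching read side in the second, pointer-updating pass (UpdateNewSpaceSlotSketch is a hypothetical helper written against that description, not the V8 function):

    // Hypothetical second-pass helper: follow the raw forwarding address
    // that MigrateObject wrote over the source object's first word.
    inline void UpdateNewSpaceSlotSketch(Address* slot) {
      Address forwarded = Memory::Address_at(*slot);  // first word of src
      if (forwarded == NULL) {
        *slot = NULL;       // dead object: clear to avoid stale pointers
      } else {
        *slot = forwarded;  // redirect to the object's new location
      }
    }
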
| 2665 | 2639 |
| 2666 static inline void UpdateTypedSlot(Isolate* isolate, ObjectVisitor* v, | 2640 static inline void UpdateTypedSlot(Isolate* isolate, ObjectVisitor* v, |
| 2667 SlotType slot_type, Address addr) { | 2641 SlotType slot_type, Address addr) { |
| 2668 switch (slot_type) { | 2642 switch (slot_type) { |
| (...skipping 288 matching lines...) |
| 2957 | 2931 |
| 2958 | 2932 |
| 2959 class MarkCompactCollector::Evacuator : public Malloced { | 2933 class MarkCompactCollector::Evacuator : public Malloced { |
| 2960 public: | 2934 public: |
| 2961 explicit Evacuator(MarkCompactCollector* collector) | 2935 explicit Evacuator(MarkCompactCollector* collector) |
| 2962 : collector_(collector), | 2936 : collector_(collector), |
| 2963 compaction_spaces_(collector->heap()), | 2937 compaction_spaces_(collector->heap()), |
| 2964 local_pretenuring_feedback_(HashMap::PointersMatch, | 2938 local_pretenuring_feedback_(HashMap::PointersMatch, |
| 2965 kInitialLocalPretenuringFeedbackCapacity), | 2939 kInitialLocalPretenuringFeedbackCapacity), |
| 2966 new_space_visitor_(collector->heap(), &compaction_spaces_, | 2940 new_space_visitor_(collector->heap(), &compaction_spaces_, |
| 2967 &old_to_old_slots_, &old_to_new_slots_, | |
| 2968 &local_pretenuring_feedback_), | 2941 &local_pretenuring_feedback_), |
| 2969 old_space_visitor_(collector->heap(), &compaction_spaces_, | 2942 old_space_visitor_(collector->heap(), &compaction_spaces_), |
| 2970 &old_to_old_slots_, &old_to_new_slots_), | |
| 2971 duration_(0.0), | 2943 duration_(0.0), |
| 2972 bytes_compacted_(0) {} | 2944 bytes_compacted_(0) {} |
| 2973 | 2945 |
| 2974 inline bool EvacuatePage(MemoryChunk* chunk); | 2946 inline bool EvacuatePage(MemoryChunk* chunk); |
| 2975 | 2947 |
| 2976 // Merge back locally cached info sequentially. Note that this method needs | 2948 // Merge back locally cached info sequentially. Note that this method needs |
| 2977 // to be called from the main thread. | 2949 // to be called from the main thread. |
| 2978 inline void Finalize(); | 2950 inline void Finalize(); |
| 2979 | 2951 |
| 2980 CompactionSpaceCollection* compaction_spaces() { return &compaction_spaces_; } | 2952 CompactionSpaceCollection* compaction_spaces() { return &compaction_spaces_; } |
| 2981 | 2953 |
| 2982 private: | 2954 private: |
| 2983 static const int kInitialLocalPretenuringFeedbackCapacity = 256; | 2955 static const int kInitialLocalPretenuringFeedbackCapacity = 256; |
| 2984 | 2956 |
| 2985 Heap* heap() { return collector_->heap(); } | 2957 Heap* heap() { return collector_->heap(); } |
| 2986 | 2958 |
| 2987 void ReportCompactionProgress(double duration, intptr_t bytes_compacted) { | 2959 void ReportCompactionProgress(double duration, intptr_t bytes_compacted) { |
| 2988 duration_ += duration; | 2960 duration_ += duration; |
| 2989 bytes_compacted_ += bytes_compacted; | 2961 bytes_compacted_ += bytes_compacted; |
| 2990 } | 2962 } |
| 2991 | 2963 |
| 2992 inline bool EvacuateSinglePage(MemoryChunk* p, HeapObjectVisitor* visitor); | 2964 inline bool EvacuateSinglePage(MemoryChunk* p, HeapObjectVisitor* visitor); |
| 2993 | 2965 |
| 2994 MarkCompactCollector* collector_; | 2966 MarkCompactCollector* collector_; |
| 2995 | 2967 |
| 2996 // Locally cached collector data. | 2968 // Locally cached collector data. |
| 2997 CompactionSpaceCollection compaction_spaces_; | 2969 CompactionSpaceCollection compaction_spaces_; |
| 2998 LocalSlotsBuffer old_to_old_slots_; | |
| 2999 LocalSlotsBuffer old_to_new_slots_; | |
| 3000 HashMap local_pretenuring_feedback_; | 2970 HashMap local_pretenuring_feedback_; |
| 3001 | 2971 |
| 3002 // Vistors for the corresponding spaces. | 2972 // Visitors for the corresponding spaces. |
| 3003 EvacuateNewSpaceVisitor new_space_visitor_; | 2973 EvacuateNewSpaceVisitor new_space_visitor_; |
| 3004 EvacuateOldSpaceVisitor old_space_visitor_; | 2974 EvacuateOldSpaceVisitor old_space_visitor_; |
| 3005 | 2975 |
| 3006 // Book keeping info. | 2976 // Book keeping info. |
| 3007 double duration_; | 2977 double duration_; |
| 3008 intptr_t bytes_compacted_; | 2978 intptr_t bytes_compacted_; |
| 3009 }; | 2979 }; |
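
The comment on Finalize pins down the threading contract: pages may be evacuated in parallel, but merging the cached compaction spaces and pretenuring feedback happens sequentially on the main thread. A hypothetical driver showing that shape (the real task dispatch lives in the elided compaction-task code below):

    // Hypothetical lifecycle implied by the class above: one Evacuator per
    // compaction task, pages evacuated concurrently, then every evacuator
    // finalized sequentially on the main thread.
    void RunEvacuationSketch(MarkCompactCollector* collector, int num_tasks) {
      Evacuator** evacuators = new Evacuator*[num_tasks];
      for (int i = 0; i < num_tasks; i++) {
        evacuators[i] = new Evacuator(collector);
      }
      // ... hand evacuation pages to parallel tasks; each task calls
      // evacuators[task_id]->EvacuatePage(chunk) on its own pages ...
      for (int i = 0; i < num_tasks; i++) {  // main thread only
        evacuators[i]->Finalize();
        delete evacuators[i];
      }
      delete[] evacuators;
    }
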
| 3010 | 2980 |
| 3011 bool MarkCompactCollector::Evacuator::EvacuateSinglePage( | 2981 bool MarkCompactCollector::Evacuator::EvacuateSinglePage( |
| 3012 MemoryChunk* p, HeapObjectVisitor* visitor) { | 2982 MemoryChunk* p, HeapObjectVisitor* visitor) { |
| (...skipping 41 matching lines...) |
| 3054 heap()->code_space()->MergeCompactionSpace( | 3024 heap()->code_space()->MergeCompactionSpace( |
| 3055 compaction_spaces_.Get(CODE_SPACE)); | 3025 compaction_spaces_.Get(CODE_SPACE)); |
| 3056 heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_); | 3026 heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_); |
| 3057 heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size()); | 3027 heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size()); |
| 3058 heap()->IncrementSemiSpaceCopiedObjectSize( | 3028 heap()->IncrementSemiSpaceCopiedObjectSize( |
| 3059 new_space_visitor_.semispace_copied_size()); | 3029 new_space_visitor_.semispace_copied_size()); |
| 3060 heap()->IncrementYoungSurvivorsCounter( | 3030 heap()->IncrementYoungSurvivorsCounter( |
| 3061 new_space_visitor_.promoted_size() + | 3031 new_space_visitor_.promoted_size() + |
| 3062 new_space_visitor_.semispace_copied_size()); | 3032 new_space_visitor_.semispace_copied_size()); |
| 3063 heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_); | 3033 heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_); |
| 3064 // Move locally recorded slots to the global remembered sets. | |
| 3065 old_to_new_slots_.Iterate( | |
| 3066 [](Address slot) { | |
| 3067 Page* page = Page::FromAddress(slot); | |
| 3068 RememberedSet<OLD_TO_NEW>::Insert(page, slot); | |
| 3069 }, | |
| 3070 [](SlotType type, Address slot) { UNREACHABLE(); }); | |
| 3071 old_to_old_slots_.Iterate( | |
| 3072 [](Address slot) { | |
| 3073 Page* page = Page::FromAddress(slot); | |
| 3074 RememberedSet<OLD_TO_OLD>::Insert(page, slot); | |
| 3075 }, | |
| 3076 [](SlotType type, Address slot) { | |
| 3077 Page* page = Page::FromAddress(slot); | |
| 3078 RememberedSet<OLD_TO_OLD>::InsertTyped(page, type, slot); | |
| 3079 }); | |
| 3080 } | 3034 } |
| 3081 | 3035 |
| 3082 int MarkCompactCollector::NumberOfParallelCompactionTasks(int pages, | 3036 int MarkCompactCollector::NumberOfParallelCompactionTasks(int pages, |
| 3083 intptr_t live_bytes) { | 3037 intptr_t live_bytes) { |
| 3084 if (!FLAG_parallel_compaction) return 1; | 3038 if (!FLAG_parallel_compaction) return 1; |
| 3085 // Compute the number of needed tasks based on a target compaction time, the | 3039 // Compute the number of needed tasks based on a target compaction time, the |
| 3086 // profiled compaction speed and marked live memory. | 3040 // profiled compaction speed and marked live memory. |
| 3087 // | 3041 // |
| 3088 // The number of parallel compaction tasks is limited by: | 3042 // The number of parallel compaction tasks is limited by: |
| 3089 // - #evacuation pages | 3043 // - #evacuation pages |
| (...skipping 770 matching lines...) |
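
The comment above sketches the task-count heuristic: enough tasks to compact the marked live bytes at the profiled speed within a target pause, bounded by the number of evacuation pages and, in the elided lines, by available cores. A worked sketch under those assumptions (the target time and clamps are illustrative stand-ins, not the elided constants):

    #include <algorithm>
    #include <cstdint>

    // Hypothetical version of the heuristic described in the comment above:
    // enough tasks to finish live_bytes at the profiled speed within a
    // target pause, clamped by #evacuation pages and available cores.
    int NumberOfCompactionTasksSketch(int pages, intptr_t live_bytes,
                                      intptr_t compaction_speed,  // bytes/ms
                                      int available_cores) {
      const double kTargetCompactionTimeMs = 1.0;  // assumed target pause
      if (compaction_speed <= 0) return std::max(1, std::min(pages, available_cores));
      int tasks = 1 + static_cast<int>(
          live_bytes / (compaction_speed * kTargetCompactionTimeMs));
      tasks = std::min(tasks, pages);  // at most one task per page
      return std::max(1, std::min(tasks, available_cores));
    }
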
| 3860 MarkBit mark_bit = Marking::MarkBitFrom(host); | 3814 MarkBit mark_bit = Marking::MarkBitFrom(host); |
| 3861 if (Marking::IsBlack(mark_bit)) { | 3815 if (Marking::IsBlack(mark_bit)) { |
| 3862 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 3816 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
| 3863 RecordRelocSlot(host, &rinfo, target); | 3817 RecordRelocSlot(host, &rinfo, target); |
| 3864 } | 3818 } |
| 3865 } | 3819 } |
| 3866 } | 3820 } |
| 3867 | 3821 |
| 3868 } // namespace internal | 3822 } // namespace internal |
| 3869 } // namespace v8 | 3823 } // namespace v8 |