OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1552 matching lines...) |
1563 } | 1563 } |
1564 return false; | 1564 return false; |
1565 } | 1565 } |
1566 | 1566 |
1567 protected: | 1567 protected: |
1568 Heap* heap_; | 1568 Heap* heap_; |
1569 SlotsBuffer** evacuation_slots_buffer_; | 1569 SlotsBuffer** evacuation_slots_buffer_; |
1570 }; | 1570 }; |
1571 | 1571 |
1572 | 1572 |
1573 class MarkCompactCollector::EvacuateNewSpaceVisitor | 1573 class MarkCompactCollector::EvacuateNewSpaceVisitor final |
1574 : public MarkCompactCollector::EvacuateVisitorBase { | 1574 : public MarkCompactCollector::EvacuateVisitorBase { |
1575 public: | 1575 public: |
| 1576 static const intptr_t kLabSize = 4 * KB; |
| 1577 static const intptr_t kMaxLabObjectSize = 256; |
| 1578 |
1576 explicit EvacuateNewSpaceVisitor(Heap* heap, | 1579 explicit EvacuateNewSpaceVisitor(Heap* heap, |
1577 SlotsBuffer** evacuation_slots_buffer) | 1580 SlotsBuffer** evacuation_slots_buffer) |
1578 : EvacuateVisitorBase(heap, evacuation_slots_buffer) {} | 1581 : EvacuateVisitorBase(heap, evacuation_slots_buffer), |
| 1582 buffer_(LocalAllocationBuffer::InvalidBuffer()), |
| 1583 space_to_allocate_(NEW_SPACE) {} |
1579 | 1584 |
1580 bool Visit(HeapObject* object) override { | 1585 bool Visit(HeapObject* object) override { |
1581 Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); | 1586 Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); |
1582 int size = object->Size(); | 1587 int size = object->Size(); |
1583 HeapObject* target_object = nullptr; | 1588 HeapObject* target_object = nullptr; |
1584 if (heap_->ShouldBePromoted(object->address(), size) && | 1589 if (heap_->ShouldBePromoted(object->address(), size) && |
1585 TryEvacuateObject(heap_->old_space(), object, &target_object)) { | 1590 TryEvacuateObject(heap_->old_space(), object, &target_object)) { |
1586 // If we end up needing more special cases, we should factor this out. | 1591 // If we end up needing more special cases, we should factor this out. |
1587 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { | 1592 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { |
1588 heap_->array_buffer_tracker()->Promote( | 1593 heap_->array_buffer_tracker()->Promote( |
1589 JSArrayBuffer::cast(target_object)); | 1594 JSArrayBuffer::cast(target_object)); |
1590 } | 1595 } |
1591 heap_->IncrementPromotedObjectsSize(size); | 1596 heap_->IncrementPromotedObjectsSize(size); |
1592 return true; | 1597 return true; |
1593 } | 1598 } |
1594 | 1599 HeapObject* target = nullptr; |
1595 AllocationAlignment alignment = object->RequiredAlignment(); | 1600 AllocationSpace space = AllocateTargetObject(object, &target); |
1596 AllocationResult allocation = | |
1597 heap_->new_space()->AllocateRaw(size, alignment); | |
1598 if (allocation.IsRetry()) { | |
1599 if (!heap_->new_space()->AddFreshPage()) { | |
1600 // Shouldn't happen. We are sweeping linearly, and to-space | |
1601 // has the same number of pages as from-space, so there is | |
1602 // always room unless we are in an OOM situation. | |
1603 FatalProcessOutOfMemory("MarkCompactCollector: semi-space copy\n"); | |
1604 } | |
1605 allocation = heap_->new_space()->AllocateRaw(size, alignment); | |
1606 DCHECK(!allocation.IsRetry()); | |
1607 } | |
1608 Object* target = allocation.ToObjectChecked(); | |
1609 | |
1610 heap_->mark_compact_collector()->MigrateObject( | 1601 heap_->mark_compact_collector()->MigrateObject( |
1611 HeapObject::cast(target), object, size, NEW_SPACE, nullptr); | 1602 HeapObject::cast(target), object, size, space, |
| 1603 (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_); |
1612 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | 1604 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { |
1613 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); | 1605 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); |
1614 } | 1606 } |
1615 heap_->IncrementSemiSpaceCopiedObjectSize(size); | 1607 heap_->IncrementSemiSpaceCopiedObjectSize(size); |
1616 return true; | 1608 return true; |
1617 } | 1609 } |
| 1610 |
| 1611 private: |
| 1612 enum NewSpaceAllocationMode { |
| 1613 kNonstickyBailoutOldSpace, |
| 1614 kStickyBailoutOldSpace, |
| 1615 }; |
| 1616 |
| 1617 inline AllocationSpace AllocateTargetObject(HeapObject* old_object, |
| 1618 HeapObject** target_object) { |
| 1619 const int size = old_object->Size(); |
| 1620 AllocationAlignment alignment = old_object->RequiredAlignment(); |
| 1621 AllocationResult allocation; |
| 1622 if (space_to_allocate_ == NEW_SPACE) { |
| 1623 if (size > kMaxLabObjectSize) { |
| 1624 allocation = |
| 1625 AllocateInNewSpace(size, alignment, kNonstickyBailoutOldSpace); |
| 1626 } else { |
| 1627 allocation = AllocateInLab(size, alignment); |
| 1628 } |
| 1629 } |
| 1630 if (allocation.IsRetry() || (space_to_allocate_ == OLD_SPACE)) { |
| 1631 allocation = AllocateInOldSpace(size, alignment); |
| 1632 } |
| 1633 bool ok = allocation.To(target_object); |
| 1634 DCHECK(ok); |
| 1635 USE(ok); |
| 1636 return space_to_allocate_; |
| 1637 } |
| 1638 |
| 1639 inline bool NewLocalAllocationBuffer() { |
| 1640 AllocationResult result = |
| 1641 AllocateInNewSpace(kLabSize, kWordAligned, kStickyBailoutOldSpace); |
| 1642 LocalAllocationBuffer saved_old_buffer = buffer_; |
| 1643 buffer_ = LocalAllocationBuffer::FromResult(heap_, result, kLabSize); |
| 1644 if (buffer_.IsValid()) { |
| 1645 buffer_.TryMerge(&saved_old_buffer); |
| 1646 return true; |
| 1647 } |
| 1648 return false; |
| 1649 } |
| 1650 |
| 1651 inline AllocationResult AllocateInNewSpace(int size_in_bytes, |
| 1652 AllocationAlignment alignment, |
| 1653 NewSpaceAllocationMode mode) { |
| 1654 AllocationResult allocation = |
| 1655 heap_->new_space()->AllocateRawSynchronized(size_in_bytes, alignment); |
| 1656 if (allocation.IsRetry()) { |
| 1657 if (!heap_->new_space()->AddFreshPageSynchronized()) { |
| 1658 if (mode == kStickyBailoutOldSpace) space_to_allocate_ = OLD_SPACE; |
| 1659 } else { |
| 1660 allocation = heap_->new_space()->AllocateRawSynchronized(size_in_bytes, |
| 1661 alignment); |
| 1662 if (allocation.IsRetry()) { |
| 1663 if (mode == kStickyBailoutOldSpace) space_to_allocate_ = OLD_SPACE; |
| 1664 } |
| 1665 } |
| 1666 } |
| 1667 return allocation; |
| 1668 } |
| 1669 |
| 1670 inline AllocationResult AllocateInOldSpace(int size_in_bytes, |
| 1671 AllocationAlignment alignment) { |
| 1672 AllocationResult allocation = |
| 1673 heap_->old_space()->AllocateRaw(size_in_bytes, alignment); |
| 1674 if (allocation.IsRetry()) { |
| 1675 FatalProcessOutOfMemory( |
| 1676 "MarkCompactCollector: semi-space copy, fallback in old gen\n"); |
| 1677 } |
| 1678 return allocation; |
| 1679 } |
| 1680 |
| 1681 inline AllocationResult AllocateInLab(int size_in_bytes, |
| 1682 AllocationAlignment alignment) { |
| 1683 AllocationResult allocation; |
| 1684 if (!buffer_.IsValid()) { |
| 1685 if (!NewLocalAllocationBuffer()) { |
| 1686 space_to_allocate_ = OLD_SPACE; |
| 1687 return AllocationResult::Retry(OLD_SPACE); |
| 1688 } |
| 1689 } |
| 1690 allocation = buffer_.AllocateRawAligned(size_in_bytes, alignment); |
| 1691 if (allocation.IsRetry()) { |
| 1692 if (!NewLocalAllocationBuffer()) { |
| 1693 space_to_allocate_ = OLD_SPACE; |
| 1694 return AllocationResult::Retry(OLD_SPACE); |
| 1695 } else { |
| 1696 allocation = buffer_.AllocateRawAligned(size_in_bytes, alignment); |
| 1697 if (allocation.IsRetry()) { |
| 1698 space_to_allocate_ = OLD_SPACE; |
| 1699 return AllocationResult::Retry(OLD_SPACE); |
| 1700 } |
| 1701 } |
| 1702 } |
| 1703 return allocation; |
| 1704 } |
| 1705 |
| 1706 LocalAllocationBuffer buffer_; |
| 1707 AllocationSpace space_to_allocate_; |
1618 }; | 1708 }; |
1619 | 1709 |
1620 | 1710 |
1621 class MarkCompactCollector::EvacuateOldSpaceVisitor | 1711 class MarkCompactCollector::EvacuateOldSpaceVisitor final |
1622 : public MarkCompactCollector::EvacuateVisitorBase { | 1712 : public MarkCompactCollector::EvacuateVisitorBase { |
1623 public: | 1713 public: |
1624 EvacuateOldSpaceVisitor(Heap* heap, | 1714 EvacuateOldSpaceVisitor(Heap* heap, |
1625 CompactionSpaceCollection* compaction_spaces, | 1715 CompactionSpaceCollection* compaction_spaces, |
1626 SlotsBuffer** evacuation_slots_buffer) | 1716 SlotsBuffer** evacuation_slots_buffer) |
1627 : EvacuateVisitorBase(heap, evacuation_slots_buffer), | 1717 : EvacuateVisitorBase(heap, evacuation_slots_buffer), |
1628 compaction_spaces_(compaction_spaces) {} | 1718 compaction_spaces_(compaction_spaces) {} |
1629 | 1719 |
1630 bool Visit(HeapObject* object) override { | 1720 bool Visit(HeapObject* object) override { |
1631 CompactionSpace* target_space = compaction_spaces_->Get( | 1721 CompactionSpace* target_space = compaction_spaces_->Get( |
(...skipping 2337 matching lines...) |
3969 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4059 MarkBit mark_bit = Marking::MarkBitFrom(host); |
3970 if (Marking::IsBlack(mark_bit)) { | 4060 if (Marking::IsBlack(mark_bit)) { |
3971 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4061 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
3972 RecordRelocSlot(&rinfo, target); | 4062 RecordRelocSlot(&rinfo, target); |
3973 } | 4063 } |
3974 } | 4064 } |
3975 } | 4065 } |
3976 | 4066 |
3977 } // namespace internal | 4067 } // namespace internal |
3978 } // namespace v8 | 4068 } // namespace v8 |
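
For orientation, here is a standalone sketch (not V8 code) of the allocation policy the new EvacuateNewSpaceVisitor implements above: objects up to kMaxLabObjectSize are bump-allocated from a 4 KB local allocation buffer (LAB), larger objects go straight to new space, and a new-space failure makes old space the fallback target. All types, sizes, and helpers below are simplified stand-ins for the real NewSpace/LocalAllocationBuffer machinery, and for brevity the sketch treats every failure as sticky, whereas the CL's kNonstickyBailoutOldSpace mode falls back for a single object only.

#include <cstdio>

enum AllocationSpace { NEW_SPACE, OLD_SPACE };

// Simplified stand-in for new space: a fixed byte budget that both direct
// allocations and LAB refills draw from.
struct NewSpace {
  int remaining;
  bool Allocate(int size) {
    if (size > remaining) return false;
    remaining -= size;
    return true;
  }
};

class EvacuationAllocator {
 public:
  static const int kLabSize = 4 * 1024;
  static const int kMaxLabObjectSize = 256;

  explicit EvacuationAllocator(NewSpace* new_space) : new_space_(new_space) {}

  // Mirrors AllocateTargetObject(): objects above kMaxLabObjectSize bypass
  // the LAB; a failed new-space allocation flips space_ permanently, like
  // the kStickyBailoutOldSpace mode in the CL (the non-sticky mode is
  // folded in here for brevity).
  AllocationSpace Allocate(int size) {
    if (space_ == NEW_SPACE) {
      bool ok = (size > kMaxLabObjectSize) ? new_space_->Allocate(size)
                                           : AllocateInLab(size);
      if (ok) return NEW_SPACE;
      space_ = OLD_SPACE;  // Sticky: later objects skip new space entirely.
    }
    return OLD_SPACE;  // Old space is assumed to always have room; the real
                       // code dies with FatalProcessOutOfMemory otherwise.
  }

 private:
  // Bump allocation out of the current LAB, refilling it from new space
  // when it runs dry, analogous to NewLocalAllocationBuffer().
  bool AllocateInLab(int size) {
    if (lab_remaining_ < size) {
      if (!new_space_->Allocate(kLabSize)) return false;
      lab_remaining_ = kLabSize;
    }
    lab_remaining_ -= size;
    return true;
  }

  NewSpace* new_space_;
  int lab_remaining_ = 0;
  AllocationSpace space_ = NEW_SPACE;
};

int main() {
  NewSpace new_space{8 * 1024};  // Two LABs' worth of to-space.
  EvacuationAllocator allocator(&new_space);
  printf("%d\n", allocator.Allocate(128) == NEW_SPACE);   // 1: served from a LAB
  printf("%d\n", allocator.Allocate(1024) == NEW_SPACE);  // 1: direct new-space path
  printf("%d\n", allocator.Allocate(4096) == OLD_SPACE);  // 1: new space exhausted
  printf("%d\n", allocator.Allocate(16) == OLD_SPACE);    // 1: bail-out is sticky
}

The sticky bail-out is the interesting design choice: once new space has failed to produce a fresh page, retrying it for every subsequent object would only add synchronization traffic, so the visitor remembers the failure in space_to_allocate_ and routes the remaining objects to old space.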