OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1599 matching lines...) |
1610 class MarkCompactCollector::EvacuateNewSpaceVisitor final | 1610 class MarkCompactCollector::EvacuateNewSpaceVisitor final |
1611 : public MarkCompactCollector::EvacuateVisitorBase { | 1611 : public MarkCompactCollector::EvacuateVisitorBase { |
1612 public: | 1612 public: |
1613 static const intptr_t kLabSize = 4 * KB; | 1613 static const intptr_t kLabSize = 4 * KB; |
1614 static const intptr_t kMaxLabObjectSize = 256; | 1614 static const intptr_t kMaxLabObjectSize = 256; |
1615 | 1615 |
1616 explicit EvacuateNewSpaceVisitor(Heap* heap, | 1616 explicit EvacuateNewSpaceVisitor(Heap* heap, |
1617 SlotsBuffer** evacuation_slots_buffer) | 1617 SlotsBuffer** evacuation_slots_buffer) |
1618 : EvacuateVisitorBase(heap, evacuation_slots_buffer), | 1618 : EvacuateVisitorBase(heap, evacuation_slots_buffer), |
1619 buffer_(LocalAllocationBuffer::InvalidBuffer()), | 1619 buffer_(LocalAllocationBuffer::InvalidBuffer()), |
1620 space_to_allocate_(NEW_SPACE) {} | 1620 space_to_allocate_(NEW_SPACE), |
| 1621 promoted_size_(0), |
| 1622 semispace_copied_size_(0) {} |
1621 | 1623 |
1622 bool Visit(HeapObject* object) override { | 1624 bool Visit(HeapObject* object) override { |
1623 Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); | 1625 Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT); |
1624 int size = object->Size(); | 1626 int size = object->Size(); |
1625 HeapObject* target_object = nullptr; | 1627 HeapObject* target_object = nullptr; |
1626 if (heap_->ShouldBePromoted(object->address(), size) && | 1628 if (heap_->ShouldBePromoted(object->address(), size) && |
1627 TryEvacuateObject(heap_->old_space(), object, &target_object)) { | 1629 TryEvacuateObject(heap_->old_space(), object, &target_object)) { |
1628 // If we end up needing more special cases, we should factor this out. | 1630 // If we end up needing more special cases, we should factor this out. |
1629 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { | 1631 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { |
1630 heap_->array_buffer_tracker()->Promote( | 1632 heap_->array_buffer_tracker()->Promote( |
1631 JSArrayBuffer::cast(target_object)); | 1633 JSArrayBuffer::cast(target_object)); |
1632 } | 1634 } |
1633 heap_->IncrementPromotedObjectsSize(size); | 1635 promoted_size_ += size; |
1634 return true; | 1636 return true; |
1635 } | 1637 } |
1636 HeapObject* target = nullptr; | 1638 HeapObject* target = nullptr; |
1637 AllocationSpace space = AllocateTargetObject(object, &target); | 1639 AllocationSpace space = AllocateTargetObject(object, &target); |
1638 heap_->mark_compact_collector()->MigrateObject( | 1640 heap_->mark_compact_collector()->MigrateObject( |
1639 HeapObject::cast(target), object, size, space, | 1641 HeapObject::cast(target), object, size, space, |
1640 (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_); | 1642 (space == NEW_SPACE) ? nullptr : evacuation_slots_buffer_); |
1641 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | 1643 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { |
1642 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); | 1644 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); |
1643 } | 1645 } |
1644 heap_->IncrementSemiSpaceCopiedObjectSize(size); | 1646 semispace_copied_size_ += size; |
1645 return true; | 1647 return true; |
1646 } | 1648 } |
1647 | 1649 |
| 1650 intptr_t promoted_size() { return promoted_size_; } |
| 1651 intptr_t semispace_copied_size() { return semispace_copied_size_; } |
| 1652 |
1648 private: | 1653 private: |
1649 enum NewSpaceAllocationMode { | 1654 enum NewSpaceAllocationMode { |
1650 kNonstickyBailoutOldSpace, | 1655 kNonstickyBailoutOldSpace, |
1651 kStickyBailoutOldSpace, | 1656 kStickyBailoutOldSpace, |
1652 }; | 1657 }; |
1653 | 1658 |
1654 inline AllocationSpace AllocateTargetObject(HeapObject* old_object, | 1659 inline AllocationSpace AllocateTargetObject(HeapObject* old_object, |
1655 HeapObject** target_object) { | 1660 HeapObject** target_object) { |
1656 const int size = old_object->Size(); | 1661 const int size = old_object->Size(); |
1657 AllocationAlignment alignment = old_object->RequiredAlignment(); | 1662 AllocationAlignment alignment = old_object->RequiredAlignment(); |
(...skipping 77 matching lines...) |
1735 space_to_allocate_ = OLD_SPACE; | 1740 space_to_allocate_ = OLD_SPACE; |
1736 return AllocationResult::Retry(OLD_SPACE); | 1741 return AllocationResult::Retry(OLD_SPACE); |
1737 } | 1742 } |
1738 } | 1743 } |
1739 } | 1744 } |
1740 return allocation; | 1745 return allocation; |
1741 } | 1746 } |
1742 | 1747 |
1743 LocalAllocationBuffer buffer_; | 1748 LocalAllocationBuffer buffer_; |
1744 AllocationSpace space_to_allocate_; | 1749 AllocationSpace space_to_allocate_; |
| 1750 intptr_t promoted_size_; |
| 1751 intptr_t semispace_copied_size_; |
1745 }; | 1752 }; |
1746 | 1753 |
1747 | 1754 |
1748 class MarkCompactCollector::EvacuateOldSpaceVisitor final | 1755 class MarkCompactCollector::EvacuateOldSpaceVisitor final |
1749 : public MarkCompactCollector::EvacuateVisitorBase { | 1756 : public MarkCompactCollector::EvacuateVisitorBase { |
1750 public: | 1757 public: |
1751 EvacuateOldSpaceVisitor(Heap* heap, | 1758 EvacuateOldSpaceVisitor(Heap* heap, |
1752 CompactionSpaceCollection* compaction_spaces, | 1759 CompactionSpaceCollection* compaction_spaces, |
1753 SlotsBuffer** evacuation_slots_buffer) | 1760 SlotsBuffer** evacuation_slots_buffer) |
1754 : EvacuateVisitorBase(heap, evacuation_slots_buffer), | 1761 : EvacuateVisitorBase(heap, evacuation_slots_buffer), |
(...skipping 1334 matching lines...) |
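
The hunk above changes EvacuateNewSpaceVisitor so that promoted and semispace-copied byte counts are accumulated in per-visitor fields (promoted_size_, semispace_copied_size_) and exposed through getters, instead of bumping the shared heap counters for every migrated object. A minimal standalone sketch of that pattern follows; the names are stand-ins, not V8's actual Heap/HeapObject classes.

// Standalone sketch of the per-visitor counter pattern (stand-in names, not
// V8's real classes): visit-time bookkeeping only touches members of the
// visitor; the caller reads the totals once after the pass.
#include <stdint.h>

class NewSpaceEvacuationSketch {
 public:
  // Called once per live object; `promoted` says whether the object was
  // moved to old space or copied within the semispaces.
  void AccountObject(int size, bool promoted) {
    if (promoted) {
      promoted_size_ += size;
    } else {
      semispace_copied_size_ += size;
    }
  }

  intptr_t promoted_size() const { return promoted_size_; }
  intptr_t semispace_copied_size() const { return semispace_copied_size_; }

 private:
  intptr_t promoted_size_ = 0;
  intptr_t semispace_copied_size_ = 0;
};

Keeping the counts local to the visitor means Visit() no longer updates shared heap statistics on every object; the totals are folded into the heap counters once, after the whole pass.
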
3089 | 3096 |
3090 // Store allocation range before flipping semispaces. | 3097 // Store allocation range before flipping semispaces. |
3091 Address from_bottom = new_space->bottom(); | 3098 Address from_bottom = new_space->bottom(); |
3092 Address from_top = new_space->top(); | 3099 Address from_top = new_space->top(); |
3093 | 3100 |
3094 // Flip the semispaces. After flipping, to space is empty, from space has | 3101 // Flip the semispaces. After flipping, to space is empty, from space has |
3095 // live objects. | 3102 // live objects. |
3096 new_space->Flip(); | 3103 new_space->Flip(); |
3097 new_space->ResetAllocationInfo(); | 3104 new_space->ResetAllocationInfo(); |
3098 | 3105 |
3099 int survivors_size = 0; | |
3100 | |
3101 // First pass: traverse all objects in inactive semispace, remove marks, | 3106 // First pass: traverse all objects in inactive semispace, remove marks, |
3102 // migrate live objects and write forwarding addresses. This stage puts | 3107 // migrate live objects and write forwarding addresses. This stage puts |
3103 // new entries in the store buffer and may cause some pages to be marked | 3108 // new entries in the store buffer and may cause some pages to be marked |
3104 // scan-on-scavenge. | 3109 // scan-on-scavenge. |
3105 NewSpacePageIterator it(from_bottom, from_top); | 3110 NewSpacePageIterator it(from_bottom, from_top); |
3106 EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_); | 3111 EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_); |
3107 while (it.has_next()) { | 3112 while (it.has_next()) { |
3108 NewSpacePage* p = it.next(); | 3113 NewSpacePage* p = it.next(); |
3109 survivors_size += p->LiveBytes(); | |
3110 bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits); | 3114 bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits); |
3111 USE(ok); | 3115 USE(ok); |
3112 DCHECK(ok); | 3116 DCHECK(ok); |
3113 } | 3117 } |
3114 | 3118 heap_->IncrementPromotedObjectsSize( |
3115 heap_->IncrementYoungSurvivorsCounter(survivors_size); | 3119 static_cast<int>(new_space_visitor.promoted_size())); |
| 3120 heap_->IncrementSemiSpaceCopiedObjectSize( |
| 3121 static_cast<int>(new_space_visitor.semispace_copied_size())); |
| 3122 heap_->IncrementYoungSurvivorsCounter( |
| 3123 static_cast<int>(new_space_visitor.promoted_size()) + |
| 3124 static_cast<int>(new_space_visitor.semispace_copied_size())); |
3116 new_space->set_age_mark(new_space->top()); | 3125 new_space->set_age_mark(new_space->top()); |
3117 } | 3126 } |
3118 | 3127 |
3119 | 3128 |
3120 void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized( | 3129 void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized( |
3121 SlotsBuffer* evacuation_slots_buffer) { | 3130 SlotsBuffer* evacuation_slots_buffer) { |
3122 base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_); | 3131 base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_); |
3123 evacuation_slots_buffers_.Add(evacuation_slots_buffer); | 3132 evacuation_slots_buffers_.Add(evacuation_slots_buffer); |
3124 } | 3133 } |
3125 | 3134 |
(...skipping 970 matching lines...) |
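
In EvacuateNewSpaces above, the removed per-page survivors_size accumulation over p->LiveBytes() is replaced by publishing the visitor's totals after the loop: promoted bytes plus semispace-copied bytes is exactly the survivor size. A small self-contained sketch of that publish step, with stand-in types instead of the real Heap counter methods:

// Sketch of the publish step (stand-in types; the real code calls
// heap_->IncrementPromotedObjectsSize() etc. directly). The point is that
// the young-survivors counter is simply promoted + semispace-copied bytes,
// so a separate survivors_size accumulator is redundant.
#include <stdint.h>

struct VisitorTotals {               // stand-in for the visitor's getters
  intptr_t promoted_size;
  intptr_t semispace_copied_size;
};

struct HeapStats {                   // stand-in for the Heap counters
  int64_t promoted_objects_size = 0;
  int64_t semispace_copied_object_size = 0;
  int64_t young_survivors = 0;
};

void PublishEvacuationStats(const VisitorTotals& totals, HeapStats* heap) {
  heap->promoted_objects_size += totals.promoted_size;
  heap->semispace_copied_object_size += totals.semispace_copied_size;
  heap->young_survivors +=
      totals.promoted_size + totals.semispace_copied_size;
}
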
4096 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4105 MarkBit mark_bit = Marking::MarkBitFrom(host); |
4097 if (Marking::IsBlack(mark_bit)) { | 4106 if (Marking::IsBlack(mark_bit)) { |
4098 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4107 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
4099 RecordRelocSlot(&rinfo, target); | 4108 RecordRelocSlot(&rinfo, target); |
4100 } | 4109 } |
4101 } | 4110 } |
4102 } | 4111 } |
4103 | 4112 |
4104 } // namespace internal | 4113 } // namespace internal |
4105 } // namespace v8 | 4114 } // namespace v8 |