Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 1535723002: [heap] Use HashMap as scratchpad backing store (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Revive the found counter on the AllocationSite (created 4 years, 11 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
(...skipping 1596 matching lines...)
 };


 class MarkCompactCollector::EvacuateNewSpaceVisitor final
     : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   static const intptr_t kLabSize = 4 * KB;
   static const intptr_t kMaxLabObjectSize = 256;

   explicit EvacuateNewSpaceVisitor(Heap* heap,
-                                   SlotsBuffer** evacuation_slots_buffer)
+                                   SlotsBuffer** evacuation_slots_buffer,
+                                   HashMap* local_pretenuring_feedback)
       : EvacuateVisitorBase(heap, evacuation_slots_buffer),
         buffer_(LocalAllocationBuffer::InvalidBuffer()),
         space_to_allocate_(NEW_SPACE),
         promoted_size_(0),
-        semispace_copied_size_(0) {}
+        semispace_copied_size_(0),
+        local_pretenuring_feedback_(local_pretenuring_feedback) {}

   bool Visit(HeapObject* object) override {
-    Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
+    heap_->UpdateAllocationSite(object, local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
         TryEvacuateObject(heap_->old_space(), object, &target_object)) {
       // If we end up needing more special cases, we should factor this out.
       if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
         heap_->array_buffer_tracker()->Promote(
             JSArrayBuffer::cast(target_object));
       }
       promoted_size_ += size;
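The Visit() change above is the core of this patch: feedback that used to go through the global allocation-site scratchpad (Heap::RECORD_SCRATCHPAD_SLOT) is now recorded into a HashMap owned by the visitor, so the hot path touches no shared state. A minimal sketch of that pattern, using std::unordered_map and a stand-in memento field in place of V8's HashMap and real memento lookup:

    #include <unordered_map>

    struct AllocationSite {};
    struct HeapObject {
      // Stand-in for V8's real lookup, which searches for an
      // AllocationMemento placed directly behind the object.
      AllocationSite* memento_site = nullptr;
    };

    using LocalFeedback = std::unordered_map<AllocationSite*, int>;

    // Hot path, called once per evacuated object. The map belongs to a
    // single evacuation task, so the increment needs no locks or atomics.
    void UpdateAllocationSite(HeapObject* object, LocalFeedback* local) {
      if (AllocationSite* site = object->memento_site) {
        ++(*local)[site];
      }
    }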
(...skipping 106 matching lines...)
         }
       }
     }
     return allocation;
   }

   LocalAllocationBuffer buffer_;
   AllocationSpace space_to_allocate_;
   intptr_t promoted_size_;
   intptr_t semispace_copied_size_;
+  HashMap* local_pretenuring_feedback_;
 };


 class MarkCompactCollector::EvacuateOldSpaceVisitor final
     : public MarkCompactCollector::EvacuateVisitorBase {
  public:
   EvacuateOldSpaceVisitor(Heap* heap,
                           CompactionSpaceCollection* compaction_spaces,
                           SlotsBuffer** evacuation_slots_buffer)
       : EvacuateVisitorBase(heap, evacuation_slots_buffer),
(...skipping 1317 matching lines...)
   // The target object has to be black.
   CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));

   // The target object is black but we don't know if the source slot is black.
   // The source object could have died and the slot could be part of a free
   // space. Use the mark bit iterator to find out about liveness of the slot.
   CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
 }


-void MarkCompactCollector::EvacuateNewSpace() {
+void MarkCompactCollector::EvacuateNewSpacePrologue() {
   // There are soft limits in the allocation code, designed trigger a mark
   // sweep collection by failing allocations. But since we are already in
   // a mark-sweep allocation, there is no sense in trying to trigger one.
   AlwaysAllocateScope scope(isolate());

   NewSpace* new_space = heap()->new_space();

   // Store allocation range before flipping semispaces.
   Address from_bottom = new_space->bottom();
   Address from_top = new_space->top();

   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space->Flip();
   new_space->ResetAllocationInfo();

+  newspace_evacuation_candidates_.Clear();
+  NewSpacePageIterator it(from_bottom, from_top);
+  while (it.has_next()) {
+    newspace_evacuation_candidates_.Add(it.next());
+  }
+}
+
+
+HashMap* MarkCompactCollector::EvacuateNewSpaceInParallel() {
+  HashMap* local_pretenuring_feedback = new HashMap(
+      HashMap::PointersMatch, kInitialLocalPretenuringFeedbackCapacity);
+  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_,
+                                            local_pretenuring_feedback);
   // First pass: traverse all objects in inactive semispace, remove marks,
   // migrate live objects and write forwarding addresses. This stage puts
   // new entries in the store buffer and may cause some pages to be marked
   // scan-on-scavenge.
-  NewSpacePageIterator it(from_bottom, from_top);
-  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_);
-  while (it.has_next()) {
-    NewSpacePage* p = it.next();
+  for (int i = 0; i < newspace_evacuation_candidates_.length(); i++) {
+    NewSpacePage* p =
+        reinterpret_cast<NewSpacePage*>(newspace_evacuation_candidates_[i]);
     bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits);
     USE(ok);
     DCHECK(ok);
   }
   heap_->IncrementPromotedObjectsSize(
       static_cast<int>(new_space_visitor.promoted_size()));
   heap_->IncrementSemiSpaceCopiedObjectSize(
       static_cast<int>(new_space_visitor.semispace_copied_size()));
   heap_->IncrementYoungSurvivorsCounter(
       static_cast<int>(new_space_visitor.promoted_size()) +
       static_cast<int>(new_space_visitor.semispace_copied_size()));
-  new_space->set_age_mark(new_space->top());
+  return local_pretenuring_feedback;
 }

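EvacuateNewSpace() is split above into a single-threaded prologue, which flips the semispaces and snapshots the from-space pages into newspace_evacuation_candidates_, and a page-visiting phase that only iterates that snapshot and therefore has the shape of a parallel task: it owns its feedback map and hands it back to the caller. A rough sketch of that two-phase shape, with simplified stand-in types rather than V8's API:

    #include <memory>
    #include <unordered_map>
    #include <vector>

    struct AllocationSite {};
    struct Page {};
    using Feedback = std::unordered_map<AllocationSite*, int>;

    class Collector {
     public:
      // Phase 1, single-threaded: record which pages to evacuate while
      // the space is still in a consistent, iterable state.
      void Prologue(const std::vector<Page*>& from_space_pages) {
        candidates_ = from_space_pages;
      }

      // Phase 2, parallelizable: walk only the snapshot; the feedback
      // map is created here and its ownership passes to the caller.
      std::unique_ptr<Feedback> EvacuateInParallel() {
        auto feedback = std::make_unique<Feedback>();
        for (Page* page : candidates_) {
          VisitLiveObjects(page, feedback.get());
        }
        return feedback;
      }

     private:
      // Placeholder for evacuating the live objects on a page while
      // recording pretenuring feedback (see the visitor sketch above).
      void VisitLiveObjects(Page* /*page*/, Feedback* /*feedback*/) {}

      std::vector<Page*> candidates_;
    };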
 void MarkCompactCollector::AddEvacuationSlotsBufferSynchronized(
     SlotsBuffer* evacuation_slots_buffer) {
   base::LockGuard<base::Mutex> lock_guard(&evacuation_slots_buffers_mutex_);
   evacuation_slots_buffers_.Add(evacuation_slots_buffer);
 }

(...skipping 447 matching lines...)
       }
     }
   }
 }


 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_EVACUATE);
   Heap::RelocationLock relocation_lock(heap());

+  HashMap* local_pretenuring_feedback = nullptr;
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_EVACUATE_NEW_SPACE);
     EvacuationScope evacuation_scope(this);
-    EvacuateNewSpace();
+    EvacuateNewSpacePrologue();
+    local_pretenuring_feedback = EvacuateNewSpaceInParallel();
+    heap_->new_space()->set_age_mark(heap_->new_space()->top());
   }

   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_EVACUATE_CANDIDATES);
     EvacuationScope evacuation_scope(this);
     EvacuatePagesInParallel();
   }

+  {
+    heap_->MergeAllocationSitePretenuringFeedback(*local_pretenuring_feedback);
+    delete local_pretenuring_feedback;
+  }
+
   UpdatePointersAfterEvacuation();

   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_EVACUATE_CLEAN_UP);
     // After updating all pointers, we can finally sweep the aborted pages,
     // effectively overriding any forward pointers.
     SweepAbortedPages();

     // EvacuateNewSpaceAndCandidates iterates over new space objects and for
(...skipping 488 matching lines...)
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {
       RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
       RecordRelocSlot(&rinfo, target);
     }
   }
 }

 }  // namespace internal
 }  // namespace v8
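The end of the flow is visible in EvacuateNewSpaceAndCandidates() above: the map returned by EvacuateNewSpaceInParallel() is merged into the heap's global pretenuring state once evacuation is done, then deleted. A hedged sketch of what such a merge step amounts to, again with stand-in types; the real merge goes through V8's Heap::MergeAllocationSitePretenuringFeedback and updates AllocationSite counters:

    #include <unordered_map>

    struct AllocationSite {};
    using Feedback = std::unordered_map<AllocationSite*, int>;

    struct Heap {
      Feedback global_pretenuring_feedback;

      // Cold path, once per GC on the main thread: fold the task-local
      // counters into the table that pretenuring decisions read.
      void MergeAllocationSitePretenuringFeedback(const Feedback& local) {
        for (const auto& entry : local) {
          global_pretenuring_feedback[entry.first] += entry.second;
        }
      }
    };

Keeping the merge out of the hot path is what lets the per-object recording stay lock-free while still producing a single coherent feedback table.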
