Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 2026633003: Reland "[heap] Fine-grained JSArrayBuffer tracking" (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Tests should not assert for promotion decisions but only for tracking. (Created 4 years, 6 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
(...skipping 854 matching lines...)
   // marking cycle. We did not collect any slots.
   if (!FLAG_never_compact && !was_marked_incrementally_) {
     StartCompaction(NON_INCREMENTAL_COMPACTION);
   }

   PagedSpaces spaces(heap());
   for (PagedSpace* space = spaces.next(); space != NULL;
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
+  heap()->account_amount_of_external_allocated_freed_memory();

 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
 }


 void MarkCompactCollector::Finish() {
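Note: account_amount_of_external_allocated_freed_memory() is a Heap member defined outside this file. The added call folds external (ArrayBuffer backing store) memory that the tracker released back into the heap's external-memory accounting before the collection proceeds. The following standalone sketch only illustrates that idea; the class and member names are made up for the example and do not mirror the actual Heap fields.

#include <atomic>
#include <cstdint>

// Illustrative model: external memory owed to the embedder, plus a counter of
// bytes freed since the last GC cycle.
class ExternalMemoryAccounting {
 public:
  void Increase(int64_t bytes) { allocated_ += bytes; }
  void NotifyFreed(int64_t bytes) { freed_ += bytes; }

  // Roughly what "account the freed external memory" means here: subtract
  // whatever was freed since the last cycle and reset that counter.
  void AccountFreed() { allocated_ -= freed_.exchange(0); }

  int64_t allocated() const { return allocated_; }

 private:
  std::atomic<int64_t> allocated_{0};
  std::atomic<int64_t> freed_{0};
};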
(...skipping 858 matching lines...)
         local_pretenuring_feedback_(local_pretenuring_feedback) {}

   inline bool Visit(HeapObject* object) override {
     heap_->UpdateAllocationSite<Heap::kCached>(object,
                                                local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted<DEFAULT_PROMOTION>(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
-      // If we end up needing more special cases, we should factor this out.
-      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
-        heap_->array_buffer_tracker()->Promote(
-            JSArrayBuffer::cast(target_object));
-      }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
-    }
     semispace_copied_size_ += size;
     return true;
   }

   intptr_t promoted_size() { return promoted_size_; }
   intptr_t semispace_copied_size() { return semispace_copied_size_; }

  private:
   enum NewSpaceAllocationMode {
     kNonstickyBailoutOldSpace,
(...skipping 104 matching lines...)
       : heap_(heap), promoted_size_(0) {}

   static void TryMoveToOldSpace(Page* page, PagedSpace* owner) {
     if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) {
       Page* new_page = Page::ConvertNewToOld(page, owner);
       new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
     }
   }

   inline bool Visit(HeapObject* object) {
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object));
-    }
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBodyFast(&visitor);
     promoted_size_ += object->Size();
     return true;
   }

   intptr_t promoted_size() { return promoted_size_; }

  private:
   Heap* heap_;
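Note: the two hunks above remove the per-object calls into the heap-global array buffer tracker (Promote() when an object is evacuated to old space, MarkLive() when it is copied within new space). With fine-grained tracking, buffers are instead registered with the page they live on and are dealt with when that page is evacuated or swept, so these visitors no longer need to special-case JSArrayBuffer. A minimal standalone sketch of a page-local tracker; the names are illustrative and do not mirror the actual V8 classes.

#include <cstddef>
#include <unordered_map>

// Sketch: each page owns a small tracker, so array buffers can be handled
// exactly when their page is processed instead of via one global registry.
class PageLocalTracker {
 public:
  void Add(void* buffer, size_t backing_store_length) {
    buffers_[buffer] = backing_store_length;
  }
  void Remove(void* buffer) { buffers_.erase(buffer); }
  bool IsTracked(void* buffer) const { return buffers_.count(buffer) != 0; }
  size_t NumberOfTrackedBuffers() const { return buffers_.size(); }

 private:
  // Buffer address mapped to its backing store length, which is needed for
  // external memory accounting when the buffer dies.
  std::unordered_map<void*, size_t> buffers_;
};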
(...skipping 1238 matching lines...)
   }
   return success;
 }

 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
   bool result = false;
   DCHECK(page->SweepingDone());
   switch (ComputeEvacuationMode(page)) {
     case kObjectsNewToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
+      ArrayBufferTracker::ProcessBuffers(
+          page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
       DCHECK(result);
       USE(result);
       break;
     case kPageNewToOld:
       result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
+      // ArrayBufferTracker will be updated during sweeping.
       DCHECK(result);
       USE(result);
       break;
     case kObjectsOldToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
       if (!result) {
         // Aborted compaction page. We have to record slots here, since we might
         // not have recorded them in first place.
         // Note: We mark the page as aborted here to be able to record slots
         // for code objects in |RecordMigratedSlotVisitor|.
         page->SetFlag(Page::COMPACTION_WAS_ABORTED);
         EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
         result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
+        ArrayBufferTracker::ProcessBuffers(
+            page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
         DCHECK(result);
         USE(result);
         // We need to return failure here to indicate that we want this page
         // added to the sweeper.
         return false;
       }
+      ArrayBufferTracker::ProcessBuffers(
+          page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
+
       break;
     default:
       UNREACHABLE();
   }
   return result;
 }

 void MarkCompactCollector::Evacuator::Finalize() {
   heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
   heap()->code_space()->MergeCompactionSpace(
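Note: ArrayBufferTracker::ProcessBuffers(page, mode) is not defined in this file. Judging by the two modes used above, it walks the buffers tracked on the page, re-registers those whose objects were forwarded during evacuation, and then either drops the rest (kUpdateForwardedRemoveOthers, for fully evacuated new-space pages) or keeps them (kUpdateForwardedKeepOthers, for aborted compaction pages whose objects stay in place). A standalone sketch of that logic, with callbacks standing in for the V8 internals.

#include <functional>
#include <unordered_set>

// Illustrative model of the two processing modes used in EvacuatePage().
enum class ProcessingMode {
  kUpdateForwardedRemoveOthers,  // fully evacuated page: unforwarded buffers are dead
  kUpdateForwardedKeepOthers,    // aborted compaction page: unforwarded buffers stay
};

// forwarding() returns the buffer's new location after evacuation, or nullptr
// if the object was not moved.
void ProcessPageBuffers(std::unordered_set<void*>& tracked,
                        const std::function<void*(void*)>& forwarding,
                        const std::function<void(void*)>& retrack_at_new_location,
                        const std::function<void(void*)>& free_backing_store,
                        ProcessingMode mode) {
  for (auto it = tracked.begin(); it != tracked.end();) {
    void* new_location = forwarding(*it);
    if (new_location != nullptr) {
      // The buffer survived and moved: track it where it now lives.
      retrack_at_new_location(new_location);
      it = tracked.erase(it);
    } else if (mode == ProcessingMode::kUpdateForwardedRemoveOthers) {
      // Not forwarded on a fully evacuated page: the buffer is dead, drop it.
      free_backing_store(*it);
      it = tracked.erase(it);
    } else {
      // kUpdateForwardedKeepOthers: the object was not moved, keep tracking it.
      ++it;
    }
  }
}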
(...skipping 159 matching lines...)
           MarkCompactCollector::Sweeper::FreeSpaceTreatmentMode free_space_mode>
 int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
                                             ObjectVisitor* v) {
   DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
   DCHECK(!p->IsFlagSet(Page::BLACK_PAGE));
   DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST,
             space->identity() == CODE_SPACE);
   DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));
   DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY);

+  // Before we sweep objects on the page, we free dead array buffers which
+  // requires valid mark bits.
+  ArrayBufferTracker::FreeDead(p);
+
   Address free_start = p->area_start();
   DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);

   // If we use the skip list for code space pages, we have to lock the skip
   // list because it could be accessed concurrently by the runtime or the
   // deoptimizer.
   SkipList* skip_list = p->skip_list();
   if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) {
     skip_list->Clear();
   }
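Note: ArrayBufferTracker::FreeDead(p) is also defined outside this file. The added comment states the ordering constraint: the mark bits are still valid at this point, so buffers whose objects are unmarked can release their external backing stores before the sweeper turns those objects into free space. A standalone sketch, with a predicate standing in for the mark-bit lookup.

#include <cstddef>
#include <functional>
#include <unordered_map>

// Sketch of freeing dead array buffers on a single page before it is swept.
void FreeDeadBuffers(
    std::unordered_map<void*, size_t>& tracked,  // buffer -> backing store length
    const std::function<bool(void*)>& is_marked,
    const std::function<void(void*, size_t)>& free_backing_store) {
  for (auto it = tracked.begin(); it != tracked.end();) {
    if (!is_marked(it->first)) {
      // Unmarked buffer: release its external memory and stop tracking it.
      free_backing_store(it->first, it->second);
      it = tracked.erase(it);
    } else {
      ++it;
    }
  }
}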
(...skipping 179 matching lines...)
       // because root iteration traverses the stack and might have to find
       // code objects from non-updated pc pointing into evacuation candidate.
       SkipList* list = p->skip_list();
       if (list != NULL) list->Clear();
       if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
         sweeper().AddLatePage(p->owner()->identity(), p);
         p->ClearFlag(Page::COMPACTION_WAS_ABORTED);
       }
     }

-    // EvacuateNewSpaceAndCandidates iterates over new space objects and for
-    // ArrayBuffers either re-registers them as live or promotes them. This is
-    // needed to properly free them.
-    heap()->array_buffer_tracker()->FreeDead(false);
-
     // Deallocate evacuated candidate pages.
     ReleaseEvacuationCandidates();
   }

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) {
     VerifyEvacuation(heap());
   }
 #endif
 }
(...skipping 405 matching lines...)
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {
       RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
       RecordRelocSlot(host, &rinfo, target);
     }
   }
 }

 }  // namespace internal
 }  // namespace v8