Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(34)

Side by Side Diff: src/heap/mark-compact.cc

Issue 2036643002: Reland "[heap] Fine-grained JSArrayBuffer tracking" (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Remove unneeded locking to avoid lock-inversion-order errors in TSAN Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/heap.cc ('k') | src/heap/objects-visiting.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 854 matching lines...) Expand 10 before | Expand all | Expand 10 after
865 // marking cycle. We did not collect any slots. 865 // marking cycle. We did not collect any slots.
866 if (!FLAG_never_compact && !was_marked_incrementally_) { 866 if (!FLAG_never_compact && !was_marked_incrementally_) {
867 StartCompaction(NON_INCREMENTAL_COMPACTION); 867 StartCompaction(NON_INCREMENTAL_COMPACTION);
868 } 868 }
869 869
870 PagedSpaces spaces(heap()); 870 PagedSpaces spaces(heap());
871 for (PagedSpace* space = spaces.next(); space != NULL; 871 for (PagedSpace* space = spaces.next(); space != NULL;
872 space = spaces.next()) { 872 space = spaces.next()) {
873 space->PrepareForMarkCompact(); 873 space->PrepareForMarkCompact();
874 } 874 }
875 heap()->account_amount_of_external_allocated_freed_memory();
875 876
876 #ifdef VERIFY_HEAP 877 #ifdef VERIFY_HEAP
877 if (!was_marked_incrementally_ && FLAG_verify_heap) { 878 if (!was_marked_incrementally_ && FLAG_verify_heap) {
878 VerifyMarkbitsAreClean(); 879 VerifyMarkbitsAreClean();
879 } 880 }
880 #endif 881 #endif
881 } 882 }
882 883
883 884
884 void MarkCompactCollector::Finish() { 885 void MarkCompactCollector::Finish() {
(...skipping 859 matching lines...) Expand 10 before | Expand all | Expand 10 after
1744 local_pretenuring_feedback_(local_pretenuring_feedback) {} 1745 local_pretenuring_feedback_(local_pretenuring_feedback) {}
1745 1746
1746 inline bool Visit(HeapObject* object) override { 1747 inline bool Visit(HeapObject* object) override {
1747 heap_->UpdateAllocationSite<Heap::kCached>(object, 1748 heap_->UpdateAllocationSite<Heap::kCached>(object,
1748 local_pretenuring_feedback_); 1749 local_pretenuring_feedback_);
1749 int size = object->Size(); 1750 int size = object->Size();
1750 HeapObject* target_object = nullptr; 1751 HeapObject* target_object = nullptr;
1751 if (heap_->ShouldBePromoted<DEFAULT_PROMOTION>(object->address(), size) && 1752 if (heap_->ShouldBePromoted<DEFAULT_PROMOTION>(object->address(), size) &&
1752 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, 1753 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
1753 &target_object)) { 1754 &target_object)) {
1754 // If we end up needing more special cases, we should factor this out.
1755 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
1756 heap_->array_buffer_tracker()->Promote(
1757 JSArrayBuffer::cast(target_object));
1758 }
1759 promoted_size_ += size; 1755 promoted_size_ += size;
1760 return true; 1756 return true;
1761 } 1757 }
1762 HeapObject* target = nullptr; 1758 HeapObject* target = nullptr;
1763 AllocationSpace space = AllocateTargetObject(object, &target); 1759 AllocationSpace space = AllocateTargetObject(object, &target);
1764 MigrateObject(HeapObject::cast(target), object, size, space); 1760 MigrateObject(HeapObject::cast(target), object, size, space);
1765 if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
1766 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
1767 }
1768 semispace_copied_size_ += size; 1761 semispace_copied_size_ += size;
1769 return true; 1762 return true;
1770 } 1763 }
1771 1764
1772 intptr_t promoted_size() { return promoted_size_; } 1765 intptr_t promoted_size() { return promoted_size_; }
1773 intptr_t semispace_copied_size() { return semispace_copied_size_; } 1766 intptr_t semispace_copied_size() { return semispace_copied_size_; }
1774 1767
1775 private: 1768 private:
1776 enum NewSpaceAllocationMode { 1769 enum NewSpaceAllocationMode {
1777 kNonstickyBailoutOldSpace, 1770 kNonstickyBailoutOldSpace,
(...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after
1882 : heap_(heap), promoted_size_(0) {} 1875 : heap_(heap), promoted_size_(0) {}
1883 1876
1884 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) { 1877 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) {
1885 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) { 1878 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) {
1886 Page* new_page = Page::ConvertNewToOld(page, owner); 1879 Page* new_page = Page::ConvertNewToOld(page, owner);
1887 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); 1880 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
1888 } 1881 }
1889 } 1882 }
1890 1883
1891 inline bool Visit(HeapObject* object) { 1884 inline bool Visit(HeapObject* object) {
1892 if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
1893 object->GetHeap()->array_buffer_tracker()->Promote(
1894 JSArrayBuffer::cast(object));
1895 }
1896 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); 1885 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
1897 object->IterateBodyFast(&visitor); 1886 object->IterateBodyFast(&visitor);
1898 promoted_size_ += object->Size(); 1887 promoted_size_ += object->Size();
1899 return true; 1888 return true;
1900 } 1889 }
1901 1890
1902 intptr_t promoted_size() { return promoted_size_; } 1891 intptr_t promoted_size() { return promoted_size_; }
1903 1892
1904 private: 1893 private:
1905 Heap* heap_; 1894 Heap* heap_;
(...skipping 1254 matching lines...) Expand 10 before | Expand all | Expand 10 after
3160 } 3149 }
3161 return success; 3150 return success;
3162 } 3151 }
3163 3152
3164 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { 3153 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
3165 bool result = false; 3154 bool result = false;
3166 DCHECK(page->SweepingDone()); 3155 DCHECK(page->SweepingDone());
3167 switch (ComputeEvacuationMode(page)) { 3156 switch (ComputeEvacuationMode(page)) {
3168 case kObjectsNewToOld: 3157 case kObjectsNewToOld:
3169 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_); 3158 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
3159 ArrayBufferTracker::ProcessBuffers(
3160 page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
3170 DCHECK(result); 3161 DCHECK(result);
3171 USE(result); 3162 USE(result);
3172 break; 3163 break;
3173 case kPageNewToOld: 3164 case kPageNewToOld:
3174 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor); 3165 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
3166 // ArrayBufferTracker will be updated during sweeping.
3175 DCHECK(result); 3167 DCHECK(result);
3176 USE(result); 3168 USE(result);
3177 break; 3169 break;
3178 case kObjectsOldToOld: 3170 case kObjectsOldToOld:
3179 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_); 3171 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
3180 if (!result) { 3172 if (!result) {
3181 // Aborted compaction page. We have to record slots here, since we might 3173 // Aborted compaction page. We have to record slots here, since we might
3182 // not have recorded them in the first place. 3174 // not have recorded them in the first place.
3183 // Note: We mark the page as aborted here to be able to record slots 3175 // Note: We mark the page as aborted here to be able to record slots
3184 // for code objects in |RecordMigratedSlotVisitor|. 3176 // for code objects in |RecordMigratedSlotVisitor|.
3185 page->SetFlag(Page::COMPACTION_WAS_ABORTED); 3177 page->SetFlag(Page::COMPACTION_WAS_ABORTED);
3186 EvacuateRecordOnlyVisitor record_visitor(collector_->heap()); 3178 EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
3187 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor); 3179 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
3180 ArrayBufferTracker::ProcessBuffers(
3181 page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
3188 DCHECK(result); 3182 DCHECK(result);
3189 USE(result); 3183 USE(result);
3190 // We need to return failure here to indicate that we want this page 3184 // We need to return failure here to indicate that we want this page
3191 // added to the sweeper. 3185 // added to the sweeper.
3192 return false; 3186 return false;
3193 } 3187 }
3188 ArrayBufferTracker::ProcessBuffers(
3189 page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
3190
3194 break; 3191 break;
3195 default: 3192 default:
3196 UNREACHABLE(); 3193 UNREACHABLE();
3197 } 3194 }
3198 return result; 3195 return result;
3199 } 3196 }
3200 3197
3201 void MarkCompactCollector::Evacuator::Finalize() { 3198 void MarkCompactCollector::Evacuator::Finalize() {
3202 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); 3199 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
3203 heap()->code_space()->MergeCompactionSpace( 3200 heap()->code_space()->MergeCompactionSpace(
(...skipping 159 matching lines...) Expand 10 before | Expand all | Expand 10 after
3363 MarkCompactCollector::Sweeper::FreeSpaceTreatmentMode free_space_mode> 3360 MarkCompactCollector::Sweeper::FreeSpaceTreatmentMode free_space_mode>
3364 int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p, 3361 int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
3365 ObjectVisitor* v) { 3362 ObjectVisitor* v) {
3366 DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone()); 3363 DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
3367 DCHECK(!p->IsFlagSet(Page::BLACK_PAGE)); 3364 DCHECK(!p->IsFlagSet(Page::BLACK_PAGE));
3368 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, 3365 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST,
3369 space->identity() == CODE_SPACE); 3366 space->identity() == CODE_SPACE);
3370 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); 3367 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));
3371 DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY); 3368 DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY);
3372 3369
3370 // Before we sweep objects on the page, we free dead array buffers which
3371 // requires valid mark bits.
3372 ArrayBufferTracker::FreeDead(p);
3373
3373 Address free_start = p->area_start(); 3374 Address free_start = p->area_start();
3374 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); 3375 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
3375 3376
3376 // If we use the skip list for code space pages, we have to lock the skip 3377 // If we use the skip list for code space pages, we have to lock the skip
3377 // list because it could be accessed concurrently by the runtime or the 3378 // list because it could be accessed concurrently by the runtime or the
3378 // deoptimizer. 3379 // deoptimizer.
3379 SkipList* skip_list = p->skip_list(); 3380 SkipList* skip_list = p->skip_list();
3380 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { 3381 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) {
3381 skip_list->Clear(); 3382 skip_list->Clear();
3382 } 3383 }
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after
3562 // because root iteration traverses the stack and might have to find 3563 // because root iteration traverses the stack and might have to find
3563 // code objects from non-updated pc pointing into evacuation candidate. 3564 // code objects from non-updated pc pointing into evacuation candidate.
3564 SkipList* list = p->skip_list(); 3565 SkipList* list = p->skip_list();
3565 if (list != NULL) list->Clear(); 3566 if (list != NULL) list->Clear();
3566 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { 3567 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
3567 sweeper().AddLatePage(p->owner()->identity(), p); 3568 sweeper().AddLatePage(p->owner()->identity(), p);
3568 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); 3569 p->ClearFlag(Page::COMPACTION_WAS_ABORTED);
3569 } 3570 }
3570 } 3571 }
3571 3572
3572 // EvacuateNewSpaceAndCandidates iterates over new space objects and for
3573 // ArrayBuffers either re-registers them as live or promotes them. This is
3574 // needed to properly free them.
3575 heap()->array_buffer_tracker()->FreeDead(false);
3576
3577 // Deallocate evacuated candidate pages. 3573 // Deallocate evacuated candidate pages.
3578 ReleaseEvacuationCandidates(); 3574 ReleaseEvacuationCandidates();
3579 } 3575 }
3580 3576
3581 #ifdef VERIFY_HEAP 3577 #ifdef VERIFY_HEAP
3582 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { 3578 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) {
3583 VerifyEvacuation(heap()); 3579 VerifyEvacuation(heap());
3584 } 3580 }
3585 #endif 3581 #endif
3586 } 3582 }
(...skipping 315 matching lines...) Expand 10 before | Expand all | Expand 10 after
3902 continue; 3898 continue;
3903 } 3899 }
3904 3900
3905 // One unused page is kept, all further are released before sweeping them. 3901 // One unused page is kept, all further are released before sweeping them.
3906 if (p->LiveBytes() == 0) { 3902 if (p->LiveBytes() == 0) {
3907 if (unused_page_present) { 3903 if (unused_page_present) {
3908 if (FLAG_gc_verbose) { 3904 if (FLAG_gc_verbose) {
3909 PrintIsolate(isolate(), "sweeping: released page: %p", 3905 PrintIsolate(isolate(), "sweeping: released page: %p",
3910 static_cast<void*>(p)); 3906 static_cast<void*>(p));
3911 } 3907 }
3908 ArrayBufferTracker::FreeAll(p);
3912 space->ReleasePage(p); 3909 space->ReleasePage(p);
3913 continue; 3910 continue;
3914 } 3911 }
3915 unused_page_present = true; 3912 unused_page_present = true;
3916 } 3913 }
3917 3914
3918 sweeper().AddPage(space->identity(), p); 3915 sweeper().AddPage(space->identity(), p);
3919 will_be_swept++; 3916 will_be_swept++;
3920 } 3917 }
3921 3918
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after
3996 MarkBit mark_bit = Marking::MarkBitFrom(host); 3993 MarkBit mark_bit = Marking::MarkBitFrom(host);
3997 if (Marking::IsBlack(mark_bit)) { 3994 if (Marking::IsBlack(mark_bit)) {
3998 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 3995 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
3999 RecordRelocSlot(host, &rinfo, target); 3996 RecordRelocSlot(host, &rinfo, target);
4000 } 3997 }
4001 } 3998 }
4002 } 3999 }
4003 4000
4004 } // namespace internal 4001 } // namespace internal
4005 } // namespace v8 4002 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/heap.cc ('k') | src/heap/objects-visiting.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698