Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1146)

Side by Side Diff: src/heap/mark-compact.cc

Issue 2028503003: Revert of Reland "[heap] Fine-grained JSArrayBuffer tracking" (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/incremental-marking.cc ('k') | src/heap/objects-visiting-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
11 #include "src/compilation-cache.h" 11 #include "src/compilation-cache.h"
12 #include "src/deoptimizer.h" 12 #include "src/deoptimizer.h"
13 #include "src/execution.h" 13 #include "src/execution.h"
14 #include "src/frames-inl.h" 14 #include "src/frames-inl.h"
15 #include "src/gdb-jit.h" 15 #include "src/gdb-jit.h"
16 #include "src/global-handles.h" 16 #include "src/global-handles.h"
17 #include "src/heap/array-buffer-tracker-inl.h" 17 #include "src/heap/array-buffer-tracker.h"
18 #include "src/heap/gc-tracer.h" 18 #include "src/heap/gc-tracer.h"
19 #include "src/heap/incremental-marking.h" 19 #include "src/heap/incremental-marking.h"
20 #include "src/heap/mark-compact-inl.h" 20 #include "src/heap/mark-compact-inl.h"
21 #include "src/heap/object-stats.h" 21 #include "src/heap/object-stats.h"
22 #include "src/heap/objects-visiting-inl.h" 22 #include "src/heap/objects-visiting-inl.h"
23 #include "src/heap/objects-visiting.h" 23 #include "src/heap/objects-visiting.h"
24 #include "src/heap/page-parallel-job.h" 24 #include "src/heap/page-parallel-job.h"
25 #include "src/heap/spaces-inl.h" 25 #include "src/heap/spaces-inl.h"
26 #include "src/ic/ic.h" 26 #include "src/ic/ic.h"
27 #include "src/ic/stub-cache.h" 27 #include "src/ic/stub-cache.h"
(...skipping 837 matching lines...) Expand 10 before | Expand all | Expand 10 after
865 // marking cycle. We did not collect any slots. 865 // marking cycle. We did not collect any slots.
866 if (!FLAG_never_compact && !was_marked_incrementally_) { 866 if (!FLAG_never_compact && !was_marked_incrementally_) {
867 StartCompaction(NON_INCREMENTAL_COMPACTION); 867 StartCompaction(NON_INCREMENTAL_COMPACTION);
868 } 868 }
869 869
870 PagedSpaces spaces(heap()); 870 PagedSpaces spaces(heap());
871 for (PagedSpace* space = spaces.next(); space != NULL; 871 for (PagedSpace* space = spaces.next(); space != NULL;
872 space = spaces.next()) { 872 space = spaces.next()) {
873 space->PrepareForMarkCompact(); 873 space->PrepareForMarkCompact();
874 } 874 }
875 if (!was_marked_incrementally_) {
876 ArrayBufferTracker::ResetTrackersInOldSpace(heap_);
877 }
878 heap()->account_amount_of_external_allocated_freed_memory();
879 875
880 #ifdef VERIFY_HEAP 876 #ifdef VERIFY_HEAP
881 if (!was_marked_incrementally_ && FLAG_verify_heap) { 877 if (!was_marked_incrementally_ && FLAG_verify_heap) {
882 VerifyMarkbitsAreClean(); 878 VerifyMarkbitsAreClean();
883 } 879 }
884 #endif 880 #endif
885 } 881 }
886 882
887 883
888 void MarkCompactCollector::Finish() { 884 void MarkCompactCollector::Finish() {
(...skipping 835 matching lines...) Expand 10 before | Expand all | Expand 10 after
1724 local_pretenuring_feedback_(local_pretenuring_feedback) {} 1720 local_pretenuring_feedback_(local_pretenuring_feedback) {}
1725 1721
1726 inline bool Visit(HeapObject* object) override { 1722 inline bool Visit(HeapObject* object) override {
1727 heap_->UpdateAllocationSite<Heap::kCached>(object, 1723 heap_->UpdateAllocationSite<Heap::kCached>(object,
1728 local_pretenuring_feedback_); 1724 local_pretenuring_feedback_);
1729 int size = object->Size(); 1725 int size = object->Size();
1730 HeapObject* target_object = nullptr; 1726 HeapObject* target_object = nullptr;
1731 if (heap_->ShouldBePromoted(object->address(), size) && 1727 if (heap_->ShouldBePromoted(object->address(), size) &&
1732 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, 1728 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
1733 &target_object)) { 1729 &target_object)) {
1730 // If we end up needing more special cases, we should factor this out.
1731 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
1732 heap_->array_buffer_tracker()->Promote(
1733 JSArrayBuffer::cast(target_object));
1734 }
1734 promoted_size_ += size; 1735 promoted_size_ += size;
1735 return true; 1736 return true;
1736 } 1737 }
1737 HeapObject* target = nullptr; 1738 HeapObject* target = nullptr;
1738 AllocationSpace space = AllocateTargetObject(object, &target); 1739 AllocationSpace space = AllocateTargetObject(object, &target);
1739 MigrateObject(HeapObject::cast(target), object, size, space); 1740 MigrateObject(HeapObject::cast(target), object, size, space);
1741 if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
1742 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
1743 }
1740 semispace_copied_size_ += size; 1744 semispace_copied_size_ += size;
1741 return true; 1745 return true;
1742 } 1746 }
1743 1747
1744 intptr_t promoted_size() { return promoted_size_; } 1748 intptr_t promoted_size() { return promoted_size_; }
1745 intptr_t semispace_copied_size() { return semispace_copied_size_; } 1749 intptr_t semispace_copied_size() { return semispace_copied_size_; }
1746 1750
1747 private: 1751 private:
1748 enum NewSpaceAllocationMode { 1752 enum NewSpaceAllocationMode {
1749 kNonstickyBailoutOldSpace, 1753 kNonstickyBailoutOldSpace,
(...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after
1854 : heap_(heap), promoted_size_(0) {} 1858 : heap_(heap), promoted_size_(0) {}
1855 1859
1856 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) { 1860 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) {
1857 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) { 1861 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) {
1858 Page* new_page = Page::ConvertNewToOld(page, owner); 1862 Page* new_page = Page::ConvertNewToOld(page, owner);
1859 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); 1863 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
1860 } 1864 }
1861 } 1865 }
1862 1866
1863 inline bool Visit(HeapObject* object) { 1867 inline bool Visit(HeapObject* object) {
1868 if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
1869 object->GetHeap()->array_buffer_tracker()->Promote(
1870 JSArrayBuffer::cast(object));
1871 }
1864 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); 1872 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
1865 object->IterateBodyFast(&visitor); 1873 object->IterateBodyFast(&visitor);
1866 promoted_size_ += object->Size(); 1874 promoted_size_ += object->Size();
1867 return true; 1875 return true;
1868 } 1876 }
1869 1877
1870 intptr_t promoted_size() { return promoted_size_; } 1878 intptr_t promoted_size() { return promoted_size_; }
1871 1879
1872 private: 1880 private:
1873 Heap* heap_; 1881 Heap* heap_;
(...skipping 20 matching lines...) Expand all
1894 }; 1902 };
1895 1903
1896 class MarkCompactCollector::EvacuateRecordOnlyVisitor final 1904 class MarkCompactCollector::EvacuateRecordOnlyVisitor final
1897 : public MarkCompactCollector::HeapObjectVisitor { 1905 : public MarkCompactCollector::HeapObjectVisitor {
1898 public: 1906 public:
1899 explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {} 1907 explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {}
1900 1908
1901 inline bool Visit(HeapObject* object) { 1909 inline bool Visit(HeapObject* object) {
1902 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); 1910 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
1903 object->IterateBody(&visitor); 1911 object->IterateBody(&visitor);
1904 if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
1905 ArrayBufferTracker::MarkLive(heap_, JSArrayBuffer::cast(object));
1906 }
1907 return true; 1912 return true;
1908 } 1913 }
1909 1914
1910 private: 1915 private:
1911 Heap* heap_; 1916 Heap* heap_;
1912 }; 1917 };
1913 1918
1914 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) { 1919 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
1915 PageIterator it(space); 1920 PageIterator it(space);
1916 while (it.has_next()) { 1921 while (it.has_next()) {
(...skipping 1198 matching lines...) Expand 10 before | Expand all | Expand 10 after
3115 } 3120 }
3116 return success; 3121 return success;
3117 } 3122 }
3118 3123
3119 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { 3124 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
3120 bool result = false; 3125 bool result = false;
3121 DCHECK(page->SweepingDone()); 3126 DCHECK(page->SweepingDone());
3122 switch (ComputeEvacuationMode(page)) { 3127 switch (ComputeEvacuationMode(page)) {
3123 case kObjectsNewToOld: 3128 case kObjectsNewToOld:
3124 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_); 3129 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
3125 ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
3126 LocalArrayBufferTracker::kForwardingPointer>(page);
3127 DCHECK(result); 3130 DCHECK(result);
3128 USE(result); 3131 USE(result);
3129 break; 3132 break;
3130 case kPageNewToOld: 3133 case kPageNewToOld:
3131 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor); 3134 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
3132 // ArrayBufferTracker will be updated during sweeping.
3133 DCHECK(result); 3135 DCHECK(result);
3134 USE(result); 3136 USE(result);
3135 break; 3137 break;
3136 case kObjectsOldToOld: 3138 case kObjectsOldToOld:
3137 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_); 3139 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
3138 if (!result) { 3140 if (!result) {
3139 // Aborted compaction page. We have to record slots here, since we might 3141 // Aborted compaction page. We have to record slots here, since we might
3140 // not have recorded them in the first place. 3142 // not have recorded them in the first place.
3141 // Note: We mark the page as aborted here to be able to record slots 3143 // Note: We mark the page as aborted here to be able to record slots
3142 // for code objects in |RecordMigratedSlotVisitor|. 3144 // for code objects in |RecordMigratedSlotVisitor|.
3143 page->SetFlag(Page::COMPACTION_WAS_ABORTED); 3145 page->SetFlag(Page::COMPACTION_WAS_ABORTED);
3144 EvacuateRecordOnlyVisitor record_visitor(collector_->heap()); 3146 EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
3145 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor); 3147 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
3146 ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
3147 LocalArrayBufferTracker::kForwardingPointerOrMarkBit>(page);
3148 DCHECK(result); 3148 DCHECK(result);
3149 USE(result); 3149 USE(result);
3150 // We need to return failure here to indicate that we want this page 3150 // We need to return failure here to indicate that we want this page
3151 // added to the sweeper. 3151 // added to the sweeper.
3152 return false; 3152 return false;
3153 } 3153 }
3154 ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
3155 LocalArrayBufferTracker::kForwardingPointer>(page);
3156 break; 3154 break;
3157 default: 3155 default:
3158 UNREACHABLE(); 3156 UNREACHABLE();
3159 } 3157 }
3160 return result; 3158 return result;
3161 } 3159 }
3162 3160
3163 void MarkCompactCollector::Evacuator::Finalize() { 3161 void MarkCompactCollector::Evacuator::Finalize() {
3164 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); 3162 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
3165 heap()->code_space()->MergeCompactionSpace( 3163 heap()->code_space()->MergeCompactionSpace(
(...skipping 215 matching lines...) Expand 10 before | Expand all | Expand 10 after
3381 Bitmap::Clear(p); 3379 Bitmap::Clear(p);
3382 3380
3383 if (free_start != p->area_end()) { 3381 if (free_start != p->area_end()) {
3384 int size = static_cast<int>(p->area_end() - free_start); 3382 int size = static_cast<int>(p->area_end() - free_start);
3385 if (free_space_mode == ZAP_FREE_SPACE) { 3383 if (free_space_mode == ZAP_FREE_SPACE) {
3386 memset(free_start, 0xcc, size); 3384 memset(free_start, 0xcc, size);
3387 } 3385 }
3388 freed_bytes = space->UnaccountedFree(free_start, size); 3386 freed_bytes = space->UnaccountedFree(free_start, size);
3389 max_freed_bytes = Max(freed_bytes, max_freed_bytes); 3387 max_freed_bytes = Max(freed_bytes, max_freed_bytes);
3390 } 3388 }
3391 ArrayBufferTracker::FreeDead(p);
3392 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); 3389 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
3393 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); 3390 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
3394 } 3391 }
3395 3392
3396 void MarkCompactCollector::InvalidateCode(Code* code) { 3393 void MarkCompactCollector::InvalidateCode(Code* code) {
3397 if (heap_->incremental_marking()->IsCompacting() && 3394 if (heap_->incremental_marking()->IsCompacting() &&
3398 !ShouldSkipEvacuationSlotRecording(code)) { 3395 !ShouldSkipEvacuationSlotRecording(code)) {
3399 DCHECK(compacting_); 3396 DCHECK(compacting_);
3400 3397
3401 // If the object is white then no slots were recorded on it yet. 3398 // If the object is white then no slots were recorded on it yet.
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after
3525 // because root iteration traverses the stack and might have to find 3522 // because root iteration traverses the stack and might have to find
3526 // code objects from non-updated pc pointing into evacuation candidate. 3523 // code objects from non-updated pc pointing into evacuation candidate.
3527 SkipList* list = p->skip_list(); 3524 SkipList* list = p->skip_list();
3528 if (list != NULL) list->Clear(); 3525 if (list != NULL) list->Clear();
3529 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { 3526 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
3530 sweeper().AddLatePage(p->owner()->identity(), p); 3527 sweeper().AddLatePage(p->owner()->identity(), p);
3531 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); 3528 p->ClearFlag(Page::COMPACTION_WAS_ABORTED);
3532 } 3529 }
3533 } 3530 }
3534 3531
3532 // EvacuateNewSpaceAndCandidates iterates over new space objects and for
3533 // ArrayBuffers either re-registers them as live or promotes them. This is
3534 // needed to properly free them.
3535 heap()->array_buffer_tracker()->FreeDead(false);
3536
3535 // Deallocate evacuated candidate pages. 3537 // Deallocate evacuated candidate pages.
3536 ReleaseEvacuationCandidates(); 3538 ReleaseEvacuationCandidates();
3537 } 3539 }
3538 3540
3539 #ifdef VERIFY_HEAP 3541 #ifdef VERIFY_HEAP
3540 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { 3542 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) {
3541 VerifyEvacuation(heap()); 3543 VerifyEvacuation(heap());
3542 } 3544 }
3543 #endif 3545 #endif
3544 } 3546 }
(...skipping 405 matching lines...) Expand 10 before | Expand all | Expand 10 after
3950 MarkBit mark_bit = Marking::MarkBitFrom(host); 3952 MarkBit mark_bit = Marking::MarkBitFrom(host);
3951 if (Marking::IsBlack(mark_bit)) { 3953 if (Marking::IsBlack(mark_bit)) {
3952 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); 3954 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
3953 RecordRelocSlot(host, &rinfo, target); 3955 RecordRelocSlot(host, &rinfo, target);
3954 } 3956 }
3955 } 3957 }
3956 } 3958 }
3957 3959
3958 } // namespace internal 3960 } // namespace internal
3959 } // namespace v8 3961 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/incremental-marking.cc ('k') | src/heap/objects-visiting-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698