Chromium Code Reviews
Unified Diff: src/heap/mark-compact.cc

Issue 1964023002: [heap] Fine-grained JSArrayBuffer tracking (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: back to std::map (created 4 years, 7 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/deoptimizer.h"
 #include "src/execution.h"
 #include "src/frames-inl.h"
 #include "src/gdb-jit.h"
 #include "src/global-handles.h"
-#include "src/heap/array-buffer-tracker.h"
+#include "src/heap/array-buffer-tracker-inl.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
 #include "src/heap/page-parallel-job.h"
 #include "src/heap/spaces-inl.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"

(...skipping 835 matching lines...)
   // marking cycle. We did not collect any slots.
   if (!FLAG_never_compact && !was_marked_incrementally_) {
     StartCompaction(NON_INCREMENTAL_COMPACTION);
   }

   PagedSpaces spaces(heap());
   for (PagedSpace* space = spaces.next(); space != NULL;
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
+  if (!was_marked_incrementally_) {
+    heap()->old_space()->ForAllPages([](Page* p) { p->ResetTracker(); });
+  }
+  heap()->account_amount_of_external_allocated_freed_memory();

 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
 }


 void MarkCompactCollector::Finish() {
(...skipping 835 matching lines...)
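The new Prepare() code above resets every old-space page's ArrayBuffer tracker before a non-incremental collection and folds already-freed external memory into the heap's accounting. The ForAllPages helper is not part of this file; a minimal sketch of what it could look like, built on the same PageIterator this file already uses in DiscoverGreyObjectsInSpace() further down (the helper's name and placement are assumptions, not shown in this diff):

template <typename Callback>
void PagedSpace::ForAllPages(Callback callback) {
  // Visit every page owned by this space and hand it to the callback.
  PageIterator it(this);
  while (it.has_next()) {
    callback(it.next());
  }
}

Resetting the trackers only when no incremental marking ran mirrors the mark-bit verification below it: per-page state left over from a previous cycle must not be mistaken for liveness information in this one.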
         local_pretenuring_feedback_(local_pretenuring_feedback) {}

   inline bool Visit(HeapObject* object) override {
     heap_->UpdateAllocationSite<Heap::kCached>(object,
                                                local_pretenuring_feedback_);
     int size = object->Size();
     HeapObject* target_object = nullptr;
     if (heap_->ShouldBePromoted(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
-      // If we end up needing more special cases, we should factor this out.

Hannes Payer (out of office) 2016/05/20 12:21:17: Nice!

-      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
-        heap_->array_buffer_tracker()->Promote(
-            JSArrayBuffer::cast(target_object));
-      }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
-    }
     semispace_copied_size_ += size;
     return true;
   }

   intptr_t promoted_size() { return promoted_size_; }
   intptr_t semispace_copied_size() { return semispace_copied_size_; }

  private:
   enum NewSpaceAllocationMode {
     kNonstickyBailoutOldSpace,
(...skipping 104 matching lines...)
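The per-object Promote and MarkLive calls removed above were hooks into one heap-global ArrayBufferTracker. The patch set title, "back to std::map", suggests the replacement is a per-page LocalArrayBufferTracker keyed by buffer address. The following is a self-contained, hypothetical sketch of such a tracker; the member names, the Process() protocol, and the map layout are assumptions, not the actual contents of array-buffer-tracker.h:

#include <cstddef>
#include <map>

class JSArrayBuffer;  // V8 heap object; real declaration in src/objects.h.

class LocalArrayBufferTracker {
 public:
  enum FreeMode { kForwardingPointer, kMarkBit };

  // Called when a buffer is allocated on (or moved to) this page.
  void Add(JSArrayBuffer* buffer, size_t length) {
    array_buffers_.insert({buffer, length});
  }

  // Visits every tracked buffer. The callback returns the buffer's new
  // location (which may equal the old one), or nullptr if the buffer is
  // dead and its backing store should be released.
  template <typename Callback>
  void Process(Callback callback) {
    std::map<JSArrayBuffer*, size_t> surviving;
    for (auto it = array_buffers_.begin(); it != array_buffers_.end();) {
      JSArrayBuffer* new_location = callback(it->first);
      if (new_location != nullptr) {
        // In the real design a moved buffer would presumably be handed to
        // the target page's tracker; this sketch re-keys it locally.
        surviving.insert({new_location, it->second});
      }
      // Dead entries are dropped; freeing the it->second backing-store
      // bytes would happen here.
      it = array_buffers_.erase(it);
    }
    array_buffers_.swap(surviving);
  }

 private:
  // Buffer -> backing-store length, so a dead buffer's memory can be freed
  // without touching the (possibly already evacuated) object itself.
  std::map<JSArrayBuffer*, size_t> array_buffers_;
};

Keeping the length in the map is what makes the tracking fine-grained: dead backing stores can be freed page by page, and Prepare()'s account_amount_of_external_allocated_freed_memory() call can report the accumulated total to the external-memory accounting.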
       : heap_(heap), promoted_size_(0) {}

   static void TryMoveToOldSpace(Page* page, PagedSpace* owner) {
     if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) {
       Page* new_page = Page::ConvertNewToOld(page, owner);
       new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
     }
   }

   inline bool Visit(HeapObject* object) {
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object));
-    }
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBodyFast(&visitor);
     promoted_size_ += object->Size();
     return true;
   }

   intptr_t promoted_size() { return promoted_size_; }

  private:
   Heap* heap_;
(...skipping 20 matching lines...)
 };

 class MarkCompactCollector::EvacuateRecordOnlyVisitor final
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
   explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {}

   inline bool Visit(HeapObject* object) {
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBody(&visitor);
+    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+    }
     return true;
   }

  private:
   Heap* heap_;
 };

 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
   PageIterator it(space);
   while (it.has_next()) {
(...skipping 1306 matching lines...)
   }
   return success;
 }

 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
   bool result = false;
   DCHECK(page->SweepingDone());
   switch (ComputeEvacuationMode(page)) {
     case kObjectsNewToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
+      page->ScanAndFreeDeadArrayBuffers<
+          LocalArrayBufferTracker::kForwardingPointer>();
       DCHECK(result);
       USE(result);
       break;
     case kPageNewToOld:
       result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
+      // ArrayBufferTracker will be updated during sweeping.
       DCHECK(result);
       USE(result);
       break;
     case kObjectsOldToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
+      page->ScanAndFreeDeadArrayBuffers<
+          LocalArrayBufferTracker::kForwardingPointer>();
       if (!result) {
         // Aborted compaction page. We can record slots here to have them
         // processed in parallel later on.
         EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
         result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
+        page->ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>();
         DCHECK(result);
         USE(result);
         // We need to return failure here to indicate that we want this page
         // added to the sweeper.
         return false;
       }
       break;
     default:
       UNREACHABLE();
   }
(...skipping 221 matching lines...)
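EvacuatePage() above picks a liveness signal per evacuation mode: kForwardingPointer on pages whose objects were just evacuated, kMarkBit on aborted compaction pages where objects stay in place (and where EvacuateRecordOnlyVisitor re-registered live buffers). A sketch of how the templated scan could map each mode onto the tracker's Process() from the sketch above; the real implementation lives in array-buffer-tracker-inl.h, which this diff only includes, and the tracker() accessor on Page is an assumption:

template <LocalArrayBufferTracker::FreeMode mode>
void Page::ScanAndFreeDeadArrayBuffers() {
  tracker()->Process([](JSArrayBuffer* buffer) -> JSArrayBuffer* {
    if (mode == LocalArrayBufferTracker::kForwardingPointer) {
      // After a successful evacuation, a live object's map word holds a
      // forwarding pointer to its new location; anything else is dead.
      MapWord map_word = buffer->map_word();
      return map_word.IsForwardingAddress()
                 ? JSArrayBuffer::cast(map_word.ToForwardingAddress())
                 : nullptr;
    }
    // kMarkBit: objects on this page did not move, so the mark bit is the
    // only liveness signal left.
    return Marking::IsBlackOrGrey(Marking::MarkBitFrom(buffer)) ? buffer
                                                                : nullptr;
  });
}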
   Bitmap::Clear(p);

   if (free_start != p->area_end()) {
     int size = static_cast<int>(p->area_end() - free_start);
     if (free_space_mode == ZAP_FREE_SPACE) {
       memset(free_start, 0xcc, size);
     }
     freed_bytes = space->UnaccountedFree(free_start, size);
     max_freed_bytes = Max(freed_bytes, max_freed_bytes);
   }
+  p->FreeDeadArrayBuffers();
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }

 void MarkCompactCollector::InvalidateCode(Code* code) {
   if (heap_->incremental_marking()->IsCompacting() &&
       !ShouldSkipEvacuationSlotRecording(code)) {
     DCHECK(compacting_);

     // If the object is white then no slots were recorded on it yet.
(...skipping 122 matching lines...)
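The sweeping chunk above frees a page's dead buffers (p->FreeDeadArrayBuffers()) right before the page is declared swept. Presumably this is just the mark-bit flavor of the same scan, since sweeping never moves objects; a one-line sketch under the same assumptions as above:

void Page::FreeDeadArrayBuffers() {
  // Nothing moves during sweeping, so unmarked buffers are dead and their
  // backing stores can be released immediately.
  ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>();
}

Handling this in the sweeper also covers the kPageNewToOld case in EvacuatePage(), which deliberately leaves the tracker alone with the note that it "will be updated during sweeping".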
     // because root iteration traverses the stack and might have to find
     // code objects from non-updated pc pointing into evacuation candidate.
     SkipList* list = p->skip_list();
     if (list != NULL) list->Clear();
     if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) {
       sweeper().AddLatePage(p->owner()->identity(), p);
       p->ClearFlag(Page::COMPACTION_WAS_ABORTED);
     }
   }

-    // EvacuateNewSpaceAndCandidates iterates over new space objects and for
-    // ArrayBuffers either re-registers them as live or promotes them. This is
-    // needed to properly free them.
-    heap()->array_buffer_tracker()->FreeDead(false);
-
     // Deallocate evacuated candidate pages.
     ReleaseEvacuationCandidates();
   }

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) {
     VerifyEvacuation(heap());
   }
 #endif
 }
(...skipping 384 matching lines...)
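The chunk above deletes the heap-global FreeDead(false) pass from EvacuateNewSpaceAndCandidates, along with the comment explaining why it was needed: with per-page trackers, buffer liveness is resolved locally during evacuation and sweeping, so no whole-heap walk remains. For contrast, a rough guess at the shape of the retired pass (names and data structures assumed, not taken from the old source):

void ArrayBufferTracker::FreeDead(bool from_scavenge) {
  // One walk over every tracked buffer in the heap: free the backing
  // store of anything the marker did not reach. from_scavenge presumably
  // selected which subset of buffers to consider.
  for (auto it = live_.begin(); it != live_.end();) {
    JSArrayBuffer* buffer = it->first;
    if (!Marking::IsBlackOrGrey(Marking::MarkBitFrom(buffer))) {
      FreeBackingStore(buffer, it->second);  // hypothetical helper
      it = live_.erase(it);
    } else {
      ++it;
    }
  }
}

Splitting this single pass across pages lets the freeing work run page-locally on the evacuation and sweeping tasks instead of serializing it at the end of the GC.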
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {
       RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
       RecordRelocSlot(host, &rinfo, target);
     }
   }
 }

 }  // namespace internal
 }  // namespace v8