OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
11 #include "src/compilation-cache.h" | 11 #include "src/compilation-cache.h" |
12 #include "src/deoptimizer.h" | 12 #include "src/deoptimizer.h" |
13 #include "src/execution.h" | 13 #include "src/execution.h" |
14 #include "src/frames-inl.h" | 14 #include "src/frames-inl.h" |
15 #include "src/gdb-jit.h" | 15 #include "src/gdb-jit.h" |
16 #include "src/global-handles.h" | 16 #include "src/global-handles.h" |
17 #include "src/heap/array-buffer-tracker.h" | 17 #include "src/heap/array-buffer-tracker-inl.h" |
18 #include "src/heap/gc-tracer.h" | 18 #include "src/heap/gc-tracer.h" |
19 #include "src/heap/incremental-marking.h" | 19 #include "src/heap/incremental-marking.h" |
20 #include "src/heap/mark-compact-inl.h" | 20 #include "src/heap/mark-compact-inl.h" |
21 #include "src/heap/object-stats.h" | 21 #include "src/heap/object-stats.h" |
22 #include "src/heap/objects-visiting-inl.h" | 22 #include "src/heap/objects-visiting-inl.h" |
23 #include "src/heap/objects-visiting.h" | 23 #include "src/heap/objects-visiting.h" |
24 #include "src/heap/page-parallel-job.h" | 24 #include "src/heap/page-parallel-job.h" |
25 #include "src/heap/spaces-inl.h" | 25 #include "src/heap/spaces-inl.h" |
26 #include "src/ic/ic.h" | 26 #include "src/ic/ic.h" |
27 #include "src/ic/stub-cache.h" | 27 #include "src/ic/stub-cache.h" |
(...skipping 837 matching lines...) |
865 // marking cycle. We did not collect any slots. | 865 // marking cycle. We did not collect any slots. |
866 if (!FLAG_never_compact && !was_marked_incrementally_) { | 866 if (!FLAG_never_compact && !was_marked_incrementally_) { |
867 StartCompaction(NON_INCREMENTAL_COMPACTION); | 867 StartCompaction(NON_INCREMENTAL_COMPACTION); |
868 } | 868 } |
869 | 869 |
870 PagedSpaces spaces(heap()); | 870 PagedSpaces spaces(heap()); |
871 for (PagedSpace* space = spaces.next(); space != NULL; | 871 for (PagedSpace* space = spaces.next(); space != NULL; |
872 space = spaces.next()) { | 872 space = spaces.next()) { |
873 space->PrepareForMarkCompact(); | 873 space->PrepareForMarkCompact(); |
874 } | 874 } |
| 875 if (!was_marked_incrementally_) { |
| 876 ArrayBufferTracker::ResetTrackersInOldSpace(heap_); |
| 877 } |
| 878 heap()->account_amount_of_external_allocated_freed_memory(); |
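Note on the two added calls in Prepare(): when the cycle did not start from incremental marking, the old-space trackers are reset before the fresh full mark (presumably so stale liveness state from a previous cycle does not leak into this one), and external (off-heap) memory freed by trackers since the last GC is credited back to the heap's external-allocation counter in one step. A hedged sketch of the accounting idea, with invented names; the real heap fields and methods may differ:

    #include <atomic>
    #include <cstdint>

    class ExternalAccountingSketch {
     public:
      // Trackers call this whenever they release a backing store.
      void IncrementFreed(uint64_t bytes) {
        freed_since_last_gc_.fetch_add(bytes, std::memory_order_relaxed);
      }

      // Called once per GC (as in Prepare() above): fold the freed bytes
      // into the live external-memory counter so the next allocation
      // limit is computed from an accurate value.
      void AccountFreed() {
        external_bytes_ -= freed_since_last_gc_.exchange(0);
      }

     private:
      uint64_t external_bytes_ = 0;
      std::atomic<uint64_t> freed_since_last_gc_{0};
    };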
875 | 879 |
876 #ifdef VERIFY_HEAP | 880 #ifdef VERIFY_HEAP |
877 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 881 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
878 VerifyMarkbitsAreClean(); | 882 VerifyMarkbitsAreClean(); |
879 } | 883 } |
880 #endif | 884 #endif |
881 } | 885 } |
882 | 886 |
883 | 887 |
884 void MarkCompactCollector::Finish() { | 888 void MarkCompactCollector::Finish() { |
(...skipping 835 matching lines...) |
1720 local_pretenuring_feedback_(local_pretenuring_feedback) {} | 1724 local_pretenuring_feedback_(local_pretenuring_feedback) {} |
1721 | 1725 |
1722 inline bool Visit(HeapObject* object) override { | 1726 inline bool Visit(HeapObject* object) override { |
1723 heap_->UpdateAllocationSite<Heap::kCached>(object, | 1727 heap_->UpdateAllocationSite<Heap::kCached>(object, |
1724 local_pretenuring_feedback_); | 1728 local_pretenuring_feedback_); |
1725 int size = object->Size(); | 1729 int size = object->Size(); |
1726 HeapObject* target_object = nullptr; | 1730 HeapObject* target_object = nullptr; |
1727 if (heap_->ShouldBePromoted(object->address(), size) && | 1731 if (heap_->ShouldBePromoted(object->address(), size) && |
1728 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, | 1732 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, |
1729 &target_object)) { | 1733 &target_object)) { |
1730 // If we end up needing more special cases, we should factor this out. | |
1731 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { | |
1732 heap_->array_buffer_tracker()->Promote( | |
1733 JSArrayBuffer::cast(target_object)); | |
1734 } | |
1735 promoted_size_ += size; | 1734 promoted_size_ += size; |
1736 return true; | 1735 return true; |
1737 } | 1736 } |
1738 HeapObject* target = nullptr; | 1737 HeapObject* target = nullptr; |
1739 AllocationSpace space = AllocateTargetObject(object, &target); | 1738 AllocationSpace space = AllocateTargetObject(object, &target); |
1740 MigrateObject(HeapObject::cast(target), object, size, space); | 1739 MigrateObject(HeapObject::cast(target), object, size, space); |
1741 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | |
1742 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); | |
1743 } | |
1744 semispace_copied_size_ += size; | 1740 semispace_copied_size_ += size; |
1745 return true; | 1741 return true; |
1746 } | 1742 } |
1747 | 1743 |
1748 intptr_t promoted_size() { return promoted_size_; } | 1744 intptr_t promoted_size() { return promoted_size_; } |
1749 intptr_t semispace_copied_size() { return semispace_copied_size_; } | 1745 intptr_t semispace_copied_size() { return semispace_copied_size_; } |
1750 | 1746 |
1751 private: | 1747 private: |
1752 enum NewSpaceAllocationMode { | 1748 enum NewSpaceAllocationMode { |
1753 kNonstickyBailoutOldSpace, | 1749 kNonstickyBailoutOldSpace, |
(...skipping 104 matching lines...) |
1858 : heap_(heap), promoted_size_(0) {} | 1854 : heap_(heap), promoted_size_(0) {} |
1859 | 1855 |
1860 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) { | 1856 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) { |
1861 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) { | 1857 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) { |
1862 Page* new_page = Page::ConvertNewToOld(page, owner); | 1858 Page* new_page = Page::ConvertNewToOld(page, owner); |
1863 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); | 1859 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); |
1864 } | 1860 } |
1865 } | 1861 } |
1866 | 1862 |
1867 inline bool Visit(HeapObject* object) { | 1863 inline bool Visit(HeapObject* object) { |
1868 if (V8_UNLIKELY(object->IsJSArrayBuffer())) { | |
1869 object->GetHeap()->array_buffer_tracker()->Promote( | |
1870 JSArrayBuffer::cast(object)); | |
1871 } | |
1872 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); | 1864 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); |
1873 object->IterateBodyFast(&visitor); | 1865 object->IterateBodyFast(&visitor); |
1874 promoted_size_ += object->Size(); | 1866 promoted_size_ += object->Size(); |
1875 return true; | 1867 return true; |
1876 } | 1868 } |
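Note: the Promote() call removed from Visit here (and the per-object special cases removed from the new-space visitor above) is the point of this change: buffer bookkeeping is now keyed to the page a buffer lives on, so promoting a whole page moves its tracker along with it and per-object re-registration goes away. The comment added at the kPageNewToOld case below says the tracker is then updated during sweeping. Hedged sketch, all types invented:

    #include <cstddef>
    #include <memory>
    #include <unordered_map>

    struct TrackerSketch {
      std::unordered_map<void*, size_t> buffers;  // buffer -> byte length
    };

    struct PageSketch {
      std::unique_ptr<TrackerSketch> local_tracker;
      bool new_old_promotion = false;
    };

    // Converting a new-space page to old space is a flag/ownership
    // change; the tracker travels with the page untouched, and sweeping
    // later uses this page's mark bits to drop entries for dead buffers.
    void ConvertNewToOldSketch(PageSketch& page) {
      page.new_old_promotion = true;
    }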
1877 | 1869 |
1878 intptr_t promoted_size() { return promoted_size_; } | 1870 intptr_t promoted_size() { return promoted_size_; } |
1879 | 1871 |
1880 private: | 1872 private: |
1881 Heap* heap_; | 1873 Heap* heap_; |
(...skipping 20 matching lines...) |
1902 }; | 1894 }; |
1903 | 1895 |
1904 class MarkCompactCollector::EvacuateRecordOnlyVisitor final | 1896 class MarkCompactCollector::EvacuateRecordOnlyVisitor final |
1905 : public MarkCompactCollector::HeapObjectVisitor { | 1897 : public MarkCompactCollector::HeapObjectVisitor { |
1906 public: | 1898 public: |
1907 explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {} | 1899 explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {} |
1908 | 1900 |
1909 inline bool Visit(HeapObject* object) { | 1901 inline bool Visit(HeapObject* object) { |
1910 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); | 1902 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); |
1911 object->IterateBody(&visitor); | 1903 object->IterateBody(&visitor); |
| 1904 if (V8_UNLIKELY(object->IsJSArrayBuffer())) { |
| 1905 ArrayBufferTracker::MarkLive(heap_, JSArrayBuffer::cast(object)); |
| 1906 } |
1912 return true; | 1907 return true; |
1913 } | 1908 } |
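Note on the added MarkLive in the record-only visitor: this visitor runs over aborted compaction pages (see the kObjectsOldToOld fallback below), where live objects stay in place and therefore never get a forwarding pointer. Recording the buffer as live keeps the subsequent kForwardingPointerOrMarkBit scan from freeing its backing store. A minimal sketch of one plausible mechanism, with invented names:

    #include <cstddef>
    #include <unordered_map>

    struct EntrySketch {
      size_t length = 0;
      bool live = false;  // set by MarkLive for objects that did not move
    };

    class TrackerSketch {
     public:
      void Track(void* buffer, size_t length) {
        entries_[buffer] = EntrySketch{length, false};
      }
      void MarkLive(void* buffer) {
        auto it = entries_.find(buffer);
        if (it != entries_.end()) it->second.live = true;
      }

     private:
      std::unordered_map<void*, EntrySketch> entries_;
    };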
1914 | 1909 |
1915 private: | 1910 private: |
1916 Heap* heap_; | 1911 Heap* heap_; |
1917 }; | 1912 }; |
1918 | 1913 |
1919 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) { | 1914 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) { |
1920 PageIterator it(space); | 1915 PageIterator it(space); |
1921 while (it.has_next()) { | 1916 while (it.has_next()) { |
(...skipping 1198 matching lines...) |
3120 } | 3115 } |
3121 return success; | 3116 return success; |
3122 } | 3117 } |
3123 | 3118 |
3124 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { | 3119 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { |
3125 bool result = false; | 3120 bool result = false; |
3126 DCHECK(page->SweepingDone()); | 3121 DCHECK(page->SweepingDone()); |
3127 switch (ComputeEvacuationMode(page)) { | 3122 switch (ComputeEvacuationMode(page)) { |
3128 case kObjectsNewToOld: | 3123 case kObjectsNewToOld: |
3129 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_); | 3124 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_); |
| 3125 ArrayBufferTracker::ScanAndFreeDeadArrayBuffers< |
| 3126 LocalArrayBufferTracker::kForwardingPointer>(page); |
3130 DCHECK(result); | 3127 DCHECK(result); |
3131 USE(result); | 3128 USE(result); |
3132 break; | 3129 break; |
3133 case kPageNewToOld: | 3130 case kPageNewToOld: |
3134 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor); | 3131 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor); |
| 3132 // ArrayBufferTracker will be updated during sweeping. |
3135 DCHECK(result); | 3133 DCHECK(result); |
3136 USE(result); | 3134 USE(result); |
3137 break; | 3135 break; |
3138 case kObjectsOldToOld: | 3136 case kObjectsOldToOld: |
3139 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_); | 3137 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_); |
3140 if (!result) { | 3138 if (!result) { |
3141 // Aborted compaction page. We can record slots here to have them | 3139 // Aborted compaction page. We can record slots here to have them |
3142 // processed in parallel later on. | 3140 // processed in parallel later on. |
3143 EvacuateRecordOnlyVisitor record_visitor(collector_->heap()); | 3141 EvacuateRecordOnlyVisitor record_visitor(collector_->heap()); |
3144 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor); | 3142 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor); |
| 3143 ArrayBufferTracker::ScanAndFreeDeadArrayBuffers< |
| 3144 LocalArrayBufferTracker::kForwardingPointerOrMarkBit>(page); |
3145 DCHECK(result); | 3145 DCHECK(result); |
3146 USE(result); | 3146 USE(result); |
3147 // We need to return failure here to indicate that we want this page | 3147 // We need to return failure here to indicate that we want this page |
3148 // added to the sweeper. | 3148 // added to the sweeper. |
3149 return false; | 3149 return false; |
3150 } | 3150 } |
| 3151 ArrayBufferTracker::ScanAndFreeDeadArrayBuffers< |
| 3152 LocalArrayBufferTracker::kForwardingPointer>(page); |
3151 break; | 3153 break; |
3152 default: | 3154 default: |
3153 UNREACHABLE(); | 3155 UNREACHABLE(); |
3154 } | 3156 } |
3155 return result; | 3157 return result; |
3156 } | 3158 } |
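Note on the ScanAndFreeDeadArrayBuffers calls: the two template modes encode what counts as survival. After a clean evacuation every live object has moved, so a forwarding pointer is proof of life and the entry is re-keyed at the new address; on an aborted page nothing moved, so a set mark bit must also count. Anything else is dead and its backing store can be freed. This is also why dropping the per-object special cases in the evacuation visitors above is safe: the pointer chasing happens here, once per page, right after the page was processed. Hedged sketch with invented types:

    #include <cstddef>
    #include <unordered_map>

    enum class ScanMode { kForwardingPointer, kForwardingPointerOrMarkBit };

    struct ObjectStub {
      ObjectStub* forwarding = nullptr;  // set when the object was moved
      bool mark_bit = false;             // set when marked live in place
    };

    template <ScanMode kMode, typename FreeFn>
    void ScanAndFreeDeadSketch(
        std::unordered_map<ObjectStub*, size_t>& entries,
        FreeFn free_backing_store) {
      std::unordered_map<ObjectStub*, size_t> surviving;
      for (auto& entry : entries) {
        ObjectStub* buf = entry.first;
        size_t len = entry.second;
        if (buf->forwarding != nullptr) {
          surviving[buf->forwarding] = len;  // moved: re-key at new address
        } else if (kMode == ScanMode::kForwardingPointerOrMarkBit &&
                   buf->mark_bit) {
          surviving[buf] = len;  // aborted page: object is live in place
        } else {
          free_backing_store(buf, len);  // dead: release the backing store
        }
      }
      entries.swap(surviving);
    }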
3157 | 3159 |
3158 void MarkCompactCollector::Evacuator::Finalize() { | 3160 void MarkCompactCollector::Evacuator::Finalize() { |
3159 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); | 3161 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); |
3160 heap()->code_space()->MergeCompactionSpace( | 3162 heap()->code_space()->MergeCompactionSpace( |
(...skipping 216 matching lines...) |

3377 Bitmap::Clear(p); | 3379 Bitmap::Clear(p); |
3378 | 3380 |
3379 if (free_start != p->area_end()) { | 3381 if (free_start != p->area_end()) { |
3380 int size = static_cast<int>(p->area_end() - free_start); | 3382 int size = static_cast<int>(p->area_end() - free_start); |
3381 if (free_space_mode == ZAP_FREE_SPACE) { | 3383 if (free_space_mode == ZAP_FREE_SPACE) { |
3382 memset(free_start, 0xcc, size); | 3384 memset(free_start, 0xcc, size); |
3383 } | 3385 } |
3384 freed_bytes = space->UnaccountedFree(free_start, size); | 3386 freed_bytes = space->UnaccountedFree(free_start, size); |
3385 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3387 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
3386 } | 3388 } |
| 3389 ArrayBufferTracker::FreeDead(p); |
3387 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); | 3390 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); |
3388 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 3391 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
3389 } | 3392 } |
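Note on the added FreeDead(p) in Sweep: with page-local trackers, each page frees the backing stores of its dead buffers as part of its own sweep, using that page's mark bits. This is also why the heap-wide array_buffer_tracker()->FreeDead(false) pass further down is deleted in this change. Hedged sketch, invented names:

    #include <cstddef>
    #include <cstdlib>
    #include <unordered_map>

    struct ObjectStub {
      bool mark_bit = false;        // liveness from this GC cycle
      void* backing_store = nullptr;
    };

    // Per-page sweep hook: drop (and free) every tracked buffer whose
    // on-heap object was not marked live in this cycle.
    void FreeDeadSketch(std::unordered_map<ObjectStub*, size_t>& entries) {
      for (auto it = entries.begin(); it != entries.end();) {
        if (!it->first->mark_bit) {
          std::free(it->first->backing_store);  // stand-in for the real free
          it = entries.erase(it);
        } else {
          ++it;
        }
      }
    }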
3390 | 3393 |
3391 void MarkCompactCollector::InvalidateCode(Code* code) { | 3394 void MarkCompactCollector::InvalidateCode(Code* code) { |
3392 if (heap_->incremental_marking()->IsCompacting() && | 3395 if (heap_->incremental_marking()->IsCompacting() && |
3393 !ShouldSkipEvacuationSlotRecording(code)) { | 3396 !ShouldSkipEvacuationSlotRecording(code)) { |
3394 DCHECK(compacting_); | 3397 DCHECK(compacting_); |
3395 | 3398 |
3396 // If the object is white then no slots were recorded on it yet. | 3399 // If the object is white then no slots were recorded on it yet. |
(...skipping 123 matching lines...) |
3520 // because root iteration traverses the stack and might have to find | 3523 // because root iteration traverses the stack and might have to find |
3521 // code objects from non-updated pc pointing into evacuation candidate. | 3524 // code objects from non-updated pc pointing into evacuation candidate. |
3522 SkipList* list = p->skip_list(); | 3525 SkipList* list = p->skip_list(); |
3523 if (list != NULL) list->Clear(); | 3526 if (list != NULL) list->Clear(); |
3524 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { | 3527 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { |
3525 sweeper().AddLatePage(p->owner()->identity(), p); | 3528 sweeper().AddLatePage(p->owner()->identity(), p); |
3526 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); | 3529 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); |
3527 } | 3530 } |
3528 } | 3531 } |
3529 | 3532 |
3530 // EvacuateNewSpaceAndCandidates iterates over new space objects and for | |
3531 // ArrayBuffers either re-registers them as live or promotes them. This is | |
3532 // needed to properly free them. | |
3533 heap()->array_buffer_tracker()->FreeDead(false); | |
3534 | |
3535 // Deallocate evacuated candidate pages. | 3533 // Deallocate evacuated candidate pages. |
3536 ReleaseEvacuationCandidates(); | 3534 ReleaseEvacuationCandidates(); |
3537 } | 3535 } |
3538 | 3536 |
3539 #ifdef VERIFY_HEAP | 3537 #ifdef VERIFY_HEAP |
3540 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { | 3538 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { |
3541 VerifyEvacuation(heap()); | 3539 VerifyEvacuation(heap()); |
3542 } | 3540 } |
3543 #endif | 3541 #endif |
3544 } | 3542 } |
(...skipping 396 matching lines...) |
3941 MarkBit mark_bit = Marking::MarkBitFrom(host); | 3939 MarkBit mark_bit = Marking::MarkBitFrom(host); |
3942 if (Marking::IsBlack(mark_bit)) { | 3940 if (Marking::IsBlack(mark_bit)) { |
3943 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 3941 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
3944 RecordRelocSlot(host, &rinfo, target); | 3942 RecordRelocSlot(host, &rinfo, target); |
3945 } | 3943 } |
3946 } | 3944 } |
3947 } | 3945 } |
3948 | 3946 |
3949 } // namespace internal | 3947 } // namespace internal |
3950 } // namespace v8 | 3948 } // namespace v8 |