Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 854 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 865 // marking cycle. We did not collect any slots. | 865 // marking cycle. We did not collect any slots. |
| 866 if (!FLAG_never_compact && !was_marked_incrementally_) { | 866 if (!FLAG_never_compact && !was_marked_incrementally_) { |
| 867 StartCompaction(NON_INCREMENTAL_COMPACTION); | 867 StartCompaction(NON_INCREMENTAL_COMPACTION); |
| 868 } | 868 } |
| 869 | 869 |
| 870 PagedSpaces spaces(heap()); | 870 PagedSpaces spaces(heap()); |
| 871 for (PagedSpace* space = spaces.next(); space != NULL; | 871 for (PagedSpace* space = spaces.next(); space != NULL; |
| 872 space = spaces.next()) { | 872 space = spaces.next()) { |
| 873 space->PrepareForMarkCompact(); | 873 space->PrepareForMarkCompact(); |
| 874 } | 874 } |
| 875 heap()->account_amount_of_external_allocated_freed_memory(); | |
| 875 | 876 |
| 876 #ifdef VERIFY_HEAP | 877 #ifdef VERIFY_HEAP |
| 877 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 878 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
| 878 VerifyMarkbitsAreClean(); | 879 VerifyMarkbitsAreClean(); |
| 879 } | 880 } |
| 880 #endif | 881 #endif |
| 881 } | 882 } |
| 882 | 883 |
| 883 | 884 |
| 884 void MarkCompactCollector::Finish() { | 885 void MarkCompactCollector::Finish() { |
| (...skipping 835 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1720 local_pretenuring_feedback_(local_pretenuring_feedback) {} | 1721 local_pretenuring_feedback_(local_pretenuring_feedback) {} |
| 1721 | 1722 |
| 1722 inline bool Visit(HeapObject* object) override { | 1723 inline bool Visit(HeapObject* object) override { |
| 1723 heap_->UpdateAllocationSite<Heap::kCached>(object, | 1724 heap_->UpdateAllocationSite<Heap::kCached>(object, |
| 1724 local_pretenuring_feedback_); | 1725 local_pretenuring_feedback_); |
| 1725 int size = object->Size(); | 1726 int size = object->Size(); |
| 1726 HeapObject* target_object = nullptr; | 1727 HeapObject* target_object = nullptr; |
| 1727 if (heap_->ShouldBePromoted(object->address(), size) && | 1728 if (heap_->ShouldBePromoted(object->address(), size) && |
| 1728 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, | 1729 TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, |
| 1729 &target_object)) { | 1730 &target_object)) { |
| 1730 // If we end up needing more special cases, we should factor this out. | |
| 1731 if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { | |
| 1732 heap_->array_buffer_tracker()->Promote( | |
| 1733 JSArrayBuffer::cast(target_object)); | |
| 1734 } | |
| 1735 promoted_size_ += size; | 1731 promoted_size_ += size; |
| 1736 return true; | 1732 return true; |
| 1737 } | 1733 } |
| 1738 HeapObject* target = nullptr; | 1734 HeapObject* target = nullptr; |
| 1739 AllocationSpace space = AllocateTargetObject(object, &target); | 1735 AllocationSpace space = AllocateTargetObject(object, &target); |
| 1740 MigrateObject(HeapObject::cast(target), object, size, space); | 1736 MigrateObject(HeapObject::cast(target), object, size, space); |
| 1741 if (V8_UNLIKELY(target->IsJSArrayBuffer())) { | |
| 1742 heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); | |
| 1743 } | |
| 1744 semispace_copied_size_ += size; | 1737 semispace_copied_size_ += size; |
| 1745 return true; | 1738 return true; |
| 1746 } | 1739 } |
| 1747 | 1740 |
| 1748 intptr_t promoted_size() { return promoted_size_; } | 1741 intptr_t promoted_size() { return promoted_size_; } |
| 1749 intptr_t semispace_copied_size() { return semispace_copied_size_; } | 1742 intptr_t semispace_copied_size() { return semispace_copied_size_; } |
| 1750 | 1743 |
| 1751 private: | 1744 private: |
| 1752 enum NewSpaceAllocationMode { | 1745 enum NewSpaceAllocationMode { |
| 1753 kNonstickyBailoutOldSpace, | 1746 kNonstickyBailoutOldSpace, |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1858 : heap_(heap), promoted_size_(0) {} | 1851 : heap_(heap), promoted_size_(0) {} |
| 1859 | 1852 |
| 1860 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) { | 1853 static void TryMoveToOldSpace(Page* page, PagedSpace* owner) { |
| 1861 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) { | 1854 if (page->heap()->new_space()->ReplaceWithEmptyPage(page)) { |
| 1862 Page* new_page = Page::ConvertNewToOld(page, owner); | 1855 Page* new_page = Page::ConvertNewToOld(page, owner); |
| 1863 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); | 1856 new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); |
| 1864 } | 1857 } |
| 1865 } | 1858 } |
| 1866 | 1859 |
| 1867 inline bool Visit(HeapObject* object) { | 1860 inline bool Visit(HeapObject* object) { |
| 1868 if (V8_UNLIKELY(object->IsJSArrayBuffer())) { | |
| 1869 object->GetHeap()->array_buffer_tracker()->Promote( | |
| 1870 JSArrayBuffer::cast(object)); | |
| 1871 } | |
| 1872 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); | 1861 RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); |
| 1873 object->IterateBodyFast(&visitor); | 1862 object->IterateBodyFast(&visitor); |
| 1874 promoted_size_ += object->Size(); | 1863 promoted_size_ += object->Size(); |
| 1875 return true; | 1864 return true; |
| 1876 } | 1865 } |
| 1877 | 1866 |
| 1878 intptr_t promoted_size() { return promoted_size_; } | 1867 intptr_t promoted_size() { return promoted_size_; } |
| 1879 | 1868 |
| 1880 private: | 1869 private: |
| 1881 Heap* heap_; | 1870 Heap* heap_; |
| (...skipping 1238 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3120 } | 3109 } |
| 3121 return success; | 3110 return success; |
| 3122 } | 3111 } |
| 3123 | 3112 |
| 3124 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { | 3113 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { |
| 3125 bool result = false; | 3114 bool result = false; |
| 3126 DCHECK(page->SweepingDone()); | 3115 DCHECK(page->SweepingDone()); |
| 3127 switch (ComputeEvacuationMode(page)) { | 3116 switch (ComputeEvacuationMode(page)) { |
| 3128 case kObjectsNewToOld: | 3117 case kObjectsNewToOld: |
| 3129 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_); | 3118 result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_); |
| 3119 ArrayBufferTracker::ProcessBuffers( | |
| 3120 page, ArrayBufferTracker::kUpdateForwardedRemoveOthers); | |
| 3130 DCHECK(result); | 3121 DCHECK(result); |
| 3131 USE(result); | 3122 USE(result); |
| 3132 break; | 3123 break; |
| 3133 case kPageNewToOld: | 3124 case kPageNewToOld: |
| 3134 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor); | 3125 result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor); |
| 3126 // ArrayBufferTracker will be updated during sweeping. | |
| 3135 DCHECK(result); | 3127 DCHECK(result); |
| 3136 USE(result); | 3128 USE(result); |
| 3137 break; | 3129 break; |
| 3138 case kObjectsOldToOld: | 3130 case kObjectsOldToOld: |
| 3139 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_); | 3131 result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_); |
| 3140 if (!result) { | 3132 if (!result) { |
| 3141 // Aborted compaction page. We have to record slots here, since we might | 3133 // Aborted compaction page. We have to record slots here, since we might |
| 3142 // not have recorded them in first place. | 3134 // not have recorded them in first place. |
| 3143 // Note: We mark the page as aborted here to be able to record slots | 3135 // Note: We mark the page as aborted here to be able to record slots |
| 3144 // for code objects in |RecordMigratedSlotVisitor|. | 3136 // for code objects in |RecordMigratedSlotVisitor|. |
| 3145 page->SetFlag(Page::COMPACTION_WAS_ABORTED); | 3137 page->SetFlag(Page::COMPACTION_WAS_ABORTED); |
| 3146 EvacuateRecordOnlyVisitor record_visitor(collector_->heap()); | 3138 EvacuateRecordOnlyVisitor record_visitor(collector_->heap()); |
| 3147 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor); | 3139 result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor); |
| 3140 ArrayBufferTracker::ProcessBuffers( | |
| 3141 page, ArrayBufferTracker::kUpdateForwardedKeepOthers); | |
| 3148 DCHECK(result); | 3142 DCHECK(result); |
| 3149 USE(result); | 3143 USE(result); |
| 3150 // We need to return failure here to indicate that we want this page | 3144 // We need to return failure here to indicate that we want this page |
| 3151 // added to the sweeper. | 3145 // added to the sweeper. |
| 3152 return false; | 3146 return false; |
| 3153 } | 3147 } |
| 3148 ArrayBufferTracker::ProcessBuffers( | |
| 3149 page, ArrayBufferTracker::kUpdateForwardedRemoveOthers); | |
| 3150 | |
| 3154 break; | 3151 break; |
| 3155 default: | 3152 default: |
| 3156 UNREACHABLE(); | 3153 UNREACHABLE(); |
| 3157 } | 3154 } |
| 3158 return result; | 3155 return result; |
| 3159 } | 3156 } |
| 3160 | 3157 |
| 3161 void MarkCompactCollector::Evacuator::Finalize() { | 3158 void MarkCompactCollector::Evacuator::Finalize() { |
| 3162 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); | 3159 heap()->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); |
| 3163 heap()->code_space()->MergeCompactionSpace( | 3160 heap()->code_space()->MergeCompactionSpace( |
| (...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3322 MarkCompactCollector::Sweeper::SkipListRebuildingMode skip_list_mode, | 3319 MarkCompactCollector::Sweeper::SkipListRebuildingMode skip_list_mode, |
| 3323 MarkCompactCollector::Sweeper::FreeSpaceTreatmentMode free_space_mode> | 3320 MarkCompactCollector::Sweeper::FreeSpaceTreatmentMode free_space_mode> |
| 3324 int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p, | 3321 int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p, |
| 3325 ObjectVisitor* v) { | 3322 ObjectVisitor* v) { |
| 3326 DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone()); | 3323 DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone()); |
| 3327 DCHECK(!p->IsFlagSet(Page::BLACK_PAGE)); | 3324 DCHECK(!p->IsFlagSet(Page::BLACK_PAGE)); |
| 3328 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, | 3325 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, |
| 3329 space->identity() == CODE_SPACE); | 3326 space->identity() == CODE_SPACE); |
| 3330 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); | 3327 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); |
| 3331 DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY); | 3328 DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY); |
| 3332 | 3329 |
| *Inline review thread:* **Hannes Payer (out of office)** — 2016/06/01 13:44:17: Suggested adding a comment here, e.g. `// Before we sweep objects on the page, we free dead array buffers.` *(original suggestion truncated in export after "we free de"; completion inferred from the adjacent `ArrayBufferTracker::FreeDead(p)` call)* **Michael Lippautz** — 2016/06/01 14:19:19: Done. | |
| 3330 ArrayBufferTracker::FreeDead(p); | |
| 3331 | |
| 3333 Address free_start = p->area_start(); | 3332 Address free_start = p->area_start(); |
| 3334 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); | 3333 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); |
| 3335 | 3334 |
| 3336 // If we use the skip list for code space pages, we have to lock the skip | 3335 // If we use the skip list for code space pages, we have to lock the skip |
| 3337 // list because it could be accessed concurrently by the runtime or the | 3336 // list because it could be accessed concurrently by the runtime or the |
| 3338 // deoptimizer. | 3337 // deoptimizer. |
| 3339 SkipList* skip_list = p->skip_list(); | 3338 SkipList* skip_list = p->skip_list(); |
| 3340 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { | 3339 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { |
| 3341 skip_list->Clear(); | 3340 skip_list->Clear(); |
| 3342 } | 3341 } |
| (...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3522 // because root iteration traverses the stack and might have to find | 3521 // because root iteration traverses the stack and might have to find |
| 3523 // code objects from non-updated pc pointing into evacuation candidate. | 3522 // code objects from non-updated pc pointing into evacuation candidate. |
| 3524 SkipList* list = p->skip_list(); | 3523 SkipList* list = p->skip_list(); |
| 3525 if (list != NULL) list->Clear(); | 3524 if (list != NULL) list->Clear(); |
| 3526 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { | 3525 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { |
| 3527 sweeper().AddLatePage(p->owner()->identity(), p); | 3526 sweeper().AddLatePage(p->owner()->identity(), p); |
| 3528 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); | 3527 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); |
| 3529 } | 3528 } |
| 3530 } | 3529 } |
| 3531 | 3530 |
| 3532 // EvacuateNewSpaceAndCandidates iterates over new space objects and for | |
| 3533 // ArrayBuffers either re-registers them as live or promotes them. This is | |
| 3534 // needed to properly free them. | |
| 3535 heap()->array_buffer_tracker()->FreeDead(false); | |
| 3536 | |
| 3537 // Deallocate evacuated candidate pages. | 3531 // Deallocate evacuated candidate pages. |
| 3538 ReleaseEvacuationCandidates(); | 3532 ReleaseEvacuationCandidates(); |
| 3539 } | 3533 } |
| 3540 | 3534 |
| 3541 #ifdef VERIFY_HEAP | 3535 #ifdef VERIFY_HEAP |
| 3542 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { | 3536 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { |
| 3543 VerifyEvacuation(heap()); | 3537 VerifyEvacuation(heap()); |
| 3544 } | 3538 } |
| 3545 #endif | 3539 #endif |
| 3546 } | 3540 } |
| (...skipping 405 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3952 MarkBit mark_bit = Marking::MarkBitFrom(host); | 3946 MarkBit mark_bit = Marking::MarkBitFrom(host); |
| 3953 if (Marking::IsBlack(mark_bit)) { | 3947 if (Marking::IsBlack(mark_bit)) { |
| 3954 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 3948 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
| 3955 RecordRelocSlot(host, &rinfo, target); | 3949 RecordRelocSlot(host, &rinfo, target); |
| 3956 } | 3950 } |
| 3957 } | 3951 } |
| 3958 } | 3952 } |
| 3959 | 3953 |
| 3960 } // namespace internal | 3954 } // namespace internal |
| 3961 } // namespace v8 | 3955 } // namespace v8 |
| OLD | NEW |