| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
| 6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
| 7 | 7 |
| 8 #include <deque> | 8 #include <deque> |
| 9 | 9 |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 287 matching lines...) |
| 298 virtual void SetUp() = 0; | 298 virtual void SetUp() = 0; |
| 299 virtual void TearDown() = 0; | 299 virtual void TearDown() = 0; |
| 300 virtual void CollectGarbage() = 0; | 300 virtual void CollectGarbage() = 0; |
| 301 | 301 |
| 302 inline Heap* heap() const { return heap_; } | 302 inline Heap* heap() const { return heap_; } |
| 303 inline Isolate* isolate() { return heap()->isolate(); } | 303 inline Isolate* isolate() { return heap()->isolate(); } |
| 304 | 304 |
| 305 protected: | 305 protected: |
| 306 explicit MarkCompactCollectorBase(Heap* heap) : heap_(heap) {} | 306 explicit MarkCompactCollectorBase(Heap* heap) : heap_(heap) {} |
| 307 | 307 |
| 308 // Marking operations for objects reachable from roots. |
| 308 virtual void MarkLiveObjects() = 0; | 309 virtual void MarkLiveObjects() = 0; |
| 310 // Mark objects reachable (transitively) from objects in the marking |
| 311 // stack. |
| 312 virtual void EmptyMarkingDeque() = 0; |
| 313 virtual void ProcessMarkingDeque() = 0; |
| 314 // Clear non-live references held in side data structures. |
| 315 virtual void ClearNonLiveReferences() = 0; |
| 316 virtual void EvacuatePrologue() = 0; |
| 317 virtual void EvacuateEpilogue() = 0; |
| 318 virtual void Evacuate() = 0; |
| 319 virtual void EvacuatePagesInParallel() = 0; |
| 320 virtual void UpdatePointersAfterEvacuation() = 0; |
| 309 | 321 |
| 310 // The number of parallel compaction tasks, including the main thread. | 322 // The number of parallel compaction tasks, including the main thread. |
| 311 int NumberOfParallelCompactionTasks(int pages, intptr_t live_bytes); | 323 int NumberOfParallelCompactionTasks(int pages, intptr_t live_bytes); |
| 312 | 324 |
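
Note: the declaration above only promises that the count includes the main thread. As a rough illustration of the kind of heuristic such a function might use (an assumption for illustration, not V8's actual formula), one could bound the task count by the number of pages, the volume of live data, and the available cores:

```cpp
// Hypothetical heuristic, NOT V8's actual implementation: bound the number of
// compaction tasks by pages to process, live data volume, and core count.
#include <algorithm>
#include <cstdint>
#include <thread>

int EstimateCompactionTasks(int pages, intptr_t live_bytes) {
  // Assumed granularity: roughly one task per 4 MB of live data.
  const intptr_t kLiveBytesPerTask = intptr_t{4} * 1024 * 1024;
  int by_live_bytes = static_cast<int>(live_bytes / kLiveBytesPerTask) + 1;
  int cores = static_cast<int>(std::thread::hardware_concurrency());
  if (cores == 0) cores = 1;  // hardware_concurrency() may report 0
  // Always at least the main thread; never more tasks than pages or cores.
  return std::max(1, std::min({pages, by_live_bytes, cores}));
}
```
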
| 313 template <class Evacuator, class Collector> | 325 template <class Evacuator, class Collector> |
| 314 void CreateAndExecuteEvacuationTasks( | 326 void CreateAndExecuteEvacuationTasks( |
| 315 Collector* collector, PageParallelJob<EvacuationJobTraits>* job, | 327 Collector* collector, PageParallelJob<EvacuationJobTraits>* job, |
| 316 RecordMigratedSlotVisitor* record_visitor, const intptr_t live_bytes, | 328 RecordMigratedSlotVisitor* record_visitor, |
| 329 MigrationObserver* migration_observer, const intptr_t live_bytes, |
| 317 const int& abandoned_pages); | 330 const int& abandoned_pages); |
| 318 | 331 |
| 319 Heap* heap_; | 332 Heap* heap_; |
| 320 }; | 333 }; |
| 321 | 334 |
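
Note: the pure virtual hooks added to MarkCompactCollectorBase in this patch turn the base class into a template for the whole collection cycle, with each collector supplying its own phase implementations. A minimal sketch of that shape (the driver order here is an illustrative assumption, not V8's exact flow):

```cpp
// Sketch of the template-method shape implied by MarkCompactCollectorBase.
#include <cstdio>

class CollectorPhases {
 public:
  virtual ~CollectorPhases() = default;

  // Drives one full collection by calling the per-collector phase hooks.
  void CollectGarbage() {
    MarkLiveObjects();                // trace from roots, drain marking deque
    ClearNonLiveReferences();         // drop weak data held for dead objects
    EvacuatePrologue();               // prepare spaces/pages for evacuation
    EvacuatePagesInParallel();        // move live objects off evacuated pages
    UpdatePointersAfterEvacuation();  // fix slots to point at new locations
    EvacuateEpilogue();               // release the now-empty pages
  }

 protected:
  virtual void MarkLiveObjects() = 0;
  virtual void ClearNonLiveReferences() = 0;
  virtual void EvacuatePrologue() = 0;
  virtual void EvacuatePagesInParallel() = 0;
  virtual void UpdatePointersAfterEvacuation() = 0;
  virtual void EvacuateEpilogue() = 0;
};

// Toy collector that only logs its phases, to show how an override plugs in.
class LoggingCollector final : public CollectorPhases {
 protected:
  void MarkLiveObjects() override { std::puts("mark"); }
  void ClearNonLiveReferences() override { std::puts("clear weak refs"); }
  void EvacuatePrologue() override { std::puts("evacuate: prologue"); }
  void EvacuatePagesInParallel() override { std::puts("evacuate: copy"); }
  void UpdatePointersAfterEvacuation() override { std::puts("update slots"); }
  void EvacuateEpilogue() override { std::puts("evacuate: epilogue"); }
};

int main() {
  LoggingCollector collector;
  collector.CollectGarbage();  // prints the phases in order
}
```

Pulling the phase interface into the base class is what lets the minor and full collectors below share driver code while overriding each step.
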
| 322 // Collector for young-generation only. | 335 // Collector for young-generation only. |
| 323 class MinorMarkCompactCollector final : public MarkCompactCollectorBase { | 336 class MinorMarkCompactCollector final : public MarkCompactCollectorBase { |
| 324 public: | 337 public: |
| 325 explicit MinorMarkCompactCollector(Heap* heap) | 338 explicit MinorMarkCompactCollector(Heap* heap) |
| 326 : MarkCompactCollectorBase(heap), marking_deque_(heap) {} | 339 : MarkCompactCollectorBase(heap), |
| 340 marking_deque_(heap), |
| 341 page_parallel_job_semaphore_(0) {} |
| 327 | 342 |
| 328 MarkingState marking_state(HeapObject* object) const override { | 343 MarkingState marking_state(HeapObject* object) const override { |
| 329 return MarkingState::External(object); | 344 return MarkingState::External(object); |
| 330 } | 345 } |
| 331 | 346 |
| 332 MarkingState marking_state(MemoryChunk* chunk) const override { | 347 MarkingState marking_state(MemoryChunk* chunk) const override { |
| 333 return MarkingState::External(chunk); | 348 return MarkingState::External(chunk); |
| 334 } | 349 } |
| 335 | 350 |
| 336 void SetUp() override; | 351 void SetUp() override; |
| 337 void TearDown() override; | 352 void TearDown() override; |
| 338 void CollectGarbage() override; | 353 void CollectGarbage() override; |
| 339 | 354 |
| 340 private: | 355 private: |
| 341 class RootMarkingVisitor; | 356 class RootMarkingVisitor; |
| 342 | 357 |
| 343 inline MarkingDeque* marking_deque() { return &marking_deque_; } | 358 inline MarkingDeque* marking_deque() { return &marking_deque_; } |
| 344 | 359 |
| 345 V8_INLINE void MarkObject(HeapObject* obj); | 360 V8_INLINE void MarkObject(HeapObject* obj); |
| 346 V8_INLINE void PushBlack(HeapObject* obj); | 361 V8_INLINE void PushBlack(HeapObject* obj); |
| 347 | 362 |
| 348 SlotCallbackResult CheckAndMarkObject(Heap* heap, Address slot_address); | 363 SlotCallbackResult CheckAndMarkObject(Heap* heap, Address slot_address); |
| 349 void MarkLiveObjects() override; | 364 void MarkLiveObjects() override; |
| 350 void ProcessMarkingDeque(); | 365 void ProcessMarkingDeque() override; |
| 351 void EmptyMarkingDeque(); | 366 void EmptyMarkingDeque() override; |
| 367 void ClearNonLiveReferences() override; |
| 368 |
| 369 void EvacuatePrologue() override; |
| 370 void EvacuateEpilogue() override; |
| 371 void Evacuate() override; |
| 372 void EvacuatePagesInParallel() override; |
| 373 void UpdatePointersAfterEvacuation() override; |
| 352 | 374 |
| 353 MarkingDeque marking_deque_; | 375 MarkingDeque marking_deque_; |
| 376 base::Semaphore page_parallel_job_semaphore_; |
| 377 List<Page*> new_space_evacuation_pages_; |
| 354 | 378 |
| 355 friend class StaticYoungGenerationMarkingVisitor; | 379 friend class StaticYoungGenerationMarkingVisitor; |
| 356 }; | 380 }; |
| 357 | 381 |
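
Note: CheckAndMarkObject above is a remembered-set slot callback. A simplified model of what such a callback conceptually does (all types and helpers here are invented for illustration, not V8's API):

```cpp
// Simplified model: mark young-generation targets and keep only slots that
// will still need updating after evacuation.
#include <deque>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

struct ToyObject {
  bool in_young_generation = false;
  bool marked = false;
};

struct ToyMinorCollector {
  std::deque<ToyObject*> marking_deque;

  SlotCallbackResult CheckAndMarkObject(ToyObject** slot) {
    ToyObject* object = *slot;
    if (object == nullptr || !object->in_young_generation) {
      // Old-generation targets are not the minor collector's concern.
      return REMOVE_SLOT;
    }
    if (!object->marked) {
      object->marked = true;            // newly discovered live object
      marking_deque.push_back(object);  // visit its fields later
    }
    // The slot still points into the young generation, so keep it: it must
    // be rewritten once the object is evacuated.
    return KEEP_SLOT;
  }
};
```
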
| 358 // Collector for young and old generation. | 382 // Collector for young and old generation. |
| 359 class MarkCompactCollector final : public MarkCompactCollectorBase { | 383 class MarkCompactCollector final : public MarkCompactCollectorBase { |
| 360 public: | 384 public: |
| 361 class RootMarkingVisitor; | 385 class RootMarkingVisitor; |
| 362 | 386 |
| 363 class Sweeper { | 387 class Sweeper { |
| (...skipping 186 matching lines...) |
| 550 | 574 |
| 551 // Finishes GC, performs heap verification if enabled. | 575 // Finishes GC, performs heap verification if enabled. |
| 552 void Finish(); | 576 void Finish(); |
| 553 | 577 |
| 554 // Mark code objects that are active on the stack to prevent them | 578 // Mark code objects that are active on the stack to prevent them |
| 555 // from being flushed. | 579 // from being flushed. |
| 556 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); | 580 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); |
| 557 | 581 |
| 558 void PrepareForCodeFlushing(); | 582 void PrepareForCodeFlushing(); |
| 559 | 583 |
| 560 // Marking operations for objects reachable from roots. | |
| 561 void MarkLiveObjects() override; | 584 void MarkLiveObjects() override; |
| 562 | 585 |
| 563 // Pushes a black object onto the marking stack and accounts for live bytes. | 586 // Pushes a black object onto the marking stack and accounts for live bytes. |
| 564 // Note that this assumes live bytes have not yet been counted. | 587 // Note that this assumes live bytes have not yet been counted. |
| 565 V8_INLINE void PushBlack(HeapObject* obj); | 588 V8_INLINE void PushBlack(HeapObject* obj); |
| 566 | 589 |
| 567 // Unshifts a black object into the marking stack and accounts for live bytes. | 590 // Unshifts a black object into the marking stack and accounts for live bytes. |
| 568 // Note that this assumes live bytes have already been counted. | 591 // Note that this assumes live bytes have already been counted. |
| 569 V8_INLINE void UnshiftBlack(HeapObject* obj); | 592 V8_INLINE void UnshiftBlack(HeapObject* obj); |
| 570 | 593 |
| 571 // Marks the object black and pushes it on the marking stack. | 594 // Marks the object black and pushes it on the marking stack. |
| 572 // This is for non-incremental marking only. | 595 // This is for non-incremental marking only. |
| 573 V8_INLINE void MarkObject(HeapObject* obj); | 596 V8_INLINE void MarkObject(HeapObject* obj); |
| 574 | 597 |
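
Note: the PushBlack/UnshiftBlack comments draw a subtle distinction: pushing accounts for an object's live bytes, while unshifting assumes they were already counted (the object has been on the deque before). A toy deque makes the difference concrete (names and layout are illustrative, not V8's MarkingDeque):

```cpp
// Toy illustration of the live-byte accounting difference between pushing a
// newly-marked object and re-inserting one that was already counted.
#include <cstddef>
#include <deque>

struct ToyHeapObject {
  size_t size;
};

class ToyMarkingDeque {
 public:
  // Newly discovered object: count its size toward live bytes, then queue it.
  void PushBlack(ToyHeapObject* obj) {
    live_bytes_ += obj->size;
    deque_.push_back(obj);
  }

  // Re-inserted object: its size was already counted, so only the queue
  // is touched; counting again would inflate the live-byte statistics.
  void UnshiftBlack(ToyHeapObject* obj) { deque_.push_front(obj); }

  size_t live_bytes() const { return live_bytes_; }

 private:
  std::deque<ToyHeapObject*> deque_;
  size_t live_bytes_ = 0;
};
```
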
| 575 // Mark the heap roots and all objects reachable from them. | 598 // Mark the heap roots and all objects reachable from them. |
| 576 void MarkRoots(RootMarkingVisitor* visitor); | 599 void MarkRoots(RootMarkingVisitor* visitor); |
| 577 | 600 |
| 578 // Mark the string table specially. References to internalized strings from | 601 // Mark the string table specially. References to internalized strings from |
| 579 // the string table are weak. | 602 // the string table are weak. |
| 580 void MarkStringTable(RootMarkingVisitor* visitor); | 603 void MarkStringTable(RootMarkingVisitor* visitor); |
| 581 | 604 |
| 582 // Mark objects reachable (transitively) from objects in the marking stack | 605 void ProcessMarkingDeque() override; |
| 583 // or overflowed in the heap. | |
| 584 void ProcessMarkingDeque(); | |
| 585 | 606 |
| 586 // Mark objects reachable (transitively) from objects in the marking stack | 607 // Mark objects reachable (transitively) from objects in the marking stack |
| 587 // or overflowed in the heap. This respects references only considered in | 608 // or overflowed in the heap. This respects references only considered in |
| 588 // the final atomic marking pause including the following: | 609 // the final atomic marking pause including the following: |
| 589 // - Processing of objects reachable through Harmony WeakMaps. | 610 // - Processing of objects reachable through Harmony WeakMaps. |
| 590 // - Objects reachable due to host application logic like object groups, | 611 // - Objects reachable due to host application logic like object groups, |
| 591 // implicit references' groups, or embedder heap tracing. | 612 // implicit references' groups, or embedder heap tracing. |
| 592 void ProcessEphemeralMarking(bool only_process_harmony_weak_collections); | 613 void ProcessEphemeralMarking(bool only_process_harmony_weak_collections); |
| 593 | 614 |
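
Note: ProcessEphemeralMarking is where WeakMap (ephemeron) semantics come in: a value is live only if both the table and the key are live, which forces iterating to a fixed point. A self-contained sketch of that fixed-point loop, with invented types (not V8's algorithm verbatim):

```cpp
// Fixed-point ephemeron marking sketch: a WeakMap value becomes live only
// once its key has been marked, and marking it may enable further entries.
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct Obj {};  // stand-in for a heap object

using Ephemerons = std::unordered_map<Obj*, Obj*>;  // key -> value

// Marks values whose keys are already marked; repeats until nothing changes.
void ProcessEphemerons(const std::vector<Ephemerons*>& weak_maps,
                       std::unordered_set<Obj*>* marked) {
  bool changed = true;
  while (changed) {
    changed = false;
    for (Ephemerons* table : weak_maps) {
      for (const auto& entry : *table) {
        Obj* key = entry.first;
        Obj* value = entry.second;
        if (marked->count(key) && !marked->count(value)) {
          marked->insert(value);  // reachable through a live key
          changed = true;         // it may in turn act as a key elsewhere
        }
      }
    }
  }
}
```
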
| 594 // If the call-site of the top optimized code was not prepared for | 615 // If the call-site of the top optimized code was not prepared for |
| 595 // deoptimization, then treat the maps in the code as strong pointers, | 616 // deoptimization, then treat the maps in the code as strong pointers, |
| 596 // otherwise a map can die and deoptimize the code. | 617 // otherwise a map can die and deoptimize the code. |
| 597 void ProcessTopOptimizedFrame(RootMarkingVisitor* visitor); | 618 void ProcessTopOptimizedFrame(RootMarkingVisitor* visitor); |
| 598 | 619 |
| 599 // Collects a list of dependent code from maps embedded in optimized code. | 620 // Collects a list of dependent code from maps embedded in optimized code. |
| 600 DependentCode* DependentCodeListFromNonLiveMaps(); | 621 DependentCode* DependentCodeListFromNonLiveMaps(); |
| 601 | 622 |
| 602 // Mark objects reachable (transitively) from objects in the marking | 623 // This function empties the marking stack, but may leave overflowed objects |
| 603 // stack. This function empties the marking stack, but may leave | 624 // in the heap, in which case the marking stack's overflow flag will be set. |
| 604 // overflowed objects in the heap, in which case the marking stack's | 625 void EmptyMarkingDeque() override; |
| 605 // overflow flag will be set. | |
| 606 void EmptyMarkingDeque(); | |
| 607 | 626 |
| 608 // Refill the marking stack with overflowed objects from the heap. This | 627 // Refill the marking stack with overflowed objects from the heap. This |
| 609 // function either leaves the marking stack full or clears the overflow | 628 // function either leaves the marking stack full or clears the overflow |
| 610 // flag on the marking stack. | 629 // flag on the marking stack. |
| 611 void RefillMarkingDeque(); | 630 void RefillMarkingDeque(); |
| 612 | 631 |
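
Note: EmptyMarkingDeque and RefillMarkingDeque together implement the overflow protocol described above: when the fixed-size deque cannot accept an object, the object is flagged in the heap instead, and processing alternates draining and rescanning until no overflow remains. A self-contained toy of that protocol (not V8's implementation; names are analogues):

```cpp
// Bounded worklist that spills by flagging objects in the "heap", plus the
// drain/refill loop used when it spills.
#include <cstddef>
#include <vector>

struct ToyObj {
  bool marked = false;
  bool overflowed = false;  // flagged in place when the deque was full
  std::vector<ToyObj*> fields;
};

struct ToyDeque {
  std::vector<ToyObj*> items;
  size_t capacity = 4;
  bool overflowed = false;

  void Push(ToyObj* o) {
    if (items.size() < capacity) items.push_back(o);
    else { o->overflowed = true; overflowed = true; }  // spill: flag in heap
  }
};

// EmptyMarkingDeque analogue: visit queued objects and mark their fields.
void Drain(ToyDeque* deque) {
  while (!deque->items.empty()) {
    ToyObj* o = deque->items.back();
    deque->items.pop_back();
    for (ToyObj* field : o->fields) {
      if (!field->marked) { field->marked = true; deque->Push(field); }
    }
  }
}

// RefillMarkingDeque analogue: rediscover objects that were flagged in place.
void Refill(ToyDeque* deque, std::vector<ToyObj*>* heap) {
  for (ToyObj* o : *heap) {
    if (o->overflowed) { o->overflowed = false; deque->Push(o); }
  }
}

// ProcessMarkingDeque analogue: drain, then keep refilling while spills occur.
void Process(ToyDeque* deque, std::vector<ToyObj*>* heap) {
  Drain(deque);
  while (deque->overflowed) {
    deque->overflowed = false;
    Refill(deque, heap);
    Drain(deque);
  }
}
```
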
| 613 // Helper methods for refilling the marking stack by discovering grey objects | 632 // Helper methods for refilling the marking stack by discovering grey objects |
| 614 // on various pages of the heap. Used by {RefillMarkingDeque} only. | 633 // on various pages of the heap. Used by {RefillMarkingDeque} only. |
| 615 template <class T> | 634 template <class T> |
| 616 void DiscoverGreyObjectsWithIterator(T* it); | 635 void DiscoverGreyObjectsWithIterator(T* it); |
| 617 void DiscoverGreyObjectsOnPage(MemoryChunk* p); | 636 void DiscoverGreyObjectsOnPage(MemoryChunk* p); |
| 618 void DiscoverGreyObjectsInSpace(PagedSpace* space); | 637 void DiscoverGreyObjectsInSpace(PagedSpace* space); |
| 619 void DiscoverGreyObjectsInNewSpace(); | 638 void DiscoverGreyObjectsInNewSpace(); |
| 620 | 639 |
| 621 // Callback function for telling whether the object *p is an unmarked | 640 // Callback function for telling whether the object *p is an unmarked |
| 622 // heap object. | 641 // heap object. |
| 623 static bool IsUnmarkedHeapObject(Object** p); | 642 static bool IsUnmarkedHeapObject(Object** p); |
| 624 | 643 |
| 625 // Clear non-live references in weak cells, transition and descriptor arrays, | 644 // Clear non-live references in weak cells, transition and descriptor arrays, |
| 626 // and deoptimize dependent code of non-live maps. | 645 // and deoptimize dependent code of non-live maps. |
| 627 void ClearNonLiveReferences(); | 646 void ClearNonLiveReferences() override; |
| 628 void MarkDependentCodeForDeoptimization(DependentCode* list); | 647 void MarkDependentCodeForDeoptimization(DependentCode* list); |
| 629 // Find non-live targets of simple transitions in the given list. Clear | 648 // Find non-live targets of simple transitions in the given list. Clear |
| 630 // transitions to non-live targets and if needed trim descriptors arrays. | 649 // transitions to non-live targets and if needed trim descriptors arrays. |
| 631 void ClearSimpleMapTransitions(Object* non_live_map_list); | 650 void ClearSimpleMapTransitions(Object* non_live_map_list); |
| 632 void ClearSimpleMapTransition(Map* map, Map* dead_transition); | 651 void ClearSimpleMapTransition(Map* map, Map* dead_transition); |
| 633 // Compact every array in the global list of transition arrays and | 652 // Compact every array in the global list of transition arrays and |
| 634 // trim the corresponding descriptor array if a transition target is non-live. | 653 // trim the corresponding descriptor array if a transition target is non-live. |
| 635 void ClearFullMapTransitions(); | 654 void ClearFullMapTransitions(); |
| 636 bool CompactTransitionArray(Map* map, TransitionArray* transitions, | 655 bool CompactTransitionArray(Map* map, TransitionArray* transitions, |
| 637 DescriptorArray* descriptors); | 656 DescriptorArray* descriptors); |
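
Note: CompactTransitionArray drops transitions whose target map died and trims the array accordingly. A generic in-place compaction sketch of the same idea, with invented types standing in for maps and transition arrays:

```cpp
// Keep entries whose target is still live, then trim the array; returns
// whether anything was removed (loosely mirroring CompactTransitionArray).
#include <vector>

struct ToyMap {
  bool live = false;
};

struct ToyTransition {
  ToyMap* target = nullptr;
};

bool CompactTransitions(std::vector<ToyTransition>* transitions) {
  size_t write = 0;
  for (size_t read = 0; read < transitions->size(); ++read) {
    if ((*transitions)[read].target->live) {
      (*transitions)[write++] = (*transitions)[read];  // keep live target
    }
  }
  bool removed_any = write != transitions->size();
  transitions->resize(write);  // "trim" the array to its live entries
  return removed_any;
}
```
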
| (...skipping 18 matching lines...) |
| 656 DependentCode** dependent_code_list); | 675 DependentCode** dependent_code_list); |
| 657 void AbortWeakCells(); | 676 void AbortWeakCells(); |
| 658 | 677 |
| 659 void AbortTransitionArrays(); | 678 void AbortTransitionArrays(); |
| 660 | 679 |
| 661 // Starts sweeping of spaces by contributing on the main thread and setting | 680 // Starts sweeping of spaces by contributing on the main thread and setting |
| 662 // up other pages for sweeping. Does not start sweeper tasks. | 681 // up other pages for sweeping. Does not start sweeper tasks. |
| 663 void StartSweepSpaces(); | 682 void StartSweepSpaces(); |
| 664 void StartSweepSpace(PagedSpace* space); | 683 void StartSweepSpace(PagedSpace* space); |
| 665 | 684 |
| 666 void EvacuatePrologue(); | 685 void EvacuatePrologue() override; |
| 667 void EvacuateEpilogue(); | 686 void EvacuateEpilogue() override; |
| 668 void EvacuatePagesInParallel(); | 687 void Evacuate() override; |
| 669 | 688 void EvacuatePagesInParallel() override; |
| 670 void EvacuateNewSpaceAndCandidates(); | 689 void UpdatePointersAfterEvacuation() override; |
| 671 | |
| 672 void UpdatePointersAfterEvacuation(); | |
| 673 | 690 |
| 674 void ReleaseEvacuationCandidates(); | 691 void ReleaseEvacuationCandidates(); |
| 675 | 692 |
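
Note: EvacuatePagesInParallel splits the evacuation-candidate pages across evacuator tasks plus the main thread. A sketch of that partitioning with raw std::thread (illustrative only; V8 drives this through its PageParallelJob machinery rather than bare threads):

```cpp
// Split evacuation work across worker threads, with the main thread taking
// the first share of pages.
#include <cstddef>
#include <thread>
#include <vector>

struct ToyPage {
  size_t live_bytes = 0;
  bool evacuated = false;
};

// Stand-in for an Evacuator: processes every page assigned to it.
void EvacuateChunk(std::vector<ToyPage*>* pages, size_t begin, size_t end) {
  for (size_t i = begin; i < end; ++i) {
    (*pages)[i]->evacuated = true;  // real code would copy live objects here
  }
}

void EvacuatePagesInParallelSketch(std::vector<ToyPage*>* pages, int tasks) {
  if (tasks < 1) tasks = 1;
  const size_t n = pages->size();
  const size_t per_task = (n + tasks - 1) / tasks;  // ceiling division
  std::vector<std::thread> workers;
  for (int t = 1; t < tasks; ++t) {  // tasks 1..tasks-1 run on worker threads
    size_t begin = t * per_task;
    if (begin >= n) break;
    size_t end = begin + per_task < n ? begin + per_task : n;
    workers.emplace_back(EvacuateChunk, pages, begin, end);
  }
  EvacuateChunk(pages, 0, per_task < n ? per_task : n);  // main thread's share
  for (std::thread& w : workers) w.join();
}
```
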
| 676 base::Semaphore page_parallel_job_semaphore_; | 693 base::Semaphore page_parallel_job_semaphore_; |
| 677 | 694 |
| 678 #ifdef DEBUG | 695 #ifdef DEBUG |
| 679 enum CollectorState { | 696 enum CollectorState { |
| 680 IDLE, | 697 IDLE, |
| 681 PREPARE_GC, | 698 PREPARE_GC, |
| 682 MARK_LIVE_OBJECTS, | 699 MARK_LIVE_OBJECTS, |
| (...skipping 52 matching lines...) |
| 735 ~EvacuationScope() { collector_->set_evacuation(false); } | 752 ~EvacuationScope() { collector_->set_evacuation(false); } |
| 736 | 753 |
| 737 private: | 754 private: |
| 738 MarkCompactCollector* collector_; | 755 MarkCompactCollector* collector_; |
| 739 }; | 756 }; |
| 740 | 757 |
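
Note: EvacuationScope (its constructor sits in the elided lines above) is a plain RAII guard: set a flag on construction, clear it in the destructor. A standalone version of the same pattern, with a toy collector in place of MarkCompactCollector:

```cpp
// Minimal RAII guard in the style of EvacuationScope; the real class toggles
// MarkCompactCollector's evacuation flag.
class ToyCollector {
 public:
  void set_evacuation(bool value) { evacuation_ = value; }
  bool evacuation() const { return evacuation_; }

 private:
  bool evacuation_ = false;
};

class EvacuationScopeSketch {
 public:
  explicit EvacuationScopeSketch(ToyCollector* collector)
      : collector_(collector) {
    collector_->set_evacuation(true);  // flag holds for the scope's lifetime
  }
  ~EvacuationScopeSketch() { collector_->set_evacuation(false); }

 private:
  ToyCollector* collector_;
};

// Usage: the flag is true exactly while a scope object is alive, so callers
// cannot forget to reset it on early returns or exceptions.
```
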
| 741 } // namespace internal | 758 } // namespace internal |
| 742 } // namespace v8 | 759 } // namespace v8 |
| 743 | 760 |
| 744 #endif // V8_HEAP_MARK_COMPACT_H_ | 761 #endif // V8_HEAP_MARK_COMPACT_H_ |