OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
7 | 7 |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 | 10 |
(...skipping 609 matching lines...)
620 | 620 |
621 // If the call-site of the top optimized code was not prepared for | 621 // If the call-site of the top optimized code was not prepared for |
622 // deoptimization, then treat the maps in the code as strong pointers; | 622 // deoptimization, then treat the maps in the code as strong pointers; |
623 // otherwise a map can die and deoptimize the code. | 623 // otherwise a map can die and deoptimize the code. |
624 void ProcessTopOptimizedFrame(ObjectVisitor* visitor); | 624 void ProcessTopOptimizedFrame(ObjectVisitor* visitor); |
625 | 625 |
626 // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to | 626 // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to |
627 // increase the chances of reusing the map transition tree in the future. | 627 // increase the chances of reusing the map transition tree in the future. |
628 void RetainMaps(); | 628 void RetainMaps(); |
629 | 629 |
 | 630 // Collects a list of dependent code from maps embedded in optimized code. |
| 631 DependentCode* DependentCodeListFromNonLiveMaps(); |
| 632 |
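The RetainMaps() contract a few lines above can be pictured with a small standalone sketch. This is not V8's implementation: RetainedMap, its marked/age members, and RetainMapsSketch are invented for illustration (V8 keeps the retained maps and their ages in a list owned by the heap). An unmarked map that has been retained for fewer than FLAG_retain_maps_for_n_gc collections is treated as live for this cycle and its age is bumped.

  // Sketch only; RetainedMap and its fields are invented stand-ins.
  #include <vector>

  struct RetainedMap {
    bool marked = false;
    int age = 0;  // How many GCs this map has already been retained for.
  };

  // Keep an unmarked map alive while it is younger than the
  // retain_maps_for_n_gc threshold, and age it by one.
  void RetainMapsSketch(std::vector<RetainedMap*>& retained_maps,
                        int retain_maps_for_n_gc) {
    for (RetainedMap* map : retained_maps) {
      if (!map->marked && map->age < retain_maps_for_n_gc) {
        map->marked = true;
        ++map->age;
      }
    }
  }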
630 // Mark objects reachable (transitively) from objects in the marking | 633 // Mark objects reachable (transitively) from objects in the marking |
631 // stack. This function empties the marking stack, but may leave | 634 // stack. This function empties the marking stack, but may leave |
632 // overflowed objects in the heap, in which case the marking stack's | 635 // overflowed objects in the heap, in which case the marking stack's |
633 // overflow flag will be set. | 636 // overflow flag will be set. |
634 void EmptyMarkingDeque(); | 637 void EmptyMarkingDeque(); |
635 | 638 |
636 // Refill the marking stack with overflowed objects from the heap. This | 639 // Refill the marking stack with overflowed objects from the heap. This |
637 // function either leaves the marking stack full or clears the overflow | 640 // function either leaves the marking stack full or clears the overflow |
638 // flag on the marking stack. | 641 // flag on the marking stack. |
639 void RefillMarkingDeque(); | 642 void RefillMarkingDeque(); |
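As a rough model of the EmptyMarkingDeque()/RefillMarkingDeque() contract, the sketch below uses invented Obj and MarkingDeque types rather than V8's marking deque: draining stops when the deque is empty, an overflow at push time is recorded on both the object and the deque, and a refill pass rescans the heap until the deque is full again or the overflow flag can be cleared.

  // A sketch only: V8's real marking deque works over a reserved buffer of
  // HeapObject addresses; Obj and MarkingDeque here are invented.
  #include <cstddef>
  #include <deque>
  #include <vector>

  struct Obj {
    bool marked = false;
    bool overflowed = false;            // Set when the deque was full at push time.
    std::vector<Obj*> children;
  };

  struct MarkingDeque {
    std::deque<Obj*> deque_;
    std::size_t capacity_ = 4;          // Tiny on purpose, to exercise overflow.
    bool overflowed_ = false;

    void Push(Obj* o) {
      if (deque_.size() >= capacity_) { // Full: remember the object via a flag
        o->overflowed = true;           // instead of losing it.
        overflowed_ = true;
      } else {
        deque_.push_back(o);
      }
    }
  };

  // Mark everything transitively reachable from the deque. May leave
  // overflowed objects behind, with the deque's overflow flag set.
  void EmptyMarkingDeque(MarkingDeque* d) {
    while (!d->deque_.empty()) {
      Obj* o = d->deque_.back();
      d->deque_.pop_back();
      for (Obj* child : o->children) {
        if (!child->marked) {
          child->marked = true;
          d->Push(child);
        }
      }
    }
  }

  // Rescan the heap for overflowed objects and push them again. Either the
  // deque fills up again or the overflow flag ends up cleared.
  void RefillMarkingDeque(MarkingDeque* d, std::vector<Obj*>& heap) {
    d->overflowed_ = false;
    for (Obj* o : heap) {
      if (o->overflowed) {
        o->overflowed = false;
        d->Push(o);                     // May overflow again and re-flag o.
        if (d->overflowed_) return;
      }
    }
  }

A driver would alternate EmptyMarkingDeque and RefillMarkingDeque while the overflow flag stays set, which is the loop the comments above imply.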
(...skipping 10 matching lines...)
650 // heap object. | 653 // heap object. |
651 static bool IsUnmarkedHeapObject(Object** p); | 654 static bool IsUnmarkedHeapObject(Object** p); |
652 | 655 |
653 // Map transitions from a live map to a dead map must be killed. | 656 // Map transitions from a live map to a dead map must be killed. |
654 // We replace them with a null descriptor, with the same key. | 657 // We replace them with a null descriptor, with the same key. |
655 void ClearNonLiveReferences(); | 658 void ClearNonLiveReferences(); |
656 void ClearNonLivePrototypeTransitions(Map* map); | 659 void ClearNonLivePrototypeTransitions(Map* map); |
657 void ClearNonLiveMapTransitions(Map* map); | 660 void ClearNonLiveMapTransitions(Map* map); |
658 void ClearMapTransitions(Map* map, Map* dead_transition); | 661 void ClearMapTransitions(Map* map, Map* dead_transition); |
659 bool ClearMapBackPointer(Map* map); | 662 bool ClearMapBackPointer(Map* map); |
| 663 void MarkDependentCodeListForDeoptimization(DependentCode* list_head); |
660 void TrimDescriptorArray(Map* map, DescriptorArray* descriptors, | 664 void TrimDescriptorArray(Map* map, DescriptorArray* descriptors, |
661 int number_of_own_descriptors); | 665 int number_of_own_descriptors); |
662 void TrimEnumCache(Map* map, DescriptorArray* descriptors); | 666 void TrimEnumCache(Map* map, DescriptorArray* descriptors); |
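Illustrative sketch of the transition-clearing idea described above (MapStub and its transitions vector are invented; V8 reaches child maps through a TransitionArray, not a std::vector): killing a transition from a live map to a dead map means compacting the dead targets out of the parent's transition list so later lookups cannot reach a collected map.

  #include <algorithm>
  #include <vector>

  struct MapStub {
    bool live = false;
    std::vector<MapStub*> transitions;  // Child maps reachable by transition.
  };

  // Compact dead targets out of a live parent's transition list so that no
  // lookup can reach a collected map afterwards.
  void ClearNonLiveMapTransitionsSketch(MapStub* parent) {
    auto& t = parent->transitions;
    t.erase(std::remove_if(t.begin(), t.end(),
                           [](MapStub* child) { return !child->live; }),
            t.end());
  }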
663 | 667 |
664 // Mark all values associated with reachable keys in weak collections | 668 // Mark all values associated with reachable keys in weak collections |
665 // encountered so far. This might push new objects or even new weak maps onto | 669 // encountered so far. This might push new objects or even new weak maps onto |
666 // the marking stack. | 670 // the marking stack. |
667 void ProcessWeakCollections(); | 671 void ProcessWeakCollections(); |
668 | 672 |
669 // After all reachable objects have been marked, those weak map entries | 673 // After all reachable objects have been marked, those weak map entries |
670 // with an unreachable key are removed from all encountered weak maps. | 674 // with an unreachable key are removed from all encountered weak maps. |
671 // The linked list of all encountered weak maps is destroyed. | 675 // The linked list of all encountered weak maps is destroyed. |
672 void ClearWeakCollections(); | 676 void ClearWeakCollections(); |
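The two declarations above describe an ephemeron-style rule; the sketch below is not V8's code (Object, WeakCollection, and the std containers stand in for tagged heap objects, mark bits, and the encountered-weak-collections list). A value is marked only once its key is marked, the caller iterates to a fixed point because newly marked values can make further keys reachable, and clearing afterwards drops entries whose key never became reachable.

  #include <unordered_map>
  #include <unordered_set>
  #include <vector>

  using Object = int;  // Stand-in for a heap object identity.

  struct WeakCollection {
    std::unordered_map<Object, Object> entries;  // key -> value
  };

  // Mark values whose keys are already marked. Returns whether anything new
  // was marked, so the caller can iterate to a fixed point.
  bool ProcessWeakCollectionsSketch(std::vector<WeakCollection*>& encountered,
                                    std::unordered_set<Object>& marked) {
    bool changed = false;
    for (WeakCollection* wc : encountered) {
      for (auto& kv : wc->entries) {
        if (marked.count(kv.first) != 0 && marked.count(kv.second) == 0) {
          marked.insert(kv.second);   // Real code pushes onto the marking deque.
          changed = true;
        }
      }
    }
    return changed;
  }

  // After marking is complete, drop entries whose key is unreachable and
  // forget the encountered collections (the "linked list is destroyed").
  void ClearWeakCollectionsSketch(std::vector<WeakCollection*>& encountered,
                                  const std::unordered_set<Object>& marked) {
    for (WeakCollection* wc : encountered) {
      for (auto it = wc->entries.begin(); it != wc->entries.end();) {
        if (marked.count(it->first) != 0) ++it;
        else it = wc->entries.erase(it);
      }
    }
    encountered.clear();
  }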
673 | 677 |
674 // We have to remove all encountered weak maps from the list of weak | 678 // We have to remove all encountered weak maps from the list of weak |
675 // collections when incremental marking is aborted. | 679 // collections when incremental marking is aborted. |
676 void AbortWeakCollections(); | 680 void AbortWeakCollections(); |
677 | 681 |
678 void ProcessAndClearWeakCells(); | 682 void ProcessAndClearWeakCells(); |
679 void AbortWeakCells(); | 683 void AbortWeakCells(); |
680 | 684 |
681 // After all reachable objects have been marked, those entries within | 685 // After all reachable objects have been marked, those entries within |
682 // optimized code maps that became unreachable are removed, potentially | 686 // optimized code maps that became unreachable are removed, potentially |
683 // trimming or clearing out the entire optimized code map. | 687 // trimming or clearing out the entire optimized code map. |
684 void ProcessAndClearOptimizedCodeMaps(); | 688 void ProcessAndClearOptimizedCodeMaps(); |
685 | 689 |
| 690 // Process non-live references in maps and optimized code. |
| 691 void ProcessWeakReferences(); |
| 692 |
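For ProcessAndClearOptimizedCodeMaps(), the trimming rule can be sketched with invented stand-in types (CodeStub and SharedFunctionInfoStub are not the real Code and SharedFunctionInfo classes): entries whose code object did not survive marking are trimmed, and a map that ends up empty has effectively been cleared out entirely.

  #include <unordered_map>
  #include <vector>

  struct CodeStub { bool marked = false; };

  struct SharedFunctionInfoStub {
    std::unordered_map<int, CodeStub*> optimized_code_map;  // e.g. id -> code
  };

  // Trim entries whose code object did not survive marking.
  void ProcessAndClearOptimizedCodeMapsSketch(
      std::vector<SharedFunctionInfoStub*>& candidates) {
    for (SharedFunctionInfoStub* shared : candidates) {
      auto& map = shared->optimized_code_map;
      for (auto it = map.begin(); it != map.end();) {
        if (it->second->marked) ++it;
        else it = map.erase(it);
      }
    }
  }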
686 // ----------------------------------------------------------------------- | 693 // ----------------------------------------------------------------------- |
687 // Phase 2: Sweeping to clear mark bits and free non-live objects for | 694 // Phase 2: Sweeping to clear mark bits and free non-live objects for |
688 // a non-compacting collection. | 695 // a non-compacting collection. |
689 // | 696 // |
690 // Before: Live objects are marked and non-live objects are unmarked. | 697 // Before: Live objects are marked and non-live objects are unmarked. |
691 // | 698 // |
692 // After: Live objects are unmarked, non-live regions have been added to | 699 // After: Live objects are unmarked, non-live regions have been added to |
693 // their space's free list. Active eden semispace is compacted by | 700 // their space's free list. Active eden semispace is compacted by |
694 // evacuation. | 701 // evacuation. |
695 // | 702 // |
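The Phase 2 description above amounts to a single linear pass over each page. Here is a toy sketch (Slot, FreeRegion, and SweepPageSketch are invented; real sweeping walks mark bitmaps over pages of raw memory, not a std::vector of records): live objects get their mark bit cleared in place, and each maximal run of dead objects becomes one entry on the owning space's free list.

  #include <cstddef>
  #include <vector>

  struct Slot {
    bool marked = false;   // Mark bit left behind by the marking phase.
  };

  struct FreeRegion {
    std::size_t start;     // Index of the first dead slot in the run.
    std::size_t length;    // Number of contiguous dead slots.
  };

  // One pass: clear mark bits on live slots and turn each maximal run of
  // dead slots into a single free-list entry.
  std::vector<FreeRegion> SweepPageSketch(std::vector<Slot>& page) {
    std::vector<FreeRegion> free_list;
    std::size_t i = 0;
    while (i < page.size()) {
      if (page[i].marked) {
        page[i].marked = false;
        ++i;
      } else {
        std::size_t start = i;
        while (i < page.size() && !page[i].marked) ++i;
        free_list.push_back({start, i - start});
      }
    }
    return free_list;
  }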
(...skipping 161 matching lines...)
857 private: | 864 private: |
858 MarkCompactCollector* collector_; | 865 MarkCompactCollector* collector_; |
859 }; | 866 }; |
860 | 867 |
861 | 868 |
862 const char* AllocationSpaceName(AllocationSpace space); | 869 const char* AllocationSpaceName(AllocationSpace space); |
863 } // namespace internal | 870 } // namespace internal |
864 } // namespace v8 | 871 } // namespace v8 |
865 | 872 |
866 #endif // V8_HEAP_MARK_COMPACT_H_ | 873 #endif // V8_HEAP_MARK_COMPACT_H_ |