OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
7 | 7 |
8 #include <deque> | 8 #include <deque> |
9 | 9 |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 62 matching lines...)
73 return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj, state)); | 73 return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj, state)); |
74 } | 74 } |
75 | 75 |
76 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> | 76 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> |
77 V8_INLINE static bool BlackToGrey(HeapObject* obj, | 77 V8_INLINE static bool BlackToGrey(HeapObject* obj, |
78 const MarkingState& state) { | 78 const MarkingState& state) { |
79 DCHECK( | 79 DCHECK( |
80 (access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj, state))); | 80 (access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj, state))); |
81 MarkBit markbit = MarkBitFrom(obj, state); | 81 MarkBit markbit = MarkBitFrom(obj, state); |
82 if (!Marking::BlackToGrey<access_mode>(markbit)) return false; | 82 if (!Marking::BlackToGrey<access_mode>(markbit)) return false; |
83 state.IncrementLiveBytes(-obj->Size()); | 83 state.IncrementLiveBytes<access_mode>(-obj->Size()); |
84 return true; | 84 return true; |
85 } | 85 } |
86 | 86 |
87 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> | 87 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> |
88 V8_INLINE static bool WhiteToGrey(HeapObject* obj, | 88 V8_INLINE static bool WhiteToGrey(HeapObject* obj, |
89 const MarkingState& state) { | 89 const MarkingState& state) { |
90 DCHECK( | 90 DCHECK( |
91 (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state))); | 91 (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state))); |
92 return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state)); | 92 return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state)); |
93 } | 93 } |
94 | 94 |
95 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> | 95 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> |
96 V8_INLINE static bool WhiteToBlack(HeapObject* obj, | 96 V8_INLINE static bool WhiteToBlack(HeapObject* obj, |
97 const MarkingState& state) { | 97 const MarkingState& state) { |
98 DCHECK( | 98 DCHECK( |
99 (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state))); | 99 (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state))); |
100 if (!ObjectMarking::WhiteToGrey<access_mode>(obj, state)) return false; | 100 if (!ObjectMarking::WhiteToGrey<access_mode>(obj, state)) return false; |
101 return ObjectMarking::GreyToBlack<access_mode>(obj, state); | 101 return ObjectMarking::GreyToBlack<access_mode>(obj, state); |
102 } | 102 } |
103 | 103 |
104 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> | 104 template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> |
105 V8_INLINE static bool GreyToBlack(HeapObject* obj, | 105 V8_INLINE static bool GreyToBlack(HeapObject* obj, |
106 const MarkingState& state) { | 106 const MarkingState& state) { |
107 DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj, state))); | 107 DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj, state))); |
108 MarkBit markbit = MarkBitFrom(obj, state); | 108 MarkBit markbit = MarkBitFrom(obj, state); |
109 if (!Marking::GreyToBlack<access_mode>(markbit)) return false; | 109 if (!Marking::GreyToBlack<access_mode>(markbit)) return false; |
110 state.IncrementLiveBytes(obj->Size()); | 110 state.IncrementLiveBytes<access_mode>(obj->Size()); |
111 return true; | 111 return true; |
112 } | 112 } |
113 | 113 |
114 private: | 114 private: |
115 DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectMarking); | 115 DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectMarking); |
116 }; | 116 }; |
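Note on the change above: both BlackToGrey and GreyToBlack now forward the access mode to IncrementLiveBytes, so live-byte accounting uses the same atomicity as the mark-bit transition itself. A minimal sketch of how these helpers compose, for illustration only (`obj`, the deque pointer, and the Push call are assumptions, not part of this CL):

    // Sketch: marking an object during concurrent tracing. WhiteToBlack is
    // WhiteToGrey followed by GreyToBlack, which adds obj->Size() to the
    // chunk's live bytes using the same ATOMIC access mode.
    if (ObjectMarking::WhiteToBlack<MarkBit::ATOMIC>(
            obj, MarkingState::Internal(obj))) {
      // The transition succeeded exactly once across threads; queue the
      // object for visiting. The work-list push is hypothetical here.
      marking_deque->Push(obj);
    }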
117 | 117 |
118 // ---------------------------------------------------------------------------- | 118 // ---------------------------------------------------------------------------- |
119 // Marking deque for tracing live objects. | 119 // Marking deque for tracing live objects. |
120 class MarkingDeque { | 120 class MarkingDeque { |
(...skipping 296 matching lines...)
417 Visitor* visitor, IterationMode iteration_mode); | 417 Visitor* visitor, IterationMode iteration_mode); |
418 | 418 |
419 private: | 419 private: |
420 void RecomputeLiveBytes(MemoryChunk* chunk, const MarkingState& state); | 420 void RecomputeLiveBytes(MemoryChunk* chunk, const MarkingState& state); |
421 }; | 421 }; |
422 | 422 |
423 enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD }; | 423 enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD }; |
424 | 424 |
425 class MinorMarkCompactCollector { | 425 class MinorMarkCompactCollector { |
426 public: | 426 public: |
| 427 MarkingState marking_state(HeapObject* object) const { |
| 428 return MarkingState::External(object); |
| 429 } |
| 430 |
| 431 MarkingState marking_state(MemoryChunk* chunk) const { |
| 432 return MarkingState::External(chunk); |
| 433 } |
| 434 |
427 explicit MinorMarkCompactCollector(Heap* heap) | 435 explicit MinorMarkCompactCollector(Heap* heap) |
428 : heap_(heap), marking_deque_(heap) {} | 436 : heap_(heap), marking_deque_(heap), page_parallel_job_semaphore_(0) {} |
429 | 437 |
430 void SetUp(); | 438 void SetUp(); |
431 void TearDown(); | 439 void TearDown(); |
432 | 440 |
433 void CollectGarbage(); | 441 void CollectGarbage(); |
434 | 442 |
435 inline Heap* heap() const { return heap_; } | 443 inline Heap* heap() const { return heap_; } |
436 | 444 |
437 private: | 445 private: |
438 class RootMarkingVisitor; | 446 class RootMarkingVisitor; |
439 | 447 |
440 inline Isolate* isolate() { return heap()->isolate(); } | 448 inline Isolate* isolate() { return heap()->isolate(); } |
441 inline MarkingDeque* marking_deque() { return &marking_deque_; } | 449 inline MarkingDeque* marking_deque() { return &marking_deque_; } |
442 | 450 |
443 V8_INLINE void MarkObject(HeapObject* obj); | 451 V8_INLINE void MarkObject(HeapObject* obj); |
444 V8_INLINE void PushBlack(HeapObject* obj); | 452 V8_INLINE void PushBlack(HeapObject* obj); |
445 | 453 |
446 SlotCallbackResult CheckAndMarkObject(Heap* heap, Address slot_address); | 454 SlotCallbackResult CheckAndMarkObject(Heap* heap, Address slot_address); |
447 void MarkLiveObjects(); | 455 void MarkLiveObjects(); |
448 void ProcessMarkingDeque(); | 456 void ProcessMarkingDeque(); |
449 void EmptyMarkingDeque(); | 457 void EmptyMarkingDeque(); |
| 458 void ClearNonLiveReferences(); |
| 459 |
| 460 void EvacuatePrologue(); |
| 461 void EvacuateEpilogue(); |
| 462 void EvacuateNewSpace(std::vector<HeapObject*>* black_allocation_objects); |
| 463 void EvacuatePagesInParallel( |
| 464 std::vector<HeapObject*>* black_allocation_objects); |
| 465 void UpdatePointersAfterEvacuation(); |
450 | 466 |
451 Heap* heap_; | 467 Heap* heap_; |
452 MarkingDeque marking_deque_; | 468 MarkingDeque marking_deque_; |
| 469 base::Semaphore page_parallel_job_semaphore_; |
| 470 List<Page*> new_space_evacuation_pages_; |
453 | 471 |
454 friend class StaticYoungGenerationMarkingVisitor; | 472 friend class StaticYoungGenerationMarkingVisitor; |
455 }; | 473 }; |
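The new marking_state() accessors make the choice of bitmap explicit: the minor collector hands out MarkingState::External, while MarkCompactCollector further down returns MarkingState::Internal. An illustrative snippet of what that buys callers (the collector pointers are assumed for the example; the default NON_ATOMIC access mode is used):

    // The same ObjectMarking predicate reads different mark bitmaps and
    // live-byte counters, selected purely by the MarkingState passed in.
    ObjectMarking::IsBlackOrGrey(obj, minor_collector->marking_state(obj));  // external state
    ObjectMarking::IsBlackOrGrey(obj, full_collector->marking_state(obj));   // internal state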
456 | 474 |
457 // ------------------------------------------------------------------------- | 475 // ------------------------------------------------------------------------- |
458 // Mark-Compact collector | 476 // Mark-Compact collector |
459 class MarkCompactCollector { | 477 class MarkCompactCollector { |
460 public: | 478 public: |
461 class RootMarkingVisitor; | 479 class RootMarkingVisitor; |
462 | 480 |
(...skipping 71 matching lines...)
534 // Counter is actively maintained by the concurrent tasks to avoid querying | 552 // Counter is actively maintained by the concurrent tasks to avoid querying |
535 // the semaphore for maintaining a task counter on the main thread. | 553 // the semaphore for maintaining a task counter on the main thread. |
536 base::AtomicNumber<intptr_t> num_sweeping_tasks_; | 554 base::AtomicNumber<intptr_t> num_sweeping_tasks_; |
537 }; | 555 }; |
538 | 556 |
539 enum IterationMode { | 557 enum IterationMode { |
540 kKeepMarking, | 558 kKeepMarking, |
541 kClearMarkbits, | 559 kClearMarkbits, |
542 }; | 560 }; |
543 | 561 |
| 562 MarkingState marking_state(HeapObject* object) const { |
| 563 return MarkingState::Internal(object); |
| 564 } |
| 565 |
| 566 MarkingState marking_state(MemoryChunk* chunk) const { |
| 567 return MarkingState::Internal(chunk); |
| 568 } |
| 569 |
544 static void Initialize(); | 570 static void Initialize(); |
545 | 571 |
546 static SlotCallbackResult CheckAndMarkObject(Heap* heap, | 572 static SlotCallbackResult CheckAndMarkObject(Heap* heap, |
547 Address slot_address); | 573 Address slot_address); |
548 | 574 |
549 void SetUp(); | 575 void SetUp(); |
550 | 576 |
551 void TearDown(); | 577 void TearDown(); |
552 | 578 |
553 void CollectEvacuationCandidates(PagedSpace* space); | 579 void CollectEvacuationCandidates(PagedSpace* space); |
(...skipping 232 matching lines...)
786 | 812 |
787 // Starts sweeping of spaces by contributing on the main thread and setting | 813 // Starts sweeping of spaces by contributing on the main thread and setting |
788 // up other pages for sweeping. Does not start sweeper tasks. | 814 // up other pages for sweeping. Does not start sweeper tasks. |
789 void StartSweepSpaces(); | 815 void StartSweepSpaces(); |
790 void StartSweepSpace(PagedSpace* space); | 816 void StartSweepSpace(PagedSpace* space); |
791 | 817 |
792 void EvacuatePrologue(); | 818 void EvacuatePrologue(); |
793 void EvacuateEpilogue(); | 819 void EvacuateEpilogue(); |
794 void EvacuatePagesInParallel(); | 820 void EvacuatePagesInParallel(); |
795 | 821 |
796 // The number of parallel compaction tasks, including the main thread. | |
797 int NumberOfParallelCompactionTasks(int pages, intptr_t live_bytes); | |
798 | |
799 void EvacuateNewSpaceAndCandidates(); | 822 void EvacuateNewSpaceAndCandidates(); |
800 | 823 |
801 void UpdatePointersAfterEvacuation(); | 824 void UpdatePointersAfterEvacuation(); |
802 | 825 |
803 void ReleaseEvacuationCandidates(); | 826 void ReleaseEvacuationCandidates(); |
804 | 827 |
805 | 828 |
806 #ifdef DEBUG | 829 #ifdef DEBUG |
807 friend class MarkObjectVisitor; | 830 friend class MarkObjectVisitor; |
808 static void VisitObject(HeapObject* obj); | 831 static void VisitObject(HeapObject* obj); |
(...skipping 61 matching lines...)
870 | 893 |
871 private: | 894 private: |
872 MarkCompactCollector* collector_; | 895 MarkCompactCollector* collector_; |
873 }; | 896 }; |
874 | 897 |
875 V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space); | 898 V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space); |
876 } // namespace internal | 899 } // namespace internal |
877 } // namespace v8 | 900 } // namespace v8 |
878 | 901 |
879 #endif // V8_HEAP_MARK_COMPACT_H_ | 902 #endif // V8_HEAP_MARK_COMPACT_H_ |