OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
7 | 7 |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 #include "src/heap/store-buffer.h" | 10 #include "src/heap/store-buffer.h" |
(...skipping 140 matching lines...)
151 if (IsBlack(mark_bit)) return BLACK_OBJECT; | 151 if (IsBlack(mark_bit)) return BLACK_OBJECT; |
152 if (IsWhite(mark_bit)) return WHITE_OBJECT; | 152 if (IsWhite(mark_bit)) return WHITE_OBJECT; |
153 if (IsGrey(mark_bit)) return GREY_OBJECT; | 153 if (IsGrey(mark_bit)) return GREY_OBJECT; |
154 UNREACHABLE(); | 154 UNREACHABLE(); |
155 return IMPOSSIBLE_COLOR; | 155 return IMPOSSIBLE_COLOR; |
156 } | 156 } |
157 #endif | 157 #endif |
158 | 158 |
159 // Returns true if the transferred color is black. | 159 // Returns true if the transferred color is black. |
160 INLINE(static bool TransferColor(HeapObject* from, HeapObject* to)) { | 160 INLINE(static bool TransferColor(HeapObject* from, HeapObject* to)) { |
| 161 if (Page::FromAddress(to->address())->IsFlagSet(Page::BLACK_PAGE)) |
| 162 return true; |
161 MarkBit from_mark_bit = MarkBitFrom(from); | 163 MarkBit from_mark_bit = MarkBitFrom(from); |
162 MarkBit to_mark_bit = MarkBitFrom(to); | 164 MarkBit to_mark_bit = MarkBitFrom(to); |
163 DCHECK(Marking::IsWhite(to_mark_bit)); | 165 DCHECK(Marking::IsWhite(to_mark_bit)); |
164 if (from_mark_bit.Get()) { | 166 if (from_mark_bit.Get()) { |
165 to_mark_bit.Set(); | 167 to_mark_bit.Set(); |
166 if (from_mark_bit.Next().Get()) { | 168 if (from_mark_bit.Next().Get()) { |
167 to_mark_bit.Next().Set(); | 169 to_mark_bit.Next().Set(); |
168 return true; | 170 return true; |
169 } | 171 } |
170 } | 172 } |
(...skipping 138 matching lines...)
309 JSFunction* jsfunction_candidates_head_; | 311 JSFunction* jsfunction_candidates_head_; |
310 SharedFunctionInfo* shared_function_info_candidates_head_; | 312 SharedFunctionInfo* shared_function_info_candidates_head_; |
311 | 313 |
312 DISALLOW_COPY_AND_ASSIGN(CodeFlusher); | 314 DISALLOW_COPY_AND_ASSIGN(CodeFlusher); |
313 }; | 315 }; |
314 | 316 |
315 | 317 |
316 // Defined in isolate.h. | 318 // Defined in isolate.h. |
317 class ThreadLocalTop; | 319 class ThreadLocalTop; |
318 | 320 |
| 321 class MarkBitCellIterator BASE_EMBEDDED { |
| 322 public: |
| 323 explicit MarkBitCellIterator(MemoryChunk* chunk) : chunk_(chunk) { |
| 324 last_cell_index_ = Bitmap::IndexToCell(Bitmap::CellAlignIndex( |
| 325 chunk_->AddressToMarkbitIndex(chunk_->area_end()))); |
| 326 cell_base_ = chunk_->area_start(); |
| 327 cell_index_ = Bitmap::IndexToCell( |
| 328 Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_))); |
| 329 cells_ = chunk_->markbits()->cells(); |
| 330 } |
| 331 |
| 332 inline bool Done() { return cell_index_ == last_cell_index_; } |
| 333 |
| 334 inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; } |
| 335 |
| 336 inline MarkBit::CellType* CurrentCell() { |
| 337 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( |
| 338 chunk_->AddressToMarkbitIndex(cell_base_)))); |
| 339 return &cells_[cell_index_]; |
| 340 } |
| 341 |
| 342 inline Address CurrentCellBase() { |
| 343 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( |
| 344 chunk_->AddressToMarkbitIndex(cell_base_)))); |
| 345 return cell_base_; |
| 346 } |
| 347 |
| 348 inline void Advance() { |
| 349 cell_index_++; |
| 350 cell_base_ += 32 * kPointerSize; |
| 351 } |
| 352 |
| 353 // Returns the next mark bit cell, or 0 if there is no next cell. |
| 354 inline MarkBit::CellType PeekNext() { |
| 355 if (HasNext()) { |
| 356 return cells_[cell_index_ + 1]; |
| 357 } |
| 358 return 0; |
| 359 } |
| 360 |
| 361 private: |
| 362 MemoryChunk* chunk_; |
| 363 MarkBit::CellType* cells_; |
| 364 unsigned int last_cell_index_; |
| 365 unsigned int cell_index_; |
| 366 Address cell_base_; |
| 367 }; |
| 368 |
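For context, a minimal sketch of how the cell iterator above could be driven, using only the API it declares (Done(), Advance(), CurrentCell()); the helper name CountMarkedCells is hypothetical and not part of this change:

// Hypothetical helper: walks the marking bitmap of a chunk one cell at a
// time and counts cells that have at least one mark bit set.
static int CountMarkedCells(MemoryChunk* chunk) {
  int marked_cells = 0;
  for (MarkBitCellIterator it(chunk); !it.Done(); it.Advance()) {
    if (*it.CurrentCell() != 0) marked_cells++;
  }
  return marked_cells;
}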
| 369 // Grey objects can happen on black pages when black objects transition to |
| 370 // grey e.g. when calling RecordWrites on them. |
| 371 enum LiveObjectIterationMode { |
| 372 kBlackObjects, |
| 373 kGreyObjects, |
| 374 kGreyObjectsOnBlackPage, |
| 375 kAllLiveObjects |
| 376 }; |
| 377 |
| 378 template <LiveObjectIterationMode T> |
| 379 class LiveObjectIterator BASE_EMBEDDED { |
| 380 public: |
| 381 explicit LiveObjectIterator(MemoryChunk* chunk) |
| 382 : chunk_(chunk), |
| 383 it_(chunk_), |
| 384 cell_base_(it_.CurrentCellBase()), |
| 385 current_cell_(*it_.CurrentCell()) { |
| 386 // Black pages can only be iterated with kGreyObjectsOnBlackPage mode. |
| 387 if (T != kGreyObjectsOnBlackPage) { |
| 388 DCHECK(!chunk->IsFlagSet(Page::BLACK_PAGE)); |
| 389 } |
| 390 } |
| 391 |
| 392 HeapObject* Next(); |
| 393 |
| 394 private: |
| 395 MemoryChunk* chunk_; |
| 396 MarkBitCellIterator it_; |
| 397 Address cell_base_; |
| 398 MarkBit::CellType current_cell_; |
| 399 }; |
319 | 400 |
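A minimal usage sketch for the iterator above, assuming Next() returns NULL once all live objects on the page have been visited; the visitor name VisitBlackObjects is hypothetical and not part of this change:

// Hypothetical helper: visits every black (marked) object on a non-black
// page using the kBlackObjects iteration mode.
static void VisitBlackObjects(MemoryChunk* chunk) {
  LiveObjectIterator<kBlackObjects> it(chunk);
  HeapObject* object = NULL;
  while ((object = it.Next()) != NULL) {
    // ... process the live object here ...
  }
}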
320 // ------------------------------------------------------------------------- | 401 // ------------------------------------------------------------------------- |
321 // Mark-Compact collector | 402 // Mark-Compact collector |
322 class MarkCompactCollector { | 403 class MarkCompactCollector { |
323 public: | 404 public: |
324 class Evacuator; | 405 class Evacuator; |
325 | 406 |
326 enum IterationMode { | 407 enum IterationMode { |
327 kKeepMarking, | 408 kKeepMarking, |
328 kClearMarkbits, | 409 kClearMarkbits, |
(...skipping 298 matching lines...)
627 | 708 |
628 // Refill the marking stack with overflowed objects from the heap. This | 709 // Refill the marking stack with overflowed objects from the heap. This |
629 // function either leaves the marking stack full or clears the overflow | 710 // function either leaves the marking stack full or clears the overflow |
630 // flag on the marking stack. | 711 // flag on the marking stack. |
631 void RefillMarkingDeque(); | 712 void RefillMarkingDeque(); |
632 | 713 |
633 // Helper methods for refilling the marking stack by discovering grey objects | 714 // Helper methods for refilling the marking stack by discovering grey objects |
634 // on various pages of the heap. Used by {RefillMarkingDeque} only. | 715 // on various pages of the heap. Used by {RefillMarkingDeque} only. |
635 template <class T> | 716 template <class T> |
636 void DiscoverGreyObjectsWithIterator(T* it); | 717 void DiscoverGreyObjectsWithIterator(T* it); |
| 718 template <LiveObjectIterationMode T> |
637 void DiscoverGreyObjectsOnPage(MemoryChunk* p); | 719 void DiscoverGreyObjectsOnPage(MemoryChunk* p); |
638 void DiscoverGreyObjectsInSpace(PagedSpace* space); | 720 void DiscoverGreyObjectsInSpace(PagedSpace* space); |
639 void DiscoverGreyObjectsInNewSpace(); | 721 void DiscoverGreyObjectsInNewSpace(); |
640 | 722 |
641 // Callback function for telling whether the object *p is an unmarked | 723 // Callback function for telling whether the object *p is an unmarked |
642 // heap object. | 724 // heap object. |
643 static bool IsUnmarkedHeapObject(Object** p); | 725 static bool IsUnmarkedHeapObject(Object** p); |
644 | 726 |
645 // Clear non-live references in weak cells, transition and descriptor arrays, | 727 // Clear non-live references in weak cells, transition and descriptor arrays, |
646 // and deoptimize dependent code of non-live maps. | 728 // and deoptimize dependent code of non-live maps. |
(...skipping 122 matching lines...)
769 | 851 |
770 // True if parallel compaction is currently in progress. | 852 // True if parallel compaction is currently in progress. |
771 bool compaction_in_progress_; | 853 bool compaction_in_progress_; |
772 | 854 |
773 // Semaphore used to synchronize sweeper tasks. | 855 // Semaphore used to synchronize sweeper tasks. |
774 base::Semaphore pending_sweeper_tasks_semaphore_; | 856 base::Semaphore pending_sweeper_tasks_semaphore_; |
775 | 857 |
776 // Semaphore used to synchronize compaction tasks. | 858 // Semaphore used to synchronize compaction tasks. |
777 base::Semaphore pending_compaction_tasks_semaphore_; | 859 base::Semaphore pending_compaction_tasks_semaphore_; |
778 | 860 |
| 861 bool black_allocation_; |
| 862 |
779 friend class Heap; | 863 friend class Heap; |
780 friend class StoreBuffer; | 864 friend class StoreBuffer; |
781 }; | 865 }; |
782 | 866 |
783 | 867 |
784 class MarkBitCellIterator BASE_EMBEDDED { | |
785 public: | |
786 explicit MarkBitCellIterator(MemoryChunk* chunk) : chunk_(chunk) { | |
787 last_cell_index_ = Bitmap::IndexToCell(Bitmap::CellAlignIndex( | |
788 chunk_->AddressToMarkbitIndex(chunk_->area_end()))); | |
789 cell_base_ = chunk_->area_start(); | |
790 cell_index_ = Bitmap::IndexToCell( | |
791 Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_))); | |
792 cells_ = chunk_->markbits()->cells(); | |
793 } | |
794 | |
795 inline bool Done() { return cell_index_ == last_cell_index_; } | |
796 | |
797 inline bool HasNext() { return cell_index_ < last_cell_index_ - 1; } | |
798 | |
799 inline MarkBit::CellType* CurrentCell() { | |
800 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( | |
801 chunk_->AddressToMarkbitIndex(cell_base_)))); | |
802 return &cells_[cell_index_]; | |
803 } | |
804 | |
805 inline Address CurrentCellBase() { | |
806 DCHECK(cell_index_ == Bitmap::IndexToCell(Bitmap::CellAlignIndex( | |
807 chunk_->AddressToMarkbitIndex(cell_base_)))); | |
808 return cell_base_; | |
809 } | |
810 | |
811 inline void Advance() { | |
812 cell_index_++; | |
813 cell_base_ += 32 * kPointerSize; | |
814 } | |
815 | |
816 // Return the next mark bit cell. If there is no next it returns 0; | |
817 inline MarkBit::CellType PeekNext() { | |
818 if (HasNext()) { | |
819 return cells_[cell_index_ + 1]; | |
820 } | |
821 return 0; | |
822 } | |
823 | |
824 private: | |
825 MemoryChunk* chunk_; | |
826 MarkBit::CellType* cells_; | |
827 unsigned int last_cell_index_; | |
828 unsigned int cell_index_; | |
829 Address cell_base_; | |
830 }; | |
831 | |
832 enum LiveObjectIterationMode { kBlackObjects, kGreyObjects, kAllLiveObjects }; | |
833 | |
834 template <LiveObjectIterationMode T> | |
835 class LiveObjectIterator BASE_EMBEDDED { | |
836 public: | |
837 explicit LiveObjectIterator(MemoryChunk* chunk) | |
838 : chunk_(chunk), | |
839 it_(chunk_), | |
840 cell_base_(it_.CurrentCellBase()), | |
841 current_cell_(*it_.CurrentCell()) {} | |
842 | |
843 HeapObject* Next(); | |
844 | |
845 private: | |
846 MemoryChunk* chunk_; | |
847 MarkBitCellIterator it_; | |
848 Address cell_base_; | |
849 MarkBit::CellType current_cell_; | |
850 }; | |
851 | |
852 | |
853 class EvacuationScope BASE_EMBEDDED { | 868 class EvacuationScope BASE_EMBEDDED { |
854 public: | 869 public: |
855 explicit EvacuationScope(MarkCompactCollector* collector) | 870 explicit EvacuationScope(MarkCompactCollector* collector) |
856 : collector_(collector) { | 871 : collector_(collector) { |
857 collector_->set_evacuation(true); | 872 collector_->set_evacuation(true); |
858 } | 873 } |
859 | 874 |
860 ~EvacuationScope() { collector_->set_evacuation(false); } | 875 ~EvacuationScope() { collector_->set_evacuation(false); } |
861 | 876 |
862 private: | 877 private: |
863 MarkCompactCollector* collector_; | 878 MarkCompactCollector* collector_; |
864 }; | 879 }; |
865 | 880 |
866 | 881 |
867 const char* AllocationSpaceName(AllocationSpace space); | 882 const char* AllocationSpaceName(AllocationSpace space); |
868 } // namespace internal | 883 } // namespace internal |
869 } // namespace v8 | 884 } // namespace v8 |
870 | 885 |
871 #endif // V8_HEAP_MARK_COMPACT_H_ | 886 #endif // V8_HEAP_MARK_COMPACT_H_ |