OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
7 | 7 |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
10 #include "src/heap/store-buffer.h" | 10 #include "src/heap/store-buffer.h" |
(...skipping 382 matching lines...)
393 Address cell_base_; | 393 Address cell_base_; |
394 MarkBit::CellType current_cell_; | 394 MarkBit::CellType current_cell_; |
395 }; | 395 }; |
396 | 396 |
397 // ------------------------------------------------------------------------- | 397 // ------------------------------------------------------------------------- |
398 // Mark-Compact collector | 398 // Mark-Compact collector |
399 class MarkCompactCollector { | 399 class MarkCompactCollector { |
400 public: | 400 public: |
401 class Evacuator; | 401 class Evacuator; |
402 | 402 |
| 403 class Sweeper { |
| 404 public: |
| 405 class SweeperTask; |
| 406 |
| 407 enum SweepingMode { SWEEP_ONLY, SWEEP_AND_VISIT_LIVE_OBJECTS }; |
| 408 enum SkipListRebuildingMode { REBUILD_SKIP_LIST, IGNORE_SKIP_LIST }; |
| 409 enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE }; |
| 410 enum SweepingParallelism { SWEEP_ON_MAIN_THREAD, SWEEP_IN_PARALLEL }; |
| 411 |
| 412 typedef std::vector<Page*> SweepingList; |
| 413 typedef List<Page*> SweptList; |
| 414 |
| 415 template <SweepingMode sweeping_mode, SweepingParallelism parallelism, |
| 416 SkipListRebuildingMode skip_list_mode, |
| 417 FreeSpaceTreatmentMode free_space_mode> |
| 418 static int RawSweep(PagedSpace* space, Page* p, ObjectVisitor* v); |
| 419 |
| 420 explicit Sweeper(Heap* heap) |
| 421 : heap_(heap), |
| 422 pending_sweeper_tasks_semaphore_(0), |
| 423 sweeping_in_progress_(false), |
| 424 num_sweeping_tasks_(0) { |
| 425 ForAllSweepingSpaces([this](AllocationSpace space) { |
| 426 late_sweeping_list_[space] = nullptr; |
| 427 tmp_late_sweeping_list_[space] = nullptr; |
| 428 }); |
| 429 } |
| 430 |
| 431 bool sweeping_in_progress() { return sweeping_in_progress_; } |
| 432 |
| 433 void AddPage(AllocationSpace space, Page* page); |
| 434 void AddLatePage(AllocationSpace space, Page* page); |
| 435 void CommitLateList(AllocationSpace space); |
| 436 |
| 437 int ParallelSweepSpace(AllocationSpace identity, int required_freed_bytes, |
| 438 int max_pages = 0); |
| 439 int ParallelSweepPage(Page* page, PagedSpace* space); |
| 440 |
| 441 void StartSweeping(); |
| 442 void StartSweepingHelper(AllocationSpace space_to_start); |
| 443 void EnsureCompleted(); |
| 444 bool IsSweepingCompleted(); |
| 445 void SweepOrWaitUntilSweepingCompleted(Page* page); |
| 446 |
| 447 void AddSweptPageSafe(PagedSpace* space, Page* page); |
| 448 Page* GetSweptPageSafe(PagedSpace* space); |
| 449 |
| 450 private: |
| 451 static const int kAllocationSpaces = LAST_PAGED_SPACE + 1; |
| 452 |
| 453 template <typename Callback> |
| 454 void ForAllSweepingSpaces(Callback callback) { |
| 455 for (int i = 0; i < kAllocationSpaces; i++) { |
| 456 callback(static_cast<AllocationSpace>(i)); |
| 457 } |
| 458 } |
| 459 |
| 460 SweepingList* GetLateSweepingListSafe(AllocationSpace space); |
| 461 |
| 462 void PrepareToBeSweptPage(AllocationSpace space, Page* page); |
| 463 void ParallelSweepList(SweepingList& list, AllocationSpace out_space, |
| 464 int required_freed_bytes, int max_pages, |
| 465 int* max_freed, int* pages_freed); |
| 466 |
| 467 Heap* heap_; |
| 468 base::Mutex mutex_; |
| 469 base::Semaphore pending_sweeper_tasks_semaphore_; |
| 470 SweptList swept_list_[kAllocationSpaces]; |
| 471 SweepingList sweeping_list_[kAllocationSpaces]; |
| 472 SweepingList* late_sweeping_list_[kAllocationSpaces]; |
| 473 SweepingList* tmp_late_sweeping_list_[kAllocationSpaces]; |
| 474 bool sweeping_in_progress_; |
| 475 int num_sweeping_tasks_; |
| 476 }; |
| 477 |
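The new Sweeper keeps one sweeping list and one swept list per paged space, indexed directly by the AllocationSpace enum value, and walks them through the ForAllSweepingSpaces callback instead of the per-space switch the collector used before (see the removed swept_pages() further down). A minimal standalone sketch of that indexing pattern, assuming mock Page and AllocationSpace stand-ins rather than V8's real types:

    #include <iostream>
    #include <vector>

    // Mock of V8's AllocationSpace enum; only the paged spaces matter here.
    enum AllocationSpace { OLD_SPACE, CODE_SPACE, MAP_SPACE, LAST_PAGED_SPACE = MAP_SPACE };

    struct Page { int id; };

    class Sweeper {
     public:
      // One list per paged space, indexed by the enum value itself.
      static const int kAllocationSpaces = LAST_PAGED_SPACE + 1;

      void AddPage(AllocationSpace space, Page* page) {
        sweeping_list_[space].push_back(page);
      }

      // Applies |callback| to every paged space, replacing a hand-written
      // switch over OLD_SPACE/CODE_SPACE/MAP_SPACE.
      template <typename Callback>
      void ForAllSweepingSpaces(Callback callback) {
        for (int i = 0; i < kAllocationSpaces; i++) {
          callback(static_cast<AllocationSpace>(i));
        }
      }

      void PrintCounts() {
        ForAllSweepingSpaces([this](AllocationSpace space) {
          std::cout << "space " << space << ": "
                    << sweeping_list_[space].size() << " page(s)\n";
        });
      }

     private:
      std::vector<Page*> sweeping_list_[kAllocationSpaces];
    };

    int main() {
      Sweeper sweeper;
      Page p1{1}, p2{2};
      sweeper.AddPage(OLD_SPACE, &p1);
      sweeper.AddPage(CODE_SPACE, &p2);
      sweeper.PrintCounts();
    }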
403 enum IterationMode { | 478 enum IterationMode { |
404 kKeepMarking, | 479 kKeepMarking, |
405 kClearMarkbits, | 480 kClearMarkbits, |
406 }; | 481 }; |
407 | 482 |
408 static void Initialize(); | 483 static void Initialize(); |
409 | 484 |
410 void SetUp(); | 485 void SetUp(); |
411 | 486 |
412 void TearDown(); | 487 void TearDown(); |
(...skipping 31 matching lines...)
444 | 519 |
445 static inline bool IsMarked(Object* obj); | 520 static inline bool IsMarked(Object* obj); |
446 static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p); | 521 static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p); |
447 | 522 |
448 inline Heap* heap() const { return heap_; } | 523 inline Heap* heap() const { return heap_; } |
449 inline Isolate* isolate() const; | 524 inline Isolate* isolate() const; |
450 | 525 |
451 CodeFlusher* code_flusher() { return code_flusher_; } | 526 CodeFlusher* code_flusher() { return code_flusher_; } |
452 inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } | 527 inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } |
453 | 528 |
454 enum SweepingParallelism { SWEEP_ON_MAIN_THREAD, SWEEP_IN_PARALLEL }; | |
455 | |
456 #ifdef VERIFY_HEAP | 529 #ifdef VERIFY_HEAP |
457 void VerifyValidStoreAndSlotsBufferEntries(); | 530 void VerifyValidStoreAndSlotsBufferEntries(); |
458 void VerifyMarkbitsAreClean(); | 531 void VerifyMarkbitsAreClean(); |
459 static void VerifyMarkbitsAreClean(PagedSpace* space); | 532 static void VerifyMarkbitsAreClean(PagedSpace* space); |
460 static void VerifyMarkbitsAreClean(NewSpace* space); | 533 static void VerifyMarkbitsAreClean(NewSpace* space); |
461 void VerifyWeakEmbeddedObjectsInCode(); | 534 void VerifyWeakEmbeddedObjectsInCode(); |
462 void VerifyOmittedMapChecks(); | 535 void VerifyOmittedMapChecks(); |
463 #endif | 536 #endif |
464 | 537 |
465 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { | 538 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { |
(...skipping 17 matching lines...)
483 void UpdateSlotsRecordedIn(SlotsBuffer* buffer); | 556 void UpdateSlotsRecordedIn(SlotsBuffer* buffer); |
484 | 557 |
485 void InvalidateCode(Code* code); | 558 void InvalidateCode(Code* code); |
486 | 559 |
487 void ClearMarkbits(); | 560 void ClearMarkbits(); |
488 | 561 |
489 bool is_compacting() const { return compacting_; } | 562 bool is_compacting() const { return compacting_; } |
490 | 563 |
491 MarkingParity marking_parity() { return marking_parity_; } | 564 MarkingParity marking_parity() { return marking_parity_; } |
492 | 565 |
493 // Concurrent and parallel sweeping support. If required_freed_bytes was set | |
494 // to a value larger than 0, then sweeping returns after a block of at least | |
495 // required_freed_bytes was freed. If required_freed_bytes was set to zero | |
496 // then the whole given space is swept. It returns the size of the maximum | |
497 // continuous freed memory chunk. | |
498 int SweepInParallel(PagedSpace* space, int required_freed_bytes, | |
499 int max_pages = 0); | |
500 | |
501 // Sweeps a given page concurrently to the sweeper threads. It returns the | |
502 // size of the maximum continuous freed memory chunk. | |
503 int SweepInParallel(Page* page, PagedSpace* space); | |
504 | |
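The removed comment above pins down the contract that Sweeper::ParallelSweepSpace inherits: with required_freed_bytes > 0, sweeping stops once a single block at least that large has been freed; with 0, the whole space is swept; the return value is the size of the largest contiguous freed chunk. A hedged sketch of that early-exit logic, where per-page freed sizes stand in for real page sweeping and max_pages == 0 is read as "no page budget" (an assumption consistent with the default argument):

    #include <algorithm>
    #include <iostream>
    #include <vector>

    // Sketch: each entry is the contiguous block a page sweep would free.
    int ParallelSweepSpace(const std::vector<int>& page_free_sizes,
                           int required_freed_bytes, int max_pages = 0) {
      int max_freed = 0;   // size of the largest contiguous freed chunk so far
      int pages_swept = 0;
      for (int freed : page_free_sizes) {
        max_freed = std::max(max_freed, freed);
        pages_swept++;
        // Early exit: one block of at least |required_freed_bytes| was freed.
        if (required_freed_bytes > 0 && max_freed >= required_freed_bytes) break;
        // Optional page budget; 0 means sweep the whole space.
        if (max_pages > 0 && pages_swept >= max_pages) break;
      }
      return max_freed;
    }

    int main() {
      std::vector<int> pages = {128, 512, 64, 2048};
      // Stops after the second page: 512 >= 256.
      std::cout << ParallelSweepSpace(pages, 256) << "\n";  // prints 512
      // required_freed_bytes == 0: sweeps everything.
      std::cout << ParallelSweepSpace(pages, 0) << "\n";    // prints 2048
    }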
505 // Ensures that sweeping is finished. | 566 // Ensures that sweeping is finished. |
506 // | 567 // |
507 // Note: Can only be called safely from main thread. | 568 // Note: Can only be called safely from main thread. |
508 void EnsureSweepingCompleted(); | 569 void EnsureSweepingCompleted(); |
509 | 570 |
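EnsureSweepingCompleted has to join the concurrent sweeper tasks before the main thread may touch swept pages; the semaphore and task counter it waits on (pending_sweeper_tasks_semaphore_, num_sweeping_tasks_) move into the Sweeper in this CL. A minimal sketch of that semaphore-join idiom, assuming a condition-variable stand-in for V8's base::Semaphore:

    #include <condition_variable>
    #include <iostream>
    #include <mutex>
    #include <thread>
    #include <vector>

    // Tiny counting semaphore; stands in for V8's base::Semaphore.
    class Semaphore {
     public:
      void Signal() {
        std::lock_guard<std::mutex> lock(mutex_);
        ++count_;
        cv_.notify_one();
      }
      void Wait() {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return count_ > 0; });
        --count_;
      }
     private:
      std::mutex mutex_;
      std::condition_variable cv_;
      int count_ = 0;
    };

    int main() {
      const int kNumSweepingTasks = 3;
      Semaphore pending_sweeper_tasks;
      std::vector<std::thread> tasks;

      for (int i = 0; i < kNumSweepingTasks; i++) {
        tasks.emplace_back([&pending_sweeper_tasks, i] {
          // ... sweep pages ...
          std::cout << "task " << i << " done\n";
          pending_sweeper_tasks.Signal();  // each task reports completion once
        });
      }

      // EnsureSweepingCompleted: block until every started task has signaled.
      for (int i = 0; i < kNumSweepingTasks; i++) pending_sweeper_tasks.Wait();
      std::cout << "sweeping complete\n";

      for (auto& t : tasks) t.join();
    }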
510 void SweepOrWaitUntilSweepingCompleted(Page* page); | |
511 | |
512 // Help out in sweeping the corresponding space and refill the compaction | 571 // Help out in sweeping the corresponding space and refill the compaction |
513 // space with memory that has been regained. | 572 // space with memory that has been regained. |
514 // | 573 // |
515 // Note: Thread-safe. | 574 // Note: Thread-safe. |
516 void SweepAndRefill(CompactionSpace* space); | 575 void SweepAndRefill(CompactionSpace* space); |
517 | 576 |
518 // If sweeper threads are not active this method will return true. If | |
519 // this is a latency issue we should be smarter here. Otherwise, it will | |
520 // return true if the sweeper threads are done processing the pages. | |
521 bool IsSweepingCompleted(); | |
522 | |
523 // Checks if sweeping is in progress right now on any space. | 577 // Checks if sweeping is in progress right now on any space. |
524 bool sweeping_in_progress() { return sweeping_in_progress_; } | 578 bool sweeping_in_progress() { return sweeper().sweeping_in_progress(); } |
525 | 579 |
526 void set_evacuation(bool evacuation) { evacuation_ = evacuation; } | 580 void set_evacuation(bool evacuation) { evacuation_ = evacuation; } |
527 | 581 |
528 bool evacuation() const { return evacuation_; } | 582 bool evacuation() const { return evacuation_; } |
529 | 583 |
530 // Special case for processing weak references in a full collection. We need | 584 // Special case for processing weak references in a full collection. We need |
531 // to artificially keep AllocationSites alive for a time. | 585 // to artificially keep AllocationSites alive for a time. |
532 void MarkAllocationSite(AllocationSite* site); | 586 void MarkAllocationSite(AllocationSite* site); |
533 | 587 |
534 // Mark objects in implicit references groups if their parent object | 588 // Mark objects in implicit references groups if their parent object |
(...skipping 20 matching lines...)
555 // The following two methods may only be called after marking, when the | 609 // The following two methods may only be called after marking, when the |
556 // whole transitive closure is known. They must be called before sweeping, | 610 // whole transitive closure is known. They must be called before sweeping, |
557 // while mark bits are still intact. | 611 // while mark bits are still intact. |
558 bool IsSlotInBlackObject(MemoryChunk* p, Address slot); | 612 bool IsSlotInBlackObject(MemoryChunk* p, Address slot); |
559 HeapObject* FindBlackObjectBySlotSlow(Address slot); | 613 HeapObject* FindBlackObjectBySlotSlow(Address slot); |
560 | 614 |
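Conceptually, IsSlotInBlackObject asks whether an address lies inside the extent of some marked (black) object. V8 answers this from the mark bitmap; the sorted map from object start to size below is only an illustration of the lookup, not the real bitmap walk:

    #include <cstdint>
    #include <iostream>
    #include <map>

    using Address = uintptr_t;

    // start -> size of every marked (black) object on a page.
    using BlackObjects = std::map<Address, size_t>;

    // True if |slot| lies inside some black object's [start, start + size).
    bool IsSlotInBlackObject(const BlackObjects& black, Address slot) {
      auto it = black.upper_bound(slot);  // first object starting after slot
      if (it == black.begin()) return false;
      --it;  // nearest object starting at or before slot
      return slot < it->first + it->second;
    }

    int main() {
      BlackObjects black = {{0x1000, 0x40}, {0x2000, 0x10}};
      std::cout << IsSlotInBlackObject(black, 0x1008) << "\n";  // 1: inside
      std::cout << IsSlotInBlackObject(black, 0x1800) << "\n";  // 0: in a gap
    }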
561 // Removes all the slots in the slot buffers that are within the given | 615 // Removes all the slots in the slot buffers that are within the given |
562 // address range. | 616 // address range. |
563 void RemoveObjectSlots(Address start_slot, Address end_slot); | 617 void RemoveObjectSlots(Address start_slot, Address end_slot); |
564 | 618 |
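RemoveObjectSlots drops every recorded slot whose address falls in [start_slot, end_slot). Over a flat buffer that is a single erase-remove pass; the vector of addresses below is a simplification of V8's chunked SlotsBuffer:

    #include <algorithm>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    using Address = uintptr_t;

    // Drop every slot in [start_slot, end_slot) from the buffer.
    void RemoveObjectSlots(std::vector<Address>* buffer, Address start_slot,
                           Address end_slot) {
      buffer->erase(std::remove_if(buffer->begin(), buffer->end(),
                                   [=](Address slot) {
                                     return slot >= start_slot && slot < end_slot;
                                   }),
                    buffer->end());
    }

    int main() {
      std::vector<Address> buffer = {0x100, 0x180, 0x200, 0x280};
      RemoveObjectSlots(&buffer, 0x180, 0x201);  // removes 0x180 and 0x200
      std::cout << buffer.size() << "\n";        // prints 2
    }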
565 base::Mutex* swept_pages_mutex() { return &swept_pages_mutex_; } | 619 Sweeper& sweeper() { return sweeper_; } |
566 List<Page*>* swept_pages(AllocationSpace id) { | |
567 switch (id) { | |
568 case OLD_SPACE: | |
569 return &swept_old_space_pages_; | |
570 case CODE_SPACE: | |
571 return &swept_code_space_pages_; | |
572 case MAP_SPACE: | |
573 return &swept_map_space_pages_; | |
574 default: | |
575 UNREACHABLE(); | |
576 } | |
577 return nullptr; | |
578 } | |
579 | 620 |
580 private: | 621 private: |
581 class EvacuateNewSpaceVisitor; | 622 class EvacuateNewSpaceVisitor; |
582 class EvacuateOldSpaceVisitor; | 623 class EvacuateOldSpaceVisitor; |
583 class EvacuateVisitorBase; | 624 class EvacuateVisitorBase; |
584 class HeapObjectVisitor; | 625 class HeapObjectVisitor; |
585 class SweeperTask; | |
586 | 626 |
587 typedef std::vector<Page*> SweepingList; | 627 typedef std::vector<Page*> SweepingList; |
588 | 628 |
589 explicit MarkCompactCollector(Heap* heap); | 629 explicit MarkCompactCollector(Heap* heap); |
590 | 630 |
591 bool WillBeDeoptimized(Code* code); | 631 bool WillBeDeoptimized(Code* code); |
592 void ClearInvalidRememberedSetSlots(); | 632 void ClearInvalidRememberedSetSlots(); |
593 | 633 |
594 void StartSweeperThreads(); | |
595 | |
596 void ComputeEvacuationHeuristics(int area_size, | 634 void ComputeEvacuationHeuristics(int area_size, |
597 int* target_fragmentation_percent, | 635 int* target_fragmentation_percent, |
598 int* max_evacuated_bytes); | 636 int* max_evacuated_bytes); |
599 | 637 |
600 #ifdef DEBUG | 638 #ifdef DEBUG |
601 enum CollectorState { | 639 enum CollectorState { |
602 IDLE, | 640 IDLE, |
603 PREPARE_GC, | 641 PREPARE_GC, |
604 MARK_LIVE_OBJECTS, | 642 MARK_LIVE_OBJECTS, |
605 SWEEP_SPACES, | 643 SWEEP_SPACES, |
(...skipping 148 matching lines...)
754 // Phase 2: Sweeping to clear mark bits and free non-live objects for | 792 // Phase 2: Sweeping to clear mark bits and free non-live objects for |
755 // a non-compacting collection. | 793 // a non-compacting collection. |
756 // | 794 // |
757 // Before: Live objects are marked and non-live objects are unmarked. | 795 // Before: Live objects are marked and non-live objects are unmarked. |
758 // | 796 // |
759 // After: Live objects are unmarked, non-live regions have been added to | 797 // After: Live objects are unmarked, non-live regions have been added to |
760 // their space's free list. Active eden semispace is compacted by | 798 // their space's free list. Active eden semispace is compacted by |
761 // evacuation. | 799 // evacuation. |
762 // | 800 // |
763 | 801 |
764 inline SweepingList& sweeping_list(Space* space); | |
765 | |
766 // If we are not compacting the heap, we simply sweep the spaces except | 802 // If we are not compacting the heap, we simply sweep the spaces except |
767 // for the large object space, clearing mark bits and adding unmarked | 803 // for the large object space, clearing mark bits and adding unmarked |
768 // regions to each space's free list. | 804 // regions to each space's free list. |
769 void SweepSpaces(); | 805 void SweepSpaces(); |
770 | 806 |
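The Before/After contract above is the classic sweep: clear the mark on each live object and turn every maximal run of dead space into a free-list entry. A toy sketch over a flat array of objects, assuming a mock Object type instead of walking real mark-bitmap cells:

    #include <iostream>
    #include <vector>

    struct Object {
      size_t size;
      bool marked;  // set during the mark phase
    };

    struct FreeRegion { size_t start, size; };

    // Sweep: clear marks on live objects; coalesce dead ones into free regions.
    std::vector<FreeRegion> Sweep(std::vector<Object>& page) {
      std::vector<FreeRegion> free_list;
      size_t offset = 0;
      for (auto& obj : page) {
        if (obj.marked) {
          obj.marked = false;  // live: survives, mark bit cleared for next GC
        } else if (!free_list.empty() &&
                   free_list.back().start + free_list.back().size == offset) {
          free_list.back().size += obj.size;  // extend the current dead run
        } else {
          free_list.push_back({offset, obj.size});  // start a new dead run
        }
        offset += obj.size;
      }
      return free_list;
    }

    int main() {
      std::vector<Object> page = {{32, true}, {16, false}, {48, false}, {8, true}};
      for (auto& r : Sweep(page))
        std::cout << "free region at " << r.start << ", size " << r.size << "\n";
      // prints: free region at 32, size 64
    }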
771 void EvacuateNewSpacePrologue(); | 807 void EvacuateNewSpacePrologue(); |
772 void EvacuateNewSpaceEpilogue(); | 808 void EvacuateNewSpaceEpilogue(); |
773 | 809 |
774 void EvacuatePagesInParallel(); | 810 void EvacuatePagesInParallel(); |
775 | 811 |
(...skipping 14 matching lines...)
790 void RecomputeLiveBytes(MemoryChunk* page); | 826 void RecomputeLiveBytes(MemoryChunk* page); |
791 | 827 |
792 void SweepAbortedPages(); | 828 void SweepAbortedPages(); |
793 | 829 |
794 void ReleaseEvacuationCandidates(); | 830 void ReleaseEvacuationCandidates(); |
795 | 831 |
796 // Starts sweeping of a space by contributing on the main thread and setting | 832 // Starts sweeping of a space by contributing on the main thread and setting |
797 // up other pages for sweeping. | 833 // up other pages for sweeping. |
798 void StartSweepSpace(PagedSpace* space); | 834 void StartSweepSpace(PagedSpace* space); |
799 | 835 |
800 // Finalizes the parallel sweeping phase. Marks all the pages that were | |
801 // swept in parallel. | |
802 void ParallelSweepSpacesComplete(); | |
803 | |
804 #ifdef DEBUG | 836 #ifdef DEBUG |
805 friend class MarkObjectVisitor; | 837 friend class MarkObjectVisitor; |
806 static void VisitObject(HeapObject* obj); | 838 static void VisitObject(HeapObject* obj); |
807 | 839 |
808 friend class UnmarkObjectVisitor; | 840 friend class UnmarkObjectVisitor; |
809 static void UnmarkObject(HeapObject* obj); | 841 static void UnmarkObject(HeapObject* obj); |
810 #endif | 842 #endif |
811 | 843 |
812 Heap* heap_; | 844 Heap* heap_; |
813 base::VirtualMemory* marking_deque_memory_; | 845 base::VirtualMemory* marking_deque_memory_; |
814 size_t marking_deque_memory_committed_; | 846 size_t marking_deque_memory_committed_; |
815 MarkingDeque marking_deque_; | 847 MarkingDeque marking_deque_; |
816 CodeFlusher* code_flusher_; | 848 CodeFlusher* code_flusher_; |
817 bool have_code_to_deoptimize_; | 849 bool have_code_to_deoptimize_; |
818 | 850 |
819 List<Page*> evacuation_candidates_; | 851 List<Page*> evacuation_candidates_; |
820 List<NewSpacePage*> newspace_evacuation_candidates_; | 852 List<NewSpacePage*> newspace_evacuation_candidates_; |
821 | 853 |
822 base::Mutex swept_pages_mutex_; | |
823 List<Page*> swept_old_space_pages_; | |
824 List<Page*> swept_code_space_pages_; | |
825 List<Page*> swept_map_space_pages_; | |
826 | |
827 SweepingList sweeping_list_old_space_; | |
828 SweepingList sweeping_list_code_space_; | |
829 SweepingList sweeping_list_map_space_; | |
830 | |
831 // True if we are collecting slots to perform evacuation from evacuation | 854 // True if we are collecting slots to perform evacuation from evacuation |
832 // candidates. | 855 // candidates. |
833 bool compacting_; | 856 bool compacting_; |
834 | 857 |
835 // True if concurrent or parallel sweeping is currently in progress. | |
836 bool sweeping_in_progress_; | |
837 | |
838 // Semaphore used to synchronize sweeper tasks. | |
839 base::Semaphore pending_sweeper_tasks_semaphore_; | |
840 | |
841 // Semaphore used to synchronize compaction tasks. | 858 // Semaphore used to synchronize compaction tasks. |
842 base::Semaphore pending_compaction_tasks_semaphore_; | 859 base::Semaphore pending_compaction_tasks_semaphore_; |
843 | 860 |
844 bool black_allocation_; | 861 bool black_allocation_; |
845 | 862 |
| 863 Sweeper sweeper_; |
| 864 |
846 friend class Heap; | 865 friend class Heap; |
847 friend class StoreBuffer; | 866 friend class StoreBuffer; |
848 }; | 867 }; |
849 | 868 |
850 | 869 |
851 class EvacuationScope BASE_EMBEDDED { | 870 class EvacuationScope BASE_EMBEDDED { |
852 public: | 871 public: |
853 explicit EvacuationScope(MarkCompactCollector* collector) | 872 explicit EvacuationScope(MarkCompactCollector* collector) |
854 : collector_(collector) { | 873 : collector_(collector) { |
855 collector_->set_evacuation(true); | 874 collector_->set_evacuation(true); |
856 } | 875 } |
857 | 876 |
858 ~EvacuationScope() { collector_->set_evacuation(false); } | 877 ~EvacuationScope() { collector_->set_evacuation(false); } |
859 | 878 |
860 private: | 879 private: |
861 MarkCompactCollector* collector_; | 880 MarkCompactCollector* collector_; |
862 }; | 881 }; |
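EvacuationScope is a plain RAII toggle: the constructor sets the collector's evacuation flag and the destructor clears it on every exit path, early returns included. A self-contained usage sketch against a mock collector exposing just that flag:

    #include <iostream>

    // Mock collector exposing only the flag EvacuationScope toggles.
    class MarkCompactCollector {
     public:
      void set_evacuation(bool evacuation) { evacuation_ = evacuation; }
      bool evacuation() const { return evacuation_; }
     private:
      bool evacuation_ = false;
    };

    class EvacuationScope {
     public:
      explicit EvacuationScope(MarkCompactCollector* collector)
          : collector_(collector) {
        collector_->set_evacuation(true);
      }
      ~EvacuationScope() { collector_->set_evacuation(false); }
     private:
      MarkCompactCollector* collector_;
    };

    int main() {
      MarkCompactCollector collector;
      {
        EvacuationScope scope(&collector);            // flag set for the block
        std::cout << collector.evacuation() << "\n";  // 1
      }  // destructor runs even on early return or exception
      std::cout << collector.evacuation() << "\n";    // 0
    }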
863 | 882 |
864 | 883 |
865 const char* AllocationSpaceName(AllocationSpace space); | 884 const char* AllocationSpaceName(AllocationSpace space); |
866 } // namespace internal | 885 } // namespace internal |
867 } // namespace v8 | 886 } // namespace v8 |
868 | 887 |
869 #endif // V8_HEAP_MARK_COMPACT_H_ | 888 #endif // V8_HEAP_MARK_COMPACT_H_ |