OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
7 | 7 |
8 #include <deque> | 8 #include <deque> |
9 | 9 |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
11 #include "src/base/platform/condition-variable.h" | 11 #include "src/base/platform/condition-variable.h" |
12 #include "src/cancelable-task.h" | 12 #include "src/cancelable-task.h" |
13 #include "src/heap/marking.h" | 13 #include "src/heap/marking.h" |
14 #include "src/heap/spaces.h" | 14 #include "src/heap/spaces.h" |
15 #include "src/heap/store-buffer.h" | 15 #include "src/heap/store-buffer.h" |
16 | 16 |
17 namespace v8 { | 17 namespace v8 { |
18 namespace internal { | 18 namespace internal { |
19 | 19 |
| 20 enum class MarkCompactMode { FULL, YOUNG_GENERATION }; |
| 21 |
20 // Callback function, returns whether an object is alive. The heap size | 22 // Callback function, returns whether an object is alive. The heap size |
21 // of the object is returned in size. It optionally updates the offset | 23 // of the object is returned in size. It optionally updates the offset |
22 // to the first live object in the page (only used for old and map objects). | 24 // to the first live object in the page (only used for old and map objects). |
23 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); | 25 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); |
24 | 26 |
25 // Callback function to mark an object in a given heap. | 27 // Callback function to mark an object in a given heap. |
26 typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object); | 28 typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object); |
27 | 29 |
28 // Forward declarations. | 30 // Forward declarations. |
29 class CodeFlusher; | 31 class CodeFlusher; |
30 class MarkCompactCollector; | 32 class MarkCompactCollector; |
31 class MarkingVisitor; | 33 class MarkingVisitor; |
| 34 template <MarkCompactMode mode> |
32 class RootMarkingVisitor; | 35 class RootMarkingVisitor; |
33 | 36 |
34 class ObjectMarking : public AllStatic { | 37 class ObjectMarking : public AllStatic { |
35 public: | 38 public: |
36 INLINE(static MarkBit MarkBitFrom(Address addr)) { | 39 INLINE(static MarkBit MarkBitFrom(Address addr)) { |
37 MemoryChunk* p = MemoryChunk::FromAddress(addr); | 40 MemoryChunk* p = MemoryChunk::FromAddress(addr); |
38 return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr)); | 41 return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr)); |
39 } | 42 } |
40 | 43 |
41 INLINE(static MarkBit MarkBitFrom(HeapObject* obj)) { | 44 INLINE(static MarkBit MarkBitFrom(HeapObject* obj)) { |
(...skipping 17 matching lines...) Expand all Loading... |
59 backing_store_committed_size_(0), | 62 backing_store_committed_size_(0), |
60 array_(nullptr), | 63 array_(nullptr), |
61 top_(0), | 64 top_(0), |
62 bottom_(0), | 65 bottom_(0), |
63 mask_(0), | 66 mask_(0), |
64 overflowed_(false), | 67 overflowed_(false), |
65 in_use_(false), | 68 in_use_(false), |
66 uncommit_task_pending_(false), | 69 uncommit_task_pending_(false), |
67 heap_(heap) {} | 70 heap_(heap) {} |
68 | 71 |
| 72 enum UncommitMode { SEQUENTIAL, CONCURRENT }; |
| 73 |
69 void SetUp(); | 74 void SetUp(); |
70 void TearDown(); | 75 void TearDown(); |
71 | 76 |
72 // Ensures that the marking deque is committed and will stay committed until | 77 // Ensures that the marking deque is committed and will stay committed until |
73 // StopUsing() is called. | 78 // StopUsing() is called. |
74 void StartUsing(); | 79 void StartUsing(); |
75 void StopUsing(); | 80 void StopUsing(UncommitMode mode = CONCURRENT); |
76 void Clear(); | 81 void Clear(); |
77 | 82 |
78 inline bool IsFull() { return ((top_ + 1) & mask_) == bottom_; } | 83 inline bool IsFull() { return ((top_ + 1) & mask_) == bottom_; } |
79 | 84 |
80 inline bool IsEmpty() { return top_ == bottom_; } | 85 inline bool IsEmpty() { return top_ == bottom_; } |
81 | 86 |
82 bool overflowed() const { return overflowed_; } | 87 bool overflowed() const { return overflowed_; } |
83 | 88 |
84 void ClearOverflowed() { overflowed_ = false; } | 89 void ClearOverflowed() { overflowed_ = false; } |
85 | 90 |
(...skipping 323 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
409 base::AtomicNumber<intptr_t> num_sweeping_tasks_; | 414 base::AtomicNumber<intptr_t> num_sweeping_tasks_; |
410 }; | 415 }; |
411 | 416 |
412 enum IterationMode { | 417 enum IterationMode { |
413 kKeepMarking, | 418 kKeepMarking, |
414 kClearMarkbits, | 419 kClearMarkbits, |
415 }; | 420 }; |
416 | 421 |
417 static void Initialize(); | 422 static void Initialize(); |
418 | 423 |
| 424 static SlotCallbackResult CheckAndMarkObject(Heap* heap, |
| 425 Address slot_address); |
| 426 |
419 void SetUp(); | 427 void SetUp(); |
420 | 428 |
421 void TearDown(); | 429 void TearDown(); |
422 | 430 |
423 void CollectEvacuationCandidates(PagedSpace* space); | 431 void CollectEvacuationCandidates(PagedSpace* space); |
424 | 432 |
425 void AddEvacuationCandidate(Page* p); | 433 void AddEvacuationCandidate(Page* p); |
426 | 434 |
427 // Prepares for GC by resetting relocation info in old and map spaces and | 435 // Prepares for GC by resetting relocation info in old and map spaces and |
428 // choosing spaces to compact. | 436 // choosing spaces to compact. |
429 void Prepare(); | 437 void Prepare(); |
430 | 438 |
431 // Performs a global garbage collection. | 439 // Performs a global garbage collection. |
432 void CollectGarbage(); | 440 void CollectGarbage(); |
433 | 441 |
434 bool StartCompaction(); | 442 bool StartCompaction(); |
435 | 443 |
436 void AbortCompaction(); | 444 void AbortCompaction(); |
437 | 445 |
438 #ifdef DEBUG | |
439 // Checks whether performing mark-compact collection. | |
440 bool in_use() { return state_ > PREPARE_GC; } | |
441 bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } | |
442 #endif | |
443 | |
444 // Determine type of object and emit deletion log event. | 446 // Determine type of object and emit deletion log event. |
445 static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate); | 447 static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate); |
446 | 448 |
447 // Distinguishable invalid map encodings (for single word and multiple words) | 449 // Distinguishable invalid map encodings (for single word and multiple words) |
448 // that indicate free regions. | 450 // that indicate free regions. |
449 static const uint32_t kSingleFreeEncoding = 0; | 451 static const uint32_t kSingleFreeEncoding = 0; |
450 static const uint32_t kMultiFreeEncoding = 1; | 452 static const uint32_t kMultiFreeEncoding = 1; |
451 | 453 |
452 static inline bool IsMarked(Object* obj); | 454 static inline bool IsMarked(Object* obj); |
453 static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p); | 455 static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p); |
454 | 456 |
455 inline Heap* heap() const { return heap_; } | 457 inline Heap* heap() const { return heap_; } |
456 inline Isolate* isolate() const; | 458 inline Isolate* isolate() const; |
457 | 459 |
458 CodeFlusher* code_flusher() { return code_flusher_; } | 460 CodeFlusher* code_flusher() { return code_flusher_; } |
459 inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } | 461 inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } |
460 | 462 |
461 #ifdef VERIFY_HEAP | |
462 void VerifyValidStoreAndSlotsBufferEntries(); | |
463 void VerifyMarkbitsAreClean(); | |
464 static void VerifyMarkbitsAreClean(PagedSpace* space); | |
465 static void VerifyMarkbitsAreClean(NewSpace* space); | |
466 void VerifyWeakEmbeddedObjectsInCode(); | |
467 void VerifyOmittedMapChecks(); | |
468 #endif | |
469 | |
470 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { | 463 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { |
471 return Page::FromAddress(reinterpret_cast<Address>(host)) | 464 return Page::FromAddress(reinterpret_cast<Address>(host)) |
472 ->ShouldSkipEvacuationSlotRecording(); | 465 ->ShouldSkipEvacuationSlotRecording(); |
473 } | 466 } |
474 | 467 |
475 static inline bool IsOnEvacuationCandidate(HeapObject* obj) { | 468 static inline bool IsOnEvacuationCandidate(HeapObject* obj) { |
476 return Page::FromAddress(reinterpret_cast<Address>(obj)) | 469 return Page::FromAddress(reinterpret_cast<Address>(obj)) |
477 ->IsEvacuationCandidate(); | 470 ->IsEvacuationCandidate(); |
478 } | 471 } |
479 | 472 |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
518 void MarkAllocationSite(AllocationSite* site); | 511 void MarkAllocationSite(AllocationSite* site); |
519 | 512 |
520 // Mark objects in implicit references groups if their parent object | 513 // Mark objects in implicit references groups if their parent object |
521 // is marked. | 514 // is marked. |
522 void MarkImplicitRefGroups(MarkObjectFunction mark_object); | 515 void MarkImplicitRefGroups(MarkObjectFunction mark_object); |
523 | 516 |
524 MarkingDeque* marking_deque() { return &marking_deque_; } | 517 MarkingDeque* marking_deque() { return &marking_deque_; } |
525 | 518 |
526 Sweeper& sweeper() { return sweeper_; } | 519 Sweeper& sweeper() { return sweeper_; } |
527 | 520 |
| 521 #ifdef DEBUG |
| 522 // Checks whether performing mark-compact collection. |
| 523 bool in_use() { return state_ > PREPARE_GC; } |
| 524 bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } |
| 525 #endif |
| 526 |
| 527 #ifdef VERIFY_HEAP |
| 528 void VerifyValidStoreAndSlotsBufferEntries(); |
| 529 void VerifyMarkbitsAreClean(); |
| 530 static void VerifyMarkbitsAreClean(PagedSpace* space); |
| 531 static void VerifyMarkbitsAreClean(NewSpace* space); |
| 532 void VerifyWeakEmbeddedObjectsInCode(); |
| 533 void VerifyOmittedMapChecks(); |
| 534 |
| 535 typedef std::vector<HeapObject*> YoungGenerationMarkingVerificationState; |
| 536 YoungGenerationMarkingVerificationState MarkYoungGenerationForVerification(); |
| 537 void VerifyYoungGenerationMarkbitsUsingForwardingPointers( |
| 538 const YoungGenerationMarkingVerificationState& objects); |
| 539 #endif |
| 540 |
528 private: | 541 private: |
529 template <PageEvacuationMode mode> | 542 template <PageEvacuationMode mode> |
530 class EvacuateNewSpacePageVisitor; | 543 class EvacuateNewSpacePageVisitor; |
531 class EvacuateNewSpaceVisitor; | 544 class EvacuateNewSpaceVisitor; |
532 class EvacuateOldSpaceVisitor; | 545 class EvacuateOldSpaceVisitor; |
533 class EvacuateRecordOnlyVisitor; | 546 class EvacuateRecordOnlyVisitor; |
534 class EvacuateVisitorBase; | 547 class EvacuateVisitorBase; |
535 class HeapObjectVisitor; | 548 class HeapObjectVisitor; |
536 class ObjectStatsVisitor; | 549 class ObjectStatsVisitor; |
537 | 550 |
(...skipping 19 matching lines...) Expand all Loading... |
557 // MarkCompactCollector::Prepare() and is otherwise in its | 570 // MarkCompactCollector::Prepare() and is otherwise in its |
558 // normal state. | 571 // normal state. |
559 // | 572 // |
560 // After: Live objects are marked and non-live objects are unmarked. | 573 // After: Live objects are marked and non-live objects are unmarked. |
561 | 574 |
562 friend class CodeMarkingVisitor; | 575 friend class CodeMarkingVisitor; |
563 friend class IncrementalMarkingMarkingVisitor; | 576 friend class IncrementalMarkingMarkingVisitor; |
564 friend class MarkCompactMarkingVisitor; | 577 friend class MarkCompactMarkingVisitor; |
565 friend class MarkingVisitor; | 578 friend class MarkingVisitor; |
566 friend class RecordMigratedSlotVisitor; | 579 friend class RecordMigratedSlotVisitor; |
| 580 template <MarkCompactMode mode> |
567 friend class RootMarkingVisitor; | 581 friend class RootMarkingVisitor; |
568 friend class SharedFunctionInfoMarkingVisitor; | 582 friend class SharedFunctionInfoMarkingVisitor; |
| 583 friend class StaticYoungGenerationMarkingVisitor; |
569 | 584 |
570 // Mark code objects that are active on the stack to prevent them | 585 // Mark code objects that are active on the stack to prevent them |
571 // from being flushed. | 586 // from being flushed. |
572 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); | 587 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); |
573 | 588 |
574 void PrepareForCodeFlushing(); | 589 void PrepareForCodeFlushing(); |
575 | 590 |
576 // Marking operations for objects reachable from roots. | 591 // Marking operations for objects reachable from roots. |
577 void MarkLiveObjects(); | 592 void MarkLiveObjects(); |
| 593 // Mark the young generation. |
| 594 void MarkLiveObjectsInYoungGeneration(); |
578 | 595 |
579 // Pushes a black object onto the marking stack and accounts for live bytes. | 596 // Pushes a black object onto the marking stack and accounts for live bytes. |
580 // Note that this assumes live bytes have not yet been counted. | 597 // Note that this assumes live bytes have not yet been counted. |
581 INLINE(void PushBlack(HeapObject* obj)); | 598 INLINE(void PushBlack(HeapObject* obj)); |
582 | 599 |
583 // Unshifts a black object into the marking stack and accounts for live bytes. | 600 // Unshifts a black object into the marking stack and accounts for live bytes. |
584 // Note that this assumes lives bytes have already been counted. | 601 // Note that this assumes lives bytes have already been counted. |
585 INLINE(void UnshiftBlack(HeapObject* obj)); | 602 INLINE(void UnshiftBlack(HeapObject* obj)); |
586 | 603 |
587 // Marks the object black and pushes it on the marking stack. | 604 // Marks the object black and pushes it on the marking stack. |
588 // This is for non-incremental marking only. | 605 // This is for non-incremental marking only. |
589 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); | 606 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); |
590 | 607 |
591 // Marks the object black assuming that it is not yet marked. | 608 // Marks the object black assuming that it is not yet marked. |
592 // This is for non-incremental marking only. | 609 // This is for non-incremental marking only. |
593 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); | 610 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); |
594 | 611 |
595 // Mark the heap roots and all objects reachable from them. | 612 // Mark the heap roots and all objects reachable from them. |
596 void MarkRoots(RootMarkingVisitor* visitor); | 613 void MarkRoots(RootMarkingVisitor<MarkCompactMode::FULL>* visitor); |
597 | 614 |
598 // Mark the string table specially. References to internalized strings from | 615 // Mark the string table specially. References to internalized strings from |
599 // the string table are weak. | 616 // the string table are weak. |
600 void MarkStringTable(RootMarkingVisitor* visitor); | 617 void MarkStringTable(RootMarkingVisitor<MarkCompactMode::FULL>* visitor); |
601 | 618 |
602 // Mark objects reachable (transitively) from objects in the marking stack | 619 // Mark objects reachable (transitively) from objects in the marking stack |
603 // or overflowed in the heap. | 620 // or overflowed in the heap. |
| 621 template <MarkCompactMode mode> |
604 void ProcessMarkingDeque(); | 622 void ProcessMarkingDeque(); |
605 | 623 |
606 // Mark objects reachable (transitively) from objects in the marking stack | 624 // Mark objects reachable (transitively) from objects in the marking stack |
607 // or overflowed in the heap. This respects references only considered in | 625 // or overflowed in the heap. This respects references only considered in |
608 // the final atomic marking pause including the following: | 626 // the final atomic marking pause including the following: |
609 // - Processing of objects reachable through Harmony WeakMaps. | 627 // - Processing of objects reachable through Harmony WeakMaps. |
610 // - Objects reachable due to host application logic like object groups, | 628 // - Objects reachable due to host application logic like object groups, |
611 // implicit references' groups, or embedder heap tracing. | 629 // implicit references' groups, or embedder heap tracing. |
612 void ProcessEphemeralMarking(ObjectVisitor* visitor, | 630 void ProcessEphemeralMarking(ObjectVisitor* visitor, |
613 bool only_process_harmony_weak_collections); | 631 bool only_process_harmony_weak_collections); |
614 | 632 |
615 // If the call-site of the top optimized code was not prepared for | 633 // If the call-site of the top optimized code was not prepared for |
616 // deoptimization, then treat the maps in the code as strong pointers, | 634 // deoptimization, then treat the maps in the code as strong pointers, |
617 // otherwise a map can die and deoptimize the code. | 635 // otherwise a map can die and deoptimize the code. |
618 void ProcessTopOptimizedFrame(ObjectVisitor* visitor); | 636 void ProcessTopOptimizedFrame(ObjectVisitor* visitor); |
619 | 637 |
620 // Collects a list of dependent code from maps embedded in optimize code. | 638 // Collects a list of dependent code from maps embedded in optimize code. |
621 DependentCode* DependentCodeListFromNonLiveMaps(); | 639 DependentCode* DependentCodeListFromNonLiveMaps(); |
622 | 640 |
623 // Mark objects reachable (transitively) from objects in the marking | 641 // Mark objects reachable (transitively) from objects in the marking |
624 // stack. This function empties the marking stack, but may leave | 642 // stack. This function empties the marking stack, but may leave |
625 // overflowed objects in the heap, in which case the marking stack's | 643 // overflowed objects in the heap, in which case the marking stack's |
626 // overflow flag will be set. | 644 // overflow flag will be set. |
| 645 template <MarkCompactMode mode> |
627 void EmptyMarkingDeque(); | 646 void EmptyMarkingDeque(); |
628 | 647 |
629 // Refill the marking stack with overflowed objects from the heap. This | 648 // Refill the marking stack with overflowed objects from the heap. This |
630 // function either leaves the marking stack full or clears the overflow | 649 // function either leaves the marking stack full or clears the overflow |
631 // flag on the marking stack. | 650 // flag on the marking stack. |
| 651 template <MarkCompactMode mode> |
632 void RefillMarkingDeque(); | 652 void RefillMarkingDeque(); |
633 | 653 |
634 // Helper methods for refilling the marking stack by discovering grey objects | 654 // Helper methods for refilling the marking stack by discovering grey objects |
635 // on various pages of the heap. Used by {RefillMarkingDeque} only. | 655 // on various pages of the heap. Used by {RefillMarkingDeque} only. |
636 template <class T> | 656 template <class T> |
637 void DiscoverGreyObjectsWithIterator(T* it); | 657 void DiscoverGreyObjectsWithIterator(T* it); |
638 void DiscoverGreyObjectsOnPage(MemoryChunk* p); | 658 void DiscoverGreyObjectsOnPage(MemoryChunk* p); |
639 void DiscoverGreyObjectsInSpace(PagedSpace* space); | 659 void DiscoverGreyObjectsInSpace(PagedSpace* space); |
640 void DiscoverGreyObjectsInNewSpace(); | 660 void DiscoverGreyObjectsInNewSpace(); |
641 | 661 |
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
707 | 727 |
708 | 728 |
709 #ifdef DEBUG | 729 #ifdef DEBUG |
710 friend class MarkObjectVisitor; | 730 friend class MarkObjectVisitor; |
711 static void VisitObject(HeapObject* obj); | 731 static void VisitObject(HeapObject* obj); |
712 | 732 |
713 friend class UnmarkObjectVisitor; | 733 friend class UnmarkObjectVisitor; |
714 static void UnmarkObject(HeapObject* obj); | 734 static void UnmarkObject(HeapObject* obj); |
715 #endif | 735 #endif |
716 | 736 |
| 737 #ifdef VERIFY_HEAP |
| 738 std::vector<HeapObject*> GetObjectsInToSpace(); |
| 739 #endif |
| 740 |
717 Heap* heap_; | 741 Heap* heap_; |
718 | 742 |
719 base::Semaphore page_parallel_job_semaphore_; | 743 base::Semaphore page_parallel_job_semaphore_; |
720 | 744 |
721 #ifdef DEBUG | 745 #ifdef DEBUG |
722 enum CollectorState { | 746 enum CollectorState { |
723 IDLE, | 747 IDLE, |
724 PREPARE_GC, | 748 PREPARE_GC, |
725 MARK_LIVE_OBJECTS, | 749 MARK_LIVE_OBJECTS, |
726 SWEEP_SPACES, | 750 SWEEP_SPACES, |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
772 | 796 |
773 private: | 797 private: |
774 MarkCompactCollector* collector_; | 798 MarkCompactCollector* collector_; |
775 }; | 799 }; |
776 | 800 |
777 V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space); | 801 V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space); |
778 } // namespace internal | 802 } // namespace internal |
779 } // namespace v8 | 803 } // namespace v8 |
780 | 804 |
781 #endif // V8_HEAP_MARK_COMPACT_H_ | 805 #endif // V8_HEAP_MARK_COMPACT_H_ |
OLD | NEW |