| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
| 6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
| 7 | 7 |
| 8 #include <deque> | 8 #include <deque> |
| 9 | 9 |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| 11 #include "src/base/platform/condition-variable.h" | 11 #include "src/base/platform/condition-variable.h" |
| 12 #include "src/cancelable-task.h" | 12 #include "src/cancelable-task.h" |
| 13 #include "src/heap/marking.h" | 13 #include "src/heap/marking.h" |
| 14 #include "src/heap/spaces.h" | 14 #include "src/heap/spaces.h" |
| 15 #include "src/heap/store-buffer.h" | 15 #include "src/heap/store-buffer.h" |
| 16 | 16 |
| 17 namespace v8 { | 17 namespace v8 { |
| 18 namespace internal { | 18 namespace internal { |
| 19 | 19 |
| 20 enum class MarkCompactMode { FULL, YOUNG_GENERATION }; |
| 21 |
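The new `MarkCompactMode` enum is what lets this patch template the marking machinery (`RootMarkingVisitor`, `ProcessMarkingDeque`, `EmptyMarkingDeque`, `RefillMarkingDeque`) on the kind of collection being run. A minimal standalone sketch of that pattern follows; the function name and printed behavior are illustrative only, not V8 code:

```cpp
#include <iostream>

enum class MarkCompactMode { FULL, YOUNG_GENERATION };

// Hypothetical worklist processor: the mode is a compile-time constant, so
// the compiler can fold the branch away in each instantiation.
template <MarkCompactMode mode>
void ProcessWorklist() {
  if (mode == MarkCompactMode::YOUNG_GENERATION) {
    std::cout << "visit objects in the young generation only\n";
  } else {
    std::cout << "visit the whole heap\n";
  }
}

int main() {
  ProcessWorklist<MarkCompactMode::FULL>();
  ProcessWorklist<MarkCompactMode::YOUNG_GENERATION>();
}
```

Because the mode is a non-type template parameter, the FULL and YOUNG_GENERATION variants carry no runtime dispatch; each instantiation compiles down to only its own logic.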
| 20 // Callback function, returns whether an object is alive. The heap size | 22 // Callback function, returns whether an object is alive. The heap size |
| 21 // of the object is returned in size. It optionally updates the offset | 23 // of the object is returned in size. It optionally updates the offset |
| 22 // to the first live object in the page (only used for old and map objects). | 24 // to the first live object in the page (only used for old and map objects). |
| 23 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); | 25 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); |
| 24 | 26 |
| 25 // Callback function to mark an object in a given heap. | 27 // Callback function to mark an object in a given heap. |
| 26 typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object); | 28 typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object); |
| 27 | 29 |
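Since `IsAliveFunction` and `MarkObjectFunction` are plain function-pointer typedefs, any free function with a matching signature can be handed to the collector (compare `MarkImplicitRefGroups(MarkObjectFunction mark_object)` later in this header). A self-contained illustration with stand-in `Heap`/`HeapObject` structs rather than the real classes:

```cpp
// Stand-in types so the snippet compiles on its own; not the real V8 classes.
struct Heap {};
struct HeapObject {};

typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset);
typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object);

// A hypothetical callback matching MarkObjectFunction.
void MarkObjectExample(Heap* heap, HeapObject* object) {
  (void)heap;
  (void)object;  // a real callback would set the object's mark bit here
}

// A hypothetical consumer, analogous in shape to MarkImplicitRefGroups().
void ForEachImplicitRef(Heap* heap, HeapObject** refs, int count,
                        MarkObjectFunction mark_object) {
  for (int i = 0; i < count; i++) mark_object(heap, refs[i]);
}

int main() {
  Heap heap;
  HeapObject a, b;
  HeapObject* refs[] = {&a, &b};
  ForEachImplicitRef(&heap, refs, 2, &MarkObjectExample);
}
```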
| 28 // Forward declarations. | 30 // Forward declarations. |
| 29 class CodeFlusher; | 31 class CodeFlusher; |
| 30 class MarkCompactCollector; | 32 class MarkCompactCollector; |
| 31 class MarkingVisitor; | 33 class MarkingVisitor; |
| 34 template <MarkCompactMode mode> |
| 32 class RootMarkingVisitor; | 35 class RootMarkingVisitor; |
| 33 | 36 |
| 34 class ObjectMarking : public AllStatic { | 37 class ObjectMarking : public AllStatic { |
| 35 public: | 38 public: |
| 36 INLINE(static MarkBit MarkBitFrom(Address addr)) { | 39 INLINE(static MarkBit MarkBitFrom(Address addr)) { |
| 37 MemoryChunk* p = MemoryChunk::FromAddress(addr); | 40 MemoryChunk* p = MemoryChunk::FromAddress(addr); |
| 38 return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr)); | 41 return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr)); |
| 39 } | 42 } |
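`MarkBitFrom(Address)` works because a `MemoryChunk` is aligned to its size, so both the owning chunk and the mark-bit index can be recovered from a raw address with mask-and-shift arithmetic. A simplified standalone model of that mapping, with illustrative constants that are assumptions rather than V8's real values:

```cpp
#include <stdint.h>

#include <iostream>

// Illustrative constants: 512 KB chunks, 8-byte object alignment.
constexpr uintptr_t kChunkSize = 512 * 1024;
constexpr uintptr_t kChunkMask = kChunkSize - 1;
constexpr int kPointerSizeLog2 = 3;

// Chunk base: clear the low address bits (the idea behind
// MemoryChunk::FromAddress, since chunks live at aligned addresses).
uintptr_t ChunkFromAddress(uintptr_t addr) { return addr & ~kChunkMask; }

// Mark-bit index: one bit per possible object start within the chunk
// (the role of AddressToMarkbitIndex).
uintptr_t AddressToMarkbitIndex(uintptr_t addr) {
  return (addr & kChunkMask) >> kPointerSizeLog2;
}

int main() {
  uintptr_t addr = 0x400000 + 0x1230;  // arbitrary example address
  std::cout << std::hex << "chunk base: 0x" << ChunkFromAddress(addr)
            << ", mark-bit index: 0x" << AddressToMarkbitIndex(addr) << "\n";
}
```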
| 40 | 43 |
| 41 INLINE(static MarkBit MarkBitFrom(HeapObject* obj)) { | 44 INLINE(static MarkBit MarkBitFrom(HeapObject* obj)) { |
| (...skipping 367 matching lines...) |

| 409 base::AtomicNumber<intptr_t> num_sweeping_tasks_; | 412 base::AtomicNumber<intptr_t> num_sweeping_tasks_; |
| 410 }; | 413 }; |
| 411 | 414 |
| 412 enum IterationMode { | 415 enum IterationMode { |
| 413 kKeepMarking, | 416 kKeepMarking, |
| 414 kClearMarkbits, | 417 kClearMarkbits, |
| 415 }; | 418 }; |
| 416 | 419 |
| 417 static void Initialize(); | 420 static void Initialize(); |
| 418 | 421 |
| 422 static SlotCallbackResult CheckAndMarkObject(Heap* heap, |
| 423 Address slot_address); |
| 424 |
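The newly added `CheckAndMarkObject(Heap*, Address)` returns a `SlotCallbackResult`, the shape used by remembered-set slot visitors: read the pointer stored in the slot, mark the target if it needs to stay alive, and tell the caller whether the slot is still worth keeping. A hedged sketch of that pattern with invented stand-in types; the enum values and slot representation here are assumptions, not the exact V8 API:

```cpp
#include <vector>

// Illustrative stand-ins for the slot-visiting pattern; not V8's types.
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

struct Object {
  bool marked = false;
  bool in_young_generation = true;
};
using Address = Object**;  // a "slot" here is a location holding a pointer

// Shape of a CheckAndMarkObject-style callback: inspect the slot, possibly
// mark its target, and report whether the slot is still worth remembering.
SlotCallbackResult CheckAndMarkObject(Address slot) {
  Object* target = *slot;
  if (target != nullptr && target->in_young_generation) {
    target->marked = true;  // keep the young object alive
    return KEEP_SLOT;       // the slot still points into the young generation
  }
  return REMOVE_SLOT;       // nothing young behind this slot any more
}

// A remembered-set-style iteration that drops slots the callback rejects.
void IterateSlots(std::vector<Address>* slots) {
  auto it = slots->begin();
  while (it != slots->end()) {
    it = (CheckAndMarkObject(*it) == KEEP_SLOT) ? it + 1 : slots->erase(it);
  }
}

int main() {
  Object young;
  Object old_object;
  old_object.in_young_generation = false;
  Object* cell1 = &young;
  Object* cell2 = &old_object;
  std::vector<Address> slots = {&cell1, &cell2};
  IterateSlots(&slots);  // keeps the first slot, removes the second
  return (slots.size() == 1 && young.marked) ? 0 : 1;
}
```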
| 419 void SetUp(); | 425 void SetUp(); |
| 420 | 426 |
| 421 void TearDown(); | 427 void TearDown(); |
| 422 | 428 |
| 423 void CollectEvacuationCandidates(PagedSpace* space); | 429 void CollectEvacuationCandidates(PagedSpace* space); |
| 424 | 430 |
| 425 void AddEvacuationCandidate(Page* p); | 431 void AddEvacuationCandidate(Page* p); |
| 426 | 432 |
| 427 // Prepares for GC by resetting relocation info in old and map spaces and | 433 // Prepares for GC by resetting relocation info in old and map spaces and |
| 428 // choosing spaces to compact. | 434 // choosing spaces to compact. |
| 429 void Prepare(); | 435 void Prepare(); |
| 430 | 436 |
| 431 // Performs a global garbage collection. | 437 // Performs a global garbage collection. |
| 432 void CollectGarbage(); | 438 void CollectGarbage(); |
| 433 | 439 |
| 434 bool StartCompaction(); | 440 bool StartCompaction(); |
| 435 | 441 |
| 436 void AbortCompaction(); | 442 void AbortCompaction(); |
| 437 | 443 |
| 438 #ifdef DEBUG | |
| 439 // Checks whether performing mark-compact collection. | |
| 440 bool in_use() { return state_ > PREPARE_GC; } | |
| 441 bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } | |
| 442 #endif | |
| 443 | |
| 444 // Determine type of object and emit deletion log event. | 444 // Determine type of object and emit deletion log event. |
| 445 static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate); | 445 static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate); |
| 446 | 446 |
| 447 // Distinguishable invalid map encodings (for single word and multiple words) | 447 // Distinguishable invalid map encodings (for single word and multiple words) |
| 448 // that indicate free regions. | 448 // that indicate free regions. |
| 449 static const uint32_t kSingleFreeEncoding = 0; | 449 static const uint32_t kSingleFreeEncoding = 0; |
| 450 static const uint32_t kMultiFreeEncoding = 1; | 450 static const uint32_t kMultiFreeEncoding = 1; |
| 451 | 451 |
| 452 static inline bool IsMarked(Object* obj); | 452 static inline bool IsMarked(Object* obj); |
| 453 static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p); | 453 static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p); |
| 454 | 454 |
| 455 inline Heap* heap() const { return heap_; } | 455 inline Heap* heap() const { return heap_; } |
| 456 inline Isolate* isolate() const; | 456 inline Isolate* isolate() const; |
| 457 | 457 |
| 458 CodeFlusher* code_flusher() { return code_flusher_; } | 458 CodeFlusher* code_flusher() { return code_flusher_; } |
| 459 inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } | 459 inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; } |
| 460 | 460 |
| 461 #ifdef VERIFY_HEAP | |
| 462 void VerifyValidStoreAndSlotsBufferEntries(); | |
| 463 void VerifyMarkbitsAreClean(); | |
| 464 static void VerifyMarkbitsAreClean(PagedSpace* space); | |
| 465 static void VerifyMarkbitsAreClean(NewSpace* space); | |
| 466 void VerifyWeakEmbeddedObjectsInCode(); | |
| 467 void VerifyOmittedMapChecks(); | |
| 468 #endif | |
| 469 | |
| 470 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { | 461 INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) { |
| 471 return Page::FromAddress(reinterpret_cast<Address>(host)) | 462 return Page::FromAddress(reinterpret_cast<Address>(host)) |
| 472 ->ShouldSkipEvacuationSlotRecording(); | 463 ->ShouldSkipEvacuationSlotRecording(); |
| 473 } | 464 } |
| 474 | 465 |
| 475 static inline bool IsOnEvacuationCandidate(HeapObject* obj) { | 466 static inline bool IsOnEvacuationCandidate(HeapObject* obj) { |
| 476 return Page::FromAddress(reinterpret_cast<Address>(obj)) | 467 return Page::FromAddress(reinterpret_cast<Address>(obj)) |
| 477 ->IsEvacuationCandidate(); | 468 ->IsEvacuationCandidate(); |
| 478 } | 469 } |
| 479 | 470 |
| (...skipping 38 matching lines...) |
| 518 void MarkAllocationSite(AllocationSite* site); | 509 void MarkAllocationSite(AllocationSite* site); |
| 519 | 510 |
| 520 // Mark objects in implicit references groups if their parent object | 511 // Mark objects in implicit references groups if their parent object |
| 521 // is marked. | 512 // is marked. |
| 522 void MarkImplicitRefGroups(MarkObjectFunction mark_object); | 513 void MarkImplicitRefGroups(MarkObjectFunction mark_object); |
| 523 | 514 |
| 524 MarkingDeque* marking_deque() { return &marking_deque_; } | 515 MarkingDeque* marking_deque() { return &marking_deque_; } |
| 525 | 516 |
| 526 Sweeper& sweeper() { return sweeper_; } | 517 Sweeper& sweeper() { return sweeper_; } |
| 527 | 518 |
| 519 #ifdef DEBUG |
| 520 // Checks whether performing mark-compact collection. |
| 521 bool in_use() { return state_ > PREPARE_GC; } |
| 522 bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; } |
| 523 #endif |
| 524 |
| 525 #ifdef VERIFY_HEAP |
| 526 void VerifyValidStoreAndSlotsBufferEntries(); |
| 527 void VerifyMarkbitsAreClean(); |
| 528 static void VerifyMarkbitsAreClean(PagedSpace* space); |
| 529 static void VerifyMarkbitsAreClean(NewSpace* space); |
| 530 void VerifyWeakEmbeddedObjectsInCode(); |
| 531 void VerifyOmittedMapChecks(); |
| 532 #endif |
| 533 |
| 528 private: | 534 private: |
| 529 template <PageEvacuationMode mode> | 535 template <PageEvacuationMode mode> |
| 530 class EvacuateNewSpacePageVisitor; | 536 class EvacuateNewSpacePageVisitor; |
| 531 class EvacuateNewSpaceVisitor; | 537 class EvacuateNewSpaceVisitor; |
| 532 class EvacuateOldSpaceVisitor; | 538 class EvacuateOldSpaceVisitor; |
| 533 class EvacuateRecordOnlyVisitor; | 539 class EvacuateRecordOnlyVisitor; |
| 534 class EvacuateVisitorBase; | 540 class EvacuateVisitorBase; |
| 535 class HeapObjectVisitor; | 541 class HeapObjectVisitor; |
| 536 class ObjectStatsVisitor; | 542 class ObjectStatsVisitor; |
| 537 | 543 |
| (...skipping 19 matching lines...) |
| 557 // MarkCompactCollector::Prepare() and is otherwise in its | 563 // MarkCompactCollector::Prepare() and is otherwise in its |
| 558 // normal state. | 564 // normal state. |
| 559 // | 565 // |
| 560 // After: Live objects are marked and non-live objects are unmarked. | 566 // After: Live objects are marked and non-live objects are unmarked. |
| 561 | 567 |
| 562 friend class CodeMarkingVisitor; | 568 friend class CodeMarkingVisitor; |
| 563 friend class IncrementalMarkingMarkingVisitor; | 569 friend class IncrementalMarkingMarkingVisitor; |
| 564 friend class MarkCompactMarkingVisitor; | 570 friend class MarkCompactMarkingVisitor; |
| 565 friend class MarkingVisitor; | 571 friend class MarkingVisitor; |
| 566 friend class RecordMigratedSlotVisitor; | 572 friend class RecordMigratedSlotVisitor; |
| 573 template <MarkCompactMode mode> |
| 567 friend class RootMarkingVisitor; | 574 friend class RootMarkingVisitor; |
| 568 friend class SharedFunctionInfoMarkingVisitor; | 575 friend class SharedFunctionInfoMarkingVisitor; |
| 576 friend class StaticYoungGenerationMarkingVisitor; |
| 569 | 577 |
| 570 // Mark code objects that are active on the stack to prevent them | 578 // Mark code objects that are active on the stack to prevent them |
| 571 // from being flushed. | 579 // from being flushed. |
| 572 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); | 580 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); |
| 573 | 581 |
| 574 void PrepareForCodeFlushing(); | 582 void PrepareForCodeFlushing(); |
| 575 | 583 |
| 576 // Marking operations for objects reachable from roots. | 584 // Marking operations for objects reachable from roots. |
| 577 void MarkLiveObjects(); | 585 void MarkLiveObjects(); |
| 586 // Mark the young generation. |
| 587 void MarkLiveObjectsInYoungGeneration(); |
| 578 | 588 |
| 579 // Pushes a black object onto the marking stack and accounts for live bytes. | 589 // Pushes a black object onto the marking stack and accounts for live bytes. |
| 580 // Note that this assumes live bytes have not yet been counted. | 590 // Note that this assumes live bytes have not yet been counted. |
| 581 INLINE(void PushBlack(HeapObject* obj)); | 591 INLINE(void PushBlack(HeapObject* obj)); |
| 582 | 592 |
| 583 // Unshifts a black object into the marking stack and accounts for live bytes. | 593 // Unshifts a black object into the marking stack and accounts for live bytes. |
| 584 // Note that this assumes live bytes have already been counted. | 594 // Note that this assumes live bytes have already been counted. |
| 585 INLINE(void UnshiftBlack(HeapObject* obj)); | 595 INLINE(void UnshiftBlack(HeapObject* obj)); |
| 586 | 596 |
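The comments distinguish the two worklist entry points by live-byte accounting and insertion end: `PushBlack` handles a newly blackened object (count its bytes now, queue at the back), while `UnshiftBlack` re-inserts an already-counted object at the front. A small standalone sketch of that discipline using invented types, not V8's `MarkingDeque`:

```cpp
#include <cstddef>
#include <deque>

struct FakeObject {
  int size;
  bool black;
};

class WorklistSketch {
 public:
  // New discovery: mark black, account its live bytes, queue at the back.
  void PushBlack(FakeObject* obj) {
    obj->black = true;
    live_bytes_ += obj->size;
    deque_.push_back(obj);
  }

  // Re-insertion (object was already counted): only requeue, at the front.
  void UnshiftBlack(FakeObject* obj) { deque_.push_front(obj); }

  FakeObject* Pop() {
    if (deque_.empty()) return nullptr;
    FakeObject* obj = deque_.back();
    deque_.pop_back();
    return obj;
  }

  std::size_t live_bytes() const { return live_bytes_; }

 private:
  std::deque<FakeObject*> deque_;
  std::size_t live_bytes_ = 0;
};

int main() {
  WorklistSketch worklist;
  FakeObject a{16, false};
  FakeObject b{32, false};
  worklist.PushBlack(&a);
  worklist.PushBlack(&b);
  worklist.UnshiftBlack(worklist.Pop());  // requeue b without recounting it
  return worklist.live_bytes() == 48 ? 0 : 1;
}
```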
| 587 // Marks the object black and pushes it on the marking stack. | 597 // Marks the object black and pushes it on the marking stack. |
| 588 // This is for non-incremental marking only. | 598 // This is for non-incremental marking only. |
| 589 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); | 599 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); |
| 590 | 600 |
| 591 // Marks the object black assuming that it is not yet marked. | 601 // Marks the object black assuming that it is not yet marked. |
| 592 // This is for non-incremental marking only. | 602 // This is for non-incremental marking only. |
| 593 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); | 603 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); |
| 594 | 604 |
| 595 // Mark the heap roots and all objects reachable from them. | 605 // Mark the heap roots and all objects reachable from them. |
| 596 void MarkRoots(RootMarkingVisitor* visitor); | 606 void MarkRoots(RootMarkingVisitor<MarkCompactMode::FULL>* visitor); |
| 597 | 607 |
| 598 // Mark the string table specially. References to internalized strings from | 608 // Mark the string table specially. References to internalized strings from |
| 599 // the string table are weak. | 609 // the string table are weak. |
| 600 void MarkStringTable(RootMarkingVisitor* visitor); | 610 void MarkStringTable(RootMarkingVisitor<MarkCompactMode::FULL>* visitor); |
| 601 | 611 |
| 602 // Mark objects reachable (transitively) from objects in the marking stack | 612 // Mark objects reachable (transitively) from objects in the marking stack |
| 603 // or overflowed in the heap. | 613 // or overflowed in the heap. |
| 614 template <MarkCompactMode mode> |
| 604 void ProcessMarkingDeque(); | 615 void ProcessMarkingDeque(); |
| 605 | 616 |
| 606 // Mark objects reachable (transitively) from objects in the marking stack | 617 // Mark objects reachable (transitively) from objects in the marking stack |
| 607 // or overflowed in the heap. This respects references only considered in | 618 // or overflowed in the heap. This respects references only considered in |
| 608 // the final atomic marking pause including the following: | 619 // the final atomic marking pause including the following: |
| 609 // - Processing of objects reachable through Harmony WeakMaps. | 620 // - Processing of objects reachable through Harmony WeakMaps. |
| 610 // - Objects reachable due to host application logic like object groups, | 621 // - Objects reachable due to host application logic like object groups, |
| 611 // implicit references' groups, or embedder heap tracing. | 622 // implicit references' groups, or embedder heap tracing. |
| 612 void ProcessEphemeralMarking(ObjectVisitor* visitor, | 623 void ProcessEphemeralMarking(ObjectVisitor* visitor, |
| 613 bool only_process_harmony_weak_collections); | 624 bool only_process_harmony_weak_collections); |
| 614 | 625 |
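`ProcessEphemeralMarking` covers the ephemeron semantics of Harmony WeakMaps: a value is live only if its key is live, and marking a value can in turn make further keys live, so the atomic pause has to iterate to a fixpoint. A tiny standalone model of that fixpoint, unrelated to V8's actual data structures:

```cpp
#include <set>
#include <utility>
#include <vector>

// Ephemeron fixpoint: a WeakMap value is marked only once its key is marked,
// and marking a value may make more keys reachable, so repeat until no
// further progress is made.
void ProcessEphemerons(std::set<int>* marked,
                       const std::vector<std::pair<int, int>>& ephemerons) {
  bool progress = true;
  while (progress) {
    progress = false;
    for (const auto& key_value : ephemerons) {
      if (marked->count(key_value.first) && !marked->count(key_value.second)) {
        marked->insert(key_value.second);  // key is live, so the value is too
        progress = true;
      }
    }
  }
}

int main() {
  // Key->value entries of a hypothetical WeakMap: 1->2, 2->3, 4->5.
  std::vector<std::pair<int, int>> ephemerons = {{1, 2}, {2, 3}, {4, 5}};
  std::set<int> marked = {1};  // only object 1 is reachable from the roots
  ProcessEphemerons(&marked, ephemerons);
  // Objects 2 and 3 become live transitively; 5 stays dead because 4 is dead.
  return (marked.count(3) == 1 && marked.count(5) == 0) ? 0 : 1;
}
```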
| 615 // If the call-site of the top optimized code was not prepared for | 626 // If the call-site of the top optimized code was not prepared for |
| 616 // deoptimization, then treat the maps in the code as strong pointers, | 627 // deoptimization, then treat the maps in the code as strong pointers, |
| 617 // otherwise a map can die and deoptimize the code. | 628 // otherwise a map can die and deoptimize the code. |
| 618 void ProcessTopOptimizedFrame(ObjectVisitor* visitor); | 629 void ProcessTopOptimizedFrame(ObjectVisitor* visitor); |
| 619 | 630 |
| 620 // Collects a list of dependent code from maps embedded in optimized code. | 631 // Collects a list of dependent code from maps embedded in optimized code. |
| 621 DependentCode* DependentCodeListFromNonLiveMaps(); | 632 DependentCode* DependentCodeListFromNonLiveMaps(); |
| 622 | 633 |
| 623 // Mark objects reachable (transitively) from objects in the marking | 634 // Mark objects reachable (transitively) from objects in the marking |
| 624 // stack. This function empties the marking stack, but may leave | 635 // stack. This function empties the marking stack, but may leave |
| 625 // overflowed objects in the heap, in which case the marking stack's | 636 // overflowed objects in the heap, in which case the marking stack's |
| 626 // overflow flag will be set. | 637 // overflow flag will be set. |
| 638 template <MarkCompactMode mode> |
| 627 void EmptyMarkingDeque(); | 639 void EmptyMarkingDeque(); |
| 628 | 640 |
| 629 // Refill the marking stack with overflowed objects from the heap. This | 641 // Refill the marking stack with overflowed objects from the heap. This |
| 630 // function either leaves the marking stack full or clears the overflow | 642 // function either leaves the marking stack full or clears the overflow |
| 631 // flag on the marking stack. | 643 // flag on the marking stack. |
| 644 template <MarkCompactMode mode> |
| 632 void RefillMarkingDeque(); | 645 void RefillMarkingDeque(); |
| 633 | 646 |
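Taken together, the `EmptyMarkingDeque`/`RefillMarkingDeque` comments describe how the collector copes with a bounded marking stack: objects that do not fit set the overflow flag and stay discoverable in the heap, and refilling rescans for them until the flag finally stays clear. A hedged sketch of that drive loop; the data structures and capacity below are illustrative, and the side list stands in for the grey objects a real refill would rediscover by scanning pages:

```cpp
#include <cstddef>
#include <vector>

// Minimal model: a bounded worklist plus a heap-side list of overflowed work.
struct MarkingState {
  std::vector<int> deque;       // the bounded marking deque
  std::vector<int> overflowed;  // objects left behind in the "heap"
  bool overflow_flag = false;
  static constexpr std::size_t kCapacity = 4;
};

void PushBlack(MarkingState* s, int obj) {
  if (s->deque.size() < MarkingState::kCapacity) {
    s->deque.push_back(obj);
  } else {
    s->overflowed.push_back(obj);  // note it in the heap instead of the deque
    s->overflow_flag = true;
  }
}

// Drain the deque; visiting an object could push (and overflow) its children.
void EmptyMarkingDeque(MarkingState* s) {
  while (!s->deque.empty()) s->deque.pop_back();
}

// Move overflowed objects back onto the deque; either the deque fills up
// again or the overflow flag is cleared for good.
void RefillMarkingDeque(MarkingState* s) {
  s->overflow_flag = false;
  while (!s->overflowed.empty() &&
         s->deque.size() < MarkingState::kCapacity) {
    s->deque.push_back(s->overflowed.back());
    s->overflowed.pop_back();
  }
  if (!s->overflowed.empty()) s->overflow_flag = true;
}

// The loop the comments imply: drain, then keep refilling and draining while
// the overflow flag is still set.
void ProcessMarkingDeque(MarkingState* s) {
  EmptyMarkingDeque(s);
  while (s->overflow_flag) {
    RefillMarkingDeque(s);
    EmptyMarkingDeque(s);
  }
}

int main() {
  MarkingState s;
  for (int i = 0; i < 10; i++) PushBlack(&s, i);  // overflows past capacity 4
  ProcessMarkingDeque(&s);
  return (s.deque.empty() && s.overflowed.empty()) ? 0 : 1;
}
```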
| 634 // Helper methods for refilling the marking stack by discovering grey objects | 647 // Helper methods for refilling the marking stack by discovering grey objects |
| 635 // on various pages of the heap. Used by {RefillMarkingDeque} only. | 648 // on various pages of the heap. Used by {RefillMarkingDeque} only. |
| 636 template <class T> | 649 template <class T> |
| 637 void DiscoverGreyObjectsWithIterator(T* it); | 650 void DiscoverGreyObjectsWithIterator(T* it); |
| 638 void DiscoverGreyObjectsOnPage(MemoryChunk* p); | 651 void DiscoverGreyObjectsOnPage(MemoryChunk* p); |
| 639 void DiscoverGreyObjectsInSpace(PagedSpace* space); | 652 void DiscoverGreyObjectsInSpace(PagedSpace* space); |
| 640 void DiscoverGreyObjectsInNewSpace(); | 653 void DiscoverGreyObjectsInNewSpace(); |
| 641 | 654 |
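The `template <class T>` on `DiscoverGreyObjectsWithIterator` suggests the refill step is written once against any object iterator (a page, a paged space, new space) and simply pushes grey objects until the deque is full again. A hedged sketch of that shape; the `Next()` iterator protocol and the deque type are assumptions:

```cpp
#include <cstddef>
#include <vector>

struct GreyObject {
  bool grey;
};

// A stand-in for the marking deque: only "is it full" matters here.
struct DequeSketch {
  std::vector<GreyObject*> objects;
  static constexpr std::size_t kCapacity = 8;
  bool IsFull() const { return objects.size() >= kCapacity; }
  void Push(GreyObject* object) { objects.push_back(object); }
};

// Works with any iterator type exposing Next(); stops early once the deque is
// full, leaving the remaining grey objects for a later refill pass.
template <class T>
void DiscoverGreyObjectsWithIterator(T* it, DequeSketch* deque) {
  for (GreyObject* object = it->Next(); object != nullptr;
       object = it->Next()) {
    if (deque->IsFull()) return;
    if (object->grey) deque->Push(object);
  }
}

// A hypothetical iterator over a fixed array of objects.
struct ArrayIterator {
  GreyObject** pos;
  GreyObject** end;
  GreyObject* Next() { return pos != end ? *pos++ : nullptr; }
};

int main() {
  GreyObject a{true}, b{false}, c{true};
  GreyObject* all[] = {&a, &b, &c};
  ArrayIterator it{all, all + 3};
  DequeSketch deque;
  DiscoverGreyObjectsWithIterator(&it, &deque);
  return deque.objects.size() == 2 ? 0 : 1;  // picked up the two grey objects
}
```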
| (...skipping 130 matching lines...) |
| 772 | 785 |
| 773 private: | 786 private: |
| 774 MarkCompactCollector* collector_; | 787 MarkCompactCollector* collector_; |
| 775 }; | 788 }; |
| 776 | 789 |
| 777 V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space); | 790 V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space); |
| 778 } // namespace internal | 791 } // namespace internal |
| 779 } // namespace v8 | 792 } // namespace v8 |
| 780 | 793 |
| 781 #endif // V8_HEAP_MARK_COMPACT_H_ | 794 #endif // V8_HEAP_MARK_COMPACT_H_ |