| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
| 6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
| 7 | 7 |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/heap/spaces.h" | 9 #include "src/heap/spaces.h" |
| 10 | 10 |
| (...skipping 356 matching lines...) |
| 367 buffer = buffer->next(); | 367 buffer = buffer->next(); |
| 368 } | 368 } |
| 369 } | 369 } |
| 370 | 370 |
| 371 enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW }; | 371 enum AdditionMode { FAIL_ON_OVERFLOW, IGNORE_OVERFLOW }; |
| 372 | 372 |
| 373 static bool ChainLengthThresholdReached(SlotsBuffer* buffer) { | 373 static bool ChainLengthThresholdReached(SlotsBuffer* buffer) { |
| 374 return buffer != NULL && buffer->chain_length_ >= kChainLengthThreshold; | 374 return buffer != NULL && buffer->chain_length_ >= kChainLengthThreshold; |
| 375 } | 375 } |
| 376 | 376 |
| 377 INLINE(static bool AddToSynchronized(SlotsBufferAllocator* allocator, |
| 378 SlotsBuffer** buffer_address, |
| 379 base::Mutex* buffer_mutex, |
| 380 ObjectSlot slot, AdditionMode mode)) { |
| 381 base::LockGuard<base::Mutex> lock_guard(buffer_mutex); |
| 382 return AddTo(allocator, buffer_address, slot, mode); |
| 383 } |
| 384 |
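
For context on the new synchronized entry point: it takes the same arguments as AddTo plus the mutex guarding the shared buffer chain, so parallel compaction threads can append to one chain safely. A minimal sketch of a call site, assuming it runs inside a MarkCompactCollector member function and using the collector fields added further down in this patch:

    // Sketch only: record a migrated slot into the shared migration
    // buffer from a compaction thread. The mutex serializes writers;
    // with IGNORE_OVERFLOW the chain grows as needed, so the call
    // always succeeds.
    SlotsBuffer::AddToSynchronized(
        &slots_buffer_allocator_, &migration_slots_buffer_,
        &migration_slots_buffer_mutex_, slot,
        SlotsBuffer::IGNORE_OVERFLOW);

Keeping the unsynchronized AddTo alongside it means single-threaded paths do not pay for the lock; only the shared migration buffer needs the guarded variant.
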
| 377 INLINE(static bool AddTo(SlotsBufferAllocator* allocator, | 385 INLINE(static bool AddTo(SlotsBufferAllocator* allocator, |
| 378 SlotsBuffer** buffer_address, ObjectSlot slot, | 386 SlotsBuffer** buffer_address, ObjectSlot slot, |
| 379 AdditionMode mode)) { | 387 AdditionMode mode)) { |
| 380 SlotsBuffer* buffer = *buffer_address; | 388 SlotsBuffer* buffer = *buffer_address; |
| 381 if (buffer == NULL || buffer->IsFull()) { | 389 if (buffer == NULL || buffer->IsFull()) { |
| 382 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { | 390 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { |
| 383 allocator->DeallocateChain(buffer_address); | 391 allocator->DeallocateChain(buffer_address); |
| 384 return false; | 392 return false; |
| 385 } | 393 } |
| 386 buffer = allocator->AllocateBuffer(buffer); | 394 buffer = allocator->AllocateBuffer(buffer); |
| 387 *buffer_address = buffer; | 395 *buffer_address = buffer; |
| 388 } | 396 } |
| 389 buffer->Add(slot); | 397 buffer->Add(slot); |
| 390 return true; | 398 return true; |
| 391 } | 399 } |
| 392 | 400 |
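
The AdditionMode contract deserves a note: under FAIL_ON_OVERFLOW, once the chain reaches kChainLengthThreshold the whole chain is deallocated and false is returned, so the caller needs a fallback. A hedged sketch of such a caller, where slots_buffer_address() is assumed to be the MemoryChunk accessor for the page's chain head and AbandonCompactionOfPage is a hypothetical stand-in for the collector's real eviction path:

    // Hypothetical caller sketch. After a false return the entire chain
    // for this page has been freed, so its slots can no longer be
    // tracked and the page must drop out of compaction this cycle.
    if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
                            page->slots_buffer_address(), slot,
                            SlotsBuffer::FAIL_ON_OVERFLOW)) {
      AbandonCompactionOfPage(page);  // hypothetical fallback
    }
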
| 393 static bool IsTypedSlot(ObjectSlot slot); | 401 static bool IsTypedSlot(ObjectSlot slot); |
| 394 | 402 |
| 403 static bool AddToSynchronized(SlotsBufferAllocator* allocator, |
| 404 SlotsBuffer** buffer_address, |
| 405 base::Mutex* buffer_mutex, SlotType type, |
| 406 Address addr, AdditionMode mode); |
| 407 |
| 395 static bool AddTo(SlotsBufferAllocator* allocator, | 408 static bool AddTo(SlotsBufferAllocator* allocator, |
| 396 SlotsBuffer** buffer_address, SlotType type, Address addr, | 409 SlotsBuffer** buffer_address, SlotType type, Address addr, |
| 397 AdditionMode mode); | 410 AdditionMode mode); |
| 398 | 411 |
| 399 // Eliminates all stale entries from the slots buffer, i.e., slots that | 412 // Eliminates all stale entries from the slots buffer, i.e., slots that |
| 400 // are no longer part of live objects. This method must be called after | 413 // are no longer part of live objects. This method must be called after |
| 401 // marking, once the whole transitive closure is known, and before | 414 // marking, once the whole transitive closure is known, and before |
| 402 // sweeping, while the mark bits are still intact. | 415 // sweeping, while the mark bits are still intact. |
| 403 static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer); | 416 static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer); |
| 404 | 417 |
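
Conceptually, this filtering pass walks the chain and blanks out entries whose containing object died during marking, so the later pointer-update pass skips them. A sketch under stated assumptions: IsSlotInLiveObject and kRemovedEntry are stand-ins for the real helpers in mark-compact.cc, field access is simplified, and typed-slot address pairs are merely skipped:

    // Conceptual sketch only; not the patch's implementation.
    for (SlotsBuffer* b = buffer; b != NULL; b = b->next()) {
      for (int i = 0; i < b->idx_; ++i) {
        ObjectSlot slot = b->slots_[i];
        if (IsTypedSlot(slot)) {
          ++i;  // typed slots occupy two consecutive entries
          continue;
        }
        if (!IsSlotInLiveObject(heap, slot)) {
          b->slots_[i] = kRemovedEntry;  // stand-in sentinel value
        }
      }
    }
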
| (...skipping 310 matching lines...) |
| 715 | 728 |
| 716 // Synchronize compaction threads. | 729 // Synchronize compaction threads. |
| 717 base::Semaphore pending_compaction_jobs_semaphore_; | 730 base::Semaphore pending_compaction_jobs_semaphore_; |
| 718 | 731 |
| 719 bool evacuation_; | 732 bool evacuation_; |
| 720 | 733 |
| 721 SlotsBufferAllocator slots_buffer_allocator_; | 734 SlotsBufferAllocator slots_buffer_allocator_; |
| 722 | 735 |
| 723 SlotsBuffer* migration_slots_buffer_; | 736 SlotsBuffer* migration_slots_buffer_; |
| 724 | 737 |
| 738 base::Mutex migration_slots_buffer_mutex_; |
| 739 |
| 725 // Finishes GC, performs heap verification if enabled. | 740 // Finishes GC, performs heap verification if enabled. |
| 726 void Finish(); | 741 void Finish(); |
| 727 | 742 |
| 728 // ----------------------------------------------------------------------- | 743 // ----------------------------------------------------------------------- |
| 729 // Phase 1: Marking live objects. | 744 // Phase 1: Marking live objects. |
| 730 // | 745 // |
| 731 // Before: The heap has been prepared for garbage collection by | 746 // Before: The heap has been prepared for garbage collection by |
| 732 // MarkCompactCollector::Prepare() and is otherwise in its | 747 // MarkCompactCollector::Prepare() and is otherwise in its |
| 733 // normal state. | 748 // normal state. |
| 734 // | 749 // |
| (...skipping 155 matching lines...) |
| 890 | 905 |
| 891 // Finalizes the parallel sweeping phase. Marks all the pages that were | 906 // Finalizes the parallel sweeping phase. Marks all the pages that were |
| 892 // swept in parallel. | 907 // swept in parallel. |
| 893 void ParallelSweepSpacesComplete(); | 908 void ParallelSweepSpacesComplete(); |
| 894 | 909 |
| 895 void ParallelSweepSpaceComplete(PagedSpace* space); | 910 void ParallelSweepSpaceComplete(PagedSpace* space); |
| 896 | 911 |
| 897 // Updates store buffer and slot buffer for a pointer in a migrating object. | 912 // Updates store buffer and slot buffer for a pointer in a migrating object. |
| 898 void RecordMigratedSlot(Object* value, Address slot); | 913 void RecordMigratedSlot(Object* value, Address slot); |
| 899 | 914 |
| 915 // Adds the code entry slot to the slots buffer. |
| 916 void RecordMigratedCodeEntrySlot(Address code_entry, Address code_entry_slot); |
| 917 |
| 918 // Adds the slot of a moved code object. |
| 919 void RecordMigratedCodeObjectSlot(Address code_object); |
| 920 |
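
These two recorders presumably funnel into the synchronized typed-slot path declared above. A plausible shape for the first one, mirroring RecordMigratedSlot and assuming the SlotType constant CODE_ENTRY_SLOT from the elided portion of this header; the real body lives in mark-compact.cc:

    // Plausible sketch, not necessarily the patch's exact body.
    void MarkCompactCollector::RecordMigratedCodeEntrySlot(
        Address code_entry, Address code_entry_slot) {
      if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
        SlotsBuffer::AddToSynchronized(
            &slots_buffer_allocator_, &migration_slots_buffer_,
            &migration_slots_buffer_mutex_, SlotsBuffer::CODE_ENTRY_SLOT,
            code_entry_slot, SlotsBuffer::IGNORE_OVERFLOW);
      }
    }
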
| 900 #ifdef DEBUG | 921 #ifdef DEBUG |
| 901 friend class MarkObjectVisitor; | 922 friend class MarkObjectVisitor; |
| 902 static void VisitObject(HeapObject* obj); | 923 static void VisitObject(HeapObject* obj); |
| 903 | 924 |
| 904 friend class UnmarkObjectVisitor; | 925 friend class UnmarkObjectVisitor; |
| 905 static void UnmarkObject(HeapObject* obj); | 926 static void UnmarkObject(HeapObject* obj); |
| 906 #endif | 927 #endif |
| 907 | 928 |
| 908 Heap* heap_; | 929 Heap* heap_; |
| 909 base::VirtualMemory* marking_deque_memory_; | 930 base::VirtualMemory* marking_deque_memory_; |
| (...skipping 65 matching lines...) |
| 975 private: | 996 private: |
| 976 MarkCompactCollector* collector_; | 997 MarkCompactCollector* collector_; |
| 977 }; | 998 }; |
| 978 | 999 |
| 979 | 1000 |
| 980 const char* AllocationSpaceName(AllocationSpace space); | 1001 const char* AllocationSpaceName(AllocationSpace space); |
| 981 } | 1002 } |
| 982 } // namespace v8::internal | 1003 } // namespace v8::internal |
| 983 | 1004 |
| 984 #endif // V8_HEAP_MARK_COMPACT_H_ | 1005 #endif // V8_HEAP_MARK_COMPACT_H_ |