OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 27 matching lines...) | |
38 MarkCompactCollector::MarkCompactCollector(Heap* heap) | 38 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
39 : // NOLINT | 39 : // NOLINT |
40 #ifdef DEBUG | 40 #ifdef DEBUG |
41 state_(IDLE), | 41 state_(IDLE), |
42 #endif | 42 #endif |
43 reduce_memory_footprint_(false), | 43 reduce_memory_footprint_(false), |
44 abort_incremental_marking_(false), | 44 abort_incremental_marking_(false), |
45 finalize_incremental_marking_(false), | 45 finalize_incremental_marking_(false), |
46 marking_parity_(ODD_MARKING_PARITY), | 46 marking_parity_(ODD_MARKING_PARITY), |
47 compacting_(false), | 47 compacting_(false), |
48 was_marked_incrementally_(false), | |
49 sweeping_in_progress_(false), | 48 sweeping_in_progress_(false), |
50 pending_sweeper_jobs_semaphore_(0), | 49 pending_sweeper_jobs_semaphore_(0), |
51 evacuation_(false), | 50 evacuation_(false), |
52 migration_slots_buffer_(NULL), | 51 migration_slots_buffer_(NULL), |
53 heap_(heap), | 52 heap_(heap), |
54 marking_deque_memory_(NULL), | 53 marking_deque_memory_(NULL), |
55 marking_deque_memory_committed_(0), | 54 marking_deque_memory_committed_(0), |
56 code_flusher_(NULL), | 55 code_flusher_(NULL), |
57 have_code_to_deoptimize_(false) { | 56 have_code_to_deoptimize_(false) { |
58 } | 57 } |
(...skipping 272 matching lines...) | |
331 SlotsBuffer::VerifySlots(heap, chunk->slots_buffer()); | 330 SlotsBuffer::VerifySlots(heap, chunk->slots_buffer()); |
332 } | 331 } |
333 } | 332 } |
334 #endif | 333 #endif |
335 | 334 |
336 | 335 |
337 void MarkCompactCollector::CollectGarbage() { | 336 void MarkCompactCollector::CollectGarbage() { |
338 // Make sure that Prepare() has been called. The individual steps below will | 337 // Make sure that Prepare() has been called. The individual steps below will |
339 // update the state as they proceed. | 338 // update the state as they proceed. |
340 DCHECK(state_ == PREPARE_GC); | 339 DCHECK(state_ == PREPARE_GC); |
340 DCHECK(heap_->incremental_marking()->IsStopped()); | |
341 | 341 |
342 MarkLiveObjects(); | 342 MarkLiveObjects(); |
343 | 343 |
344 DCHECK(heap_->incremental_marking()->IsStopped()); | |
345 | |
346 // ClearNonLiveReferences can deoptimize code in dependent code arrays. | 344 // ClearNonLiveReferences can deoptimize code in dependent code arrays. |
347 // Process weak cells before so that weak cells in dependent code | 345 // Process weak cells before so that weak cells in dependent code |
348 // arrays are cleared or contain only live code objects. | 346 // arrays are cleared or contain only live code objects. |
349 ProcessAndClearWeakCells(); | 347 ProcessAndClearWeakCells(); |
350 | 348 |
351 ClearNonLiveReferences(); | 349 ClearNonLiveReferences(); |
352 | 350 |
353 ClearWeakCollections(); | 351 ClearWeakCollections(); |
354 | 352 |
355 heap_->set_encountered_weak_cells(Smi::FromInt(0)); | 353 heap_->set_encountered_weak_cells(Smi::FromInt(0)); |
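The comment above encodes an ordering constraint that is easy to miss. As a minimal sketch (not part of the CL; the wrapper function is hypothetical, the calls are the ones visible in this hunk), the atomic pause proceeds roughly as:

```cpp
// Illustrative phase ordering for the atomic mark-compact pause.
// ProcessAndClearWeakCells() must run before ClearNonLiveReferences(),
// because the latter can deoptimize code found in dependent code arrays,
// and those arrays must by then hold only live code objects.
void AtomicPauseSketch(MarkCompactCollector* collector, Heap* heap) {
  collector->MarkLiveObjects();           // trace and mark the live graph
  collector->ProcessAndClearWeakCells();  // clear weak cells first
  collector->ClearNonLiveReferences();    // may deoptimize dependent code
  collector->ClearWeakCollections();      // drop dead weak collections
  heap->set_encountered_weak_cells(Smi::FromInt(0));
}
```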
(...skipping 424 matching lines...) | |
780 } | 778 } |
781 compacting_ = false; | 779 compacting_ = false; |
782 evacuation_candidates_.Rewind(0); | 780 evacuation_candidates_.Rewind(0); |
783 invalidated_code_.Rewind(0); | 781 invalidated_code_.Rewind(0); |
784 } | 782 } |
785 DCHECK_EQ(0, evacuation_candidates_.length()); | 783 DCHECK_EQ(0, evacuation_candidates_.length()); |
786 } | 784 } |
787 | 785 |
788 | 786 |
789 void MarkCompactCollector::Prepare() { | 787 void MarkCompactCollector::Prepare() { |
790 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); | 788 bool was_marked_incrementally = heap()->incremental_marking()->IsMarking(); |
791 | 789 |
792 #ifdef DEBUG | 790 #ifdef DEBUG |
793 DCHECK(state_ == IDLE); | 791 DCHECK(state_ == IDLE); |
794 state_ = PREPARE_GC; | 792 state_ = PREPARE_GC; |
795 #endif | 793 #endif |
796 | 794 |
797 DCHECK(!FLAG_never_compact || !FLAG_always_compact); | 795 DCHECK(!FLAG_never_compact || !FLAG_always_compact); |
798 | 796 |
799 if (sweeping_in_progress()) { | 797 if (sweeping_in_progress()) { |
800 // Instead of waiting we could also abort the sweeper threads here. | 798 // Instead of waiting we could also abort the sweeper threads here. |
801 EnsureSweepingCompleted(); | 799 EnsureSweepingCompleted(); |
802 } | 800 } |
803 | 801 |
804 // Clear marking bits if incremental marking is aborted. | 802 // Clear marking bits if incremental marking is aborted. |
805 if (was_marked_incrementally_ && abort_incremental_marking_) { | 803 if (was_marked_incrementally && abort_incremental_marking_) { |
806 heap()->incremental_marking()->Stop(); | 804 heap()->incremental_marking()->Stop(); |
807 ClearMarkbits(); | 805 ClearMarkbits(); |
808 AbortWeakCollections(); | 806 AbortWeakCollections(); |
809 AbortWeakCells(); | 807 AbortWeakCells(); |
810 AbortCompaction(); | 808 AbortCompaction(); |
811 was_marked_incrementally_ = false; | 809 was_marked_incrementally = false; |
810 } | |
811 | |
812 if (was_marked_incrementally) { | |
813 heap()->incremental_marking()->Finalize(); | |
814 } else { | |
815 // Abort any pending incremental activities e.g. incremental sweeping. | |
816 heap()->incremental_marking()->Stop(); | |
Hannes Payer (out of office) 2015/08/07 10:53:38
In that case, Stop() was already performed before.
817 if (marking_deque_.in_use()) { | |
818 marking_deque_.Uninitialize(true); | |
Hannes Payer (out of office) 2015/08/07 10:53:38
Why are we doing this just in this case?
819 } | |
812 } | 820 } |
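A condensed sketch of the new Prepare() paths the two comments above refer to (illustrative only; the standalone function is hypothetical, the calls mirror the diff). It makes concrete why, on the abort path, Stop() ends up being called twice: the abort branch stops marking and resets the local flag, so the else branch then calls Stop() again:

```cpp
// Sketch of the incremental-marking handling in the new Prepare().
void PrepareMarkingSketch(Heap* heap, bool abort_incremental_marking,
                          MarkingDeque* marking_deque) {
  bool was_marked_incrementally = heap->incremental_marking()->IsMarking();
  if (was_marked_incrementally && abort_incremental_marking) {
    heap->incremental_marking()->Stop();  // first Stop(), on abort
    was_marked_incrementally = false;     // routes us into the else below
  }
  if (was_marked_incrementally) {
    heap->incremental_marking()->Finalize();
  } else {
    // Runs both when marking never started and when it was just aborted,
    // hence the redundant second Stop() flagged in the review.
    heap->incremental_marking()->Stop();
    if (marking_deque->in_use()) {
      marking_deque->Uninitialize(true);
    }
  }
}
```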
813 | 821 |
814 // Don't start compaction if we are in the middle of incremental | 822 // Don't start compaction if we are in the middle of incremental |
815 // marking cycle. We did not collect any slots. | 823 // marking cycle. We did not collect any slots. |
816 if (!FLAG_never_compact && !was_marked_incrementally_) { | 824 if (!FLAG_never_compact && !was_marked_incrementally) { |
817 StartCompaction(NON_INCREMENTAL_COMPACTION); | 825 StartCompaction(NON_INCREMENTAL_COMPACTION); |
818 } | 826 } |
819 | 827 |
820 PagedSpaces spaces(heap()); | 828 PagedSpaces spaces(heap()); |
821 for (PagedSpace* space = spaces.next(); space != NULL; | 829 for (PagedSpace* space = spaces.next(); space != NULL; |
822 space = spaces.next()) { | 830 space = spaces.next()) { |
823 space->PrepareForMarkCompact(); | 831 space->PrepareForMarkCompact(); |
824 } | 832 } |
825 | 833 |
826 #ifdef VERIFY_HEAP | 834 #ifdef VERIFY_HEAP |
827 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 835 if (!was_marked_incrementally && FLAG_verify_heap) { |
828 VerifyMarkbitsAreClean(); | 836 VerifyMarkbitsAreClean(); |
829 } | 837 } |
830 #endif | 838 #endif |
831 } | 839 } |
832 | 840 |
833 | 841 |
834 void MarkCompactCollector::Finish() { | 842 void MarkCompactCollector::Finish() { |
835 #ifdef DEBUG | 843 #ifdef DEBUG |
836 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 844 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
837 state_ = IDLE; | 845 state_ = IDLE; |
(...skipping 1392 matching lines...) | |
2230 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK); | 2238 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK); |
2231 double start_time = 0.0; | 2239 double start_time = 0.0; |
2232 if (FLAG_print_cumulative_gc_stat) { | 2240 if (FLAG_print_cumulative_gc_stat) { |
2233 start_time = base::OS::TimeCurrentMillis(); | 2241 start_time = base::OS::TimeCurrentMillis(); |
2234 } | 2242 } |
2235 // The recursive GC marker detects when it is nearing stack overflow, | 2243 // The recursive GC marker detects when it is nearing stack overflow, |
2236 // and switches to a different marking system. JS interrupts interfere | 2244 // and switches to a different marking system. JS interrupts interfere |
2237 // with the C stack limit check. | 2245 // with the C stack limit check. |
2238 PostponeInterruptsScope postpone(isolate()); | 2246 PostponeInterruptsScope postpone(isolate()); |
2239 | 2247 |
2240 IncrementalMarking* incremental_marking = heap_->incremental_marking(); | |
2241 if (was_marked_incrementally_) { | |
2242 incremental_marking->Finalize(); | |
2243 } else { | |
2244 // Abort any pending incremental activities e.g. incremental sweeping. | |
2245 incremental_marking->Stop(); | |
2246 if (marking_deque_.in_use()) { | |
2247 marking_deque_.Uninitialize(true); | |
2248 } | |
2249 } | |
2250 | |
2251 #ifdef DEBUG | 2248 #ifdef DEBUG |
2252 DCHECK(state_ == PREPARE_GC); | 2249 DCHECK(state_ == PREPARE_GC); |
2253 state_ = MARK_LIVE_OBJECTS; | 2250 state_ = MARK_LIVE_OBJECTS; |
2254 #endif | 2251 #endif |
2255 | 2252 |
2256 EnsureMarkingDequeIsCommittedAndInitialize( | 2253 EnsureMarkingDequeIsCommittedAndInitialize( |
2257 MarkCompactCollector::kMaxMarkingDequeSize); | 2254 MarkCompactCollector::kMaxMarkingDequeSize); |
2258 | 2255 |
2259 PrepareForCodeFlushing(); | 2256 PrepareForCodeFlushing(); |
2260 | 2257 |
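EnsureMarkingDequeIsCommittedAndInitialize() above re-commits the deque that the new Prepare() may have torn down. A rough sketch of that lifecycle, assuming only the semantics visible in this diff (the wrapper function is hypothetical):

```cpp
// The marking deque is released in Prepare() when no incremental marking
// was in progress, then committed at full size before the atomic pass.
void MarkingDequeLifecycleSketch(MarkCompactCollector* collector,
                                 MarkingDeque* deque) {
  if (deque->in_use()) {          // Prepare(), non-incremental path
    deque->Uninitialize(true);
  }
  // MarkLiveObjects(): commit and initialize at the maximum size.
  collector->EnsureMarkingDequeIsCommittedAndInitialize(
      MarkCompactCollector::kMaxMarkingDequeSize);
}
```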
(...skipping 1423 matching lines...) | |
3684 | 3681 |
3685 { | 3682 { |
3686 GCTracer::Scope gc_scope(heap()->tracer(), | 3683 GCTracer::Scope gc_scope(heap()->tracer(), |
3687 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); | 3684 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); |
3688 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_); | 3685 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_); |
3689 if (FLAG_trace_fragmentation_verbose) { | 3686 if (FLAG_trace_fragmentation_verbose) { |
3690 PrintF(" migration slots buffer: %d\n", | 3687 PrintF(" migration slots buffer: %d\n", |
3691 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); | 3688 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); |
3692 } | 3689 } |
3693 | 3690 |
3694 if (compacting_ && was_marked_incrementally_) { | 3691 if (compacting_ && heap()->incremental_marking()->IsMarking()) { |
3695 GCTracer::Scope gc_scope(heap()->tracer(), | 3692 GCTracer::Scope gc_scope(heap()->tracer(), |
3696 GCTracer::Scope::MC_RESCAN_LARGE_OBJECTS); | 3693 GCTracer::Scope::MC_RESCAN_LARGE_OBJECTS); |
3697 // It's difficult to filter out slots recorded for large objects. | 3694 // It's difficult to filter out slots recorded for large objects. |
3698 LargeObjectIterator it(heap_->lo_space()); | 3695 LargeObjectIterator it(heap_->lo_space()); |
3699 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 3696 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
3700 // LargeObjectSpace is not swept yet thus we have to skip | 3697 // LargeObjectSpace is not swept yet thus we have to skip |
3701 // dead objects explicitly. | 3698 // dead objects explicitly. |
3702 if (!IsMarked(obj)) continue; | 3699 if (!IsMarked(obj)) continue; |
3703 | 3700 |
3704 Page* p = Page::FromAddress(obj->address()); | 3701 Page* p = Page::FromAddress(obj->address()); |
(...skipping 1070 matching lines...) | |
4775 SlotsBuffer* buffer = *buffer_address; | 4772 SlotsBuffer* buffer = *buffer_address; |
4776 while (buffer != NULL) { | 4773 while (buffer != NULL) { |
4777 SlotsBuffer* next_buffer = buffer->next(); | 4774 SlotsBuffer* next_buffer = buffer->next(); |
4778 DeallocateBuffer(buffer); | 4775 DeallocateBuffer(buffer); |
4779 buffer = next_buffer; | 4776 buffer = next_buffer; |
4780 } | 4777 } |
4781 *buffer_address = NULL; | 4778 *buffer_address = NULL; |
4782 } | 4779 } |
4783 } // namespace internal | 4780 } // namespace internal |
4784 } // namespace v8 | 4781 } // namespace v8 |