OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 29 matching lines...) |
40 | 40 |
41 MarkCompactCollector::MarkCompactCollector(Heap* heap) | 41 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
42 : // NOLINT | 42 : // NOLINT |
43 #ifdef DEBUG | 43 #ifdef DEBUG |
44 state_(IDLE), | 44 state_(IDLE), |
45 #endif | 45 #endif |
46 marking_parity_(ODD_MARKING_PARITY), | 46 marking_parity_(ODD_MARKING_PARITY), |
47 compacting_(false), | 47 compacting_(false), |
48 was_marked_incrementally_(false), | 48 was_marked_incrementally_(false), |
49 sweeping_in_progress_(false), | 49 sweeping_in_progress_(false), |
| 50 parallel_compaction_in_progress_(false), |
50 pending_sweeper_jobs_semaphore_(0), | 51 pending_sweeper_jobs_semaphore_(0), |
51 pending_compaction_jobs_semaphore_(0), | 52 pending_compaction_jobs_semaphore_(0), |
52 evacuation_(false), | 53 evacuation_(false), |
53 migration_slots_buffer_(NULL), | 54 migration_slots_buffer_(NULL), |
54 heap_(heap), | 55 heap_(heap), |
55 marking_deque_memory_(NULL), | 56 marking_deque_memory_(NULL), |
56 marking_deque_memory_committed_(0), | 57 marking_deque_memory_committed_(0), |
57 code_flusher_(NULL), | 58 code_flusher_(NULL), |
58 have_code_to_deoptimize_(false) { | 59 have_code_to_deoptimize_(false) { |
59 } | 60 } |
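The initializer list above adds a parallel_compaction_in_progress_ flag next to the existing job semaphores. The matching member declaration lives in mark-compact.h and is not part of this section; a minimal standalone model of that state (an assumption for illustration, not V8's actual header) could look like:

// Standalone model (not V8 code): the new flag sits alongside a counter
// standing in for base::Semaphore pending_compaction_jobs_semaphore_.
#include <atomic>

class CollectorModel {
 public:
  CollectorModel()
      : sweeping_in_progress_(false),
        parallel_compaction_in_progress_(false),
        pending_compaction_jobs_(0) {}

  bool parallel_compaction_in_progress() const {
    return parallel_compaction_in_progress_;
  }

 private:
  bool sweeping_in_progress_;
  // Set before spawning the background compaction task, cleared after the
  // completion wait; readers pick a synchronized slow path while it is set.
  bool parallel_compaction_in_progress_;
  std::atomic<int> pending_compaction_jobs_;  // stand-in for base::Semaphore
};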
(...skipping 2623 matching lines...) |
2683 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 2684 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
2684 weak_cell_obj = weak_cell->next(); | 2685 weak_cell_obj = weak_cell->next(); |
2685 weak_cell->clear_next(heap()); | 2686 weak_cell->clear_next(heap()); |
2686 } | 2687 } |
2687 heap()->set_encountered_weak_cells(Smi::FromInt(0)); | 2688 heap()->set_encountered_weak_cells(Smi::FromInt(0)); |
2688 } | 2689 } |
2689 | 2690 |
2690 | 2691 |
2691 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { | 2692 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { |
2692 if (heap_->InNewSpace(value)) { | 2693 if (heap_->InNewSpace(value)) { |
2693 heap_->store_buffer()->Mark(slot); | 2694 if (parallel_compaction_in_progress_) { |
| 2695 heap_->store_buffer()->MarkSynchronized(slot); |
| 2696 } else { |
| 2697 heap_->store_buffer()->Mark(slot); |
| 2698 } |
2694 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { | 2699 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { |
2695 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, | 2700 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
2696 reinterpret_cast<Object**>(slot), | 2701 reinterpret_cast<Object**>(slot), |
2697 SlotsBuffer::IGNORE_OVERFLOW); | 2702 SlotsBuffer::IGNORE_OVERFLOW); |
2698 } | 2703 } |
2699 } | 2704 } |
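The new branch above routes store-buffer marking through MarkSynchronized whenever a background compaction task may be recording migrated slots concurrently with the main thread. The implementation of MarkSynchronized is not shown in this diff; a hedged standalone sketch of the pattern it presumably follows (a lock-guarded variant of the plain insert, not V8's actual StoreBuffer) is:

// Standalone sketch (assumed): Mark() appends without locking and is only
// safe single-threaded; MarkSynchronized() takes a mutex first so the
// background compaction task and the main thread can both record slots.
#include <cstdint>
#include <mutex>
#include <vector>

class StoreBufferModel {
 public:
  // Fast path: used when only one thread records new-space slots.
  void Mark(uintptr_t slot) { slots_.push_back(slot); }

  // Slow path: used while parallel compaction is in progress.
  void MarkSynchronized(uintptr_t slot) {
    std::lock_guard<std::mutex> guard(mutex_);
    Mark(slot);
  }

 private:
  std::mutex mutex_;
  std::vector<uintptr_t> slots_;
};

The real StoreBuffer operates on raw Address values and has its own buffering and overflow handling; the sketch only illustrates why RecordMigratedSlot must pick the synchronized entry point while the flag set in EvacuatePagesInParallel is true.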
2700 | 2705 |
2701 | 2706 |
2702 // We scavenge new space simultaneously with sweeping. This is done in two | 2707 // We scavenge new space simultaneously with sweeping. This is done in two |
2703 // passes. | 2708 // passes. |
(...skipping 602 matching lines...) |
3306 } | 3311 } |
3307 | 3312 |
3308 // Clear marking bits for current cell. | 3313 // Clear marking bits for current cell. |
3309 *cell = 0; | 3314 *cell = 0; |
3310 } | 3315 } |
3311 p->ResetLiveBytes(); | 3316 p->ResetLiveBytes(); |
3312 } | 3317 } |
3313 | 3318 |
3314 | 3319 |
3315 void MarkCompactCollector::EvacuatePagesInParallel() { | 3320 void MarkCompactCollector::EvacuatePagesInParallel() { |
| 3321 parallel_compaction_in_progress_ = true; |
3316 V8::GetCurrentPlatform()->CallOnBackgroundThread( | 3322 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
3317 new CompactionTask(heap()), v8::Platform::kShortRunningTask); | 3323 new CompactionTask(heap()), v8::Platform::kShortRunningTask); |
3318 } | 3324 } |
3319 | 3325 |
3320 | 3326 |
| 3327 void MarkCompactCollector::WaitUntilCompactionCompleted() { |
| 3328 pending_compaction_jobs_semaphore_.Wait(); |
| 3329 parallel_compaction_in_progress_ = false; |
| 3330 } |
| 3331 |
| 3332 |
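EvacuatePagesInParallel above sets the flag and posts a CompactionTask to a background thread, and the new WaitUntilCompactionCompleted blocks on pending_compaction_jobs_semaphore_ before clearing the flag. The task body is not part of this diff; a standalone sketch of the spawn/wait handshake (using std::thread and a condition-variable semaphore in place of v8::Platform::CallOnBackgroundThread and base::Semaphore, as an assumption) is:

// Standalone sketch of the spawn/wait handshake (assumed wiring; V8 uses
// v8::Platform background tasks and base::Semaphore instead).
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

class SimpleSemaphore {
 public:
  void Signal() {
    std::lock_guard<std::mutex> guard(mutex_);
    ++count_;
    cv_.notify_one();
  }
  void Wait() {
    std::unique_lock<std::mutex> lock(mutex_);
    cv_.wait(lock, [this] { return count_ > 0; });
    --count_;
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  int count_ = 0;
};

bool parallel_compaction_in_progress = false;
SimpleSemaphore pending_compaction_jobs;

void EvacuatePages() { std::cout << "evacuating pages\n"; }

// Models CompactionTask::Run(): do the work, then signal completion.
void CompactionTaskRun() {
  EvacuatePages();
  pending_compaction_jobs.Signal();
}

void EvacuatePagesInParallel() {
  parallel_compaction_in_progress = true;
  std::thread(CompactionTaskRun).detach();
}

void WaitUntilCompactionCompleted() {
  pending_compaction_jobs.Wait();
  parallel_compaction_in_progress = false;
}

int main() {
  EvacuatePagesInParallel();
  WaitUntilCompactionCompleted();
  return 0;
}

Factoring the wait into WaitUntilCompactionCompleted, as the caller in EvacuateNewSpaceAndCandidates now does, keeps the flag toggling next to the semaphore wait instead of leaking both details into callers.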
3321 void MarkCompactCollector::EvacuatePages() { | 3333 void MarkCompactCollector::EvacuatePages() { |
3322 int npages = evacuation_candidates_.length(); | 3334 int npages = evacuation_candidates_.length(); |
3323 int abandoned_pages = 0; | 3335 int abandoned_pages = 0; |
3324 for (int i = 0; i < npages; i++) { | 3336 for (int i = 0; i < npages; i++) { |
3325 Page* p = evacuation_candidates_[i]; | 3337 Page* p = evacuation_candidates_[i]; |
3326 DCHECK(p->IsEvacuationCandidate() || | 3338 DCHECK(p->IsEvacuationCandidate() || |
3327 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3339 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
3328 DCHECK(static_cast<int>(p->parallel_sweeping()) == | 3340 DCHECK(static_cast<int>(p->parallel_sweeping()) == |
3329 MemoryChunk::SWEEPING_DONE); | 3341 MemoryChunk::SWEEPING_DONE); |
3330 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3342 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
(...skipping 288 matching lines...) |
3619 EvacuationScope evacuation_scope(this); | 3631 EvacuationScope evacuation_scope(this); |
3620 EvacuateNewSpace(); | 3632 EvacuateNewSpace(); |
3621 } | 3633 } |
3622 | 3634 |
3623 { | 3635 { |
3624 GCTracer::Scope gc_scope(heap()->tracer(), | 3636 GCTracer::Scope gc_scope(heap()->tracer(), |
3625 GCTracer::Scope::MC_EVACUATE_PAGES); | 3637 GCTracer::Scope::MC_EVACUATE_PAGES); |
3626 EvacuationScope evacuation_scope(this); | 3638 EvacuationScope evacuation_scope(this); |
3627 if (FLAG_parallel_compaction) { | 3639 if (FLAG_parallel_compaction) { |
3628 EvacuatePagesInParallel(); | 3640 EvacuatePagesInParallel(); |
3629 pending_compaction_jobs_semaphore_.Wait(); | 3641 WaitUntilCompactionCompleted(); |
3630 } else { | 3642 } else { |
3631 EvacuatePages(); | 3643 EvacuatePages(); |
3632 } | 3644 } |
3633 } | 3645 } |
3634 | 3646 |
3635 // Second pass: find pointers to new space and update them. | 3647 // Second pass: find pointers to new space and update them. |
3636 PointersUpdatingVisitor updating_visitor(heap()); | 3648 PointersUpdatingVisitor updating_visitor(heap()); |
3637 | 3649 |
3638 { | 3650 { |
3639 GCTracer::Scope gc_scope(heap()->tracer(), | 3651 GCTracer::Scope gc_scope(heap()->tracer(), |
(...skipping 1090 matching lines...) |
4730 SlotsBuffer* buffer = *buffer_address; | 4742 SlotsBuffer* buffer = *buffer_address; |
4731 while (buffer != NULL) { | 4743 while (buffer != NULL) { |
4732 SlotsBuffer* next_buffer = buffer->next(); | 4744 SlotsBuffer* next_buffer = buffer->next(); |
4733 DeallocateBuffer(buffer); | 4745 DeallocateBuffer(buffer); |
4734 buffer = next_buffer; | 4746 buffer = next_buffer; |
4735 } | 4747 } |
4736 *buffer_address = NULL; | 4748 *buffer_address = NULL; |
4737 } | 4749 } |
4738 } // namespace internal | 4750 } // namespace internal |
4739 } // namespace v8 | 4751 } // namespace v8 |