Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 30 const char* Marking::kBlackBitPattern = "10"; | 30 const char* Marking::kBlackBitPattern = "10"; |
| 31 const char* Marking::kGreyBitPattern = "11"; | 31 const char* Marking::kGreyBitPattern = "11"; |
| 32 const char* Marking::kImpossibleBitPattern = "01"; | 32 const char* Marking::kImpossibleBitPattern = "01"; |
| 33 | 33 |
| 34 | 34 |
| 35 // ------------------------------------------------------------------------- | 35 // ------------------------------------------------------------------------- |
| 36 // MarkCompactCollector | 36 // MarkCompactCollector |
| 37 | 37 |
| 38 MarkCompactCollector::MarkCompactCollector(Heap* heap) | 38 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
| 39 : // NOLINT | 39 : // NOLINT |
| 40 #ifdef DEBUG | 40 #if DCHECK_IS_ON |
| 41 state_(IDLE), | 41 state_(IDLE), |
| 42 #endif | 42 #endif |
| 43 reduce_memory_footprint_(false), | 43 reduce_memory_footprint_(false), |
| 44 abort_incremental_marking_(false), | 44 abort_incremental_marking_(false), |
| 45 marking_parity_(ODD_MARKING_PARITY), | 45 marking_parity_(ODD_MARKING_PARITY), |
| 46 compacting_(false), | 46 compacting_(false), |
| 47 was_marked_incrementally_(false), | 47 was_marked_incrementally_(false), |
| 48 sweeping_in_progress_(false), | 48 sweeping_in_progress_(false), |
| 49 pending_sweeper_jobs_semaphore_(0), | 49 pending_sweeper_jobs_semaphore_(0), |
| 50 evacuation_(false), | 50 evacuation_(false), |
| (...skipping 485 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 536 if (!heap_->incremental_marking()->IsMarking()) return; | 536 if (!heap_->incremental_marking()->IsMarking()) return; |
| 537 | 537 |
| 538 // If the mark doesn't move, we don't check the color of the object. | 538 // If the mark doesn't move, we don't check the color of the object. |
| 539 // It doesn't matter whether the object is black, since it hasn't changed | 539 // It doesn't matter whether the object is black, since it hasn't changed |
| 540 // size, so the adjustment to the live data count will be zero anyway. | 540 // size, so the adjustment to the live data count will be zero anyway. |
| 541 if (old_start == new_start) return; | 541 if (old_start == new_start) return; |
| 542 | 542 |
| 543 MarkBit new_mark_bit = MarkBitFrom(new_start); | 543 MarkBit new_mark_bit = MarkBitFrom(new_start); |
| 544 MarkBit old_mark_bit = MarkBitFrom(old_start); | 544 MarkBit old_mark_bit = MarkBitFrom(old_start); |
| 545 | 545 |
| 546 #ifdef DEBUG | 546 #if DCHECK_IS_ON |
| 547 ObjectColor old_color = Color(old_mark_bit); | 547 ObjectColor old_color = Color(old_mark_bit); |
| 548 #endif | 548 #endif |
| 549 | 549 |
| 550 if (Marking::IsBlack(old_mark_bit)) { | 550 if (Marking::IsBlack(old_mark_bit)) { |
| 551 old_mark_bit.Clear(); | 551 old_mark_bit.Clear(); |
| 552 DCHECK(IsWhite(old_mark_bit)); | 552 DCHECK(IsWhite(old_mark_bit)); |
| 553 Marking::MarkBlack(new_mark_bit); | 553 Marking::MarkBlack(new_mark_bit); |
| 554 return; | 554 return; |
| 555 } else if (Marking::IsGrey(old_mark_bit)) { | 555 } else if (Marking::IsGrey(old_mark_bit)) { |
| 556 old_mark_bit.Clear(); | 556 old_mark_bit.Clear(); |
| 557 old_mark_bit.Next().Clear(); | 557 old_mark_bit.Next().Clear(); |
| 558 DCHECK(IsWhite(old_mark_bit)); | 558 DCHECK(IsWhite(old_mark_bit)); |
| 559 heap_->incremental_marking()->WhiteToGreyAndPush( | 559 heap_->incremental_marking()->WhiteToGreyAndPush( |
| 560 HeapObject::FromAddress(new_start), new_mark_bit); | 560 HeapObject::FromAddress(new_start), new_mark_bit); |
| 561 heap_->incremental_marking()->RestartIfNotMarking(); | 561 heap_->incremental_marking()->RestartIfNotMarking(); |
| 562 } | 562 } |
| 563 | 563 |
| 564 #ifdef DEBUG | 564 #if DCHECK_IS_ON |
| 565 ObjectColor new_color = Color(new_mark_bit); | 565 ObjectColor new_color = Color(new_mark_bit); |
| 566 DCHECK(new_color == old_color); | 566 DCHECK(new_color == old_color); |
| 567 #endif | 567 #endif |
| 568 } | 568 } |
| 569 | 569 |
| 570 | 570 |
| 571 const char* AllocationSpaceName(AllocationSpace space) { | 571 const char* AllocationSpaceName(AllocationSpace space) { |
| 572 switch (space) { | 572 switch (space) { |
| 573 case NEW_SPACE: | 573 case NEW_SPACE: |
| 574 return "NEW_SPACE"; | 574 return "NEW_SPACE"; |
| (...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 808 evacuation_candidates_.Rewind(0); | 808 evacuation_candidates_.Rewind(0); |
| 809 invalidated_code_.Rewind(0); | 809 invalidated_code_.Rewind(0); |
| 810 } | 810 } |
| 811 DCHECK_EQ(0, evacuation_candidates_.length()); | 811 DCHECK_EQ(0, evacuation_candidates_.length()); |
| 812 } | 812 } |
| 813 | 813 |
| 814 | 814 |
| 815 void MarkCompactCollector::Prepare() { | 815 void MarkCompactCollector::Prepare() { |
| 816 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); | 816 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); |
| 817 | 817 |
| 818 #ifdef DEBUG | 818 #if DCHECK_IS_ON |
| 819 DCHECK(state_ == IDLE); | 819 DCHECK(state_ == IDLE); |
| 820 state_ = PREPARE_GC; | 820 state_ = PREPARE_GC; |
| 821 #endif | 821 #endif |
| 822 | 822 |
| 823 DCHECK(!FLAG_never_compact || !FLAG_always_compact); | 823 DCHECK(!FLAG_never_compact || !FLAG_always_compact); |
| 824 | 824 |
| 825 if (sweeping_in_progress()) { | 825 if (sweeping_in_progress()) { |
| 826 // Instead of waiting we could also abort the sweeper threads here. | 826 // Instead of waiting we could also abort the sweeper threads here. |
| 827 EnsureSweepingCompleted(); | 827 EnsureSweepingCompleted(); |
| 828 } | 828 } |
| (...skipping 22 matching lines...) Expand all Loading... | |
| 851 | 851 |
| 852 #ifdef VERIFY_HEAP | 852 #ifdef VERIFY_HEAP |
| 853 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 853 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
| 854 VerifyMarkbitsAreClean(); | 854 VerifyMarkbitsAreClean(); |
| 855 } | 855 } |
| 856 #endif | 856 #endif |
| 857 } | 857 } |
| 858 | 858 |
| 859 | 859 |
| 860 void MarkCompactCollector::Finish() { | 860 void MarkCompactCollector::Finish() { |
| 861 #ifdef DEBUG | 861 #if DCHECK_IS_ON |
| 862 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 862 DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
| 863 state_ = IDLE; | 863 state_ = IDLE; |
| 864 #endif | 864 #endif |
| 865 // The stub cache is not traversed during GC; clear the cache to | 865 // The stub cache is not traversed during GC; clear the cache to |
| 866 // force lazy re-initialization of it. This must be done after the | 866 // force lazy re-initialization of it. This must be done after the |
| 867 // GC, because it relies on the new address of certain old space | 867 // GC, because it relies on the new address of certain old space |
| 868 // objects (empty string, illegal builtin). | 868 // objects (empty string, illegal builtin). |
| 869 isolate()->stub_cache()->Clear(); | 869 isolate()->stub_cache()->Clear(); |
| 870 | 870 |
| 871 if (have_code_to_deoptimize_) { | 871 if (have_code_to_deoptimize_) { |
| (...skipping 419 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1291 HeapObject* object = ShortCircuitConsString(p); | 1291 HeapObject* object = ShortCircuitConsString(p); |
| 1292 collector->RecordSlot(anchor_slot, p, object); | 1292 collector->RecordSlot(anchor_slot, p, object); |
| 1293 MarkBit mark = Marking::MarkBitFrom(object); | 1293 MarkBit mark = Marking::MarkBitFrom(object); |
| 1294 collector->MarkObject(object, mark); | 1294 collector->MarkObject(object, mark); |
| 1295 } | 1295 } |
| 1296 | 1296 |
| 1297 | 1297 |
| 1298 // Visit an unmarked object. | 1298 // Visit an unmarked object. |
| 1299 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, | 1299 INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector, |
| 1300 HeapObject* obj)) { | 1300 HeapObject* obj)) { |
| 1301 #ifdef DEBUG | 1301 #if DCHECK_IS_ON |
| *Reviewer comment — **Jakob Kummerow**, 2014/12/03 10:53:53: "just drop this `#if`"* | |
| 1302 DCHECK(collector->heap()->Contains(obj)); | 1302 DCHECK(collector->heap()->Contains(obj)); |
| 1303 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); | 1303 DCHECK(!collector->heap()->mark_compact_collector()->IsMarked(obj)); |
| 1304 #endif | 1304 #endif |
| 1305 Map* map = obj->map(); | 1305 Map* map = obj->map(); |
| 1306 Heap* heap = obj->GetHeap(); | 1306 Heap* heap = obj->GetHeap(); |
| 1307 MarkBit mark = Marking::MarkBitFrom(obj); | 1307 MarkBit mark = Marking::MarkBitFrom(obj); |
| 1308 heap->mark_compact_collector()->SetMark(obj, mark); | 1308 heap->mark_compact_collector()->SetMark(obj, mark); |
| 1309 // Mark the map pointer and the body. | 1309 // Mark the map pointer and the body. |
| 1310 MarkBit map_mark = Marking::MarkBitFrom(map); | 1310 MarkBit map_mark = Marking::MarkBitFrom(map); |
| 1311 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1311 heap->mark_compact_collector()->MarkObject(map, map_mark); |
| (...skipping 861 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2173 | 2173 |
| 2174 IncrementalMarking* incremental_marking = heap_->incremental_marking(); | 2174 IncrementalMarking* incremental_marking = heap_->incremental_marking(); |
| 2175 if (was_marked_incrementally_) { | 2175 if (was_marked_incrementally_) { |
| 2176 incremental_marking->Finalize(); | 2176 incremental_marking->Finalize(); |
| 2177 } else { | 2177 } else { |
| 2178 // Abort any pending incremental activities e.g. incremental sweeping. | 2178 // Abort any pending incremental activities e.g. incremental sweeping. |
| 2179 incremental_marking->Abort(); | 2179 incremental_marking->Abort(); |
| 2180 InitializeMarkingDeque(); | 2180 InitializeMarkingDeque(); |
| 2181 } | 2181 } |
| 2182 | 2182 |
| 2183 #ifdef DEBUG | 2183 #if DCHECK_IS_ON |
| 2184 DCHECK(state_ == PREPARE_GC); | 2184 DCHECK(state_ == PREPARE_GC); |
| 2185 state_ = MARK_LIVE_OBJECTS; | 2185 state_ = MARK_LIVE_OBJECTS; |
| 2186 #endif | 2186 #endif |
| 2187 | 2187 |
| 2188 EnsureMarkingDequeIsCommittedAndInitialize(); | 2188 EnsureMarkingDequeIsCommittedAndInitialize(); |
| 2189 | 2189 |
| 2190 PrepareForCodeFlushing(); | 2190 PrepareForCodeFlushing(); |
| 2191 | 2191 |
| 2192 if (was_marked_incrementally_) { | 2192 if (was_marked_incrementally_) { |
| 2193 // There is no write barrier on cells so we have to scan them now at the end | 2193 // There is no write barrier on cells so we have to scan them now at the end |
| (...skipping 1954 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4148 } | 4148 } |
| 4149 | 4149 |
| 4150 | 4150 |
| 4151 void MarkCompactCollector::SweepSpaces() { | 4151 void MarkCompactCollector::SweepSpaces() { |
| 4152 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP); | 4152 GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP); |
| 4153 double start_time = 0.0; | 4153 double start_time = 0.0; |
| 4154 if (FLAG_print_cumulative_gc_stat) { | 4154 if (FLAG_print_cumulative_gc_stat) { |
| 4155 start_time = base::OS::TimeCurrentMillis(); | 4155 start_time = base::OS::TimeCurrentMillis(); |
| 4156 } | 4156 } |
| 4157 | 4157 |
| 4158 #ifdef DEBUG | 4158 #if DCHECK_IS_ON |
| 4159 state_ = SWEEP_SPACES; | 4159 state_ = SWEEP_SPACES; |
| 4160 #endif | 4160 #endif |
| 4161 MoveEvacuationCandidatesToEndOfPagesList(); | 4161 MoveEvacuationCandidatesToEndOfPagesList(); |
| 4162 | 4162 |
| 4163 // Noncompacting collections simply sweep the spaces to clear the mark | 4163 // Noncompacting collections simply sweep the spaces to clear the mark |
| 4164 // bits and free the nonlive blocks (for old and map spaces). We sweep | 4164 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 4165 // the map space last because freeing non-live maps overwrites them and | 4165 // the map space last because freeing non-live maps overwrites them and |
| 4166 // the other spaces rely on possibly non-live maps to get the sizes for | 4166 // the other spaces rely on possibly non-live maps to get the sizes for |
| 4167 // non-live objects. | 4167 // non-live objects. |
| 4168 { | 4168 { |
| (...skipping 270 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4439 SlotsBuffer* buffer = *buffer_address; | 4439 SlotsBuffer* buffer = *buffer_address; |
| 4440 while (buffer != NULL) { | 4440 while (buffer != NULL) { |
| 4441 SlotsBuffer* next_buffer = buffer->next(); | 4441 SlotsBuffer* next_buffer = buffer->next(); |
| 4442 DeallocateBuffer(buffer); | 4442 DeallocateBuffer(buffer); |
| 4443 buffer = next_buffer; | 4443 buffer = next_buffer; |
| 4444 } | 4444 } |
| 4445 *buffer_address = NULL; | 4445 *buffer_address = NULL; |
| 4446 } | 4446 } |
| 4447 } | 4447 } |
| 4448 } // namespace v8::internal | 4448 } // namespace v8::internal |
| OLD | NEW |