| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 284 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 295 | 295 |
| 296 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); | 296 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); |
| 297 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); | 297 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); |
| 298 | 298 |
| 299 compacting_ = evacuation_candidates_.length() > 0; | 299 compacting_ = evacuation_candidates_.length() > 0; |
| 300 } | 300 } |
| 301 | 301 |
| 302 return compacting_; | 302 return compacting_; |
| 303 } | 303 } |
| 304 | 304 |
| 305 void MarkCompactCollector::ClearInvalidRememberedSetSlots() { |
| 306 { |
| 307 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STORE_BUFFER); |
| 308 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap()); |
| 309 } |
| 310 // There is no need to filter the old to old set because |
| 311 // it is completely cleared after the mark-compact GC. |
| 312 // The slots that become invalid due to runtime transitions are |
| 313 // cleared eagerly immediately after the transition. |
| 314 |
| 315 #ifdef VERIFY_HEAP |
| 316 if (FLAG_verify_heap) { |
| 317 RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap()); |
| 318 RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap()); |
| 319 } |
| 320 #endif |
| 321 } |
| 322 |
| 305 void MarkCompactCollector::CollectGarbage() { | 323 void MarkCompactCollector::CollectGarbage() { |
| 306 // Make sure that Prepare() has been called. The individual steps below will | 324 // Make sure that Prepare() has been called. The individual steps below will |
| 307 // update the state as they proceed. | 325 // update the state as they proceed. |
| 308 DCHECK(state_ == PREPARE_GC); | 326 DCHECK(state_ == PREPARE_GC); |
| 309 | 327 |
| 310 MarkLiveObjects(); | 328 MarkLiveObjects(); |
| 311 | 329 |
| 312 DCHECK(heap_->incremental_marking()->IsStopped()); | 330 DCHECK(heap_->incremental_marking()->IsStopped()); |
| 313 | 331 |
| 314 ClearNonLiveReferences(); | 332 ClearNonLiveReferences(); |
| (...skipping 2070 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2385 | 2403 |
| 2386 { | 2404 { |
| 2387 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); | 2405 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); |
| 2388 ClearSimpleMapTransitions(non_live_map_list); | 2406 ClearSimpleMapTransitions(non_live_map_list); |
| 2389 ClearFullMapTransitions(); | 2407 ClearFullMapTransitions(); |
| 2390 } | 2408 } |
| 2391 | 2409 |
| 2392 MarkDependentCodeForDeoptimization(dependent_code_list); | 2410 MarkDependentCodeForDeoptimization(dependent_code_list); |
| 2393 | 2411 |
| 2394 ClearWeakCollections(); | 2412 ClearWeakCollections(); |
| 2413 |
| 2414 ClearInvalidRememberedSetSlots(); |
| 2395 } | 2415 } |
| 2396 | 2416 |
| 2397 | 2417 |
| 2398 void MarkCompactCollector::MarkDependentCodeForDeoptimization( | 2418 void MarkCompactCollector::MarkDependentCodeForDeoptimization( |
| 2399 DependentCode* list_head) { | 2419 DependentCode* list_head) { |
| 2400 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); | 2420 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); |
| 2401 Isolate* isolate = this->isolate(); | 2421 Isolate* isolate = this->isolate(); |
| 2402 DependentCode* current = list_head; | 2422 DependentCode* current = list_head; |
| 2403 while (current->length() > 0) { | 2423 while (current->length() > 0) { |
| 2404 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( | 2424 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( |
| (...skipping 1392 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3797 // If this page was already swept in the meantime, we can return here. | 3817 // If this page was already swept in the meantime, we can return here. |
| 3798 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { | 3818 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { |
| 3799 page->mutex()->Unlock(); | 3819 page->mutex()->Unlock(); |
| 3800 return 0; | 3820 return 0; |
| 3801 } | 3821 } |
| 3802 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); | 3822 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); |
| 3803 const Sweeper::FreeSpaceTreatmentMode free_space_mode = | 3823 const Sweeper::FreeSpaceTreatmentMode free_space_mode = |
| 3804 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; | 3824 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; |
| 3805 if (identity == NEW_SPACE) { | 3825 if (identity == NEW_SPACE) { |
| 3806 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); | 3826 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); |
| 3827 } else if (identity == OLD_SPACE) { |
| 3828 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); |
| 3829 } else if (identity == CODE_SPACE) { |
| 3830 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); |
| 3807 } else { | 3831 } else { |
| 3808 if (identity == OLD_SPACE || identity == MAP_SPACE) { | |
| 3809 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap_, page); | |
| 3810 } else { | |
| 3811 RememberedSet<OLD_TO_NEW>::ClearInvalidTypedSlots(heap_, page); | |
| 3812 } | |
| 3813 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | 3832 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); |
| 3814 } | 3833 } |
| 3815 | 3834 |
| 3816 // After finishing sweeping of a page we clean up its remembered set. | 3835 // After finishing sweeping of a page we clean up its remembered set. |
| 3817 if (page->typed_old_to_new_slots()) { | 3836 if (page->typed_old_to_new_slots()) { |
| 3818 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); | 3837 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); |
| 3819 } | 3838 } |
| 3820 if (page->old_to_new_slots()) { | 3839 if (page->old_to_new_slots()) { |
| 3821 page->old_to_new_slots()->FreeToBeFreedBuckets(); | 3840 page->old_to_new_slots()->FreeToBeFreedBuckets(); |
| 3822 } | 3841 } |
| (...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3988 // The target is always in old space, we don't have to record the slot in | 4007 // The target is always in old space, we don't have to record the slot in |
| 3989 // the old-to-new remembered set. | 4008 // the old-to-new remembered set. |
| 3990 DCHECK(!heap()->InNewSpace(target)); | 4009 DCHECK(!heap()->InNewSpace(target)); |
| 3991 RecordRelocSlot(host, &rinfo, target); | 4010 RecordRelocSlot(host, &rinfo, target); |
| 3992 } | 4011 } |
| 3993 } | 4012 } |
| 3994 } | 4013 } |
| 3995 | 4014 |
| 3996 } // namespace internal | 4015 } // namespace internal |
| 3997 } // namespace v8 | 4016 } // namespace v8 |
| OLD | NEW |