OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 284 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
295 | 295 |
296 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); | 296 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); |
297 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); | 297 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); |
298 | 298 |
299 compacting_ = evacuation_candidates_.length() > 0; | 299 compacting_ = evacuation_candidates_.length() > 0; |
300 } | 300 } |
301 | 301 |
302 return compacting_; | 302 return compacting_; |
303 } | 303 } |
304 | 304 |
305 void MarkCompactCollector::ClearInvalidRememberedSetSlots() { | |
306 { | |
307 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STORE_BUFFER); | |
308 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap()); | |
309 } | |
310 // There is no need to filter the old to old set because | |
311 // it is completely cleared after the mark-compact GC. | |
312 // The slots that become invalid due to runtime transitions are | |
313 // cleared eagerly immediately after the transition. | |
314 | |
315 #ifdef VERIFY_HEAP | |
316 if (FLAG_verify_heap) { | |
317 RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap()); | |
318 RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap()); | |
319 } | |
320 #endif | |
321 } | |
322 | |
323 void MarkCompactCollector::CollectGarbage() { | 305 void MarkCompactCollector::CollectGarbage() { |
324 // Make sure that Prepare() has been called. The individual steps below will | 306 // Make sure that Prepare() has been called. The individual steps below will |
325 // update the state as they proceed. | 307 // update the state as they proceed. |
326 DCHECK(state_ == PREPARE_GC); | 308 DCHECK(state_ == PREPARE_GC); |
327 | 309 |
328 MarkLiveObjects(); | 310 MarkLiveObjects(); |
329 | 311 |
330 DCHECK(heap_->incremental_marking()->IsStopped()); | 312 DCHECK(heap_->incremental_marking()->IsStopped()); |
331 | 313 |
332 ClearNonLiveReferences(); | 314 ClearNonLiveReferences(); |
(...skipping 2070 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2403 | 2385 |
2404 { | 2386 { |
2405 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); | 2387 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); |
2406 ClearSimpleMapTransitions(non_live_map_list); | 2388 ClearSimpleMapTransitions(non_live_map_list); |
2407 ClearFullMapTransitions(); | 2389 ClearFullMapTransitions(); |
2408 } | 2390 } |
2409 | 2391 |
2410 MarkDependentCodeForDeoptimization(dependent_code_list); | 2392 MarkDependentCodeForDeoptimization(dependent_code_list); |
2411 | 2393 |
2412 ClearWeakCollections(); | 2394 ClearWeakCollections(); |
2413 | |
2414 ClearInvalidRememberedSetSlots(); | |
2415 } | 2395 } |
2416 | 2396 |
2417 | 2397 |
2418 void MarkCompactCollector::MarkDependentCodeForDeoptimization( | 2398 void MarkCompactCollector::MarkDependentCodeForDeoptimization( |
2419 DependentCode* list_head) { | 2399 DependentCode* list_head) { |
2420 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); | 2400 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); |
2421 Isolate* isolate = this->isolate(); | 2401 Isolate* isolate = this->isolate(); |
2422 DependentCode* current = list_head; | 2402 DependentCode* current = list_head; |
2423 while (current->length() > 0) { | 2403 while (current->length() > 0) { |
2424 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( | 2404 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( |
(...skipping 1392 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3817 // If this page was already swept in the meantime, we can return here. | 3797 // If this page was already swept in the meantime, we can return here. |
3818 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { | 3798 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { |
3819 page->mutex()->Unlock(); | 3799 page->mutex()->Unlock(); |
3820 return 0; | 3800 return 0; |
3821 } | 3801 } |
3822 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); | 3802 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); |
3823 const Sweeper::FreeSpaceTreatmentMode free_space_mode = | 3803 const Sweeper::FreeSpaceTreatmentMode free_space_mode = |
3824 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; | 3804 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; |
3825 if (identity == NEW_SPACE) { | 3805 if (identity == NEW_SPACE) { |
3826 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); | 3806 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); |
3827 } else if (identity == OLD_SPACE) { | |
3828 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | |
3829 } else if (identity == CODE_SPACE) { | |
3830 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | |
3831 } else { | 3807 } else { |
| 3808 if (identity == OLD_SPACE || identity == MAP_SPACE) { |
| 3809 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap_, page); |
| 3810 } else { |
| 3811 RememberedSet<OLD_TO_NEW>::ClearInvalidTypedSlots(heap_, page); |
| 3812 } |
3832 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | 3813 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); |
3833 } | 3814 } |
3834 | 3815 |
3835 // After finishing sweeping of a page we clean up its remembered set. | 3816 // After finishing sweeping of a page we clean up its remembered set. |
3836 if (page->typed_old_to_new_slots()) { | 3817 if (page->typed_old_to_new_slots()) { |
3837 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); | 3818 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); |
3838 } | 3819 } |
3839 if (page->old_to_new_slots()) { | 3820 if (page->old_to_new_slots()) { |
3840 page->old_to_new_slots()->FreeToBeFreedBuckets(); | 3821 page->old_to_new_slots()->FreeToBeFreedBuckets(); |
3841 } | 3822 } |
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4007 // The target is always in old space, we don't have to record the slot in | 3988 // The target is always in old space, we don't have to record the slot in |
4008 // the old-to-new remembered set. | 3989 // the old-to-new remembered set. |
4009 DCHECK(!heap()->InNewSpace(target)); | 3990 DCHECK(!heap()->InNewSpace(target)); |
4010 RecordRelocSlot(host, &rinfo, target); | 3991 RecordRelocSlot(host, &rinfo, target); |
4011 } | 3992 } |
4012 } | 3993 } |
4013 } | 3994 } |
4014 | 3995 |
4015 } // namespace internal | 3996 } // namespace internal |
4016 } // namespace v8 | 3997 } // namespace v8 |
OLD | NEW |