OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 284 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
295 | 295 |
296 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); | 296 heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); |
297 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); | 297 heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea(); |
298 | 298 |
299 compacting_ = evacuation_candidates_.length() > 0; | 299 compacting_ = evacuation_candidates_.length() > 0; |
300 } | 300 } |
301 | 301 |
302 return compacting_; | 302 return compacting_; |
303 } | 303 } |
304 | 304 |
305 void MarkCompactCollector::ClearInvalidRememberedSetSlots() { | |
306 { | |
307 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STORE_BUFFER); | |
308 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap()); | |
309 } | |
310 // There is no need to filter the old to old set because | |
311 // it is completely cleared after the mark-compact GC. | |
312 // The slots that become invalid due to runtime transitions are | |
313 // cleared eagerly immediately after the transition. | |
314 | |
315 #ifdef VERIFY_HEAP | |
316 if (FLAG_verify_heap) { | |
317 RememberedSet<OLD_TO_NEW>::VerifyValidSlots(heap()); | |
318 RememberedSet<OLD_TO_OLD>::VerifyValidSlots(heap()); | |
319 } | |
320 #endif | |
321 } | |
322 | |
323 void MarkCompactCollector::CollectGarbage() { | 305 void MarkCompactCollector::CollectGarbage() { |
324 // Make sure that Prepare() has been called. The individual steps below will | 306 // Make sure that Prepare() has been called. The individual steps below will |
325 // update the state as they proceed. | 307 // update the state as they proceed. |
326 DCHECK(state_ == PREPARE_GC); | 308 DCHECK(state_ == PREPARE_GC); |
327 | 309 |
328 MarkLiveObjects(); | 310 MarkLiveObjects(); |
329 | 311 |
330 DCHECK(heap_->incremental_marking()->IsStopped()); | 312 DCHECK(heap_->incremental_marking()->IsStopped()); |
331 | 313 |
332 ClearNonLiveReferences(); | 314 ClearNonLiveReferences(); |
(...skipping 2070 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2403 | 2385 |
2404 { | 2386 { |
2405 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); | 2387 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS); |
2406 ClearSimpleMapTransitions(non_live_map_list); | 2388 ClearSimpleMapTransitions(non_live_map_list); |
2407 ClearFullMapTransitions(); | 2389 ClearFullMapTransitions(); |
2408 } | 2390 } |
2409 | 2391 |
2410 MarkDependentCodeForDeoptimization(dependent_code_list); | 2392 MarkDependentCodeForDeoptimization(dependent_code_list); |
2411 | 2393 |
2412 ClearWeakCollections(); | 2394 ClearWeakCollections(); |
2413 | |
2414 ClearInvalidRememberedSetSlots(); | |
2415 } | 2395 } |
2416 | 2396 |
2417 | 2397 |
2418 void MarkCompactCollector::MarkDependentCodeForDeoptimization( | 2398 void MarkCompactCollector::MarkDependentCodeForDeoptimization( |
2419 DependentCode* list_head) { | 2399 DependentCode* list_head) { |
2420 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); | 2400 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); |
2421 Isolate* isolate = this->isolate(); | 2401 Isolate* isolate = this->isolate(); |
2422 DependentCode* current = list_head; | 2402 DependentCode* current = list_head; |
2423 while (current->length() > 0) { | 2403 while (current->length() > 0) { |
2424 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( | 2404 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( |
(...skipping 1395 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3820 // If this page was already swept in the meantime, we can return here. | 3800 // If this page was already swept in the meantime, we can return here. |
3821 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { | 3801 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { |
3822 page->mutex()->Unlock(); | 3802 page->mutex()->Unlock(); |
3823 return 0; | 3803 return 0; |
3824 } | 3804 } |
3825 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); | 3805 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); |
3826 const Sweeper::FreeSpaceTreatmentMode free_space_mode = | 3806 const Sweeper::FreeSpaceTreatmentMode free_space_mode = |
3827 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; | 3807 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; |
3828 if (identity == NEW_SPACE) { | 3808 if (identity == NEW_SPACE) { |
3829 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); | 3809 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); |
3830 } else if (identity == OLD_SPACE) { | |
3831 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | |
3832 } else if (identity == CODE_SPACE) { | |
3833 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | |
3834 } else { | 3810 } else { |
| 3811 if (identity == OLD_SPACE || identity == MAP_SPACE) { |
| 3812 RememberedSet<OLD_TO_NEW>::ClearInvalidSlots(heap_, page); |
| 3813 } else { |
| 3814 RememberedSet<OLD_TO_NEW>::ClearInvalidTypedSlots(heap_, page); |
| 3815 } |
3835 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); | 3816 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); |
3836 } | 3817 } |
3837 | 3818 |
3838 // After finishing sweeping of a page we clean up its remembered set. | 3819 // After finishing sweeping of a page we clean up its remembered set. |
3839 if (page->typed_old_to_new_slots()) { | 3820 if (page->typed_old_to_new_slots()) { |
3840 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); | 3821 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); |
3841 } | 3822 } |
3842 if (page->old_to_new_slots()) { | 3823 if (page->old_to_new_slots()) { |
3843 page->old_to_new_slots()->FreeToBeFreedBuckets(); | 3824 page->old_to_new_slots()->FreeToBeFreedBuckets(); |
3844 } | 3825 } |
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4010 // The target is always in old space, we don't have to record the slot in | 3991 // The target is always in old space, we don't have to record the slot in |
4011 // the old-to-new remembered set. | 3992 // the old-to-new remembered set. |
4012 DCHECK(!heap()->InNewSpace(target)); | 3993 DCHECK(!heap()->InNewSpace(target)); |
4013 RecordRelocSlot(host, &rinfo, target); | 3994 RecordRelocSlot(host, &rinfo, target); |
4014 } | 3995 } |
4015 } | 3996 } |
4016 } | 3997 } |
4017 | 3998 |
4018 } // namespace internal | 3999 } // namespace internal |
4019 } // namespace v8 | 4000 } // namespace v8 |
OLD | NEW |