OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
193 } | 193 } |
194 } | 194 } |
195 } | 195 } |
196 | 196 |
197 | 197 |
198 static void VerifyEvacuation(NewSpace* space) { | 198 static void VerifyEvacuation(NewSpace* space) { |
199 VerifyEvacuationVisitor visitor; | 199 VerifyEvacuationVisitor visitor; |
200 NewSpacePageRange range(space->bottom(), space->top()); | 200 NewSpacePageRange range(space->bottom(), space->top()); |
201 for (auto it = range.begin(); it != range.end();) { | 201 for (auto it = range.begin(); it != range.end();) { |
202 Page* page = *(it++); | 202 Page* page = *(it++); |
203 if (page->IsFlagSet(Page::CANNOT_BE_VERIFIED)) continue; | |
203 Address current = page->area_start(); | 204 Address current = page->area_start(); |
204 Address limit = it != range.end() ? page->area_end() : space->top(); | 205 Address limit = it != range.end() ? page->area_end() : space->top(); |
205 CHECK(limit == space->top() || !page->Contains(space->top())); | 206 CHECK(limit == space->top() || !page->Contains(space->top())); |
206 while (current < limit) { | 207 while (current < limit) { |
207 HeapObject* object = HeapObject::FromAddress(current); | 208 HeapObject* object = HeapObject::FromAddress(current); |
208 object->Iterate(&visitor); | 209 object->Iterate(&visitor); |
209 current += object->Size(); | 210 current += object->Size(); |
210 } | 211 } |
211 } | 212 } |
212 } | 213 } |
213 | 214 |
214 | 215 |
215 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { | 216 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { |
216 if (FLAG_use_allocation_folding && (space == heap->old_space())) { | 217 if (FLAG_use_allocation_folding && (space == heap->old_space())) { |
217 return; | 218 return; |
218 } | 219 } |
219 for (Page* p : *space) { | 220 for (Page* p : *space) { |
220 if (p->IsEvacuationCandidate()) continue; | 221 if (p->IsEvacuationCandidate()) continue; |
221 VerifyEvacuation(p); | 222 VerifyEvacuation(p); |
222 } | 223 } |
223 } | 224 } |
224 | 225 |
225 | 226 template <MarkCompactMode mode> |
226 static void VerifyEvacuation(Heap* heap) { | 227 static void VerifyEvacuation(Heap* heap) { |
227 VerifyEvacuation(heap, heap->old_space()); | 228 if (mode == MarkCompactMode::FULL) { |
228 VerifyEvacuation(heap, heap->code_space()); | 229 VerifyEvacuation(heap, heap->old_space()); |
229 VerifyEvacuation(heap, heap->map_space()); | 230 VerifyEvacuation(heap, heap->code_space()); |
231 VerifyEvacuation(heap, heap->map_space()); | |
232 } | |
230 VerifyEvacuation(heap->new_space()); | 233 VerifyEvacuation(heap->new_space()); |
231 | 234 |
232 VerifyEvacuationVisitor visitor; | 235 VerifyEvacuationVisitor visitor; |
233 heap->IterateStrongRoots(&visitor, VISIT_ALL); | 236 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
234 } | 237 } |
235 #endif // VERIFY_HEAP | 238 #endif // VERIFY_HEAP |
236 | 239 |
237 | 240 |
238 void MarkCompactCollector::SetUp() { | 241 void MarkCompactCollector::SetUp() { |
239 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 242 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
289 if (FLAG_trace_fragmentation) { | 292 if (FLAG_trace_fragmentation) { |
290 TraceFragmentation(heap()->map_space()); | 293 TraceFragmentation(heap()->map_space()); |
291 } | 294 } |
292 | 295 |
293 compacting_ = evacuation_candidates_.length() > 0; | 296 compacting_ = evacuation_candidates_.length() > 0; |
294 } | 297 } |
295 | 298 |
296 return compacting_; | 299 return compacting_; |
297 } | 300 } |
298 | 301 |
302 void MarkCompactCollector::CollectGarbageInYoungGeneration() { | |
303 AlwaysAllocateScope scope(isolate()); | |
304 sweeper().EnsureNewSpaceCompleted(); | |
305 MarkLiveObjectsInYoungGeneration(); | |
306 isolate() | |
307 ->global_handles() | |
308 ->IterateNewSpaceWeakUnmodifiedRoots<GlobalHandles::HANDLE_PHANTOM_NODES>( | |
309 nullptr); | |
310 ClearNonLiveReferencesInYoungGeneration(); | |
311 EvacuateNewSpaceAndCandidates<MarkCompactMode::YOUNG_GENERATION>(); | |
Hannes Payer (out of office)
2016/11/23 11:25:37
How about restructuring EvacuateNewSpaceAndCandidates
| |
312 } | |
313 | |
299 void MarkCompactCollector::CollectGarbage() { | 314 void MarkCompactCollector::CollectGarbage() { |
300 // Make sure that Prepare() has been called. The individual steps below will | 315 // Make sure that Prepare() has been called. The individual steps below will |
301 // update the state as they proceed. | 316 // update the state as they proceed. |
302 DCHECK(state_ == PREPARE_GC); | 317 DCHECK(state_ == PREPARE_GC); |
303 | 318 |
304 MarkLiveObjects(); | 319 MarkLiveObjects(); |
305 | 320 |
306 DCHECK(heap_->incremental_marking()->IsStopped()); | 321 DCHECK(heap_->incremental_marking()->IsStopped()); |
307 | 322 |
308 ClearNonLiveReferences(); | 323 ClearNonLiveReferences(); |
309 | 324 |
310 RecordObjectStats(); | 325 RecordObjectStats(); |
311 | 326 |
312 #ifdef VERIFY_HEAP | 327 #ifdef VERIFY_HEAP |
313 if (FLAG_verify_heap) { | 328 if (FLAG_verify_heap) { |
314 VerifyMarking(heap_); | 329 VerifyMarking(heap_); |
315 } | 330 } |
316 #endif | 331 #endif |
317 | 332 |
318 StartSweepSpaces(); | 333 StartSweepSpaces(); |
319 | 334 |
320 EvacuateNewSpaceAndCandidates(); | 335 EvacuateNewSpaceAndCandidates<MarkCompactMode::FULL>(); |
321 | 336 |
322 Finish(); | 337 Finish(); |
323 } | 338 } |
324 | 339 |
325 #ifdef VERIFY_HEAP | 340 #ifdef VERIFY_HEAP |
326 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 341 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
327 for (Page* p : *space) { | 342 for (Page* p : *space) { |
328 CHECK(p->markbits()->IsClean()); | 343 CHECK(p->markbits()->IsClean()); |
329 CHECK_EQ(0, p->LiveBytes()); | 344 CHECK_EQ(0, p->LiveBytes()); |
330 } | 345 } |
(...skipping 195 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
526 void MarkCompactCollector::EnsureSweepingCompleted() { | 541 void MarkCompactCollector::EnsureSweepingCompleted() { |
527 if (!sweeper().sweeping_in_progress()) return; | 542 if (!sweeper().sweeping_in_progress()) return; |
528 | 543 |
529 sweeper().EnsureCompleted(); | 544 sweeper().EnsureCompleted(); |
530 heap()->old_space()->RefillFreeList(); | 545 heap()->old_space()->RefillFreeList(); |
531 heap()->code_space()->RefillFreeList(); | 546 heap()->code_space()->RefillFreeList(); |
532 heap()->map_space()->RefillFreeList(); | 547 heap()->map_space()->RefillFreeList(); |
533 | 548 |
534 #ifdef VERIFY_HEAP | 549 #ifdef VERIFY_HEAP |
535 if (FLAG_verify_heap && !evacuation()) { | 550 if (FLAG_verify_heap && !evacuation()) { |
536 VerifyEvacuation(heap_); | 551 VerifyEvacuation<MarkCompactMode::FULL>(heap_); |
537 } | 552 } |
538 #endif | 553 #endif |
539 } | 554 } |
540 | 555 |
541 bool MarkCompactCollector::Sweeper::AreSweeperTasksRunning() { | 556 bool MarkCompactCollector::Sweeper::AreSweeperTasksRunning() { |
542 DCHECK(FLAG_concurrent_sweeping); | 557 DCHECK(FLAG_concurrent_sweeping); |
543 while (pending_sweeper_tasks_semaphore_.WaitFor( | 558 while (pending_sweeper_tasks_semaphore_.WaitFor( |
544 base::TimeDelta::FromSeconds(0))) { | 559 base::TimeDelta::FromSeconds(0))) { |
545 num_sweeping_tasks_.Increment(-1); | 560 num_sweeping_tasks_.Increment(-1); |
546 } | 561 } |
(...skipping 1944 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2491 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY); | 2506 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY); |
2492 ProcessEphemeralMarking(&root_visitor, true); | 2507 ProcessEphemeralMarking(&root_visitor, true); |
2493 if (heap_->UsingEmbedderHeapTracer()) { | 2508 if (heap_->UsingEmbedderHeapTracer()) { |
2494 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_EPILOGUE); | 2509 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_EPILOGUE); |
2495 heap()->embedder_heap_tracer()->TraceEpilogue(); | 2510 heap()->embedder_heap_tracer()->TraceEpilogue(); |
2496 } | 2511 } |
2497 } | 2512 } |
2498 } | 2513 } |
2499 } | 2514 } |
2500 | 2515 |
2516 void MarkCompactCollector::ClearNonLiveReferencesInYoungGeneration() { | |
2517 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR); | |
2518 | |
2519 { | |
2520 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STRING_TABLE); | |
2521 | |
2522 ExternalStringTableCleaner external_visitor(heap(), nullptr); | |
2523 heap()->external_string_table_.IterateNewSpaceStrings(&external_visitor); | |
2524 heap()->external_string_table_.CleanupNewSpaceStrings(); | |
2525 } | |
2526 } | |
2501 | 2527 |
2502 void MarkCompactCollector::ClearNonLiveReferences() { | 2528 void MarkCompactCollector::ClearNonLiveReferences() { |
2503 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR); | 2529 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR); |
2504 | 2530 |
2505 { | 2531 { |
2506 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STRING_TABLE); | 2532 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STRING_TABLE); |
2507 | 2533 |
2508 // Prune the string table removing all strings only pointed to by the | 2534 // Prune the string table removing all strings only pointed to by the |
2509 // string table. Cannot use string_table() here because the string | 2535 // string table. Cannot use string_table() here because the string |
2510 // table is marked. | 2536 // table is marked. |
(...skipping 514 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3025 return String::cast(map_word.ToForwardingAddress()); | 3051 return String::cast(map_word.ToForwardingAddress()); |
3026 } | 3052 } |
3027 | 3053 |
3028 return String::cast(*p); | 3054 return String::cast(*p); |
3029 } | 3055 } |
3030 | 3056 |
3031 void MarkCompactCollector::EvacuateNewSpacePrologue() { | 3057 void MarkCompactCollector::EvacuateNewSpacePrologue() { |
3032 NewSpace* new_space = heap()->new_space(); | 3058 NewSpace* new_space = heap()->new_space(); |
3033 // Append the list of new space pages to be processed. | 3059 // Append the list of new space pages to be processed. |
3034 for (Page* p : NewSpacePageRange(new_space->bottom(), new_space->top())) { | 3060 for (Page* p : NewSpacePageRange(new_space->bottom(), new_space->top())) { |
3061 p->ClearFlag(Page::CANNOT_BE_VERIFIED); | |
3035 newspace_evacuation_candidates_.Add(p); | 3062 newspace_evacuation_candidates_.Add(p); |
3036 } | 3063 } |
3037 new_space->Flip(); | 3064 new_space->Flip(); |
3038 new_space->ResetAllocationInfo(); | 3065 new_space->ResetAllocationInfo(); |
3039 } | 3066 } |
3040 | 3067 |
3041 class MarkCompactCollector::Evacuator : public Malloced { | 3068 class MarkCompactCollector::Evacuator : public Malloced { |
3042 public: | 3069 public: |
3043 enum EvacuationMode { | 3070 enum EvacuationMode { |
3044 kObjectsNewToOld, | 3071 kObjectsNewToOld, |
(...skipping 230 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3275 // sweeper, which will happen after updating pointers. | 3302 // sweeper, which will happen after updating pointers. |
3276 *data += 1; | 3303 *data += 1; |
3277 } | 3304 } |
3278 break; | 3305 break; |
3279 default: | 3306 default: |
3280 UNREACHABLE(); | 3307 UNREACHABLE(); |
3281 } | 3308 } |
3282 } | 3309 } |
3283 }; | 3310 }; |
3284 | 3311 |
3312 template <MarkCompactMode mode> | |
3285 void MarkCompactCollector::EvacuatePagesInParallel() { | 3313 void MarkCompactCollector::EvacuatePagesInParallel() { |
3286 PageParallelJob<EvacuationJobTraits> job( | 3314 PageParallelJob<EvacuationJobTraits> job( |
3287 heap_, heap_->isolate()->cancelable_task_manager(), | 3315 heap_, heap_->isolate()->cancelable_task_manager(), |
3288 &page_parallel_job_semaphore_); | 3316 &page_parallel_job_semaphore_); |
3289 | 3317 |
3290 int abandoned_pages = 0; | 3318 int abandoned_pages = 0; |
3291 intptr_t live_bytes = 0; | 3319 intptr_t live_bytes = 0; |
3292 for (Page* page : evacuation_candidates_) { | 3320 if (mode == MarkCompactMode::FULL) { |
3293 live_bytes += page->LiveBytes(); | 3321 for (Page* page : evacuation_candidates_) { |
3294 job.AddPage(page, &abandoned_pages); | 3322 live_bytes += page->LiveBytes(); |
3323 job.AddPage(page, &abandoned_pages); | |
3324 } | |
3295 } | 3325 } |
3296 | 3326 |
3297 const bool reduce_memory = heap()->ShouldReduceMemory(); | 3327 const bool reduce_memory = heap()->ShouldReduceMemory(); |
3298 const Address age_mark = heap()->new_space()->age_mark(); | 3328 const Address age_mark = heap()->new_space()->age_mark(); |
3299 for (Page* page : newspace_evacuation_candidates_) { | 3329 for (Page* page : newspace_evacuation_candidates_) { |
3300 live_bytes += page->LiveBytes(); | 3330 live_bytes += page->LiveBytes(); |
3301 if (!reduce_memory && !page->NeverEvacuate() && | 3331 if (!reduce_memory && !page->NeverEvacuate() && |
3302 (page->LiveBytes() > Evacuator::PageEvacuationThreshold()) && | 3332 (page->LiveBytes() > Evacuator::PageEvacuationThreshold()) && |
3303 !page->Contains(age_mark)) { | 3333 !page->Contains(age_mark)) { |
3304 if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { | 3334 if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3351 HeapObject* heap_object = HeapObject::cast(object); | 3381 HeapObject* heap_object = HeapObject::cast(object); |
3352 MapWord map_word = heap_object->map_word(); | 3382 MapWord map_word = heap_object->map_word(); |
3353 if (map_word.IsForwardingAddress()) { | 3383 if (map_word.IsForwardingAddress()) { |
3354 return map_word.ToForwardingAddress(); | 3384 return map_word.ToForwardingAddress(); |
3355 } | 3385 } |
3356 } | 3386 } |
3357 return object; | 3387 return object; |
3358 } | 3388 } |
3359 }; | 3389 }; |
3360 | 3390 |
3391 class MinorMCWeakObjectRetainer : public WeakObjectRetainer { | |
3392 public: | |
3393 explicit MinorMCWeakObjectRetainer(Heap* heap) : heap_(heap) {} | |
3394 | |
3395 virtual Object* RetainAs(Object* object) { | |
3396 if (!heap_->InFromSpace(object)) { | |
3397 return object; | |
3398 } | |
3399 | |
3400 MapWord map_word = HeapObject::cast(object)->map_word(); | |
3401 if (map_word.IsForwardingAddress()) { | |
3402 return map_word.ToForwardingAddress(); | |
3403 } | |
3404 return NULL; | |
3405 } | |
3406 | |
3407 private: | |
3408 Heap* heap_; | |
3409 }; | |
3410 | |
3361 MarkCompactCollector::Sweeper::ClearOldToNewSlotsMode | 3411 MarkCompactCollector::Sweeper::ClearOldToNewSlotsMode |
3362 MarkCompactCollector::Sweeper::GetClearOldToNewSlotsMode(Page* p) { | 3412 MarkCompactCollector::Sweeper::GetClearOldToNewSlotsMode(Page* p) { |
3363 AllocationSpace identity = p->owner()->identity(); | 3413 AllocationSpace identity = p->owner()->identity(); |
3364 if (p->old_to_new_slots() && | 3414 if (p->old_to_new_slots() && |
3365 (identity == OLD_SPACE || identity == MAP_SPACE)) { | 3415 (identity == OLD_SPACE || identity == MAP_SPACE)) { |
3366 return MarkCompactCollector::Sweeper::CLEAR_REGULAR_SLOTS; | 3416 return MarkCompactCollector::Sweeper::CLEAR_REGULAR_SLOTS; |
3367 } else if (p->typed_old_to_new_slots() && identity == CODE_SPACE) { | 3417 } else if (p->typed_old_to_new_slots() && identity == CODE_SPACE) { |
3368 return MarkCompactCollector::Sweeper::CLEAR_TYPED_SLOTS; | 3418 return MarkCompactCollector::Sweeper::CLEAR_TYPED_SLOTS; |
3369 } | 3419 } |
3370 return MarkCompactCollector::Sweeper::DO_NOT_CLEAR; | 3420 return MarkCompactCollector::Sweeper::DO_NOT_CLEAR; |
(...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3574 } | 3624 } |
3575 page->SetLiveBytes(new_live_size); | 3625 page->SetLiveBytes(new_live_size); |
3576 } | 3626 } |
3577 | 3627 |
3578 void MarkCompactCollector::Sweeper::AddSweptPageSafe(PagedSpace* space, | 3628 void MarkCompactCollector::Sweeper::AddSweptPageSafe(PagedSpace* space, |
3579 Page* page) { | 3629 Page* page) { |
3580 base::LockGuard<base::Mutex> guard(&mutex_); | 3630 base::LockGuard<base::Mutex> guard(&mutex_); |
3581 swept_list_[space->identity()].Add(page); | 3631 swept_list_[space->identity()].Add(page); |
3582 } | 3632 } |
3583 | 3633 |
3634 template <MarkCompactMode mode> | |
3584 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 3635 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
3585 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE); | 3636 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE); |
3586 Heap::RelocationLock relocation_lock(heap()); | 3637 Heap::RelocationLock relocation_lock(heap()); |
3587 | 3638 |
3588 { | 3639 { |
3589 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_COPY); | 3640 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_COPY); |
3590 EvacuationScope evacuation_scope(this); | 3641 EvacuationScope evacuation_scope(this); |
3591 | 3642 |
3592 EvacuateNewSpacePrologue(); | 3643 EvacuateNewSpacePrologue(); |
3593 EvacuatePagesInParallel(); | 3644 EvacuatePagesInParallel<mode>(); |
3594 heap()->new_space()->set_age_mark(heap()->new_space()->top()); | 3645 heap()->new_space()->set_age_mark(heap()->new_space()->top()); |
3595 } | 3646 } |
3596 | 3647 |
3597 UpdatePointersAfterEvacuation(); | 3648 UpdatePointersAfterEvacuation<mode>(); |
3598 | 3649 |
3599 if (!heap()->new_space()->Rebalance()) { | 3650 { |
3600 FatalProcessOutOfMemory("NewSpace::Rebalance"); | 3651 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_REBALANCE); |
3652 if (!heap()->new_space()->Rebalance()) { | |
3653 FatalProcessOutOfMemory("NewSpace::Rebalance"); | |
3654 } | |
3601 } | 3655 } |
3602 | 3656 |
3603 // Give pages that are queued to be freed back to the OS. Note that filtering | 3657 // Give pages that are queued to be freed back to the OS. Note that filtering |
3604 // slots only handles old space (for unboxed doubles), and thus map space can | 3658 // slots only handles old space (for unboxed doubles), and thus map space can |
3605 // still contain stale pointers. We only free the chunks after pointer updates | 3659 // still contain stale pointers. We only free the chunks after pointer updates |
3606 // to still have access to page headers. | 3660 // to still have access to page headers. |
3607 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); | 3661 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); |
3608 | 3662 |
3609 { | 3663 { |
3610 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_CLEAN_UP); | 3664 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_CLEAN_UP); |
3611 | 3665 |
3612 for (Page* p : newspace_evacuation_candidates_) { | 3666 for (Page* p : newspace_evacuation_candidates_) { |
3613 if (p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) { | 3667 if (p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) { |
3614 p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION); | 3668 p->ClearFlag(Page::PAGE_NEW_NEW_PROMOTION); |
3615 sweeper().AddPage(p->owner()->identity(), p); | 3669 p->SetFlag(Page::CANNOT_BE_VERIFIED); |
3670 if (mode == MarkCompactMode::FULL) | |
3671 sweeper().AddPage(p->owner()->identity(), p); | |
3616 } else if (p->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)) { | 3672 } else if (p->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)) { |
3617 p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION); | 3673 p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION); |
3674 p->SetFlag(Page::CANNOT_BE_VERIFIED); | |
3618 p->ForAllFreeListCategories( | 3675 p->ForAllFreeListCategories( |
3619 [](FreeListCategory* category) { DCHECK(!category->is_linked()); }); | 3676 [](FreeListCategory* category) { DCHECK(!category->is_linked()); }); |
3620 sweeper().AddPage(p->owner()->identity(), p); | 3677 if (mode == MarkCompactMode::FULL) |
3678 sweeper().AddPage(p->owner()->identity(), p); | |
3679 } | |
3680 if (mode == MarkCompactMode::YOUNG_GENERATION) { | |
3681 p->ClearLiveness(); | |
3621 } | 3682 } |
3622 } | 3683 } |
3623 newspace_evacuation_candidates_.Rewind(0); | 3684 newspace_evacuation_candidates_.Rewind(0); |
3624 | 3685 |
3625 for (Page* p : evacuation_candidates_) { | 3686 if (mode == MarkCompactMode::FULL) { |
3626 // Important: skip list should be cleared only after roots were updated | 3687 for (Page* p : evacuation_candidates_) { |
3627 // because root iteration traverses the stack and might have to find | 3688 // Important: skip list should be cleared only after roots were updated |
3628 // code objects from non-updated pc pointing into evacuation candidate. | 3689 // because root iteration traverses the stack and might have to find |
3629 SkipList* list = p->skip_list(); | 3690 // code objects from non-updated pc pointing into evacuation candidate. |
3630 if (list != NULL) list->Clear(); | 3691 SkipList* list = p->skip_list(); |
3631 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { | 3692 if (list != NULL) list->Clear(); |
3632 sweeper().AddPage(p->owner()->identity(), p); | 3693 if (p->IsFlagSet(Page::COMPACTION_WAS_ABORTED)) { |
3633 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); | 3694 sweeper().AddPage(p->owner()->identity(), p); |
3695 p->ClearFlag(Page::COMPACTION_WAS_ABORTED); | |
3696 } | |
3634 } | 3697 } |
3698 | |
3699 // Deallocate evacuated candidate pages. | |
3700 ReleaseEvacuationCandidates(); | |
3635 } | 3701 } |
3636 | |
3637 // Deallocate evacuated candidate pages. | |
3638 ReleaseEvacuationCandidates(); | |
3639 } | 3702 } |
3640 | 3703 |
3641 #ifdef VERIFY_HEAP | 3704 #ifdef VERIFY_HEAP |
3642 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { | 3705 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { |
3643 VerifyEvacuation(heap()); | 3706 VerifyEvacuation<mode>(heap()); |
3644 } | 3707 } |
3645 #endif | 3708 #endif |
3646 } | 3709 } |
3647 | 3710 |
3648 template <PointerDirection direction> | 3711 template <PointerDirection direction> |
3649 class PointerUpdateJobTraits { | 3712 class PointerUpdateJobTraits { |
3650 public: | 3713 public: |
3651 typedef int PerPageData; // Per page data is not used in this job. | 3714 typedef int PerPageData; // Per page data is not used in this job. |
3652 typedef int PerTaskData; // Per task data is not used in this job. | 3715 typedef int PerTaskData; // Per task data is not used in this job. |
3653 | 3716 |
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3821 Address start = | 3884 Address start = |
3822 page->Contains(space_start) ? space_start : page->area_start(); | 3885 page->Contains(space_start) ? space_start : page->area_start(); |
3823 Address end = page->Contains(space_end) ? space_end : page->area_end(); | 3886 Address end = page->Contains(space_end) ? space_end : page->area_end(); |
3824 job.AddPage(page, std::make_pair(start, end)); | 3887 job.AddPage(page, std::make_pair(start, end)); |
3825 } | 3888 } |
3826 PointersUpdatingVisitor visitor; | 3889 PointersUpdatingVisitor visitor; |
3827 int num_tasks = FLAG_parallel_pointer_update ? job.NumberOfPages() : 1; | 3890 int num_tasks = FLAG_parallel_pointer_update ? job.NumberOfPages() : 1; |
3828 job.Run(num_tasks, [&visitor](int i) { return &visitor; }); | 3891 job.Run(num_tasks, [&visitor](int i) { return &visitor; }); |
3829 } | 3892 } |
3830 | 3893 |
3894 template <MarkCompactMode mode> | |
3831 void MarkCompactCollector::UpdatePointersAfterEvacuation() { | 3895 void MarkCompactCollector::UpdatePointersAfterEvacuation() { |
3832 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS); | 3896 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS); |
3833 | 3897 |
3834 PointersUpdatingVisitor updating_visitor; | 3898 PointersUpdatingVisitor updating_visitor; |
3835 | 3899 |
3836 { | 3900 { |
3837 TRACE_GC(heap()->tracer(), | 3901 TRACE_GC(heap()->tracer(), |
3838 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW); | 3902 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW); |
3839 UpdateToSpacePointersInParallel(heap_, &page_parallel_job_semaphore_); | 3903 UpdateToSpacePointersInParallel(heap_, &page_parallel_job_semaphore_); |
3840 // Update roots. | 3904 // Update roots. |
3841 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); | 3905 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); |
3842 UpdatePointersInParallel<OLD_TO_NEW>(heap_, &page_parallel_job_semaphore_); | 3906 UpdatePointersInParallel<OLD_TO_NEW>(heap_, &page_parallel_job_semaphore_); |
3843 } | 3907 } |
3844 | 3908 |
3845 { | 3909 if (mode == MarkCompactMode::FULL) { |
3846 Heap* heap = this->heap(); | 3910 Heap* heap = this->heap(); |
3847 TRACE_GC(heap->tracer(), | 3911 TRACE_GC(heap->tracer(), |
3848 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED); | 3912 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED); |
3849 UpdatePointersInParallel<OLD_TO_OLD>(heap_, &page_parallel_job_semaphore_); | 3913 UpdatePointersInParallel<OLD_TO_OLD>(heap_, &page_parallel_job_semaphore_); |
3850 } | 3914 } |
3851 | 3915 |
3852 { | 3916 { |
3853 TRACE_GC(heap()->tracer(), | 3917 TRACE_GC(heap()->tracer(), |
3854 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK); | 3918 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK); |
3855 // Update pointers from external string table. | 3919 // Update pointers from external string table. |
3856 heap_->UpdateReferencesInExternalStringTable( | 3920 if (mode == MarkCompactMode::FULL) { |
3857 &UpdateReferenceInExternalStringTableEntry); | 3921 heap_->UpdateReferencesInExternalStringTable( |
3858 | 3922 &UpdateReferenceInExternalStringTableEntry); |
3859 EvacuationWeakObjectRetainer evacuation_object_retainer; | 3923 EvacuationWeakObjectRetainer evacuation_object_retainer; |
3860 heap()->ProcessWeakListRoots(&evacuation_object_retainer); | 3924 heap()->ProcessWeakListRoots(&evacuation_object_retainer); |
3925 } else { | |
3926 heap_->UpdateNewSpaceReferencesInExternalStringTable( | |
3927 &UpdateReferenceInExternalStringTableEntry); | |
3928 MinorMCWeakObjectRetainer weak_object_retainer(heap()); | |
3929 heap()->ProcessYoungWeakReferences(&weak_object_retainer); | |
3930 } | |
3861 } | 3931 } |
3862 } | 3932 } |
3863 | 3933 |
3864 | 3934 |
3865 void MarkCompactCollector::ReleaseEvacuationCandidates() { | 3935 void MarkCompactCollector::ReleaseEvacuationCandidates() { |
3866 for (Page* p : evacuation_candidates_) { | 3936 for (Page* p : evacuation_candidates_) { |
3867 if (!p->IsEvacuationCandidate()) continue; | 3937 if (!p->IsEvacuationCandidate()) continue; |
3868 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3938 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
3869 p->ResetLiveBytes(); | 3939 p->ResetLiveBytes(); |
3870 CHECK(p->SweepingDone()); | 3940 CHECK(p->SweepingDone()); |
(...skipping 205 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4076 // The target is always in old space, we don't have to record the slot in | 4146 // The target is always in old space, we don't have to record the slot in |
4077 // the old-to-new remembered set. | 4147 // the old-to-new remembered set. |
4078 DCHECK(!heap()->InNewSpace(target)); | 4148 DCHECK(!heap()->InNewSpace(target)); |
4079 RecordRelocSlot(host, &rinfo, target); | 4149 RecordRelocSlot(host, &rinfo, target); |
4080 } | 4150 } |
4081 } | 4151 } |
4082 } | 4152 } |
4083 | 4153 |
4084 } // namespace internal | 4154 } // namespace internal |
4085 } // namespace v8 | 4155 } // namespace v8 |
OLD | NEW |