OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 208 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
219 VerifyEvacuation(heap->new_space()); | 219 VerifyEvacuation(heap->new_space()); |
220 | 220 |
221 VerifyEvacuationVisitor visitor; | 221 VerifyEvacuationVisitor visitor; |
222 heap->IterateStrongRoots(&visitor, VISIT_ALL); | 222 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
223 } | 223 } |
224 #endif // VERIFY_HEAP | 224 #endif // VERIFY_HEAP |
225 | 225 |
226 | 226 |
227 void MarkCompactCollector::SetUp() { | 227 void MarkCompactCollector::SetUp() { |
228 free_list_old_space_.Reset(new FreeList(heap_->old_space())); | 228 free_list_old_space_.Reset(new FreeList(heap_->old_space())); |
| 229 free_list_code_space_.Reset(new FreeList(heap_->code_space())); |
229 EnsureMarkingDequeIsReserved(); | 230 EnsureMarkingDequeIsReserved(); |
230 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize); | 231 EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize); |
231 } | 232 } |
232 | 233 |
233 | 234 |
234 void MarkCompactCollector::TearDown() { | 235 void MarkCompactCollector::TearDown() { |
235 AbortCompaction(); | 236 AbortCompaction(); |
236 delete marking_deque_memory_; | 237 delete marking_deque_memory_; |
237 } | 238 } |
238 | 239 |
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
359 ClearInvalidStoreAndSlotsBufferEntries(); | 360 ClearInvalidStoreAndSlotsBufferEntries(); |
360 | 361 |
361 #ifdef VERIFY_HEAP | 362 #ifdef VERIFY_HEAP |
362 if (FLAG_verify_heap) { | 363 if (FLAG_verify_heap) { |
363 VerifyValidStoreAndSlotsBufferEntries(heap_); | 364 VerifyValidStoreAndSlotsBufferEntries(heap_); |
364 } | 365 } |
365 #endif | 366 #endif |
366 | 367 |
367 SweepSpaces(); | 368 SweepSpaces(); |
368 | 369 |
369 #ifdef VERIFY_HEAP | |
370 VerifyWeakEmbeddedObjectsInCode(); | |
371 if (FLAG_omit_map_checks_for_leaf_maps) { | |
372 VerifyOmittedMapChecks(); | |
373 } | |
374 #endif | |
375 | |
376 Finish(); | 370 Finish(); |
377 | 371 |
378 if (marking_parity_ == EVEN_MARKING_PARITY) { | 372 if (marking_parity_ == EVEN_MARKING_PARITY) { |
379 marking_parity_ = ODD_MARKING_PARITY; | 373 marking_parity_ = ODD_MARKING_PARITY; |
380 } else { | 374 } else { |
381 DCHECK(marking_parity_ == ODD_MARKING_PARITY); | 375 DCHECK(marking_parity_ == ODD_MARKING_PARITY); |
382 marking_parity_ = EVEN_MARKING_PARITY; | 376 marking_parity_ = EVEN_MARKING_PARITY; |
383 } | 377 } |
384 } | 378 } |
385 | 379 |
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
492 | 486 |
493 Heap* heap_; | 487 Heap* heap_; |
494 PagedSpace* space_; | 488 PagedSpace* space_; |
495 | 489 |
496 DISALLOW_COPY_AND_ASSIGN(SweeperTask); | 490 DISALLOW_COPY_AND_ASSIGN(SweeperTask); |
497 }; | 491 }; |
498 | 492 |
499 | 493 |
500 void MarkCompactCollector::StartSweeperThreads() { | 494 void MarkCompactCollector::StartSweeperThreads() { |
501 DCHECK(free_list_old_space_.get()->IsEmpty()); | 495 DCHECK(free_list_old_space_.get()->IsEmpty()); |
| 496 DCHECK(free_list_code_space_.get()->IsEmpty()); |
502 V8::GetCurrentPlatform()->CallOnBackgroundThread( | 497 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
503 new SweeperTask(heap(), heap()->old_space()), | 498 new SweeperTask(heap(), heap()->old_space()), |
504 v8::Platform::kShortRunningTask); | 499 v8::Platform::kShortRunningTask); |
| 500 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
| 501 new SweeperTask(heap(), heap()->code_space()), |
| 502 v8::Platform::kShortRunningTask); |
505 } | 503 } |
506 | 504 |
507 | 505 |
508 void MarkCompactCollector::EnsureSweepingCompleted() { | 506 void MarkCompactCollector::EnsureSweepingCompleted() { |
509 DCHECK(sweeping_in_progress_ == true); | 507 DCHECK(sweeping_in_progress_ == true); |
510 | 508 |
511 // If sweeping is not completed or not running at all, we try to complete it | 509 // If sweeping is not completed or not running at all, we try to complete it |
512 // here. | 510 // here. |
513 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { | 511 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { |
514 SweepInParallel(heap()->paged_space(OLD_SPACE), 0); | 512 SweepInParallel(heap()->paged_space(OLD_SPACE), 0); |
| 513 SweepInParallel(heap()->paged_space(CODE_SPACE), 0); |
515 } | 514 } |
516 // Wait twice for both jobs. | 515 // Wait twice, once for each of the two sweeper jobs (old and code space). |
517 if (heap()->concurrent_sweeping_enabled()) { | 516 if (heap()->concurrent_sweeping_enabled()) { |
518 pending_sweeper_jobs_semaphore_.Wait(); | 517 pending_sweeper_jobs_semaphore_.Wait(); |
| 518 pending_sweeper_jobs_semaphore_.Wait(); |
519 } | 519 } |
520 ParallelSweepSpacesComplete(); | 520 ParallelSweepSpacesComplete(); |
521 sweeping_in_progress_ = false; | 521 sweeping_in_progress_ = false; |
522 RefillFreeList(heap()->paged_space(OLD_SPACE)); | 522 RefillFreeList(heap()->paged_space(OLD_SPACE)); |
| 523 RefillFreeList(heap()->paged_space(CODE_SPACE)); |
523 heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes(); | 524 heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes(); |
| 525 heap()->paged_space(CODE_SPACE)->ResetUnsweptFreeBytes(); |
524 | 526 |
525 #ifdef VERIFY_HEAP | 527 #ifdef VERIFY_HEAP |
526 if (FLAG_verify_heap && !evacuation()) { | 528 if (FLAG_verify_heap && !evacuation()) { |
527 VerifyEvacuation(heap_); | 529 VerifyEvacuation(heap_); |
528 } | 530 } |
529 #endif | 531 #endif |
530 } | 532 } |
531 | 533 |
532 | 534 |
533 bool MarkCompactCollector::IsSweepingCompleted() { | 535 bool MarkCompactCollector::IsSweepingCompleted() { |
534 if (!pending_sweeper_jobs_semaphore_.WaitFor( | 536 if (!pending_sweeper_jobs_semaphore_.WaitFor( |
535 base::TimeDelta::FromSeconds(0))) { | 537 base::TimeDelta::FromSeconds(0))) { |
536 return false; | 538 return false; |
537 } | 539 } |
538 pending_sweeper_jobs_semaphore_.Signal(); | 540 pending_sweeper_jobs_semaphore_.Signal(); |
539 return true; | 541 return true; |
540 } | 542 } |
541 | 543 |
542 | 544 |
543 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { | 545 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { |
544 FreeList* free_list; | 546 FreeList* free_list; |
545 | 547 |
546 if (space == heap()->old_space()) { | 548 if (space == heap()->old_space()) { |
547 free_list = free_list_old_space_.get(); | 549 free_list = free_list_old_space_.get(); |
| 550 } else if (space == heap()->code_space()) { |
| 551 free_list = free_list_code_space_.get(); |
548 } else { | 552 } else { |
549 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure | 553 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure |
550 // to only refill them for the old space. | 554 // to only refill them for the old space and the code space. |
551 return; | 555 return; |
552 } | 556 } |
553 | 557 |
554 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); | 558 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); |
555 space->AddToAccountingStats(freed_bytes); | 559 space->AddToAccountingStats(freed_bytes); |
556 space->DecrementUnsweptFreeBytes(freed_bytes); | 560 space->DecrementUnsweptFreeBytes(freed_bytes); |
557 } | 561 } |
(...skipping 2932 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3490 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, | 3494 DCHECK_EQ(skip_list_mode == REBUILD_SKIP_LIST, |
3491 space->identity() == CODE_SPACE); | 3495 space->identity() == CODE_SPACE); |
3492 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); | 3496 DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); |
3493 DCHECK(parallelism == MarkCompactCollector::SWEEP_ON_MAIN_THREAD || | 3497 DCHECK(parallelism == MarkCompactCollector::SWEEP_ON_MAIN_THREAD || |
3494 sweeping_mode == SWEEP_ONLY); | 3498 sweeping_mode == SWEEP_ONLY); |
3495 | 3499 |
3496 Address free_start = p->area_start(); | 3500 Address free_start = p->area_start(); |
3497 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); | 3501 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); |
3498 int offsets[16]; | 3502 int offsets[16]; |
3499 | 3503 |
| 3504 // If we use the skip list for code space pages, we have to lock the skip |
| 3505 // list because it could be accessed concurrently by the runtime or the |
| 3506 // deoptimizer. |
| 3507 bool skip_list_locked = false; |
3500 SkipList* skip_list = p->skip_list(); | 3508 SkipList* skip_list = p->skip_list(); |
3501 int curr_region = -1; | |
3502 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { | 3509 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { |
| 3510 skip_list->Lock(); |
| 3511 skip_list_locked = true; |
3503 skip_list->Clear(); | 3512 skip_list->Clear(); |
3504 } | 3513 } |
3505 | 3514 |
3506 intptr_t freed_bytes = 0; | 3515 intptr_t freed_bytes = 0; |
3507 intptr_t max_freed_bytes = 0; | 3516 intptr_t max_freed_bytes = 0; |
| 3517 int curr_region = -1; |
3508 | 3518 |
3509 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { | 3519 for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) { |
3510 Address cell_base = it.CurrentCellBase(); | 3520 Address cell_base = it.CurrentCellBase(); |
3511 MarkBit::CellType* cell = it.CurrentCell(); | 3521 MarkBit::CellType* cell = it.CurrentCell(); |
3512 int live_objects = MarkWordToObjectStarts(*cell, offsets); | 3522 int live_objects = MarkWordToObjectStarts(*cell, offsets); |
3513 int live_index = 0; | 3523 int live_index = 0; |
3514 for (; live_objects != 0; live_objects--) { | 3524 for (; live_objects != 0; live_objects--) { |
3515 Address free_end = cell_base + offsets[live_index++] * kPointerSize; | 3525 Address free_end = cell_base + offsets[live_index++] * kPointerSize; |
3516 if (free_end != free_start) { | 3526 if (free_end != free_start) { |
3517 int size = static_cast<int>(free_end - free_start); | 3527 int size = static_cast<int>(free_end - free_start); |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3552 } | 3562 } |
3553 p->ResetLiveBytes(); | 3563 p->ResetLiveBytes(); |
3554 | 3564 |
3555 if (parallelism == MarkCompactCollector::SWEEP_IN_PARALLEL) { | 3565 if (parallelism == MarkCompactCollector::SWEEP_IN_PARALLEL) { |
3556 // When concurrent sweeping is active, the page will be marked after | 3566 // When concurrent sweeping is active, the page will be marked after |
3557 // sweeping by the main thread. | 3567 // sweeping by the main thread. |
3558 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); | 3568 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); |
3559 } else { | 3569 } else { |
3560 p->SetWasSwept(); | 3570 p->SetWasSwept(); |
3561 } | 3571 } |
| 3572 if (skip_list_locked) { |
| 3573 DCHECK(skip_list && skip_list_mode == REBUILD_SKIP_LIST); |
| 3574 skip_list->Unlock(); |
| 3575 } |
3562 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 3576 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
3563 } | 3577 } |
3564 | 3578 |
3565 | 3579 |
3566 // Return true if the given code is deoptimized or will be deoptimized. | 3580 // Return true if the given code is deoptimized or will be deoptimized. |
3567 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3581 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3568 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3582 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3569 } | 3583 } |
3570 | 3584 |
3571 | 3585 |
(...skipping 589 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4161 max_freed_overall = Max(max_freed, max_freed_overall); | 4175 max_freed_overall = Max(max_freed, max_freed_overall); |
4162 if (p == space->end_of_unswept_pages()) break; | 4176 if (p == space->end_of_unswept_pages()) break; |
4163 } | 4177 } |
4164 return max_freed_overall; | 4178 return max_freed_overall; |
4165 } | 4179 } |
4166 | 4180 |
4167 | 4181 |
4168 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { | 4182 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { |
4169 int max_freed = 0; | 4183 int max_freed = 0; |
4170 if (page->TryParallelSweeping()) { | 4184 if (page->TryParallelSweeping()) { |
4171 FreeList* free_list = free_list_old_space_.get(); | 4185 FreeList* free_list; |
4172 FreeList private_free_list(space); | 4186 FreeList private_free_list(space); |
4173 max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, | 4187 if (space->identity() == CODE_SPACE) { |
4174 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); | 4188 free_list = free_list_code_space_.get(); |
| 4189 max_freed = |
| 4190 Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, REBUILD_SKIP_LIST, |
| 4191 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); |
| 4192 } else { |
| 4193 free_list = free_list_old_space_.get(); |
| 4194 max_freed = |
| 4195 Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, |
| 4196 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); |
| 4197 } |
4175 free_list->Concatenate(&private_free_list); | 4198 free_list->Concatenate(&private_free_list); |
4176 } | 4199 } |
4177 return max_freed; | 4200 return max_freed; |
4178 } | 4201 } |
4179 | 4202 |
4180 | 4203 |
4181 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { | 4204 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
4182 space->ClearStats(); | 4205 space->ClearStats(); |
4183 | 4206 |
4184 // We defensively initialize end_of_unswept_pages_ here with the first page | 4207 // We defensively initialize end_of_unswept_pages_ here with the first page |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4221 unused_page_present = true; | 4244 unused_page_present = true; |
4222 } | 4245 } |
4223 | 4246 |
4224 switch (sweeper) { | 4247 switch (sweeper) { |
4225 case CONCURRENT_SWEEPING: | 4248 case CONCURRENT_SWEEPING: |
4226 if (!parallel_sweeping_active) { | 4249 if (!parallel_sweeping_active) { |
4227 if (FLAG_gc_verbose) { | 4250 if (FLAG_gc_verbose) { |
4228 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", | 4251 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", |
4229 reinterpret_cast<intptr_t>(p)); | 4252 reinterpret_cast<intptr_t>(p)); |
4230 } | 4253 } |
4231 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, | 4254 if (space->identity() == CODE_SPACE) { |
4232 IGNORE_FREE_SPACE>(space, NULL, p, NULL); | 4255 if (FLAG_zap_code_space) { |
| 4256 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, |
| 4257 ZAP_FREE_SPACE>(space, NULL, p, NULL); |
| 4258 } else { |
| 4259 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, |
| 4260 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
| 4261 } |
| 4262 } else { |
| 4263 DCHECK(space->identity() == OLD_SPACE); |
| 4264 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, |
| 4265 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
| 4266 } |
4233 pages_swept++; | 4267 pages_swept++; |
4234 parallel_sweeping_active = true; | 4268 parallel_sweeping_active = true; |
4235 } else { | 4269 } else { |
4236 if (FLAG_gc_verbose) { | 4270 if (FLAG_gc_verbose) { |
4237 PrintF("Sweeping 0x%" V8PRIxPTR " in parallel.\n", | 4271 PrintF("Sweeping 0x%" V8PRIxPTR " in parallel.\n", |
4238 reinterpret_cast<intptr_t>(p)); | 4272 reinterpret_cast<intptr_t>(p)); |
4239 } | 4273 } |
4240 p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING); | 4274 p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING); |
4241 space->IncreaseUnsweptFreeBytes(p); | 4275 space->IncreaseUnsweptFreeBytes(p); |
4242 } | 4276 } |
4243 space->set_end_of_unswept_pages(p); | 4277 space->set_end_of_unswept_pages(p); |
4244 break; | 4278 break; |
4245 case SEQUENTIAL_SWEEPING: { | 4279 case SEQUENTIAL_SWEEPING: { |
4246 if (FLAG_gc_verbose) { | 4280 if (FLAG_gc_verbose) { |
4247 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p)); | 4281 PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p)); |
4248 } | 4282 } |
4249 if (space->identity() == CODE_SPACE && FLAG_zap_code_space) { | 4283 if (space->identity() == CODE_SPACE) { |
4250 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, | 4284 if (FLAG_zap_code_space) { |
4251 ZAP_FREE_SPACE>(space, NULL, p, NULL); | 4285 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, |
4252 } else if (space->identity() == CODE_SPACE) { | 4286 ZAP_FREE_SPACE>(space, NULL, p, NULL); |
4253 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, | 4287 } else { |
4254 IGNORE_FREE_SPACE>(space, NULL, p, NULL); | 4288 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST, |
| 4289 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
| 4290 } |
4255 } else { | 4291 } else { |
| 4292 DCHECK(space->identity() == OLD_SPACE || |
| 4293 space->identity() == MAP_SPACE); |
4256 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, | 4294 Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST, |
4257 IGNORE_FREE_SPACE>(space, NULL, p, NULL); | 4295 IGNORE_FREE_SPACE>(space, NULL, p, NULL); |
4258 } | 4296 } |
4259 pages_swept++; | 4297 pages_swept++; |
4260 break; | 4298 break; |
4261 } | 4299 } |
4262 default: { UNREACHABLE(); } | 4300 default: { UNREACHABLE(); } |
4263 } | 4301 } |
4264 } | 4302 } |
4265 | 4303 |
(...skipping 19 matching lines...) Expand all Loading... |
4285 #endif | 4323 #endif |
4286 | 4324 |
4287 MoveEvacuationCandidatesToEndOfPagesList(); | 4325 MoveEvacuationCandidatesToEndOfPagesList(); |
4288 | 4326 |
4289 // Noncompacting collections simply sweep the spaces to clear the mark | 4327 // Noncompacting collections simply sweep the spaces to clear the mark |
4290 // bits and free the nonlive blocks (for old and map spaces). We sweep | 4328 // bits and free the nonlive blocks (for old and map spaces). We sweep |
4291 // the map space last because freeing non-live maps overwrites them and | 4329 // the map space last because freeing non-live maps overwrites them and |
4292 // the other spaces rely on possibly non-live maps to get the sizes for | 4330 // the other spaces rely on possibly non-live maps to get the sizes for |
4293 // non-live objects. | 4331 // non-live objects. |
4294 { | 4332 { |
4295 GCTracer::Scope sweep_scope(heap()->tracer(), | 4333 { |
4296 GCTracer::Scope::MC_SWEEP_OLDSPACE); | 4334 GCTracer::Scope sweep_scope(heap()->tracer(), |
4297 { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); } | 4335 GCTracer::Scope::MC_SWEEP_OLDSPACE); |
| 4336 SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); |
| 4337 } |
| 4338 { |
| 4339 GCTracer::Scope sweep_scope(heap()->tracer(), |
| 4340 GCTracer::Scope::MC_SWEEP_CODE); |
| 4341 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING); |
| 4342 } |
| 4343 |
4298 sweeping_in_progress_ = true; | 4344 sweeping_in_progress_ = true; |
4299 if (heap()->concurrent_sweeping_enabled()) { | 4345 if (heap()->concurrent_sweeping_enabled()) { |
4300 StartSweeperThreads(); | 4346 StartSweeperThreads(); |
4301 } | 4347 } |
4302 } | 4348 } |
4303 { | |
4304 GCTracer::Scope sweep_scope(heap()->tracer(), | |
4305 GCTracer::Scope::MC_SWEEP_CODE); | |
4306 SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING); | |
4307 } | |
4308 | 4349 |
4309 EvacuateNewSpaceAndCandidates(); | 4350 EvacuateNewSpaceAndCandidates(); |
4310 | 4351 |
4311 heap()->FreeDeadArrayBuffers(false); | 4352 heap()->FreeDeadArrayBuffers(false); |
4312 | 4353 |
4313 // ClearNonLiveReferences depends on precise sweeping of map space to | 4354 // ClearNonLiveReferences depends on precise sweeping of map space to |
4314 // detect whether unmarked map became dead in this collection or in one | 4355 // detect whether unmarked map became dead in this collection or in one |
4315 // of the previous ones. | 4356 // of the previous ones. |
4316 { | 4357 { |
4317 GCTracer::Scope sweep_scope(heap()->tracer(), | 4358 GCTracer::Scope sweep_scope(heap()->tracer(), |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4350 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); | 4391 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); |
4351 p->SetWasSwept(); | 4392 p->SetWasSwept(); |
4352 } | 4393 } |
4353 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); | 4394 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); |
4354 } | 4395 } |
4355 } | 4396 } |
4356 | 4397 |
4357 | 4398 |
4358 void MarkCompactCollector::ParallelSweepSpacesComplete() { | 4399 void MarkCompactCollector::ParallelSweepSpacesComplete() { |
4359 ParallelSweepSpaceComplete(heap()->old_space()); | 4400 ParallelSweepSpaceComplete(heap()->old_space()); |
| 4401 ParallelSweepSpaceComplete(heap()->code_space()); |
4360 } | 4402 } |
4361 | 4403 |
4362 | 4404 |
4363 void MarkCompactCollector::EnableCodeFlushing(bool enable) { | 4405 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
4364 if (isolate()->debug()->is_loaded() || | 4406 if (isolate()->debug()->is_loaded() || |
4365 isolate()->debug()->has_break_points()) { | 4407 isolate()->debug()->has_break_points()) { |
4366 enable = false; | 4408 enable = false; |
4367 } | 4409 } |
4368 | 4410 |
4369 if (enable) { | 4411 if (enable) { |
(...skipping 283 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4653 SlotsBuffer* buffer = *buffer_address; | 4695 SlotsBuffer* buffer = *buffer_address; |
4654 while (buffer != NULL) { | 4696 while (buffer != NULL) { |
4655 SlotsBuffer* next_buffer = buffer->next(); | 4697 SlotsBuffer* next_buffer = buffer->next(); |
4656 DeallocateBuffer(buffer); | 4698 DeallocateBuffer(buffer); |
4657 buffer = next_buffer; | 4699 buffer = next_buffer; |
4658 } | 4700 } |
4659 *buffer_address = NULL; | 4701 *buffer_address = NULL; |
4660 } | 4702 } |
4661 } // namespace internal | 4703 } // namespace internal |
4662 } // namespace v8 | 4704 } // namespace v8 |
OLD | NEW |