| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 128 PageIterator it(space); | 128 PageIterator it(space); |
| 129 | 129 |
| 130 while (it.has_next()) { | 130 while (it.has_next()) { |
| 131 Page* p = it.next(); | 131 Page* p = it.next(); |
| 132 VerifyMarking(space->heap(), p->area_start(), p->area_end()); | 132 VerifyMarking(space->heap(), p->area_start(), p->area_end()); |
| 133 } | 133 } |
| 134 } | 134 } |
| 135 | 135 |
| 136 | 136 |
| 137 static void VerifyMarking(Heap* heap) { | 137 static void VerifyMarking(Heap* heap) { |
| 138 VerifyMarking(heap->old_pointer_space()); | 138 VerifyMarking(heap->old_space()); |
| 139 VerifyMarking(heap->old_data_space()); | |
| 140 VerifyMarking(heap->code_space()); | 139 VerifyMarking(heap->code_space()); |
| 141 VerifyMarking(heap->cell_space()); | 140 VerifyMarking(heap->cell_space()); |
| 142 VerifyMarking(heap->property_cell_space()); | 141 VerifyMarking(heap->property_cell_space()); |
| 143 VerifyMarking(heap->map_space()); | 142 VerifyMarking(heap->map_space()); |
| 144 VerifyMarking(heap->new_space()); | 143 VerifyMarking(heap->new_space()); |
| 145 | 144 |
| 146 VerifyMarkingVisitor visitor(heap); | 145 VerifyMarkingVisitor visitor(heap); |
| 147 | 146 |
| 148 LargeObjectIterator it(heap->lo_space()); | 147 LargeObjectIterator it(heap->lo_space()); |
| 149 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 148 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 194 while (current < limit) { | 193 while (current < limit) { |
| 195 HeapObject* object = HeapObject::FromAddress(current); | 194 HeapObject* object = HeapObject::FromAddress(current); |
| 196 object->Iterate(&visitor); | 195 object->Iterate(&visitor); |
| 197 current += object->Size(); | 196 current += object->Size(); |
| 198 } | 197 } |
| 199 } | 198 } |
| 200 } | 199 } |
| 201 | 200 |
| 202 | 201 |
| 203 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { | 202 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { |
| 204 if (FLAG_use_allocation_folding && | 203 if (FLAG_use_allocation_folding && (space == heap->old_space())) { |
| 205 (space == heap->old_pointer_space() || space == heap->old_data_space())) { | |
| 206 return; | 204 return; |
| 207 } | 205 } |
| 208 PageIterator it(space); | 206 PageIterator it(space); |
| 209 | 207 |
| 210 while (it.has_next()) { | 208 while (it.has_next()) { |
| 211 Page* p = it.next(); | 209 Page* p = it.next(); |
| 212 if (p->IsEvacuationCandidate()) continue; | 210 if (p->IsEvacuationCandidate()) continue; |
| 213 VerifyEvacuation(p); | 211 VerifyEvacuation(p); |
| 214 } | 212 } |
| 215 } | 213 } |
| 216 | 214 |
| 217 | 215 |
| 218 static void VerifyEvacuation(Heap* heap) { | 216 static void VerifyEvacuation(Heap* heap) { |
| 219 VerifyEvacuation(heap, heap->old_pointer_space()); | 217 VerifyEvacuation(heap, heap->old_space()); |
| 220 VerifyEvacuation(heap, heap->old_data_space()); | |
| 221 VerifyEvacuation(heap, heap->code_space()); | 218 VerifyEvacuation(heap, heap->code_space()); |
| 222 VerifyEvacuation(heap, heap->cell_space()); | 219 VerifyEvacuation(heap, heap->cell_space()); |
| 223 VerifyEvacuation(heap, heap->property_cell_space()); | 220 VerifyEvacuation(heap, heap->property_cell_space()); |
| 224 VerifyEvacuation(heap, heap->map_space()); | 221 VerifyEvacuation(heap, heap->map_space()); |
| 225 VerifyEvacuation(heap->new_space()); | 222 VerifyEvacuation(heap->new_space()); |
| 226 | 223 |
| 227 VerifyEvacuationVisitor visitor; | 224 VerifyEvacuationVisitor visitor; |
| 228 heap->IterateStrongRoots(&visitor, VISIT_ALL); | 225 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
| 229 } | 226 } |
| 230 #endif // VERIFY_HEAP | 227 #endif // VERIFY_HEAP |
| 231 | 228 |
| 232 | 229 |
| 233 void MarkCompactCollector::SetUp() { | 230 void MarkCompactCollector::SetUp() { |
| 234 free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space())); | 231 free_list_old_space_.Reset(new FreeList(heap_->old_space())); |
| 235 free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space())); | |
| 236 } | 232 } |
| 237 | 233 |
| 238 | 234 |
| 239 void MarkCompactCollector::TearDown() { | 235 void MarkCompactCollector::TearDown() { |
| 240 AbortCompaction(); | 236 AbortCompaction(); |
| 241 delete marking_deque_memory_; | 237 delete marking_deque_memory_; |
| 242 } | 238 } |
| 243 | 239 |
| 244 | 240 |
| 245 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { | 241 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { |
| (...skipping 10 matching lines...) Expand all Loading... |
| 256 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", | 252 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", |
| 257 AllocationSpaceName(space->identity()), number_of_pages, | 253 AllocationSpaceName(space->identity()), number_of_pages, |
| 258 static_cast<int>(free), static_cast<double>(free) * 100 / reserved); | 254 static_cast<int>(free), static_cast<double>(free) * 100 / reserved); |
| 259 } | 255 } |
| 260 | 256 |
| 261 | 257 |
| 262 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { | 258 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { |
| 263 if (!compacting_) { | 259 if (!compacting_) { |
| 264 DCHECK(evacuation_candidates_.length() == 0); | 260 DCHECK(evacuation_candidates_.length() == 0); |
| 265 | 261 |
| 266 CollectEvacuationCandidates(heap()->old_pointer_space()); | 262 CollectEvacuationCandidates(heap()->old_space()); |
| 267 CollectEvacuationCandidates(heap()->old_data_space()); | |
| 268 | 263 |
| 269 if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION || | 264 if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION || |
| 270 FLAG_incremental_code_compaction)) { | 265 FLAG_incremental_code_compaction)) { |
| 271 CollectEvacuationCandidates(heap()->code_space()); | 266 CollectEvacuationCandidates(heap()->code_space()); |
| 272 } else if (FLAG_trace_fragmentation) { | 267 } else if (FLAG_trace_fragmentation) { |
| 273 TraceFragmentation(heap()->code_space()); | 268 TraceFragmentation(heap()->code_space()); |
| 274 } | 269 } |
| 275 | 270 |
| 276 if (FLAG_trace_fragmentation) { | 271 if (FLAG_trace_fragmentation) { |
| 277 TraceFragmentation(heap()->map_space()); | 272 TraceFragmentation(heap()->map_space()); |
| 278 TraceFragmentation(heap()->cell_space()); | 273 TraceFragmentation(heap()->cell_space()); |
| 279 TraceFragmentation(heap()->property_cell_space()); | 274 TraceFragmentation(heap()->property_cell_space()); |
| 280 } | 275 } |
| 281 | 276 |
| 282 heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists(); | 277 heap()->old_space()->EvictEvacuationCandidatesFromFreeLists(); |
| 283 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); | |
| 284 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); | 278 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); |
| 285 | 279 |
| 286 compacting_ = evacuation_candidates_.length() > 0; | 280 compacting_ = evacuation_candidates_.length() > 0; |
| 287 } | 281 } |
| 288 | 282 |
| 289 return compacting_; | 283 return compacting_; |
| 290 } | 284 } |
| 291 | 285 |
| 292 | 286 |
| 293 void MarkCompactCollector::CollectGarbage() { | 287 void MarkCompactCollector::CollectGarbage() { |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 360 | 354 |
| 361 while (it.has_next()) { | 355 while (it.has_next()) { |
| 362 NewSpacePage* p = it.next(); | 356 NewSpacePage* p = it.next(); |
| 363 CHECK(p->markbits()->IsClean()); | 357 CHECK(p->markbits()->IsClean()); |
| 364 CHECK_EQ(0, p->LiveBytes()); | 358 CHECK_EQ(0, p->LiveBytes()); |
| 365 } | 359 } |
| 366 } | 360 } |
| 367 | 361 |
| 368 | 362 |
| 369 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 363 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
| 370 VerifyMarkbitsAreClean(heap_->old_pointer_space()); | 364 VerifyMarkbitsAreClean(heap_->old_space()); |
| 371 VerifyMarkbitsAreClean(heap_->old_data_space()); | |
| 372 VerifyMarkbitsAreClean(heap_->code_space()); | 365 VerifyMarkbitsAreClean(heap_->code_space()); |
| 373 VerifyMarkbitsAreClean(heap_->cell_space()); | 366 VerifyMarkbitsAreClean(heap_->cell_space()); |
| 374 VerifyMarkbitsAreClean(heap_->property_cell_space()); | 367 VerifyMarkbitsAreClean(heap_->property_cell_space()); |
| 375 VerifyMarkbitsAreClean(heap_->map_space()); | 368 VerifyMarkbitsAreClean(heap_->map_space()); |
| 376 VerifyMarkbitsAreClean(heap_->new_space()); | 369 VerifyMarkbitsAreClean(heap_->new_space()); |
| 377 | 370 |
| 378 LargeObjectIterator it(heap_->lo_space()); | 371 LargeObjectIterator it(heap_->lo_space()); |
| 379 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 372 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 380 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 373 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
| 381 CHECK(Marking::IsWhite(mark_bit)); | 374 CHECK(Marking::IsWhite(mark_bit)); |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 420 | 413 |
| 421 while (it.has_next()) { | 414 while (it.has_next()) { |
| 422 Bitmap::Clear(it.next()); | 415 Bitmap::Clear(it.next()); |
| 423 } | 416 } |
| 424 } | 417 } |
| 425 | 418 |
| 426 | 419 |
| 427 void MarkCompactCollector::ClearMarkbits() { | 420 void MarkCompactCollector::ClearMarkbits() { |
| 428 ClearMarkbitsInPagedSpace(heap_->code_space()); | 421 ClearMarkbitsInPagedSpace(heap_->code_space()); |
| 429 ClearMarkbitsInPagedSpace(heap_->map_space()); | 422 ClearMarkbitsInPagedSpace(heap_->map_space()); |
| 430 ClearMarkbitsInPagedSpace(heap_->old_pointer_space()); | 423 ClearMarkbitsInPagedSpace(heap_->old_space()); |
| 431 ClearMarkbitsInPagedSpace(heap_->old_data_space()); | |
| 432 ClearMarkbitsInPagedSpace(heap_->cell_space()); | 424 ClearMarkbitsInPagedSpace(heap_->cell_space()); |
| 433 ClearMarkbitsInPagedSpace(heap_->property_cell_space()); | 425 ClearMarkbitsInPagedSpace(heap_->property_cell_space()); |
| 434 ClearMarkbitsInNewSpace(heap_->new_space()); | 426 ClearMarkbitsInNewSpace(heap_->new_space()); |
| 435 | 427 |
| 436 LargeObjectIterator it(heap_->lo_space()); | 428 LargeObjectIterator it(heap_->lo_space()); |
| 437 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 429 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 438 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 430 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
| 439 mark_bit.Clear(); | 431 mark_bit.Clear(); |
| 440 mark_bit.Next().Clear(); | 432 mark_bit.Next().Clear(); |
| 441 Page::FromAddress(obj->address())->ResetProgressBar(); | 433 Page::FromAddress(obj->address())->ResetProgressBar(); |
| (...skipping 16 matching lines...) Expand all Loading... |
| 458 } | 450 } |
| 459 | 451 |
| 460 Heap* heap_; | 452 Heap* heap_; |
| 461 PagedSpace* space_; | 453 PagedSpace* space_; |
| 462 | 454 |
| 463 DISALLOW_COPY_AND_ASSIGN(SweeperTask); | 455 DISALLOW_COPY_AND_ASSIGN(SweeperTask); |
| 464 }; | 456 }; |
| 465 | 457 |
| 466 | 458 |
| 467 void MarkCompactCollector::StartSweeperThreads() { | 459 void MarkCompactCollector::StartSweeperThreads() { |
| 468 DCHECK(free_list_old_pointer_space_.get()->IsEmpty()); | 460 DCHECK(free_list_old_space_.get()->IsEmpty()); |
| 469 DCHECK(free_list_old_data_space_.get()->IsEmpty()); | |
| 470 V8::GetCurrentPlatform()->CallOnBackgroundThread( | 461 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
| 471 new SweeperTask(heap(), heap()->old_data_space()), | 462 new SweeperTask(heap(), heap()->old_space()), |
| 472 v8::Platform::kShortRunningTask); | |
| 473 V8::GetCurrentPlatform()->CallOnBackgroundThread( | |
| 474 new SweeperTask(heap(), heap()->old_pointer_space()), | |
| 475 v8::Platform::kShortRunningTask); | 463 v8::Platform::kShortRunningTask); |
| 476 } | 464 } |
| 477 | 465 |
| 478 | 466 |
| 479 void MarkCompactCollector::EnsureSweepingCompleted() { | 467 void MarkCompactCollector::EnsureSweepingCompleted() { |
| 480 DCHECK(sweeping_in_progress_ == true); | 468 DCHECK(sweeping_in_progress_ == true); |
| 481 | 469 |
| 482 // If sweeping is not completed or not running at all, we try to complete it | 470 // If sweeping is not completed or not running at all, we try to complete it |
| 483 // here. | 471 // here. |
| 484 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { | 472 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { |
| 485 SweepInParallel(heap()->paged_space(OLD_DATA_SPACE), 0); | 473 SweepInParallel(heap()->paged_space(OLD_SPACE), 0); |
| 486 SweepInParallel(heap()->paged_space(OLD_POINTER_SPACE), 0); | |
| 487 } | 474 } |
| 488 // Wait twice for both jobs. | 475 // Wait for the concurrent sweeper job. |
| 489 if (heap()->concurrent_sweeping_enabled()) { | 476 if (heap()->concurrent_sweeping_enabled()) { |
| 490 pending_sweeper_jobs_semaphore_.Wait(); | 477 pending_sweeper_jobs_semaphore_.Wait(); |
| 491 pending_sweeper_jobs_semaphore_.Wait(); | |
| 492 } | 478 } |
| 493 ParallelSweepSpacesComplete(); | 479 ParallelSweepSpacesComplete(); |
| 494 sweeping_in_progress_ = false; | 480 sweeping_in_progress_ = false; |
| 495 RefillFreeList(heap()->paged_space(OLD_DATA_SPACE)); | 481 RefillFreeList(heap()->paged_space(OLD_SPACE)); |
| 496 RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE)); | 482 heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes(); |
| 497 heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes(); | |
| 498 heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes(); | |
| 499 | 483 |
| 500 #ifdef VERIFY_HEAP | 484 #ifdef VERIFY_HEAP |
| 501 if (FLAG_verify_heap && !evacuation()) { | 485 if (FLAG_verify_heap && !evacuation()) { |
| 502 VerifyEvacuation(heap_); | 486 VerifyEvacuation(heap_); |
| 503 } | 487 } |
| 504 #endif | 488 #endif |
| 505 } | 489 } |
| 506 | 490 |
| 507 | 491 |
| 508 bool MarkCompactCollector::IsSweepingCompleted() { | 492 bool MarkCompactCollector::IsSweepingCompleted() { |
| 509 if (!pending_sweeper_jobs_semaphore_.WaitFor( | 493 if (!pending_sweeper_jobs_semaphore_.WaitFor( |
| 510 base::TimeDelta::FromSeconds(0))) { | 494 base::TimeDelta::FromSeconds(0))) { |
| 511 return false; | 495 return false; |
| 512 } | 496 } |
| 513 pending_sweeper_jobs_semaphore_.Signal(); | 497 pending_sweeper_jobs_semaphore_.Signal(); |
| 514 return true; | 498 return true; |
| 515 } | 499 } |
| 516 | 500 |
| 517 | 501 |
| 518 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { | 502 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { |
| 519 FreeList* free_list; | 503 FreeList* free_list; |
| 520 | 504 |
| 521 if (space == heap()->old_pointer_space()) { | 505 if (space == heap()->old_space()) { |
| 522 free_list = free_list_old_pointer_space_.get(); | 506 free_list = free_list_old_space_.get(); |
| 523 } else if (space == heap()->old_data_space()) { | |
| 524 free_list = free_list_old_data_space_.get(); | |
| 525 } else { | 507 } else { |
| 526 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure | 508 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure |
| 527 // to only refill them for old data and pointer spaces. | 509 // to only refill them for the old space. |
| 528 return; | 510 return; |
| 529 } | 511 } |
| 530 | 512 |
| 531 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); | 513 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); |
| 532 space->AddToAccountingStats(freed_bytes); | 514 space->AddToAccountingStats(freed_bytes); |
| 533 space->DecrementUnsweptFreeBytes(freed_bytes); | 515 space->DecrementUnsweptFreeBytes(freed_bytes); |
| 534 } | 516 } |
| 535 | 517 |
| 536 | 518 |
| 537 void Marking::TransferMark(Address old_start, Address new_start) { | 519 void Marking::TransferMark(Address old_start, Address new_start) { |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 571 ObjectColor new_color = Color(new_mark_bit); | 553 ObjectColor new_color = Color(new_mark_bit); |
| 572 DCHECK(new_color == old_color); | 554 DCHECK(new_color == old_color); |
| 573 #endif | 555 #endif |
| 574 } | 556 } |
| 575 | 557 |
| 576 | 558 |
| 577 const char* AllocationSpaceName(AllocationSpace space) { | 559 const char* AllocationSpaceName(AllocationSpace space) { |
| 578 switch (space) { | 560 switch (space) { |
| 579 case NEW_SPACE: | 561 case NEW_SPACE: |
| 580 return "NEW_SPACE"; | 562 return "NEW_SPACE"; |
| 581 case OLD_POINTER_SPACE: | 563 case OLD_SPACE: |
| 582 return "OLD_POINTER_SPACE"; | 564 return "OLD_SPACE"; |
| 583 case OLD_DATA_SPACE: | |
| 584 return "OLD_DATA_SPACE"; | |
| 585 case CODE_SPACE: | 565 case CODE_SPACE: |
| 586 return "CODE_SPACE"; | 566 return "CODE_SPACE"; |
| 587 case MAP_SPACE: | 567 case MAP_SPACE: |
| 588 return "MAP_SPACE"; | 568 return "MAP_SPACE"; |
| 589 case CELL_SPACE: | 569 case CELL_SPACE: |
| 590 return "CELL_SPACE"; | 570 return "CELL_SPACE"; |
| 591 case PROPERTY_CELL_SPACE: | 571 case PROPERTY_CELL_SPACE: |
| 592 return "PROPERTY_CELL_SPACE"; | 572 return "PROPERTY_CELL_SPACE"; |
| 593 case LO_SPACE: | 573 case LO_SPACE: |
| 594 return "LO_SPACE"; | 574 return "LO_SPACE"; |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 645 return 1; | 625 return 1; |
| 646 } | 626 } |
| 647 | 627 |
| 648 if (ratio <= ratio_threshold) return 0; // Not fragmented. | 628 if (ratio <= ratio_threshold) return 0; // Not fragmented. |
| 649 | 629 |
| 650 return static_cast<int>(ratio - ratio_threshold); | 630 return static_cast<int>(ratio - ratio_threshold); |
| 651 } | 631 } |
| 652 | 632 |
| 653 | 633 |
| 654 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { | 634 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { |
| 655 DCHECK(space->identity() == OLD_POINTER_SPACE || | 635 DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE); |
| 656 space->identity() == OLD_DATA_SPACE || | |
| 657 space->identity() == CODE_SPACE); | |
| 658 | 636 |
| 659 static const int kMaxMaxEvacuationCandidates = 1000; | 637 static const int kMaxMaxEvacuationCandidates = 1000; |
| 660 int number_of_pages = space->CountTotalPages(); | 638 int number_of_pages = space->CountTotalPages(); |
| 661 int max_evacuation_candidates = | 639 int max_evacuation_candidates = |
| 662 static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1); | 640 static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1); |
| 663 | 641 |
| 664 if (FLAG_stress_compaction || FLAG_always_compact) { | 642 if (FLAG_stress_compaction || FLAG_always_compact) { |
| 665 max_evacuation_candidates = kMaxMaxEvacuationCandidates; | 643 max_evacuation_candidates = kMaxMaxEvacuationCandidates; |
| 666 } | 644 } |
| 667 | 645 |
| (...skipping 1145 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1813 (next_cell << (Bitmap::kBitsPerCell - 1))); | 1791 (next_cell << (Bitmap::kBitsPerCell - 1))); |
| 1814 } else { | 1792 } else { |
| 1815 grey_objects = current_cell & (current_cell >> 1); | 1793 grey_objects = current_cell & (current_cell >> 1); |
| 1816 } | 1794 } |
| 1817 | 1795 |
| 1818 int offset = 0; | 1796 int offset = 0; |
| 1819 while (grey_objects != 0) { | 1797 while (grey_objects != 0) { |
| 1820 int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects); | 1798 int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects); |
| 1821 grey_objects >>= trailing_zeros; | 1799 grey_objects >>= trailing_zeros; |
| 1822 offset += trailing_zeros; | 1800 offset += trailing_zeros; |
| 1823 MarkBit markbit(cell, 1 << offset, false); | 1801 MarkBit markbit(cell, 1 << offset); |
| 1824 DCHECK(Marking::IsGrey(markbit)); | 1802 DCHECK(Marking::IsGrey(markbit)); |
| 1825 Marking::GreyToBlack(markbit); | 1803 Marking::GreyToBlack(markbit); |
| 1826 Address addr = cell_base + offset * kPointerSize; | 1804 Address addr = cell_base + offset * kPointerSize; |
| 1827 HeapObject* object = HeapObject::FromAddress(addr); | 1805 HeapObject* object = HeapObject::FromAddress(addr); |
| 1828 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); | 1806 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); |
| 1829 marking_deque->PushBlack(object); | 1807 marking_deque->PushBlack(object); |
| 1830 if (marking_deque->IsFull()) return; | 1808 if (marking_deque->IsFull()) return; |
| 1831 offset += 2; | 1809 offset += 2; |
| 1832 grey_objects >>= 2; | 1810 grey_objects >>= 2; |
| 1833 } | 1811 } |
| (...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2037 // push them on the marking stack. Stop early if the marking stack fills | 2015 // push them on the marking stack. Stop early if the marking stack fills |
| 2038 // before sweeping completes. If sweeping completes, there are no remaining | 2016 // before sweeping completes. If sweeping completes, there are no remaining |
| 2039 // overflowed objects in the heap so the overflow flag on the markings stack | 2017 // overflowed objects in the heap so the overflow flag on the markings stack |
| 2040 // is cleared. | 2018 // is cleared. |
| 2041 void MarkCompactCollector::RefillMarkingDeque() { | 2019 void MarkCompactCollector::RefillMarkingDeque() { |
| 2042 DCHECK(marking_deque_.overflowed()); | 2020 DCHECK(marking_deque_.overflowed()); |
| 2043 | 2021 |
| 2044 DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_); | 2022 DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_); |
| 2045 if (marking_deque_.IsFull()) return; | 2023 if (marking_deque_.IsFull()) return; |
| 2046 | 2024 |
| 2047 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, | 2025 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_space()); |
| 2048 heap()->old_pointer_space()); | |
| 2049 if (marking_deque_.IsFull()) return; | |
| 2050 | |
| 2051 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space()); | |
| 2052 if (marking_deque_.IsFull()) return; | 2026 if (marking_deque_.IsFull()) return; |
| 2053 | 2027 |
| 2054 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space()); | 2028 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space()); |
| 2055 if (marking_deque_.IsFull()) return; | 2029 if (marking_deque_.IsFull()) return; |
| 2056 | 2030 |
| 2057 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space()); | 2031 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space()); |
| 2058 if (marking_deque_.IsFull()) return; | 2032 if (marking_deque_.IsFull()) return; |
| 2059 | 2033 |
| 2060 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space()); | 2034 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space()); |
| 2061 if (marking_deque_.IsFull()) return; | 2035 if (marking_deque_.IsFull()) return; |
| (...skipping 655 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2717 // to new space. We should clear them to avoid encountering them during next | 2691 // to new space. We should clear them to avoid encountering them during next |
| 2718 // pointer iteration. This is an issue if the store buffer overflows and we | 2692 // pointer iteration. This is an issue if the store buffer overflows and we |
| 2719 // have to scan the entire old space, including dead objects, looking for | 2693 // have to scan the entire old space, including dead objects, looking for |
| 2720 // pointers to new space. | 2694 // pointers to new space. |
| 2721 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, | 2695 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, |
| 2722 int size, AllocationSpace dest) { | 2696 int size, AllocationSpace dest) { |
| 2723 Address dst_addr = dst->address(); | 2697 Address dst_addr = dst->address(); |
| 2724 Address src_addr = src->address(); | 2698 Address src_addr = src->address(); |
| 2725 DCHECK(heap()->AllowedToBeMigrated(src, dest)); | 2699 DCHECK(heap()->AllowedToBeMigrated(src, dest)); |
| 2726 DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize); | 2700 DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize); |
| 2727 if (dest == OLD_POINTER_SPACE) { | 2701 if (dest == OLD_SPACE) { |
| 2728 Address src_slot = src_addr; | 2702 Address src_slot = src_addr; |
| 2729 Address dst_slot = dst_addr; | 2703 Address dst_slot = dst_addr; |
| 2730 DCHECK(IsAligned(size, kPointerSize)); | 2704 DCHECK(IsAligned(size, kPointerSize)); |
| 2731 | 2705 |
| 2732 bool may_contain_raw_values = src->MayContainRawValues(); | 2706 bool may_contain_raw_values = src->MayContainRawValues(); |
| 2733 #if V8_DOUBLE_FIELDS_UNBOXING | 2707 #if V8_DOUBLE_FIELDS_UNBOXING |
| 2734 LayoutDescriptorHelper helper(src->map()); | 2708 LayoutDescriptorHelper helper(src->map()); |
| 2735 bool has_only_tagged_fields = helper.all_fields_tagged(); | 2709 bool has_only_tagged_fields = helper.all_fields_tagged(); |
| 2736 #endif | 2710 #endif |
| 2737 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { | 2711 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2789 } | 2763 } |
| 2790 } | 2764 } |
| 2791 } else if (dest == CODE_SPACE) { | 2765 } else if (dest == CODE_SPACE) { |
| 2792 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); | 2766 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); |
| 2793 heap()->MoveBlock(dst_addr, src_addr, size); | 2767 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2794 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, | 2768 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
| 2795 SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr, | 2769 SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr, |
| 2796 SlotsBuffer::IGNORE_OVERFLOW); | 2770 SlotsBuffer::IGNORE_OVERFLOW); |
| 2797 Code::cast(dst)->Relocate(dst_addr - src_addr); | 2771 Code::cast(dst)->Relocate(dst_addr - src_addr); |
| 2798 } else { | 2772 } else { |
| 2799 DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE); | 2773 DCHECK(dest == NEW_SPACE); |
| 2800 heap()->MoveBlock(dst_addr, src_addr, size); | 2774 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2801 } | 2775 } |
| 2802 heap()->OnMoveEvent(dst, src, size); | 2776 heap()->OnMoveEvent(dst, src, size); |
| 2803 Memory::Address_at(src_addr) = dst_addr; | 2777 Memory::Address_at(src_addr) = dst_addr; |
| 2804 } | 2778 } |
| 2805 | 2779 |
| 2806 | 2780 |
| 2807 // Visitor for updating pointers from live objects in old spaces to new space. | 2781 // Visitor for updating pointers from live objects in old spaces to new space. |
| 2808 // It does not expect to encounter pointers to dead objects. | 2782 // It does not expect to encounter pointers to dead objects. |
| 2809 class PointersUpdatingVisitor : public ObjectVisitor { | 2783 class PointersUpdatingVisitor : public ObjectVisitor { |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2909 data[index++] = reinterpret_cast<uintptr_t>(slot); | 2883 data[index++] = reinterpret_cast<uintptr_t>(slot); |
| 2910 data[index++] = 0x15aaaaaaaaUL; | 2884 data[index++] = 0x15aaaaaaaaUL; |
| 2911 | 2885 |
| 2912 Address slot_address = reinterpret_cast<Address>(slot); | 2886 Address slot_address = reinterpret_cast<Address>(slot); |
| 2913 | 2887 |
| 2914 uintptr_t space_owner_id = 0xb001; | 2888 uintptr_t space_owner_id = 0xb001; |
| 2915 if (heap->new_space()->ToSpaceContains(slot_address)) { | 2889 if (heap->new_space()->ToSpaceContains(slot_address)) { |
| 2916 space_owner_id = 1; | 2890 space_owner_id = 1; |
| 2917 } else if (heap->new_space()->FromSpaceContains(slot_address)) { | 2891 } else if (heap->new_space()->FromSpaceContains(slot_address)) { |
| 2918 space_owner_id = 2; | 2892 space_owner_id = 2; |
| 2919 } else if (heap->old_pointer_space()->ContainsSafe(slot_address)) { | 2893 } else if (heap->old_space()->ContainsSafe(slot_address)) { |
| 2920 space_owner_id = 3; | 2894 space_owner_id = 3; |
| 2921 } else if (heap->old_data_space()->ContainsSafe(slot_address)) { | |
| 2922 space_owner_id = 4; | |
| 2923 } else if (heap->code_space()->ContainsSafe(slot_address)) { | 2895 } else if (heap->code_space()->ContainsSafe(slot_address)) { |
| 2924 space_owner_id = 5; | 2896 space_owner_id = 5; |
| 2925 } else if (heap->map_space()->ContainsSafe(slot_address)) { | 2897 } else if (heap->map_space()->ContainsSafe(slot_address)) { |
| 2926 space_owner_id = 6; | 2898 space_owner_id = 6; |
| 2927 } else if (heap->cell_space()->ContainsSafe(slot_address)) { | 2899 } else if (heap->cell_space()->ContainsSafe(slot_address)) { |
| 2928 space_owner_id = 7; | 2900 space_owner_id = 7; |
| 2929 } else if (heap->property_cell_space()->ContainsSafe(slot_address)) { | 2901 } else if (heap->property_cell_space()->ContainsSafe(slot_address)) { |
| 2930 space_owner_id = 8; | 2902 space_owner_id = 8; |
| 2931 } else { | 2903 } else { |
| 2932 // Lo space or other. | 2904 // Lo space or other. |
| (...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3014 } | 2986 } |
| 3015 | 2987 |
| 3016 return String::cast(*p); | 2988 return String::cast(*p); |
| 3017 } | 2989 } |
| 3018 | 2990 |
| 3019 | 2991 |
| 3020 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, | 2992 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, |
| 3021 int object_size) { | 2993 int object_size) { |
| 3022 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 2994 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
| 3023 | 2995 |
| 3024 OldSpace* target_space = heap()->TargetSpace(object); | 2996 OldSpace* old_space = heap()->old_space(); |
| 3025 | 2997 |
| 3026 DCHECK(target_space == heap()->old_pointer_space() || | |
| 3027 target_space == heap()->old_data_space()); | |
| 3028 HeapObject* target; | 2998 HeapObject* target; |
| 3029 AllocationResult allocation = target_space->AllocateRaw(object_size); | 2999 AllocationResult allocation = old_space->AllocateRaw(object_size); |
| 3030 if (allocation.To(&target)) { | 3000 if (allocation.To(&target)) { |
| 3031 MigrateObject(target, object, object_size, target_space->identity()); | 3001 MigrateObject(target, object, object_size, old_space->identity()); |
| 3032 heap()->IncrementPromotedObjectsSize(object_size); | 3002 heap()->IncrementPromotedObjectsSize(object_size); |
| 3033 return true; | 3003 return true; |
| 3034 } | 3004 } |
| 3035 | 3005 |
| 3036 return false; | 3006 return false; |
| 3037 } | 3007 } |
| 3038 | 3008 |
| 3039 | 3009 |
| 3040 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) { | 3010 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) { |
| 3041 // This function does not support large objects right now. | 3011 // This function does not support large objects right now. |
| (...skipping 465 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3507 | 3477 |
| 3508 return true; | 3478 return true; |
| 3509 } | 3479 } |
| 3510 | 3480 |
| 3511 | 3481 |
| 3512 static bool IsOnInvalidatedCodeObject(Address addr) { | 3482 static bool IsOnInvalidatedCodeObject(Address addr) { |
| 3513 // We did not record any slots in large objects thus | 3483 // We did not record any slots in large objects thus |
| 3514 // we can safely go to the page from the slot address. | 3484 // we can safely go to the page from the slot address. |
| 3515 Page* p = Page::FromAddress(addr); | 3485 Page* p = Page::FromAddress(addr); |
| 3516 | 3486 |
| 3517 // First check owner's identity because old pointer and old data spaces | 3487 // First check owner's identity because old space is swept concurrently or |
| 3518 // are swept lazily and might still have non-zero mark-bits on some | 3488 // lazily and might still have non-zero mark-bits on some pages. |
| 3519 // pages. | |
| 3520 if (p->owner()->identity() != CODE_SPACE) return false; | 3489 if (p->owner()->identity() != CODE_SPACE) return false; |
| 3521 | 3490 |
| 3522 // In code space only bits on evacuation candidates (but we don't record | 3491 // In code space only bits on evacuation candidates (but we don't record |
| 3523 // any slots on them) and under invalidated code objects are non-zero. | 3492 // any slots on them) and under invalidated code objects are non-zero. |
| 3524 MarkBit mark_bit = | 3493 MarkBit mark_bit = |
| 3525 p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); | 3494 p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); |
| 3526 | 3495 |
| 3527 return mark_bit.Get(); | 3496 return mark_bit.Get(); |
| 3528 } | 3497 } |
| 3529 | 3498 |
| (...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3687 if (list != NULL) list->Clear(); | 3656 if (list != NULL) list->Clear(); |
| 3688 } else { | 3657 } else { |
| 3689 if (FLAG_gc_verbose) { | 3658 if (FLAG_gc_verbose) { |
| 3690 PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n", | 3659 PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n", |
| 3691 reinterpret_cast<intptr_t>(p)); | 3660 reinterpret_cast<intptr_t>(p)); |
| 3692 } | 3661 } |
| 3693 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3662 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
| 3694 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); | 3663 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
| 3695 | 3664 |
| 3696 switch (space->identity()) { | 3665 switch (space->identity()) { |
| 3697 case OLD_DATA_SPACE: | 3666 case OLD_SPACE: |
| 3698 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | 3667 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
| 3699 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, | 3668 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, |
| 3700 &updating_visitor); | 3669 &updating_visitor); |
| 3701 break; | |
| 3702 case OLD_POINTER_SPACE: | |
| 3703 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | |
| 3704 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, | |
| 3705 &updating_visitor); | |
| 3706 break; | 3670 break; |
| 3707 case CODE_SPACE: | 3671 case CODE_SPACE: |
| 3708 if (FLAG_zap_code_space) { | 3672 if (FLAG_zap_code_space) { |
| 3709 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | 3673 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
| 3710 REBUILD_SKIP_LIST, ZAP_FREE_SPACE>(space, NULL, p, | 3674 REBUILD_SKIP_LIST, ZAP_FREE_SPACE>(space, NULL, p, |
| 3711 &updating_visitor); | 3675 &updating_visitor); |
| 3712 } else { | 3676 } else { |
| 3713 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | 3677 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
| 3714 REBUILD_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, | 3678 REBUILD_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, |
| 3715 &updating_visitor); | 3679 &updating_visitor); |
| (...skipping 496 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4212 max_freed_overall = Max(max_freed, max_freed_overall); | 4176 max_freed_overall = Max(max_freed, max_freed_overall); |
| 4213 if (p == space->end_of_unswept_pages()) break; | 4177 if (p == space->end_of_unswept_pages()) break; |
| 4214 } | 4178 } |
| 4215 return max_freed_overall; | 4179 return max_freed_overall; |
| 4216 } | 4180 } |
| 4217 | 4181 |
| 4218 | 4182 |
| 4219 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { | 4183 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { |
| 4220 int max_freed = 0; | 4184 int max_freed = 0; |
| 4221 if (page->TryParallelSweeping()) { | 4185 if (page->TryParallelSweeping()) { |
| 4222 FreeList* free_list = space == heap()->old_pointer_space() | 4186 FreeList* free_list = free_list_old_space_.get(); |
| 4223 ? free_list_old_pointer_space_.get() | |
| 4224 : free_list_old_data_space_.get(); | |
| 4225 FreeList private_free_list(space); | 4187 FreeList private_free_list(space); |
| 4226 max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, | 4188 max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, |
| 4227 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); | 4189 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); |
| 4228 free_list->Concatenate(&private_free_list); | 4190 free_list->Concatenate(&private_free_list); |
| 4229 } | 4191 } |
| 4230 return max_freed; | 4192 return max_freed; |
| 4231 } | 4193 } |
| 4232 | 4194 |
| 4233 | 4195 |
| 4234 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { | 4196 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4339 MoveEvacuationCandidatesToEndOfPagesList(); | 4301 MoveEvacuationCandidatesToEndOfPagesList(); |
| 4340 | 4302 |
| 4341 // Noncompacting collections simply sweep the spaces to clear the mark | 4303 // Noncompacting collections simply sweep the spaces to clear the mark |
| 4342 // bits and free the nonlive blocks (for old and map spaces). We sweep | 4304 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 4343 // the map space last because freeing non-live maps overwrites them and | 4305 // the map space last because freeing non-live maps overwrites them and |
| 4344 // the other spaces rely on possibly non-live maps to get the sizes for | 4306 // the other spaces rely on possibly non-live maps to get the sizes for |
| 4345 // non-live objects. | 4307 // non-live objects. |
| 4346 { | 4308 { |
| 4347 GCTracer::Scope sweep_scope(heap()->tracer(), | 4309 GCTracer::Scope sweep_scope(heap()->tracer(), |
| 4348 GCTracer::Scope::MC_SWEEP_OLDSPACE); | 4310 GCTracer::Scope::MC_SWEEP_OLDSPACE); |
| 4349 { | 4311 { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); } |
| 4350 SweepSpace(heap()->old_pointer_space(), CONCURRENT_SWEEPING); | |
| 4351 SweepSpace(heap()->old_data_space(), CONCURRENT_SWEEPING); | |
| 4352 } | |
| 4353 sweeping_in_progress_ = true; | 4312 sweeping_in_progress_ = true; |
| 4354 if (heap()->concurrent_sweeping_enabled()) { | 4313 if (heap()->concurrent_sweeping_enabled()) { |
| 4355 StartSweeperThreads(); | 4314 StartSweeperThreads(); |
| 4356 } | 4315 } |
| 4357 } | 4316 } |
| 4358 RemoveDeadInvalidatedCode(); | 4317 RemoveDeadInvalidatedCode(); |
| 4359 | 4318 |
| 4360 { | 4319 { |
| 4361 GCTracer::Scope sweep_scope(heap()->tracer(), | 4320 GCTracer::Scope sweep_scope(heap()->tracer(), |
| 4362 GCTracer::Scope::MC_SWEEP_CODE); | 4321 GCTracer::Scope::MC_SWEEP_CODE); |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4405 if (p->parallel_sweeping() == MemoryChunk::SWEEPING_FINALIZE) { | 4364 if (p->parallel_sweeping() == MemoryChunk::SWEEPING_FINALIZE) { |
| 4406 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); | 4365 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); |
| 4407 p->SetWasSwept(); | 4366 p->SetWasSwept(); |
| 4408 } | 4367 } |
| 4409 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); | 4368 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); |
| 4410 } | 4369 } |
| 4411 } | 4370 } |
| 4412 | 4371 |
| 4413 | 4372 |
| 4414 void MarkCompactCollector::ParallelSweepSpacesComplete() { | 4373 void MarkCompactCollector::ParallelSweepSpacesComplete() { |
| 4415 ParallelSweepSpaceComplete(heap()->old_pointer_space()); | 4374 ParallelSweepSpaceComplete(heap()->old_space()); |
| 4416 ParallelSweepSpaceComplete(heap()->old_data_space()); | |
| 4417 } | 4375 } |
| 4418 | 4376 |
| 4419 | 4377 |
| 4420 void MarkCompactCollector::EnableCodeFlushing(bool enable) { | 4378 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
| 4421 if (isolate()->debug()->is_loaded() || | 4379 if (isolate()->debug()->is_loaded() || |
| 4422 isolate()->debug()->has_break_points()) { | 4380 isolate()->debug()->has_break_points()) { |
| 4423 enable = false; | 4381 enable = false; |
| 4424 } | 4382 } |
| 4425 | 4383 |
| 4426 if (enable) { | 4384 if (enable) { |
| (...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4617 SlotsBuffer* buffer = *buffer_address; | 4575 SlotsBuffer* buffer = *buffer_address; |
| 4618 while (buffer != NULL) { | 4576 while (buffer != NULL) { |
| 4619 SlotsBuffer* next_buffer = buffer->next(); | 4577 SlotsBuffer* next_buffer = buffer->next(); |
| 4620 DeallocateBuffer(buffer); | 4578 DeallocateBuffer(buffer); |
| 4621 buffer = next_buffer; | 4579 buffer = next_buffer; |
| 4622 } | 4580 } |
| 4623 *buffer_address = NULL; | 4581 *buffer_address = NULL; |
| 4624 } | 4582 } |
| 4625 } | 4583 } |
| 4626 } // namespace v8::internal | 4584 } // namespace v8::internal |
| OLD | NEW |