| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 128 PageIterator it(space); | 128 PageIterator it(space); |
| 129 | 129 |
| 130 while (it.has_next()) { | 130 while (it.has_next()) { |
| 131 Page* p = it.next(); | 131 Page* p = it.next(); |
| 132 VerifyMarking(space->heap(), p->area_start(), p->area_end()); | 132 VerifyMarking(space->heap(), p->area_start(), p->area_end()); |
| 133 } | 133 } |
| 134 } | 134 } |
| 135 | 135 |
| 136 | 136 |
| 137 static void VerifyMarking(Heap* heap) { | 137 static void VerifyMarking(Heap* heap) { |
| 138 VerifyMarking(heap->old_pointer_space()); | 138 VerifyMarking(heap->old_space()); |
| 139 VerifyMarking(heap->old_data_space()); | |
| 140 VerifyMarking(heap->code_space()); | 139 VerifyMarking(heap->code_space()); |
| 141 VerifyMarking(heap->cell_space()); | 140 VerifyMarking(heap->cell_space()); |
| 142 VerifyMarking(heap->map_space()); | 141 VerifyMarking(heap->map_space()); |
| 143 VerifyMarking(heap->new_space()); | 142 VerifyMarking(heap->new_space()); |
| 144 | 143 |
| 145 VerifyMarkingVisitor visitor(heap); | 144 VerifyMarkingVisitor visitor(heap); |
| 146 | 145 |
| 147 LargeObjectIterator it(heap->lo_space()); | 146 LargeObjectIterator it(heap->lo_space()); |
| 148 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 147 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 149 if (MarkCompactCollector::IsMarked(obj)) { | 148 if (MarkCompactCollector::IsMarked(obj)) { |
| (...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 193 while (current < limit) { | 192 while (current < limit) { |
| 194 HeapObject* object = HeapObject::FromAddress(current); | 193 HeapObject* object = HeapObject::FromAddress(current); |
| 195 object->Iterate(&visitor); | 194 object->Iterate(&visitor); |
| 196 current += object->Size(); | 195 current += object->Size(); |
| 197 } | 196 } |
| 198 } | 197 } |
| 199 } | 198 } |
| 200 | 199 |
| 201 | 200 |
| 202 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { | 201 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { |
| 203 if (FLAG_use_allocation_folding && | 202 if (FLAG_use_allocation_folding && (space == heap->old_space())) { |
| 204 (space == heap->old_pointer_space() || space == heap->old_data_space())) { | |
| 205 return; | 203 return; |
| 206 } | 204 } |
| 207 PageIterator it(space); | 205 PageIterator it(space); |
| 208 | 206 |
| 209 while (it.has_next()) { | 207 while (it.has_next()) { |
| 210 Page* p = it.next(); | 208 Page* p = it.next(); |
| 211 if (p->IsEvacuationCandidate()) continue; | 209 if (p->IsEvacuationCandidate()) continue; |
| 212 VerifyEvacuation(p); | 210 VerifyEvacuation(p); |
| 213 } | 211 } |
| 214 } | 212 } |
| 215 | 213 |
| 216 | 214 |
| 217 static void VerifyEvacuation(Heap* heap) { | 215 static void VerifyEvacuation(Heap* heap) { |
| 218 VerifyEvacuation(heap, heap->old_pointer_space()); | 216 VerifyEvacuation(heap, heap->old_space()); |
| 219 VerifyEvacuation(heap, heap->old_data_space()); | |
| 220 VerifyEvacuation(heap, heap->code_space()); | 217 VerifyEvacuation(heap, heap->code_space()); |
| 221 VerifyEvacuation(heap, heap->cell_space()); | 218 VerifyEvacuation(heap, heap->cell_space()); |
| 222 VerifyEvacuation(heap, heap->map_space()); | 219 VerifyEvacuation(heap, heap->map_space()); |
| 223 VerifyEvacuation(heap->new_space()); | 220 VerifyEvacuation(heap->new_space()); |
| 224 | 221 |
| 225 VerifyEvacuationVisitor visitor; | 222 VerifyEvacuationVisitor visitor; |
| 226 heap->IterateStrongRoots(&visitor, VISIT_ALL); | 223 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
| 227 } | 224 } |
| 228 #endif // VERIFY_HEAP | 225 #endif // VERIFY_HEAP |
| 229 | 226 |
| 230 | 227 |
| 231 void MarkCompactCollector::SetUp() { | 228 void MarkCompactCollector::SetUp() { |
| 232 free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space())); | 229 free_list_old_space_.Reset(new FreeList(heap_->old_space())); |
| 233 free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space())); | |
| 234 } | 230 } |
| 235 | 231 |
| 236 | 232 |
| 237 void MarkCompactCollector::TearDown() { | 233 void MarkCompactCollector::TearDown() { |
| 238 AbortCompaction(); | 234 AbortCompaction(); |
| 239 delete marking_deque_memory_; | 235 delete marking_deque_memory_; |
| 240 } | 236 } |
| 241 | 237 |
| 242 | 238 |
| 243 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { | 239 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { |
| (...skipping 10 matching lines...) Expand all Loading... |
| 254 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", | 250 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", |
| 255 AllocationSpaceName(space->identity()), number_of_pages, | 251 AllocationSpaceName(space->identity()), number_of_pages, |
| 256 static_cast<int>(free), static_cast<double>(free) * 100 / reserved); | 252 static_cast<int>(free), static_cast<double>(free) * 100 / reserved); |
| 257 } | 253 } |
| 258 | 254 |
| 259 | 255 |
| 260 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { | 256 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { |
| 261 if (!compacting_) { | 257 if (!compacting_) { |
| 262 DCHECK(evacuation_candidates_.length() == 0); | 258 DCHECK(evacuation_candidates_.length() == 0); |
| 263 | 259 |
| 264 CollectEvacuationCandidates(heap()->old_pointer_space()); | 260 CollectEvacuationCandidates(heap()->old_space()); |
| 265 CollectEvacuationCandidates(heap()->old_data_space()); | |
| 266 | 261 |
| 267 if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION || | 262 if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION || |
| 268 FLAG_incremental_code_compaction)) { | 263 FLAG_incremental_code_compaction)) { |
| 269 CollectEvacuationCandidates(heap()->code_space()); | 264 CollectEvacuationCandidates(heap()->code_space()); |
| 270 } else if (FLAG_trace_fragmentation) { | 265 } else if (FLAG_trace_fragmentation) { |
| 271 TraceFragmentation(heap()->code_space()); | 266 TraceFragmentation(heap()->code_space()); |
| 272 } | 267 } |
| 273 | 268 |
| 274 if (FLAG_trace_fragmentation) { | 269 if (FLAG_trace_fragmentation) { |
| 275 TraceFragmentation(heap()->map_space()); | 270 TraceFragmentation(heap()->map_space()); |
| 276 TraceFragmentation(heap()->cell_space()); | 271 TraceFragmentation(heap()->cell_space()); |
| 277 } | 272 } |
| 278 | 273 |
| 279 heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists(); | 274 heap()->old_space()->EvictEvacuationCandidatesFromFreeLists(); |
| 280 heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists(); | |
| 281 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); | 275 heap()->code_space()->EvictEvacuationCandidatesFromFreeLists(); |
| 282 | 276 |
| 283 compacting_ = evacuation_candidates_.length() > 0; | 277 compacting_ = evacuation_candidates_.length() > 0; |
| 284 } | 278 } |
| 285 | 279 |
| 286 return compacting_; | 280 return compacting_; |
| 287 } | 281 } |
| 288 | 282 |
| 289 | 283 |
| 290 void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) { | 284 void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) { |
| 291 PageIterator it(space); | 285 PageIterator it(space); |
| 292 while (it.has_next()) { | 286 while (it.has_next()) { |
| 293 Page* p = it.next(); | 287 Page* p = it.next(); |
| 294 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); | 288 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); |
| 295 } | 289 } |
| 296 } | 290 } |
| 297 | 291 |
| 298 | 292 |
| 299 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { | 293 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { |
| 300 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); | 294 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); |
| 301 | 295 |
| 302 ClearInvalidSlotsBufferEntries(heap_->old_pointer_space()); | 296 ClearInvalidSlotsBufferEntries(heap_->old_space()); |
| 303 ClearInvalidSlotsBufferEntries(heap_->old_data_space()); | |
| 304 ClearInvalidSlotsBufferEntries(heap_->code_space()); | 297 ClearInvalidSlotsBufferEntries(heap_->code_space()); |
| 305 ClearInvalidSlotsBufferEntries(heap_->cell_space()); | 298 ClearInvalidSlotsBufferEntries(heap_->cell_space()); |
| 306 ClearInvalidSlotsBufferEntries(heap_->map_space()); | 299 ClearInvalidSlotsBufferEntries(heap_->map_space()); |
| 307 | 300 |
| 308 LargeObjectIterator it(heap_->lo_space()); | 301 LargeObjectIterator it(heap_->lo_space()); |
| 309 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 302 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 310 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 303 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); |
| 311 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); | 304 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); |
| 312 } | 305 } |
| 313 } | 306 } |
| 314 | 307 |
| 315 | 308 |
| 316 #ifdef VERIFY_HEAP | 309 #ifdef VERIFY_HEAP |
| 317 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) { | 310 static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) { |
| 318 PageIterator it(space); | 311 PageIterator it(space); |
| 319 while (it.has_next()) { | 312 while (it.has_next()) { |
| 320 Page* p = it.next(); | 313 Page* p = it.next(); |
| 321 SlotsBuffer::VerifySlots(heap, p->slots_buffer()); | 314 SlotsBuffer::VerifySlots(heap, p->slots_buffer()); |
| 322 } | 315 } |
| 323 } | 316 } |
| 324 | 317 |
| 325 | 318 |
| 326 static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) { | 319 static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) { |
| 327 heap->store_buffer()->VerifyValidStoreBufferEntries(); | 320 heap->store_buffer()->VerifyValidStoreBufferEntries(); |
| 328 | 321 |
| 329 VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space()); | 322 VerifyValidSlotsBufferEntries(heap, heap->old_space()); |
| 330 VerifyValidSlotsBufferEntries(heap, heap->old_data_space()); | |
| 331 VerifyValidSlotsBufferEntries(heap, heap->code_space()); | 323 VerifyValidSlotsBufferEntries(heap, heap->code_space()); |
| 332 VerifyValidSlotsBufferEntries(heap, heap->cell_space()); | 324 VerifyValidSlotsBufferEntries(heap, heap->cell_space()); |
| 333 VerifyValidSlotsBufferEntries(heap, heap->map_space()); | 325 VerifyValidSlotsBufferEntries(heap, heap->map_space()); |
| 334 | 326 |
| 335 LargeObjectIterator it(heap->lo_space()); | 327 LargeObjectIterator it(heap->lo_space()); |
| 336 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 328 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 337 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 329 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); |
| 338 SlotsBuffer::VerifySlots(heap, chunk->slots_buffer()); | 330 SlotsBuffer::VerifySlots(heap, chunk->slots_buffer()); |
| 339 } | 331 } |
| 340 } | 332 } |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 411 | 403 |
| 412 while (it.has_next()) { | 404 while (it.has_next()) { |
| 413 NewSpacePage* p = it.next(); | 405 NewSpacePage* p = it.next(); |
| 414 CHECK(p->markbits()->IsClean()); | 406 CHECK(p->markbits()->IsClean()); |
| 415 CHECK_EQ(0, p->LiveBytes()); | 407 CHECK_EQ(0, p->LiveBytes()); |
| 416 } | 408 } |
| 417 } | 409 } |
| 418 | 410 |
| 419 | 411 |
| 420 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 412 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
| 421 VerifyMarkbitsAreClean(heap_->old_pointer_space()); | 413 VerifyMarkbitsAreClean(heap_->old_space()); |
| 422 VerifyMarkbitsAreClean(heap_->old_data_space()); | |
| 423 VerifyMarkbitsAreClean(heap_->code_space()); | 414 VerifyMarkbitsAreClean(heap_->code_space()); |
| 424 VerifyMarkbitsAreClean(heap_->cell_space()); | 415 VerifyMarkbitsAreClean(heap_->cell_space()); |
| 425 VerifyMarkbitsAreClean(heap_->map_space()); | 416 VerifyMarkbitsAreClean(heap_->map_space()); |
| 426 VerifyMarkbitsAreClean(heap_->new_space()); | 417 VerifyMarkbitsAreClean(heap_->new_space()); |
| 427 | 418 |
| 428 LargeObjectIterator it(heap_->lo_space()); | 419 LargeObjectIterator it(heap_->lo_space()); |
| 429 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 420 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 430 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 421 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
| 431 CHECK(Marking::IsWhite(mark_bit)); | 422 CHECK(Marking::IsWhite(mark_bit)); |
| 432 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 423 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 470 | 461 |
| 471 while (it.has_next()) { | 462 while (it.has_next()) { |
| 472 Bitmap::Clear(it.next()); | 463 Bitmap::Clear(it.next()); |
| 473 } | 464 } |
| 474 } | 465 } |
| 475 | 466 |
| 476 | 467 |
| 477 void MarkCompactCollector::ClearMarkbits() { | 468 void MarkCompactCollector::ClearMarkbits() { |
| 478 ClearMarkbitsInPagedSpace(heap_->code_space()); | 469 ClearMarkbitsInPagedSpace(heap_->code_space()); |
| 479 ClearMarkbitsInPagedSpace(heap_->map_space()); | 470 ClearMarkbitsInPagedSpace(heap_->map_space()); |
| 480 ClearMarkbitsInPagedSpace(heap_->old_pointer_space()); | 471 ClearMarkbitsInPagedSpace(heap_->old_space()); |
| 481 ClearMarkbitsInPagedSpace(heap_->old_data_space()); | |
| 482 ClearMarkbitsInPagedSpace(heap_->cell_space()); | 472 ClearMarkbitsInPagedSpace(heap_->cell_space()); |
| 483 ClearMarkbitsInNewSpace(heap_->new_space()); | 473 ClearMarkbitsInNewSpace(heap_->new_space()); |
| 484 | 474 |
| 485 LargeObjectIterator it(heap_->lo_space()); | 475 LargeObjectIterator it(heap_->lo_space()); |
| 486 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 476 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| 487 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 477 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
| 488 mark_bit.Clear(); | 478 mark_bit.Clear(); |
| 489 mark_bit.Next().Clear(); | 479 mark_bit.Next().Clear(); |
| 490 Page::FromAddress(obj->address())->ResetProgressBar(); | 480 Page::FromAddress(obj->address())->ResetProgressBar(); |
| 491 Page::FromAddress(obj->address())->ResetLiveBytes(); | 481 Page::FromAddress(obj->address())->ResetLiveBytes(); |
| (...skipping 15 matching lines...) Expand all Loading... |
| 507 } | 497 } |
| 508 | 498 |
| 509 Heap* heap_; | 499 Heap* heap_; |
| 510 PagedSpace* space_; | 500 PagedSpace* space_; |
| 511 | 501 |
| 512 DISALLOW_COPY_AND_ASSIGN(SweeperTask); | 502 DISALLOW_COPY_AND_ASSIGN(SweeperTask); |
| 513 }; | 503 }; |
| 514 | 504 |
| 515 | 505 |
| 516 void MarkCompactCollector::StartSweeperThreads() { | 506 void MarkCompactCollector::StartSweeperThreads() { |
| 517 DCHECK(free_list_old_pointer_space_.get()->IsEmpty()); | 507 DCHECK(free_list_old_space_.get()->IsEmpty()); |
| 518 DCHECK(free_list_old_data_space_.get()->IsEmpty()); | |
| 519 V8::GetCurrentPlatform()->CallOnBackgroundThread( | 508 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
| 520 new SweeperTask(heap(), heap()->old_data_space()), | 509 new SweeperTask(heap(), heap()->old_space()), |
| 521 v8::Platform::kShortRunningTask); | |
| 522 V8::GetCurrentPlatform()->CallOnBackgroundThread( | |
| 523 new SweeperTask(heap(), heap()->old_pointer_space()), | |
| 524 v8::Platform::kShortRunningTask); | 510 v8::Platform::kShortRunningTask); |
| 525 } | 511 } |
| 526 | 512 |
| 527 | 513 |
| 528 void MarkCompactCollector::EnsureSweepingCompleted() { | 514 void MarkCompactCollector::EnsureSweepingCompleted() { |
| 529 DCHECK(sweeping_in_progress_ == true); | 515 DCHECK(sweeping_in_progress_ == true); |
| 530 | 516 |
| 531 // If sweeping is not completed or not running at all, we try to complete it | 517 // If sweeping is not completed or not running at all, we try to complete it |
| 532 // here. | 518 // here. |
| 533 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { | 519 if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) { |
| 534 SweepInParallel(heap()->paged_space(OLD_DATA_SPACE), 0); | 520 SweepInParallel(heap()->paged_space(OLD_SPACE), 0); |
| 535 SweepInParallel(heap()->paged_space(OLD_POINTER_SPACE), 0); | |
| 536 } | 521 } |
| 537 // Wait twice for both jobs. | 522 // Wait for the concurrent sweeper job. |
| 538 if (heap()->concurrent_sweeping_enabled()) { | 523 if (heap()->concurrent_sweeping_enabled()) { |
| 539 pending_sweeper_jobs_semaphore_.Wait(); | 524 pending_sweeper_jobs_semaphore_.Wait(); |
| 540 pending_sweeper_jobs_semaphore_.Wait(); | |
| 541 } | 525 } |
| 542 ParallelSweepSpacesComplete(); | 526 ParallelSweepSpacesComplete(); |
| 543 sweeping_in_progress_ = false; | 527 sweeping_in_progress_ = false; |
| 544 RefillFreeList(heap()->paged_space(OLD_DATA_SPACE)); | 528 RefillFreeList(heap()->paged_space(OLD_SPACE)); |
| 545 RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE)); | 529 heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes(); |
| 546 heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes(); | |
| 547 heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes(); | |
| 548 | 530 |
| 549 #ifdef VERIFY_HEAP | 531 #ifdef VERIFY_HEAP |
| 550 if (FLAG_verify_heap && !evacuation()) { | 532 if (FLAG_verify_heap && !evacuation()) { |
| 551 VerifyEvacuation(heap_); | 533 VerifyEvacuation(heap_); |
| 552 } | 534 } |
| 553 #endif | 535 #endif |
| 554 } | 536 } |
| 555 | 537 |
| 556 | 538 |
| 557 bool MarkCompactCollector::IsSweepingCompleted() { | 539 bool MarkCompactCollector::IsSweepingCompleted() { |
| 558 if (!pending_sweeper_jobs_semaphore_.WaitFor( | 540 if (!pending_sweeper_jobs_semaphore_.WaitFor( |
| 559 base::TimeDelta::FromSeconds(0))) { | 541 base::TimeDelta::FromSeconds(0))) { |
| 560 return false; | 542 return false; |
| 561 } | 543 } |
| 562 pending_sweeper_jobs_semaphore_.Signal(); | 544 pending_sweeper_jobs_semaphore_.Signal(); |
| 563 return true; | 545 return true; |
| 564 } | 546 } |
| 565 | 547 |
| 566 | 548 |
| 567 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { | 549 void MarkCompactCollector::RefillFreeList(PagedSpace* space) { |
| 568 FreeList* free_list; | 550 FreeList* free_list; |
| 569 | 551 |
| 570 if (space == heap()->old_pointer_space()) { | 552 if (space == heap()->old_space()) { |
| 571 free_list = free_list_old_pointer_space_.get(); | 553 free_list = free_list_old_space_.get(); |
| 572 } else if (space == heap()->old_data_space()) { | |
| 573 free_list = free_list_old_data_space_.get(); | |
| 574 } else { | 554 } else { |
| 575 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure | 555 // Any PagedSpace might invoke RefillFreeLists, so we need to make sure |
| 576 // to only refill them for old data and pointer spaces. | 556 // to only refill them for the old space. |
| 577 return; | 557 return; |
| 578 } | 558 } |
| 579 | 559 |
| 580 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); | 560 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); |
| 581 space->AddToAccountingStats(freed_bytes); | 561 space->AddToAccountingStats(freed_bytes); |
| 582 space->DecrementUnsweptFreeBytes(freed_bytes); | 562 space->DecrementUnsweptFreeBytes(freed_bytes); |
| 583 } | 563 } |
| 584 | 564 |
| 585 | 565 |
| 586 void Marking::TransferMark(Address old_start, Address new_start) { | 566 void Marking::TransferMark(Address old_start, Address new_start) { |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 620 ObjectColor new_color = Color(new_mark_bit); | 600 ObjectColor new_color = Color(new_mark_bit); |
| 621 DCHECK(new_color == old_color); | 601 DCHECK(new_color == old_color); |
| 622 #endif | 602 #endif |
| 623 } | 603 } |
| 624 | 604 |
| 625 | 605 |
| 626 const char* AllocationSpaceName(AllocationSpace space) { | 606 const char* AllocationSpaceName(AllocationSpace space) { |
| 627 switch (space) { | 607 switch (space) { |
| 628 case NEW_SPACE: | 608 case NEW_SPACE: |
| 629 return "NEW_SPACE"; | 609 return "NEW_SPACE"; |
| 630 case OLD_POINTER_SPACE: | 610 case OLD_SPACE: |
| 631 return "OLD_POINTER_SPACE"; | 611 return "OLD_SPACE"; |
| 632 case OLD_DATA_SPACE: | |
| 633 return "OLD_DATA_SPACE"; | |
| 634 case CODE_SPACE: | 612 case CODE_SPACE: |
| 635 return "CODE_SPACE"; | 613 return "CODE_SPACE"; |
| 636 case MAP_SPACE: | 614 case MAP_SPACE: |
| 637 return "MAP_SPACE"; | 615 return "MAP_SPACE"; |
| 638 case CELL_SPACE: | 616 case CELL_SPACE: |
| 639 return "CELL_SPACE"; | 617 return "CELL_SPACE"; |
| 640 case LO_SPACE: | 618 case LO_SPACE: |
| 641 return "LO_SPACE"; | 619 return "LO_SPACE"; |
| 642 default: | 620 default: |
| 643 UNREACHABLE(); | 621 UNREACHABLE(); |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 692 return 1; | 670 return 1; |
| 693 } | 671 } |
| 694 | 672 |
| 695 if (ratio <= ratio_threshold) return 0; // Not fragmented. | 673 if (ratio <= ratio_threshold) return 0; // Not fragmented. |
| 696 | 674 |
| 697 return static_cast<int>(ratio - ratio_threshold); | 675 return static_cast<int>(ratio - ratio_threshold); |
| 698 } | 676 } |
| 699 | 677 |
| 700 | 678 |
| 701 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { | 679 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { |
| 702 DCHECK(space->identity() == OLD_POINTER_SPACE || | 680 DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE); |
| 703 space->identity() == OLD_DATA_SPACE || | |
| 704 space->identity() == CODE_SPACE); | |
| 705 | 681 |
| 706 static const int kMaxMaxEvacuationCandidates = 1000; | 682 static const int kMaxMaxEvacuationCandidates = 1000; |
| 707 int number_of_pages = space->CountTotalPages(); | 683 int number_of_pages = space->CountTotalPages(); |
| 708 int max_evacuation_candidates = | 684 int max_evacuation_candidates = |
| 709 static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1); | 685 static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1); |
| 710 | 686 |
| 711 if (FLAG_stress_compaction || FLAG_always_compact) { | 687 if (FLAG_stress_compaction || FLAG_always_compact) { |
| 712 max_evacuation_candidates = kMaxMaxEvacuationCandidates; | 688 max_evacuation_candidates = kMaxMaxEvacuationCandidates; |
| 713 } | 689 } |
| 714 | 690 |
| (...skipping 1162 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1877 (next_cell << (Bitmap::kBitsPerCell - 1))); | 1853 (next_cell << (Bitmap::kBitsPerCell - 1))); |
| 1878 } else { | 1854 } else { |
| 1879 grey_objects = current_cell & (current_cell >> 1); | 1855 grey_objects = current_cell & (current_cell >> 1); |
| 1880 } | 1856 } |
| 1881 | 1857 |
| 1882 int offset = 0; | 1858 int offset = 0; |
| 1883 while (grey_objects != 0) { | 1859 while (grey_objects != 0) { |
| 1884 int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects); | 1860 int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects); |
| 1885 grey_objects >>= trailing_zeros; | 1861 grey_objects >>= trailing_zeros; |
| 1886 offset += trailing_zeros; | 1862 offset += trailing_zeros; |
| 1887 MarkBit markbit(cell, 1 << offset, false); | 1863 MarkBit markbit(cell, 1 << offset); |
| 1888 DCHECK(Marking::IsGrey(markbit)); | 1864 DCHECK(Marking::IsGrey(markbit)); |
| 1889 Marking::GreyToBlack(markbit); | 1865 Marking::GreyToBlack(markbit); |
| 1890 Address addr = cell_base + offset * kPointerSize; | 1866 Address addr = cell_base + offset * kPointerSize; |
| 1891 HeapObject* object = HeapObject::FromAddress(addr); | 1867 HeapObject* object = HeapObject::FromAddress(addr); |
| 1892 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); | 1868 MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size()); |
| 1893 marking_deque->PushBlack(object); | 1869 marking_deque->PushBlack(object); |
| 1894 if (marking_deque->IsFull()) return; | 1870 if (marking_deque->IsFull()) return; |
| 1895 offset += 2; | 1871 offset += 2; |
| 1896 grey_objects >>= 2; | 1872 grey_objects >>= 2; |
| 1897 } | 1873 } |
| (...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2101 // before sweeping completes. If sweeping completes, there are no remaining | 2077 // before sweeping completes. If sweeping completes, there are no remaining |
| 2102 // overflowed objects in the heap so the overflow flag on the markings stack | 2078 // overflowed objects in the heap so the overflow flag on the markings stack |
| 2103 // is cleared. | 2079 // is cleared. |
| 2104 void MarkCompactCollector::RefillMarkingDeque() { | 2080 void MarkCompactCollector::RefillMarkingDeque() { |
| 2105 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); | 2081 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow); |
| 2106 DCHECK(marking_deque_.overflowed()); | 2082 DCHECK(marking_deque_.overflowed()); |
| 2107 | 2083 |
| 2108 DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_); | 2084 DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_); |
| 2109 if (marking_deque_.IsFull()) return; | 2085 if (marking_deque_.IsFull()) return; |
| 2110 | 2086 |
| 2111 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, | 2087 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_space()); |
| 2112 heap()->old_pointer_space()); | |
| 2113 if (marking_deque_.IsFull()) return; | |
| 2114 | |
| 2115 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space()); | |
| 2116 if (marking_deque_.IsFull()) return; | 2088 if (marking_deque_.IsFull()) return; |
| 2117 | 2089 |
| 2118 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space()); | 2090 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space()); |
| 2119 if (marking_deque_.IsFull()) return; | 2091 if (marking_deque_.IsFull()) return; |
| 2120 | 2092 |
| 2121 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space()); | 2093 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space()); |
| 2122 if (marking_deque_.IsFull()) return; | 2094 if (marking_deque_.IsFull()) return; |
| 2123 | 2095 |
| 2124 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space()); | 2096 DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space()); |
| 2125 if (marking_deque_.IsFull()) return; | 2097 if (marking_deque_.IsFull()) return; |
| (...skipping 658 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2784 // to new space. We should clear them to avoid encountering them during next | 2756 // to new space. We should clear them to avoid encountering them during next |
| 2785 // pointer iteration. This is an issue if the store buffer overflows and we | 2757 // pointer iteration. This is an issue if the store buffer overflows and we |
| 2786 // have to scan the entire old space, including dead objects, looking for | 2758 // have to scan the entire old space, including dead objects, looking for |
| 2787 // pointers to new space. | 2759 // pointers to new space. |
| 2788 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, | 2760 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src, |
| 2789 int size, AllocationSpace dest) { | 2761 int size, AllocationSpace dest) { |
| 2790 Address dst_addr = dst->address(); | 2762 Address dst_addr = dst->address(); |
| 2791 Address src_addr = src->address(); | 2763 Address src_addr = src->address(); |
| 2792 DCHECK(heap()->AllowedToBeMigrated(src, dest)); | 2764 DCHECK(heap()->AllowedToBeMigrated(src, dest)); |
| 2793 DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize); | 2765 DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize); |
| 2794 if (dest == OLD_POINTER_SPACE) { | 2766 if (dest == OLD_SPACE) { |
| 2795 Address src_slot = src_addr; | 2767 Address src_slot = src_addr; |
| 2796 Address dst_slot = dst_addr; | 2768 Address dst_slot = dst_addr; |
| 2797 DCHECK(IsAligned(size, kPointerSize)); | 2769 DCHECK(IsAligned(size, kPointerSize)); |
| 2798 | 2770 |
| 2799 bool may_contain_raw_values = src->MayContainRawValues(); | 2771 bool may_contain_raw_values = src->MayContainRawValues(); |
| 2800 #if V8_DOUBLE_FIELDS_UNBOXING | 2772 #if V8_DOUBLE_FIELDS_UNBOXING |
| 2801 LayoutDescriptorHelper helper(src->map()); | 2773 LayoutDescriptorHelper helper(src->map()); |
| 2802 bool has_only_tagged_fields = helper.all_fields_tagged(); | 2774 bool has_only_tagged_fields = helper.all_fields_tagged(); |
| 2803 #endif | 2775 #endif |
| 2804 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { | 2776 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2856 } | 2828 } |
| 2857 } | 2829 } |
| 2858 } else if (dest == CODE_SPACE) { | 2830 } else if (dest == CODE_SPACE) { |
| 2859 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); | 2831 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); |
| 2860 heap()->MoveBlock(dst_addr, src_addr, size); | 2832 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2861 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, | 2833 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
| 2862 SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr, | 2834 SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr, |
| 2863 SlotsBuffer::IGNORE_OVERFLOW); | 2835 SlotsBuffer::IGNORE_OVERFLOW); |
| 2864 Code::cast(dst)->Relocate(dst_addr - src_addr); | 2836 Code::cast(dst)->Relocate(dst_addr - src_addr); |
| 2865 } else { | 2837 } else { |
| 2866 DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE); | 2838 DCHECK(dest == NEW_SPACE); |
| 2867 heap()->MoveBlock(dst_addr, src_addr, size); | 2839 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2868 } | 2840 } |
| 2869 heap()->OnMoveEvent(dst, src, size); | 2841 heap()->OnMoveEvent(dst, src, size); |
| 2870 Memory::Address_at(src_addr) = dst_addr; | 2842 Memory::Address_at(src_addr) = dst_addr; |
| 2871 } | 2843 } |
| 2872 | 2844 |
| 2873 | 2845 |
| 2874 // Visitor for updating pointers from live objects in old spaces to new space. | 2846 // Visitor for updating pointers from live objects in old spaces to new space. |
| 2875 // It does not expect to encounter pointers to dead objects. | 2847 // It does not expect to encounter pointers to dead objects. |
| 2876 class PointersUpdatingVisitor : public ObjectVisitor { | 2848 class PointersUpdatingVisitor : public ObjectVisitor { |
| (...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2978 data[index++] = reinterpret_cast<uintptr_t>(slot); | 2950 data[index++] = reinterpret_cast<uintptr_t>(slot); |
| 2979 data[index++] = 0x15aaaaaaaaUL; | 2951 data[index++] = 0x15aaaaaaaaUL; |
| 2980 | 2952 |
| 2981 Address slot_address = reinterpret_cast<Address>(slot); | 2953 Address slot_address = reinterpret_cast<Address>(slot); |
| 2982 | 2954 |
| 2983 uintptr_t space_owner_id = 0xb001; | 2955 uintptr_t space_owner_id = 0xb001; |
| 2984 if (heap->new_space()->ToSpaceContains(slot_address)) { | 2956 if (heap->new_space()->ToSpaceContains(slot_address)) { |
| 2985 space_owner_id = 1; | 2957 space_owner_id = 1; |
| 2986 } else if (heap->new_space()->FromSpaceContains(slot_address)) { | 2958 } else if (heap->new_space()->FromSpaceContains(slot_address)) { |
| 2987 space_owner_id = 2; | 2959 space_owner_id = 2; |
| 2988 } else if (heap->old_pointer_space()->ContainsSafe(slot_address)) { | 2960 } else if (heap->old_space()->ContainsSafe(slot_address)) { |
| 2989 space_owner_id = 3; | 2961 space_owner_id = 3; |
| 2990 } else if (heap->old_data_space()->ContainsSafe(slot_address)) { | 2962 } else if (heap->code_space()->ContainsSafe(slot_address)) { |
| 2991 space_owner_id = 4; | 2963 space_owner_id = 4; |
| 2992 } else if (heap->code_space()->ContainsSafe(slot_address)) { | 2964 } else if (heap->map_space()->ContainsSafe(slot_address)) { |
| 2993 space_owner_id = 5; | 2965 space_owner_id = 5; |
| 2994 } else if (heap->map_space()->ContainsSafe(slot_address)) { | 2966 } else if (heap->cell_space()->ContainsSafe(slot_address)) { |
| 2995 space_owner_id = 6; | 2967 space_owner_id = 6; |
| 2996 } else if (heap->cell_space()->ContainsSafe(slot_address)) { | |
| 2997 space_owner_id = 7; | |
| 2998 } else { | 2968 } else { |
| 2999 // Lo space or other. | 2969 // Lo space or other. |
| 3000 space_owner_id = 8; | 2970 space_owner_id = 7; |
| 3001 } | 2971 } |
| 3002 data[index++] = space_owner_id; | 2972 data[index++] = space_owner_id; |
| 3003 data[index++] = 0x20aaaaaaaaUL; | 2973 data[index++] = 0x20aaaaaaaaUL; |
| 3004 | 2974 |
| 3005 // Find map word lying near before the slot address (usually the map word is | 2975 // Find map word lying near before the slot address (usually the map word is |
| 3006 // at -3 words from the slot but just in case we look up further.) | 2976 // at -3 words from the slot but just in case we look up further.) |
| 3007 Object** map_slot = slot; | 2977 Object** map_slot = slot; |
| 3008 bool found = false; | 2978 bool found = false; |
| 3009 const int kMaxDistanceToMap = 64; | 2979 const int kMaxDistanceToMap = 64; |
| 3010 for (int i = 0; i < kMaxDistanceToMap; i++, map_slot--) { | 2980 for (int i = 0; i < kMaxDistanceToMap; i++, map_slot--) { |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3081 } | 3051 } |
| 3082 | 3052 |
| 3083 return String::cast(*p); | 3053 return String::cast(*p); |
| 3084 } | 3054 } |
| 3085 | 3055 |
| 3086 | 3056 |
| 3087 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, | 3057 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, |
| 3088 int object_size) { | 3058 int object_size) { |
| 3089 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 3059 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
| 3090 | 3060 |
| 3091 OldSpace* target_space = heap()->TargetSpace(object); | 3061 OldSpace* old_space = heap()->old_space(); |
| 3092 | 3062 |
| 3093 DCHECK(target_space == heap()->old_pointer_space() || | |
| 3094 target_space == heap()->old_data_space()); | |
| 3095 HeapObject* target; | 3063 HeapObject* target; |
| 3096 AllocationResult allocation = target_space->AllocateRaw(object_size); | 3064 AllocationResult allocation = old_space->AllocateRaw(object_size); |
| 3097 if (allocation.To(&target)) { | 3065 if (allocation.To(&target)) { |
| 3098 MigrateObject(target, object, object_size, target_space->identity()); | 3066 MigrateObject(target, object, object_size, old_space->identity()); |
| 3099 heap()->IncrementPromotedObjectsSize(object_size); | 3067 heap()->IncrementPromotedObjectsSize(object_size); |
| 3100 return true; | 3068 return true; |
| 3101 } | 3069 } |
| 3102 | 3070 |
| 3103 return false; | 3071 return false; |
| 3104 } | 3072 } |
| 3105 | 3073 |
| 3106 | 3074 |
| 3107 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot, | 3075 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot, |
| 3108 HeapObject** out_object) { | 3076 HeapObject** out_object) { |
| (...skipping 490 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3599 | 3567 |
| 3600 return true; | 3568 return true; |
| 3601 } | 3569 } |
| 3602 | 3570 |
| 3603 | 3571 |
| 3604 static bool IsOnInvalidatedCodeObject(Address addr) { | 3572 static bool IsOnInvalidatedCodeObject(Address addr) { |
| 3605 // We did not record any slots in large objects thus | 3573 // We did not record any slots in large objects thus |
| 3606 // we can safely go to the page from the slot address. | 3574 // we can safely go to the page from the slot address. |
| 3607 Page* p = Page::FromAddress(addr); | 3575 Page* p = Page::FromAddress(addr); |
| 3608 | 3576 |
| 3609 // First check owner's identity because old pointer and old data spaces | 3577 // First check owner's identity because old space is swept concurrently or |
| 3610 // are swept lazily and might still have non-zero mark-bits on some | 3578 // lazily and might still have non-zero mark-bits on some pages. |
| 3611 // pages. | |
| 3612 if (p->owner()->identity() != CODE_SPACE) return false; | 3579 if (p->owner()->identity() != CODE_SPACE) return false; |
| 3613 | 3580 |
| 3614 // In code space only bits on evacuation candidates (but we don't record | 3581 // In code space only bits on evacuation candidates (but we don't record |
| 3615 // any slots on them) and under invalidated code objects are non-zero. | 3582 // any slots on them) and under invalidated code objects are non-zero. |
| 3616 MarkBit mark_bit = | 3583 MarkBit mark_bit = |
| 3617 p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); | 3584 p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); |
| 3618 | 3585 |
| 3619 return mark_bit.Get(); | 3586 return mark_bit.Get(); |
| 3620 } | 3587 } |
| 3621 | 3588 |
| (...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3780 if (list != NULL) list->Clear(); | 3747 if (list != NULL) list->Clear(); |
| 3781 } else { | 3748 } else { |
| 3782 if (FLAG_gc_verbose) { | 3749 if (FLAG_gc_verbose) { |
| 3783 PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n", | 3750 PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n", |
| 3784 reinterpret_cast<intptr_t>(p)); | 3751 reinterpret_cast<intptr_t>(p)); |
| 3785 } | 3752 } |
| 3786 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3753 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
| 3787 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); | 3754 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
| 3788 | 3755 |
| 3789 switch (space->identity()) { | 3756 switch (space->identity()) { |
| 3790 case OLD_DATA_SPACE: | 3757 case OLD_SPACE: |
| 3791 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | 3758 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
| 3792 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, | 3759 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, |
| 3793 &updating_visitor); | 3760 &updating_visitor); |
| 3794 break; | |
| 3795 case OLD_POINTER_SPACE: | |
| 3796 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | |
| 3797 IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, | |
| 3798 &updating_visitor); | |
| 3799 break; | 3761 break; |
| 3800 case CODE_SPACE: | 3762 case CODE_SPACE: |
| 3801 if (FLAG_zap_code_space) { | 3763 if (FLAG_zap_code_space) { |
| 3802 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | 3764 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
| 3803 REBUILD_SKIP_LIST, ZAP_FREE_SPACE>(space, NULL, p, | 3765 REBUILD_SKIP_LIST, ZAP_FREE_SPACE>(space, NULL, p, |
| 3804 &updating_visitor); | 3766 &updating_visitor); |
| 3805 } else { | 3767 } else { |
| 3806 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, | 3768 Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, |
| 3807 REBUILD_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, | 3769 REBUILD_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p, |
| 3808 &updating_visitor); | 3770 &updating_visitor); |
| (...skipping 487 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4296 max_freed_overall = Max(max_freed, max_freed_overall); | 4258 max_freed_overall = Max(max_freed, max_freed_overall); |
| 4297 if (p == space->end_of_unswept_pages()) break; | 4259 if (p == space->end_of_unswept_pages()) break; |
| 4298 } | 4260 } |
| 4299 return max_freed_overall; | 4261 return max_freed_overall; |
| 4300 } | 4262 } |
| 4301 | 4263 |
| 4302 | 4264 |
| 4303 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { | 4265 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) { |
| 4304 int max_freed = 0; | 4266 int max_freed = 0; |
| 4305 if (page->TryParallelSweeping()) { | 4267 if (page->TryParallelSweeping()) { |
| 4306 FreeList* free_list = space == heap()->old_pointer_space() | 4268 FreeList* free_list = free_list_old_space_.get(); |
| 4307 ? free_list_old_pointer_space_.get() | |
| 4308 : free_list_old_data_space_.get(); | |
| 4309 FreeList private_free_list(space); | 4269 FreeList private_free_list(space); |
| 4310 max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, | 4270 max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST, |
| 4311 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); | 4271 IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL); |
| 4312 free_list->Concatenate(&private_free_list); | 4272 free_list->Concatenate(&private_free_list); |
| 4313 } | 4273 } |
| 4314 return max_freed; | 4274 return max_freed; |
| 4315 } | 4275 } |
| 4316 | 4276 |
| 4317 | 4277 |
| 4318 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { | 4278 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4423 MoveEvacuationCandidatesToEndOfPagesList(); | 4383 MoveEvacuationCandidatesToEndOfPagesList(); |
| 4424 | 4384 |
| 4425 // Noncompacting collections simply sweep the spaces to clear the mark | 4385 // Noncompacting collections simply sweep the spaces to clear the mark |
| 4426 // bits and free the nonlive blocks (for old and map spaces). We sweep | 4386 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 4427 // the map space last because freeing non-live maps overwrites them and | 4387 // the map space last because freeing non-live maps overwrites them and |
| 4428 // the other spaces rely on possibly non-live maps to get the sizes for | 4388 // the other spaces rely on possibly non-live maps to get the sizes for |
| 4429 // non-live objects. | 4389 // non-live objects. |
| 4430 { | 4390 { |
| 4431 GCTracer::Scope sweep_scope(heap()->tracer(), | 4391 GCTracer::Scope sweep_scope(heap()->tracer(), |
| 4432 GCTracer::Scope::MC_SWEEP_OLDSPACE); | 4392 GCTracer::Scope::MC_SWEEP_OLDSPACE); |
| 4433 { | 4393 { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); } |
| 4434 SweepSpace(heap()->old_pointer_space(), CONCURRENT_SWEEPING); | |
| 4435 SweepSpace(heap()->old_data_space(), CONCURRENT_SWEEPING); | |
| 4436 } | |
| 4437 sweeping_in_progress_ = true; | 4394 sweeping_in_progress_ = true; |
| 4438 if (heap()->concurrent_sweeping_enabled()) { | 4395 if (heap()->concurrent_sweeping_enabled()) { |
| 4439 StartSweeperThreads(); | 4396 StartSweeperThreads(); |
| 4440 } | 4397 } |
| 4441 } | 4398 } |
| 4442 RemoveDeadInvalidatedCode(); | 4399 RemoveDeadInvalidatedCode(); |
| 4443 | 4400 |
| 4444 { | 4401 { |
| 4445 GCTracer::Scope sweep_scope(heap()->tracer(), | 4402 GCTracer::Scope sweep_scope(heap()->tracer(), |
| 4446 GCTracer::Scope::MC_SWEEP_CODE); | 4403 GCTracer::Scope::MC_SWEEP_CODE); |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4494 if (p->parallel_sweeping() == MemoryChunk::SWEEPING_FINALIZE) { | 4451 if (p->parallel_sweeping() == MemoryChunk::SWEEPING_FINALIZE) { |
| 4495 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); | 4452 p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE); |
| 4496 p->SetWasSwept(); | 4453 p->SetWasSwept(); |
| 4497 } | 4454 } |
| 4498 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); | 4455 DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE); |
| 4499 } | 4456 } |
| 4500 } | 4457 } |
| 4501 | 4458 |
| 4502 | 4459 |
| 4503 void MarkCompactCollector::ParallelSweepSpacesComplete() { | 4460 void MarkCompactCollector::ParallelSweepSpacesComplete() { |
| 4504 ParallelSweepSpaceComplete(heap()->old_pointer_space()); | 4461 ParallelSweepSpaceComplete(heap()->old_space()); |
| 4505 ParallelSweepSpaceComplete(heap()->old_data_space()); | |
| 4506 } | 4462 } |
| 4507 | 4463 |
| 4508 | 4464 |
| 4509 void MarkCompactCollector::EnableCodeFlushing(bool enable) { | 4465 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
| 4510 if (isolate()->debug()->is_loaded() || | 4466 if (isolate()->debug()->is_loaded() || |
| 4511 isolate()->debug()->has_break_points()) { | 4467 isolate()->debug()->has_break_points()) { |
| 4512 enable = false; | 4468 enable = false; |
| 4513 } | 4469 } |
| 4514 | 4470 |
| 4515 if (enable) { | 4471 if (enable) { |
| (...skipping 168 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4684 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow); | 4640 isolate()->CountUsage(v8::Isolate::UseCounterFeature::kSlotsBufferOverflow); |
| 4685 | 4641 |
| 4686 // TODO(gc) If all evacuation candidates are too popular we | 4642 // TODO(gc) If all evacuation candidates are too popular we |
| 4687 // should stop slots recording entirely. | 4643 // should stop slots recording entirely. |
| 4688 page->ClearEvacuationCandidate(); | 4644 page->ClearEvacuationCandidate(); |
| 4689 | 4645 |
| 4690 // We were not collecting slots on this page that point | 4646 // We were not collecting slots on this page that point |
| 4691 // to other evacuation candidates thus we have to | 4647 // to other evacuation candidates thus we have to |
| 4692 // rescan the page after evacuation to discover and update all | 4648 // rescan the page after evacuation to discover and update all |
| 4693 // pointers to evacuated objects. | 4649 // pointers to evacuated objects. |
| 4694 if (page->owner()->identity() == OLD_DATA_SPACE) { | 4650 page->SetFlag(Page::RESCAN_ON_EVACUATION); |
| 4695 evacuation_candidates_.RemoveElement(page); | |
| 4696 } else { | |
| 4697 page->SetFlag(Page::RESCAN_ON_EVACUATION); | |
| 4698 } | |
| 4699 } | 4651 } |
| 4700 | 4652 |
| 4701 | 4653 |
| 4702 void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) { | 4654 void MarkCompactCollector::RecordCodeEntrySlot(Address slot, Code* target) { |
| 4703 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); | 4655 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); |
| 4704 if (target_page->IsEvacuationCandidate() && | 4656 if (target_page->IsEvacuationCandidate() && |
| 4705 !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) { | 4657 !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) { |
| 4706 if (!SlotsBuffer::AddTo(&slots_buffer_allocator_, | 4658 if (!SlotsBuffer::AddTo(&slots_buffer_allocator_, |
| 4707 target_page->slots_buffer_address(), | 4659 target_page->slots_buffer_address(), |
| 4708 SlotsBuffer::CODE_ENTRY_SLOT, slot, | 4660 SlotsBuffer::CODE_ENTRY_SLOT, slot, |
| (...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4787 SlotsBuffer* buffer = *buffer_address; | 4739 SlotsBuffer* buffer = *buffer_address; |
| 4788 while (buffer != NULL) { | 4740 while (buffer != NULL) { |
| 4789 SlotsBuffer* next_buffer = buffer->next(); | 4741 SlotsBuffer* next_buffer = buffer->next(); |
| 4790 DeallocateBuffer(buffer); | 4742 DeallocateBuffer(buffer); |
| 4791 buffer = next_buffer; | 4743 buffer = next_buffer; |
| 4792 } | 4744 } |
| 4793 *buffer_address = NULL; | 4745 *buffer_address = NULL; |
| 4794 } | 4746 } |
| 4795 } | 4747 } |
| 4796 } // namespace v8::internal | 4748 } // namespace v8::internal |
| OLD | NEW |