| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "store-buffer.h" | 5 #include "store-buffer.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 | 8 |
| 9 #include "v8.h" | 9 #include "v8.h" |
| 10 #include "counters.h" | 10 #include "counters.h" |
| (...skipping 314 matching lines...) | |
| 325 } | 325 } |
| 326 | 326 |
| 327 | 327 |
| 328 void StoreBuffer::GCPrologue() { | 328 void StoreBuffer::GCPrologue() { |
| 329 ClearFilteringHashSets(); | 329 ClearFilteringHashSets(); |
| 330 during_gc_ = true; | 330 during_gc_ = true; |
| 331 } | 331 } |
| 332 | 332 |
| 333 | 333 |
| 334 #ifdef VERIFY_HEAP | 334 #ifdef VERIFY_HEAP |
| 335 static void DummyScavengePointer(HeapObject** p, HeapObject* o) { | |
| 336 // Do nothing. | |
| 337 } | |
| 338 | |
| 339 | |
| 340 void StoreBuffer::VerifyPointers(PagedSpace* space, | |
| 341 RegionCallback region_callback) { | |
| 342 PageIterator it(space); | |
| 343 | |
| 344 while (it.has_next()) { | |
| 345 Page* page = it.next(); | |
| 346 FindPointersToNewSpaceOnPage( | |
| 347 reinterpret_cast<PagedSpace*>(page->owner()), | |
| 348 page, | |
| 349 region_callback, | |
| 350 &DummyScavengePointer, | |
| 351 false); | |
| 352 } | |
| 353 } | |
| 354 | |
| 355 | |
| 356 void StoreBuffer::VerifyPointers(LargeObjectSpace* space) { | 335 void StoreBuffer::VerifyPointers(LargeObjectSpace* space) { |
| 357 LargeObjectIterator it(space); | 336 LargeObjectIterator it(space); |
| 358 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 337 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 359 if (object->IsFixedArray()) { | 338 if (object->IsFixedArray()) { |
| 360 Address slot_address = object->address(); | 339 Address slot_address = object->address(); |
| 361 Address end = object->address() + object->Size(); | 340 Address end = object->address() + object->Size(); |
| 362 | 341 |
| 363 while (slot_address < end) { | 342 while (slot_address < end) { |
| 364 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); | 343 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address); |
| 365 // When we are not in GC, the Heap::InNewSpace() predicate | 344 // When we are not in GC, the Heap::InNewSpace() predicate |
| 366 // checks that pointers which satisfy it point into the | 345 // checks that pointers which satisfy it point into the |
| 367 // active semispace. | 346 // active semispace. |
| 368 Object* object = reinterpret_cast<Object*>( | 347 Object* object = reinterpret_cast<Object*>( |
| 369 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); | 348 NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot))); |
| 370 heap_->InNewSpace(object); | 349 heap_->InNewSpace(object); |
| 371 slot_address += kPointerSize; | 350 slot_address += kPointerSize; |
| 372 } | 351 } |
| 373 } | 352 } |
| 374 } | 353 } |
| 375 } | 354 } |
| 376 #endif | 355 #endif |
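
NoBarrier_Load in the loop above is V8's atomicops spelling of a relaxed atomic load: the slot may be written concurrently by the sweeper, so the read has to be atomic, but no ordering is needed because only the loaded value is inspected. A minimal modern-C++ sketch of the same idea (the names are illustrative, not V8's):

    #include <atomic>
    #include <cstdint>

    // A heap slot modeled as an atomic word. A relaxed load makes the racy
    // read well-defined without ordering any surrounding accesses.
    using AtomicSlot = std::atomic<uintptr_t>;

    uintptr_t ReadSlotRelaxed(const AtomicSlot& slot) {
      return slot.load(std::memory_order_relaxed);
    }
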
| 377 | 356 |
| 378 | 357 |
| 379 void StoreBuffer::Verify() { | 358 void StoreBuffer::Verify() { |
| 380 #ifdef VERIFY_HEAP | 359 #ifdef VERIFY_HEAP |
| 381 VerifyPointers(heap_->old_pointer_space(), | |
| 382 &StoreBuffer::FindPointersToNewSpaceInRegion); | |
| 383 VerifyPointers(heap_->map_space(), | |
| 384 &StoreBuffer::FindPointersToNewSpaceInMapsRegion); | |
| 385 VerifyPointers(heap_->lo_space()); | 360 VerifyPointers(heap_->lo_space()); |
| 386 #endif | 361 #endif |
| 387 } | 362 } |
| 388 | 363 |
| 389 | 364 |
| 390 void StoreBuffer::GCEpilogue() { | 365 void StoreBuffer::GCEpilogue() { |
| 391 during_gc_ = false; | 366 during_gc_ = false; |
| 392 #ifdef VERIFY_HEAP | 367 #ifdef VERIFY_HEAP |
| 393 if (FLAG_verify_heap) { | 368 if (FLAG_verify_heap) { |
| 394 Verify(); | 369 Verify(); |
| (...skipping 80 matching lines...) | |
| 475 ASSERT(map_aligned_start == start); | 450 ASSERT(map_aligned_start == start); |
| 476 ASSERT(map_aligned_end == end); | 451 ASSERT(map_aligned_end == end); |
| 477 | 452 |
| 478 FindPointersToNewSpaceInMaps(map_aligned_start, | 453 FindPointersToNewSpaceInMaps(map_aligned_start, |
| 479 map_aligned_end, | 454 map_aligned_end, |
| 480 slot_callback, | 455 slot_callback, |
| 481 clear_maps); | 456 clear_maps); |
| 482 } | 457 } |
| 483 | 458 |
| 484 | 459 |
| 485 // This function iterates over all the pointers in a paged space in the heap, | |
| 486 // looking for pointers into new space. Within the pages there may be dead | |
| 487 // objects that have not been overwritten by free spaces or fillers because of | |
| 488 // concurrent sweeping. These dead objects may not contain pointers to new | |
| 489 // space. The garbage areas that have been swept properly (these will normally | |
| 490 // be the large ones) will be marked with free space and filler map words. In | |
| 491 // addition, any area that has never been used at all for object allocation must | |
| 492 // be marked with a free space or filler. Because the free space and filler | |
| 493 // maps do not move, we can always recognize these even after a compaction. | |
| 494 // Normal objects like FixedArrays and JSObjects should not contain references | |
| 495 // to these maps. Constant pool array objects may contain references to these | |
| 496 // maps; however, since constant pool arrays cannot contain pointers to | |
| 497 // new-space objects, they are skipped. The special garbage section (see | |
| 498 // comment in spaces.h) is skipped since it can contain absolutely anything. | |
| 499 // Any objects that are allocated during iteration may or may not be visited by | |
| 500 // the iteration, but they will not be partially visited. | |
| 501 void StoreBuffer::FindPointersToNewSpaceOnPage( | |
| 502 PagedSpace* space, | |
| 503 Page* page, | |
| 504 RegionCallback region_callback, | |
| 505 ObjectSlotCallback slot_callback, | |
| 506 bool clear_maps) { | |
| 507 Address visitable_start = page->area_start(); | |
| 508 Address end_of_page = page->area_end(); | |
| 509 | |
| 510 Address visitable_end = visitable_start; | |
| 511 | |
| 512 Object* free_space_map = heap_->free_space_map(); | |
| 513 Object* two_pointer_filler_map = heap_->two_pointer_filler_map(); | |
| 514 Object* constant_pool_array_map = heap_->constant_pool_array_map(); | |
| 515 | |
| 516 while (visitable_end < end_of_page) { | |
| 517 // The sweeper thread may concurrently write free-space maps and sizes to | |
| 518 // this page. We need an acquire load here to make sure that we get a | |
| 519 // consistent view of the maps and their sizes. | |
| 520 Object* o = reinterpret_cast<Object*>( | |
| 521 Acquire_Load(reinterpret_cast<AtomicWord*>(visitable_end))); | |
| 522 // Skip fillers and constant pool arrays (which never contain new-space | |
| 523 // pointers but can contain pointers that can be mistaken for fillers), | |
| 524 // but do not skip things that merely look like fillers in the special | |
| 525 // garbage section, which can contain anything. | |
| 526 if (o == free_space_map || | |
| 527 o == two_pointer_filler_map || | |
| 528 o == constant_pool_array_map || | |
| 529 (visitable_end == space->top() && visitable_end != space->limit())) { | |
| 530 if (visitable_start != visitable_end) { | |
| 531 // After calling this the special garbage section may have moved. | |
| 532 (this->*region_callback)(visitable_start, | |
| 533 visitable_end, | |
| 534 slot_callback, | |
| 535 clear_maps); | |
| 536 if (visitable_end >= space->top() && visitable_end < space->limit()) { | |
| 537 visitable_end = space->limit(); | |
| 538 visitable_start = visitable_end; | |
| 539 continue; | |
| 540 } | |
| 541 } | |
| 542 if (visitable_end == space->top() && visitable_end != space->limit()) { | |
| 543 visitable_start = visitable_end = space->limit(); | |
| 544 } else { | |
| 545 // At this point we are either at the start of a filler, at the start of | |
| 546 // a constant pool array, or at the point where space->top() used to be | |
| 547 // before the region_callback call above. Either way we can skip the | |
| 548 // object at the current spot: we don't promise to visit objects | |
| 549 // allocated during heap traversal, and if space->top() moved, then it | |
| 550 // must be because an object was allocated at this point. | |
| 551 visitable_start = | |
| 552 visitable_end + HeapObject::FromAddress(visitable_end)->Size(); | |
| 553 visitable_end = visitable_start; | |
| 554 } | |
| 555 } else { | |
| 556 ASSERT(o != free_space_map); | |
| 557 ASSERT(o != two_pointer_filler_map); | |
| 558 ASSERT(o != constant_pool_array_map); | |
| 559 ASSERT(visitable_end < space->top() || visitable_end >= space->limit()); | |
| 560 visitable_end += kPointerSize; | |
| 561 } | |
| 562 } | |
| 563 ASSERT(visitable_end == end_of_page); | |
| 564 if (visitable_start != visitable_end) { | |
| 565 (this->*region_callback)(visitable_start, | |
| 566 visitable_end, | |
| 567 slot_callback, | |
| 568 clear_maps); | |
| 569 } | |
| 570 } | |
| 571 | |
| 572 | |
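
The acquire-load comment in FindPointersToNewSpaceOnPage above describes a standard release/acquire publication pattern: the sweeper writes the free-space size first and publishes the map word last, so an acquire load of the map word also makes the size visible. A self-contained sketch under that assumption (FreeSpaceCell and both function names are hypothetical, not V8 API):

    #include <atomic>
    #include <cstdint>

    // Hypothetical stand-in for a swept garbage area: a map word that
    // identifies it as free space, followed by its size in bytes.
    struct FreeSpaceCell {
      std::atomic<uintptr_t> map_word;
      uintptr_t size;
    };

    // Sweeper thread: write the size first, publish the map word last.
    void PublishFreeSpace(FreeSpaceCell* cell, uintptr_t free_space_map,
                          uintptr_t size) {
      cell->size = size;
      cell->map_word.store(free_space_map, std::memory_order_release);
    }

    // Scanning thread: if the acquire load observes the free-space map,
    // the size written before the matching release store is visible too.
    bool TryReadFreeSpaceSize(FreeSpaceCell* cell, uintptr_t free_space_map,
                              uintptr_t* size_out) {
      if (cell->map_word.load(std::memory_order_acquire) != free_space_map) {
        return false;
      }
      *size_out = cell->size;
      return true;
    }
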
| 573 void StoreBuffer::IteratePointersInStoreBuffer( | 460 void StoreBuffer::IteratePointersInStoreBuffer( |
| 574 ObjectSlotCallback slot_callback, | 461 ObjectSlotCallback slot_callback, |
| 575 bool clear_maps) { | 462 bool clear_maps) { |
| 576 Address* limit = old_top_; | 463 Address* limit = old_top_; |
| 577 old_top_ = old_start_; | 464 old_top_ = old_start_; |
| 578 { | 465 { |
| 579 DontMoveStoreBufferEntriesScope scope(this); | 466 DontMoveStoreBufferEntriesScope scope(this); |
| 580 for (Address* current = old_start_; current < limit; current++) { | 467 for (Address* current = old_start_; current < limit; current++) { |
| 581 #ifdef DEBUG | 468 #ifdef DEBUG |
| 582 Address* saved_top = old_top_; | 469 Address* saved_top = old_top_; |
| (...skipping 66 matching lines...) | |
| 649 if (chunk->owner() == heap_->lo_space()) { | 536 if (chunk->owner() == heap_->lo_space()) { |
| 650 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); | 537 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); |
| 651 HeapObject* array = large_page->GetObject(); | 538 HeapObject* array = large_page->GetObject(); |
| 652 ASSERT(array->IsFixedArray()); | 539 ASSERT(array->IsFixedArray()); |
| 653 Address start = array->address(); | 540 Address start = array->address(); |
| 654 Address end = start + array->Size(); | 541 Address end = start + array->Size(); |
| 655 FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps); | 542 FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps); |
| 656 } else { | 543 } else { |
| 657 Page* page = reinterpret_cast<Page*>(chunk); | 544 Page* page = reinterpret_cast<Page*>(chunk); |
| 658 PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); | 545 PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner()); |
| 659 FindPointersToNewSpaceOnPage( | 546 Address start = page->area_start(); |
| 660 owner, | 547 Address end = page->area_end(); |
| 661 page, | 548 if (owner == heap_->map_space()) { |
| 662 (owner == heap_->map_space() ? | 549 FindPointersToNewSpaceInMapsRegion( |
| 663 &StoreBuffer::FindPointersToNewSpaceInMapsRegion : | 550 start, end, slot_callback, clear_maps); |
| 664 &StoreBuffer::FindPointersToNewSpaceInRegion), | 551 } else { |
| 665 slot_callback, | 552 FindPointersToNewSpaceInRegion( |
| 666 clear_maps); | 553 start, end, slot_callback, clear_maps); |
| 554 } |
| 667 } | 555 } |
| 668 } | 556 } |
| 669 } | 557 } |
| 670 if (callback_ != NULL) { | 558 if (callback_ != NULL) { |
| 671 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); | 559 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
| 672 } | 560 } |
| 673 } | 561 } |
| 674 } | 562 } |
| 675 | 563 |
| 676 | 564 |
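
The deleted path dispatched through a pointer to member function, as in (this->*region_callback)(...), while the new code branches on the owning space and calls the scanner directly. For readers unfamiliar with the member-pointer syntax, a small self-contained example (all names hypothetical):

    #include <cstdio>

    class Scanner {
     public:
      // A callback type naming any Scanner member with this signature.
      typedef void (Scanner::*RegionCallback)(int start, int end);

      void PlainRegion(int start, int end) {
        std::printf("plain region [%d, %d)\n", start, end);
      }
      void MapsRegion(int start, int end) {
        std::printf("maps region [%d, %d)\n", start, end);
      }

      void Visit(bool is_map_space, int start, int end) {
        RegionCallback cb =
            is_map_space ? &Scanner::MapsRegion : &Scanner::PlainRegion;
        (this->*cb)(start, end);  // invoke through the member pointer
      }
    };

    int main() {
      Scanner s;
      s.Visit(false, 0, 8);   // prints "plain region [0, 8)"
      s.Visit(true, 8, 16);   // prints "maps region [8, 16)"
      return 0;
    }
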
| (...skipping 44 matching lines...) | |
| 721 } | 609 } |
| 722 old_buffer_is_sorted_ = false; | 610 old_buffer_is_sorted_ = false; |
| 723 old_buffer_is_filtered_ = false; | 611 old_buffer_is_filtered_ = false; |
| 724 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); | 612 *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2); |
| 725 ASSERT(old_top_ <= old_limit_); | 613 ASSERT(old_top_ <= old_limit_); |
| 726 } | 614 } |
| 727 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); | 615 heap_->isolate()->counters()->store_buffer_compactions()->Increment(); |
| 728 } | 616 } |
| 729 | 617 |
| 730 } } // namespace v8::internal | 618 } } // namespace v8::internal |
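
The int_addr << kPointerSizeLog2 in Compact undoes a compression that, judging by the shift, stores each slot address divided by the pointer size; this is lossless because heap slots are pointer-aligned, so the low kPointerSizeLog2 bits are always zero. A worked round trip under that assumption (the value 3 assumes a 64-bit target with 8-byte pointers):

    #include <cassert>
    #include <cstdint>

    const int kPointerSizeLog2 = 3;  // log2(8) for 8-byte pointers

    int main() {
      uintptr_t address = 0x7f3a00001238;                 // hypothetical aligned slot
      uintptr_t int_addr = address >> kPointerSizeLog2;   // compressed form
      uintptr_t restored = int_addr << kPointerSizeLog2;  // what Compact writes back
      assert(restored == address);
      return 0;
    }
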