| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/store-buffer.h" | 5 #include "src/heap/store-buffer.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 | 8 |
| 9 #include "src/counters.h" | 9 #include "src/counters.h" |
| 10 #include "src/heap/incremental-marking.h" | 10 #include "src/heap/incremental-marking.h" |
| (...skipping 394 matching lines...) |
| 405 Object** slot = reinterpret_cast<Object**>(*current); | 405 Object** slot = reinterpret_cast<Object**>(*current); |
| 406 Object* object = *slot; | 406 Object* object = *slot; |
| 407 CHECK(object->IsHeapObject()); | 407 CHECK(object->IsHeapObject()); |
| 408 CHECK(heap_->InNewSpace(object)); | 408 CHECK(heap_->InNewSpace(object)); |
| 409 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( | 409 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( |
| 410 reinterpret_cast<Address>(slot), HeapObject::cast(object)); | 410 reinterpret_cast<Address>(slot), HeapObject::cast(object)); |
| 411 } | 411 } |
| 412 } | 412 } |
| 413 | 413 |
| 414 | 414 |
| | 415 class FindPointersToNewSpaceVisitor final : public ObjectVisitor { |
| | 416 public: |
| | 417 FindPointersToNewSpaceVisitor(StoreBuffer* store_buffer, |
| | 418 ObjectSlotCallback callback) |
| | 419 : store_buffer_(store_buffer), callback_(callback) {} |
| | 420 |
| | 421 V8_INLINE void VisitPointers(Object** start, Object** end) override { |
| | 422 store_buffer_->FindPointersToNewSpaceInRegion( |
| | 423 reinterpret_cast<Address>(start), reinterpret_cast<Address>(end), |
| | 424 callback_); |
| | 425 } |
| | 426 |
| | 427 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {} |
| | 428 |
| | 429 private: |
| | 430 StoreBuffer* store_buffer_; |
| | 431 ObjectSlotCallback callback_; |
| | 432 }; |
| | 433 |
| | 434 |
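This added visitor is the core of the change: the per-type layout switch deleted further down (old lines 482-534) is replaced by letting each object describe its own tagged-pointer regions through HeapObject::IterateBody, which calls back into VisitPointers. A minimal, self-contained sketch of that dispatch pattern; Object, ObjectVisitor, ToyObject and RegionPrinter here are simplified stand-ins for illustration, not V8's real classes:

```cpp
#include <iostream>

// Stand-in for V8's tagged pointer type; not the real definition.
using Object = void*;

struct ObjectVisitor {
  virtual ~ObjectVisitor() = default;
  // Called once per contiguous run of tagged (pointer) fields.
  virtual void VisitPointers(Object** start, Object** end) = 0;
};

// A toy heap object: two tagged fields followed by a raw payload.
struct ToyObject {
  Object field_a;
  Object field_b;
  int raw_payload;  // raw data; IterateBody must not report it

  // Layout knowledge lives in the object itself, so visitors stay
  // generic -- mirroring the CL's delegation to HeapObject::IterateBody.
  void IterateBody(ObjectVisitor* v) {
    v->VisitPointers(&field_a, &field_b + 1);  // tagged region only
  }
};

// Analogous to FindPointersToNewSpaceVisitor: forwards each tagged
// region to a region scanner instead of switching on object type.
struct RegionPrinter final : public ObjectVisitor {
  void VisitPointers(Object** start, Object** end) override {
    std::cout << "tagged region of " << (end - start) << " slots\n";
  }
};

int main() {
  ToyObject obj{nullptr, nullptr, 42};
  RegionPrinter printer;
  obj.IterateBody(&printer);  // prints: tagged region of 2 slots
  return 0;
}
```

In the CL itself, VisitCodeEntry is deliberately overridden as a no-op, so code-entry slots are simply ignored by this region scan.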
| 415 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { | 435 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { |
| 416 // We do not sort or remove duplicated entries from the store buffer because | 436 // We do not sort or remove duplicated entries from the store buffer because |
| 417 // we expect that the callback will rebuild the store buffer, thus removing | 437 // we expect that the callback will rebuild the store buffer, thus removing |
| 418 // all duplicates and pointers to old space. | 438 // all duplicates and pointers to old space. |
| 419 bool some_pages_to_scan = PrepareForIteration(); | 439 bool some_pages_to_scan = PrepareForIteration(); |
| 420 | 440 |
| 421 // TODO(gc): we want to skip slots on evacuation candidates | 441 // TODO(gc): we want to skip slots on evacuation candidates |
| 422 // but we can't simply figure that out from the slot address | 442 // but we can't simply figure that out from the slot address |
| 423 // because the slot can belong to a large object. | 443 // because the slot can belong to a large object. |
| 424 IteratePointersInStoreBuffer(slot_callback); | 444 IteratePointersInStoreBuffer(slot_callback); |
| 425 | 445 |
| 426 // We are done scanning all the pointers that were in the store buffer, but | 446 // We are done scanning all the pointers that were in the store buffer, but |
| 427 // there may be some pages marked scan_on_scavenge that have pointers to new | 447 // there may be some pages marked scan_on_scavenge that have pointers to new |
| 428 // space that are not in the store buffer. We must scan them now. As we | 448 // space that are not in the store buffer. We must scan them now. As we |
| 429 // scan, the surviving pointers to new space will be added to the store | 449 // scan, the surviving pointers to new space will be added to the store |
| 430 // buffer. If there are still a lot of pointers to new space then we will | 450 // buffer. If there are still a lot of pointers to new space then we will |
| 431 // keep the scan_on_scavenge flag on the page and discard the pointers that | 451 // keep the scan_on_scavenge flag on the page and discard the pointers that |
| 432 // were added to the store buffer. If there are not many pointers to new | 452 // were added to the store buffer. If there are not many pointers to new |
| 433 // space left on the page we will keep the pointers in the store buffer and | 453 // space left on the page we will keep the pointers in the store buffer and |
| 434 // remove the flag from the page. | 454 // remove the flag from the page. |
| 435 if (some_pages_to_scan) { | 455 if (some_pages_to_scan) { |
| 436 if (callback_ != NULL) { | 456 if (callback_ != NULL) { |
| 437 (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); | 457 (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); |
| 438 } | 458 } |
| 439 PointerChunkIterator it(heap_); | 459 PointerChunkIterator it(heap_); |
| 440 MemoryChunk* chunk; | 460 MemoryChunk* chunk; |
| | 461 FindPointersToNewSpaceVisitor visitor(this, slot_callback); |
| 441 while ((chunk = it.next()) != NULL) { | 462 while ((chunk = it.next()) != NULL) { |
| 442 if (chunk->scan_on_scavenge()) { | 463 if (chunk->scan_on_scavenge()) { |
| 443 chunk->set_scan_on_scavenge(false); | 464 chunk->set_scan_on_scavenge(false); |
| 444 if (callback_ != NULL) { | 465 if (callback_ != NULL) { |
| 445 (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); | 466 (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); |
| 446 } | 467 } |
| 447 if (chunk->owner() == heap_->lo_space()) { | 468 if (chunk->owner() == heap_->lo_space()) { |
| 448 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); | 469 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); |
| 449 HeapObject* array = large_page->GetObject(); | 470 HeapObject* array = large_page->GetObject(); |
| 450 DCHECK(array->IsFixedArray()); | 471 DCHECK(array->IsFixedArray()); |
| (...skipping 17 matching lines...) |
| 468 slot_callback); | 489 slot_callback); |
| 469 } | 490 } |
| 470 } | 491 } |
| 471 } else { | 492 } else { |
| 472 heap_->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted( | 493 heap_->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted( |
| 473 page); | 494 page); |
| 474 HeapObjectIterator iterator(page); | 495 HeapObjectIterator iterator(page); |
| 475 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; | 496 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; |
| 476 heap_object = iterator.Next()) { | 497 heap_object = iterator.Next()) { |
| 477 // We iterate over objects that contain new space pointers only. | 498 // We iterate over objects that contain new space pointers only. |
| 478 Address obj_address = heap_object->address(); | 499 heap_object->IterateBody(&visitor); |
| 479 const int start_offset = HeapObject::kHeaderSize; | |
| 480 const int end_offset = heap_object->Size(); | |
| 481 | |
| 482 switch (heap_object->ContentType()) { | |
| 483 case HeapObjectContents::kTaggedValues: { | |
| 484 Address start_address = obj_address + start_offset; | |
| 485 Address end_address = obj_address + end_offset; | |
| 486 // Object has only tagged fields. | |
| 487 FindPointersToNewSpaceInRegion(start_address, end_address, | |
| 488 slot_callback); | |
| 489 break; | |
| 490 } | |
| 491 | |
| 492 case HeapObjectContents::kMixedValues: { | |
| 493 if (heap_object->IsFixedTypedArrayBase()) { | |
| 494 FindPointersToNewSpaceInRegion( | |
| 495 obj_address + FixedTypedArrayBase::kBasePointerOffset, | |
| 496 obj_address + FixedTypedArrayBase::kHeaderSize, | |
| 497 slot_callback); | |
| 498 } else if (heap_object->IsBytecodeArray()) { | |
| 499 FindPointersToNewSpaceInRegion( | |
| 500 obj_address + BytecodeArray::kConstantPoolOffset, | |
| 501 obj_address + BytecodeArray::kHeaderSize, | |
| 502 slot_callback); | |
| 503 } else if (heap_object->IsJSArrayBuffer()) { | |
| 504 FindPointersToNewSpaceInRegion( | |
| 505 obj_address + JSArrayBuffer::kPropertiesOffset, | |
| 506 obj_address + JSArrayBuffer::kByteLengthOffset + | |
| 507 kPointerSize, | |
| 508 slot_callback); | |
| 509 FindPointersToNewSpaceInRegion( | |
| 510 obj_address + JSArrayBuffer::kSize, | |
| 511 obj_address + JSArrayBuffer::kSizeWithInternalFields, | |
| 512 slot_callback); | |
| 513 } else if (FLAG_unbox_double_fields) { | |
| 514 LayoutDescriptorHelper helper(heap_object->map()); | |
| 515 DCHECK(!helper.all_fields_tagged()); | |
| 516 for (int offset = start_offset; offset < end_offset;) { | |
| 517 int end_of_region_offset; | |
| 518 if (helper.IsTagged(offset, end_offset, | |
| 519 &end_of_region_offset)) { | |
| 520 FindPointersToNewSpaceInRegion( | |
| 521 obj_address + offset, | |
| 522 obj_address + end_of_region_offset, slot_callback); | |
| 523 } | |
| 524 offset = end_of_region_offset; | |
| 525 } | |
| 526 } else { | |
| 527 UNREACHABLE(); | |
| 528 } | |
| 529 break; | |
| 530 } | |
| 531 | |
| 532 case HeapObjectContents::kRawValues: | |
| 533 break; | |
| 534 } | |
| 535 } | 500 } |
| 536 } | 501 } |
| 537 } | 502 } |
| 538 } | 503 } |
| 539 } | 504 } |
| 540 if (callback_ != NULL) { | 505 if (callback_ != NULL) { |
| 541 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); | 506 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
| 542 } | 507 } |
| 543 } | 508 } |
| 544 } | 509 } |
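The block comment at the top of this function (new lines 446-454) encodes a density heuristic: after a scan_on_scavenge page is scanned, the freshly recorded slots are either kept in the store buffer (sparse page, flag cleared) or discarded with the flag restored (dense page). A rough sketch of that decision under simplified assumptions; Slot, Page, FinishPageScan and kDensityThreshold are invented for illustration, not V8's real types or tuning:

```cpp
#include <cstddef>
#include <vector>

// Simplified stand-in for a store-buffer entry.
using Slot = void*;

struct Page {
  bool scan_on_scavenge = true;
};

// Assumed cutoff: beyond this many new-space pointers, rescanning the
// whole page on the next scavenge is taken to be cheaper than tracking
// each slot individually.
constexpr std::size_t kDensityThreshold = 64;

void FinishPageScan(Page* page, std::vector<Slot>* store_buffer,
                    std::size_t slots_recorded_for_page) {
  if (slots_recorded_for_page > kDensityThreshold) {
    // Dense page: discard the freshly recorded slots and keep the
    // scan_on_scavenge flag, so the page is rescanned wholesale.
    store_buffer->resize(store_buffer->size() - slots_recorded_for_page);
    page->scan_on_scavenge = true;
  } else {
    // Sparse page: keep the recorded slots in the store buffer and
    // clear the flag; per-slot tracking now suffices.
    page->scan_on_scavenge = false;
  }
}
```

The trade-off being balanced is store-buffer space spent on per-slot entries versus the cost of rescanning an entire page on the next scavenge.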
| (...skipping 95 matching lines...) |
| 640 DCHECK(start_of_current_page_ != store_buffer_->Top()); | 605 DCHECK(start_of_current_page_ != store_buffer_->Top()); |
| 641 store_buffer_->SetTop(start_of_current_page_); | 606 store_buffer_->SetTop(start_of_current_page_); |
| 642 } | 607 } |
| 643 } else { | 608 } else { |
| 644 UNREACHABLE(); | 609 UNREACHABLE(); |
| 645 } | 610 } |
| 646 } | 611 } |
| 647 | 612 |
| 648 } // namespace internal | 613 } // namespace internal |
| 649 } // namespace v8 | 614 } // namespace v8 |