OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/store-buffer.h" | 5 #include "src/heap/store-buffer.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 | 8 |
9 #include "src/counters.h" | 9 #include "src/counters.h" |
10 #include "src/heap/incremental-marking.h" | 10 #include "src/heap/incremental-marking.h" |
(...skipping 394 matching lines...)
405 Object** slot = reinterpret_cast<Object**>(*current); | 405 Object** slot = reinterpret_cast<Object**>(*current); |
406 Object* object = *slot; | 406 Object* object = *slot; |
407 CHECK(object->IsHeapObject()); | 407 CHECK(object->IsHeapObject()); |
408 CHECK(heap_->InNewSpace(object)); | 408 CHECK(heap_->InNewSpace(object)); |
409 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( | 409 heap_->mark_compact_collector()->VerifyIsSlotInLiveObject( |
410 reinterpret_cast<Address>(slot), HeapObject::cast(object)); | 410 reinterpret_cast<Address>(slot), HeapObject::cast(object)); |
411 } | 411 } |
412 } | 412 } |
413 | 413 |
414 | 414 |
| 415 class FindPointersToNewSpaceVisitor final : public ObjectVisitor { |
| 416 public: |
| 417 FindPointersToNewSpaceVisitor(StoreBuffer* store_buffer, |
| 418 ObjectSlotCallback callback) |
| 419 : store_buffer_(store_buffer), callback_(callback) {} |
| 420 |
| 421 V8_INLINE void VisitPointers(Object** start, Object** end) override { |
| 422 store_buffer_->FindPointersToNewSpaceInRegion( |
| 423 reinterpret_cast<Address>(start), reinterpret_cast<Address>(end), |
| 424 callback_); |
| 425 } |
| 426 |
| 427 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {} |
| 428 |
| 429 private: |
| 430 StoreBuffer* store_buffer_; |
| 431 ObjectSlotCallback callback_; |
| 432 }; |
| 433 |
| 434 |
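The new FindPointersToNewSpaceVisitor carries no scanning logic of its own: every tagged-slot range the object body iterator hands it is forwarded straight to StoreBuffer::FindPointersToNewSpaceInRegion. A minimal standalone sketch of that forwarding shape follows; Object, Address, and ObjectSlotCallback here are simplified stand-ins for illustration, not the V8-internal definitions.

    #include <cstdint>
    #include <cstdio>

    // Simplified stand-ins for the V8-internal types used by the patch.
    using Object = void;
    using Address = uint8_t*;
    using ObjectSlotCallback = void (*)(Object** slot);

    // Models StoreBuffer::FindPointersToNewSpaceInRegion: walk the
    // pointer-sized slots in [start, end) and hand each to the callback.
    static void FindPointersToNewSpaceInRegion(Address start, Address end,
                                               ObjectSlotCallback callback) {
      for (Address slot = start; slot < end; slot += sizeof(Object*)) {
        callback(reinterpret_cast<Object**>(slot));
      }
    }

    // The visitor from the patch, reduced to its essence: it forwards
    // every pointer range it is given to the region scanner.
    struct FindPointersToNewSpaceVisitor {
      explicit FindPointersToNewSpaceVisitor(ObjectSlotCallback callback)
          : callback_(callback) {}

      void VisitPointers(Object** start, Object** end) {
        FindPointersToNewSpaceInRegion(reinterpret_cast<Address>(start),
                                       reinterpret_cast<Address>(end),
                                       callback_);
      }

      ObjectSlotCallback callback_;
    };

    int main() {
      Object* slots[4] = {nullptr, nullptr, nullptr, nullptr};
      FindPointersToNewSpaceVisitor visitor(
          [](Object** slot) { std::printf("visited slot %p\n", (void*)slot); });
      visitor.VisitPointers(slots, slots + 4);
    }

Because the visitor holds only two pointers, it is cheap to construct once per iteration, which is exactly what the patch does inside IteratePointersToNewSpace below.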
415 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { | 435 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) { |
416 // We do not sort or remove duplicated entries from the store buffer because | 436 // We do not sort or remove duplicated entries from the store buffer because |
417 // we expect that the callback will rebuild the store buffer, thus removing | 437 // we expect that the callback will rebuild the store buffer, thus removing |
418 // all duplicates and pointers to old space. | 438 // all duplicates and pointers to old space. |
419 bool some_pages_to_scan = PrepareForIteration(); | 439 bool some_pages_to_scan = PrepareForIteration(); |
420 | 440 |
421 // TODO(gc): we want to skip slots on evacuation candidates | 441 // TODO(gc): we want to skip slots on evacuation candidates |
422 // but we can't simply figure that out from the slot address | 442 // but we can't simply figure that out from the slot address |
423 // because the slot can belong to a large object. | 443 // because the slot can belong to a large object. |
424 IteratePointersInStoreBuffer(slot_callback); | 444 IteratePointersInStoreBuffer(slot_callback); |
425 | 445 |
426 // We are done scanning all the pointers that were in the store buffer, but | 446 // We are done scanning all the pointers that were in the store buffer, but |
427 // there may be some pages marked scan_on_scavenge that have pointers to new | 447 // there may be some pages marked scan_on_scavenge that have pointers to new |
428 // space that are not in the store buffer. We must scan them now. As we | 448 // space that are not in the store buffer. We must scan them now. As we |
429 // scan, the surviving pointers to new space will be added to the store | 449 // scan, the surviving pointers to new space will be added to the store |
430 // buffer. If there are still a lot of pointers to new space then we will | 450 // buffer. If there are still a lot of pointers to new space then we will |
431 // keep the scan_on_scavenge flag on the page and discard the pointers that | 451 // keep the scan_on_scavenge flag on the page and discard the pointers that |
432 // were added to the store buffer. If there are not many pointers to new | 452 // were added to the store buffer. If there are not many pointers to new |
433 // space left on the page we will keep the pointers in the store buffer and | 453 // space left on the page we will keep the pointers in the store buffer and |
434 // remove the flag from the page. | 454 // remove the flag from the page. |
435 if (some_pages_to_scan) { | 455 if (some_pages_to_scan) { |
436 if (callback_ != NULL) { | 456 if (callback_ != NULL) { |
437 (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); | 457 (*callback_)(heap_, NULL, kStoreBufferStartScanningPagesEvent); |
438 } | 458 } |
439 PointerChunkIterator it(heap_); | 459 PointerChunkIterator it(heap_); |
440 MemoryChunk* chunk; | 460 MemoryChunk* chunk; |
| 461 FindPointersToNewSpaceVisitor visitor(this, slot_callback); |
441 while ((chunk = it.next()) != NULL) { | 462 while ((chunk = it.next()) != NULL) { |
442 if (chunk->scan_on_scavenge()) { | 463 if (chunk->scan_on_scavenge()) { |
443 chunk->set_scan_on_scavenge(false); | 464 chunk->set_scan_on_scavenge(false); |
444 if (callback_ != NULL) { | 465 if (callback_ != NULL) { |
445 (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); | 466 (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent); |
446 } | 467 } |
447 if (chunk->owner() == heap_->lo_space()) { | 468 if (chunk->owner() == heap_->lo_space()) { |
448 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); | 469 LargePage* large_page = reinterpret_cast<LargePage*>(chunk); |
449 HeapObject* array = large_page->GetObject(); | 470 HeapObject* array = large_page->GetObject(); |
450 DCHECK(array->IsFixedArray()); | 471 DCHECK(array->IsFixedArray()); |
(...skipping 17 matching lines...)
468 slot_callback); | 489 slot_callback); |
469 } | 490 } |
470 } | 491 } |
471 } else { | 492 } else { |
472 heap_->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted( | 493 heap_->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted( |
473 page); | 494 page); |
474 HeapObjectIterator iterator(page); | 495 HeapObjectIterator iterator(page); |
475 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; | 496 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; |
476 heap_object = iterator.Next()) { | 497 heap_object = iterator.Next()) { |
477 // We iterate over objects that contain new space pointers only. | 498 // We iterate over objects that contain new space pointers only. |
478 Address obj_address = heap_object->address(); | 499 heap_object->IterateBody(&visitor); |
479 const int start_offset = HeapObject::kHeaderSize; | |
480 const int end_offset = heap_object->Size(); | |
481 | |
482 switch (heap_object->ContentType()) { | |
483 case HeapObjectContents::kTaggedValues: { | |
484 Address start_address = obj_address + start_offset; | |
485 Address end_address = obj_address + end_offset; | |
486 // Object has only tagged fields. | |
487 FindPointersToNewSpaceInRegion(start_address, end_address, | |
488 slot_callback); | |
489 break; | |
490 } | |
491 | |
492 case HeapObjectContents::kMixedValues: { | |
493 if (heap_object->IsFixedTypedArrayBase()) { | |
494 FindPointersToNewSpaceInRegion( | |
495 obj_address + FixedTypedArrayBase::kBasePointerOffset, | |
496 obj_address + FixedTypedArrayBase::kHeaderSize, | |
497 slot_callback); | |
498 } else if (heap_object->IsBytecodeArray()) { | |
499 FindPointersToNewSpaceInRegion( | |
500 obj_address + BytecodeArray::kConstantPoolOffset, | |
501 obj_address + BytecodeArray::kHeaderSize, | |
502 slot_callback); | |
503 } else if (heap_object->IsJSArrayBuffer()) { | |
504 FindPointersToNewSpaceInRegion( | |
505 obj_address + | |
506 JSArrayBuffer::BodyDescriptor::kStartOffset, | |
507 obj_address + JSArrayBuffer::kByteLengthOffset + | |
508 kPointerSize, | |
509 slot_callback); | |
510 FindPointersToNewSpaceInRegion( | |
511 obj_address + JSArrayBuffer::kSize, | |
512 obj_address + JSArrayBuffer::kSizeWithInternalFields, | |
513 slot_callback); | |
514 } else if (FLAG_unbox_double_fields) { | |
515 LayoutDescriptorHelper helper(heap_object->map()); | |
516 DCHECK(!helper.all_fields_tagged()); | |
517 for (int offset = start_offset; offset < end_offset;) { | |
518 int end_of_region_offset; | |
519 if (helper.IsTagged(offset, end_offset, | |
520 &end_of_region_offset)) { | |
521 FindPointersToNewSpaceInRegion( | |
522 obj_address + offset, | |
523 obj_address + end_of_region_offset, slot_callback); | |
524 } | |
525 offset = end_of_region_offset; | |
526 } | |
527 } else { | |
528 UNREACHABLE(); | |
529 } | |
530 break; | |
531 } | |
532 | |
533 case HeapObjectContents::kRawValues: | |
534 break; | |
535 } | |
536 } | 500 } |
537 } | 501 } |
538 } | 502 } |
539 } | 503 } |
540 } | 504 } |
541 if (callback_ != NULL) { | 505 if (callback_ != NULL) { |
542 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); | 506 (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent); |
543 } | 507 } |
544 } | 508 } |
545 } | 509 } |
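The net effect of the patch is visible in the loop above: where the old code switched on HeapObjectContents and recomputed each object's tagged regions inline (fixed typed arrays, bytecode arrays, JS array buffers, unboxed double fields), the new code calls heap_object->IterateBody(&visitor) and lets each object report its own tagged regions. A sketch of that double-dispatch shape under simplified assumptions; the types below are illustrative stand-ins, not V8's actual body descriptors.

    #include <cstdio>
    #include <vector>

    // Stand-ins, not the V8 API: a visitor interface and two object kinds
    // whose bodies expose different tagged-slot layouts.
    struct Visitor {
      virtual void VisitPointers(void** start, void** end) = 0;
      virtual ~Visitor() = default;
    };

    struct HeapObject {
      // Each object knows its own layout; scanners need no type switch.
      virtual void IterateBody(Visitor* v) = 0;
      virtual ~HeapObject() = default;
    };

    struct AllTaggedObject : HeapObject {
      void* fields[3] = {};
      void IterateBody(Visitor* v) override {
        v->VisitPointers(fields, fields + 3);  // whole body is tagged
      }
    };

    struct MixedObject : HeapObject {
      void* tagged = nullptr;
      double raw = 0.0;  // raw data the scanner must skip
      void IterateBody(Visitor* v) override {
        v->VisitPointers(&tagged, &tagged + 1);  // only the tagged slot
      }
    };

    struct CountingVisitor : Visitor {
      int slots = 0;
      void VisitPointers(void** start, void** end) override {
        slots += static_cast<int>(end - start);
      }
    };

    int main() {
      std::vector<HeapObject*> page = {new AllTaggedObject, new MixedObject};
      CountingVisitor visitor;
      // Replaces the old ContentType switch: dispatch through the object.
      for (HeapObject* obj : page) obj->IterateBody(&visitor);
      std::printf("tagged slots visited: %d\n", visitor.slots);
      for (HeapObject* obj : page) delete obj;
    }

This is the usual visitor trade: layout knowledge stays with the object classes, so adding a new mixed-layout type no longer requires touching the store buffer.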
(...skipping 95 matching lines...)
641 DCHECK(start_of_current_page_ != store_buffer_->Top()); | 605 DCHECK(start_of_current_page_ != store_buffer_->Top()); |
642 store_buffer_->SetTop(start_of_current_page_); | 606 store_buffer_->SetTop(start_of_current_page_); |
643 } | 607 } |
644 } else { | 608 } else { |
645 UNREACHABLE(); | 609 UNREACHABLE(); |
646 } | 610 } |
647 } | 611 } |
648 | 612 |
649 } // namespace internal | 613 } // namespace internal |
650 } // namespace v8 | 614 } // namespace v8 |