Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 1221643004: Directly remove slot buffer entries in deoptimized code objects. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 5 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
(...skipping 540 matching lines...)
     // to only refill them for the old space.
     return;
   }

   intptr_t freed_bytes = space->free_list()->Concatenate(free_list);
   space->AddToAccountingStats(freed_bytes);
   space->DecrementUnsweptFreeBytes(freed_bytes);
 }


-void Marking::SetAllMarkBitsInRange(MarkBit start, MarkBit end) {
-  MarkBit::CellType* start_cell = start.cell();
-  MarkBit::CellType* end_cell = end.cell();
-  MarkBit::CellType start_mask = ~(start.mask() - 1);
-  MarkBit::CellType end_mask = (end.mask() << 1) - 1;
-
-  if (start_cell == end_cell) {
-    *start_cell |= start_mask & end_mask;
-  } else {
-    *start_cell |= start_mask;
-    for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) {
-      *cell = ~0;
-    }
-    *end_cell |= end_mask;
-  }
-}
-
-
-void Marking::ClearAllMarkBitsOfCellsContainedInRange(MarkBit start,
-                                                      MarkBit end) {
-  MarkBit::CellType* start_cell = start.cell();
-  MarkBit::CellType* end_cell = end.cell();
-  for (MarkBit::CellType* cell = start_cell; cell <= end_cell; cell++) {
-    *cell = 0;
-  }
-}
-
-
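Note: the deleted SetAllMarkBitsInRange relies on two mask identities. Within
a cell, ~(start.mask() - 1) keeps the start bit and every bit above it, while
(end.mask() << 1) - 1 keeps the end bit and every bit below it, so their
intersection is exactly the bits from start through end. A minimal standalone
sketch of that arithmetic, assuming uint32_t as a stand-in for
MarkBit::CellType:

  #include <cstdint>
  #include <cstdio>

  int main() {
    uint32_t start_bit = uint32_t{1} << 3;         // mark bit at index 3
    uint32_t end_bit = uint32_t{1} << 5;           // mark bit at index 5
    uint32_t start_mask = ~(start_bit - 1);        // 0xfffffff8: bits 3..31
    uint32_t end_mask = (end_bit << 1) - 1;        // 0x0000003f: bits 0..5
    std::printf("%08x\n", start_mask & end_mask);  // prints 00000038: bits 3..5
    return 0;
  }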
 void Marking::TransferMark(Address old_start, Address new_start) {
   // This is only used when resizing an object.
   DCHECK(MemoryChunk::FromAddress(old_start) ==
          MemoryChunk::FromAddress(new_start));

   if (!heap_->incremental_marking()->IsMarking()) return;

   // If the mark doesn't move, we don't check the color of the object.
   // It doesn't matter whether the object is black, since it hasn't changed
   // size, so the adjustment to the live data count will be zero anyway.
(...skipping 172 matching lines...)
   if (compacting_) {
     int npages = evacuation_candidates_.length();
     for (int i = 0; i < npages; i++) {
       Page* p = evacuation_candidates_[i];
       slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address());
       p->ClearEvacuationCandidate();
       p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
     }
     compacting_ = false;
     evacuation_candidates_.Rewind(0);
-    invalidated_code_.Rewind(0);
   }
   DCHECK_EQ(0, evacuation_candidates_.length());
 }


 void MarkCompactCollector::Prepare() {
   was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();

 #ifdef DEBUG
   DCHECK(state_ == IDLE);
(...skipping 2443 matching lines...)
   // The target object has to be black.
   CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));

   // The target object is black but we don't know if the source slot is black.
   // The source object could have died and the slot could be part of a free
   // space. Use the mark bit iterator to find out about liveness of the slot.
   CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
 }


+void MarkCompactCollector::RemoveObjectSlots(HeapObject* invalid_object) {
+  // Remove entries by replacing them with an old-space slot containing a smi
+  // that is located in an unmovable page.
+  int npages = evacuation_candidates_.length();
+  for (int i = 0; i < npages; i++) {
+    Page* p = evacuation_candidates_[i];
+    DCHECK(p->IsEvacuationCandidate() ||
+           p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
+    if (p->IsEvacuationCandidate()) {
+      SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), invalid_object);
+    }
+  }
+}
+
+
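This new collector entry point walks every evacuation candidate page and
scrubs any recorded slot that lies inside invalid_object, so stale entries
never survive to the pointer-update phase. A hypothetical call site, sketched
only to show the intended use (OnCodeDeoptimized is an illustrative name, not
V8 API; per the issue title, the real trigger is code deoptimization):

  // Sketch: when an optimized Code object is invalidated while compaction
  // is set up, scrub recorded slots pointing into it immediately instead of
  // filtering them out later during slot updating.
  void OnCodeDeoptimized(MarkCompactCollector* collector, Code* code) {
    collector->RemoveObjectSlots(code);
  }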
 void MarkCompactCollector::EvacuateNewSpace() {
   // There are soft limits in the allocation code, designed to trigger a mark
   // sweep collection by failing allocations. But since we are already in
   // a mark-sweep allocation, there is no sense in trying to trigger one.
   AlwaysAllocateScope scope(isolate());

   NewSpace* new_space = heap()->new_space();

   // Store allocation range before flipping semispaces.
   Address from_bottom = new_space->bottom();
(...skipping 296 matching lines...)
     // When concurrent sweeping is active, the page will be marked after
     // sweeping by the main thread.
     p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE);
   } else {
     p->SetWasSwept();
   }
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }


-static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) {
-  Page* p = Page::FromAddress(code->address());
-
-  if (p->IsEvacuationCandidate() || p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
-    return false;
-  }
-
-  Address code_start = code->address();
-  Address code_end = code_start + code->Size();
-
-  uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start);
-  uint32_t end_index =
-      MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize);
-
-  // TODO(hpayer): Filter out invalidated code in
-  // ClearInvalidSlotsBufferEntries.
-  Bitmap* b = p->markbits();
-
-  MarkBit start_mark_bit = b->MarkBitFromIndex(start_index);
-  MarkBit end_mark_bit = b->MarkBitFromIndex(end_index);
-
-  if (value) {
-    Marking::SetAllMarkBitsInRange(start_mark_bit, end_mark_bit);
-  } else {
-    Marking::ClearAllMarkBitsOfCellsContainedInRange(start_mark_bit,
-                                                     end_mark_bit);
-  }
-
-  return true;
-}
-
-
-static bool IsOnInvalidatedCodeObject(Address addr) {
-  // We did not record any slots in large objects thus
-  // we can safely go to the page from the slot address.
-  Page* p = Page::FromAddress(addr);
-
-  // First check owner's identity because old space is swept concurrently or
-  // lazily and might still have non-zero mark-bits on some pages.
-  if (p->owner()->identity() != CODE_SPACE) return false;
-
-  // In code space only bits on evacuation candidates (but we don't record
-  // any slots on them) and under invalidated code objects are non-zero.
-  MarkBit mark_bit =
-      p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr));
-
-  return Marking::IsBlackOrGrey(mark_bit);
-}
-
-
-void MarkCompactCollector::InvalidateCode(Code* code) {
-  if (heap_->incremental_marking()->IsCompacting() &&
-      !ShouldSkipEvacuationSlotRecording(code)) {
-    DCHECK(compacting_);
-
-    // If the object is white then no slots were recorded on it yet.
-    MarkBit mark_bit = Marking::MarkBitFrom(code);
-    if (Marking::IsWhite(mark_bit)) return;
-
-    invalidated_code_.Add(code);
-  }
-}
-
-
 // Return true if the given code is deoptimized or will be deoptimized.
 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
   return code->is_optimized_code() && code->marked_for_deoptimization();
 }


-bool MarkCompactCollector::MarkInvalidatedCode() {
-  bool code_marked = false;
-
-  int length = invalidated_code_.length();
-  for (int i = 0; i < length; i++) {
-    Code* code = invalidated_code_[i];
-
-    if (SetMarkBitsUnderInvalidatedCode(code, true)) {
-      code_marked = true;
-    }
-  }
-
-  return code_marked;
-}
-
-
-void MarkCompactCollector::RemoveDeadInvalidatedCode() {
-  int length = invalidated_code_.length();
-  for (int i = 0; i < length; i++) {
-    if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL;
-  }
-}
-
-
-void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
-  int length = invalidated_code_.length();
-  for (int i = 0; i < length; i++) {
-    Code* code = invalidated_code_[i];
-    if (code != NULL) {
-      code->Iterate(visitor);
-      SetMarkBitsUnderInvalidatedCode(code, false);
-    }
-  }
-  invalidated_code_.Rewind(0);
-}
-
-
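Taken together, the deletions above retire the whole invalidated-code
filtering pipeline. Reconstructed from this patch, the old flow was roughly:

  // Old flow (removed by this patch):
  //   EvacuateNewSpaceAndCandidates()
  //     MarkInvalidatedCode()            // set mark bits under invalidated code
  //     UpdateSlotsRecordedIn(..., code_slots_filtering_required)
  //       UpdateSlotsWithFilter()        // skip slots on invalidated code
  //     ProcessInvalidatedCode(visitor)  // revisit code, then clear mark bits
  //   SweepSpaces()
  //     RemoveDeadInvalidatedCode()      // drop entries that died during GC
  //
  // New flow: slots inside a deoptimized code object are scrubbed eagerly by
  // MarkCompactCollector::RemoveObjectSlots(), so no filtering pass is needed.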
 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   Heap::RelocationLock relocation_lock(heap());

-  bool code_slots_filtering_required;
   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_SWEEP_NEWSPACE);
-    code_slots_filtering_required = MarkInvalidatedCode();
     EvacuationScope evacuation_scope(this);
     EvacuateNewSpace();
   }

   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_EVACUATE_PAGES);
     EvacuationScope evacuation_scope(this);
     EvacuatePages();
   }
(...skipping 26 matching lines...)
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
     StoreBufferRebuildScope scope(heap_, heap_->store_buffer(),
                                   &Heap::ScavengeStoreBufferCallback);
     heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
   }

   {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
-    SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_,
-                                       code_slots_filtering_required);
+    SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_);
     if (FLAG_trace_fragmentation_verbose) {
       PrintF("  migration slots buffer: %d\n",
              SlotsBuffer::SizeOfChain(migration_slots_buffer_));
     }

     if (compacting_ && was_marked_incrementally_) {
       // It's difficult to filter out slots recorded for large objects.
       LargeObjectIterator it(heap_->lo_space());
       for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
         // LargeObjectSpace is not swept yet thus we have to skip
(...skipping 13 matching lines...)
   {
     GCTracer::Scope gc_scope(
         heap()->tracer(),
         GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
     for (int i = 0; i < npages; i++) {
       Page* p = evacuation_candidates_[i];
       DCHECK(p->IsEvacuationCandidate() ||
              p->IsFlagSet(Page::RESCAN_ON_EVACUATION));

       if (p->IsEvacuationCandidate()) {
-        SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer(),
-                                           code_slots_filtering_required);
+        SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer());
         if (FLAG_trace_fragmentation_verbose) {
           PrintF("  page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
                  SlotsBuffer::SizeOfChain(p->slots_buffer()));
         }

         // Important: skip list should be cleared only after roots were updated
         // because root iteration traverses the stack and might have to find
         // code objects from non-updated pc pointing into evacuation candidate.
         SkipList* list = p->skip_list();
         if (list != NULL) list->Clear();
(...skipping 35 matching lines...)

   heap_->string_table()->Iterate(&updating_visitor);

   // Update pointers from external string table.
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);

   EvacuationWeakObjectRetainer evacuation_object_retainer;
   heap()->ProcessAllWeakReferences(&evacuation_object_retainer);

-  // Visit invalidated code (we ignored all slots on it) and clear mark-bits
-  // under it.
-  ProcessInvalidatedCode(&updating_visitor);
-
   heap_->isolate()->inner_pointer_to_code_cache()->Flush();

   slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
   DCHECK(migration_slots_buffer_ == NULL);

   // The hashing of weak_object_to_code_table is no longer valid.
   heap()->weak_object_to_code_table()->Rehash(
       heap()->isolate()->factory()->undefined_value());
 }

(...skipping 569 matching lines...)
   // non-live objects.
   {
     GCTracer::Scope sweep_scope(heap()->tracer(),
                                 GCTracer::Scope::MC_SWEEP_OLDSPACE);
     { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); }
     sweeping_in_progress_ = true;
     if (heap()->concurrent_sweeping_enabled()) {
       StartSweeperThreads();
     }
   }
-  RemoveDeadInvalidatedCode();
-
   {
     GCTracer::Scope sweep_scope(heap()->tracer(),
                                 GCTracer::Scope::MC_SWEEP_CODE);
     SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING);
   }

   EvacuateNewSpaceAndCandidates();

   heap()->FreeDeadArrayBuffers(false);

(...skipping 139 matching lines...)
       } else {
         ++slot_idx;
         DCHECK(slot_idx < slots_count);
       }
     }
     buffer = buffer->next();
   }
 }


+void SlotsBuffer::RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer,
+                                    HeapObject* invalid_object) {
+  // Remove entries by replacing them with an old-space slot containing a smi
+  // that is located in an unmovable page.
+  const ObjectSlot kRemovedEntry = HeapObject::RawField(
+      heap->empty_fixed_array(), FixedArrayBase::kLengthOffset);
+  DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry))
+             ->NeverEvacuate());
+
+  while (buffer != NULL) {
+    SlotsBuffer::ObjectSlot* slots = buffer->slots_;
+    intptr_t slots_count = buffer->idx_;
+    bool is_typed_slot = false;
+
+    for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
+      ObjectSlot slot = slots[slot_idx];
+      if (!IsTypedSlot(slot)) {
+        Address slot_address = reinterpret_cast<Address>(slot);
+        if (slot_address >= invalid_object->address() &&
+            slot_address <
+                (invalid_object->address() + invalid_object->Size())) {
+          slots[slot_idx] = kRemovedEntry;
+          if (is_typed_slot) {
+            slots[slot_idx - 1] = kRemovedEntry;
+          }
+        }
+        is_typed_slot = false;
+      } else {
+        is_typed_slot = true;
+        DCHECK(slot_idx < slots_count);
+      }
+    }
+    buffer = buffer->next();
+  }
+}
+
+
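The scrub works by overwriting, not compacting: a matching entry is redirected
to kRemovedEntry, the length field of the empty fixed array, which always
holds a smi and lives on a page that is never evacuated, so the later update
pass dereferences it harmlessly. Typed slots occupy two consecutive entries
(type marker, then address), and both halves are replaced together. A
simplified standalone model of that pairing logic, using a negative value as a
stand-in for the type marker (hypothetical encoding, illustration only):

  #include <cstddef>
  #include <cstdint>
  #include <vector>

  // Overwrite every entry that falls in [begin, end) with `removed`. A
  // negative entry models a type marker whose payload is the following
  // entry, so the marker/payload pair is scrubbed as a unit.
  void ScrubRange(std::vector<intptr_t>* slots, intptr_t begin, intptr_t end,
                  intptr_t removed) {
    bool is_typed_slot = false;
    for (size_t i = 0; i < slots->size(); ++i) {
      intptr_t entry = (*slots)[i];
      if (entry < 0) {  // type marker: payload comes next
        is_typed_slot = true;
        continue;
      }
      if (entry >= begin && entry < end) {
        (*slots)[i] = removed;
        if (is_typed_slot) (*slots)[i - 1] = removed;  // drop the marker too
      }
      is_typed_slot = false;
    }
  }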
 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
   while (buffer != NULL) {
     SlotsBuffer::ObjectSlot* slots = buffer->slots_;
     intptr_t slots_count = buffer->idx_;

     for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
       ObjectSlot slot = slots[slot_idx];
       if (!IsTypedSlot(slot)) {
         Object* object = *slot;
         if (object->IsHeapObject()) {
(...skipping 123 matching lines...)
     } else {
       ++slot_idx;
       DCHECK(slot_idx < idx_);
       UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot),
                  reinterpret_cast<Address>(slots_[slot_idx]));
     }
   }
 }


-void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) {
-  PointersUpdatingVisitor v(heap);
-
-  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {
-    ObjectSlot slot = slots_[slot_idx];
-    if (!IsTypedSlot(slot)) {
-      if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) {
-        PointersUpdatingVisitor::UpdateSlot(heap, slot);
-      }
-    } else {
-      ++slot_idx;
-      DCHECK(slot_idx < idx_);
-      Address pc = reinterpret_cast<Address>(slots_[slot_idx]);
-      if (!IsOnInvalidatedCodeObject(pc)) {
-        UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot),
-                   reinterpret_cast<Address>(slots_[slot_idx]));
-      }
-    }
-  }
-}
-
-
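With slots into dead code now scrubbed at invalidation time, this filtered
update path became dead weight: UpdateSlotsRecordedIn loses its
code_slots_filtering_required parameter, every buffer takes the plain
UpdateSlots path, and the per-slot IsOnInvalidatedCodeObject page and mark-bit
probe disappears from the pointer-update loop.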
 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) {
   return new SlotsBuffer(next_buffer);
 }


 void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) {
   delete buffer;
 }


 void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) {
   SlotsBuffer* buffer = *buffer_address;
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }
 }  // namespace internal
 }  // namespace v8
