| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 275 matching lines...) |
| 286 while (it.has_next()) { | 286 while (it.has_next()) { |
| 287 Page* p = it.next(); | 287 Page* p = it.next(); |
| 288 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); | 288 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); |
| 289 } | 289 } |
| 290 } | 290 } |
| 291 | 291 |
| 292 | 292 |
| 293 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { | 293 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { |
| 294 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); | 294 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); |
| 295 | 295 |
| 296 RemoveDeoptimizedCodeSlots(); | |
| 297 | |
| 298 ClearInvalidSlotsBufferEntries(heap_->old_space()); | 296 ClearInvalidSlotsBufferEntries(heap_->old_space()); |
| 299 ClearInvalidSlotsBufferEntries(heap_->code_space()); | 297 ClearInvalidSlotsBufferEntries(heap_->code_space()); |
| 300 ClearInvalidSlotsBufferEntries(heap_->map_space()); | 298 ClearInvalidSlotsBufferEntries(heap_->map_space()); |
| 301 | 299 |
| 302 LargeObjectIterator it(heap_->lo_space()); | 300 LargeObjectIterator it(heap_->lo_space()); |
| 303 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 301 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| 304 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 302 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); |
| 305 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); | 303 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); |
| 306 } | 304 } |
| 307 } | 305 } |
| (...skipping 454 matching lines...) |
| 762 if (compacting_) { | 760 if (compacting_) { |
| 763 int npages = evacuation_candidates_.length(); | 761 int npages = evacuation_candidates_.length(); |
| 764 for (int i = 0; i < npages; i++) { | 762 for (int i = 0; i < npages; i++) { |
| 765 Page* p = evacuation_candidates_[i]; | 763 Page* p = evacuation_candidates_[i]; |
| 766 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); | 764 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); |
| 767 p->ClearEvacuationCandidate(); | 765 p->ClearEvacuationCandidate(); |
| 768 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); | 766 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
| 769 } | 767 } |
| 770 compacting_ = false; | 768 compacting_ = false; |
| 771 evacuation_candidates_.Rewind(0); | 769 evacuation_candidates_.Rewind(0); |
| 772 invalidated_code_.Rewind(0); | |
| 773 } | 770 } |
| 774 DCHECK_EQ(0, evacuation_candidates_.length()); | 771 DCHECK_EQ(0, evacuation_candidates_.length()); |
| 775 } | 772 } |
| 776 | 773 |
| 777 | 774 |
| 778 void MarkCompactCollector::Prepare() { | 775 void MarkCompactCollector::Prepare() { |
| 779 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); | 776 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); |
| 780 | 777 |
| 781 #ifdef DEBUG | 778 #ifdef DEBUG |
| 782 DCHECK(state_ == IDLE); | 779 DCHECK(state_ == IDLE); |
| (...skipping 2803 matching lines...) |
| 3586 | 3583 |
| 3587 void MarkCompactCollector::InvalidateCode(Code* code) { | 3584 void MarkCompactCollector::InvalidateCode(Code* code) { |
| 3588 if (heap_->incremental_marking()->IsCompacting() && | 3585 if (heap_->incremental_marking()->IsCompacting() && |
| 3589 !ShouldSkipEvacuationSlotRecording(code)) { | 3586 !ShouldSkipEvacuationSlotRecording(code)) { |
| 3590 DCHECK(compacting_); | 3587 DCHECK(compacting_); |
| 3591 | 3588 |
| 3592 // If the object is white then no slots were recorded on it yet. | 3589 // If the object is white then no slots were recorded on it yet. |
| 3593 MarkBit mark_bit = Marking::MarkBitFrom(code); | 3590 MarkBit mark_bit = Marking::MarkBitFrom(code); |
| 3594 if (Marking::IsWhite(mark_bit)) return; | 3591 if (Marking::IsWhite(mark_bit)) return; |
| 3595 | 3592 |
| 3596 invalidated_code_.Add(code); | 3593 // Ignore all slots that might have been recorded in the body of the |
| | 3594 // deoptimized code object. Assumption: no slots will be recorded for |
| | 3595 // this object after invalidating it. |
| | 3596 RemoveObjectSlots(code->instruction_start(), |
| | 3597 code->address() + code->Size()); |
| 3597 } | 3598 } |
| 3598 } | 3599 } |
| 3599 | 3600 |
| 3600 | 3601 |
| 3601 // Return true if the given code is deoptimized or will be deoptimized. | 3602 // Return true if the given code is deoptimized or will be deoptimized. |
| 3602 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3603 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
| 3603 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3604 return code->is_optimized_code() && code->marked_for_deoptimization(); |
| 3604 } | 3605 } |
| 3605 | 3606 |
| 3606 | 3607 |
| 3607 void MarkCompactCollector::RemoveDeoptimizedCodeSlots() { | |
| 3608 int length = invalidated_code_.length(); | |
| 3609 for (int i = 0; i < length; i++) { | |
| 3610 Code* code = invalidated_code_[i]; | |
| 3611 Page* p = Page::FromAddress(code->address()); | |
| 3612 if (!p->IsEvacuationCandidate() && | |
| 3613 !p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | |
| 3614 // Ignore all slots that might have been recorded in the body of the | |
| 3615 // deoptimized code object. | |
| 3616 RemoveObjectSlots(code->instruction_start(), | |
| 3617 code->address() + code->Size()); | |
| 3618 } | |
| 3619 } | |
| 3620 } | |
| 3621 | |
| 3622 | |
| 3623 void MarkCompactCollector::RemoveDeadInvalidatedCode() { | |
| 3624 int length = invalidated_code_.length(); | |
| 3625 for (int i = 0; i < length; i++) { | |
| 3626 if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL; | |
| 3627 } | |
| 3628 } | |
| 3629 | |
| 3630 | |
| 3631 void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) { | |
| 3632 int length = invalidated_code_.length(); | |
| 3633 for (int i = 0; i < length; i++) { | |
| 3634 Code* code = invalidated_code_[i]; | |
| 3635 if (code != NULL) { | |
| 3636 code->Iterate(visitor); | |
| 3637 } | |
| 3638 } | |
| 3639 invalidated_code_.Rewind(0); | |
| 3640 } | |
| 3641 | |
| 3642 | |
| 3643 void MarkCompactCollector::RemoveObjectSlots(Address start_slot, | 3608 void MarkCompactCollector::RemoveObjectSlots(Address start_slot, |
| 3644 Address end_slot) { | 3609 Address end_slot) { |
| 3645 // Remove entries by replacing them with an old-space slot containing a smi | 3610 // Remove entries by replacing them with an old-space slot containing a smi |
| 3646 // that is located in an unmovable page. | 3611 // that is located in an unmovable page. |
| 3647 int npages = evacuation_candidates_.length(); | 3612 int npages = evacuation_candidates_.length(); |
| 3648 for (int i = 0; i < npages; i++) { | 3613 for (int i = 0; i < npages; i++) { |
| 3649 Page* p = evacuation_candidates_[i]; | 3614 Page* p = evacuation_candidates_[i]; |
| 3650 DCHECK(p->IsEvacuationCandidate() || | 3615 DCHECK(p->IsEvacuationCandidate() || |
| 3651 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3616 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
| 3652 if (p->IsEvacuationCandidate()) { | 3617 if (p->IsEvacuationCandidate()) { |
| (...skipping 141 matching lines...) |
| 3794 | 3759 |
| 3795 heap_->string_table()->Iterate(&updating_visitor); | 3760 heap_->string_table()->Iterate(&updating_visitor); |
| 3796 | 3761 |
| 3797 // Update pointers from external string table. | 3762 // Update pointers from external string table. |
| 3798 heap_->UpdateReferencesInExternalStringTable( | 3763 heap_->UpdateReferencesInExternalStringTable( |
| 3799 &UpdateReferenceInExternalStringTableEntry); | 3764 &UpdateReferenceInExternalStringTableEntry); |
| 3800 | 3765 |
| 3801 EvacuationWeakObjectRetainer evacuation_object_retainer; | 3766 EvacuationWeakObjectRetainer evacuation_object_retainer; |
| 3802 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); | 3767 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); |
| 3803 | 3768 |
| 3804 // Visit invalidated code (we ignored all slots on it) and clear mark-bits | |
| 3805 // under it. | |
| 3806 ProcessInvalidatedCode(&updating_visitor); | |
| 3807 | |
| 3808 heap_->isolate()->inner_pointer_to_code_cache()->Flush(); | 3769 heap_->isolate()->inner_pointer_to_code_cache()->Flush(); |
| 3809 | 3770 |
| 3810 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); | 3771 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); |
| 3811 DCHECK(migration_slots_buffer_ == NULL); | 3772 DCHECK(migration_slots_buffer_ == NULL); |
| 3812 | 3773 |
| 3813 // The hashing of weak_object_to_code_table is no longer valid. | 3774 // The hashing of weak_object_to_code_table is no longer valid. |
| 3814 heap()->weak_object_to_code_table()->Rehash( | 3775 heap()->weak_object_to_code_table()->Rehash( |
| 3815 heap()->isolate()->factory()->undefined_value()); | 3776 heap()->isolate()->factory()->undefined_value()); |
| 3816 } | 3777 } |
| 3817 | 3778 |
| (...skipping 609 matching lines...) |
| 4427 GCTracer::Scope::MC_SWEEP_CODE); | 4388 GCTracer::Scope::MC_SWEEP_CODE); |
| 4428 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING); | 4389 SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING); |
| 4429 } | 4390 } |
| 4430 | 4391 |
| 4431 sweeping_in_progress_ = true; | 4392 sweeping_in_progress_ = true; |
| 4432 if (heap()->concurrent_sweeping_enabled()) { | 4393 if (heap()->concurrent_sweeping_enabled()) { |
| 4433 StartSweeperThreads(); | 4394 StartSweeperThreads(); |
| 4434 } | 4395 } |
| 4435 } | 4396 } |
| 4436 | 4397 |
| 4437 RemoveDeadInvalidatedCode(); | |
| 4438 | |
| 4439 EvacuateNewSpaceAndCandidates(); | 4398 EvacuateNewSpaceAndCandidates(); |
| 4440 | 4399 |
| 4441 heap()->FreeDeadArrayBuffers(false); | 4400 heap()->FreeDeadArrayBuffers(false); |
| 4442 | 4401 |
| 4443 // ClearNonLiveReferences depends on precise sweeping of map space to | 4402 // ClearNonLiveReferences depends on precise sweeping of map space to |
| 4444 // detect whether unmarked map became dead in this collection or in one | 4403 // detect whether unmarked map became dead in this collection or in one |
| 4445 // of the previous ones. | 4404 // of the previous ones. |
| 4446 { | 4405 { |
| 4447 GCTracer::Scope sweep_scope(heap()->tracer(), | 4406 GCTracer::Scope sweep_scope(heap()->tracer(), |
| 4448 GCTracer::Scope::MC_SWEEP_MAP); | 4407 GCTracer::Scope::MC_SWEEP_MAP); |
| (...skipping 351 matching lines...) |
| 4800 SlotsBuffer* buffer = *buffer_address; | 4759 SlotsBuffer* buffer = *buffer_address; |
| 4801 while (buffer != NULL) { | 4760 while (buffer != NULL) { |
| 4802 SlotsBuffer* next_buffer = buffer->next(); | 4761 SlotsBuffer* next_buffer = buffer->next(); |
| 4803 DeallocateBuffer(buffer); | 4762 DeallocateBuffer(buffer); |
| 4804 buffer = next_buffer; | 4763 buffer = next_buffer; |
| 4805 } | 4764 } |
| 4806 *buffer_address = NULL; | 4765 *buffer_address = NULL; |
| 4807 } | 4766 } |
| 4808 } // namespace internal | 4767 } // namespace internal |
| 4809 } // namespace v8 | 4768 } // namespace v8 |
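As a rough illustration of the approach the new code takes in InvalidateCode and RemoveObjectSlots (clearing recorded slots that fall inside a deoptimized code object's body immediately, rather than tracking the object in invalidated_code_ and filtering later), here is a minimal standalone C++ sketch. SlotsBufferSketch, Record, RemoveSlotsInRange, and the zero sentinel are hypothetical stand-ins, not V8's real SlotsBuffer API; in V8 the removed entries are instead redirected to a smi-holding slot on an unmovable old-space page.

```cpp
// Minimal sketch (not V8 code) of range-based slot removal: recorded slots
// whose addresses fall inside the invalidated code object's body are
// overwritten with a harmless sentinel, so later pointer-updating passes
// effectively skip them.
#include <cstdint>
#include <vector>

using Address = std::uintptr_t;

struct SlotsBufferSketch {
  std::vector<Address> slots_;

  // Hypothetical sentinel standing in for V8's "smi slot on an unmovable
  // page"; processing a sentinel slot is a no-op.
  static constexpr Address kRemovedSlotSentinel = 0;

  void Record(Address slot) { slots_.push_back(slot); }

  // Invalidate every recorded slot that lies in [start, end).
  void RemoveSlotsInRange(Address start, Address end) {
    for (Address& slot : slots_) {
      if (slot >= start && slot < end) slot = kRemovedSlotSentinel;
    }
  }
};

int main() {
  SlotsBufferSketch buffer;
  // Pretend a code object occupies [0x1000, 0x2000) and had two slots
  // recorded in its body before it was marked for deoptimization.
  buffer.Record(0x1008);
  buffer.Record(0x3010);  // Unrelated slot; must survive.
  buffer.Record(0x1ff8);

  buffer.RemoveSlotsInRange(0x1000, 0x2000);
  // Only the slot at 0x3010 remains meaningful; the other two are sentinels.
  return 0;
}
```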