OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 1544 matching lines...) | |
1555 RememberedSet<OLD_TO_OLD>::InsertTyped(Page::FromAddress(code_entry_slot), | 1555 RememberedSet<OLD_TO_OLD>::InsertTyped(Page::FromAddress(code_entry_slot), |
1556 nullptr, CODE_ENTRY_SLOT, | 1556 nullptr, CODE_ENTRY_SLOT, |
1557 code_entry_slot); | 1557 code_entry_slot); |
1558 } | 1558 } |
1559 } | 1559 } |
1560 | 1560 |
1561 inline void VisitCodeTarget(RelocInfo* rinfo) final { | 1561 inline void VisitCodeTarget(RelocInfo* rinfo) final { |
1562 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode())); | 1562 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode())); |
1563 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 1563 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
1564 Code* host = rinfo->host(); | 1564 Code* host = rinfo->host(); |
1565 // target is always in old space, we don't have to record the slot in the | |
ulan 2016/06/10 11:09:34
Nit: since it is a sentence, let's start it with a capital letter.
1566 // old-to-new remembered set. | |
1567 DCHECK(!collector_->heap()->InNewSpace(target)); | |
1565 collector_->RecordRelocSlot(host, rinfo, target); | 1568 collector_->RecordRelocSlot(host, rinfo, target); |
1566 } | 1569 } |
1567 | 1570 |
1568 inline void VisitDebugTarget(RelocInfo* rinfo) final { | 1571 inline void VisitDebugTarget(RelocInfo* rinfo) final { |
1569 DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 1572 DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
1570 rinfo->IsPatchedDebugBreakSlotSequence()); | 1573 rinfo->IsPatchedDebugBreakSlotSequence()); |
1571 Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address()); | 1574 Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address()); |
1572 Code* host = rinfo->host(); | 1575 Code* host = rinfo->host(); |
1576 // target is always in old space, we don't have to record the slot in the | |
1577 // old-to-new remembered set. | |
1578 DCHECK(!collector_->heap()->InNewSpace(target)); | |
1573 collector_->RecordRelocSlot(host, rinfo, target); | 1579 collector_->RecordRelocSlot(host, rinfo, target); |
1574 } | 1580 } |
1575 | 1581 |
1576 inline void VisitEmbeddedPointer(RelocInfo* rinfo) final { | 1582 inline void VisitEmbeddedPointer(RelocInfo* rinfo) final { |
1577 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 1583 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
1578 HeapObject* object = HeapObject::cast(rinfo->target_object()); | 1584 HeapObject* object = HeapObject::cast(rinfo->target_object()); |
1579 Code* host = rinfo->host(); | 1585 Code* host = rinfo->host(); |
1586 collector_->heap()->RecordWriteIntoCode(host, rinfo, object); | |
1580 collector_->RecordRelocSlot(host, rinfo, object); | 1587 collector_->RecordRelocSlot(host, rinfo, object); |
1581 } | 1588 } |
1582 | 1589 |
1583 inline void VisitCell(RelocInfo* rinfo) final { | 1590 inline void VisitCell(RelocInfo* rinfo) final { |
1584 DCHECK(rinfo->rmode() == RelocInfo::CELL); | 1591 DCHECK(rinfo->rmode() == RelocInfo::CELL); |
1585 Cell* cell = rinfo->target_cell(); | 1592 Cell* cell = rinfo->target_cell(); |
1586 Code* host = rinfo->host(); | 1593 Code* host = rinfo->host(); |
1594 // cell is always in old space, we don't have to record the slot in the | |
1595 // old-to-new remembered set. | |
1596 DCHECK(!collector_->heap()->InNewSpace(cell)); | |
1587 collector_->RecordRelocSlot(host, rinfo, cell); | 1597 collector_->RecordRelocSlot(host, rinfo, cell); |
1588 } | 1598 } |
1589 | 1599 |
1590 // Entries that will never move. | 1600 // Entries that will never move. |
1591 inline void VisitCodeAgeSequence(RelocInfo* rinfo) final { | 1601 inline void VisitCodeAgeSequence(RelocInfo* rinfo) final { |
1592 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); | 1602 DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); |
1593 Code* stub = rinfo->code_age_stub(); | 1603 Code* stub = rinfo->code_age_stub(); |
1594 USE(stub); | 1604 USE(stub); |
1595 DCHECK(!Page::FromAddress(stub->address())->IsEvacuationCandidate()); | 1605 DCHECK(!Page::FromAddress(stub->address())->IsEvacuationCandidate()); |
1596 } | 1606 } |
(...skipping 843 matching lines...) | |
2440 DependentCode* list_head) { | 2450 DependentCode* list_head) { |
2441 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); | 2451 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE); |
2442 Isolate* isolate = this->isolate(); | 2452 Isolate* isolate = this->isolate(); |
2443 DependentCode* current = list_head; | 2453 DependentCode* current = list_head; |
2444 while (current->length() > 0) { | 2454 while (current->length() > 0) { |
2445 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( | 2455 have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization( |
2446 isolate, DependentCode::kWeakCodeGroup); | 2456 isolate, DependentCode::kWeakCodeGroup); |
2447 current = current->next_link(); | 2457 current = current->next_link(); |
2448 } | 2458 } |
2449 | 2459 |
2460 { | |
2461 ArrayList* list = heap_->weak_new_space_object_to_code_list(); | |
2462 int counter = 0; | |
2463 for (int i = 0; i < list->Length(); i += 2) { | |
2464 WeakCell* obj = WeakCell::cast(list->Get(i)); | |
2465 WeakCell* dep = WeakCell::cast(list->Get(i + 1)); | |
2466 if (obj->cleared() || dep->cleared()) { | |
2467 if (!dep->cleared()) { | |
2468 Code* code = Code::cast(dep->value()); | |
2469 if (!code->marked_for_deoptimization()) { | |
2470 DependentCode::SetMarkedForDeoptimization( | |
2471 code, DependentCode::DependencyGroup::kWeakCodeGroup); | |
2472 code->InvalidateEmbeddedObjects(); | |
2473 have_code_to_deoptimize_ = true; | |
2474 } | |
2475 } | |
2476 } else { | |
2477 list->Set(counter, obj, SKIP_WRITE_BARRIER); | |
2478 RecordSlot(list, list->Slot(counter), obj); | |
2479 counter++; | |
2480 list->Set(counter, dep, SKIP_WRITE_BARRIER); | |
2481 RecordSlot(list, list->Slot(counter), dep); | |
2482 counter++; | |
2483 } | |
2484 } | |
2485 } | |
2486 | |
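For orientation, the block added above (new lines 2460-2485) walks a flat list of (object, dependent code) weak-cell pairs, marks still-live code for deoptimization when either cell of its pair has been cleared, and compacts the surviving pairs toward the front of the list. A minimal standalone sketch of that in-place compaction pattern, using a hypothetical Entry struct and callback in place of V8's ArrayList and WeakCell:

```cpp
#include <cstddef>
#include <functional>
#include <vector>

// Hypothetical stand-in for one (object, dependent code) pair; not a V8 type.
struct Entry {
  bool object_cleared;  // weak cell for the object has been cleared
  bool code_cleared;    // weak cell for the dependent code has been cleared
  int code_id;          // stands in for the dependent Code* payload
};

// Keep only pairs whose cells are both still live, sliding survivors toward
// the front; invoke the callback for live code whose owning object died.
std::size_t CompactWeakPairs(std::vector<Entry>& list,
                             const std::function<void(int)>& deoptimize) {
  std::size_t counter = 0;
  for (const Entry& e : list) {
    if (e.object_cleared || e.code_cleared) {
      // Dead pair: if the code itself survived, it must be deoptimized.
      if (!e.code_cleared) deoptimize(e.code_id);
    } else {
      list[counter++] = e;  // survivor: compact in place
    }
  }
  list.resize(counter);
  return counter;
}
```

The real loop also re-records each surviving slot via RecordSlot after moving it, presumably so the collector's slot bookkeeping stays valid for the compacted entries.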
2450 WeakHashTable* table = heap_->weak_object_to_code_table(); | 2487 WeakHashTable* table = heap_->weak_object_to_code_table(); |
2451 uint32_t capacity = table->Capacity(); | 2488 uint32_t capacity = table->Capacity(); |
2452 for (uint32_t i = 0; i < capacity; i++) { | 2489 for (uint32_t i = 0; i < capacity; i++) { |
2453 uint32_t key_index = table->EntryToIndex(i); | 2490 uint32_t key_index = table->EntryToIndex(i); |
2454 Object* key = table->get(key_index); | 2491 Object* key = table->get(key_index); |
2455 if (!table->IsKey(isolate, key)) continue; | 2492 if (!table->IsKey(isolate, key)) continue; |
2456 uint32_t value_index = table->EntryToValueIndex(i); | 2493 uint32_t value_index = table->EntryToValueIndex(i); |
2457 Object* value = table->get(value_index); | 2494 Object* value = table->get(value_index); |
2458 DCHECK(key->IsWeakCell()); | 2495 DCHECK(key->IsWeakCell()); |
2459 if (WeakCell::cast(key)->cleared()) { | 2496 if (WeakCell::cast(key)->cleared()) { |
(...skipping 324 matching lines...) | |
2784 HeapObject* undefined = heap()->undefined_value(); | 2821 HeapObject* undefined = heap()->undefined_value(); |
2785 Object* obj = heap()->encountered_transition_arrays(); | 2822 Object* obj = heap()->encountered_transition_arrays(); |
2786 while (obj != Smi::FromInt(0)) { | 2823 while (obj != Smi::FromInt(0)) { |
2787 TransitionArray* array = TransitionArray::cast(obj); | 2824 TransitionArray* array = TransitionArray::cast(obj); |
2788 obj = array->next_link(); | 2825 obj = array->next_link(); |
2789 array->set_next_link(undefined, SKIP_WRITE_BARRIER); | 2826 array->set_next_link(undefined, SKIP_WRITE_BARRIER); |
2790 } | 2827 } |
2791 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); | 2828 heap()->set_encountered_transition_arrays(Smi::FromInt(0)); |
2792 } | 2829 } |
2793 | 2830 |
2794 static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) { | |
2795 if (RelocInfo::IsCodeTarget(rmode)) { | |
2796 return CODE_TARGET_SLOT; | |
2797 } else if (RelocInfo::IsCell(rmode)) { | |
2798 return CELL_TARGET_SLOT; | |
2799 } else if (RelocInfo::IsEmbeddedObject(rmode)) { | |
2800 return EMBEDDED_OBJECT_SLOT; | |
2801 } else if (RelocInfo::IsDebugBreakSlot(rmode)) { | |
2802 return DEBUG_TARGET_SLOT; | |
2803 } | |
2804 UNREACHABLE(); | |
2805 return NUMBER_OF_SLOT_TYPES; | |
2806 } | |
2807 | |
2808 void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo, | 2831 void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo, |
2809 Object* target) { | 2832 Object* target) { |
2810 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); | 2833 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target)); |
2811 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host)); | 2834 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host)); |
2812 RelocInfo::Mode rmode = rinfo->rmode(); | |
2813 if (target_page->IsEvacuationCandidate() && | 2835 if (target_page->IsEvacuationCandidate() && |
2814 (rinfo->host() == NULL || | 2836 (rinfo->host() == NULL || |
2815 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) { | 2837 !ShouldSkipEvacuationSlotRecording(rinfo->host()))) { |
2838 RelocInfo::Mode rmode = rinfo->rmode(); | |
2816 Address addr = rinfo->pc(); | 2839 Address addr = rinfo->pc(); |
2817 SlotType slot_type = SlotTypeForRMode(rmode); | 2840 SlotType slot_type = SlotTypeForRelocInfoMode(rmode); |
2818 if (rinfo->IsInConstantPool()) { | 2841 if (rinfo->IsInConstantPool()) { |
2819 addr = rinfo->constant_pool_entry_address(); | 2842 addr = rinfo->constant_pool_entry_address(); |
2820 if (RelocInfo::IsCodeTarget(rmode)) { | 2843 if (RelocInfo::IsCodeTarget(rmode)) { |
2821 slot_type = CODE_ENTRY_SLOT; | 2844 slot_type = CODE_ENTRY_SLOT; |
2822 } else { | 2845 } else { |
2823 DCHECK(RelocInfo::IsEmbeddedObject(rmode)); | 2846 DCHECK(RelocInfo::IsEmbeddedObject(rmode)); |
2824 slot_type = OBJECT_SLOT; | 2847 slot_type = OBJECT_SLOT; |
2825 } | 2848 } |
2826 } | 2849 } |
2827 RememberedSet<OLD_TO_OLD>::InsertTyped( | 2850 RememberedSet<OLD_TO_OLD>::InsertTyped( |
(...skipping 596 matching lines...) | |
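A reading note for the RecordRelocSlot hunk above: the call site now computes rmode inside the recording branch and uses SlotTypeForRelocInfoMode, which presumably provides the same RelocInfo::Mode to SlotType mapping as the local SlotTypeForRMode helper removed earlier in the hunk; the constant-pool override is unchanged. A standalone restatement of that selection logic, with hypothetical Mode and SlotType enums standing in for the V8 ones:

```cpp
#include <cassert>

// Hypothetical stand-ins for V8's RelocInfo::Mode and SlotType; the names
// and values here are illustrative only.
enum class Mode { kCodeTarget, kCell, kEmbeddedObject, kDebugBreakSlot };
enum class SlotType {
  kCodeTarget, kCellTarget, kEmbeddedObject, kDebugTarget, kCodeEntry, kObject
};

// Mapping mirrored from the removed SlotTypeForRMode helper.
SlotType SlotTypeForMode(Mode mode) {
  switch (mode) {
    case Mode::kCodeTarget:     return SlotType::kCodeTarget;
    case Mode::kCell:           return SlotType::kCellTarget;
    case Mode::kEmbeddedObject: return SlotType::kEmbeddedObject;
    case Mode::kDebugBreakSlot: return SlotType::kDebugTarget;
  }
  assert(false && "unhandled reloc mode");
  return SlotType::kObject;
}

// Constant-pool override applied by RecordRelocSlot: a slot that lives in a
// constant pool is recorded as a code-entry slot (for code targets) or a
// plain object slot (for embedded objects).
SlotType SlotTypeForSlot(Mode mode, bool in_constant_pool) {
  if (!in_constant_pool) return SlotTypeForMode(mode);
  return mode == Mode::kCodeTarget ? SlotType::kCodeEntry : SlotType::kObject;
}
```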
3424 memset(free_start, 0xcc, size); | 3447 memset(free_start, 0xcc, size); |
3425 } | 3448 } |
3426 freed_bytes = space->UnaccountedFree(free_start, size); | 3449 freed_bytes = space->UnaccountedFree(free_start, size); |
3427 max_freed_bytes = Max(freed_bytes, max_freed_bytes); | 3450 max_freed_bytes = Max(freed_bytes, max_freed_bytes); |
3428 } | 3451 } |
3429 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); | 3452 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); |
3430 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 3453 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
3431 } | 3454 } |
3432 | 3455 |
3433 void MarkCompactCollector::InvalidateCode(Code* code) { | 3456 void MarkCompactCollector::InvalidateCode(Code* code) { |
3457 Page* page = Page::FromAddress(code->address()); | |
3458 Address start = code->instruction_start(); | |
3459 Address end = code->address() + code->Size(); | |
3460 | |
3461 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); | |
3462 | |
3434 if (heap_->incremental_marking()->IsCompacting() && | 3463 if (heap_->incremental_marking()->IsCompacting() && |
3435 !ShouldSkipEvacuationSlotRecording(code)) { | 3464 !ShouldSkipEvacuationSlotRecording(code)) { |
3436 DCHECK(compacting_); | 3465 DCHECK(compacting_); |
3437 | 3466 |
3438 // If the object is white then no slots were recorded on it yet. | 3467 // If the object is white then no slots were recorded on it yet. |
3439 MarkBit mark_bit = Marking::MarkBitFrom(code); | 3468 MarkBit mark_bit = Marking::MarkBitFrom(code); |
3440 if (Marking::IsWhite(mark_bit)) return; | 3469 if (Marking::IsWhite(mark_bit)) return; |
3441 | 3470 |
3442 // Ignore all slots that might have been recorded in the body of the | 3471 // Ignore all slots that might have been recorded in the body of the |
3443 // deoptimized code object. Assumption: no slots will be recorded for | 3472 // deoptimized code object. Assumption: no slots will be recorded for |
3444 // this object after invalidating it. | 3473 // this object after invalidating it. |
3445 Page* page = Page::FromAddress(code->address()); | |
3446 Address start = code->instruction_start(); | |
3447 Address end = code->address() + code->Size(); | |
3448 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); | 3474 RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end); |
3449 RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end); | |
3450 } | 3475 } |
3451 } | 3476 } |
3452 | 3477 |
3453 | 3478 |
3454 // Return true if the given code is deoptimized or will be deoptimized. | 3479 // Return true if the given code is deoptimized or will be deoptimized. |
3455 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3480 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3456 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3481 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3457 } | 3482 } |
3458 | 3483 |
3459 | 3484 |
(...skipping 525 matching lines...) | |
3985 | 4010 |
3986 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { | 4011 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { |
3987 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); | 4012 DCHECK(heap()->gc_state() == Heap::MARK_COMPACT); |
3988 if (is_compacting()) { | 4013 if (is_compacting()) { |
3989 Code* host = | 4014 Code* host = |
3990 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( | 4015 isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( |
3991 pc); | 4016 pc); |
3992 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4017 MarkBit mark_bit = Marking::MarkBitFrom(host); |
3993 if (Marking::IsBlack(mark_bit)) { | 4018 if (Marking::IsBlack(mark_bit)) { |
3994 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4019 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
4020 // target is always in old space, we don't have to record the slot in the | |
4021 // old-to-new remembered set. | |
4022 DCHECK(!heap()->InNewSpace(target)); | |
3995 RecordRelocSlot(host, &rinfo, target); | 4023 RecordRelocSlot(host, &rinfo, target); |
3996 } | 4024 } |
3997 } | 4025 } |
3998 } | 4026 } |
3999 | 4027 |
4000 } // namespace internal | 4028 } // namespace internal |
4001 } // namespace v8 | 4029 } // namespace v8 |