Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 2087463004: Revert of [heap] Avoid the use of cells to point from code to new-space objects. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 6 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/mark-compact.h"

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/sys-info.h"
 #include "src/code-stubs.h"
(...skipping 1597 matching lines...)
       RememberedSet<OLD_TO_OLD>::InsertTyped(Page::FromAddress(code_entry_slot),
                                              nullptr, CODE_ENTRY_SLOT,
                                              code_entry_slot);
     }
   }

   inline void VisitCodeTarget(RelocInfo* rinfo) final {
     DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
     Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     Code* host = rinfo->host();
-    // The target is always in old space, we don't have to record the slot in
-    // the old-to-new remembered set.
-    DCHECK(!collector_->heap()->InNewSpace(target));
     collector_->RecordRelocSlot(host, rinfo, target);
   }

   inline void VisitDebugTarget(RelocInfo* rinfo) final {
     DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
            rinfo->IsPatchedDebugBreakSlotSequence());
     Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
     Code* host = rinfo->host();
-    // The target is always in old space, we don't have to record the slot in
-    // the old-to-new remembered set.
-    DCHECK(!collector_->heap()->InNewSpace(target));
     collector_->RecordRelocSlot(host, rinfo, target);
   }

   inline void VisitEmbeddedPointer(RelocInfo* rinfo) final {
     DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
     HeapObject* object = HeapObject::cast(rinfo->target_object());
     Code* host = rinfo->host();
-    collector_->heap()->RecordWriteIntoCode(host, rinfo, object);
     collector_->RecordRelocSlot(host, rinfo, object);
   }

   inline void VisitCell(RelocInfo* rinfo) final {
     DCHECK(rinfo->rmode() == RelocInfo::CELL);
     Cell* cell = rinfo->target_cell();
     Code* host = rinfo->host();
-    // The cell is always in old space, we don't have to record the slot in
-    // the old-to-new remembered set.
-    DCHECK(!collector_->heap()->InNewSpace(cell));
     collector_->RecordRelocSlot(host, rinfo, cell);
   }

   // Entries that will never move.
   inline void VisitCodeAgeSequence(RelocInfo* rinfo) final {
     DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
     Code* stub = rinfo->code_age_stub();
     USE(stub);
     DCHECK(!Page::FromAddress(stub->address())->IsEvacuationCandidate());
   }
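Note on the hunk above: each visitor method funnels its relocation slot into collector_->RecordRelocSlot, which further down in this file stores a typed entry in the OLD_TO_OLD remembered set; the deleted DCHECKs belonged to the change being reverted, which relied on reloc targets never living in new space. For orientation only, here is a minimal, self-contained sketch of what a typed remembered set keeps track of. The class and its API are hypothetical and simplified; only the slot-type names are taken from the patch, and this is not V8's implementation.

    #include <cstdint>
    #include <map>
    #include <set>
    #include <utility>

    // Simplified model: per page, the collector remembers which kind of slot
    // lives at which address, so it can revisit exactly those locations once
    // objects on evacuation candidates have moved.
    using Address = std::uintptr_t;

    enum SlotType {
      CODE_TARGET_SLOT,
      CELL_TARGET_SLOT,
      EMBEDDED_OBJECT_SLOT,
      DEBUG_TARGET_SLOT,
      CODE_ENTRY_SLOT,
      OBJECT_SLOT,
      NUMBER_OF_SLOT_TYPES
    };

    class TypedRememberedSet {
     public:
      // Record a typed slot on the page that starts at |page_start|.
      void InsertTyped(Address page_start, SlotType type, Address slot) {
        pages_[page_start].insert({slot, type});
      }
      // Drop every recorded slot whose address falls in [start, end),
      // assuming start <= end. CODE_TARGET_SLOT (the smallest enumerator)
      // is used as a lower sentinel for the secondary key.
      void RemoveRangeTyped(Address page_start, Address start, Address end) {
        auto& slots = pages_[page_start];
        slots.erase(slots.lower_bound({start, CODE_TARGET_SLOT}),
                    slots.lower_bound({end, CODE_TARGET_SLOT}));
      }

     private:
      // Slots are keyed by address first, so range removal is a simple erase.
      std::map<Address, std::set<std::pair<Address, SlotType>>> pages_;
    };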
(...skipping 849 matching lines...)
                                               DependentCode* list_head) {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
   Isolate* isolate = this->isolate();
   DependentCode* current = list_head;
   while (current->length() > 0) {
     have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
         isolate, DependentCode::kWeakCodeGroup);
     current = current->next_link();
   }

-  {
-    ArrayList* list = heap_->weak_new_space_object_to_code_list();
-    int counter = 0;
-    for (int i = 0; i < list->Length(); i += 2) {
-      WeakCell* obj = WeakCell::cast(list->Get(i));
-      WeakCell* dep = WeakCell::cast(list->Get(i + 1));
-      if (obj->cleared() || dep->cleared()) {
-        if (!dep->cleared()) {
-          Code* code = Code::cast(dep->value());
-          if (!code->marked_for_deoptimization()) {
-            DependentCode::SetMarkedForDeoptimization(
-                code, DependentCode::DependencyGroup::kWeakCodeGroup);
-            code->InvalidateEmbeddedObjects();
-            have_code_to_deoptimize_ = true;
-          }
-        }
-      } else {
-        // We record the slot manually because marking is finished at this
-        // point and the write barrier would bailout.
-        list->Set(counter, obj, SKIP_WRITE_BARRIER);
-        RecordSlot(list, list->Slot(counter), obj);
-        counter++;
-        list->Set(counter, dep, SKIP_WRITE_BARRIER);
-        RecordSlot(list, list->Slot(counter), dep);
-        counter++;
-      }
-    }
-  }
-
   WeakHashTable* table = heap_->weak_object_to_code_table();
   uint32_t capacity = table->Capacity();
   for (uint32_t i = 0; i < capacity; i++) {
     uint32_t key_index = table->EntryToIndex(i);
     Object* key = table->get(key_index);
     if (!table->IsKey(isolate, key)) continue;
     uint32_t value_index = table->EntryToValueIndex(i);
     Object* value = table->get(value_index);
     DCHECK(key->IsWeakCell());
     if (WeakCell::cast(key)->cleared()) {
(...skipping 324 matching lines...)
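Note on the block deleted in the previous hunk (the weak_new_space_object_to_code_list loop): it walked flat (object, code) weak-cell pairs, marked the dependent code for deoptimization when either cell had been cleared, and packed the surviving pairs toward the front of the list, re-recording slots by hand because the write barrier bails out once marking has finished. A simplified, self-contained sketch of that compaction pattern, using placeholder types rather than V8's ArrayList/WeakCell:

    #include <cstddef>
    #include <vector>

    // Placeholder for a weak reference: nullptr models a cleared weak cell.
    using WeakRef = void*;

    // Walk a flat list of (object, dependent) pairs. Pairs where either side
    // has been cleared are dropped; surviving pairs are packed toward the
    // front. Returns the new logical length. In the removed V8 code, each
    // kept element is also re-recorded in the slot set manually, since the
    // normal write barrier no longer runs at that point in the GC.
    std::size_t CompactWeakPairList(std::vector<WeakRef>& list) {
      std::size_t counter = 0;
      for (std::size_t i = 0; i + 1 < list.size(); i += 2) {
        WeakRef obj = list[i];
        WeakRef dep = list[i + 1];
        if (obj == nullptr || dep == nullptr) continue;  // pair is dead
        list[counter++] = obj;
        list[counter++] = dep;
      }
      return counter;
    }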
   HeapObject* undefined = heap()->undefined_value();
   Object* obj = heap()->encountered_transition_arrays();
   while (obj != Smi::FromInt(0)) {
     TransitionArray* array = TransitionArray::cast(obj);
     obj = array->next_link();
     array->set_next_link(undefined, SKIP_WRITE_BARRIER);
   }
   heap()->set_encountered_transition_arrays(Smi::FromInt(0));
 }

+static inline SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
+  if (RelocInfo::IsCodeTarget(rmode)) {
+    return CODE_TARGET_SLOT;
+  } else if (RelocInfo::IsCell(rmode)) {
+    return CELL_TARGET_SLOT;
+  } else if (RelocInfo::IsEmbeddedObject(rmode)) {
+    return EMBEDDED_OBJECT_SLOT;
+  } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
+    return DEBUG_TARGET_SLOT;
+  }
+  UNREACHABLE();
+  return NUMBER_OF_SLOT_TYPES;
+}
+
 void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
                                            Object* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
   Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
+  RelocInfo::Mode rmode = rinfo->rmode();
   if (target_page->IsEvacuationCandidate() &&
       (rinfo->host() == NULL ||
        !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
-    RelocInfo::Mode rmode = rinfo->rmode();
     Address addr = rinfo->pc();
-    SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
+    SlotType slot_type = SlotTypeForRMode(rmode);
     if (rinfo->IsInConstantPool()) {
       addr = rinfo->constant_pool_entry_address();
       if (RelocInfo::IsCodeTarget(rmode)) {
         slot_type = CODE_ENTRY_SLOT;
       } else {
         DCHECK(RelocInfo::IsEmbeddedObject(rmode));
         slot_type = OBJECT_SLOT;
       }
     }
     RememberedSet<OLD_TO_OLD>::InsertTyped(
(...skipping 604 matching lines...)
       p->heap()->CreateFillerObjectAt(free_start, size,
                                       ClearRecordedSlots::kNo);
     }
   }
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   if (free_list_mode == IGNORE_FREE_LIST) return 0;
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }

 void MarkCompactCollector::InvalidateCode(Code* code) {
-  Page* page = Page::FromAddress(code->address());
-  Address start = code->instruction_start();
-  Address end = code->address() + code->Size();
-
-  RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end);
-
   if (heap_->incremental_marking()->IsCompacting() &&
       !ShouldSkipEvacuationSlotRecording(code)) {
     DCHECK(compacting_);

     // If the object is white than no slots were recorded on it yet.
     MarkBit mark_bit = Marking::MarkBitFrom(code);
     if (Marking::IsWhite(mark_bit)) return;

     // Ignore all slots that might have been recorded in the body of the
     // deoptimized code object. Assumption: no slots will be recorded for
     // this object after invalidating it.
+    Page* page = Page::FromAddress(code->address());
+    Address start = code->instruction_start();
+    Address end = code->address() + code->Size();
     RememberedSet<OLD_TO_OLD>::RemoveRangeTyped(page, start, end);
+    RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(page, start, end);
   }
 }


 // Return true if the given code is deoptimized or will be deoptimized.
 bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
   return code->is_optimized_code() && code->marked_for_deoptimization();
 }

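Note on InvalidateCode above: after the revert, slots recorded inside the body of a deoptimized code object are only purged when incremental compaction is active and the code object is already marked (a white object has had no slots recorded yet), and both the OLD_TO_OLD and OLD_TO_NEW typed sets are trimmed over the code's address range. Reusing the TypedRememberedSet sketch from the earlier note (illustrative names only, not V8 code), the shape is roughly:

    // Illustrative only, reusing the TypedRememberedSet sketch above.
    // When a code object occupying [start, end) on the page that begins at
    // |page_start| is invalidated, drop any typed slots previously recorded
    // inside its body so evacuation never tries to update pointers that live
    // in dead code.
    void InvalidateCodeSketch(TypedRememberedSet& old_to_old,
                              TypedRememberedSet& old_to_new,
                              Address page_start, Address start, Address end) {
      old_to_old.RemoveRangeTyped(page_start, start, end);
      old_to_new.RemoveRangeTyped(page_start, start, end);
    }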
(...skipping 583 matching lines...)

 void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
   DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
   if (is_compacting()) {
     Code* host =
         isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
             pc);
     MarkBit mark_bit = Marking::MarkBitFrom(host);
     if (Marking::IsBlack(mark_bit)) {
       RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
-      // The target is always in old space, we don't have to record the slot in
-      // the old-to-new remembered set.
-      DCHECK(!heap()->InNewSpace(target));
       RecordRelocSlot(host, &rinfo, target);
     }
   }
 }

 }  // namespace internal
 }  // namespace v8