Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 1135353003: Factor out handling of mixed objects preprocessing after migration (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: updates (created 5 years, 6 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/v8.h"
 
 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
(...skipping 2687 matching lines...)
 // pointer iteration. This is an issue if the store buffer overflows and we
 // have to scan the entire old space, including dead objects, looking for
 // pointers to new space.
 void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
                                          int size, AllocationSpace dest) {
   Address dst_addr = dst->address();
   Address src_addr = src->address();
   DCHECK(heap()->AllowedToBeMigrated(src, dest));
   DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize);
   if (dest == OLD_SPACE) {
-    Address src_slot = src_addr;
-    Address dst_slot = dst_addr;
     DCHECK(IsAligned(size, kPointerSize));
-
-    bool may_contain_raw_values = src->MayContainRawValues();
-#if V8_DOUBLE_FIELDS_UNBOXING
-    LayoutDescriptorHelper helper(src->map());
-    bool has_only_tagged_fields = helper.all_fields_tagged();
-#endif
-    for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
-      Object* value = Memory::Object_at(src_slot);
-
-      Memory::Object_at(dst_slot) = value;
-
-#if V8_DOUBLE_FIELDS_UNBOXING
-      if (!may_contain_raw_values &&
-          (has_only_tagged_fields ||
-           helper.IsTagged(static_cast<int>(src_slot - src_addr))))
-#else
-      if (!may_contain_raw_values)
-#endif
-      {
-        RecordMigratedSlot(value, dst_slot);
-      }
-
-      src_slot += kPointerSize;
-      dst_slot += kPointerSize;
+    switch (src->ContentType()) {
+      case HeapObjectContents::kTaggedValues:
+        MigrateObjectTagged(dst, src, size);
+        break;
+
+      case HeapObjectContents::kMixedValues:
+        MigrateObjectMixed(dst, src, size);
+        break;
+
+      case HeapObjectContents::kRawValues:
+        MigrateObjectRaw(dst, src, size);
+        break;
     }
 
     if (compacting_ && dst->IsJSFunction()) {
-      Address code_entry_slot = dst_addr + JSFunction::kCodeEntryOffset;
+      Address code_entry_slot = dst->address() + JSFunction::kCodeEntryOffset;
       Address code_entry = Memory::Address_at(code_entry_slot);
 
       if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
         SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
                            SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
                            SlotsBuffer::IGNORE_OVERFLOW);
       }
     }
   } else if (dest == CODE_SPACE) {
     PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
     heap()->MoveBlock(dst_addr, src_addr, size);
     SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
                        SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr,
                        SlotsBuffer::IGNORE_OVERFLOW);
     Code::cast(dst)->Relocate(dst_addr - src_addr);
   } else {
     DCHECK(dest == NEW_SPACE);
     heap()->MoveBlock(dst_addr, src_addr, size);
   }
   heap()->OnMoveEvent(dst, src, size);
   Memory::Address_at(src_addr) = dst_addr;
 }
 
 
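[Review note] The rewritten OLD_SPACE branch now decides once per object how to
copy it, instead of re-testing MayContainRawValues() and the layout helper on
every word inside the copy loop. A minimal standalone sketch of that dispatch
shape follows, for reference while reviewing: HeapObjectContents and its three
category names come from this patch; Migrate(), the toy helpers, and their
messages are hypothetical stand-ins for illustration, not V8's real
implementation.

// dispatch_sketch.cc -- toy model of the per-object migration dispatch.
// Build: g++ -std=c++11 dispatch_sketch.cc && ./a.out
#include <cstdio>

// Mirrors the enum the new switch consumes (defined elsewhere in V8).
enum class HeapObjectContents { kTaggedValues, kMixedValues, kRawValues };

// Hypothetical stand-ins for the MigrateObject{Tagged,Mixed,Raw} helpers.
void MigrateTagged() { std::puts("copy word by word, record every slot"); }
void MigrateMixed() { std::puts("copy word by word, record tagged slots only"); }
void MigrateRaw() { std::puts("plain block move, record nothing"); }

void Migrate(HeapObjectContents contents) {
  switch (contents) {
    case HeapObjectContents::kTaggedValues: MigrateTagged(); break;
    case HeapObjectContents::kMixedValues: MigrateMixed(); break;
    case HeapObjectContents::kRawValues: MigrateRaw(); break;
  }
}

int main() { Migrate(HeapObjectContents::kMixedValues); }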
+void MarkCompactCollector::MigrateObjectTagged(HeapObject* dst, HeapObject* src,
+                                               int size) {
+  Address src_slot = src->address();
+  Address dst_slot = dst->address();
+  for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
+    Object* value = Memory::Object_at(src_slot);
+    Memory::Object_at(dst_slot) = value;
+    RecordMigratedSlot(value, dst_slot);
+    src_slot += kPointerSize;
+    dst_slot += kPointerSize;
+  }
+}
+
+
+void MarkCompactCollector::MigrateObjectMixed(HeapObject* dst, HeapObject* src,
+                                              int size) {
+  if (FLAG_unbox_double_fields) {
+    Address dst_addr = dst->address();
+    Address src_addr = src->address();
+    Address src_slot = src_addr;
+    Address dst_slot = dst_addr;
+
+    LayoutDescriptorHelper helper(src->map());
+    DCHECK(!helper.all_fields_tagged());
+    for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
+      Object* value = Memory::Object_at(src_slot);
+
+      Memory::Object_at(dst_slot) = value;
+
+      if (helper.IsTagged(static_cast<int>(src_slot - src_addr))) {
+        RecordMigratedSlot(value, dst_slot);
+      }
+
+      src_slot += kPointerSize;
+      dst_slot += kPointerSize;
+    }
+  } else {
+    UNREACHABLE();
+  }
+}
+
+
+void MarkCompactCollector::MigrateObjectRaw(HeapObject* dst, HeapObject* src,
+                                            int size) {
+  heap()->MoveBlock(dst->address(), src->address(), size);
+}
+
+
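[Review note] MigrateObjectMixed() copies every word unconditionally but asks
the layout descriptor whether each word is tagged before recording the slot,
so raw payloads such as unboxed doubles never enter the slots buffer. A toy
model of that filter follows, assuming a 64-bit word size; ToyLayout stands in
for LayoutDescriptorHelper, and nothing below is real V8 API.

// mixed_copy_sketch.cc -- toy model of the per-word tagged/raw filter.
// Build: g++ -std=c++11 mixed_copy_sketch.cc && ./a.out
#include <cstdint>
#include <cstdio>
#include <vector>

constexpr int kPointerSize = 8;  // assumption: 64-bit target

// Hypothetical stand-in for LayoutDescriptorHelper: one flag per word.
struct ToyLayout {
  std::vector<bool> tagged;
  bool IsTagged(int offset) const { return tagged[offset / kPointerSize]; }
};

int main() {
  // Assumed layout: [header][tagged field][unboxed double] -> 3 words.
  ToyLayout layout{{true, true, false}};
  uint64_t src[3] = {0x1111, 0x2222, 0x4000000000000000ull};
  uint64_t dst[3];
  for (int i = 0; i < 3; i++) {
    dst[i] = src[i];  // every word is copied...
    if (layout.IsTagged(i * kPointerSize)) {
      // ...but only tagged words would reach RecordMigratedSlot.
      std::printf("record slot at offset %d\n", i * kPointerSize);
    }
  }
  return 0;
}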
 // Visitor for updating pointers from live objects in old spaces to new space.
 // It does not expect to encounter pointers to dead objects.
 class PointersUpdatingVisitor : public ObjectVisitor {
  public:
   explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) {}
 
   void VisitPointer(Object** p) { UpdatePointer(p); }
 
   void VisitPointers(Object** start, Object** end) {
     for (Object** p = start; p < end; p++) UpdatePointer(p);
(...skipping 348 matching lines...)
     return false;
   }
 
   // |object| is NULL only when the slot belongs to large object space.
   DCHECK(object != NULL ||
          Page::FromAnyPointerAddress(heap_, slot)->owner() ==
              heap_->lo_space());
   // We don't need to check large objects' layout descriptor since it can't
   // contain in-object fields anyway.
   if (object != NULL) {
-    // TODO(ishell): This is a workaround for crbug/454297. We must not have
-    // slots in data objects at all. Remove this once we found the root cause.
-    InstanceType type = object->map()->instance_type();
-    // Slots in maps and code can't be invalid because they are never shrunk.
-    if (type == MAP_TYPE || type == CODE_TYPE) return true;
-    // Consider slots in objects that contain ONLY raw data as invalid.
-    if (object->MayContainRawValues()) return false;
-    if (FLAG_unbox_double_fields) {
-      // Filter out slots that happen to point to unboxed double fields.
-      LayoutDescriptorHelper helper(object->map());
-      bool has_only_tagged_fields = helper.all_fields_tagged();
-      if (!has_only_tagged_fields &&
-          !helper.IsTagged(static_cast<int>(slot - object->address()))) {
-        return false;
-      }
-    }
+    switch (object->ContentType()) {
+      case HeapObjectContents::kTaggedValues:
+        return true;
+
+      case HeapObjectContents::kRawValues: {
+        InstanceType type = object->map()->instance_type();
+        // Slots in maps and code can't be invalid because they are never
+        // shrunk.
+        if (type == MAP_TYPE || type == CODE_TYPE) return true;
+
+        // Consider slots in objects that contain ONLY raw data as invalid.
+        return false;
+      }
+
+      case HeapObjectContents::kMixedValues: {
+        if (FLAG_unbox_double_fields) {
+          // Filter out slots that happen to point to unboxed double fields.
+          LayoutDescriptorHelper helper(object->map());
+          DCHECK(!helper.all_fields_tagged());
+          return helper.IsTagged(static_cast<int>(slot - object->address()));
+        }
+        break;
+      }
+    }
+    UNREACHABLE();
   }
 
   return true;
 }
 
 
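[Review note] For reviewing the new validity check, the switch above can be
restated as a pure function over the three content categories (behavior read
off the code, with FLAG_unbox_double_fields assumed on for the mixed case; the
enum comes from this patch, while SlotIsValid and its parameters are
illustrative stand-ins).

// Pure-function restatement of the slot-validity rules in the switch above.
enum class HeapObjectContents { kTaggedValues, kMixedValues, kRawValues };

bool SlotIsValid(HeapObjectContents contents, bool is_map_or_code,
                 bool slot_offset_is_tagged) {
  switch (contents) {
    case HeapObjectContents::kTaggedValues:
      return true;  // fully tagged: every slot is a real pointer slot
    case HeapObjectContents::kRawValues:
      return is_map_or_code;  // maps and code never shrink; other raw: invalid
    case HeapObjectContents::kMixedValues:
      return slot_offset_is_tagged;  // filters out unboxed double fields
  }
  return false;  // not reached for well-formed enum values
}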
 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
                                                     HeapObject* object) {
   // The target object has to be black.
   CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
(...skipping 1506 matching lines...)
   SlotsBuffer* buffer = *buffer_address;
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }
 }  // namespace internal
 }  // namespace v8