Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index ffda9f159d263d2c40e22c3c013881ef1be7f106..9be004cb9144baa8ea7668aa83662e6d27de9d74 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -2621,6 +2621,40 @@ void MarkCompactCollector::RecordRelocSlot(RelocInfo* rinfo, Object* target) {
 }
 
 
+class RecordMigratedSlotVisitor final : public ObjectVisitor {
+ public:
+  RecordMigratedSlotVisitor(MarkCompactCollector* collector,
+                            SlotsBuffer** evacuation_slots_buffer)
+      : collector_(collector),
+        evacuation_slots_buffer_(evacuation_slots_buffer) {}
+
+  V8_INLINE void VisitPointer(Object** p) override {
+    collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p),
+                                   evacuation_slots_buffer_);
+  }
+
+  V8_INLINE void VisitPointers(Object** start, Object** end) override {
+    while (start < end) {
+      collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start),
+                                     evacuation_slots_buffer_);
+      ++start;
+    }
+  }
+
+  V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
+    if (collector_->compacting_) {
+      Address code_entry = Memory::Address_at(code_entry_slot);
+      collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
+                                              evacuation_slots_buffer_);
+    }
+  }
+
+ private:
+  MarkCompactCollector* collector_;
+  SlotsBuffer** evacuation_slots_buffer_;
+};
+
+
|
 // We scavenge new space simultaneously with sweeping. This is done in two
 // passes.
 //
@@ -2646,26 +2680,10 @@ void MarkCompactCollector::MigrateObject(
     DCHECK_OBJECT_SIZE(size);
     DCHECK(evacuation_slots_buffer != nullptr);
     DCHECK(IsAligned(size, kPointerSize));
-    switch (src->ContentType()) {
-      case HeapObjectContents::kTaggedValues:
-        MigrateObjectTagged(dst, src, size, evacuation_slots_buffer);
-        break;
-
-      case HeapObjectContents::kMixedValues:
-        MigrateObjectMixed(dst, src, size, evacuation_slots_buffer);
-        break;
-
-      case HeapObjectContents::kRawValues:
-        MigrateObjectRaw(dst, src, size);
-        break;
-    }
-
-    if (compacting_ && dst->IsJSFunction()) {
-      Address code_entry_slot = dst->address() + JSFunction::kCodeEntryOffset;
-      Address code_entry = Memory::Address_at(code_entry_slot);
-      RecordMigratedCodeEntrySlot(code_entry, code_entry_slot,
-                                  evacuation_slots_buffer);
-    }
+
+    heap()->MoveBlock(dst->address(), src->address(), size);
+    RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer);
+    dst->IterateBody(&visitor);
   } else if (dest == CODE_SPACE) {
     DCHECK_CODEOBJECT_SIZE(size, heap()->code_space());
     DCHECK(evacuation_slots_buffer != nullptr);
@@ -2684,91 +2702,6 @@ void MarkCompactCollector::MigrateObject(
 }
 
 
|
-void MarkCompactCollector::MigrateObjectTagged(
-    HeapObject* dst, HeapObject* src, int size,
-    SlotsBuffer** evacuation_slots_buffer) {
-  Address src_slot = src->address();
-  Address dst_slot = dst->address();
-  for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
-    Object* value = Memory::Object_at(src_slot);
-    Memory::Object_at(dst_slot) = value;
-    RecordMigratedSlot(value, dst_slot, evacuation_slots_buffer);
-    src_slot += kPointerSize;
-    dst_slot += kPointerSize;
-  }
-}
-
-
-void MarkCompactCollector::MigrateObjectMixed(
-    HeapObject* dst, HeapObject* src, int size,
-    SlotsBuffer** evacuation_slots_buffer) {
-  if (src->IsFixedTypedArrayBase()) {
-    heap()->MoveBlock(dst->address(), src->address(), size);
-    Address base_pointer_slot =
-        dst->address() + FixedTypedArrayBase::kBasePointerOffset;
-    RecordMigratedSlot(Memory::Object_at(base_pointer_slot), base_pointer_slot,
-                       evacuation_slots_buffer);
-  } else if (src->IsBytecodeArray()) {
-    heap()->MoveBlock(dst->address(), src->address(), size);
-    Address constant_pool_slot =
-        dst->address() + BytecodeArray::kConstantPoolOffset;
-    RecordMigratedSlot(Memory::Object_at(constant_pool_slot),
-                       constant_pool_slot, evacuation_slots_buffer);
-  } else if (src->IsJSArrayBuffer()) {
-    heap()->MoveBlock(dst->address(), src->address(), size);
-
-    // Visit inherited JSObject properties and byte length of ArrayBuffer
-    Address regular_slot =
-        dst->address() + JSArrayBuffer::BodyDescriptor::kStartOffset;
-    Address regular_slots_end =
-        dst->address() + JSArrayBuffer::kByteLengthOffset + kPointerSize;
-    while (regular_slot < regular_slots_end) {
-      RecordMigratedSlot(Memory::Object_at(regular_slot), regular_slot,
-                         evacuation_slots_buffer);
-      regular_slot += kPointerSize;
-    }
-
-    // Skip backing store and visit just internal fields
-    Address internal_field_slot = dst->address() + JSArrayBuffer::kSize;
-    Address internal_fields_end =
-        dst->address() + JSArrayBuffer::kSizeWithInternalFields;
-    while (internal_field_slot < internal_fields_end) {
-      RecordMigratedSlot(Memory::Object_at(internal_field_slot),
-                         internal_field_slot, evacuation_slots_buffer);
-      internal_field_slot += kPointerSize;
-    }
-  } else if (FLAG_unbox_double_fields) {
-    Address dst_addr = dst->address();
-    Address src_addr = src->address();
-    Address src_slot = src_addr;
-    Address dst_slot = dst_addr;
-
-    LayoutDescriptorHelper helper(src->map());
-    DCHECK(!helper.all_fields_tagged());
-    for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
-      Object* value = Memory::Object_at(src_slot);
-
-      Memory::Object_at(dst_slot) = value;
-
-      if (helper.IsTagged(static_cast<int>(src_slot - src_addr))) {
-        RecordMigratedSlot(value, dst_slot, evacuation_slots_buffer);
-      }
-
-      src_slot += kPointerSize;
-      dst_slot += kPointerSize;
-    }
-  } else {
-    UNREACHABLE();
-  }
-}
-
-
-void MarkCompactCollector::MigrateObjectRaw(HeapObject* dst, HeapObject* src,
-                                            int size) {
-  heap()->MoveBlock(dst->address(), src->address(), size);
-}
-
-
 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
                               SlotsBuffer::SlotType slot_type, Address addr) {
   switch (slot_type) {
|
@@ -3127,23 +3060,21 @@ bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
     }
 
     case HeapObjectContents::kMixedValues: {
+      int offset = static_cast<int>(slot - object->address());
       if (object->IsFixedTypedArrayBase()) {
-        return static_cast<int>(slot - object->address()) ==
-               FixedTypedArrayBase::kBasePointerOffset;
+        return offset == FixedTypedArrayBase::kBasePointerOffset;
       } else if (object->IsBytecodeArray()) {
-        return static_cast<int>(slot - object->address()) ==
-               BytecodeArray::kConstantPoolOffset;
+        return offset == BytecodeArray::kConstantPoolOffset;
       } else if (object->IsJSArrayBuffer()) {
-        int off = static_cast<int>(slot - object->address());
-        return (off >= JSArrayBuffer::BodyDescriptor::kStartOffset &&
-                off <= JSArrayBuffer::kByteLengthOffset) ||
-               (off >= JSArrayBuffer::kSize &&
-                off < JSArrayBuffer::kSizeWithInternalFields);
+        return (offset >= JSArrayBuffer::kPropertiesOffset &&
+                offset <= JSArrayBuffer::kByteLengthOffset) ||
+               (offset >= JSArrayBuffer::kSize &&
+                offset < JSArrayBuffer::kSizeWithInternalFields);
       } else if (FLAG_unbox_double_fields) {
         // Filter out slots that happen to point to unboxed double fields.
         LayoutDescriptorHelper helper(object->map());
         DCHECK(!helper.all_fields_tagged());
-        return helper.IsTagged(static_cast<int>(slot - object->address()));
+        return helper.IsTagged(offset);
       }
       break;
     }
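
For readers unfamiliar with V8's ObjectVisitor machinery, the sketch below shows the pattern this patch adopts, in miniature: instead of a per-type switch that hard-codes every object layout (MigrateObjectTagged/Mixed/Raw), the object is moved as raw bytes and then asked to walk its own body, with a visitor recording each tagged slot. Everything here except the shape of RecordMigratedSlotVisitor is a simplified stand-in (Object, HeapObject, IterateBody, the slot buffer), not V8's real definitions.

#include <iostream>
#include <vector>

struct Object;  // Opaque tagged value; only slot addresses matter here.

// Visitor interface: the collector no longer needs to know each object's
// layout -- the object reports where its tagged slots live.
class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }
  virtual void VisitPointers(Object** start, Object** end) = 0;
};

// Stand-in for a migrated heap object that knows its own pointer fields.
struct HeapObject {
  std::vector<Object*> fields;  // pretend these are the object's tagged words
  void IterateBody(ObjectVisitor* v) {
    if (!fields.empty()) {
      v->VisitPointers(fields.data(), fields.data() + fields.size());
    }
  }
};

// Plays the role of the patch's RecordMigratedSlotVisitor: every visited
// slot address is appended to a buffer so it can be updated after evacuation.
class RecordMigratedSlotVisitor final : public ObjectVisitor {
 public:
  explicit RecordMigratedSlotVisitor(std::vector<Object**>* buffer)
      : buffer_(buffer) {}
  void VisitPointers(Object** start, Object** end) override {
    for (Object** p = start; p < end; ++p) buffer_->push_back(p);
  }

 private:
  std::vector<Object**>* buffer_;
};

int main() {
  HeapObject obj;
  obj.fields.assign(3, nullptr);  // an object with three pointer fields
  std::vector<Object**> recorded;
  RecordMigratedSlotVisitor visitor(&recorded);
  obj.IterateBody(&visitor);  // one generic walk replaces the per-type switch
  std::cout << "recorded " << recorded.size() << " slots\n";
}

The payoff is the same as in the patch: supporting a new object layout only requires a correct body-iteration method, rather than another special case in the migration code.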
|