Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index bcec8006331b63550a0aa5f7b90bb7f9c07df6c4..9c21d54469d90176b53e649c79386166ce487030 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -4480,10 +4480,34 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }
 
 
+class IteratePointersToFromSpaceVisitor final : public ObjectVisitor {
+ public:
+  IteratePointersToFromSpaceVisitor(Heap* heap, HeapObject* target,
+                                    bool record_slots,
+                                    ObjectSlotCallback callback)
+      : heap_(heap),
+        target_(target),
+        record_slots_(record_slots),
+        callback_(callback) {}
+
+  V8_INLINE void VisitPointers(Object** start, Object** end) override {
+    heap_->IterateAndMarkPointersToFromSpace(
+        target_, reinterpret_cast<Address>(start),
+        reinterpret_cast<Address>(end), record_slots_, callback_);
+  }
+
+  V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {}
+
+ private:
+  Heap* heap_;
+  HeapObject* target_;
+  bool record_slots_;
+  ObjectSlotCallback callback_;
+};
+
+
 void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
                                       ObjectSlotCallback callback) {
-  Address obj_address = target->address();
-
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
   // promote objects. But we should not record any slots in non-black
@@ -4496,53 +4520,9 @@ void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
     record_slots = Marking::IsBlack(mark_bit);
   }
 
-  // Do not scavenge JSArrayBuffer's contents
-  switch (target->ContentType()) {
-    case HeapObjectContents::kTaggedValues: {
-      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
-                                        record_slots, callback);
-      break;
-    }
-    case HeapObjectContents::kMixedValues: {
-      if (target->IsFixedTypedArrayBase()) {
-        IterateAndMarkPointersToFromSpace(
-            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
-            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
-            callback);
-      } else if (target->IsBytecodeArray()) {
-        IterateAndMarkPointersToFromSpace(
-            target, obj_address + BytecodeArray::kConstantPoolOffset,
-            obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
-      } else if (target->IsJSArrayBuffer()) {
-        IterateAndMarkPointersToFromSpace(
-            target, obj_address,
-            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
-            record_slots, callback);
-        IterateAndMarkPointersToFromSpace(
-            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
-            record_slots, callback);
-#if V8_DOUBLE_FIELDS_UNBOXING
-      } else if (FLAG_unbox_double_fields) {
-        LayoutDescriptorHelper helper(target->map());
-        DCHECK(!helper.all_fields_tagged());
-
-        for (int offset = 0; offset < size;) {
-          int end_of_region_offset;
-          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-            IterateAndMarkPointersToFromSpace(
-                target, obj_address + offset,
-                obj_address + end_of_region_offset, record_slots, callback);
-          }
-          offset = end_of_region_offset;
-        }
-#endif
-      }
-      break;
-    }
-    case HeapObjectContents::kRawValues: {
-      break;
-    }
-  }
+  IteratePointersToFromSpaceVisitor visitor(this, target, record_slots,
+                                            callback);
+  target->IterateBody(target->map()->instance_type(), size, &visitor);
 }
 
 