| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 4462 matching lines...) |
| 4473 } else if (record_slots && | 4473 } else if (record_slots && |
| 4474 MarkCompactCollector::IsOnEvacuationCandidate(target)) { | 4474 MarkCompactCollector::IsOnEvacuationCandidate(target)) { |
| 4475 mark_compact_collector()->RecordSlot(object, slot, target); | 4475 mark_compact_collector()->RecordSlot(object, slot, target); |
| 4476 } | 4476 } |
| 4477 } | 4477 } |
| 4478 slot_address += kPointerSize; | 4478 slot_address += kPointerSize; |
| 4479 } | 4479 } |
| 4480 } | 4480 } |
| 4481 | 4481 |
| 4482 | 4482 |
| 4483 class IteratePointersToFromSpaceVisitor final : public ObjectVisitor { |
| 4484 public: |
| 4485 IteratePointersToFromSpaceVisitor(Heap* heap, HeapObject* target, |
| 4486 bool record_slots, |
| 4487 ObjectSlotCallback callback) |
| 4488 : heap_(heap), |
| 4489 target_(target), |
| 4490 record_slots_(record_slots), |
| 4491 callback_(callback) {} |
| 4492 |
| 4493 V8_INLINE void VisitPointers(Object** start, Object** end) override { |
| 4494 heap_->IterateAndMarkPointersToFromSpace( |
| 4495 target_, reinterpret_cast<Address>(start), |
| 4496 reinterpret_cast<Address>(end), record_slots_, callback_); |
| 4497 } |
| 4498 |
| 4499 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {} |
| 4500 |
| 4501 private: |
| 4502 Heap* heap_; |
| 4503 HeapObject* target_; |
| 4504 bool record_slots_; |
| 4505 ObjectSlotCallback callback_; |
| 4506 }; |
| 4507 |
| 4508 |
| 4483 void Heap::IteratePointersToFromSpace(HeapObject* target, int size, | 4509 void Heap::IteratePointersToFromSpace(HeapObject* target, int size, |
| 4484 ObjectSlotCallback callback) { | 4510 ObjectSlotCallback callback) { |
| 4485 Address obj_address = target->address(); | |
| 4486 | |
| 4487 // We are not collecting slots on new space objects during mutation, | 4511 // We are not collecting slots on new space objects during mutation, |
| 4488 // thus we have to scan for pointers to evacuation candidates when we | 4512 // thus we have to scan for pointers to evacuation candidates when we |
| 4489 // promote objects. But we should not record any slots in non-black | 4513 // promote objects. But we should not record any slots in non-black |
| 4490 // objects. A grey object's slots would be rescanned. | 4514 // objects. A grey object's slots would be rescanned. |
| 4491 // A white object might not survive until the end of the collection, | 4515 // A white object might not survive until the end of the collection, |
| 4492 // so it would be a violation of the invariant to record its slots. | 4516 // so it would be a violation of the invariant to record its slots. |
| 4493 bool record_slots = false; | 4517 bool record_slots = false; |
| 4494 if (incremental_marking()->IsCompacting()) { | 4518 if (incremental_marking()->IsCompacting()) { |
| 4495 MarkBit mark_bit = Marking::MarkBitFrom(target); | 4519 MarkBit mark_bit = Marking::MarkBitFrom(target); |
| 4496 record_slots = Marking::IsBlack(mark_bit); | 4520 record_slots = Marking::IsBlack(mark_bit); |
| 4497 } | 4521 } |
| 4498 | 4522 |
| 4499 // Do not scavenge JSArrayBuffer's contents | 4523 IteratePointersToFromSpaceVisitor visitor(this, target, record_slots, |
| 4500 switch (target->ContentType()) { | 4524 callback); |
| 4501 case HeapObjectContents::kTaggedValues: { | 4525 target->IterateBody(target->map()->instance_type(), size, &visitor); |
| 4502 IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size, | |
| 4503 record_slots, callback); | |
| 4504 break; | |
| 4505 } | |
| 4506 case HeapObjectContents::kMixedValues: { | |
| 4507 if (target->IsFixedTypedArrayBase()) { | |
| 4508 IterateAndMarkPointersToFromSpace( | |
| 4509 target, obj_address + FixedTypedArrayBase::kBasePointerOffset, | |
| 4510 obj_address + FixedTypedArrayBase::kHeaderSize, record_slots, | |
| 4511 callback); | |
| 4512 } else if (target->IsBytecodeArray()) { | |
| 4513 IterateAndMarkPointersToFromSpace( | |
| 4514 target, obj_address + BytecodeArray::kConstantPoolOffset, | |
| 4515 obj_address + BytecodeArray::kHeaderSize, record_slots, callback); | |
| 4516 } else if (target->IsJSArrayBuffer()) { | |
| 4517 IterateAndMarkPointersToFromSpace( | |
| 4518 target, obj_address, | |
| 4519 obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize, | |
| 4520 record_slots, callback); | |
| 4521 IterateAndMarkPointersToFromSpace( | |
| 4522 target, obj_address + JSArrayBuffer::kSize, obj_address + size, | |
| 4523 record_slots, callback); | |
| 4524 #if V8_DOUBLE_FIELDS_UNBOXING | |
| 4525 } else if (FLAG_unbox_double_fields) { | |
| 4526 LayoutDescriptorHelper helper(target->map()); | |
| 4527 DCHECK(!helper.all_fields_tagged()); | |
| 4528 | |
| 4529 for (int offset = 0; offset < size;) { | |
| 4530 int end_of_region_offset; | |
| 4531 if (helper.IsTagged(offset, size, &end_of_region_offset)) { | |
| 4532 IterateAndMarkPointersToFromSpace( | |
| 4533 target, obj_address + offset, | |
| 4534 obj_address + end_of_region_offset, record_slots, callback); | |
| 4535 } | |
| 4536 offset = end_of_region_offset; | |
| 4537 } | |
| 4538 #endif | |
| 4539 } | |
| 4540 break; | |
| 4541 } | |
| 4542 case HeapObjectContents::kRawValues: { | |
| 4543 break; | |
| 4544 } | |
| 4545 } | |
| 4546 } | 4526 } |
| 4547 | 4527 |
| 4548 | 4528 |
| 4549 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { | 4529 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { |
| 4550 IterateStrongRoots(v, mode); | 4530 IterateStrongRoots(v, mode); |
| 4551 IterateWeakRoots(v, mode); | 4531 IterateWeakRoots(v, mode); |
| 4552 } | 4532 } |
| 4553 | 4533 |
| 4554 | 4534 |
| 4555 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { | 4535 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { |
| (...skipping 1637 matching lines...) |
| 6193 } | 6173 } |
| 6194 | 6174 |
| 6195 | 6175 |
| 6196 // static | 6176 // static |
| 6197 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6177 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6198 return StaticVisitorBase::GetVisitorId(map); | 6178 return StaticVisitorBase::GetVisitorId(map); |
| 6199 } | 6179 } |
| 6200 | 6180 |
| 6201 } // namespace internal | 6181 } // namespace internal |
| 6202 } // namespace v8 | 6182 } // namespace v8 |
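
The core of this patch replaces the per-type layout switch in Heap::IteratePointersToFromSpace with body iteration through an ObjectVisitor: the new IteratePointersToFromSpaceVisitor forwards each tagged-pointer region that IterateBody reports to IterateAndMarkPointersToFromSpace, so the layout special cases (FixedTypedArrayBase, BytecodeArray, JSArrayBuffer, unboxed double fields) move out of this function and into the objects' body iterators. Below is a minimal standalone sketch of that dispatch pattern; Object, IterateBody, and CountingVisitor are simplified stand-ins for illustration, not V8's actual types.

#include <cstdio>

// Simplified stand-in for v8::internal::ObjectVisitor; the real interface
// has more hooks (e.g. VisitCodeEntry, overridden as a no-op in the patch).
struct Object;

class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  // Called once per contiguous run of tagged pointer slots in an object body.
  virtual void VisitPointers(Object** start, Object** end) = 0;
};

// Toy heap object: it knows which of its own slots hold tagged pointers,
// so callers no longer need a switch over the object's layout.
struct Object {
  Object* tagged_a = nullptr;  // tagged slot
  long raw_payload = 0;        // raw data; must not be visited
  Object* tagged_b = nullptr;  // tagged slot

  // The IterateBody idea: the object feeds only its tagged regions to the
  // visitor, skipping raw_payload, much like BytecodeArray or JSArrayBuffer
  // bodies skip their untagged words.
  void IterateBody(ObjectVisitor* v) {
    v->VisitPointers(&tagged_a, &tagged_a + 1);
    v->VisitPointers(&tagged_b, &tagged_b + 1);
  }
};

// Counts visited slots, standing in for the way
// IteratePointersToFromSpaceVisitor forwards each region to
// Heap::IterateAndMarkPointersToFromSpace.
class CountingVisitor final : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {
    slots_ += static_cast<int>(end - start);
  }
  int slots() const { return slots_; }

 private:
  int slots_ = 0;
};

int main() {
  Object o;
  CountingVisitor visitor;
  o.IterateBody(&visitor);  // visits tagged_a and tagged_b, skips raw_payload
  std::printf("tagged slots: %d\n", visitor.slots());  // prints 2
  return 0;
}

The record_slots flag threaded through the visitor preserves the marking invariant described in the comment above: slots are recorded only for black objects, because grey objects will be rescanned anyway and white objects may not survive the collection at all.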