| OLD | NEW |
| 1 // Copyright 2017 the V8 project authors. All rights reserved. | 1 // Copyright 2017 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/concurrent-marking.h" | 5 #include "src/heap/concurrent-marking.h" |
| 6 | 6 |
| 7 #include <stack> | 7 #include <stack> |
| 8 #include <unordered_map> | 8 #include <unordered_map> |
| 9 | 9 |
| 10 #include "src/heap/concurrent-marking-deque.h" | 10 #include "src/heap/concurrent-marking-deque.h" |
| (...skipping 41 matching lines...) |
| 52 : deque_(deque) {} | 52 : deque_(deque) {} |
| 53 | 53 |
| 54 bool ShouldVisit(HeapObject* object) override { | 54 bool ShouldVisit(HeapObject* object) override { |
| 55 return ObjectMarking::GreyToBlack<MarkBit::AccessMode::ATOMIC>( | 55 return ObjectMarking::GreyToBlack<MarkBit::AccessMode::ATOMIC>( |
| 56 object, marking_state(object)); | 56 object, marking_state(object)); |
| 57 } | 57 } |
| 58 | 58 |
| 59 void VisitPointers(HeapObject* host, Object** start, Object** end) override { | 59 void VisitPointers(HeapObject* host, Object** start, Object** end) override { |
| 60 for (Object** p = start; p < end; p++) { | 60 for (Object** p = start; p < end; p++) { |
| 61 Object* object = reinterpret_cast<Object*>( | 61 Object* object = reinterpret_cast<Object*>( |
| 62 base::NoBarrier_Load(reinterpret_cast<const base::AtomicWord*>(p))); | 62 base::Relaxed_Load(reinterpret_cast<const base::AtomicWord*>(p))); |
| 63 if (!object->IsHeapObject()) continue; | 63 if (!object->IsHeapObject()) continue; |
| 64 MarkObject(HeapObject::cast(object)); | 64 MarkObject(HeapObject::cast(object)); |
| 65 } | 65 } |
| 66 } | 66 } |
| 67 | 67 |
| 68 void VisitPointersInSnapshot(const SlotSnapshot& snapshot) { | 68 void VisitPointersInSnapshot(const SlotSnapshot& snapshot) { |
| 69 for (int i = 0; i < snapshot.number_of_slots(); i++) { | 69 for (int i = 0; i < snapshot.number_of_slots(); i++) { |
| 70 Object* object = snapshot.value(i); | 70 Object* object = snapshot.value(i); |
| 71 if (!object->IsHeapObject()) continue; | 71 if (!object->IsHeapObject()) continue; |
| 72 MarkObject(HeapObject::cast(object)); | 72 MarkObject(HeapObject::cast(object)); |
| (...skipping 103 matching lines...) |
| 176 public: | 176 public: |
| 177 explicit SlotSnapshottingVisitor(SlotSnapshot* slot_snapshot) | 177 explicit SlotSnapshottingVisitor(SlotSnapshot* slot_snapshot) |
| 178 : slot_snapshot_(slot_snapshot) { | 178 : slot_snapshot_(slot_snapshot) { |
| 179 slot_snapshot_->clear(); | 179 slot_snapshot_->clear(); |
| 180 } | 180 } |
| 181 | 181 |
| 182 void VisitPointers(HeapObject* host, Object** start, | 182 void VisitPointers(HeapObject* host, Object** start, |
| 183 Object** end) override { | 183 Object** end) override { |
| 184 for (Object** p = start; p < end; p++) { | 184 for (Object** p = start; p < end; p++) { |
| 185 Object* object = reinterpret_cast<Object*>( | 185 Object* object = reinterpret_cast<Object*>( |
| 186 base::NoBarrier_Load(reinterpret_cast<const base::AtomicWord*>(p))); | 186 base::Relaxed_Load(reinterpret_cast<const base::AtomicWord*>(p))); |
| 187 slot_snapshot_->add(p, object); | 187 slot_snapshot_->add(p, object); |
| 188 } | 188 } |
| 189 } | 189 } |
| 190 | 190 |
| 191 private: | 191 private: |
| 192 SlotSnapshot* slot_snapshot_; | 192 SlotSnapshot* slot_snapshot_; |
| 193 }; | 193 }; |
| 194 | 194 |
| 195 const SlotSnapshot& MakeSlotSnapshot(Map* map, HeapObject* object, int size) { | 195 const SlotSnapshot& MakeSlotSnapshot(Map* map, HeapObject* object, int size) { |
| 196 // TODO(ulan): Iterate only the existing fields and skip slack at the end | 196 // TODO(ulan): Iterate only the existing fields and skip slack at the end |
| (...skipping 83 matching lines...) |
| 280 } | 280 } |
| 281 | 281 |
| 282 void ConcurrentMarking::EnsureTaskCompleted() { | 282 void ConcurrentMarking::EnsureTaskCompleted() { |
| 283 if (IsTaskPending()) { | 283 if (IsTaskPending()) { |
| 284 WaitForTaskToComplete(); | 284 WaitForTaskToComplete(); |
| 285 } | 285 } |
| 286 } | 286 } |
| 287 | 287 |
| 288 } // namespace internal | 288 } // namespace internal |
| 289 } // namespace v8 | 289 } // namespace v8 |
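Note on the rename in both `VisitPointers` overrides above (`base::NoBarrier_Load` → `base::Relaxed_Load`): in standard C++ terms this is an atomic load with relaxed ordering, so the concurrent marker never observes a torn pointer value, but the read establishes no happens-before relationship with other memory operations. The sketch below uses illustrative names (`AtomicSlot`, `RelaxedLoadSlot`), not V8's actual `base/atomicops` API.

```cpp
#include <atomic>
#include <cstdint>

// Illustrative stand-in for a tagged slot read by the concurrent marker.
using AtomicSlot = std::atomic<intptr_t>;

// Counterpart of Relaxed_Load in the slot loop: an atomic read with
// memory_order_relaxed. The value is never half-written, but no ordering
// is imposed on surrounding loads and stores.
inline intptr_t RelaxedLoadSlot(const AtomicSlot* slot) {
  return slot->load(std::memory_order_relaxed);
}
```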
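For context on `ShouldVisit` (new lines 54-57): the grey-to-black step has to be a single atomic read-modify-write so that, when two markers race on the same object, exactly one of them proceeds to visit its slots. A minimal sketch of that idea with a hypothetical per-object colour field; this is not V8's `ObjectMarking`/`MarkBit` bitmap implementation.

```cpp
#include <atomic>

// Hypothetical colour encoding; V8 packs mark bits into a bitmap instead.
enum Color : int { kWhite = 0, kGrey = 1, kBlack = 2 };

// Returns true for exactly one caller while the object is grey, so only
// that caller goes on to visit the object's slots.
inline bool GreyToBlack(std::atomic<int>* color) {
  int expected = kGrey;
  return color->compare_exchange_strong(expected, kBlack,
                                        std::memory_order_acq_rel);
}
```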
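On `SlotSnapshottingVisitor` (new lines 176-193): it records each slot's current value into a `SlotSnapshot` up front, so the concurrent marker then works over a stable copy even if the main thread keeps mutating the object. A simplified sketch of that pattern, with assumed container types rather than V8's actual `SlotSnapshot`:

```cpp
#include <atomic>
#include <utility>
#include <vector>

using Slot = std::atomic<void*>;

// Assumed, simplified snapshot container: records (slot address, value seen).
struct SlotSnapshot {
  std::vector<std::pair<Slot*, void*>> entries;
  void clear() { entries.clear(); }
  void add(Slot* slot, void* value) { entries.emplace_back(slot, value); }
};

// Record every slot in [start, end) with a relaxed atomic read; the marker
// later iterates snapshot->entries instead of re-reading the live object.
inline void SnapshotSlots(Slot* start, Slot* end, SlotSnapshot* snapshot) {
  snapshot->clear();
  for (Slot* p = start; p < end; p++) {
    snapshot->add(p, p->load(std::memory_order_relaxed));
  }
}
```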