| OLD | NEW |
| 1 // Copyright 2017 the V8 project authors. All rights reserved. | 1 // Copyright 2017 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/concurrent-marking.h" | 5 #include "src/heap/concurrent-marking.h" |
| 6 | 6 |
| 7 #include <stack> | 7 #include <stack> |
| 8 #include <unordered_map> | 8 #include <unordered_map> |
| 9 | 9 |
| 10 #include "src/heap/concurrent-marking-deque.h" | 10 #include "src/heap/concurrent-marking-deque.h" |
| 11 #include "src/heap/heap-inl.h" | 11 #include "src/heap/heap-inl.h" |
| 12 #include "src/heap/heap.h" | 12 #include "src/heap/heap.h" |
| 13 #include "src/heap/marking.h" | 13 #include "src/heap/marking.h" |
| 14 #include "src/heap/objects-visiting-inl.h" | 14 #include "src/heap/objects-visiting-inl.h" |
| 15 #include "src/heap/objects-visiting.h" | 15 #include "src/heap/objects-visiting.h" |
| 16 #include "src/isolate.h" | 16 #include "src/isolate.h" |
| 17 #include "src/locked-queue-inl.h" | 17 #include "src/locked-queue-inl.h" |
| 18 #include "src/utils-inl.h" | 18 #include "src/utils-inl.h" |
| 19 #include "src/utils.h" | 19 #include "src/utils.h" |
| 20 #include "src/v8.h" | 20 #include "src/v8.h" |
| 21 | 21 |
| 22 namespace v8 { | 22 namespace v8 { |
| 23 namespace internal { | 23 namespace internal { |
| 24 | 24 |
| 25 // Helper class for storing in-object slot addresses and values. |
| 26 class SlotSnapshot { |
| 27 public: |
| 28 SlotSnapshot() : number_of_slots_(0) {} |
| 29 int number_of_slots() const { return number_of_slots_; } |
| 30 Object** slot(int i) const { return snapshot_[i].first; } |
| 31 Object* value(int i) const { return snapshot_[i].second; } |
| 32 void clear() { number_of_slots_ = 0; } |
| 33 void add(Object** slot, Object* value) { |
| 34 snapshot_[number_of_slots_].first = slot; |
| 35 snapshot_[number_of_slots_].second = value; |
| 36 ++number_of_slots_; |
| 37 } |
| 38 |
| 39 private: |
| 40 static const int kMaxSnapshotSize = JSObject::kMaxInstanceSize / kPointerSize; |
| 41 int number_of_slots_; |
| 42 std::pair<Object**, Object*> snapshot_[kMaxSnapshotSize]; |
| 43 DISALLOW_COPY_AND_ASSIGN(SlotSnapshot); |
| 44 }; |
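The class above gives the concurrent marker a fixed-size scratch buffer (sized for the largest possible JSObject, so recording never allocates) into which slot/value pairs are captured and later replayed. A minimal standalone sketch of that record-then-replay pattern, using stand-in types rather than V8's Object/HeapObject:

// Standalone sketch, not V8 code: Value stands in for Object.
#include <cstdio>
#include <utility>

struct Value { int payload; };

class MiniSnapshot {
 public:
  int number_of_slots() const { return count_; }
  Value** slot(int i) const { return buf_[i].first; }
  Value* value(int i) const { return buf_[i].second; }
  void clear() { count_ = 0; }
  void add(Value** slot, Value* value) { buf_[count_++] = {slot, value}; }

 private:
  static const int kMax = 64;  // fixed capacity, analogous to kMaxSnapshotSize
  int count_ = 0;
  std::pair<Value**, Value*> buf_[kMax];
};

int main() {
  Value a{1}, b{2};
  Value* fields[2] = {&a, &b};  // stand-in for an object's pointer fields
  MiniSnapshot snapshot;
  snapshot.clear();
  for (Value** p = fields; p < fields + 2; p++) snapshot.add(p, *p);
  // Replay: visit the recorded values without re-reading the object.
  for (int i = 0; i < snapshot.number_of_slots(); i++)
    std::printf("%d\n", snapshot.value(i)->payload);
  return 0;
}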
| 45 |
| 25 class ConcurrentMarkingVisitor final | 46 class ConcurrentMarkingVisitor final |
| 26 : public HeapVisitor<int, ConcurrentMarkingVisitor> { | 47 : public HeapVisitor<int, ConcurrentMarkingVisitor> { |
| 27 public: | 48 public: |
| 28 using BaseClass = HeapVisitor<int, ConcurrentMarkingVisitor>; | 49 using BaseClass = HeapVisitor<int, ConcurrentMarkingVisitor>; |
| 29 | 50 |
| 30 explicit ConcurrentMarkingVisitor(ConcurrentMarkingDeque* deque) | 51 explicit ConcurrentMarkingVisitor(ConcurrentMarkingDeque* deque) |
| 31 : deque_(deque) {} | 52 : deque_(deque) {} |
| 32 | 53 |
| 33 bool ShouldVisit(HeapObject* object) override { | 54 bool ShouldVisit(HeapObject* object) override { |
| 34 return ObjectMarking::GreyToBlack<MarkBit::AccessMode::ATOMIC>( | 55 return ObjectMarking::GreyToBlack<MarkBit::AccessMode::ATOMIC>( |
| 35 object, marking_state(object)); | 56 object, marking_state(object)); |
| 36 } | 57 } |
| 37 | 58 |
| 38 void VisitPointers(HeapObject* host, Object** start, Object** end) override { | 59 void VisitPointers(HeapObject* host, Object** start, Object** end) override { |
| 39 for (Object** p = start; p < end; p++) { | 60 for (Object** p = start; p < end; p++) { |
| 40 Object* object = reinterpret_cast<Object*>( | 61 Object* object = reinterpret_cast<Object*>( |
| 41 base::NoBarrier_Load(reinterpret_cast<const base::AtomicWord*>(p))); | 62 base::NoBarrier_Load(reinterpret_cast<const base::AtomicWord*>(p))); |
| 42 if (!object->IsHeapObject()) continue; | 63 if (!object->IsHeapObject()) continue; |
| 43 MarkObject(HeapObject::cast(object)); | 64 MarkObject(HeapObject::cast(object)); |
| 44 } | 65 } |
| 45 } | 66 } |
| 46 | 67 |
| 68 void VisitPointersInSnapshot(const SlotSnapshot& snapshot) { |
| 69 for (int i = 0; i < snapshot.number_of_slots(); i++) { |
| 70 Object* object = snapshot.value(i); |
| 71 if (!object->IsHeapObject()) continue; |
| 72 MarkObject(HeapObject::cast(object)); |
| 73 } |
| 74 } |
| 75 |
| 47 // =========================================================================== | 76 // =========================================================================== |
| 48 // JS object ================================================================= | 77 // JS object ================================================================= |
| 49 // =========================================================================== | 78 // =========================================================================== |
| 50 | 79 |
| 51 int VisitJSObject(Map* map, JSObject* object) override { | 80 int VisitJSObject(Map* map, JSObject* object) override { |
| 52 // TODO(ulan): implement snapshot iteration. | 81 int size = JSObject::BodyDescriptor::SizeOf(map, object); |
| 53 return BaseClass::VisitJSObject(map, object); | 82 const SlotSnapshot& snapshot = MakeSlotSnapshot(map, object, size); |
| 83 if (!ShouldVisit(object)) return 0; |
| 84 VisitPointersInSnapshot(snapshot); |
| 85 return size; |
| 54 } | 86 } |
| 55 | 87 |
| 56 int VisitJSObjectFast(Map* map, JSObject* object) override { | 88 int VisitJSObjectFast(Map* map, JSObject* object) override { |
| 57 return VisitJSObject(map, object); | 89 return VisitJSObject(map, object); |
| 58 } | 90 } |
| 59 | 91 |
| 60 int VisitJSApiObject(Map* map, JSObject* object) override { | 92 int VisitJSApiObject(Map* map, JSObject* object) override { |
| 61 return VisitJSObject(map, object); | 93 return VisitJSObject(map, object); |
| 62 } | 94 } |
| 63 | 95 |
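The snapshot-based visit above follows a specific order: record the slot values, attempt the grey-to-black transition, and trace the recorded values only if this visitor won the transition (a failed ShouldVisit returns 0). A hedged standalone sketch of that ordering, with stand-in types and a plain vector copy where the real code reads each slot with an atomic (NoBarrier) load:

// Standalone sketch, not V8 API: Node and the colors are stand-ins.
#include <atomic>
#include <vector>

enum Color { kWhite, kGrey, kBlack };

struct Node {
  std::atomic<Color> color{kWhite};
  std::vector<Node*> slots;  // stand-in for in-object pointer fields
};

bool GreyToBlack(Node* n) {
  Color expected = kGrey;
  return n->color.compare_exchange_strong(expected, kBlack);
}

// Mirrors the shape of VisitJSObject: returns 0 if another marker won the race.
int VisitNode(Node* n, std::vector<Node*>* worklist) {
  std::vector<Node*> snapshot = n->slots;  // 1. record the slot values
  if (!GreyToBlack(n)) return 0;           // 2. bail out if no longer grey
  for (Node* child : snapshot) {           // 3. trace the recorded values
    if (child == nullptr) continue;
    Color expected = kWhite;
    if (child->color.compare_exchange_strong(expected, kGrey)) {
      worklist->push_back(child);          // newly grey: queue for later
    }
  }
  return static_cast<int>(snapshot.size());
}

int main() {
  Node parent, child;
  parent.slots.push_back(&child);
  parent.color.store(kGrey);
  std::vector<Node*> worklist;
  VisitNode(&parent, &worklist);  // child ends up grey and on the worklist
  return 0;
}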
| (...skipping 68 matching lines...) |
| 132 } | 164 } |
| 133 | 165 |
| 134 void MarkObject(HeapObject* object) { | 166 void MarkObject(HeapObject* object) { |
| 135 if (ObjectMarking::WhiteToGrey<MarkBit::AccessMode::ATOMIC>( | 167 if (ObjectMarking::WhiteToGrey<MarkBit::AccessMode::ATOMIC>( |
| 136 object, marking_state(object))) { | 168 object, marking_state(object))) { |
| 137 deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kShared); | 169 deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kShared); |
| 138 } | 170 } |
| 139 } | 171 } |
| 140 | 172 |
| 141 private: | 173 private: |
| 174 // Helper class for collecting in-object slot addresses and values. |
| 175 class SlotSnapshottingVisitor final : public ObjectVisitor { |
| 176 public: |
| 177 explicit SlotSnapshottingVisitor(SlotSnapshot* slot_snapshot) |
| 178 : slot_snapshot_(slot_snapshot) { |
| 179 slot_snapshot_->clear(); |
| 180 } |
| 181 |
| 182 void VisitPointers(HeapObject* host, Object** start, |
| 183 Object** end) override { |
| 184 for (Object** p = start; p < end; p++) { |
| 185 Object* object = reinterpret_cast<Object*>( |
| 186 base::NoBarrier_Load(reinterpret_cast<const base::AtomicWord*>(p))); |
| 187 slot_snapshot_->add(p, object); |
| 188 } |
| 189 } |
| 190 |
| 191 private: |
| 192 SlotSnapshot* slot_snapshot_; |
| 193 }; |
| 194 |
| 195 const SlotSnapshot& MakeSlotSnapshot(Map* map, HeapObject* object, int size) { |
| 196 SlotSnapshottingVisitor visitor(&slot_snapshot_); |
| 197 visitor.VisitPointer(object, |
| 198 reinterpret_cast<Object**>(object->map_slot())); |
| 199 JSObject::BodyDescriptor::IterateBody(object, size, &visitor); |
| 200 return slot_snapshot_; |
| 201 } |
| 202 |
| 142 MarkingState marking_state(HeapObject* object) const { | 203 MarkingState marking_state(HeapObject* object) const { |
| 143 return MarkingState::Internal(object); | 204 return MarkingState::Internal(object); |
| 144 } | 205 } |
| 145 | 206 |
| 146 ConcurrentMarkingDeque* deque_; | 207 ConcurrentMarkingDeque* deque_; |
| 208 SlotSnapshot slot_snapshot_; |
| 147 }; | 209 }; |
| 148 | 210 |
| 149 class ConcurrentMarking::Task : public CancelableTask { | 211 class ConcurrentMarking::Task : public CancelableTask { |
| 150 public: | 212 public: |
| 151 Task(Isolate* isolate, ConcurrentMarking* concurrent_marking, | 213 Task(Isolate* isolate, ConcurrentMarking* concurrent_marking, |
| 152 base::Semaphore* on_finish) | 214 base::Semaphore* on_finish) |
| 153 : CancelableTask(isolate), | 215 : CancelableTask(isolate), |
| 154 concurrent_marking_(concurrent_marking), | 216 concurrent_marking_(concurrent_marking), |
| 155 on_finish_(on_finish) {} | 217 on_finish_(on_finish) {} |
| 156 | 218 |
| (...skipping 59 matching lines...) |
| 216 } | 278 } |
| 217 | 279 |
| 218 void ConcurrentMarking::EnsureTaskCompleted() { | 280 void ConcurrentMarking::EnsureTaskCompleted() { |
| 219 if (IsTaskPending()) { | 281 if (IsTaskPending()) { |
| 220 WaitForTaskToComplete(); | 282 WaitForTaskToComplete(); |
| 221 } | 283 } |
| 222 } | 284 } |
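The completion path above relies on the on_finish semaphore handed to the Task. A standalone sketch of that handshake, assuming WaitForTaskToComplete blocks on the semaphore (std::thread and C++20 std::binary_semaphore stand in for CancelableTask and base::Semaphore):

// Standalone sketch, not V8 code; requires C++20 for <semaphore>.
#include <semaphore>
#include <thread>

int main() {
  std::binary_semaphore on_finish(0);

  // Background "task": do some work, then signal completion,
  // analogous to signaling on_finish_ when the marking task finishes.
  std::thread task([&on_finish] {
    // ... concurrent marking work would happen here ...
    on_finish.release();
  });

  // Main thread: ensure the task has completed before proceeding,
  // mirroring EnsureTaskCompleted() -> WaitForTaskToComplete().
  on_finish.acquire();
  task.join();
  return 0;
}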
| 223 | 285 |
| 224 } // namespace internal | 286 } // namespace internal |
| 225 } // namespace v8 | 287 } // namespace v8 |