Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_MARK_COMPACT_H_ | 5 #ifndef V8_HEAP_MARK_COMPACT_H_ |
| 6 #define V8_HEAP_MARK_COMPACT_H_ | 6 #define V8_HEAP_MARK_COMPACT_H_ |
| 7 | 7 |
| 8 #include <deque> | 8 #include <deque> |
| 9 | 9 |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| 11 #include "src/base/platform/condition-variable.h" | 11 #include "src/base/platform/condition-variable.h" |
| 12 #include "src/cancelable-task.h" | 12 #include "src/cancelable-task.h" |
| 13 #include "src/heap/concurrent-marking-deque.h" | |
| 13 #include "src/heap/marking.h" | 14 #include "src/heap/marking.h" |
| 15 #include "src/heap/sequential-marking-deque.h" | |
| 14 #include "src/heap/spaces.h" | 16 #include "src/heap/spaces.h" |
| 15 #include "src/heap/store-buffer.h" | 17 #include "src/heap/store-buffer.h" |
| 16 | 18 |
| 17 namespace v8 { | 19 namespace v8 { |
| 18 namespace internal { | 20 namespace internal { |
| 19 | 21 |
| 20 // Forward declarations. | 22 // Forward declarations. |
| 21 class CodeFlusher; | 23 class CodeFlusher; |
| 22 class HeapObjectVisitor; | 24 class HeapObjectVisitor; |
| 23 class MarkCompactCollector; | 25 class MarkCompactCollector; |
| 24 class MinorMarkCompactCollector; | 26 class MinorMarkCompactCollector; |
| 25 class MarkingVisitor; | 27 class MarkingVisitor; |
| 26 class ThreadLocalTop; | 28 class ThreadLocalTop; |
| 27 | 29 |
| | 30 #ifdef V8_CONCURRENT_MARKING |
| | 31 typedef ConcurrentMarkingDeque MarkingDeque; |
| | 32 #else |
| | 33 typedef SequentialMarkingDeque MarkingDeque; |
| | 34 #endif |
| | 35 | |
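For context on the alias switch added above: callers keep naming `MarkingDeque`, and the build flag decides which implementation they get. A minimal standalone sketch of the same pattern, with hypothetical names (not V8 code):

```cpp
// Minimal sketch of a compile-time alias switch (hypothetical names, not V8 code).
#include <deque>
#include <mutex>

struct SequentialWorklist {
  std::deque<int> items;
  void Push(int v) { items.push_back(v); }
};

struct ConcurrentWorklist {
  std::mutex mutex;
  std::deque<int> items;
  void Push(int v) {
    std::lock_guard<std::mutex> guard(mutex);  // guard against concurrent pushers
    items.push_back(v);
  }
};

// Callers only ever name `Worklist`; the build flag picks the implementation.
#ifdef USE_CONCURRENT_WORKLIST
typedef ConcurrentWorklist Worklist;
#else
typedef SequentialWorklist Worklist;
#endif

int main() {
  Worklist w;
  w.Push(42);  // same call site whether or not the flag is defined
}
```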
| 28 class ObjectMarking : public AllStatic { | 36 class ObjectMarking : public AllStatic { |
| 29 public: | 37 public: |
| 30 V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj, | 38 V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj, |
| 31 const MarkingState& state) { | 39 const MarkingState& state) { |
| 32 const Address address = obj->address(); | 40 const Address address = obj->address(); |
| 33 const MemoryChunk* p = MemoryChunk::FromAddress(address); | 41 const MemoryChunk* p = MemoryChunk::FromAddress(address); |
| 34 return state.bitmap()->MarkBitFromIndex(p->AddressToMarkbitIndex(address)); | 42 return state.bitmap()->MarkBitFromIndex(p->AddressToMarkbitIndex(address)); |
| 35 } | 43 } |
| 36 | 44 |
| 37 static Marking::ObjectColor Color(HeapObject* obj, | 45 static Marking::ObjectColor Color(HeapObject* obj, |
| (...skipping 63 matching lines...) | |
| 101 MarkBit markbit = MarkBitFrom(obj, state); | 109 MarkBit markbit = MarkBitFrom(obj, state); |
| 102 if (!Marking::GreyToBlack<access_mode>(markbit)) return false; | 110 if (!Marking::GreyToBlack<access_mode>(markbit)) return false; |
| 103 state.IncrementLiveBytes<access_mode>(obj->Size()); | 111 state.IncrementLiveBytes<access_mode>(obj->Size()); |
| 104 return true; | 112 return true; |
| 105 } | 113 } |
| 106 | 114 |
| 107 private: | 115 private: |
| 108 DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectMarking); | 116 DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectMarking); |
| 109 }; | 117 }; |
| 110 | 118 |
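The `ObjectMarking` helpers above compute a mark bit from an object address: mask the address down to its containing chunk, turn the in-chunk offset into a bitmap index, and flip bits there (the grey-to-black transition also adds the object's size to the chunk's live-byte count). A rough standalone sketch of that index arithmetic, with made-up constants and types rather than V8's:

```cpp
// Rough sketch of the mark-bit lookup idea (made-up constants/types, not V8's).
#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr uintptr_t kChunkSize = 512 * 1024;   // pretend pages are 512 KB
constexpr uintptr_t kChunkMask = kChunkSize - 1;
constexpr uintptr_t kPointerSize = sizeof(void*);

// One mark bit per pointer-sized word of the chunk.
struct Bitmap {
  uint32_t cells[kChunkSize / kPointerSize / 32];
  bool IsSet(size_t i) const { return (cells[i / 32] >> (i % 32)) & 1; }
  void Set(size_t i) { cells[i / 32] |= 1u << (i % 32); }
};

// The chunk start is recovered by masking off the low bits of any interior address.
inline uintptr_t ChunkFromAddress(uintptr_t addr) { return addr & ~kChunkMask; }

// The mark-bit index is the word offset of the object inside its chunk.
inline size_t AddressToMarkbitIndex(uintptr_t addr) {
  return (addr & kChunkMask) / kPointerSize;
}

int main() {
  static Bitmap bitmap = {};                    // bitmap for one pretend chunk
  uintptr_t object = 0x40000000 + 128;          // pretend object address
  std::printf("chunk base: %#lx\n", (unsigned long)ChunkFromAddress(object));
  bitmap.Set(AddressToMarkbitIndex(object));    // "mark" the object
  std::printf("marked: %d\n", (int)bitmap.IsSet(AddressToMarkbitIndex(object)));
}
```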
| 111 // ---------------------------------------------------------------------------- | |
ulan (2017/04/27 17:49:39): This is moved to sequential-marking-deque.h
| 112 // Marking deque for tracing live objects. | |
| 113 class MarkingDeque { | |
| 114 public: | |
| 115 explicit MarkingDeque(Heap* heap) | |
| 116 : backing_store_(nullptr), | |
| 117 backing_store_committed_size_(0), | |
| 118 array_(nullptr), | |
| 119 top_(0), | |
| 120 bottom_(0), | |
| 121 mask_(0), | |
| 122 overflowed_(false), | |
| 123 in_use_(false), | |
| 124 uncommit_task_pending_(false), | |
| 125 heap_(heap) {} | |
| 126 | |
| 127 void SetUp(); | |
| 128 void TearDown(); | |
| 129 | |
| 130 // Ensures that the marking deque is committed and will stay committed until | |
| 131 // StopUsing() is called. | |
| 132 void StartUsing(); | |
| 133 void StopUsing(); | |
| 134 void Clear(); | |
| 135 | |
| 136 inline bool IsFull() { return ((top_ + 1) & mask_) == bottom_; } | |
| 137 | |
| 138 inline bool IsEmpty() { return top_ == bottom_; } | |
| 139 | |
| 140 bool overflowed() const { return overflowed_; } | |
| 141 | |
| 142 void ClearOverflowed() { overflowed_ = false; } | |
| 143 | |
| 144 void SetOverflowed() { overflowed_ = true; } | |
| 145 | |
| 146 // Push the object on the marking stack if there is room, otherwise mark the | |
| 147 // deque as overflowed and wait for a rescan of the heap. | |
| 148 INLINE(bool Push(HeapObject* object)) { | |
| 149 DCHECK(object->IsHeapObject()); | |
| 150 if (IsFull()) { | |
| 151 SetOverflowed(); | |
| 152 return false; | |
| 153 } else { | |
| 154 array_[top_] = object; | |
| 155 top_ = ((top_ + 1) & mask_); | |
| 156 return true; | |
| 157 } | |
| 158 } | |
| 159 | |
| 160 INLINE(HeapObject* Pop()) { | |
| 161 DCHECK(!IsEmpty()); | |
| 162 top_ = ((top_ - 1) & mask_); | |
| 163 HeapObject* object = array_[top_]; | |
| 164 DCHECK(object->IsHeapObject()); | |
| 165 return object; | |
| 166 } | |
| 167 | |
| 168 // Unshift the object into the marking stack if there is room, otherwise mark | |
| 169 // the deque as overflowed and wait for a rescan of the heap. | |
| 170 INLINE(bool Unshift(HeapObject* object)) { | |
| 171 DCHECK(object->IsHeapObject()); | |
| 172 if (IsFull()) { | |
| 173 SetOverflowed(); | |
| 174 return false; | |
| 175 } else { | |
| 176 bottom_ = ((bottom_ - 1) & mask_); | |
| 177 array_[bottom_] = object; | |
| 178 return true; | |
| 179 } | |
| 180 } | |
| 181 | |
| 182 template <typename Callback> | |
| 183 void Iterate(Callback callback) { | |
| 184 int i = bottom_; | |
| 185 while (i != top_) { | |
| 186 callback(array_[i]); | |
| 187 i = (i + 1) & mask_; | |
| 188 } | |
| 189 } | |
| 190 | |
| 191 HeapObject** array() { return array_; } | |
| 192 int bottom() { return bottom_; } | |
| 193 int top() { return top_; } | |
| 194 int mask() { return mask_; } | |
| 195 void set_top(int top) { top_ = top; } | |
| 196 | |
| 197 private: | |
| 198 // This task uncommits the marking_deque backing store if | |
| 199 // marking_deque->in_use_ is false. | |
| 200 class UncommitTask : public CancelableTask { | |
| 201 public: | |
| 202 explicit UncommitTask(Isolate* isolate, MarkingDeque* marking_deque) | |
| 203 : CancelableTask(isolate), marking_deque_(marking_deque) {} | |
| 204 | |
| 205 private: | |
| 206 // CancelableTask override. | |
| 207 void RunInternal() override { | |
| 208 base::LockGuard<base::Mutex> guard(&marking_deque_->mutex_); | |
| 209 if (!marking_deque_->in_use_) { | |
| 210 marking_deque_->Uncommit(); | |
| 211 } | |
| 212 marking_deque_->uncommit_task_pending_ = false; | |
| 213 } | |
| 214 | |
| 215 MarkingDeque* marking_deque_; | |
| 216 DISALLOW_COPY_AND_ASSIGN(UncommitTask); | |
| 217 }; | |
| 218 | |
| 219 static const size_t kMaxSize = 4 * MB; | |
| 220 static const size_t kMinSize = 256 * KB; | |
| 221 | |
| 222 // Must be called with mutex lock. | |
| 223 void EnsureCommitted(); | |
| 224 | |
| 225 // Must be called with mutex lock. | |
| 226 void Uncommit(); | |
| 227 | |
| 228 // Must be called with mutex lock. | |
| 229 void StartUncommitTask(); | |
| 230 | |
| 231 base::Mutex mutex_; | |
| 232 | |
| 233 base::VirtualMemory* backing_store_; | |
| 234 size_t backing_store_committed_size_; | |
| 235 HeapObject** array_; | |
| 236 // array_[(top - 1) & mask_] is the top element in the deque. The Deque is | |
| 237 // empty when top_ == bottom_. It is full when top_ + 1 == bottom_ | |
| 238 // (mod mask_ + 1). | |
| 239 int top_; | |
| 240 int bottom_; | |
| 241 int mask_; | |
| 242 bool overflowed_; | |
| 243 // in_use_ == true after taking mutex lock implies that the marking deque is | |
| 244 // committed and will stay committed at least until in_use_ == false. | |
| 245 bool in_use_; | |
| 246 bool uncommit_task_pending_; | |
| 247 Heap* heap_; | |
| 248 | |
| 249 DISALLOW_COPY_AND_ASSIGN(MarkingDeque); | |
| 250 }; | |
| 251 | |
| 252 | |
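The deleted `MarkingDeque` above (now in sequential-marking-deque.h per the review comment) is a fixed-capacity ring buffer: `mask_` is one less than a power-of-two capacity, `& mask_` wraps the indices, and a push that finds the buffer full sets `overflowed_` instead of growing. A stripped-down sketch of that arithmetic, assuming an `int` payload and in-place storage (not the real class, which commits and uncommits a virtual-memory backing store):

```cpp
// Minimal sketch of the ring-buffer arithmetic used by the marking deque
// (hypothetical standalone version: int payload, fixed in-place storage).
#include <cassert>
#include <cstdio>

class RingDeque {
 public:
  bool Push(int value) {             // push at top_ (like MarkingDeque::Push)
    if (IsFull()) { overflowed_ = true; return false; }
    array_[top_] = value;
    top_ = (top_ + 1) & kMask;
    return true;
  }
  int Pop() {                        // pop from top_ (the LIFO end)
    assert(!IsEmpty());
    top_ = (top_ - 1) & kMask;
    return array_[top_];
  }
  bool Unshift(int value) {          // push at bottom_ (the other end)
    if (IsFull()) { overflowed_ = true; return false; }
    bottom_ = (bottom_ - 1) & kMask;
    array_[bottom_] = value;
    return true;
  }
  bool IsEmpty() const { return top_ == bottom_; }
  // One slot is kept free so "full" and "empty" are distinguishable.
  bool IsFull() const { return ((top_ + 1) & kMask) == bottom_; }
  bool overflowed() const { return overflowed_; }

 private:
  static const int kCapacity = 8;    // must be a power of two
  static const int kMask = kCapacity - 1;
  int array_[kCapacity];
  int top_ = 0, bottom_ = 0;
  bool overflowed_ = false;
};

int main() {
  RingDeque d;
  for (int i = 0; i < 10; i++) d.Push(i);       // only 7 fit; the rest overflow
  std::printf("overflowed: %d, top pop: %d\n", d.overflowed(), d.Pop());
}
```

Keeping one slot unused is what lets `IsFull()` and `IsEmpty()` remain distinguishable with plain index comparisons.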
| 253 // CodeFlusher collects candidates for code flushing during marking and | 119 // CodeFlusher collects candidates for code flushing during marking and |
| 254 // processes those candidates after marking has completed in order to | 120 // processes those candidates after marking has completed in order to |
| 255 // reset those functions referencing code objects that would otherwise | 121 // reset those functions referencing code objects that would otherwise |
| 256 // be unreachable. Code objects can be referenced in two ways: | 122 // be unreachable. Code objects can be referenced in two ways: |
| 257 // - SharedFunctionInfo references unoptimized code. | 123 // - SharedFunctionInfo references unoptimized code. |
| 258 // - JSFunction references either unoptimized or optimized code. | 124 // - JSFunction references either unoptimized or optimized code. |
| 259 // We are not allowed to flush unoptimized code for functions that got | 125 // We are not allowed to flush unoptimized code for functions that got |
| 260 // optimized or inlined into optimized code, because we might bailout | 126 // optimized or inlined into optimized code, because we might bailout |
| 261 // into the unoptimized code again during deoptimization. | 127 // into the unoptimized code again during deoptimization. |
| 262 class CodeFlusher { | 128 class CodeFlusher { |
| (...skipping 573 matching lines...) | |
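The CodeFlusher comment above describes a two-phase pattern: record flushing candidates while marking walks the heap, then, once marking has finished, reset only those whose code turned out to be unreachable. A generic sketch of that collect-then-process shape, with hypothetical types (not the real CodeFlusher implementation):

```cpp
// Generic sketch of "collect candidates during one phase, process them after
// the phase ends" (hypothetical types, not the real CodeFlusher).
#include <cstdio>
#include <vector>

struct Function {
  const char* name;
  bool code_is_reachable;   // stands in for "code object still referenced"
  bool flushed;
};

class Flusher {
 public:
  // Phase 1: while walking the heap, remember possible victims.
  void AddCandidate(Function* f) { candidates_.push_back(f); }

  // Phase 2: after the walk, flush only candidates whose code turned out
  // to be unreachable, then forget the list.
  void ProcessCandidates() {
    for (Function* f : candidates_) {
      if (!f->code_is_reachable) f->flushed = true;
    }
    candidates_.clear();
  }

 private:
  std::vector<Function*> candidates_;
};

int main() {
  Function live = {"live_fn", true, false};
  Function dead = {"dead_fn", false, false};
  Flusher flusher;
  flusher.AddCandidate(&live);
  flusher.AddCandidate(&dead);
  flusher.ProcessCandidates();
  std::printf("%s flushed: %d, %s flushed: %d\n",
              live.name, live.flushed, dead.name, dead.flushed);
}
```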
| 836 ~EvacuationScope() { collector_->set_evacuation(false); } | 702 ~EvacuationScope() { collector_->set_evacuation(false); } |
| 837 | 703 |
| 838 private: | 704 private: |
| 839 MarkCompactCollector* collector_; | 705 MarkCompactCollector* collector_; |
| 840 }; | 706 }; |
| 841 | 707 |
| 842 } // namespace internal | 708 } // namespace internal |
| 843 } // namespace v8 | 709 } // namespace v8 |
| 844 | 710 |
| 845 #endif // V8_HEAP_MARK_COMPACT_H_ | 711 #endif // V8_HEAP_MARK_COMPACT_H_ |