OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef MarkingVisitorImpl_h | 5 #ifndef MarkingVisitorImpl_h |
6 #define MarkingVisitorImpl_h | 6 #define MarkingVisitorImpl_h |
7 | 7 |
8 #include "platform/heap/Heap.h" | 8 #include "platform/heap/Heap.h" |
9 #include "platform/heap/ThreadState.h" | 9 #include "platform/heap/ThreadState.h" |
10 #include "platform/heap/Visitor.h" | 10 #include "platform/heap/Visitor.h" |
(...skipping 26 matching lines...) |
37 | 37 |
38 if (header->isMarked()) | 38 if (header->isMarked()) |
39 return; | 39 return; |
40 | 40 |
41 ASSERT(ThreadState::current()->isInGC()); | 41 ASSERT(ThreadState::current()->isInGC()); |
42 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); | 42 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); |
43 | 43 |
44 header->mark(); | 44 header->mark(); |
45 | 45 |
46 if (callback) | 46 if (callback) |
47 Heap::pushTraceCallback(const_cast<void*>(objectPointer), callback); | 47 ThreadHeap::pushTraceCallback(const_cast<void*>(objectPointer), callback); |
48 } | 48 } |
49 | 49 |
50 inline void mark(const void* objectPointer, TraceCallback callback) | 50 inline void mark(const void* objectPointer, TraceCallback callback) |
51 { | 51 { |
52 if (!objectPointer) | 52 if (!objectPointer) |
53 return; | 53 return; |
54 HeapObjectHeader* header = HeapObjectHeader::fromPayload(objectPointer); | 54 HeapObjectHeader* header = HeapObjectHeader::fromPayload(objectPointer); |
55 markHeader(header, header->payload(), callback); | 55 markHeader(header, header->payload(), callback); |
56 } | 56 } |
57 | 57 |
58 inline void registerDelayedMarkNoTracing(const void* objectPointer) | 58 inline void registerDelayedMarkNoTracing(const void* objectPointer) |
59 { | 59 { |
60 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); | 60 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); |
61 Heap::pushPostMarkingCallback(const_cast<void*>(objectPointer), &markNoTracingCallback); | 61 ThreadHeap::pushPostMarkingCallback(const_cast<void*>(objectPointer), &markNoTracingCallback); |
62 } | 62 } |
63 | 63 |
64 inline void registerWeakMembers(const void* closure, const void* objectPointer, WeakCallback callback) | 64 inline void registerWeakMembers(const void* closure, const void* objectPointer, WeakCallback callback) |
65 { | 65 { |
66 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); | 66 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); |
67 // We don't want to run weak processings when taking a snapshot. | 67 // We don't want to run weak processings when taking a snapshot. |
68 if (toDerived()->getMarkingMode() == Visitor::SnapshotMarking) | 68 if (toDerived()->getMarkingMode() == Visitor::SnapshotMarking) |
69 return; | 69 return; |
70 Heap::pushThreadLocalWeakCallback(const_cast<void*>(closure), const_cast<void*>(objectPointer), callback); | 70 ThreadHeap::pushThreadLocalWeakCallback(const_cast<void*>(closure), const_cast<void*>(objectPointer), callback); |
71 } | 71 } |
72 | 72 |
73 inline void registerWeakTable(const void* closure, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback) | 73 inline void registerWeakTable(const void* closure, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback) |
74 { | 74 { |
75 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); | 75 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); |
76 Heap::registerWeakTable(const_cast<void*>(closure), iterationCallback, iterationDoneCallback); | 76 ThreadHeap::registerWeakTable(const_cast<void*>(closure), iterationCallback, iterationDoneCallback); |
77 } | 77 } |
78 | 78 |
79 #if ENABLE(ASSERT) | 79 #if ENABLE(ASSERT) |
80 inline bool weakTableRegistered(const void* closure) | 80 inline bool weakTableRegistered(const void* closure) |
81 { | 81 { |
82 return Heap::weakTableRegistered(closure); | 82 return ThreadHeap::weakTableRegistered(closure); |
83 } | 83 } |
84 #endif | 84 #endif |
85 | 85 |
86 inline bool ensureMarked(const void* objectPointer) | 86 inline bool ensureMarked(const void* objectPointer) |
87 { | 87 { |
88 if (!objectPointer) | 88 if (!objectPointer) |
89 return false; | 89 return false; |
90 if (!toDerived()->shouldMarkObject(objectPointer)) | 90 if (!toDerived()->shouldMarkObject(objectPointer)) |
91 return false; | 91 return false; |
92 #if ENABLE(ASSERT) | 92 #if ENABLE(ASSERT) |
(...skipping 17 matching lines...) |
110 return static_cast<Derived*>(this); | 110 return static_cast<Derived*>(this); |
111 } | 111 } |
112 | 112 |
113 protected: | 113 protected: |
114 inline void registerWeakCellWithCallback(void** cell, WeakCallback callback) | 114 inline void registerWeakCellWithCallback(void** cell, WeakCallback callback) |
115 { | 115 { |
116 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); | 116 ASSERT(toDerived()->getMarkingMode() != Visitor::WeakProcessing); |
117 // We don't want to run weak processings when taking a snapshot. | 117 // We don't want to run weak processings when taking a snapshot. |
118 if (toDerived()->getMarkingMode() == Visitor::SnapshotMarking) | 118 if (toDerived()->getMarkingMode() == Visitor::SnapshotMarking) |
119 return; | 119 return; |
120 Heap::pushGlobalWeakCallback(cell, callback); | 120 ThreadHeap::pushGlobalWeakCallback(cell, callback); |
121 } | 121 } |
122 | 122 |
123 private: | 123 private: |
124 static void markNoTracingCallback(Visitor* visitor, void* object) | 124 static void markNoTracingCallback(Visitor* visitor, void* object) |
125 { | 125 { |
126 visitor->markNoTracing(object); | 126 visitor->markNoTracing(object); |
127 } | 127 } |
128 }; | 128 }; |
129 | 129 |
130 } // namespace blink | 130 } // namespace blink |
131 | 131 |
132 #endif | 132 #endif |