OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 19 matching lines...) |
30 | 30 |
31 #ifndef ThreadState_h | 31 #ifndef ThreadState_h |
32 #define ThreadState_h | 32 #define ThreadState_h |
33 | 33 |
34 #include "platform/PlatformExport.h" | 34 #include "platform/PlatformExport.h" |
35 #include "platform/heap/BlinkGC.h" | 35 #include "platform/heap/BlinkGC.h" |
36 #include "platform/heap/BlinkGCInterruptor.h" | 36 #include "platform/heap/BlinkGCInterruptor.h" |
37 #include "platform/heap/ThreadingTraits.h" | 37 #include "platform/heap/ThreadingTraits.h" |
38 #include "public/platform/WebThread.h" | 38 #include "public/platform/WebThread.h" |
39 #include "wtf/AddressSanitizer.h" | 39 #include "wtf/AddressSanitizer.h" |
| 40 #include "wtf/Allocator.h" |
40 #include "wtf/Forward.h" | 41 #include "wtf/Forward.h" |
41 #include "wtf/HashMap.h" | 42 #include "wtf/HashMap.h" |
42 #include "wtf/HashSet.h" | 43 #include "wtf/HashSet.h" |
43 #include "wtf/ThreadSpecific.h" | 44 #include "wtf/ThreadSpecific.h" |
44 #include "wtf/Threading.h" | 45 #include "wtf/Threading.h" |
45 #include "wtf/ThreadingPrimitives.h" | 46 #include "wtf/ThreadingPrimitives.h" |
46 | 47 |
47 namespace v8 { | 48 namespace v8 { |
48 class Isolate; | 49 class Isolate; |
49 }; | 50 }; |
(...skipping 62 matching lines...) |
112 } \ | 113 } \ |
113 using UsingPreFinalizerMacroNeedsTrailingSemiColon = char | 114 using UsingPreFinalizerMacroNeedsTrailingSemiColon = char |
114 | 115 |
115 #if ENABLE(OILPAN) | 116 #if ENABLE(OILPAN) |
116 #define WILL_BE_USING_PRE_FINALIZER(Class, method) USING_PRE_FINALIZER(Class, method) | 117 #define WILL_BE_USING_PRE_FINALIZER(Class, method) USING_PRE_FINALIZER(Class, method) |
117 #else | 118 #else |
118 #define WILL_BE_USING_PRE_FINALIZER(Class, method) | 119 #define WILL_BE_USING_PRE_FINALIZER(Class, method) |
119 #endif | 120 #endif |
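The pre-finalizer macros above let a garbage-collected class register a method that runs before its memory is swept, so it can release external resources while the heap objects it references are still valid. A minimal sketch of the usage pattern is below; the SocketWrapper class and its dispose()/closeNativeHandle() methods are hypothetical, GarbageCollectedFinalized and DEFINE_INLINE_TRACE come from other Oilpan headers rather than this one, and the exact registration mechanics vary between Blink revisions, so this is illustrative only.

// Hypothetical example: a heap object that must close a native handle
// before the sweeper reclaims its memory.
class SocketWrapper final : public GarbageCollectedFinalized<SocketWrapper> {
    USING_PRE_FINALIZER(SocketWrapper, dispose);
public:
    void dispose()
    {
        // Runs before sweeping; other heap objects are still safe to touch.
        closeNativeHandle();
    }
    DEFINE_INLINE_TRACE() { }
private:
    void closeNativeHandle();
};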
120 | 121 |
121 class PLATFORM_EXPORT ThreadState { | 122 class PLATFORM_EXPORT ThreadState { |
| 123 USING_FAST_MALLOC(ThreadState); |
122 WTF_MAKE_NONCOPYABLE(ThreadState); | 124 WTF_MAKE_NONCOPYABLE(ThreadState); |
123 public: | 125 public: |
124 typedef std::pair<void*, PreFinalizerCallback> PreFinalizer; | 126 typedef std::pair<void*, PreFinalizerCallback> PreFinalizer; |
125 | 127 |
126 // See setGCState() for possible state transitions. | 128 // See setGCState() for possible state transitions. |
127 enum GCState { | 129 enum GCState { |
128 NoGCScheduled, | 130 NoGCScheduled, |
129 IdleGCScheduled, | 131 IdleGCScheduled, |
130 PreciseGCScheduled, | 132 PreciseGCScheduled, |
131 FullGCScheduled, | 133 FullGCScheduled, |
132 PageNavigationGCScheduled, | 134 PageNavigationGCScheduled, |
133 GCRunning, | 135 GCRunning, |
134 EagerSweepScheduled, | 136 EagerSweepScheduled, |
135 LazySweepScheduled, | 137 LazySweepScheduled, |
136 Sweeping, | 138 Sweeping, |
137 SweepingAndIdleGCScheduled, | 139 SweepingAndIdleGCScheduled, |
138 SweepingAndPreciseGCScheduled, | 140 SweepingAndPreciseGCScheduled, |
139 }; | 141 }; |
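The GCState enum encodes both the scheduled GC type and the sweeping phase in one value, so callers typically switch over it to decide whether work may proceed. A minimal sketch, assuming gcState() is the accessor declared further down in this header (not visible in this excerpt):

// Sketch: treat all three sweeping states as "sweeping in progress".
bool sweepingInProgress(ThreadState* state)
{
    switch (state->gcState()) {
    case ThreadState::Sweeping:
    case ThreadState::SweepingAndIdleGCScheduled:
    case ThreadState::SweepingAndPreciseGCScheduled:
        return true;
    default:
        return false;
    }
}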
140 | 142 |
141 // The NoAllocationScope class is used in debug mode to catch unwanted | 143 // The NoAllocationScope class is used in debug mode to catch unwanted |
142 // allocations. E.g. allocations during GC. | 144 // allocations. E.g. allocations during GC. |
143 class NoAllocationScope final { | 145 class NoAllocationScope final { |
| 146 STACK_ALLOCATED(); |
144 public: | 147 public: |
145 explicit NoAllocationScope(ThreadState* state) : m_state(state) | 148 explicit NoAllocationScope(ThreadState* state) : m_state(state) |
146 { | 149 { |
147 m_state->enterNoAllocationScope(); | 150 m_state->enterNoAllocationScope(); |
148 } | 151 } |
149 ~NoAllocationScope() | 152 ~NoAllocationScope() |
150 { | 153 { |
151 m_state->leaveNoAllocationScope(); | 154 m_state->leaveNoAllocationScope(); |
152 } | 155 } |
153 private: | 156 private: |
154 ThreadState* m_state; | 157 ThreadState* m_state; |
155 }; | 158 }; |
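NoAllocationScope is an RAII guard: the constructor enters the no-allocation region on the given ThreadState and the destructor leaves it, so debug builds can assert if any Oilpan allocation happens in between. A minimal usage sketch (the surrounding function is illustrative):

void doBookkeepingThatMustNotAllocate(ThreadState* state)
{
    ThreadState::NoAllocationScope noAllocation(state);
    // Any attempt to allocate on the Oilpan heap here trips an assert
    // in debug builds; the guard is released when the scope exits.
}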
156 | 159 |
157 class SweepForbiddenScope final { | 160 class SweepForbiddenScope final { |
| 161 STACK_ALLOCATED(); |
158 public: | 162 public: |
159 explicit SweepForbiddenScope(ThreadState* state) : m_state(state) | 163 explicit SweepForbiddenScope(ThreadState* state) : m_state(state) |
160 { | 164 { |
161 ASSERT(!m_state->m_sweepForbidden); | 165 ASSERT(!m_state->m_sweepForbidden); |
162 m_state->m_sweepForbidden = true; | 166 m_state->m_sweepForbidden = true; |
163 } | 167 } |
164 ~SweepForbiddenScope() | 168 ~SweepForbiddenScope() |
165 { | 169 { |
166 ASSERT(m_state->m_sweepForbidden); | 170 ASSERT(m_state->m_sweepForbidden); |
167 m_state->m_sweepForbidden = false; | 171 m_state->m_sweepForbidden = false; |
(...skipping 204 matching lines...) |
372 void visitStack(Visitor*); | 376 void visitStack(Visitor*); |
373 | 377 |
374 // Visit the asan fake stack frame corresponding to a slot on the | 378 // Visit the asan fake stack frame corresponding to a slot on the |
375 // real machine stack if there is one. | 379 // real machine stack if there is one. |
376 void visitAsanFakeStackForPointer(Visitor*, Address); | 380 void visitAsanFakeStackForPointer(Visitor*, Address); |
377 | 381 |
378 // Visit all persistents allocated on this thread. | 382 // Visit all persistents allocated on this thread. |
379 void visitPersistents(Visitor*); | 383 void visitPersistents(Visitor*); |
380 | 384 |
381 struct GCSnapshotInfo { | 385 struct GCSnapshotInfo { |
| 386 STACK_ALLOCATED(); |
382 GCSnapshotInfo(size_t numObjectTypes); | 387 GCSnapshotInfo(size_t numObjectTypes); |
383 | 388 |
384 // Map from gcInfoIndex (vector-index) to count/size. | 389 // Map from gcInfoIndex (vector-index) to count/size. |
385 Vector<int> liveCount; | 390 Vector<int> liveCount; |
386 Vector<int> deadCount; | 391 Vector<int> deadCount; |
387 Vector<size_t> liveSize; | 392 Vector<size_t> liveSize; |
388 Vector<size_t> deadSize; | 393 Vector<size_t> deadSize; |
389 }; | 394 }; |
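GCSnapshotInfo holds four parallel vectors indexed by gcInfoIndex, so recording an object during a heap snapshot amounts to bumping the counters at that type's index. A hedged sketch; the recordObject helper is hypothetical and not part of this header:

// Hypothetical helper: tally one object of the type identified by gcInfoIndex.
void recordObject(ThreadState::GCSnapshotInfo& info, size_t gcInfoIndex,
                  size_t objectSize, bool isLive)
{
    if (isLive) {
        info.liveCount[gcInfoIndex]++;
        info.liveSize[gcInfoIndex] += objectSize;
    } else {
        info.deadCount[gcInfoIndex]++;
        info.deadSize[gcInfoIndex] += objectSize;
    }
}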
390 | 395 |
391 void pushThreadLocalWeakCallback(void*, WeakCallback); | 396 void pushThreadLocalWeakCallback(void*, WeakCallback); |
(...skipping 279 matching lines...) |
671 // with multiple types of vectors. This won't be an issue in practice, | 676 // with multiple types of vectors. This won't be an issue in practice, |
672 // since there will be less than 2^8 types of objects in common cases. | 677 // since there will be less than 2^8 types of objects in common cases. |
673 static const int likelyToBePromptlyFreedArraySize = (1 << 8); | 678 static const int likelyToBePromptlyFreedArraySize = (1 << 8); |
674 static const int likelyToBePromptlyFreedArrayMask = likelyToBePromptlyFreedArraySize - 1; | 679 static const int likelyToBePromptlyFreedArrayMask = likelyToBePromptlyFreedArraySize - 1; |
675 OwnPtr<int[]> m_likelyToBePromptlyFreed; | 680 OwnPtr<int[]> m_likelyToBePromptlyFreed; |
676 }; | 681 }; |
677 | 682 |
678 template<ThreadAffinity affinity> class ThreadStateFor; | 683 template<ThreadAffinity affinity> class ThreadStateFor; |
679 | 684 |
680 template<> class ThreadStateFor<MainThreadOnly> { | 685 template<> class ThreadStateFor<MainThreadOnly> { |
| 686 STATIC_ONLY(ThreadStateFor); |
681 public: | 687 public: |
682 static ThreadState* state() | 688 static ThreadState* state() |
683 { | 689 { |
684 // This specialization must only be used from the main thread. | 690 // This specialization must only be used from the main thread. |
685 ASSERT(ThreadState::current()->isMainThread()); | 691 ASSERT(ThreadState::current()->isMainThread()); |
686 return ThreadState::mainThreadState(); | 692 return ThreadState::mainThreadState(); |
687 } | 693 } |
688 }; | 694 }; |
689 | 695 |
690 template<> class ThreadStateFor<AnyThread> { | 696 template<> class ThreadStateFor<AnyThread> { |
| 697 STATIC_ONLY(ThreadStateFor); |
691 public: | 698 public: |
692 static ThreadState* state() { return ThreadState::current(); } | 699 static ThreadState* state() { return ThreadState::current(); } |
693 }; | 700 }; |
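The two ThreadStateFor specializations let callers pick the cheapest lookup their threading contract allows: MainThreadOnly returns the cached main-thread state (and asserts it is used on the main thread), while AnyThread falls back to the thread-local ThreadState::current(). Generic code usually selects the specialization through a type's threading trait, roughly as sketched below; ThreadingTrait comes from ThreadingTraits.h (included above), and the wrapper function itself is illustrative:

// Illustrative: resolve the ThreadState appropriate for T's thread affinity.
template<typename T>
ThreadState* threadStateForType()
{
    return ThreadStateFor<ThreadingTrait<T>::Affinity>::state();
}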
694 | 701 |
695 } // namespace blink | 702 } // namespace blink |
696 | 703 |
697 #endif // ThreadState_h | 704 #endif // ThreadState_h |