OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ | 5 #ifndef BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ |
6 #define BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ | 6 #define BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ |
7 | 7 |
| 8 #include <map> |
8 #include <string> | 9 #include <string> |
9 #include <vector> | 10 #include <vector> |
10 | 11 |
11 #include "base/atomicops.h" | 12 #include "base/atomicops.h" |
12 #include "base/base_export.h" | 13 #include "base/base_export.h" |
13 #include "base/containers/small_map.h" | |
14 #include "base/trace_event/trace_event_impl.h" | 14 #include "base/trace_event/trace_event_impl.h" |
15 | 15 |
16 namespace base { | 16 namespace base { |
17 namespace trace_event { | 17 namespace trace_event { |
18 | 18 |
19 // When heap profiling is enabled, tracing keeps track of the allocation | 19 // When heap profiling is enabled, tracing keeps track of the allocation |
20 // context for each allocation intercepted. It is generated by the | 20 // context for each allocation intercepted. It is generated by the |
21 // |AllocationContextTracker| which keeps stacks of context in TLS. | 21 // |AllocationContextTracker| which keeps stacks of context in TLS. |
22 // The tracker is initialized lazily. | 22 // The tracker is initialized lazily. |
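A minimal sketch of what "kept in TLS" and "initialized lazily" can look like in practice. This is not the actual definition (which lives in the .cc file and, per the comment in capture_enabled() below, uses a TLS slot from base); a plain C++11 thread_local is used here only to illustrate the shape of the private GetThreadLocalTracker() declared further down.

// Sketch only: one tracker per thread, created lazily on first use.
// static
AllocationContextTracker* AllocationContextTracker::GetThreadLocalTracker() {
  static thread_local AllocationContextTracker* tracker = nullptr;
  if (!tracker)
    tracker = new AllocationContextTracker();  // Never freed in this sketch.
  return tracker;
}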
23 | 23 |
(...skipping 110 matching lines...)
134 | 134 |
135 DISALLOW_COPY_AND_ASSIGN(StackFrameDeduplicator); | 135 DISALLOW_COPY_AND_ASSIGN(StackFrameDeduplicator); |
136 }; | 136 }; |
137 | 137 |
138 // The |AllocationContext| is context metadata that is kept for every allocation | 138 // The |AllocationContext| is context metadata that is kept for every allocation |
139 // when heap profiling is enabled. To simplify memory management for | 139 // when heap profiling is enabled. To simplify memory management for |
140 // bookkeeping, this struct has a fixed size. All |const char*|s here | 140 // bookkeeping, this struct has a fixed size. All |const char*|s here |
141 // must have static lifetime. | 141 // must have static lifetime. |
142 struct BASE_EXPORT AllocationContext { | 142 struct BASE_EXPORT AllocationContext { |
143 Backtrace backtrace; | 143 Backtrace backtrace; |
144 | |
145 // There is room for two arbitrary context fields, which can be set by the | |
146 // |TRACE_ALLOCATION_CONTEXT| macro. A nullptr key indicates that the field is | |
147 // unused. | |
148 std::pair<const char*, const char*> fields[2]; | |
149 }; | 144 }; |
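Because the struct has a fixed size and its strings have static lifetime, a snapshot can be copied and stored by value without any ownership bookkeeping. A hedged illustration; the table and function below are hypothetical and not part of the profiler:

#include <unordered_map>
#include <utility>

// Hypothetical bookkeeping table: allocation address -> captured context.
// A plain value copy is safe: the struct is fixed-size and only points at
// static-lifetime strings.
std::unordered_map<const void*, base::trace_event::AllocationContext>
    g_live_allocations;

void RecordAllocation(const void* address,
                      const base::trace_event::AllocationContext& context) {
  g_live_allocations.insert(std::make_pair(address, context));
}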
150 | 145 |
151 // The allocation context tracker keeps track of thread-local context for heap | 146 // The allocation context tracker keeps track of thread-local context for heap |
152 // profiling. It includes a pseudo stack of trace events, and it might contain | 147 // profiling. It includes a pseudo stack of trace events. On every allocation |
153 // arbitrary (key, value) context. On every allocation the tracker provides a | 148 // the tracker provides a snapshot of its context in the form of an |
154 // snapshot of its context in the form of an |AllocationContext| that is to be | 149 // |AllocationContext| that is to be stored together with the allocation |
155 // stored together with the allocation details. | 150 // details. |
156 class BASE_EXPORT AllocationContextTracker { | 151 class BASE_EXPORT AllocationContextTracker { |
157 public: | 152 public: |
158 // Globally enables capturing allocation context. | 153 // Globally enables capturing allocation context. |
159 // TODO(ruuda): Should this be replaced by |EnableCapturing| in the future? | 154 // TODO(ruuda): Should this be replaced by |EnableCapturing| in the future? |
160 // Or at least have something that guards against enable -> disable -> enable? | 155 // Or at least have something that guards against enable -> disable -> enable? |
161 static void SetCaptureEnabled(bool enabled); | 156 static void SetCaptureEnabled(bool enabled); |
162 | 157 |
163 // Returns whether capturing allocation context is enabled globally. | 158 // Returns whether capturing allocation context is enabled globally. |
164 inline static bool capture_enabled() { | 159 inline static bool capture_enabled() { |
165 // A little lag after heap profiling is enabled or disabled is fine, it is | 160 // A little lag after heap profiling is enabled or disabled is fine, it is |
166 // more important that the check is as cheap as possible when capturing is | 161 // more important that the check is as cheap as possible when capturing is |
167 // not enabled, so do not issue a memory barrier in the fast path. | 162 // not enabled, so do not issue a memory barrier in the fast path. |
168 if (subtle::NoBarrier_Load(&capture_enabled_) == 0) | 163 if (subtle::NoBarrier_Load(&capture_enabled_) == 0) |
169 return false; | 164 return false; |
170 | 165 |
171 // In the slow path, an acquire load is required to pair with the release | 166 // In the slow path, an acquire load is required to pair with the release |
172 // store in |SetCaptureEnabled|. This is to ensure that the TLS slot for | 167 // store in |SetCaptureEnabled|. This is to ensure that the TLS slot for |
173 // the thread-local allocation context tracker has been initialized if | 168 // the thread-local allocation context tracker has been initialized if |
174 // |capture_enabled| returns true. | 169 // |capture_enabled| returns true. |
175 return subtle::Acquire_Load(&capture_enabled_) != 0; | 170 return subtle::Acquire_Load(&capture_enabled_) != 0; |
176 } | 171 } |
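For context, a hedged sketch of the setter side of this acquire/release pairing. The real definition is in the .cc file and may differ in detail, but it has to satisfy the ordering constraint stated in the comment above: any setup readers depend on must happen before the flag is published with a release store.

// Sketch only.
// static
void AllocationContextTracker::SetCaptureEnabled(bool enabled) {
  if (enabled)
    GetThreadLocalTracker();  // Assumption: forces lazy TLS initialization.
  subtle::Release_Store(&capture_enabled_, enabled ? 1 : 0);
}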
177 | 172 |
178 // Pushes a frame onto the thread-local pseudo stack. | 173 // Pushes a frame onto the thread-local pseudo stack. |
179 static void PushPseudoStackFrame(StackFrame frame); | 174 static void PushPseudoStackFrame(StackFrame frame); |
180 | 175 |
181 // Pops a frame from the thread-local pseudo stack. | 176 // Pops a frame from the thread-local pseudo stack. |
182 static void PopPseudoStackFrame(StackFrame frame); | 177 static void PopPseudoStackFrame(StackFrame frame); |
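Push and pop are intended to be balanced, presumably with the same frame passed to both. A minimal RAII helper, not part of this header, shows that usage:

// Hypothetical scoped helper pairing the two calls above.
class ScopedPseudoStackFrame {
 public:
  explicit ScopedPseudoStackFrame(base::trace_event::StackFrame frame)
      : frame_(frame) {
    base::trace_event::AllocationContextTracker::PushPseudoStackFrame(frame_);
  }
  ~ScopedPseudoStackFrame() {
    base::trace_event::AllocationContextTracker::PopPseudoStackFrame(frame_);
  }

 private:
  base::trace_event::StackFrame frame_;
};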
183 | 178 |
184 // Sets a thread-local (key, value) pair. | |
185 static void SetContextField(const char* key, const char* value); | |
186 | |
187 // Removes the (key, value) pair with the specified key from the thread-local | |
188 // context. | |
189 static void UnsetContextField(const char* key); | |
190 | |
191 // Returns a snapshot of the current thread-local context. | 179 // Returns a snapshot of the current thread-local context. |
192 static AllocationContext GetContextSnapshot(); | 180 static AllocationContext GetContextSnapshot(); |
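As a hedged sketch of how a caller might combine the capture check with this snapshot (the hook below is hypothetical; the real allocator shim is not part of this header):

// Illustration only: capture a context snapshot for an intercepted
// allocation when heap profiling is on.
void OnAllocationIntercepted(const void* address, size_t size) {
  using base::trace_event::AllocationContext;
  using base::trace_event::AllocationContextTracker;
  if (!AllocationContextTracker::capture_enabled())
    return;  // Cheap, barrier-free check when profiling is off.
  AllocationContext context = AllocationContextTracker::GetContextSnapshot();
  // Store |context| together with |address| and |size| in the heap
  // profiler's bookkeeping (omitted; outside the scope of this header).
}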
193 | 181 |
194 ~AllocationContextTracker(); | 182 ~AllocationContextTracker(); |
195 | 183 |
196 private: | 184 private: |
197 AllocationContextTracker(); | 185 AllocationContextTracker(); |
198 | 186 |
199 static AllocationContextTracker* GetThreadLocalTracker(); | 187 static AllocationContextTracker* GetThreadLocalTracker(); |
200 | 188 |
201 static subtle::Atomic32 capture_enabled_; | 189 static subtle::Atomic32 capture_enabled_; |
202 | 190 |
203 // The pseudo stack where frames are |TRACE_EVENT| names. | 191 // The pseudo stack where frames are |TRACE_EVENT| names. |
204 AllocationStack pseudo_stack_; | 192 AllocationStack pseudo_stack_; |
205 | 193 |
206 // A dictionary of arbitrary context. | |
207 SmallMap<std::map<const char*, const char*>> context_; | |
208 | |
209 DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker); | 194 DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker); |
210 }; | 195 }; |
211 | 196 |
212 } // namespace trace_event | 197 } // namespace trace_event |
213 } // namespace base | 198 } // namespace base |
214 | 199 |
215 namespace BASE_HASH_NAMESPACE { | 200 namespace BASE_HASH_NAMESPACE { |
216 | 201 |
217 template <> | 202 template <> |
218 struct hash<base::trace_event::Backtrace> { | 203 struct hash<base::trace_event::Backtrace> { |
219 size_t operator()(const base::trace_event::Backtrace& backtrace) const; | 204 size_t operator()(const base::trace_event::Backtrace& backtrace) const; |
220 }; | 205 }; |
221 | 206 |
222 } // BASE_HASH_NAMESPACE | 207 } // BASE_HASH_NAMESPACE |
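The specialization above is only declared here; its definition lives in the .cc file. As a rough sketch of one possible implementation, assuming |Backtrace| holds a fixed-size array of |StackFrame|s named |frames| (defined in the lines skipped above) and that StackFrame is a pointer type, the frame pointers could simply be folded together:

namespace BASE_HASH_NAMESPACE {

// Sketch only; the real implementation may hash the raw bytes of the frame
// array instead. |frames| is an assumption about Backtrace's layout.
size_t hash<base::trace_event::Backtrace>::operator()(
    const base::trace_event::Backtrace& backtrace) const {
  size_t result = 0;
  for (base::trace_event::StackFrame frame : backtrace.frames)
    result = result * 31 + reinterpret_cast<size_t>(frame);
  return result;
}

}  // namespace BASE_HASH_NAMESPACE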
223 | 208 |
224 #endif // BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ | 209 #endif // BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ |