OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_ | 5 #ifndef BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_ |
6 #define BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_ | 6 #define BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_ |
7 | 7 |
8 #include <vector> | 8 #include <vector> |
9 | 9 |
10 #include "base/atomicops.h" | 10 #include "base/atomicops.h" |
11 #include "base/base_export.h" | 11 #include "base/base_export.h" |
12 #include "base/debug/debugging_flags.h" | |
13 #include "base/debug/stack_trace.h" | |
14 #include "base/logging.h" | 12 #include "base/logging.h" |
15 #include "base/macros.h" | 13 #include "base/macros.h" |
16 #include "base/trace_event/heap_profiler_allocation_context.h" | 14 #include "base/trace_event/heap_profiler_allocation_context.h" |
17 | 15 |
18 #if HAVE_TRACE_STACK_FRAME_POINTERS && !defined(OS_NACL) && \ | |
19 (BUILDFLAG(ENABLE_PROFILING) || !defined(NDEBUG)) | |
20 #define ENABLE_NATIVE_ALLOCATION_TRACES 1 | |
21 #else | |
22 #define ENABLE_NATIVE_ALLOCATION_TRACES 0 | |
23 #endif | |
24 | |
25 namespace base { | 16 namespace base { |
26 namespace trace_event { | 17 namespace trace_event { |
27 | 18 |
28 // The allocation context tracker keeps track of thread-local context for heap | 19 // The allocation context tracker keeps track of thread-local context for heap |
29 // profiling. It includes a pseudo stack of trace events. On every allocation | 20 // profiling. It includes a pseudo stack of trace events. On every allocation |
30 // the tracker provides a snapshot of its context in the form of an | 21 // the tracker provides a snapshot of its context in the form of an |
31 // |AllocationContext| that is to be stored together with the allocation | 22 // |AllocationContext| that is to be stored together with the allocation |
32 // details. | 23 // details. |
33 class BASE_EXPORT AllocationContextTracker { | 24 class BASE_EXPORT AllocationContextTracker { |
34 public: | 25 public: |
35 enum class CaptureMode: int32_t { | 26 // Globally enables or disables capturing allocation context. |
36 DISABLED, // Don't capture anything | 27 // TODO(ruuda): Should this be replaced by |EnableCapturing| in the future? |
37 PSEUDO_STACK, // GetContextSnapshot() returns pseudo stack trace | 28 // Or at least have something that guards against enable -> disable -> enable? |
38 #if ENABLE_NATIVE_ALLOCATION_TRACES | 29 static void SetCaptureEnabled(bool enabled); |
39 NATIVE_STACK // GetContextSnapshot() returns native (real) stack trace | |
40 #endif | |
41 }; | |
42 | 30 |
43 // Globally sets capturing mode. | 31 // Returns whether capturing allocation context is enabled globally. |
44 // TODO(primiano): How to guard against *_STACK -> DISABLED -> *_STACK? | 32 inline static bool capture_enabled() { |
45 static void SetCaptureMode(CaptureMode mode); | |
46 | |
47 // Returns global capturing mode. | |
48 inline static CaptureMode capture_mode() { | |
49 // A little lag after heap profiling is enabled or disabled is fine; it is | 33 // A little lag after heap profiling is enabled or disabled is fine; it is |
50 // more important that the check is as cheap as possible when capturing is | 34 // more important that the check is as cheap as possible when capturing is |
51 // not enabled, so do not issue a memory barrier in the fast path. | 35 // not enabled, so do not issue a memory barrier in the fast path. |
52 if (subtle::NoBarrier_Load(&capture_mode_) == | 36 if (subtle::NoBarrier_Load(&capture_enabled_) == 0) |
53 static_cast<int32_t>(CaptureMode::DISABLED)) | 37 return false; |
54 return CaptureMode::DISABLED; | |
55 | 38 |
56 // In the slow path, an acquire load is required to pair with the release | 39 // In the slow path, an acquire load is required to pair with the release |
57 // store in |SetCaptureMode|. This is to ensure that the TLS slot for | 40 // store in |SetCaptureEnabled|. This is to ensure that the TLS slot for |
58 // the thread-local allocation context tracker has been initialized if | 41 // the thread-local allocation context tracker has been initialized if |
59 // |capture_mode| returns something other than DISABLED. | 42 // |capture_enabled| returns true. |
60 return static_cast<CaptureMode>(subtle::Acquire_Load(&capture_mode_)); | 43 return subtle::Acquire_Load(&capture_enabled_) != 0; |
61 } | 44 } |
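A minimal sketch of the store side that the acquire load above pairs with. The body of SetCaptureEnabled lives in the .cc file and is not part of this diff, so the statements below are an assumption about how the release/acquire pairing could look, not the actual implementation:

  // Illustration only: publish the flag with a release store so that a thread
  // observing capture_enabled() == true also observes the initialized TLS slot.
  void AllocationContextTracker::SetCaptureEnabled(bool enabled) {
    if (enabled) {
      // Touching the thread-local instance first (an assumption) guarantees
      // the TLS slot exists before the flag becomes observable.
      GetInstanceForCurrentThread();
    }
    // Release_Store pairs with the Acquire_Load in capture_enabled().
    subtle::Release_Store(&capture_enabled_, enabled ? 1 : 0);
  }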
62 | 45 |
63 // Returns the thread-local instance, creating one if necessary. Always | 46 // Returns the thread-local instance, creating one if necessary. Always |
64 // returns a valid instance, unless it is called re-entrantly, in which case | 47 // returns a valid instance, unless it is called re-entrantly, in which case |
65 // it returns nullptr in the nested calls. | 48 // it returns nullptr in the nested calls. |
66 static AllocationContextTracker* GetInstanceForCurrentThread(); | 49 static AllocationContextTracker* GetInstanceForCurrentThread(); |
67 | 50 |
68 // Set the thread name in the AllocationContextTracker of the current thread | 51 // Set the thread name in the AllocationContextTracker of the current thread |
69 // if capture is enabled. | 52 // if capture is enabled. |
70 static void SetCurrentThreadName(const char* name); | 53 static void SetCurrentThreadName(const char* name); |
(...skipping 19 matching lines...) |
90 void PopCurrentTaskContext(const char* context); | 73 void PopCurrentTaskContext(const char* context); |
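A hedged caller-side sketch for the task-context stack: ScopedTaskContext is hypothetical (not part of this header) and assumes the matching PushCurrentTaskContext declared in the lines elided above. It keeps pushes and pops balanced even on early returns:

  class ScopedTaskContext {
   public:
    explicit ScopedTaskContext(const char* context) : context_(context) {
      AllocationContextTracker* tracker =
          AllocationContextTracker::GetInstanceForCurrentThread();
      if (tracker)
        tracker->PushCurrentTaskContext(context_);
    }
    ~ScopedTaskContext() {
      AllocationContextTracker* tracker =
          AllocationContextTracker::GetInstanceForCurrentThread();
      if (tracker)
        tracker->PopCurrentTaskContext(context_);
    }

   private:
    // Must have static lifetime (e.g. a string literal): the tracker keeps
    // only the raw pointer on its |task_contexts_| stack.
    const char* const context_;
  };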
91 | 74 |
92 // Returns a snapshot of the current thread-local context. | 75 // Returns a snapshot of the current thread-local context. |
93 AllocationContext GetContextSnapshot(); | 76 AllocationContext GetContextSnapshot(); |
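A hedged usage sketch tying the pieces together: OnHeapAllocation and its parameters are hypothetical; only capture_enabled(), GetInstanceForCurrentThread() and GetContextSnapshot() come from this header:

  void OnHeapAllocation(void* address, size_t size) {
    // Cheap no-barrier check first; when heap profiling is disabled, almost
    // every allocation takes this early return.
    if (!AllocationContextTracker::capture_enabled())
      return;
    AllocationContextTracker* tracker =
        AllocationContextTracker::GetInstanceForCurrentThread();
    // nullptr means this is a re-entrant call (an allocation made while the
    // tracker itself is being set up), so record nothing.
    if (!tracker)
      return;
    AllocationContext context = tracker->GetContextSnapshot();
    // ... store |context| together with |address| and |size| ...
  }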
94 | 77 |
95 ~AllocationContextTracker(); | 78 ~AllocationContextTracker(); |
96 | 79 |
97 private: | 80 private: |
98 AllocationContextTracker(); | 81 AllocationContextTracker(); |
99 | 82 |
100 static subtle::Atomic32 capture_mode_; | 83 static subtle::Atomic32 capture_enabled_; |
101 | 84 |
102 // The pseudo stack where frames are |TRACE_EVENT| names. | 85 // The pseudo stack where frames are |TRACE_EVENT| names. |
103 std::vector<const char*> pseudo_stack_; | 86 std::vector<const char*> pseudo_stack_; |
104 | 87 |
105 // The thread name is used as the first entry in the pseudo stack. | 88 // The thread name is used as the first entry in the pseudo stack. |
106 const char* thread_name_; | 89 const char* thread_name_; |
107 | 90 |
108 // Stack of tasks' contexts. Context serves as a different dimension than | 91 // Stack of tasks' contexts. Context serves as a different dimension than |
109 // pseudo stack to cluster allocations. | 92 // pseudo stack to cluster allocations. |
110 std::vector<const char*> task_contexts_; | 93 std::vector<const char*> task_contexts_; |
111 | 94 |
112 uint32_t ignore_scope_depth_; | 95 uint32_t ignore_scope_depth_; |
113 | 96 |
114 DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker); | 97 DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker); |
115 }; | 98 }; |
116 | 99 |
117 } // namespace trace_event | 100 } // namespace trace_event |
118 } // namespace base | 101 } // namespace base |
119 | 102 |
120 #endif // BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_ | 103 #endif // BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_ |