Chromium Code Reviews
Unified Diff: base/trace_event/memory_profiler_allocation_context.h

Issue 1372523002: [tracing] Implement trace_event::AllocationContext (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@alloccontext
Patch Set: Rebase + clarify comment (created 5 years, 2 months ago)
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_
#define BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_

#include <vector>

#include "base/atomicops.h"
#include "base/base_export.h"
#include "base/containers/small_map.h"

namespace base {
namespace trace_event {

// When heap profiling is enabled, tracing keeps track of the allocation
// context for each allocation intercepted. It is generated by the
// |AllocationContextTracker| which keeps stacks of context in TLS.
// The tracker is initialized lazily.

using StackFrame = const char*;

// A simple stack of |StackFrame| that unlike |std::stack| allows iterating
// the stack and guards for underflow.
class BASE_EXPORT AllocationStack {
 public:
-  // Incrementing the iterator iterates down the stack.
-  using ConstIterator = std::vector<StackFrame>::const_reverse_iterator;
+  // Incrementing the iterator iterates up the stack, from bottom (least recent
+  // call) to top (most recent call).
+  using ConstIterator = std::vector<StackFrame>::const_iterator;

  AllocationStack();
  ~AllocationStack();

-  inline ConstIterator top() const { return stack_.rbegin(); }
-  inline ConstIterator bottom() const { return stack_.rend(); }
+  inline ConstIterator bottom() const { return stack_.begin(); }
+  inline ConstIterator top() const { return stack_.end(); }

  inline void push(StackFrame frame) {
    // Impose a limit on the height to verify that every push is popped, because
    // in practice the pseudo stack never grows higher than ~20 frames.
    DCHECK_LT(stack_.size(), 128u);
    stack_.push_back(frame);
  }

  inline void pop() {
    if (!stack_.empty())
      stack_.pop_back();
  }

 private:
  std::vector<StackFrame> stack_;

  DISALLOW_COPY_AND_ASSIGN(AllocationStack);
};
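
For reference, a minimal sketch of how the new bottom-to-top iteration order reads at a call site. This is an editor's illustration, not part of the patch; the frame names are invented and the snippet assumes it lives inside the base::trace_event namespace.

// Editor's sketch; frame names are made up for illustration only.
void IterationExample() {
  AllocationStack stack;
  stack.push("BrowserMain");       // Least recent call, at the bottom.
  stack.push("MessageLoop::Run");  // Most recent call, at the top.

  // With |const_iterator|, iterating from bottom() to top() visits
  // "BrowserMain" first and "MessageLoop::Run" last.
  std::vector<StackFrame> frames(stack.bottom(), stack.top());
}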

-class BASE_EXPORT AllocationContext {
-  // TODO(ruuda): Fill this in a follow-up CL.
+// The backtrace in the allocation context is a snapshot of the stack. For now,
+// this is the pseudo stack where frames are created by trace event macros. In
+// the future, we might add the option to use the native call stack. In that
+// case, |Backtrace| and |AllocationContextTracker::GetContextSnapshot| might
+// have different implementations that can be selected by a compile time flag.
+
+// The number of stack frames stored in the backtrace is a trade off between
+// memory used for tracing and accuracy. Measurements done on a prototype
+// revealed that:
+//
+// - In 60 percent of the cases, stack depth <= 7.
+// - In 87 percent of the cases, stack depth <= 9.
+// - In 95 percent of the cases, stack depth <= 11.
+//
+// See the design doc (https://goo.gl/4s7v7b) for more details.
+
+// The allocation context is context metadata that is kept for every allocation
+// when heap profiling is enabled. To simplify memory management for
+// bookkeeping, this struct has a fixed size. All |const char*|s here
+// must have static lifetime.
+struct BASE_EXPORT AllocationContext {
+  struct Backtrace {
+    // Unused backtrace frames are filled with nullptr frames. If the stack is
+    // higher than what can be stored here, the bottom frames are stored. Based
+    // on the data above, a depth of 12 captures the full stack in the vast
+    // majority of the cases.
+    StackFrame frames[12];
+  } backtrace;
+
+  // There is room for two arbitrary context fields, which can be set by the
+  // |TRACE_ALLOCATION_CONTEXT| macro. A nullptr key indicates that the field is
+  // unused.
+  std::pair<const char*, const char*> fields[2];
};
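
The fixed-size layout implies two small conventions for consumers: trailing nullptr frames are unused, and a nullptr key marks an unused field. A hedged sketch of what a reader of a snapshot might do follows; this is an editor's illustration, the helper names are hypothetical and not part of the patch, and it assumes AllocationContext is in scope.

// Hypothetical helpers, not part of this patch.
size_t UsedFrameCount(const AllocationContext& ctx) {
  size_t depth = 0;
  // Unused trailing slots are nullptr; a stack deeper than 12 frames keeps
  // only its bottom-most frames, filling all slots.
  while (depth < 12 && ctx.backtrace.frames[depth] != nullptr)
    depth++;
  return depth;
}

size_t UsedFieldCount(const AllocationContext& ctx) {
  size_t count = 0;
  for (size_t i = 0; i < 2; i++) {
    if (ctx.fields[i].first != nullptr)  // A nullptr key marks an unused slot.
      count++;
  }
  return count;
}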

// The allocation context tracker keeps track of thread-local context for heap
// profiling. It includes a pseudo stack of trace events, and it might contain
// arbitrary (key, value) context. On every allocation the tracker provides a
// snapshot of its context in the form of an |AllocationContext| that is to be
// stored together with the allocation details.
class BASE_EXPORT AllocationContextTracker {
 public:
  // Globally enables capturing allocation context.

(...skipping 23 matching lines...)

  static void PopPseudoStackFrame(StackFrame frame);

  // Sets a thread-local (key, value) pair.
  static void SetContextField(const char* key, const char* value);

  // Removes the (key, value) pair with the specified key from the thread-local
  // context.
  static void UnsetContextField(const char* key);

  // Returns a snapshot of the current thread-local context.
-  static AllocationContext GetContext();
-
-  // TODO(ruuda): Remove in a follow-up CL, this is only used for testing now.
-  static AllocationStack* GetPseudoStackForTesting();
+  static AllocationContext GetContextSnapshot();

  ~AllocationContextTracker();

 private:
  AllocationContextTracker();

  static AllocationContextTracker* GetThreadLocalTracker();

  static subtle::Atomic32 capture_enabled_;

  // The pseudo stack where frames are |TRACE_EVENT| names.
  AllocationStack pseudo_stack_;

  // A dictionary of arbitrary context.
  SmallMap<std::map<const char*, const char*>> context_;

  DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker);
};

}  // namespace trace_event
}  // namespace base

#endif  // BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_
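
To make the intended flow concrete, an end-to-end usage sketch by the editor (not code from this patch). It assumes a PushPseudoStackFrame counterpart to the PopPseudoStackFrame declared above exists among the elided declarations, and that frame names and field strings are literals with static lifetime, as the struct requires; in real code the pushes and pops would come from the TRACE_EVENT and TRACE_ALLOCATION_CONTEXT macros rather than direct calls.

#include "base/trace_event/memory_profiler_allocation_context.h"

using base::trace_event::AllocationContext;
using base::trace_event::AllocationContextTracker;

void DoTracedWork() {
  // Normally emitted by the trace event macros rather than called directly.
  // PushPseudoStackFrame is assumed here; only PopPseudoStackFrame is visible
  // in the diff above.
  AllocationContextTracker::PushPseudoStackFrame("DoTracedWork");
  AllocationContextTracker::SetContextField("subsystem", "renderer");

  // An allocation hook would store this snapshot with the allocation details.
  AllocationContext context = AllocationContextTracker::GetContextSnapshot();
  (void)context;

  AllocationContextTracker::UnsetContextField("subsystem");
  AllocationContextTracker::PopPseudoStackFrame("DoTracedWork");
}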