Chromium Code Reviews

Unified diff: base/trace_event/memory_profiler_allocation_context.h

Issue 1372523002: [tracing] Implement trace_event::AllocationContext (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@alloccontext
Patch Set: Fix debug check (created 5 years, 2 months ago)
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_
#define BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_

#include <vector>

#include "base/atomicops.h"
#include "base/base_export.h"
#include "base/containers/small_map.h"

namespace base {
namespace trace_event {

// When heap profiling is enabled, tracing keeps track of the allocation
// context for each intercepted allocation. It is generated by the
// |AllocationContextTracker|, which keeps stacks of context in TLS.
// The tracker is initialized lazily.

using StackFrame = const char*;

// A simple stack of |StackFrame| that, unlike |std::stack|, allows iterating
// over the stack and guards against underflow.
class BASE_EXPORT AllocationStack {
 public:
-  // Incrementing the iterator iterates down the stack.
+  // Incrementing the iterator iterates up the stack, from bottom to top.

Primiano Tucci (use gerrit) 2015/10/05 15:49:27: It'd probably less ambiguous if you said here bott …

-  using ConstIterator = std::vector<StackFrame>::const_reverse_iterator;
+  using ConstIterator = std::vector<StackFrame>::const_iterator;

  AllocationStack();
  ~AllocationStack();

-  inline ConstIterator top() const { return stack_.rbegin(); }
-  inline ConstIterator bottom() const { return stack_.rend(); }
+  inline ConstIterator bottom() const { return stack_.begin(); }
+  inline ConstIterator top() const { return stack_.end(); }

  inline void push(StackFrame frame) {
    // Impose a limit on the height to verify that every push is popped,
    // because in practice the pseudo stack never grows higher than ~20 frames.
    DCHECK_LT(stack_.size(), 128u);
    stack_.push_back(frame);
  }

  inline void pop() {
    if (!stack_.empty())
      stack_.pop_back();
  }

 private:
  std::vector<StackFrame> stack_;

  DISALLOW_COPY_AND_ASSIGN(AllocationStack);
};
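For reference, a minimal usage sketch of |AllocationStack| as it stands in this patch set (not part of the CL). It assumes this header is included; the frame names are made up for illustration.

void AllocationStackUsageSketch() {
  AllocationStack stack;
  stack.push("BrowserMain");       // Pushed first, so this is the bottom frame.
  stack.push("Document::Layout");  // Pushed last, so this is the top frame.

  // Iterating from bottom() to top() visits frames in push order,
  // i.e. "BrowserMain" first.
  for (AllocationStack::ConstIterator it = stack.bottom(); it != stack.top();
       ++it) {
    const char* frame = *it;
    // ... use |frame| ...
  }

  stack.pop();  // Removes "Document::Layout".
  stack.pop();  // Removes "BrowserMain".
  stack.pop();  // Underflow is guarded: popping an empty stack is a no-op.
}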

-class BASE_EXPORT AllocationContext {
-  // TODO(ruuda): Fill this in a follow-up CL.
+// The backtrace in the allocation context is a snapshot of the stack. For now,
+// this is the pseudo stack, where frames are created by trace event macros. In
+// the future, we might add the option to use the native call stack. In that
+// case, |Backtrace| and |AllocationContextTracker::GetContextSnapshot| might
+// have different implementations that can be selected by a compile-time flag.
+
+// The number of stack frames stored in the backtrace is a trade-off between
+// memory used for tracing and accuracy. Measurements done on a prototype
+// revealed that:
+//
+// - In 60 percent of the cases, stack depth <= 7.
+// - In 87 percent of the cases, stack depth <= 9.
+// - In 95 percent of the cases, stack depth <= 11.
+//
+// See the design doc (https://goo.gl/4s7v7b) for more details.
+
+// The allocation context is context metadata that is kept for every allocation
+// when heap profiling is enabled. To simplify memory management for
+// bookkeeping, this struct has a fixed size. All |const char*|s here
+// must have static lifetime.
+struct BASE_EXPORT AllocationContext {
+  struct Backtrace {
+    // Unused backtrace frames are filled with nullptr frames. If the stack is
+    // higher than what can be stored here, the bottom frames are stored. Based
+    // on the data above, a depth of 12 captures the full stack in the vast
+    // majority of the cases.
+    StackFrame frames[12];
+  } backtrace;
+
+  // There is room for two arbitrary context fields, which can be set by the
+  // |TRACE_ALLOCATION_CONTEXT| macro. A nullptr key indicates that the field
+  // is unused.
+  std::pair<const char*, const char*> fields[2];
};
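To illustrate the layout described in the comments above, a hedged sketch of how a consumer might read a captured |AllocationContext| (not part of the CL). It assumes frames are filled from index 0 with unused trailing slots left nullptr, which is how the comments read but is not spelled out.

void ReadAllocationContextSketch(const AllocationContext& context) {
  // At most 12 frames are stored; unused slots are nullptr, and this sketch
  // assumes the used slots come first.
  for (StackFrame frame : context.backtrace.frames) {
    if (frame == nullptr)
      break;  // Remaining slots are unused.
    // ... append |frame| to the stored backtrace ...
  }

  // There is room for two (key, value) context fields; a nullptr key marks
  // an unused slot.
  for (const auto& field : context.fields) {
    if (field.first != nullptr) {
      // ... store the (field.first, field.second) pair ...
    }
  }
}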

// The allocation context tracker keeps track of thread-local context for heap
// profiling. It includes a pseudo stack of trace events, and it might contain
// arbitrary (key, value) context. On every allocation the tracker provides a
// snapshot of its context in the form of an |AllocationContext| that is to be
// stored together with the allocation details.
class BASE_EXPORT AllocationContextTracker {
 public:
  // Globally enables capturing allocation context.

(...skipping 23 matching lines...)

  static void PopPseudoStackFrame(StackFrame frame);

  // Sets a thread-local (key, value) pair.
  static void SetContextField(const char* key, const char* value);

  // Removes the (key, value) pair with the specified key from the
  // thread-local context.
  static void UnsetContextField(const char* key);

  // Returns a snapshot of the current thread-local context.
-  static AllocationContext GetContext();
-
-  // TODO(ruuda): Remove in a follow-up CL, this is only used for testing now.
-  static AllocationStack* GetPseudoStackForTesting();
+  static AllocationContext GetContextSnapshot();

  ~AllocationContextTracker();

 private:
  AllocationContextTracker();

  static AllocationContextTracker* GetThreadLocalTracker();

  static subtle::Atomic32 capture_enabled_;

  // The pseudo stack where frames are |TRACE_EVENT| names.
  AllocationStack pseudo_stack_;

  // A dictionary of arbitrary context.
  SmallMap<std::map<const char*, const char*>> context_;

  DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker);
};
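A hedged sketch of how the tracker might be driven around an allocation (not part of the CL). In practice the TRACE_EVENT machinery pushes and pops pseudo stack frames and |TRACE_ALLOCATION_CONTEXT| sets the context fields; the explicit calls and the "subsystem"/"blink" values below are made up for illustration, and the push counterpart of |PopPseudoStackFrame| sits in the elided lines above.

void AllocationContextTrackerUsageSketch() {
  // Annotate the current thread with an arbitrary (key, value) pair.
  AllocationContextTracker::SetContextField("subsystem", "blink");

  // When an allocation on this thread is intercepted, the profiler takes a
  // snapshot of the thread-local context and stores it together with the
  // allocation details.
  AllocationContext context = AllocationContextTracker::GetContextSnapshot();
  // ... store |context| with the allocation's address and size ...

  AllocationContextTracker::UnsetContextField("subsystem");
}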

}  // namespace trace_event
}  // namespace base

#endif  // BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_