OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ | 5 #ifndef BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ |
6 #define BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ | 6 #define BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ |
7 | 7 |
8 #include <vector> | 8 #include <vector> |
9 | 9 |
10 #include "base/atomicops.h" | 10 #include "base/atomicops.h" |
(...skipping 51 matching lines...) | |
62 // The number of stack frames stored in the backtrace is a trade-off between | 62 // The number of stack frames stored in the backtrace is a trade-off between |
63 // memory used for tracing and accuracy. Measurements done on a prototype | 63 // memory used for tracing and accuracy. Measurements done on a prototype |
64 // revealed that: | 64 // revealed that: |
65 // | 65 // |
66 // - In 60 percent of the cases, stack depth <= 7. | 66 // - In 60 percent of the cases, stack depth <= 7. |
67 // - In 87 percent of the cases, stack depth <= 9. | 67 // - In 87 percent of the cases, stack depth <= 9. |
68 // - In 95 percent of the cases, stack depth <= 11. | 68 // - In 95 percent of the cases, stack depth <= 11. |
69 // | 69 // |
70 // See the design doc (https://goo.gl/4s7v7b) for more details. | 70 // See the design doc (https://goo.gl/4s7v7b) for more details. |
71 | 71 |
72 // The allocation context is context metadata that is kept for every allocation | 72 struct BASE_EXPORT Backtrace { |
73 // when heap profiling is enabled. To simplify memory management for | 73 // Unused backtrace frames are filled with nullptr frames. If the stack is |
74 // bookkeeping, this struct has a fixed size. All |const char*|s here | 74 // higher than what can be stored here, the bottom frames are stored. Based |
75 // must have static lifetime. | 75 // on the data above, a depth of 12 captures the full stack in the vast |
76 struct BASE_EXPORT AllocationContext { | 76 // majority of the cases. |
77 struct Backtrace { | 77 StackFrame frames[12]; |
78 // Unused backtrace frames are filled with nullptr frames. If the stack is | 78 }; |
79 // higher than what can be stored here, the bottom frames are stored. Based | |
80 // on the data above, a depth of 12 captures the full stack in the vast | |
81 // majority of the cases. | |
82 StackFrame frames[12]; | |
83 } backtrace; | |
84 | 79 |
85 // There is room for two arbitrary context fields, which can be set by the | 80 bool BASE_EXPORT operator==(const Backtrace& lhs, const Backtrace& rhs); |
86 // |TRACE_ALLOCATION_CONTEXT| macro. A nullptr key indicates that the field is | |
87 // unused. | |
88 std::pair<const char*, const char*> fields[2]; | |
89 }; | |
90 | 81 |
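For orientation, a minimal sketch of what the equality operator declared above could look like, assuming |StackFrame| is a raw pointer type (its definition sits in the lines the diff viewer skipped, and the real implementation lives in the .cc file):

```cpp
// Sketch only: compares all 12 slots. Unused slots are nullptr on both
// sides, so the padding does not affect equality.
bool operator==(const Backtrace& lhs, const Backtrace& rhs) {
  for (size_t i = 0; i < 12; i++) {
    if (lhs.frames[i] != rhs.frames[i])
      return false;
  }
  return true;
}
```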
91 // A data structure that allows grouping a set of backtraces in a space- | 82 // A data structure that allows grouping a set of backtraces in a space- |
92 // efficient manner by creating a call tree and writing it as a set of (node, | 83 // efficient manner by creating a call tree and writing it as a set of (node, |
93 // parent) pairs. The tree nodes reference both parent and children. The parent | 84 // parent) pairs. The tree nodes reference both parent and children. The parent |
94 // is referenced by index into |frames_|. The children are referenced via a map | 85 // is referenced by index into |frames_|. The children are referenced via a map |
95 // of |StackFrame|s to index into |frames_|. So there is a trie for bottom-up | 86 // of |StackFrame|s to index into |frames_|. So there is a trie for bottom-up |
96 // lookup of a backtrace for deduplication, and a tree for compact storage in | 87 // lookup of a backtrace for deduplication, and a tree for compact storage in |
97 // the trace log. | 88 // the trace log. |
98 class BASE_EXPORT StackFrameDeduplicator { | 89 class BASE_EXPORT StackFrameDeduplicator { |
99 public: | 90 public: |
(...skipping 10 matching lines...) | |
110 | 101 |
111 // Indices into |frames_| of frames called from the current frame. | 102 // Indices into |frames_| of frames called from the current frame. |
112 std::map<StackFrame, int> children; | 103 std::map<StackFrame, int> children; |
113 }; | 104 }; |
114 | 105 |
115 using ConstIterator = std::vector<FrameNode>::const_iterator; | 106 using ConstIterator = std::vector<FrameNode>::const_iterator; |
116 | 107 |
117 StackFrameDeduplicator(); | 108 StackFrameDeduplicator(); |
118 ~StackFrameDeduplicator(); | 109 ~StackFrameDeduplicator(); |
119 | 110 |
120 // Inserts a backtrace and returns the index of its leaf node in the range | 111 // Inserts a backtrace and returns the index of its leaf node in |frames_|. |
121 // defined by |begin| and |end|. I.e. if this returns |n|, the node is | 112 // Returns -1 if the backtrace is empty. |
122 // |begin() + n|. Returns -1 if the backtrace is empty. | 113 int Insert(const Backtrace& bt); |
123 int Insert(const AllocationContext::Backtrace& bt); | |
124 | 114 |
125 // Iterators over the frame nodes in the call tree. | 115 // Iterators over the frame nodes in the call tree. |
126 ConstIterator begin() const { return frames_.begin(); } | 116 ConstIterator begin() const { return frames_.begin(); } |
127 ConstIterator end() const { return frames_.end(); } | 117 ConstIterator end() const { return frames_.end(); } |
128 | 118 |
129 private: | 119 private: |
130 std::map<StackFrame, int> roots_; | 120 std::map<StackFrame, int> roots_; |
131 std::vector<FrameNode> frames_; | 121 std::vector<FrameNode> frames_; |
132 | 122 |
133 DISALLOW_COPY_AND_ASSIGN(StackFrameDeduplicator); | 123 DISALLOW_COPY_AND_ASSIGN(StackFrameDeduplicator); |
134 }; | 124 }; |
135 | 125 |
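To make the trie/tree split above concrete, here is a hedged sketch of how |Insert| could walk the structure. It assumes |FrameNode| has a |(frame, parent_frame_index)| constructor, which is declared in the skipped lines; the real .cc implementation may differ in its details:

```cpp
int StackFrameDeduplicator::Insert(const Backtrace& bt) {
  int frame_index = -1;
  std::map<StackFrame, int>* nodes = &roots_;

  // Walk the backtrace from the bottom of the stack; unused slots are
  // nullptr, so the first nullptr ends the backtrace.
  for (const StackFrame frame : bt.frames) {
    if (!frame)
      break;

    auto node = nodes->find(frame);
    if (node == nodes->end()) {
      // This frame was not seen under this parent before: it will be
      // appended, so its index is the current size of |frames_|. The child
      // entry is added to the map before |push_back|, because growing the
      // vector can invalidate |nodes| when it points into a FrameNode.
      int new_index = static_cast<int>(frames_.size());
      nodes->insert(std::make_pair(frame, new_index));
      frames_.push_back(FrameNode(frame, frame_index));
      frame_index = new_index;
    } else {
      frame_index = node->second;
    }
    nodes = &frames_[frame_index].children;
  }

  // -1 when the backtrace was empty, the index of the leaf node otherwise.
  return frame_index;
}
```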
126 // The allocation context is context metadata that is kept for every allocation | |
Primiano Tucci (use gerrit) 2015/10/27 11:56:20: s/allocation context/AllocationContext/
Ruud van Asseldonk 2015/10/27 11:59:31: Done.
127 // when heap profiling is enabled. To simplify memory management for | |
128 // bookkeeping, this struct has a fixed size. All |const char*|s here | |
129 // must have static lifetime. | |
130 struct BASE_EXPORT AllocationContext { | |
131 Backtrace backtrace; | |
132 | |
133 // There is room for two arbitrary context fields, which can be set by the | |
134 // |TRACE_ALLOCATION_CONTEXT| macro. A nullptr key indicates that the field is | |
135 // unused. | |
136 std::pair<const char*, const char*> fields[2]; | |
137 }; | |
138 | |
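As a usage illustration of the struct above, the sketch below fills an AllocationContext by hand. The frame names and the (key, value) pair are hypothetical, and it assumes |StackFrame| is a |const char*|; string literals satisfy the static-lifetime requirement:

```cpp
// Sketch only: value-initialization zeroes the struct, so every unused
// backtrace slot and the unused second field keep a nullptr value.
AllocationContext ctx = AllocationContext();
ctx.backtrace.frames[0] = "MessageLoop::RunTask";      // Hypothetical frame.
ctx.backtrace.frames[1] = "ImageDecoder::Decode";      // Hypothetical frame.
ctx.fields[0] = std::make_pair("subsystem", "blink");  // Hypothetical field.
// ctx.fields[1] keeps its nullptr key, marking the field as unused.
```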
136 // The allocation context tracker keeps track of thread-local context for heap | 139 // The allocation context tracker keeps track of thread-local context for heap |
137 // profiling. It includes a pseudo stack of trace events, and it might contain | 140 // profiling. It includes a pseudo stack of trace events, and it might contain |
138 // arbitrary (key, value) context. On every allocation the tracker provides a | 141 // arbitrary (key, value) context. On every allocation the tracker provides a |
139 // snapshot of its context in the form of an |AllocationContext| that is to be | 142 // snapshot of its context in the form of an |AllocationContext| that is to be |
140 // stored together with the allocation details. | 143 // stored together with the allocation details. |
141 class BASE_EXPORT AllocationContextTracker { | 144 class BASE_EXPORT AllocationContextTracker { |
142 public: | 145 public: |
143 // Globally enables capturing allocation context. | 146 // Globally enables capturing allocation context. |
144 // TODO(ruuda): Should this be replaced by |EnableCapturing| in the future? | 147 // TODO(ruuda): Should this be replaced by |EnableCapturing| in the future? |
145 // Or at least have something that guards against enable -> disable -> enable? | 148 // Or at least have something that guards against enable -> disable -> enable? |
(...skipping 44 matching lines...) | |
190 | 193 |
191 // A dictionary of arbitrary context. | 194 // A dictionary of arbitrary context. |
192 SmallMap<std::map<const char*, const char*>> context_; | 195 SmallMap<std::map<const char*, const char*>> context_; |
193 | 196 |
194 DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker); | 197 DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker); |
195 }; | 198 }; |
196 | 199 |
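The tracker's public methods fall inside the lines the diff viewer skipped, so the following sketch uses hypothetical method names purely to illustrate the flow described in the comment above: trace events push onto the thread-local pseudo stack, and each allocation takes a snapshot of the current context:

```cpp
// Hypothetical method and helper names; only the overall flow is taken from
// the comments in this header.
void OnTraceEventBegin(const char* event_name) {
  // The trace event name becomes a frame on the thread-local pseudo stack.
  AllocationContextTracker::PushPseudoStackFrame(event_name);  // Hypothetical.
}

void OnAllocation(void* address, size_t size) {
  // Every allocation snapshots the thread-local context so it can be stored
  // together with the allocation details.
  AllocationContext context =
      AllocationContextTracker::GetContextSnapshot();    // Hypothetical.
  RecordAllocationForProfiling(address, size, context);  // Hypothetical helper.
}
```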
197 } // namespace trace_event | 200 } // namespace trace_event |
198 } // namespace base | 201 } // namespace base |
199 | 202 |
203 namespace BASE_HASH_NAMESPACE { | |
204 | |
205 template <> | |
206 struct hash<base::trace_event::Backtrace> { | |
207 uint32_t operator()(const base::trace_event::Backtrace& backtrace) const; | |
Primiano Tucci (use gerrit) 2015/10/27 11:56:20: I think you should return size_t here and just let
Ruud van Asseldonk 2015/10/27 11:59:31: Done.
208 }; | |
209 | |
210 } // BASE_HASH_NAMESPACE | |
211 | |
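One plausible shape for the hash specialization declared above, matching the uint32_t return type shown in this patch set. It assumes the fixed-size frame array can be hashed as raw bytes with base::SuperFastHash from base/hash.h; the actual .cc may combine the frames differently:

```cpp
#include "base/hash.h"

namespace BASE_HASH_NAMESPACE {

uint32_t hash<base::trace_event::Backtrace>::operator()(
    const base::trace_event::Backtrace& backtrace) const {
  // Unused slots are always nullptr, so equal backtraces hash equally.
  return base::SuperFastHash(
      reinterpret_cast<const char*>(backtrace.frames),
      static_cast<int>(sizeof(backtrace.frames)));
}

}  // namespace BASE_HASH_NAMESPACE
```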
200 #endif // BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ | 212 #endif // BASE_TRACE_EVENT_MEMORY_PROFILER_ALLOCATION_CONTEXT_H_ |