Chromium Code Reviews

Unified Diff: base/trace_event/trace_event_memory_overhead.cc

Issue 2857543002: tracing: Simplify TraceEventMemoryOverhead, use an enum instead of a map (Closed)
Patch Set: Fix compiler issues + omit empty values (created 3 years, 7 months ago)
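
The core of the change is the bookkeeping container: the const char*-keyed map in the old code becomes a fixed-size array indexed by the new ObjectType enum, and ObjectTypeToString() is consulted only when dumping. Below is a minimal standalone sketch of that pattern; the names OverheadSketch and the short enum are placeholders for illustration, not the actual Chromium types (the real list of values lives in trace_event_memory_overhead.h).

#include <array>
#include <cstddef>
#include <cstdint>

// Placeholder enum mirroring the shape of TraceEventMemoryOverhead::ObjectType.
enum ObjectType : std::uint32_t {
  kOther = 0,
  kTraceEvent,
  kStdString,
  kLast  // Not a real bucket; only fixes the array size.
};

struct ObjectCountAndSize {
  std::size_t count = 0;
  std::size_t allocated_size_in_bytes = 0;
  std::size_t resident_size_in_bytes = 0;
};

class OverheadSketch {
 public:
  // Constant-time bucket update: no string hashing, no map nodes on the heap.
  void Add(ObjectType type, std::size_t allocated, std::size_t resident) {
    ObjectCountAndSize& entry =
        allocated_objects_[static_cast<std::uint32_t>(type)];
    entry.count++;
    entry.allocated_size_in_bytes += allocated;
    entry.resident_size_in_bytes += resident;
  }

  std::size_t GetCount(ObjectType type) const {
    return allocated_objects_[static_cast<std::uint32_t>(type)].count;
  }

 private:
  // One zero-initialized entry per enum value, stored inline in the object,
  // which is why the new AddSelf() only needs to account for sizeof(*this).
  std::array<ObjectCountAndSize, kLast> allocated_objects_{};
};

Because the buckets live inline and are indexed by the enum, Update() and DumpInto() in the patch can iterate by index, and empty buckets can be skipped cheaply when dumping.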
--- base/trace_event/trace_event_memory_overhead.cc (old)
+++ base/trace_event/trace_event_memory_overhead.cc (new)
 // Copyright 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "base/trace_event/trace_event_memory_overhead.h"
 
 #include <algorithm>
 
 #include "base/bits.h"
 #include "base/memory/ref_counted_memory.h"
 #include "base/strings/stringprintf.h"
 #include "base/trace_event/memory_allocator_dump.h"
+#include "base/trace_event/memory_usage_estimator.h"
 #include "base/trace_event/process_memory_dump.h"
 #include "base/values.h"
 
 namespace base {
 namespace trace_event {
 
-TraceEventMemoryOverhead::TraceEventMemoryOverhead() {
+namespace {
+
+const char* ObjectTypeToString(TraceEventMemoryOverhead::ObjectType type) {
+  switch (type) {
+    case TraceEventMemoryOverhead::kOther:
+      return "(Other)";
+    case TraceEventMemoryOverhead::kTraceBuffer:
+      return "TraceBuffer";
+    case TraceEventMemoryOverhead::kTraceBufferChunk:
+      return "TraceBufferChunk";
+    case TraceEventMemoryOverhead::kTraceEvent:
+      return "TraceEvent";
+    case TraceEventMemoryOverhead::kUnusedTraceEvent:
+      return "TraceEvent(Unused)";
+    case TraceEventMemoryOverhead::kTracedValue:
+      return "TracedValue";
+    case TraceEventMemoryOverhead::kConvertableToTraceFormat:
+      return "ConvertableToTraceFormat";
+    case TraceEventMemoryOverhead::kHeapProfilerAllocationRegister:
+      return "AllocationRegister";
+    case TraceEventMemoryOverhead::kHeapProfilerTypeNameDeduplicator:
+      return "TypeNameDeduplicator";
+    case TraceEventMemoryOverhead::kHeapProfilerStackFrameDeduplicator:
+      return "StackFrameDeduplicator";
+    case TraceEventMemoryOverhead::kStdString:
+      return "std::string";
+    case TraceEventMemoryOverhead::kBaseValue:
+      return "base::Value";
+    case TraceEventMemoryOverhead::kTraceEventMemoryOverhead:
+      return "TraceEventMemoryOverhead";
+    case TraceEventMemoryOverhead::kLast:
+      NOTREACHED();
+  }
+  NOTREACHED();
+  return "BUG";
 }
 
-TraceEventMemoryOverhead::~TraceEventMemoryOverhead() {
+}  // namespace
+
+TraceEventMemoryOverhead::TraceEventMemoryOverhead() : allocated_objects_() {}
+
+TraceEventMemoryOverhead::~TraceEventMemoryOverhead() {}
+
+void TraceEventMemoryOverhead::AddInternal(ObjectType object_type,
+                                           size_t count,
+                                           size_t allocated_size_in_bytes,
+                                           size_t resident_size_in_bytes) {
+  ObjectCountAndSize& count_and_size =
+      allocated_objects_[static_cast<uint32_t>(object_type)];
+  count_and_size.count += count;
+  count_and_size.allocated_size_in_bytes += allocated_size_in_bytes;
+  count_and_size.resident_size_in_bytes += resident_size_in_bytes;
 }
 
-void TraceEventMemoryOverhead::AddOrCreateInternal(
-    const char* object_type,
-    size_t count,
-    size_t allocated_size_in_bytes,
-    size_t resident_size_in_bytes) {
-  auto it = allocated_objects_.find(object_type);
-  if (it == allocated_objects_.end()) {
-    allocated_objects_.insert(std::make_pair(
-        object_type,
-        ObjectCountAndSize(
-            {count, allocated_size_in_bytes, resident_size_in_bytes})));
-    return;
-  }
-  it->second.count += count;
-  it->second.allocated_size_in_bytes += allocated_size_in_bytes;
-  it->second.resident_size_in_bytes += resident_size_in_bytes;
-}
-
-void TraceEventMemoryOverhead::Add(const char* object_type,
+void TraceEventMemoryOverhead::Add(ObjectType object_type,
                                    size_t allocated_size_in_bytes) {
   Add(object_type, allocated_size_in_bytes, allocated_size_in_bytes);
 }
 
-void TraceEventMemoryOverhead::Add(const char* object_type,
+void TraceEventMemoryOverhead::Add(ObjectType object_type,
                                    size_t allocated_size_in_bytes,
                                    size_t resident_size_in_bytes) {
-  AddOrCreateInternal(object_type, 1, allocated_size_in_bytes,
-                      resident_size_in_bytes);
+  AddInternal(object_type, 1, allocated_size_in_bytes, resident_size_in_bytes);
 }
 
 void TraceEventMemoryOverhead::AddString(const std::string& str) {
-  // The number below are empirical and mainly based on profiling of real-world
-  // std::string implementations:
-  //  - even short string end up malloc()-ing at least 32 bytes.
-  //  - longer strings seem to malloc() multiples of 16 bytes.
-  const size_t capacity = bits::Align(str.capacity(), 16);
-  Add("std::string", sizeof(std::string) + std::max<size_t>(capacity, 32u));
+  Add(kStdString, EstimateMemoryUsage(str));
 }
 
 void TraceEventMemoryOverhead::AddRefCountedString(
     const RefCountedString& str) {
-  Add("RefCountedString", sizeof(RefCountedString));
+  Add(kOther, sizeof(RefCountedString));
   AddString(str.data());
 }
 
 void TraceEventMemoryOverhead::AddValue(const Value& value) {
   switch (value.GetType()) {
     case Value::Type::NONE:
     case Value::Type::BOOLEAN:
     case Value::Type::INTEGER:
     case Value::Type::DOUBLE:
-      Add("FundamentalValue", sizeof(Value));
+      Add(kBaseValue, sizeof(Value));
       break;
 
     case Value::Type::STRING: {
       const Value* string_value = nullptr;
       value.GetAsString(&string_value);
-      Add("StringValue", sizeof(Value));
+      Add(kBaseValue, sizeof(Value));
       AddString(string_value->GetString());
     } break;
 
     case Value::Type::BINARY: {
-      Add("BinaryValue", sizeof(Value) + value.GetBlob().size());
+      Add(kBaseValue, sizeof(Value) + value.GetBlob().size());
     } break;
 
     case Value::Type::DICTIONARY: {
       const DictionaryValue* dictionary_value = nullptr;
       value.GetAsDictionary(&dictionary_value);
-      Add("DictionaryValue", sizeof(DictionaryValue));
+      Add(kBaseValue, sizeof(DictionaryValue));
       for (DictionaryValue::Iterator it(*dictionary_value); !it.IsAtEnd();
            it.Advance()) {
         AddString(it.key());
         AddValue(it.value());
       }
     } break;
 
     case Value::Type::LIST: {
       const ListValue* list_value = nullptr;
       value.GetAsList(&list_value);
-      Add("ListValue", sizeof(ListValue));
+      Add(kBaseValue, sizeof(ListValue));
       for (const auto& v : *list_value)
         AddValue(v);
     } break;
 
     default:
       NOTREACHED();
   }
 }
 
 void TraceEventMemoryOverhead::AddSelf() {
-  size_t estimated_size = sizeof(*this);
-  // If the small_map did overflow its static capacity, its elements will be
-  // allocated on the heap and have to be accounted separately.
-  if (allocated_objects_.UsingFullMap())
-    estimated_size += sizeof(map_type::value_type) * allocated_objects_.size();
-  Add("TraceEventMemoryOverhead", estimated_size);
+  Add(kTraceEventMemoryOverhead, sizeof(*this));
 }
 
-size_t TraceEventMemoryOverhead::GetCount(const char* object_type) const {
-  const auto& it = allocated_objects_.find(object_type);
-  if (it == allocated_objects_.end())
-    return 0u;
-  return it->second.count;
+size_t TraceEventMemoryOverhead::GetCount(ObjectType object_type) const {
+  CHECK(object_type < kLast);
+  return allocated_objects_[static_cast<uint32_t>(object_type)].count;
 }
 
 void TraceEventMemoryOverhead::Update(const TraceEventMemoryOverhead& other) {
-  for (const auto& it : other.allocated_objects_) {
-    AddOrCreateInternal(it.first, it.second.count,
-                        it.second.allocated_size_in_bytes,
-                        it.second.resident_size_in_bytes);
+  for (uint32_t i = 0; i < kLast; i++) {
+    const ObjectCountAndSize& other_entry = other.allocated_objects_[i];
+    AddInternal(static_cast<ObjectType>(i), other_entry.count,
+                other_entry.allocated_size_in_bytes,
+                other_entry.resident_size_in_bytes);
   }
 }
 
 void TraceEventMemoryOverhead::DumpInto(const char* base_name,
                                         ProcessMemoryDump* pmd) const {
-  for (const auto& it : allocated_objects_) {
-    std::string dump_name = StringPrintf("%s/%s", base_name, it.first);
+  for (uint32_t i = 0; i < kLast; i++) {
+    const ObjectCountAndSize& count_and_size = allocated_objects_[i];
+    if (count_and_size.allocated_size_in_bytes == 0)
+      continue;
+    std::string dump_name = StringPrintf(
+        "%s/%s", base_name, ObjectTypeToString(static_cast<ObjectType>(i)));
     MemoryAllocatorDump* mad = pmd->CreateAllocatorDump(dump_name);
     mad->AddScalar(MemoryAllocatorDump::kNameSize,
                    MemoryAllocatorDump::kUnitsBytes,
-                   it.second.allocated_size_in_bytes);
+                   count_and_size.allocated_size_in_bytes);
     mad->AddScalar("resident_size", MemoryAllocatorDump::kUnitsBytes,
-                   it.second.resident_size_in_bytes);
+                   count_and_size.resident_size_in_bytes);
     mad->AddScalar(MemoryAllocatorDump::kNameObjectCount,
-                   MemoryAllocatorDump::kUnitsObjects, it.second.count);
+                   MemoryAllocatorDump::kUnitsObjects, count_and_size.count);
   }
 }
 
 }  // namespace trace_event
 }  // namespace base
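
For context, a sketch of a hypothetical call site for the reviewed interface. The enum constants and method signatures are the ones introduced in this patch; the surrounding ReportTracingOverhead() function and the byte counts are invented for illustration and assume the Chromium build environment.

#include <string>

#include "base/trace_event/process_memory_dump.h"
#include "base/trace_event/trace_event_memory_overhead.h"

namespace {

// Hypothetical helper: records some overhead and emits it into a dump.
void ReportTracingOverhead(base::trace_event::ProcessMemoryDump* pmd) {
  using base::trace_event::TraceEventMemoryOverhead;
  TraceEventMemoryOverhead overhead;
  // The two-argument Add() overload counts the allocation as fully resident.
  overhead.Add(TraceEventMemoryOverhead::kTraceEvent, 128 /* bytes, invented */);
  overhead.AddString(std::string("example trace event name"));
  overhead.AddSelf();
  // Buckets with zero allocated bytes are skipped by the new DumpInto().
  overhead.DumpInto("tracing/overhead", pmd);
}

}  // namespace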