Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(13)

Side by Side Diff: base/debug/scoped_heap_usage.cc

Issue 2163783003: Implement a ScopedThreadHeapUsage class to allow profiling per-thread heap usage. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@shim-default
Patch Set: Fix a brain****, may even compile now. Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/debug/scoped_heap_usage.h"
6
7 #include "base/allocator/features.h"
8 #include "build/build_config.h"
9
10 #if defined(OS_MACOSX) || defined(OS_IOS)
11 #include <malloc/malloc.h>
12 #else
13 #include <malloc.h>
14 #endif
15 #include <stdint.h>
16
17 #include "base/allocator/allocator_shim.h"
18 #include "base/threading/thread_local_storage.h"
19
20 namespace base {
21 namespace debug {
22
23 #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
Primiano Tucci (use gerrit) 2016/08/24 14:11:46 I think you can just restrict this #if to the Init
Sigurður Ásgeirsson 2016/09/01 15:18:18 Done.
24 namespace {
25
using base::allocator::AllocatorDispatch;

// Per-thread slot holding a heap-allocated AllocatorUsage struct, or
// kInitializingSentinel while that struct is itself being allocated.
ThreadLocalStorage::StaticSlot g_thread_allocator_usage = TLS_INITIALIZER;

// Sentinel stored in the TLS slot for the duration of the first-use
// allocation in GetOrCreateThreadUsage(), so the heap calls made while
// allocating the usage struct are dropped instead of recursing.
ScopedHeapUsage::AllocatorUsage* const kInitializingSentinel =
    reinterpret_cast<ScopedHeapUsage::AllocatorUsage*>(-1);

// Forward declared as it needs to delegate memory allocation to the next
// lower shim.
ScopedHeapUsage::AllocatorUsage* GetOrCreateThreadUsage();
37 size_t GetAllocSizeEstimate(const AllocatorDispatch* next, void* ptr) {
Primiano Tucci (use gerrit) 2016/08/24 15:28:47 So at this point, if we shim GetAllocSize, there s
Sigurður Ásgeirsson 2016/09/01 15:18:18 I can add this if you like, but I don't agree with
38 if (ptr == nullptr || !next->get_size_estimate_function)
39 return 0U;
40
41 return next->get_size_estimate_function(next, ptr);
42 }
43
44 void RecordAlloc(const AllocatorDispatch* next, void* ptr, size_t size) {
45 ScopedHeapUsage::AllocatorUsage* usage = GetOrCreateThreadUsage();
46 if (usage == nullptr)
47 return;
48
49 usage->alloc_ops++;
50 size_t estimate = GetAllocSizeEstimate(next, ptr);
Primiano Tucci (use gerrit) 2016/08/24 14:11:46 yeah here I'd just call GetAllocSizeEstimate(ptr),
Sigurður Ásgeirsson 2016/09/01 15:18:18 As-is, I can test the functionality of this shim i
51 if (estimate) {
52 usage->alloc_bytes += estimate;
53 usage->alloc_overhead_bytes += size - estimate;
54 } else {
55 usage->alloc_bytes += size;
56 }
57
58 uint64_t allocated_bytes = usage->alloc_bytes - usage->free_bytes;
59 if (allocated_bytes > usage->max_allocated_bytes)
60 usage->max_allocated_bytes = allocated_bytes;
61 }
62
63 void RecordFree(const AllocatorDispatch* next, void* ptr) {
64 ScopedHeapUsage::AllocatorUsage* usage = GetOrCreateThreadUsage();
65 if (usage == nullptr)
66 return;
67
68 size_t estimate = GetAllocSizeEstimate(next, ptr);
69 usage->free_ops++;
70 usage->free_bytes += estimate;
71 }
72
73 void* AllocFn(const AllocatorDispatch* self, size_t size) {
74 void* ret = self->next->alloc_function(self, size);
75 if (ret != nullptr)
76 RecordAlloc(self->next, ret, size);
77
78 return ret;
79 }
80
81 void* AllocZeroInitializedFn(const AllocatorDispatch* self,
82 size_t n,
83 size_t size) {
84 void* ret = self->next->alloc_zero_initialized_function(self, n, size);
85 if (ret != nullptr)
86 RecordAlloc(self->next, ret, size);
87
88 return ret;
89 }
90
91 void* AllocAlignedFn(const AllocatorDispatch* self,
92 size_t alignment,
93 size_t size) {
94 void* ret = self->next->alloc_aligned_function(self, alignment, size);
95 if (ret != nullptr)
96 RecordAlloc(self->next, ret, size);
97
98 return ret;
99 }
100
101 void* ReallocFn(const AllocatorDispatch* self, void* address, size_t size) {
102 if (address != nullptr)
103 RecordFree(self->next, address);
104
105 void* ret = self->next->realloc_function(self, address, size);
106 if (ret != nullptr)
Primiano Tucci (use gerrit) 2016/08/24 14:11:46 I'd probably be a bit more conservative here and d
Sigurður Ásgeirsson 2016/09/01 15:18:18 Done.
107 RecordAlloc(self->next, ret, size);
108
109 return ret;
110 }
111
112 void FreeFn(const AllocatorDispatch* self, void* address) {
113 if (address)
Primiano Tucci (use gerrit) 2016/08/24 14:11:46 small nit:y ou seem to mix if (address) with if (a
Sigurður Ásgeirsson 2016/09/01 15:18:18 Done.
114 RecordFree(self->next, address);
115 self->next->free_function(self, address);
116 }
117
// The dispatch for the heap intercept used. The trailing nullptr leaves the
// remaining member(s) unset; |next| is filled in when this entry is inserted
// into the shim chain by InsertAllocatorDispatch().
AllocatorDispatch allocator_dispatch = {
    &AllocFn, &AllocZeroInitializedFn, &AllocAlignedFn, &ReallocFn, &FreeFn,
    nullptr};
122
// Returns the calling thread's usage struct, lazily allocating it from the
// next-lower shim layer on first use. Returns nullptr while that allocation
// is itself in flight, so the re-entrant heap calls it causes are dropped
// rather than recursing.
ScopedHeapUsage::AllocatorUsage* GetOrCreateThreadUsage() {
  ScopedHeapUsage::AllocatorUsage* allocator_usage =
      static_cast<ScopedHeapUsage::AllocatorUsage*>(
          g_thread_allocator_usage.Get());
  if (allocator_usage == kInitializingSentinel)
    return nullptr;  // Re-entrancy case.

  if (!allocator_usage) {
    // Mark the slot before allocating so the allocation below is unrecorded.
    g_thread_allocator_usage.Set(kInitializingSentinel);

    // Allocate zero-initialized storage from the next-lower shim to bypass
    // this shim's own hooks.
    const AllocatorDispatch* next = allocator_dispatch.next;
    allocator_usage = reinterpret_cast<ScopedHeapUsage::AllocatorUsage*>(
        next->alloc_zero_initialized_function(next, 1,
                                              sizeof(*allocator_usage)));
    // NOTE(review): on allocation failure this stores nullptr, which clears
    // the sentinel and retries on the next call — confirm that's intended.
    g_thread_allocator_usage.Set(allocator_usage);
  }

  return allocator_usage;
}
142
143 void FreeAllocatorUsage(void* allocator_usage) {
144 const AllocatorDispatch* next = allocator_dispatch.next;
145 next->free_function(next, allocator_usage);
146 }
147
148 } // namespace
149
// Snapshots the thread's running tally and zeroes the max-allocated
// high-water mark so this scope measures its own peak.
ScopedHeapUsage::ScopedHeapUsage() : thread_usage_(GetOrCreateThreadUsage()) {
  // NOTE(review): GetOrCreateThreadUsage() can return nullptr on re-entrant
  // use; this dereference would then crash — confirm scopes are never
  // constructed from within the shim's own allocation path.
  usage_at_creation_ = *thread_usage_;
  // Reset the max allocation tally for this scope.
  thread_usage_->max_allocated_bytes = 0U;
}
155
// On scope exit, propagates the larger of the outer scope's and this scope's
// max-allocated high-water marks back into the thread tally, so nested
// scopes don't lose the enclosing scope's peak.
ScopedHeapUsage::~ScopedHeapUsage() {
  if (usage_at_creation_.max_allocated_bytes >
      thread_usage_->max_allocated_bytes) {
    // Restore the outer scope's max allocation tally, as it's larger than
    // our scope's max.
    thread_usage_->max_allocated_bytes = usage_at_creation_.max_allocated_bytes;
  }
}
164
165 ScopedHeapUsage::AllocatorUsage ScopedHeapUsage::Now() {
166 AllocatorUsage* usage = GetOrCreateThreadUsage();
167 return *usage;
168 }
169
// One-time setup: registers the TLS destructor (guarded so repeated calls
// are harmless) and inserts this shim's dispatch into the allocator chain.
void ScopedHeapUsage::Initialize() {
  if (!g_thread_allocator_usage.initialized())
    g_thread_allocator_usage.Initialize(FreeAllocatorUsage);

  InsertAllocatorDispatch(&allocator_dispatch);
}
176
// Test-only teardown: unhooks this shim's dispatch from the allocator chain.
// The TLS slot and any per-thread structs are intentionally left in place.
void ScopedHeapUsage::TearDownForTesting() {
  RemoveAllocatorDispatchForTesting(&allocator_dispatch);
}
180 #endif // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
181
182 } // namespace debug
183 } // namespace base
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698