Chromium Code Reviews
Side by Side Diff: base/debug/scoped_thread_heap_usage_unittest.cc

Issue 2163783003: Implement a ScopedThreadHeapUsage class to allow profiling per-thread heap usage. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@shim-default
Patch Set: Merge ToT and change ASSERT->EXPECT. Created 4 years, 3 months ago
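For readers skimming the review, here is a minimal usage sketch of the API exercised by the tests below. It is inferred from this patch set only; the work inside the scope (DoExpensiveWork) is a hypothetical placeholder, and hooking real malloc/free additionally requires USE_EXPERIMENTAL_ALLOCATOR_SHIM, as the shim test at the bottom shows.

#include "base/debug/scoped_thread_heap_usage.h"

void ProfileHeapUsageSketch() {
  // Called once before any scopes are created (see
  // ScopedThreadHeapShimTest::SetUp below).
  base::debug::ScopedThreadHeapUsage::Initialize();

  base::debug::ScopedThreadHeapUsage scoped_usage;
  DoExpensiveWork();  // Hypothetical work whose heap usage is profiled.

  base::debug::ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
      base::debug::ScopedThreadHeapUsage::Now();
  // usage.alloc_ops, usage.alloc_bytes, usage.alloc_overhead_bytes,
  // usage.free_ops, usage.free_bytes and usage.max_allocated_bytes describe
  // this thread's heap activity since the scope was entered.
}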
1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/debug/scoped_thread_heap_usage.h"
6
7 #include <map>
8
9 #include "base/allocator/allocator_shim.h"
10 #include "base/allocator/features.h"
11 #include "testing/gtest/include/gtest/gtest.h"
12
13 namespace base {
14 namespace debug {
15
16 namespace {
17
18 class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage {
19 public:
20 using ScopedThreadHeapUsage::TearDownForTesting;
21 using ScopedThreadHeapUsage::GetDispatchForTesting;
22 };
23
24 // A fixture class that allows testing the AllocatorDispatch associated with
chrisha 2016/09/01 20:29:18 allows*
Sigurður Ásgeirsson 2016/09/06 14:58:54 Done.
25 // the ScopedThreadHeapUsage class in isolation against a mocked underlying
26 // heap implementation.
27 class ScopedThreadHeapUsageTest : public testing::Test {
28 public:
29 using AllocatorDispatch = base::allocator::AllocatorDispatch;
30
31 static const size_t kAllocationPadding = 23;
32 enum SizeFunctionKind {
33 EXACT_SIZE_FUNCTION,
34 PADDING_SIZE_FUNCTION,
35 ZERO_SIZE_FUNCTION,
36 };
37
38 ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
39 EXPECT_EQ(nullptr, g_self);
40 g_self = this;
41 }
42
43 ~ScopedThreadHeapUsageTest() override {
44 EXPECT_EQ(this, g_self);
45 g_self = nullptr;
46 }
47
48 void set_size_function_kind(SizeFunctionKind kind) {
49 size_function_kind_ = kind;
50 }
51
52 void SetUp() override {
53 dispatch_under_test_ =
54 TestingScopedThreadHeapUsage::GetDispatchForTesting();
55 ASSERT_EQ(nullptr, dispatch_under_test_->next);
56
57 dispatch_under_test_->next = &g_mock_dispatch;
58 }
59
60 void TearDown() override {
61 ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next);
62
63 dispatch_under_test_->next = nullptr;
64 }
65
66 void* MockMalloc(size_t size) {
67 return dispatch_under_test_->alloc_function(dispatch_under_test_, size);
68 }
69
70 void* MockCalloc(size_t n, size_t size) {
71 return dispatch_under_test_->alloc_zero_initialized_function(
72 dispatch_under_test_, n, size);
73 }
74
75 void* MockAllocAligned(size_t alignment, size_t size) {
76 return dispatch_under_test_->alloc_aligned_function(dispatch_under_test_,
77 alignment, size);
78 }
79
80 void* MockRealloc(void* address, size_t size) {
81 return dispatch_under_test_->realloc_function(dispatch_under_test_, address,
82 size);
83 }
84
85 void MockFree(void* address) {
86 dispatch_under_test_->free_function(dispatch_under_test_, address);
87 }
88
89 private:
90 void RecordAlloc(void* address, size_t size) {
91 if (address != nullptr)
92 allocation_size_map_[address] = size;
93 }
94
95 void DeleteAlloc(void* address) {
96 if (address != nullptr)
97 EXPECT_EQ(1U, allocation_size_map_.erase(address));
98 }
99
100 size_t GetSizeEstimate(void* address) {
101 auto it = allocation_size_map_.find(address);
102 if (it == allocation_size_map_.end())
103 return 0;
104
105 size_t ret = it->second;
106 switch (size_function_kind_) {
107 case EXACT_SIZE_FUNCTION:
108 break;
109 case PADDING_SIZE_FUNCTION:
110 ret += kAllocationPadding;
111 break;
112 case ZERO_SIZE_FUNCTION:
113 ret = 0;
114 break;
115 }
116
117 return ret;
118 }
119
120 static void* OnAllocFn(const AllocatorDispatch* self, size_t size) {
121 void* ret = malloc(size);
122 g_self->RecordAlloc(ret, size);
123 return ret;
124 }
125
126 static void* OnAllocZeroInitializedFn(const AllocatorDispatch* self,
127 size_t n,
128 size_t size) {
129 void* ret = calloc(n, size);
130 g_self->RecordAlloc(ret, n * size);
131 return ret;
132 }
133
134 static void* OnAllocAlignedFn(const AllocatorDispatch* self,
135 size_t alignment,
136 size_t size) {
137 // This is a cheat as it doesn't return aligned allocations. This has the
138 // advantage of working for all platforms for this test.
139 void* ret = malloc(size);
140 g_self->RecordAlloc(ret, size);
141 return ret;
142 }
143
144 static void* OnReallocFn(const AllocatorDispatch* self,
145 void* address,
146 size_t size) {
147 g_self->DeleteAlloc(address);
148 void* ret = realloc(address, size);
149 g_self->RecordAlloc(ret, size);
150 return ret;
151 }
152
153 static void OnFreeFn(const AllocatorDispatch* self, void* address) {
154 g_self->DeleteAlloc(address);
155 free(address);
156 }
157
158 static size_t OnGetSizeEstimateFn(const AllocatorDispatch* self,
159 void* address) {
160 return g_self->GetSizeEstimate(address);
161 }
162
163 using AllocationSizeMap = std::map<void*, size_t>;
164
165 SizeFunctionKind size_function_kind_;
166 AllocationSizeMap allocation_size_map_;
167 AllocatorDispatch* dispatch_under_test_;
168
169 static base::allocator::AllocatorDispatch g_mock_dispatch;
170 static ScopedThreadHeapUsageTest* g_self;
171 };
172
173 ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr;
174
175 base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch =
176 {
177 &ScopedThreadHeapUsageTest::OnAllocFn, // alloc_function
178 &ScopedThreadHeapUsageTest::
179 OnAllocZeroInitializedFn, // alloc_zero_initialized_function
180 &ScopedThreadHeapUsageTest::OnAllocAlignedFn, // alloc_aligned_function
181 &ScopedThreadHeapUsageTest::OnReallocFn, // realloc_function
182 &ScopedThreadHeapUsageTest::OnFreeFn, // free_function
183 &ScopedThreadHeapUsageTest::
184 OnGetSizeEstimateFn, // get_size_estimate_function
185 nullptr, // next
186 };
187
188 } // namespace
189
190 TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
191 set_size_function_kind(EXACT_SIZE_FUNCTION);
192
193 ScopedThreadHeapUsage scoped_usage;
194
195 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
196
197 EXPECT_EQ(0U, u1.alloc_ops);
198 EXPECT_EQ(0U, u1.alloc_bytes);
199 EXPECT_EQ(0U, u1.alloc_overhead_bytes);
200 EXPECT_EQ(0U, u1.free_ops);
201 EXPECT_EQ(0U, u1.free_bytes);
202 EXPECT_EQ(0U, u1.max_allocated_bytes);
203
204 const size_t kAllocSize = 1029U;
205 void* ptr = MockMalloc(kAllocSize);
206 MockFree(ptr);
207
208 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();
209
210 EXPECT_EQ(1U, u2.alloc_ops);
211 EXPECT_EQ(kAllocSize, u2.alloc_bytes);
212 EXPECT_EQ(0U, u2.alloc_overhead_bytes);
213 EXPECT_EQ(1U, u2.free_ops);
214 EXPECT_EQ(kAllocSize, u2.free_bytes);
215 EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
216 }
217
218 TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
219 set_size_function_kind(ZERO_SIZE_FUNCTION);
220
221 ScopedThreadHeapUsage scoped_usage;
222
223 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
224 EXPECT_EQ(0U, u1.alloc_ops);
225 EXPECT_EQ(0U, u1.alloc_bytes);
226 EXPECT_EQ(0U, u1.alloc_overhead_bytes);
227 EXPECT_EQ(0U, u1.free_ops);
228 EXPECT_EQ(0U, u1.free_bytes);
229 EXPECT_EQ(0U, u1.max_allocated_bytes);
230
231 const size_t kAllocSize = 1029U;
232 void* ptr = MockMalloc(kAllocSize);
233 MockFree(ptr);
234
235 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();
236
237 // With a get-size function that returns zero, there's no way to get the size
238 // of an allocation that's being freed, so the shim can tally neither freed
239 // bytes nor the high-watermark of allocated bytes.
240 EXPECT_EQ(1U, u2.alloc_ops);
241 EXPECT_EQ(kAllocSize, u2.alloc_bytes);
242 EXPECT_EQ(0U, u2.alloc_overhead_bytes);
243 EXPECT_EQ(1U, u2.free_ops);
244 EXPECT_EQ(0U, u2.free_bytes);
245 EXPECT_EQ(0U, u2.max_allocated_bytes);
246 }
247
248 TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) {
249 ScopedThreadHeapUsage outer_scoped_usage;
250
251 const size_t kOuterAllocSize = 1029U;
252 void* ptr = MockMalloc(kOuterAllocSize);
253 MockFree(ptr);
254
255 EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes);
256
257 {
258 ScopedThreadHeapUsage inner_scoped_usage;
259
260 const size_t kInnerAllocSize = 673U;
261 ptr = MockMalloc(kInnerAllocSize);
262 MockFree(ptr);
263
264 EXPECT_EQ(kInnerAllocSize,
265 ScopedThreadHeapUsage::Now().max_allocated_bytes);
266 }
267
268 // The greater, outer allocation size should have been restored.
269 EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes);
270
271 const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
272 {
273 ScopedThreadHeapUsage inner_scoped_usage;
274
275 ptr = MockMalloc(kLargerInnerAllocSize);
276 MockFree(ptr);
277
278 EXPECT_EQ(kLargerInnerAllocSize,
279 ScopedThreadHeapUsage::Now().max_allocated_bytes);
280 }
281
282 // The greater, inner allocation size should have been preserved.
283 EXPECT_EQ(kLargerInnerAllocSize,
284 ScopedThreadHeapUsage::Now().max_allocated_bytes);
285
286 // Now try the case with an outstanding net alloc size when entering the
287 // inner scope.
288 void* outer_ptr = MockMalloc(kOuterAllocSize);
289 EXPECT_EQ(kLargerInnerAllocSize,
290 ScopedThreadHeapUsage::Now().max_allocated_bytes);
291 {
292 ScopedThreadHeapUsage inner_scoped_usage;
293
294 ptr = MockMalloc(kLargerInnerAllocSize);
295 MockFree(ptr);
296
297 EXPECT_EQ(kLargerInnerAllocSize,
298 ScopedThreadHeapUsage::Now().max_allocated_bytes);
299 }
300
301 // While the inner scope saw only the inner net outstanding allocation size,
chrisha 2016/09/01 20:29:18 allocation*
Sigurður Ásgeirsson 2016/09/06 14:58:54 Done.
302 // the outer scope saw both outstanding at the same time.
303 EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
304 ScopedThreadHeapUsage::Now().max_allocated_bytes);
305
306 MockFree(outer_ptr);
307 }
308
309 namespace {
310
311 class ScopedThreadHeapShimTest : public testing::Test {
312 public:
313 void SetUp() override { ScopedThreadHeapUsage::Initialize(); }
314
315 void TearDown() override {
316 TestingScopedThreadHeapUsage::TearDownForTesting();
317 }
318 };
319
320 } // namespace
321
322 #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
323 TEST_F(ScopedThreadHeapShimTest, HooksIntoMallocWithShim) {
324 const size_t kAllocSize = 9993;
325 // This test verifies that the scoped heap data is affected by malloc &
326 // free.
327 ScopedThreadHeapUsage scoped_usage;
328
329 void* ptr = malloc(kAllocSize);
330 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
331 free(ptr);
332 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();
333
334 // Verify that at least one allocation operation was recorded, and that the
335 // allocation operation count is monotonically non-decreasing.
336 EXPECT_LE(1U, u1.alloc_ops);
337 EXPECT_LE(u1.alloc_ops, u2.alloc_ops);
338
339 // Verify that at least the bytes above were recorded.
340 EXPECT_LE(kAllocSize, u2.alloc_bytes);
341
342 // Verify that at least the one free operation above was recorded.
chrisha 2016/09/01 20:29:18 above*
Sigurður Ásgeirsson 2016/09/06 14:58:54 Done.
343 EXPECT_LE(u1.free_ops + 1, u2.free_ops);
344 }
345 #endif // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
346
347 } // namespace debug
348 } // namespace base