Chromium Code Reviews

Side by Side Diff: base/debug/scoped_thread_heap_usage_unittest.cc

Issue 2163783003: Implement a ScopedThreadHeapUsage class to allow profiling per-thread heap usage. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@shim-default
Patch Set: Change Init implementation to return a bool status. ::Init is now also tested when the shim is disa… Created 4 years, 3 months ago
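For context, a minimal usage sketch based on the API these tests exercise (Initialize(), the scoped object, Now(), and the ThreadAllocatorUsage fields); this is not patch code, and the workload function is hypothetical:

  // Sketch only: profile this thread's heap activity with the class under
  // review, assuming the API shapes exercised by the tests below.
  if (base::debug::ScopedThreadHeapUsage::Initialize()) {  // True iff the shim is available.
    base::debug::ScopedThreadHeapUsage scoped_usage;
    DoProfiledWork();  // Hypothetical workload whose allocations are tallied.
    base::debug::ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        base::debug::ScopedThreadHeapUsage::Now();
    // usage.alloc_ops, alloc_bytes, alloc_overhead_bytes, free_ops,
    // free_bytes and max_allocated_bytes now reflect allocations made on
    // this thread while scoped_usage was live.
  }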
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/scoped_thread_heap_usage.h"

#include <map>

#include "base/allocator/allocator_shim.h"
#include "base/allocator/features.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace base {
namespace debug {

namespace {

class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage {
 public:
  using ScopedThreadHeapUsage::TearDownForTesting;
  using ScopedThreadHeapUsage::GetDispatchForTesting;
  using ScopedThreadHeapUsage::EnsureTLSInitializedForTesting;
};

// A fixture class that allows testing the AllocatorDispatch associated with
// the ScopedThreadHeapUsage class in isolation against a mocked underlying
// heap implementation.
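//
// The wiring: SetUp() chains the dispatch under test to g_mock_dispatch via
// its next pointer, so allocations issued through the Mock* helpers first
// pass through the usage-tracking dispatch and then land in the On*Fn
// functions below, which satisfy them with the libc heap and record each
// allocation's size for later size estimates.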
class ScopedThreadHeapUsageTest : public testing::Test {
 public:
  using AllocatorDispatch = base::allocator::AllocatorDispatch;

  static const size_t kAllocationPadding;
  enum SizeFunctionKind {
    EXACT_SIZE_FUNCTION,
    PADDING_SIZE_FUNCTION,
    ZERO_SIZE_FUNCTION,
  };

  ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
    EXPECT_EQ(nullptr, g_self);
    g_self = this;
  }

  ~ScopedThreadHeapUsageTest() override {
    EXPECT_EQ(this, g_self);
    g_self = nullptr;
  }

  void set_size_function_kind(SizeFunctionKind kind) {
    size_function_kind_ = kind;
  }

  void SetUp() override {
    TestingScopedThreadHeapUsage::EnsureTLSInitializedForTesting();

    dispatch_under_test_ =
        TestingScopedThreadHeapUsage::GetDispatchForTesting();
    ASSERT_EQ(nullptr, dispatch_under_test_->next);

    dispatch_under_test_->next = &g_mock_dispatch;
  }

  void TearDown() override {
    ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next);

    dispatch_under_test_->next = nullptr;
  }

  void* MockMalloc(size_t size) {
    return dispatch_under_test_->alloc_function(dispatch_under_test_, size);
  }

  void* MockCalloc(size_t n, size_t size) {
    return dispatch_under_test_->alloc_zero_initialized_function(
        dispatch_under_test_, n, size);
  }

  void* MockAllocAligned(size_t alignment, size_t size) {
    return dispatch_under_test_->alloc_aligned_function(dispatch_under_test_,
                                                        alignment, size);
  }

  void* MockRealloc(void* address, size_t size) {
    return dispatch_under_test_->realloc_function(dispatch_under_test_,
                                                  address, size);
  }

  void MockFree(void* address) {
    dispatch_under_test_->free_function(dispatch_under_test_, address);
  }

  size_t MockGetSizeEstimate(void* address) {
    return dispatch_under_test_->get_size_estimate_function(
        dispatch_under_test_, address);
  }

 private:
  void RecordAlloc(void* address, size_t size) {
    if (address != nullptr)
      allocation_size_map_[address] = size;
  }

  void DeleteAlloc(void* address) {
    if (address != nullptr)
      EXPECT_EQ(1U, allocation_size_map_.erase(address));
  }

  size_t GetSizeEstimate(void* address) {
    auto it = allocation_size_map_.find(address);
    if (it == allocation_size_map_.end())
      return 0;

    size_t ret = it->second;
    switch (size_function_kind_) {
      case EXACT_SIZE_FUNCTION:
        break;
      case PADDING_SIZE_FUNCTION:
        ret += kAllocationPadding;
        break;
      case ZERO_SIZE_FUNCTION:
        ret = 0;
        break;
    }

    return ret;
  }

  static void* OnAllocFn(const AllocatorDispatch* self, size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnAllocZeroInitializedFn(const AllocatorDispatch* self,
                                        size_t n,
                                        size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = calloc(n, size);
    g_self->RecordAlloc(ret, n * size);
    return ret;
  }

  static void* OnAllocAlignedFn(const AllocatorDispatch* self,
                                size_t alignment,
                                size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    // This is a cheat as it doesn't return aligned allocations. This has the
    // advantage of working for all platforms for this test.
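    // (A real aligned allocator would use, e.g., posix_memalign or, on
    // Windows, _aligned_malloc; plain malloc suffices here because these
    // tests only check byte tallies, not alignment.)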
    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnReallocFn(const AllocatorDispatch* self,
                           void* address,
                           size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    void* ret = realloc(address, size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void OnFreeFn(const AllocatorDispatch* self, void* address) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    free(address);
  }

  static size_t OnGetSizeEstimateFn(const AllocatorDispatch* self,
                                    void* address) {
    EXPECT_EQ(&g_mock_dispatch, self);

    return g_self->GetSizeEstimate(address);
  }

  using AllocationSizeMap = std::map<void*, size_t>;

  SizeFunctionKind size_function_kind_;
  AllocationSizeMap allocation_size_map_;
  AllocatorDispatch* dispatch_under_test_;

  static base::allocator::AllocatorDispatch g_mock_dispatch;
  static ScopedThreadHeapUsageTest* g_self;
};

const size_t ScopedThreadHeapUsageTest::kAllocationPadding = 23;

ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr;

base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch =
    {
        &ScopedThreadHeapUsageTest::OnAllocFn,  // alloc_function
        &ScopedThreadHeapUsageTest::
            OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
        &ScopedThreadHeapUsageTest::OnAllocAlignedFn,  // alloc_aligned_function
        &ScopedThreadHeapUsageTest::OnReallocFn,       // realloc_function
        &ScopedThreadHeapUsageTest::OnFreeFn,          // free_function
        &ScopedThreadHeapUsageTest::
            OnGetSizeEstimateFn,  // get_size_estimate_function
        nullptr,                  // next
};

}  // namespace

TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
  set_size_function_kind(EXACT_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize, u2.free_bytes);
  EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
}

TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
  set_size_function_kind(PADDING_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes);
  EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes);
}

TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
  set_size_function_kind(ZERO_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  // With a get-size function that returns zero, there's no way to get the size
  // of an allocation that's being freed, hence the shim can't tally freed
  // bytes nor the high-watermark allocated bytes.
  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(0U, u2.free_bytes);
  EXPECT_EQ(0U, u2.max_allocated_bytes);
}

TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
  const size_t kAllocSize = 237U;

  {
    ScopedThreadHeapUsage scoped_usage;

    // Reallocating nullptr should count as a single alloc.
    void* ptr = MockRealloc(nullptr, kAllocSize);
    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        ScopedThreadHeapUsage::Now();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(0U, usage.free_ops);
    EXPECT_EQ(0U, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Reallocating a valid pointer to a zero size should count as a single
    // free.
    ptr = MockRealloc(ptr, 0U);

    usage = ScopedThreadHeapUsage::Now();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(1U, usage.free_ops);
    EXPECT_EQ(kAllocSize, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Realloc to zero size may or may not return nullptr - if it returns a
    // valid pointer, the zero-size allocation must still be freed.
    if (ptr != nullptr)
      MockFree(ptr);
  }

  {
    ScopedThreadHeapUsage scoped_usage;

    void* ptr = MockMalloc(kAllocSize);
    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        ScopedThreadHeapUsage::Now();
    EXPECT_EQ(1U, usage.alloc_ops);

    // Now try reallocating a valid pointer to a larger size, this should count
    // as one free and one alloc.
    const size_t kLargerAllocSize = kAllocSize + 928U;
    ptr = MockRealloc(ptr, kLargerAllocSize);

    usage = ScopedThreadHeapUsage::Now();
    EXPECT_EQ(2U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize + kLargerAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(1U, usage.free_ops);
    EXPECT_EQ(kAllocSize, usage.free_bytes);
    EXPECT_EQ(kLargerAllocSize, usage.max_allocated_bytes);

    MockFree(ptr);
  }
}

TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) {
  ScopedThreadHeapUsage outer_scoped_usage;

  const size_t kOuterAllocSize = 1029U;
  void* ptr = MockMalloc(kOuterAllocSize);
  MockFree(ptr);

  EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes);

  {
    ScopedThreadHeapUsage inner_scoped_usage;

    const size_t kInnerAllocSize = 673U;
    ptr = MockMalloc(kInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kInnerAllocSize,
              ScopedThreadHeapUsage::Now().max_allocated_bytes);
  }

  // The greater, outer allocation size should have been restored.
  EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes);

  const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
  {
    ScopedThreadHeapUsage inner_scoped_usage;

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kLargerInnerAllocSize,
              ScopedThreadHeapUsage::Now().max_allocated_bytes);
  }

  // The greater, inner allocation size should have been preserved.
  EXPECT_EQ(kLargerInnerAllocSize,
            ScopedThreadHeapUsage::Now().max_allocated_bytes);

  // Now try the case with an outstanding net alloc size when entering the
  // inner scope.
  void* outer_ptr = MockMalloc(kOuterAllocSize);
  EXPECT_EQ(kLargerInnerAllocSize,
            ScopedThreadHeapUsage::Now().max_allocated_bytes);
  {
    ScopedThreadHeapUsage inner_scoped_usage;

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kLargerInnerAllocSize,
              ScopedThreadHeapUsage::Now().max_allocated_bytes);
  }

  // While the inner scope saw only the inner net outstanding allocation size,
  // the outer scope saw both outstanding at the same time.
  EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
            ScopedThreadHeapUsage::Now().max_allocated_bytes);

  MockFree(outer_ptr);
}

TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) {
  const size_t kAllocSize = 100;
  void* alloc = MockMalloc(kAllocSize);
  size_t estimate = MockGetSizeEstimate(alloc);
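  // A get-size function may legitimately return 0 when it can't size an
  // allocation (as in the ZERO_SIZE_FUNCTION mode above), so the asserts
  // below only require a lower bound for nonzero estimates.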
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockCalloc(kAllocSize, 1);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockAllocAligned(1, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);

  alloc = MockRealloc(alloc, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);
}

TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) {
  const bool kAllocatorShimAvailable =
#if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
      true;
#else
      false;
#endif

  ASSERT_EQ(kAllocatorShimAvailable, ScopedThreadHeapUsage::Initialize());

  const size_t kAllocSize = 9993;
  // This test verifies that the scoped heap data is affected by malloc &
  // free only when the shim is available.
  ScopedThreadHeapUsage scoped_usage;

  void* ptr = malloc(kAllocSize);
  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
  free(ptr);
  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  if (kAllocatorShimAvailable) {
    // Verify that at least one allocation operation was recorded, and that
    // the allocation counts are monotonically non-decreasing.
    EXPECT_LE(1U, u1.alloc_ops);
    EXPECT_LE(u1.alloc_ops, u2.alloc_ops);

    // Verify that at least the bytes allocated above were recorded.
    EXPECT_LE(kAllocSize, u2.alloc_bytes);

    // Verify that at least the one free operation above was recorded.
    EXPECT_LE(u1.free_ops + 1, u2.free_ops);
  } else {
    EXPECT_EQ(0U, u1.alloc_ops);
    EXPECT_EQ(0U, u1.alloc_bytes);
    EXPECT_EQ(0U, u1.alloc_overhead_bytes);
    EXPECT_EQ(0U, u1.free_ops);
    EXPECT_EQ(0U, u1.free_bytes);
    EXPECT_EQ(0U, u1.max_allocated_bytes);

    EXPECT_EQ(0U, u2.alloc_ops);
    EXPECT_EQ(0U, u2.alloc_bytes);
    EXPECT_EQ(0U, u2.alloc_overhead_bytes);
    EXPECT_EQ(0U, u2.free_ops);
    EXPECT_EQ(0U, u2.free_bytes);
    EXPECT_EQ(0U, u2.max_allocated_bytes);
  }
}

}  // namespace debug
}  // namespace base
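For reviewers without the implementation side of the CL open: given the AllocatorDispatch chaining the mock above relies on (each hook receives self and forwards to self->next), the shim functions under test presumably have roughly this shape. This is an inferred sketch, not the patch's code, and RecordAllocInTLS is a hypothetical name:

  void* AllocFn(const AllocatorDispatch* self, size_t size) {
    // Forward to the next dispatch in the chain (the mock, in these tests).
    void* ret = self->next->alloc_function(self->next, size);
    if (ret != nullptr)
      RecordAllocInTLS(ret, size);  // Hypothetical: bump this thread's alloc_ops/alloc_bytes.
    return ret;
  }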