Chromium Code Reviews

Side by Side Diff: base/debug/scoped_thread_heap_usage_unittest.cc

Issue 2163783003: Implement a ScopedThreadHeapUsage class to allow profiling per-thread heap usage. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@shim-default
Patch Set: Moar speling [sic]. Created 4 years, 3 months ago
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/scoped_thread_heap_usage.h"

#include <map>

#include "base/allocator/allocator_shim.h"
#include "base/allocator/features.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace base {
namespace debug {

namespace {

class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage {
 public:
  using ScopedThreadHeapUsage::DisableHeapTrackingForTesting;
  using ScopedThreadHeapUsage::GetDispatchForTesting;
};

// A fixture class that allows testing the AllocatorDispatch associated with
// the ScopedThreadHeapUsage class in isolation against a mocked underlying
// heap implementation.
class ScopedThreadHeapUsageTest : public testing::Test {
 public:
  using AllocatorDispatch = base::allocator::AllocatorDispatch;

  static const size_t kAllocationPadding;
  enum SizeFunctionKind {
    EXACT_SIZE_FUNCTION,
    PADDING_SIZE_FUNCTION,
    ZERO_SIZE_FUNCTION,
  };

  ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
    EXPECT_EQ(nullptr, g_self);
    g_self = this;
  }

  ~ScopedThreadHeapUsageTest() override {
    EXPECT_EQ(this, g_self);
    g_self = nullptr;
  }

  void set_size_function_kind(SizeFunctionKind kind) {
    size_function_kind_ = kind;
  }

  void SetUp() override {
    ScopedThreadHeapUsage::Initialize();

    dispatch_under_test_ =
        TestingScopedThreadHeapUsage::GetDispatchForTesting();
    ASSERT_EQ(nullptr, dispatch_under_test_->next);

    dispatch_under_test_->next = &g_mock_dispatch;
  }

  void TearDown() override {
    ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next);

    dispatch_under_test_->next = nullptr;
  }

  void* MockMalloc(size_t size) {
    return dispatch_under_test_->alloc_function(dispatch_under_test_, size);
  }

  void* MockCalloc(size_t n, size_t size) {
    return dispatch_under_test_->alloc_zero_initialized_function(
        dispatch_under_test_, n, size);
  }

  void* MockAllocAligned(size_t alignment, size_t size) {
    return dispatch_under_test_->alloc_aligned_function(dispatch_under_test_,
                                                        alignment, size);
  }

  void* MockRealloc(void* address, size_t size) {
    return dispatch_under_test_->realloc_function(dispatch_under_test_, address,
                                                  size);
  }

  void MockFree(void* address) {
    dispatch_under_test_->free_function(dispatch_under_test_, address);
  }

  size_t MockGetSizeEstimate(void* address) {
    return dispatch_under_test_->get_size_estimate_function(
        dispatch_under_test_, address);
  }

 private:
  void RecordAlloc(void* address, size_t size) {
    if (address != nullptr)
      allocation_size_map_[address] = size;
  }

  void DeleteAlloc(void* address) {
    if (address != nullptr)
      EXPECT_EQ(1U, allocation_size_map_.erase(address));
  }

  size_t GetSizeEstimate(void* address) {
    auto it = allocation_size_map_.find(address);
    if (it == allocation_size_map_.end())
      return 0;

    size_t ret = it->second;
    switch (size_function_kind_) {
      case EXACT_SIZE_FUNCTION:
        break;
      case PADDING_SIZE_FUNCTION:
        ret += kAllocationPadding;
        break;
      case ZERO_SIZE_FUNCTION:
        ret = 0;
        break;
    }

    return ret;
  }

  static void* OnAllocFn(const AllocatorDispatch* self, size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnAllocZeroInitializedFn(const AllocatorDispatch* self,
                                        size_t n,
                                        size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = calloc(n, size);
    g_self->RecordAlloc(ret, n * size);
    return ret;
  }

  static void* OnAllocAlignedFn(const AllocatorDispatch* self,
                                size_t alignment,
                                size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    // This cheats by not returning an aligned allocation, which has the
    // advantage of working on all platforms for this test.
    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnReallocFn(const AllocatorDispatch* self,
                           void* address,
                           size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    void* ret = realloc(address, size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void OnFreeFn(const AllocatorDispatch* self, void* address) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    free(address);
  }

  static size_t OnGetSizeEstimateFn(const AllocatorDispatch* self,
                                    void* address) {
    EXPECT_EQ(&g_mock_dispatch, self);

    return g_self->GetSizeEstimate(address);
  }

  using AllocationSizeMap = std::map<void*, size_t>;

  SizeFunctionKind size_function_kind_;
  AllocationSizeMap allocation_size_map_;
  AllocatorDispatch* dispatch_under_test_;

  static base::allocator::AllocatorDispatch g_mock_dispatch;
  static ScopedThreadHeapUsageTest* g_self;
};

const size_t ScopedThreadHeapUsageTest::kAllocationPadding = 23;

ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr;

base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch =
    {
        &ScopedThreadHeapUsageTest::OnAllocFn,  // alloc_function
        &ScopedThreadHeapUsageTest::
            OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
        &ScopedThreadHeapUsageTest::OnAllocAlignedFn,  // alloc_aligned_function
        &ScopedThreadHeapUsageTest::OnReallocFn,       // realloc_function
        &ScopedThreadHeapUsageTest::OnFreeFn,          // free_function
        &ScopedThreadHeapUsageTest::
            OnGetSizeEstimateFn,  // get_size_estimate_function
        nullptr,                  // next
};

}  // namespace
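
The shim design exercised here chains AllocatorDispatch structs through their
next pointers: SetUp() splices g_mock_dispatch in beneath the dispatch under
test, so every call the tested layer forwards lands in the mock. A minimal
pass-through layer makes the chaining explicit; this is only a sketch, and the
names PassThroughAlloc and g_forwarded_allocs are made up for illustration:

    size_t g_forwarded_allocs = 0;  // Illustrative counter, not in the CL.

    void* PassThroughAlloc(const base::allocator::AllocatorDispatch* self,
                           size_t size) {
      ++g_forwarded_allocs;
      // Each hook forwards to the same hook one link down the chain; a layer
      // that wants to observe without interfering does nothing more.
      return self->next->alloc_function(self->next, size);
    }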

TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
  set_size_function_kind(EXACT_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize, u2.free_bytes);
  EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
}
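
The test above doubles as a usage pattern: instantiating ScopedThreadHeapUsage
starts the per-thread tallies from zero, and ScopedThreadHeapUsage::Now()
snapshots them at any point inside the scope. A hedged sketch of how client
code might measure a workload, where MeasureWorkload and ProcessWorkload are
hypothetical and tracking is assumed to have been set up via Initialize() and
EnableHeapTracking() as in the shim test near the end of this file:

    void MeasureWorkload() {
      base::debug::ScopedThreadHeapUsage scoped_usage;

      ProcessWorkload();  // Hypothetical work whose heap cost is of interest.

      base::debug::ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
          base::debug::ScopedThreadHeapUsage::Now();
      // alloc_bytes/free_bytes tally this thread's traffic inside the scope;
      // max_allocated_bytes is the scope's high-watermark. Assumes
      // base/logging.h for LOG().
      LOG(INFO) << "allocated " << usage.alloc_bytes << " bytes, peak "
                << usage.max_allocated_bytes;
    }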

TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
  set_size_function_kind(PADDING_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes);
  EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes);
}

TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
  set_size_function_kind(ZERO_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();

  // With a get-size function that returns zero, there is no way to learn the
  // size of an allocation that is being freed, so the shim can't tally freed
  // bytes or track the high-watermark of allocated bytes.
  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(0U, u2.free_bytes);
  EXPECT_EQ(0U, u2.max_allocated_bytes);
}
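
The zero-size case turns on the shape of the free path: free carries no size
argument, so the only way a tallying layer can learn the size of a freed block
is to ask the underlying heap via get_size_estimate_function before releasing
it. A sketch of that dependency, using the dispatch fields from the mock table
above; OnFreeHook and RecordFree are illustrative names, not the CL's actual
implementation:

    static void OnFreeHook(const AllocatorDispatch* self, void* address) {
      // Query the size before freeing; once the block is released, the
      // estimate is gone. A zero estimate leaves nothing to tally, which is
      // exactly why free_bytes and max_allocated_bytes stay zero above.
      size_t size =
          self->next->get_size_estimate_function(self->next, address);
      RecordFree(size);
      self->next->free_function(self->next, address);
    }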

TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
  const size_t kAllocSize = 237U;

  {
    ScopedThreadHeapUsage scoped_usage;

    // Reallocating nullptr should count as a single alloc.
    void* ptr = MockRealloc(nullptr, kAllocSize);
    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        ScopedThreadHeapUsage::Now();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(0U, usage.free_ops);
    EXPECT_EQ(0U, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Reallocating a valid pointer to a zero size should count as a single
    // free.
    ptr = MockRealloc(ptr, 0U);

    usage = ScopedThreadHeapUsage::Now();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(1U, usage.free_ops);
    EXPECT_EQ(kAllocSize, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Realloc to zero size may or may not return nullptr - make sure to free
    // the zero-size alloc if a valid pointer was returned.
    if (ptr != nullptr)
      MockFree(ptr);
  }

  {
    ScopedThreadHeapUsage scoped_usage;

    void* ptr = MockMalloc(kAllocSize);
    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        ScopedThreadHeapUsage::Now();
    EXPECT_EQ(1U, usage.alloc_ops);

    // Now try reallocating a valid pointer to a larger size; this should
    // count as one free and one alloc.
    const size_t kLargerAllocSize = kAllocSize + 928U;
    ptr = MockRealloc(ptr, kLargerAllocSize);

    usage = ScopedThreadHeapUsage::Now();
    EXPECT_EQ(2U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize + kLargerAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(1U, usage.free_ops);
    EXPECT_EQ(kAllocSize, usage.free_bytes);
    EXPECT_EQ(kLargerAllocSize, usage.max_allocated_bytes);

    MockFree(ptr);
  }
}
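
The realloc semantics pinned down by this test (nullptr behaves as an alloc,
size zero as a free, and a true resize as a free plus an alloc) suggest a hook
shaped roughly like the sketch below; OnReallocHook, RecordAlloc and
RecordFree are assumed names, and the CL's actual accounting may differ:

    static void* OnReallocHook(const AllocatorDispatch* self,
                               void* address,
                               size_t size) {
      // Tally the outgoing block, if any, then the incoming one, if any;
      // this yields the alloc/free op counts the test expects for all three
      // realloc flavors.
      if (address != nullptr)
        RecordFree(
            self->next->get_size_estimate_function(self->next, address));
      void* ret = self->next->realloc_function(self->next, address, size);
      if (size != 0U)
        RecordAlloc(ret, size);
      return ret;
    }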

TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) {
  ScopedThreadHeapUsage outer_scoped_usage;

  const size_t kOuterAllocSize = 1029U;
  void* ptr = MockMalloc(kOuterAllocSize);
  MockFree(ptr);

  EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes);

  {
    ScopedThreadHeapUsage inner_scoped_usage;

    const size_t kInnerAllocSize = 673U;
    ptr = MockMalloc(kInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kInnerAllocSize,
              ScopedThreadHeapUsage::Now().max_allocated_bytes);
  }

  // The greater, outer allocation size should have been restored.
  EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes);

  const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
  {
    ScopedThreadHeapUsage inner_scoped_usage;

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kLargerInnerAllocSize,
              ScopedThreadHeapUsage::Now().max_allocated_bytes);
  }

  // The greater, inner allocation size should have been preserved.
  EXPECT_EQ(kLargerInnerAllocSize,
            ScopedThreadHeapUsage::Now().max_allocated_bytes);

  // Now try the case with an outstanding net alloc size when entering the
  // inner scope.
  void* outer_ptr = MockMalloc(kOuterAllocSize);
  EXPECT_EQ(kLargerInnerAllocSize,
            ScopedThreadHeapUsage::Now().max_allocated_bytes);
  {
    ScopedThreadHeapUsage inner_scoped_usage;

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kLargerInnerAllocSize,
              ScopedThreadHeapUsage::Now().max_allocated_bytes);
  }

  // While the inner scope saw only the inner net outstanding allocation size,
  // the outer scope saw both outstanding at the same time.
  EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
            ScopedThreadHeapUsage::Now().max_allocated_bytes);

  MockFree(outer_ptr);
}
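
The nesting behavior checked here is consistent with an RAII save-and-restore:
the constructor stashes the thread's running stats and zeroes them, and the
destructor folds the inner stats back in, keeping whichever high-watermark is
larger once the outer scope's outstanding bytes are added back. This is an
assumption about the implementation, sketched with made-up UsageStats and
GetThreadStats() stand-ins (needs <algorithm> and <cstdint>):

    struct UsageStats {
      uint64_t alloc_bytes = 0;
      uint64_t free_bytes = 0;
      uint64_t max_allocated_bytes = 0;
    };

    UsageStats* GetThreadStats();  // Assumed thread-local accessor.

    class ScopedUsageSketch {
     public:
      ScopedUsageSketch() : saved_(*GetThreadStats()) {
        *GetThreadStats() = UsageStats();  // Each scope measures from zero.
      }
      ~ScopedUsageSketch() {
        UsageStats* cur = GetThreadStats();
        uint64_t outstanding = saved_.alloc_bytes - saved_.free_bytes;
        // The outer high-watermark must cover the inner peak plus whatever
        // the outer scope had outstanding on entry - exactly what the
        // outer_ptr case above verifies.
        saved_.max_allocated_bytes =
            std::max(saved_.max_allocated_bytes,
                     outstanding + cur->max_allocated_bytes);
        saved_.alloc_bytes += cur->alloc_bytes;
        saved_.free_bytes += cur->free_bytes;
        *cur = saved_;
      }

     private:
      UsageStats saved_;
    };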

TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) {
  const size_t kAllocSize = 100;
  void* alloc = MockMalloc(kAllocSize);
  size_t estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockCalloc(kAllocSize, 1);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockAllocAligned(1, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);

  alloc = MockRealloc(alloc, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);
}
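
The estimate == 0 || estimate >= kAllocSize pattern reflects how real heaps
report usable rather than requested size: bucketing allocators round requests
up, and heaps with no introspection report zero. A standalone illustration
against glibc's malloc_usable_size (platform-specific; macOS exposes
malloc_size and Windows _msize instead):

    #include <malloc.h>  // malloc_usable_size() on glibc.
    #include <stdio.h>
    #include <stdlib.h>

    int main() {
      void* p = malloc(100);
      // Usable size is at least the requested 100 bytes, and often more due
      // to size-class rounding - the same slack the ASSERTs above allow.
      printf("requested=100 usable=%zu\n", malloc_usable_size(p));
      free(p);
      return 0;
    }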

#if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) {
  ScopedThreadHeapUsage::Initialize();
  ScopedThreadHeapUsage::EnableHeapTracking();

  const size_t kAllocSize = 9993;
  // This test verifies that the scoped heap data is affected by malloc &
  // free only when the shim is available.
  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now();
  void* ptr = malloc(kAllocSize);
  // Prevent the compiler from optimizing out the malloc/free pair.
  ASSERT_NE(nullptr, ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now();
  free(ptr);
  ScopedThreadHeapUsage::ThreadAllocatorUsage u3 = ScopedThreadHeapUsage::Now();

  // Verify that at least the one allocation above was recorded, and that the
  // allocation counts grow monotonically.
  EXPECT_LE(0U, u1.alloc_ops);
  EXPECT_LE(u1.alloc_ops + 1, u2.alloc_ops);
  EXPECT_LE(u1.alloc_ops + 1, u3.alloc_ops);

  // Verify that at least the bytes above were recorded.
  EXPECT_LE(u1.alloc_bytes + kAllocSize, u2.alloc_bytes);

  // Verify that at least the one free operation above was recorded.
  EXPECT_LE(u2.free_ops + 1, u3.free_ops);

  TestingScopedThreadHeapUsage::DisableHeapTrackingForTesting();
}
#endif  // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)

}  // namespace debug
}  // namespace base