// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/scoped_thread_heap_usage.h"

#include <map>

#include "base/allocator/allocator_shim.h"
#include "base/allocator/features.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace base {
namespace debug {

namespace {

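// Exposes the testing-only members of ScopedThreadHeapUsage to the tests
// below.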
class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage {
 public:
  using ScopedThreadHeapUsage::DisableHeapTrackingForTesting;
  using ScopedThreadHeapUsage::GetDispatchForTesting;
};

// A fixture class that allows testing the AllocatorDispatch associated with
// the ScopedThreadHeapUsage class in isolation against a mocked underlying
// heap implementation.
class ScopedThreadHeapUsageTest : public testing::Test {
 public:
  using AllocatorDispatch = base::allocator::AllocatorDispatch;

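  // The number of bytes the mocked get-size-estimate function adds to each
  // allocation's reported size in the PADDING_SIZE_FUNCTION mode.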
  static const size_t kAllocationPadding;
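  // Controls how the mocked heap reports allocation sizes: exactly as
  // requested, with kAllocationPadding bytes added, or as zero (unknown).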
  enum SizeFunctionKind {
    EXACT_SIZE_FUNCTION,
    PADDING_SIZE_FUNCTION,
    ZERO_SIZE_FUNCTION,
  };

  ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
    EXPECT_EQ(nullptr, g_self);
    g_self = this;
  }

  ~ScopedThreadHeapUsageTest() override {
    EXPECT_EQ(this, g_self);
    g_self = nullptr;
  }

  void set_size_function_kind(SizeFunctionKind kind) {
    size_function_kind_ = kind;
  }

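  // Splices g_mock_dispatch in as the next link after the dispatch under
  // test, and unhooks it again on teardown.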
  void SetUp() override {
    ScopedThreadHeapUsage::Initialize();

    dispatch_under_test_ =
        TestingScopedThreadHeapUsage::GetDispatchForTesting();
    ASSERT_EQ(nullptr, dispatch_under_test_->next);

    dispatch_under_test_->next = &g_mock_dispatch;
  }

  void TearDown() override {
    ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next);

    dispatch_under_test_->next = nullptr;
  }

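  // The Mock* helpers below invoke the corresponding function of the dispatch
  // under test, which in turn forwards to the mocked heap implementation.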
  void* MockMalloc(size_t size) {
    return dispatch_under_test_->alloc_function(dispatch_under_test_, size);
  }

  void* MockCalloc(size_t n, size_t size) {
    return dispatch_under_test_->alloc_zero_initialized_function(
        dispatch_under_test_, n, size);
  }

  void* MockAllocAligned(size_t alignment, size_t size) {
    return dispatch_under_test_->alloc_aligned_function(dispatch_under_test_,
                                                        alignment, size);
  }

  void* MockRealloc(void* address, size_t size) {
    return dispatch_under_test_->realloc_function(dispatch_under_test_, address,
                                                  size);
  }

  void MockFree(void* address) {
    dispatch_under_test_->free_function(dispatch_under_test_, address);
  }

  size_t MockGetSizeEstimate(void* address) {
    return dispatch_under_test_->get_size_estimate_function(
        dispatch_under_test_, address);
  }

 private:
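  // Records and forgets outstanding allocations so that the mocked
  // get-size-estimate function below can report a size for them.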
  void RecordAlloc(void* address, size_t size) {
    if (address != nullptr)
      allocation_size_map_[address] = size;
  }

  void DeleteAlloc(void* address) {
    if (address != nullptr)
      EXPECT_EQ(1U, allocation_size_map_.erase(address));
  }

  size_t GetSizeEstimate(void* address) {
    auto it = allocation_size_map_.find(address);
    if (it == allocation_size_map_.end())
      return 0;

    size_t ret = it->second;
    switch (size_function_kind_) {
      case EXACT_SIZE_FUNCTION:
        break;
      case PADDING_SIZE_FUNCTION:
        ret += kAllocationPadding;
        break;
      case ZERO_SIZE_FUNCTION:
        ret = 0;
        break;
    }

    return ret;
  }

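  // The static functions below implement the mocked heap. They're installed
  // in g_mock_dispatch and delegate their bookkeeping to the per-test g_self
  // instance.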
  static void* OnAllocFn(const AllocatorDispatch* self, size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnAllocZeroInitializedFn(const AllocatorDispatch* self,
                                        size_t n,
                                        size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = calloc(n, size);
    g_self->RecordAlloc(ret, n * size);
    return ret;
  }

  static void* OnAllocAlignedFn(const AllocatorDispatch* self,
                                size_t alignment,
                                size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    // This is a cheat: it doesn't return aligned allocations, but that has
    // the advantage of working on all platforms for this test.
    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnReallocFn(const AllocatorDispatch* self,
                           void* address,
                           size_t size) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    void* ret = realloc(address, size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void OnFreeFn(const AllocatorDispatch* self, void* address) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    free(address);
  }

  static size_t OnGetSizeEstimateFn(const AllocatorDispatch* self,
                                    void* address) {
    EXPECT_EQ(&g_mock_dispatch, self);

    return g_self->GetSizeEstimate(address);
  }

  using AllocationSizeMap = std::map<void*, size_t>;

  SizeFunctionKind size_function_kind_;
  AllocationSizeMap allocation_size_map_;
  AllocatorDispatch* dispatch_under_test_;

  static base::allocator::AllocatorDispatch g_mock_dispatch;
  static ScopedThreadHeapUsageTest* g_self;
};

const size_t ScopedThreadHeapUsageTest::kAllocationPadding = 23;

ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr;

base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch =
    {
        &ScopedThreadHeapUsageTest::OnAllocFn,  // alloc_function
        &ScopedThreadHeapUsageTest::
            OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
        &ScopedThreadHeapUsageTest::OnAllocAlignedFn,  // alloc_aligned_function
        &ScopedThreadHeapUsageTest::OnReallocFn,       // realloc_function
        &ScopedThreadHeapUsageTest::OnFreeFn,          // free_function
        &ScopedThreadHeapUsageTest::
            OnGetSizeEstimateFn,  // get_size_estimate_function
        nullptr,                  // next
};

}  // namespace

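// Verifies that alloc and free tallies are exact when the underlying heap
// reports exact allocation sizes.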
TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
  set_size_function_kind(EXACT_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
      ScopedThreadHeapUsage::CurrentUsage();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
      ScopedThreadHeapUsage::CurrentUsage();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize, u2.free_bytes);
  EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
}

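// Verifies that when the heap reports padded allocation sizes, the padding is
// tallied as allocation overhead.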
TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
  set_size_function_kind(PADDING_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
      ScopedThreadHeapUsage::CurrentUsage();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
      ScopedThreadHeapUsage::CurrentUsage();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes);
  EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes);
}

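// Verifies the tallies when the heap can't report allocation sizes at all.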
TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
  set_size_function_kind(ZERO_SIZE_FUNCTION);

  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
      ScopedThreadHeapUsage::CurrentUsage();
  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
      ScopedThreadHeapUsage::CurrentUsage();

  // With a get-size function that returns zero, there's no way to learn the
  // size of an allocation that's being freed, so the shim can tally neither
  // freed bytes nor the high-watermark of allocated bytes.
  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(0U, u2.free_bytes);
  EXPECT_EQ(0U, u2.max_allocated_bytes);
}

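// Verifies that realloc is tallied as the alloc/free combination it implies.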
TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
  const size_t kAllocSize = 237U;

  {
    ScopedThreadHeapUsage scoped_usage;

    // Reallocating nullptr should count as a single alloc.
    void* ptr = MockRealloc(nullptr, kAllocSize);
    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        ScopedThreadHeapUsage::CurrentUsage();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(0U, usage.free_ops);
    EXPECT_EQ(0U, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Reallocating a valid pointer to a zero size should count as a single
    // free.
    ptr = MockRealloc(ptr, 0U);

    usage = ScopedThreadHeapUsage::CurrentUsage();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(1U, usage.free_ops);
    EXPECT_EQ(kAllocSize, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Realloc to zero size may or may not return nullptr; make sure to free
    // the zero-size allocation if a valid pointer was returned.
    if (ptr != nullptr)
      MockFree(ptr);
  }

  {
    ScopedThreadHeapUsage scoped_usage;

    void* ptr = MockMalloc(kAllocSize);
    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
        ScopedThreadHeapUsage::CurrentUsage();
    EXPECT_EQ(1U, usage.alloc_ops);

    // Now try reallocating a valid pointer to a larger size. This should
    // count as one free and one alloc.
    const size_t kLargerAllocSize = kAllocSize + 928U;
    ptr = MockRealloc(ptr, kLargerAllocSize);

    usage = ScopedThreadHeapUsage::CurrentUsage();
    EXPECT_EQ(2U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize + kLargerAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(1U, usage.free_ops);
    EXPECT_EQ(kAllocSize, usage.free_bytes);
    EXPECT_EQ(kLargerAllocSize, usage.max_allocated_bytes);

    MockFree(ptr);
  }
}

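// Verifies that the max_allocated_bytes high-watermark propagates correctly
// across nested scopes.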
TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) {
  ScopedThreadHeapUsage outer_scoped_usage;

  const size_t kOuterAllocSize = 1029U;
  void* ptr = MockMalloc(kOuterAllocSize);
  MockFree(ptr);

  EXPECT_EQ(kOuterAllocSize,
            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);

  {
    ScopedThreadHeapUsage inner_scoped_usage;

    const size_t kInnerAllocSize = 673U;
    ptr = MockMalloc(kInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kInnerAllocSize,
              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
  }

  // The greater, outer allocation size should have been restored.
  EXPECT_EQ(kOuterAllocSize,
            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);

  const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
  {
    ScopedThreadHeapUsage inner_scoped_usage;

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kLargerInnerAllocSize,
              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
  }

  // The greater, inner allocation size should have been preserved.
  EXPECT_EQ(kLargerInnerAllocSize,
            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);

  // Now try the case with an outstanding net alloc size when entering the
  // inner scope.
  void* outer_ptr = MockMalloc(kOuterAllocSize);
  EXPECT_EQ(kLargerInnerAllocSize,
            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
  {
    ScopedThreadHeapUsage inner_scoped_usage;

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    EXPECT_EQ(kLargerInnerAllocSize,
              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
  }

  // While the inner scope saw only the inner net outstanding allocation size,
  // the outer scope saw both outstanding at the same time.
  EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);

  MockFree(outer_ptr);
}

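// Smoke-tests every function of the dispatch under test against the mocked
// heap.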
TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) {
  const size_t kAllocSize = 100;
  void* alloc = MockMalloc(kAllocSize);
  size_t estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockCalloc(kAllocSize, 1);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockAllocAligned(1, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);

  alloc = MockRealloc(alloc, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);
}

#if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
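// This test runs against the real heap and so is compiled only in builds
// where the allocator shim is available.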
TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) {
  ScopedThreadHeapUsage::Initialize();
  ScopedThreadHeapUsage::EnableHeapTracking();

  const size_t kAllocSize = 9993;
  // This test verifies that the scoped heap data is affected by malloc &
  // free only when the shim is available.
  ScopedThreadHeapUsage scoped_usage;

  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
      ScopedThreadHeapUsage::CurrentUsage();
  void* ptr = malloc(kAllocSize);
  // Prevent the compiler from optimizing out the malloc/free pair.
  ASSERT_NE(nullptr, ptr);

  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
      ScopedThreadHeapUsage::CurrentUsage();
  free(ptr);
  ScopedThreadHeapUsage::ThreadAllocatorUsage u3 =
      ScopedThreadHeapUsage::CurrentUsage();

  // Verify that at least the one allocation operation above was recorded, and
  // that the allocation tally grows monotonically.
  EXPECT_LE(0U, u1.alloc_ops);
  EXPECT_LE(u1.alloc_ops + 1, u2.alloc_ops);
  EXPECT_LE(u1.alloc_ops + 1, u3.alloc_ops);

  // Verify that at least the bytes above were recorded.
  EXPECT_LE(u1.alloc_bytes + kAllocSize, u2.alloc_bytes);

  // Verify that at least the one free operation above was recorded.
  EXPECT_LE(u2.free_ops + 1, u3.free_ops);

  TestingScopedThreadHeapUsage::DisableHeapTrackingForTesting();
}
#endif  // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)

}  // namespace debug
}  // namespace base