Chromium Code Reviews

Unified Diff: base/debug/scoped_thread_heap_usage_unittest.cc

Issue 2386123003: Add heap allocator usage to task profiler. (Closed)
Patch Set: Figure out where the @#$%! corruption is coming from. Move heap tracking to TaskStopwatch. Created 4 years, 2 months ago
 // Copyright 2016 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "base/debug/scoped_thread_heap_usage.h"

 #include <map>

 #include "base/allocator/allocator_shim.h"
 #include "base/allocator/features.h"
 #include "testing/gtest/include/gtest/gtest.h"

 namespace base {
 namespace debug {

 namespace {

-class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage {
+class TestingHeapUsageTracker : public HeapUsageTracker {
  public:
-  using ScopedThreadHeapUsage::DisableHeapTrackingForTesting;
-  using ScopedThreadHeapUsage::GetDispatchForTesting;
+  using HeapUsageTracker::DisableHeapTrackingForTesting;
+  using HeapUsageTracker::GetDispatchForTesting;
+  using HeapUsageTracker::EnsureTLSInitializedForTesting;
 };

 // A fixture class that allows testing the AllocatorDispatch associated with
-// the ScopedThreadHeapUsage class in isolation against a mocked underlying
+// the HeapUsageTracker class in isolation against a mocked underlying
 // heap implementation.
-class ScopedThreadHeapUsageTest : public testing::Test {
+class HeapUsageTrackerTest : public testing::Test {
  public:
   using AllocatorDispatch = base::allocator::AllocatorDispatch;

   static const size_t kAllocationPadding;
   enum SizeFunctionKind {
     EXACT_SIZE_FUNCTION,
     PADDING_SIZE_FUNCTION,
     ZERO_SIZE_FUNCTION,
   };

-  ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
+  HeapUsageTrackerTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
     EXPECT_EQ(nullptr, g_self);
     g_self = this;
   }

-  ~ScopedThreadHeapUsageTest() override {
+  ~HeapUsageTrackerTest() override {
     EXPECT_EQ(this, g_self);
     g_self = nullptr;
   }

   void set_size_function_kind(SizeFunctionKind kind) {
     size_function_kind_ = kind;
   }

   void SetUp() override {
-    ScopedThreadHeapUsage::Initialize();
+    TestingHeapUsageTracker::EnsureTLSInitializedForTesting();

-    dispatch_under_test_ =
-        TestingScopedThreadHeapUsage::GetDispatchForTesting();
+    dispatch_under_test_ = TestingHeapUsageTracker::GetDispatchForTesting();
     ASSERT_EQ(nullptr, dispatch_under_test_->next);

     dispatch_under_test_->next = &g_mock_dispatch;
   }

   void TearDown() override {
     ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next);

     dispatch_under_test_->next = nullptr;
   }
 (...skipping 112 matching lines...)
     return g_self->GetSizeEstimate(address);
   }

   using AllocationSizeMap = std::map<void*, size_t>;

   SizeFunctionKind size_function_kind_;
   AllocationSizeMap allocation_size_map_;
   AllocatorDispatch* dispatch_under_test_;

   static base::allocator::AllocatorDispatch g_mock_dispatch;
-  static ScopedThreadHeapUsageTest* g_self;
+  static HeapUsageTrackerTest* g_self;
 };

-const size_t ScopedThreadHeapUsageTest::kAllocationPadding = 23;
+const size_t HeapUsageTrackerTest::kAllocationPadding = 23;

-ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr;
+HeapUsageTrackerTest* HeapUsageTrackerTest::g_self = nullptr;

-base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch =
-    {
-        &ScopedThreadHeapUsageTest::OnAllocFn,  // alloc_function
-        &ScopedThreadHeapUsageTest::
-            OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
-        &ScopedThreadHeapUsageTest::OnAllocAlignedFn,  // alloc_aligned_function
-        &ScopedThreadHeapUsageTest::OnReallocFn,  // realloc_function
-        &ScopedThreadHeapUsageTest::OnFreeFn,  // free_function
-        &ScopedThreadHeapUsageTest::
-            OnGetSizeEstimateFn,  // get_size_estimate_function
-        nullptr,  // next
+base::allocator::AllocatorDispatch HeapUsageTrackerTest::g_mock_dispatch = {
+    &HeapUsageTrackerTest::OnAllocFn,  // alloc_function
+    &HeapUsageTrackerTest::
+        OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
+    &HeapUsageTrackerTest::OnAllocAlignedFn,  // alloc_aligned_function
+    &HeapUsageTrackerTest::OnReallocFn,  // realloc_function
+    &HeapUsageTrackerTest::OnFreeFn,  // free_function
+    &HeapUsageTrackerTest::OnGetSizeEstimateFn,  // get_size_estimate_function
+    nullptr,  // next
 };

 }  // namespace

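Each entry in g_mock_dispatch is a static thunk that routes through g_self back to the fixture; the tail of OnGetSizeEstimateFn is visible above, and the remaining bodies sit in the 112 elided lines, alongside the Mock* helpers the tests below call. A minimal sketch of the pattern under those assumptions (MockAlloc is an assumed fixture-method name, not shown in this diff):

    // Sketch only: a Mock* helper enters the allocator shim chain at the
    // dispatch under test, exactly as a shimmed malloc() would.
    void* MockMalloc(size_t size) {
      return dispatch_under_test_->alloc_function(dispatch_under_test_, size);
    }

    // The mock-heap endpoint reached through |next| defers to the fixture
    // instance via g_self, which records each block's size so the size
    // function can answer for it later.
    static void* OnAllocFn(const AllocatorDispatch* self, size_t size) {
      return g_self->MockAlloc(size);  // MockAlloc's body is elided above.
    }
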
-TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
+TEST_F(HeapUsageTrackerTest, SimpleUsageWithExactSizeFunction) {
   set_size_function_kind(EXACT_SIZE_FUNCTION);

-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();

   EXPECT_EQ(0U, u1.alloc_ops);
   EXPECT_EQ(0U, u1.alloc_bytes);
   EXPECT_EQ(0U, u1.alloc_overhead_bytes);
   EXPECT_EQ(0U, u1.free_ops);
   EXPECT_EQ(0U, u1.free_bytes);
   EXPECT_EQ(0U, u1.max_allocated_bytes);

   const size_t kAllocSize = 1029U;
   void* ptr = MockMalloc(kAllocSize);
   MockFree(ptr);

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u2 = usage_tracker.usage();

   EXPECT_EQ(1U, u2.alloc_ops);
   EXPECT_EQ(kAllocSize, u2.alloc_bytes);
   EXPECT_EQ(0U, u2.alloc_overhead_bytes);
   EXPECT_EQ(1U, u2.free_ops);
   EXPECT_EQ(kAllocSize, u2.free_bytes);
   EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
 }
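
This first test shows the API change in miniature: the pure RAII ScopedThreadHeapUsage scope becomes an explicit Start()/Stop() pair, with the result read back through usage(). A usage sketch of the new shape outside the test harness, assuming tracking is enabled on the thread (DoSomeWork is hypothetical; the meaning of Stop's bool argument isn't shown in this diff):

    #include "base/debug/scoped_thread_heap_usage.h"

    void ProfileTaskHeapUse() {
      base::debug::HeapUsageTracker tracker;
      tracker.Start();      // Snapshot this thread's allocator counters.
      DoSomeWork();         // Hypothetical work whose heap use gets tallied.
      tracker.Stop(false);  // Freeze the delta; the tests always pass false.
      base::debug::ThreadAllocatorUsage used = tracker.usage();
      // used.alloc_bytes, used.free_bytes, used.max_allocated_bytes, ...
    }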

-TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
+TEST_F(HeapUsageTrackerTest, SimpleUsageWithPaddingSizeFunction) {
   set_size_function_kind(PADDING_SIZE_FUNCTION);

-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();

   EXPECT_EQ(0U, u1.alloc_ops);
   EXPECT_EQ(0U, u1.alloc_bytes);
   EXPECT_EQ(0U, u1.alloc_overhead_bytes);
   EXPECT_EQ(0U, u1.free_ops);
   EXPECT_EQ(0U, u1.free_bytes);
   EXPECT_EQ(0U, u1.max_allocated_bytes);

   const size_t kAllocSize = 1029U;
   void* ptr = MockMalloc(kAllocSize);
   MockFree(ptr);

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u2 = usage_tracker.usage();

   EXPECT_EQ(1U, u2.alloc_ops);
   EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes);
   EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes);
   EXPECT_EQ(1U, u2.free_ops);
   EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes);
   EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes);
 }

-TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
+TEST_F(HeapUsageTrackerTest, SimpleUsageWithZeroSizeFunction) {
   set_size_function_kind(ZERO_SIZE_FUNCTION);

-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();
   EXPECT_EQ(0U, u1.alloc_ops);
   EXPECT_EQ(0U, u1.alloc_bytes);
   EXPECT_EQ(0U, u1.alloc_overhead_bytes);
   EXPECT_EQ(0U, u1.free_ops);
   EXPECT_EQ(0U, u1.free_bytes);
   EXPECT_EQ(0U, u1.max_allocated_bytes);

   const size_t kAllocSize = 1029U;
   void* ptr = MockMalloc(kAllocSize);
   MockFree(ptr);

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u2 = usage_tracker.usage();

   // With a get-size function that returns zero, there's no way to get the size
   // of an allocation that's being freed, hence the shim can't tally freed bytes
   // nor the high-watermark allocated bytes.
   EXPECT_EQ(1U, u2.alloc_ops);
   EXPECT_EQ(kAllocSize, u2.alloc_bytes);
   EXPECT_EQ(0U, u2.alloc_overhead_bytes);
   EXPECT_EQ(1U, u2.free_ops);
   EXPECT_EQ(0U, u2.free_bytes);
   EXPECT_EQ(0U, u2.max_allocated_bytes);
 }
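
Taken together, the three size-function tests pin down how the dispatch under test tallies allocations. A reconstruction of that bookkeeping consistent with the expectations (a sketch, not the shipped shim code; Usage stands in for ThreadAllocatorUsage):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    struct Usage {  // Stand-in for ThreadAllocatorUsage.
      uint64_t alloc_ops = 0, alloc_bytes = 0, alloc_overhead_bytes = 0;
      uint64_t free_ops = 0, free_bytes = 0, max_allocated_bytes = 0;
    };

    void RecordAlloc(Usage* usage, size_t requested, size_t estimate) {
      usage->alloc_ops++;
      if (requested && estimate) {
        // The heap could measure the block: tally actual bytes and overhead,
        // and advance the high watermark of outstanding bytes.
        usage->alloc_bytes += estimate;
        usage->alloc_overhead_bytes += estimate - requested;
        usage->max_allocated_bytes = std::max(
            usage->max_allocated_bytes, usage->alloc_bytes - usage->free_bytes);
      } else {
        // ZERO_SIZE_FUNCTION case: only the requested size is known, so
        // neither the overhead nor the watermark can be maintained.
        usage->alloc_bytes += requested;
      }
    }

    void RecordFree(Usage* usage, size_t estimate) {
      usage->free_ops++;
      usage->free_bytes += estimate;  // Stays 0 when the estimate is 0.
    }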

-TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
+TEST_F(HeapUsageTrackerTest, ReallocCorrectlyTallied) {
   const size_t kAllocSize = 237U;

   {
-    ScopedThreadHeapUsage scoped_usage;
+    HeapUsageTracker usage_tracker;
+    usage_tracker.Start();

     // Reallocating nullptr should count as a single alloc.
     void* ptr = MockRealloc(nullptr, kAllocSize);
-    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
-        ScopedThreadHeapUsage::CurrentUsage();
+    ThreadAllocatorUsage usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(1U, usage.alloc_ops);
     EXPECT_EQ(kAllocSize, usage.alloc_bytes);
     EXPECT_EQ(0U, usage.alloc_overhead_bytes);
     EXPECT_EQ(0U, usage.free_ops);
     EXPECT_EQ(0U, usage.free_bytes);
     EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

     // Reallocating a valid pointer to a zero size should count as a single
     // free.
     ptr = MockRealloc(ptr, 0U);

-    usage = ScopedThreadHeapUsage::CurrentUsage();
+    usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(1U, usage.alloc_ops);
     EXPECT_EQ(kAllocSize, usage.alloc_bytes);
     EXPECT_EQ(0U, usage.alloc_overhead_bytes);
     EXPECT_EQ(1U, usage.free_ops);
     EXPECT_EQ(kAllocSize, usage.free_bytes);
     EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

     // Realloc to zero size may or may not return a nullptr - make sure to
     // free the zero-size alloc in the latter case.
     if (ptr != nullptr)
       MockFree(ptr);
+
+    usage_tracker.Stop(false);
   }

   {
-    ScopedThreadHeapUsage scoped_usage;
+    HeapUsageTracker usage_tracker;
+    usage_tracker.Start();

     void* ptr = MockMalloc(kAllocSize);
-    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
-        ScopedThreadHeapUsage::CurrentUsage();
+    ThreadAllocatorUsage usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(1U, usage.alloc_ops);

     // Now try reallocating a valid pointer to a larger size, this should count
     // as one free and one alloc.
     const size_t kLargerAllocSize = kAllocSize + 928U;
     ptr = MockRealloc(ptr, kLargerAllocSize);

-    usage = ScopedThreadHeapUsage::CurrentUsage();
+    usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(2U, usage.alloc_ops);
     EXPECT_EQ(kAllocSize + kLargerAllocSize, usage.alloc_bytes);
     EXPECT_EQ(0U, usage.alloc_overhead_bytes);
     EXPECT_EQ(1U, usage.free_ops);
     EXPECT_EQ(kAllocSize, usage.free_bytes);
     EXPECT_EQ(kLargerAllocSize, usage.max_allocated_bytes);

     MockFree(ptr);
+
+    usage_tracker.Stop(false);
   }
 }
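
The realloc expectations reduce to the alloc/free rules sketched above. A plausible decomposition inside the realloc hook, building on the previous sketch's Usage/RecordAlloc/RecordFree and using the member names from the g_mock_dispatch table (again an illustration, not the shipped code):

    // realloc(nullptr, n) tallies one alloc; realloc(p, 0) one free;
    // realloc(p, n) one free of p's estimated size plus one alloc of n.
    void* OnReallocSketch(const AllocatorDispatch* next, Usage* usage,
                          void* address, size_t size) {
      if (address) {
        // Query the old block's size before the underlying realloc
        // invalidates the pointer.
        RecordFree(usage, next->get_size_estimate_function(next, address));
      }
      void* ret = next->realloc_function(next, address, size);
      if (size)
        RecordAlloc(usage, size, next->get_size_estimate_function(next, ret));
      return ret;
    }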

-TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) {
-  ScopedThreadHeapUsage outer_scoped_usage;
+TEST_F(HeapUsageTrackerTest, NestedMaxWorks) {
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();

   const size_t kOuterAllocSize = 1029U;
   void* ptr = MockMalloc(kOuterAllocSize);
   MockFree(ptr);

   EXPECT_EQ(kOuterAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);

   {
-    ScopedThreadHeapUsage inner_scoped_usage;
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();

     const size_t kInnerAllocSize = 673U;
     ptr = MockMalloc(kInnerAllocSize);
     MockFree(ptr);

-    EXPECT_EQ(kInnerAllocSize,
-              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+    inner_usage_tracker.Stop(false);
+
+    EXPECT_EQ(kInnerAllocSize, inner_usage_tracker.usage().max_allocated_bytes);
   }

   // The greater, outer allocation size should have been restored.
   EXPECT_EQ(kOuterAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);

   const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
   {
-    ScopedThreadHeapUsage inner_scoped_usage;
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();

     ptr = MockMalloc(kLargerInnerAllocSize);
     MockFree(ptr);

+    inner_usage_tracker.Stop(false);
     EXPECT_EQ(kLargerInnerAllocSize,
-              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+              inner_usage_tracker.usage().max_allocated_bytes);
   }

   // The greater, inner allocation size should have been preserved.
   EXPECT_EQ(kLargerInnerAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);

   // Now try the case with an outstanding net alloc size when entering the
   // inner scope.
   void* outer_ptr = MockMalloc(kOuterAllocSize);
   EXPECT_EQ(kLargerInnerAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);
   {
-    ScopedThreadHeapUsage inner_scoped_usage;
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();

     ptr = MockMalloc(kLargerInnerAllocSize);
     MockFree(ptr);

+    inner_usage_tracker.Stop(false);
     EXPECT_EQ(kLargerInnerAllocSize,
-              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+              inner_usage_tracker.usage().max_allocated_bytes);
   }

   // While the inner scope saw only the inner net outstanding allocation size,
   // the outer scope saw both outstanding at the same time.
   EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);

   MockFree(outer_ptr);
+
+  // Test a net-negative scope.
+  ptr = MockMalloc(kLargerInnerAllocSize);
+  {
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();
+
+    MockFree(ptr);
+
+    const size_t kInnerAllocSize = 1;
+    ptr = MockMalloc(kInnerAllocSize);
+
+    inner_usage_tracker.Stop(false);
+    // Since the scope is still net-negative, the max is clamped at zero.
+    EXPECT_EQ(0, inner_usage_tracker.usage().max_allocated_bytes);
+  }
 }
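
The nesting behavior implies that Start() snapshots the thread's counters and Stop() folds the inner tracker's watermark back into the enclosing scope, offset by whatever was already outstanding. Roughly, as inferred from the expectations above (usage_at_start_ and usage_ are hypothetical member names for the Start() snapshot and the frozen delta; thread_usage points at the thread-local counters):

    // Stop() sketch: restore the enclosing scope's view of the watermark.
    uint64_t outstanding_at_start =
        usage_at_start_.alloc_bytes - usage_at_start_.free_bytes;
    thread_usage->max_allocated_bytes =
        std::max(usage_at_start_.max_allocated_bytes,
                 outstanding_at_start + usage_.max_allocated_bytes);
    // A net-negative inner scope contributes a zero watermark, which is
    // exactly the clamp asserted in the final block above.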

-TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) {
+TEST_F(HeapUsageTrackerTest, AllShimFunctionsAreProvided) {
   const size_t kAllocSize = 100;
   void* alloc = MockMalloc(kAllocSize);
   size_t estimate = MockGetSizeEstimate(alloc);
   ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
   MockFree(alloc);

   alloc = MockCalloc(kAllocSize, 1);
   estimate = MockGetSizeEstimate(alloc);
   ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
   MockFree(alloc);

   alloc = MockAllocAligned(1, kAllocSize);
   estimate = MockGetSizeEstimate(alloc);
   ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);

   alloc = MockRealloc(alloc, kAllocSize);
   estimate = MockGetSizeEstimate(alloc);
   ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
   MockFree(alloc);
 }

 #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
 TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) {
-  ScopedThreadHeapUsage::Initialize();
-  ScopedThreadHeapUsage::EnableHeapTracking();
+  ASSERT_FALSE(HeapUsageTracker::IsHeapTrackingEnabled());
+
+  HeapUsageTracker::EnableHeapTracking();
+
+  ASSERT_TRUE(HeapUsageTracker::IsHeapTrackingEnabled());

   const size_t kAllocSize = 9993;
   // This test verifies that the scoped heap data is affected by malloc &
   // free only when the shim is available.
-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();
   void* ptr = malloc(kAllocSize);
   // Prevent the compiler from optimizing out the malloc/free pair.
   ASSERT_NE(nullptr, ptr);

-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u2 = HeapUsageTracker::CurrentUsage();
   free(ptr);
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u3 =
-      ScopedThreadHeapUsage::CurrentUsage();
+
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u3 = usage_tracker.usage();

   // Verify that at least one allocation operation was recorded, and that free
   // operations are at least monotonically growing.
   EXPECT_LE(0U, u1.alloc_ops);
   EXPECT_LE(u1.alloc_ops + 1, u2.alloc_ops);
   EXPECT_LE(u1.alloc_ops + 1, u3.alloc_ops);

   // Verify that at least the bytes above were recorded.
   EXPECT_LE(u1.alloc_bytes + kAllocSize, u2.alloc_bytes);

   // Verify that at least the one free operation above was recorded.
   EXPECT_LE(u2.free_ops + 1, u3.free_ops);

-  TestingScopedThreadHeapUsage::DisableHeapTrackingForTesting();
+  TestingHeapUsageTracker::DisableHeapTrackingForTesting();
+
+  ASSERT_FALSE(HeapUsageTracker::IsHeapTrackingEnabled());
 }
 #endif  // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
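
EnableHeapTracking() flips process-global state, which is why the test asserts the disabled state on entry and restores it through the testing-only hook on exit. A production caller would presumably enable tracking once at startup, gated the same way (a hypothetical call site, not part of this patch):

    #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
      // Hypothetical: enable once, early, before the threads whose heap
      // usage should be profiled start running.
      base::debug::HeapUsageTracker::EnableHeapTracking();
    #endif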

 }  // namespace debug
 }  // namespace base