Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/debug/scoped_thread_heap_usage.h" | 5 #include "base/debug/thread_heap_usage_tracker.h" |
| 6 | 6 |
| 7 #include <map> | 7 #include <map> |
| 8 | 8 |
| 9 #include "base/allocator/allocator_shim.h" | 9 #include "base/allocator/allocator_shim.h" |
| 10 #include "base/allocator/features.h" | 10 #include "base/allocator/features.h" |
| 11 #include "testing/gtest/include/gtest/gtest.h" | 11 #include "testing/gtest/include/gtest/gtest.h" |
| 12 | 12 |
| 13 namespace base { | 13 namespace base { |
| 14 namespace debug { | 14 namespace debug { |
| 15 | 15 |
| 16 namespace { | 16 namespace { |
| 17 | 17 |
| 18 class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage { | 18 class TestingThreadHeapUsageTracker : public ThreadHeapUsageTracker { |
| 19 public: | 19 public: |
| 20 using ScopedThreadHeapUsage::DisableHeapTrackingForTesting; | 20 using ThreadHeapUsageTracker::DisableHeapTrackingForTesting; |
| 21 using ScopedThreadHeapUsage::GetDispatchForTesting; | 21 using ThreadHeapUsageTracker::GetDispatchForTesting; |
| 22 using ThreadHeapUsageTracker::EnsureTLSInitializedForTesting; | |
> **Lei Zhang** 2016/10/17 20:54:23: nit: Alphabetical order.
>
> **Sigurður Ásgeirsson** 2016/10/18 13:39:23: Done.
| 22 }; | 23 }; |
| 23 | 24 |
| 24 // A fixture class that allows testing the AllocatorDispatch associated with | 25 // A fixture class that allows testing the AllocatorDispatch associated with |
| 25 // the ScopedThreadHeapUsage class in isolation against a mocked underlying | 26 // the ThreadHeapUsageTracker class in isolation against a mocked |
| 27 // underlying | |
| 26 // heap implementation. | 28 // heap implementation. |
| 27 class ScopedThreadHeapUsageTest : public testing::Test { | 29 class ThreadHeapUsageTrackerTest : public testing::Test { |
| 28 public: | 30 public: |
| 29 using AllocatorDispatch = base::allocator::AllocatorDispatch; | 31 using AllocatorDispatch = base::allocator::AllocatorDispatch; |
| 30 | 32 |
| 31 static const size_t kAllocationPadding; | 33 static const size_t kAllocationPadding; |
| 32 enum SizeFunctionKind { | 34 enum SizeFunctionKind { |
| 33 EXACT_SIZE_FUNCTION, | 35 EXACT_SIZE_FUNCTION, |
| 34 PADDING_SIZE_FUNCTION, | 36 PADDING_SIZE_FUNCTION, |
| 35 ZERO_SIZE_FUNCTION, | 37 ZERO_SIZE_FUNCTION, |
| 36 }; | 38 }; |
| 37 | 39 |
| 38 ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) { | 40 ThreadHeapUsageTrackerTest() : size_function_kind_(EXACT_SIZE_FUNCTION) { |
| 39 EXPECT_EQ(nullptr, g_self); | 41 EXPECT_EQ(nullptr, g_self); |
| 40 g_self = this; | 42 g_self = this; |
| 41 } | 43 } |
| 42 | 44 |
| 43 ~ScopedThreadHeapUsageTest() override { | 45 ~ThreadHeapUsageTrackerTest() override { |
| 44 EXPECT_EQ(this, g_self); | 46 EXPECT_EQ(this, g_self); |
| 45 g_self = nullptr; | 47 g_self = nullptr; |
| 46 } | 48 } |
| 47 | 49 |
| 48 void set_size_function_kind(SizeFunctionKind kind) { | 50 void set_size_function_kind(SizeFunctionKind kind) { |
| 49 size_function_kind_ = kind; | 51 size_function_kind_ = kind; |
| 50 } | 52 } |
| 51 | 53 |
| 52 void SetUp() override { | 54 void SetUp() override { |
| 53 ScopedThreadHeapUsage::Initialize(); | 55 TestingThreadHeapUsageTracker::EnsureTLSInitializedForTesting(); |
| 54 | 56 |
| 55 dispatch_under_test_ = | 57 dispatch_under_test_ = |
| 56 TestingScopedThreadHeapUsage::GetDispatchForTesting(); | 58 TestingThreadHeapUsageTracker::GetDispatchForTesting(); |
| 57 ASSERT_EQ(nullptr, dispatch_under_test_->next); | 59 ASSERT_EQ(nullptr, dispatch_under_test_->next); |
| 58 | 60 |
| 59 dispatch_under_test_->next = &g_mock_dispatch; | 61 dispatch_under_test_->next = &g_mock_dispatch; |
| 60 } | 62 } |
| 61 | 63 |
| 62 void TearDown() override { | 64 void TearDown() override { |
| 63 ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next); | 65 ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next); |
| 64 | 66 |
| 65 dispatch_under_test_->next = nullptr; | 67 dispatch_under_test_->next = nullptr; |
| 66 } | 68 } |
| (...skipping 112 matching lines...) | |
| 179 return g_self->GetSizeEstimate(address); | 181 return g_self->GetSizeEstimate(address); |
| 180 } | 182 } |
| 181 | 183 |
| 182 using AllocationSizeMap = std::map<void*, size_t>; | 184 using AllocationSizeMap = std::map<void*, size_t>; |
| 183 | 185 |
| 184 SizeFunctionKind size_function_kind_; | 186 SizeFunctionKind size_function_kind_; |
| 185 AllocationSizeMap allocation_size_map_; | 187 AllocationSizeMap allocation_size_map_; |
| 186 AllocatorDispatch* dispatch_under_test_; | 188 AllocatorDispatch* dispatch_under_test_; |
| 187 | 189 |
| 188 static base::allocator::AllocatorDispatch g_mock_dispatch; | 190 static base::allocator::AllocatorDispatch g_mock_dispatch; |
| 189 static ScopedThreadHeapUsageTest* g_self; | 191 static ThreadHeapUsageTrackerTest* g_self; |
| 190 }; | 192 }; |
| 191 | 193 |
| 192 const size_t ScopedThreadHeapUsageTest::kAllocationPadding = 23; | 194 const size_t ThreadHeapUsageTrackerTest::kAllocationPadding = 23; |
| 193 | 195 |
| 194 ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr; | 196 ThreadHeapUsageTrackerTest* ThreadHeapUsageTrackerTest::g_self = nullptr; |
| 195 | 197 |
| 196 base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch = | 198 base::allocator::AllocatorDispatch ThreadHeapUsageTrackerTest::g_mock_dispatch = |
| 197 { | 199 { |
| 198 &ScopedThreadHeapUsageTest::OnAllocFn, // alloc_function | 200 &ThreadHeapUsageTrackerTest::OnAllocFn, // alloc_function |
| 199 &ScopedThreadHeapUsageTest:: | 201 &ThreadHeapUsageTrackerTest:: |
| 200 OnAllocZeroInitializedFn, // alloc_zero_initialized_function | 202 OnAllocZeroInitializedFn, // alloc_zero_initialized_function |
| 201 &ScopedThreadHeapUsageTest::OnAllocAlignedFn, // alloc_aligned_function | 203 &ThreadHeapUsageTrackerTest:: |
| 202 &ScopedThreadHeapUsageTest::OnReallocFn, // realloc_function | 204 OnAllocAlignedFn, // alloc_aligned_function |
| 203 &ScopedThreadHeapUsageTest::OnFreeFn, // free_function | 205 &ThreadHeapUsageTrackerTest::OnReallocFn, // realloc_function |
| 204 &ScopedThreadHeapUsageTest:: | 206 &ThreadHeapUsageTrackerTest::OnFreeFn, // free_function |
| 207 &ThreadHeapUsageTrackerTest:: | |
| 205 OnGetSizeEstimateFn, // get_size_estimate_function | 208 OnGetSizeEstimateFn, // get_size_estimate_function |
| 206 nullptr, // next | 209 nullptr, // next |
| 207 }; | 210 }; |
| 208 | 211 |
| 209 } // namespace | 212 } // namespace |
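
The fixture works by splicing `g_mock_dispatch` in as the `next` link of the tracker's own dispatch, so every operation the tracker's shim forwards lands in the mock heap rather than the real allocator. Below is a minimal, self-contained sketch of that chaining pattern, using simplified stand-in types rather than the actual `base/allocator` declarations:

```cpp
#include <cassert>
#include <cstddef>
#include <cstdlib>

// Simplified stand-in for base::allocator::AllocatorDispatch: each function
// receives the dispatch it belongs to, so a layer can forward to its |next|.
struct Dispatch {
  void* (*alloc_function)(const Dispatch* self, size_t size);
  void (*free_function)(const Dispatch* self, void* address);
  const Dispatch* next;  // the layer to forward to
};

static size_t g_alloc_ops = 0;

// Tracking layer: tally the operation, then forward down the chain. This is
// the shape of the dispatch-under-test in the fixture above.
void* TrackingAlloc(const Dispatch* self, size_t size) {
  ++g_alloc_ops;
  return self->next->alloc_function(self->next, size);
}
void TrackingFree(const Dispatch* self, void* address) {
  self->next->free_function(self->next, address);
}

// Terminal layer: the "real" heap (played by the mock in the unit test).
void* TerminalAlloc(const Dispatch*, size_t size) { return std::malloc(size); }
void TerminalFree(const Dispatch*, void* address) { std::free(address); }

const Dispatch g_terminal = {&TerminalAlloc, &TerminalFree, nullptr};
const Dispatch g_tracking = {&TrackingAlloc, &TrackingFree, &g_terminal};

int main() {
  void* p = g_tracking.alloc_function(&g_tracking, 100);
  g_tracking.free_function(&g_tracking, p);
  assert(g_alloc_ops == 1);  // the tracking layer saw the allocation
}
```
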
| 210 | 213 |
| 211 TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) { | 214 TEST_F(ThreadHeapUsageTrackerTest, SimpleUsageWithExactSizeFunction) { |
| 212 set_size_function_kind(EXACT_SIZE_FUNCTION); | 215 set_size_function_kind(EXACT_SIZE_FUNCTION); |
| 213 | 216 |
| 214 ScopedThreadHeapUsage scoped_usage; | 217 ThreadHeapUsageTracker usage_tracker; |
| 218 usage_tracker.Start(); | |
| 215 | 219 |
| 216 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = | 220 ThreadHeapUsage u1 = ThreadHeapUsageTracker::CurrentUsage(); |
| 217 ScopedThreadHeapUsage::CurrentUsage(); | |
| 218 | 221 |
| 219 EXPECT_EQ(0U, u1.alloc_ops); | 222 EXPECT_EQ(0U, u1.alloc_ops); |
| 220 EXPECT_EQ(0U, u1.alloc_bytes); | 223 EXPECT_EQ(0U, u1.alloc_bytes); |
| 221 EXPECT_EQ(0U, u1.alloc_overhead_bytes); | 224 EXPECT_EQ(0U, u1.alloc_overhead_bytes); |
| 222 EXPECT_EQ(0U, u1.free_ops); | 225 EXPECT_EQ(0U, u1.free_ops); |
| 223 EXPECT_EQ(0U, u1.free_bytes); | 226 EXPECT_EQ(0U, u1.free_bytes); |
| 224 EXPECT_EQ(0U, u1.max_allocated_bytes); | 227 EXPECT_EQ(0U, u1.max_allocated_bytes); |
| 225 | 228 |
| 226 const size_t kAllocSize = 1029U; | 229 const size_t kAllocSize = 1029U; |
| 227 void* ptr = MockMalloc(kAllocSize); | 230 void* ptr = MockMalloc(kAllocSize); |
| 228 MockFree(ptr); | 231 MockFree(ptr); |
| 229 | 232 |
| 230 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = | 233 usage_tracker.Stop(false); |
| 231 ScopedThreadHeapUsage::CurrentUsage(); | 234 ThreadHeapUsage u2 = usage_tracker.usage(); |
| 232 | 235 |
| 233 EXPECT_EQ(1U, u2.alloc_ops); | 236 EXPECT_EQ(1U, u2.alloc_ops); |
| 234 EXPECT_EQ(kAllocSize, u2.alloc_bytes); | 237 EXPECT_EQ(kAllocSize, u2.alloc_bytes); |
| 235 EXPECT_EQ(0U, u2.alloc_overhead_bytes); | 238 EXPECT_EQ(0U, u2.alloc_overhead_bytes); |
| 236 EXPECT_EQ(1U, u2.free_ops); | 239 EXPECT_EQ(1U, u2.free_ops); |
| 237 EXPECT_EQ(kAllocSize, u2.free_bytes); | 240 EXPECT_EQ(kAllocSize, u2.free_bytes); |
| 238 EXPECT_EQ(kAllocSize, u2.max_allocated_bytes); | 241 EXPECT_EQ(kAllocSize, u2.max_allocated_bytes); |
| 239 } | 242 } |
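
This first test captures the API change in miniature: the old `ScopedThreadHeapUsage` measured implicitly for its lifetime, while the renamed class brackets the measured region with explicit `Start()`/`Stop()` calls and exposes the result through `usage()`. The pattern the updated tests follow, taken directly from this CL:

```cpp
#include "base/debug/thread_heap_usage_tracker.h"

void MeasureHeapUsage() {
  base::debug::ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();  // begin tallying this thread's heap activity

  // ... code whose allocations and frees should be measured ...

  usage_tracker.Stop(false);  // false: fold this usage into any outer scope
  base::debug::ThreadHeapUsage usage = usage_tracker.usage();
  // usage.alloc_ops, usage.alloc_bytes, usage.max_allocated_bytes, etc.
}
```
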
| 240 | 243 |
| 241 TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) { | 244 TEST_F(ThreadHeapUsageTrackerTest, SimpleUsageWithPaddingSizeFunction) { |
| 242 set_size_function_kind(PADDING_SIZE_FUNCTION); | 245 set_size_function_kind(PADDING_SIZE_FUNCTION); |
| 243 | 246 |
| 244 ScopedThreadHeapUsage scoped_usage; | 247 ThreadHeapUsageTracker usage_tracker; |
| 248 usage_tracker.Start(); | |
| 245 | 249 |
| 246 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = | 250 ThreadHeapUsage u1 = ThreadHeapUsageTracker::CurrentUsage(); |
| 247 ScopedThreadHeapUsage::CurrentUsage(); | |
| 248 | 251 |
| 249 EXPECT_EQ(0U, u1.alloc_ops); | 252 EXPECT_EQ(0U, u1.alloc_ops); |
| 250 EXPECT_EQ(0U, u1.alloc_bytes); | 253 EXPECT_EQ(0U, u1.alloc_bytes); |
| 251 EXPECT_EQ(0U, u1.alloc_overhead_bytes); | 254 EXPECT_EQ(0U, u1.alloc_overhead_bytes); |
| 252 EXPECT_EQ(0U, u1.free_ops); | 255 EXPECT_EQ(0U, u1.free_ops); |
| 253 EXPECT_EQ(0U, u1.free_bytes); | 256 EXPECT_EQ(0U, u1.free_bytes); |
| 254 EXPECT_EQ(0U, u1.max_allocated_bytes); | 257 EXPECT_EQ(0U, u1.max_allocated_bytes); |
| 255 | 258 |
| 256 const size_t kAllocSize = 1029U; | 259 const size_t kAllocSize = 1029U; |
| 257 void* ptr = MockMalloc(kAllocSize); | 260 void* ptr = MockMalloc(kAllocSize); |
| 258 MockFree(ptr); | 261 MockFree(ptr); |
| 259 | 262 |
| 260 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = | 263 usage_tracker.Stop(false); |
| 261 ScopedThreadHeapUsage::CurrentUsage(); | 264 ThreadHeapUsage u2 = usage_tracker.usage(); |
| 262 | 265 |
| 263 EXPECT_EQ(1U, u2.alloc_ops); | 266 EXPECT_EQ(1U, u2.alloc_ops); |
| 264 EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes); | 267 EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes); |
| 265 EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes); | 268 EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes); |
| 266 EXPECT_EQ(1U, u2.free_ops); | 269 EXPECT_EQ(1U, u2.free_ops); |
| 267 EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes); | 270 EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes); |
| 268 EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes); | 271 EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes); |
| 269 } | 272 } |
| 270 | 273 |
| 271 TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) { | 274 TEST_F(ThreadHeapUsageTrackerTest, SimpleUsageWithZeroSizeFunction) { |
| 272 set_size_function_kind(ZERO_SIZE_FUNCTION); | 275 set_size_function_kind(ZERO_SIZE_FUNCTION); |
| 273 | 276 |
| 274 ScopedThreadHeapUsage scoped_usage; | 277 ThreadHeapUsageTracker usage_tracker; |
| 278 usage_tracker.Start(); | |
| 275 | 279 |
| 276 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = | 280 ThreadHeapUsage u1 = ThreadHeapUsageTracker::CurrentUsage(); |
| 277 ScopedThreadHeapUsage::CurrentUsage(); | |
| 278 EXPECT_EQ(0U, u1.alloc_ops); | 281 EXPECT_EQ(0U, u1.alloc_ops); |
| 279 EXPECT_EQ(0U, u1.alloc_bytes); | 282 EXPECT_EQ(0U, u1.alloc_bytes); |
| 280 EXPECT_EQ(0U, u1.alloc_overhead_bytes); | 283 EXPECT_EQ(0U, u1.alloc_overhead_bytes); |
| 281 EXPECT_EQ(0U, u1.free_ops); | 284 EXPECT_EQ(0U, u1.free_ops); |
| 282 EXPECT_EQ(0U, u1.free_bytes); | 285 EXPECT_EQ(0U, u1.free_bytes); |
| 283 EXPECT_EQ(0U, u1.max_allocated_bytes); | 286 EXPECT_EQ(0U, u1.max_allocated_bytes); |
| 284 | 287 |
| 285 const size_t kAllocSize = 1029U; | 288 const size_t kAllocSize = 1029U; |
| 286 void* ptr = MockMalloc(kAllocSize); | 289 void* ptr = MockMalloc(kAllocSize); |
| 287 MockFree(ptr); | 290 MockFree(ptr); |
| 288 | 291 |
| 289 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = | 292 usage_tracker.Stop(false); |
| 290 ScopedThreadHeapUsage::CurrentUsage(); | 293 ThreadHeapUsage u2 = usage_tracker.usage(); |
| 291 | 294 |
| 292 // With a get-size function that returns zero, there's no way to get the size | 295 // With a get-size function that returns zero, there's no way to get the size |
| 293 // of an allocation that's being freed, hence the shim can't tally freed bytes | 296 // of an allocation that's being freed, hence the shim can't tally freed bytes |
| 294 // nor the high-watermark allocated bytes. | 297 // nor the high-watermark allocated bytes. |
| 295 EXPECT_EQ(1U, u2.alloc_ops); | 298 EXPECT_EQ(1U, u2.alloc_ops); |
| 296 EXPECT_EQ(kAllocSize, u2.alloc_bytes); | 299 EXPECT_EQ(kAllocSize, u2.alloc_bytes); |
| 297 EXPECT_EQ(0U, u2.alloc_overhead_bytes); | 300 EXPECT_EQ(0U, u2.alloc_overhead_bytes); |
| 298 EXPECT_EQ(1U, u2.free_ops); | 301 EXPECT_EQ(1U, u2.free_ops); |
| 299 EXPECT_EQ(0U, u2.free_bytes); | 302 EXPECT_EQ(0U, u2.free_bytes); |
| 300 EXPECT_EQ(0U, u2.max_allocated_bytes); | 303 EXPECT_EQ(0U, u2.max_allocated_bytes); |
| 301 } | 304 } |
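
Taken together, the three size-function tests pin down the tally rules. The following is an illustrative reconstruction of those rules from the assertions alone, not a quote of the shim source: with a usable size estimate the shim books the true block size and its overhead over the request, while with a zero estimate it can only book the requested size on alloc and nothing at all on free.

```cpp
#include <cstdint>

struct Usage {
  uint64_t alloc_ops = 0, alloc_bytes = 0, alloc_overhead_bytes = 0;
  uint64_t free_ops = 0, free_bytes = 0, max_allocated_bytes = 0;
};

// |requested| is the size passed to malloc; |estimate| is what the heap's
// get-size-estimate function reports for the resulting address (0 = unknown).
void RecordAlloc(Usage& u, uint64_t requested, uint64_t estimate) {
  u.alloc_ops += 1;
  if (estimate != 0) {
    u.alloc_bytes += estimate;                       // true size, padding included
    u.alloc_overhead_bytes += estimate - requested;  // padding only
    // Watermark update elided; see the sketch after NestedMaxWorks below.
  } else {
    u.alloc_bytes += requested;  // no estimate: the request is all we know
  }
}

void RecordFree(Usage& u, uint64_t estimate) {
  u.free_ops += 1;
  u.free_bytes += estimate;  // stays zero when the heap can't size the block
}
```
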
| 302 | 305 |
| 303 TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) { | 306 TEST_F(ThreadHeapUsageTrackerTest, ReallocCorrectlyTallied) { |
| 304 const size_t kAllocSize = 237U; | 307 const size_t kAllocSize = 237U; |
| 305 | 308 |
| 306 { | 309 { |
| 307 ScopedThreadHeapUsage scoped_usage; | 310 ThreadHeapUsageTracker usage_tracker; |
| 311 usage_tracker.Start(); | |
| 308 | 312 |
| 309 // Reallocating nullptr should count as a single alloc. | 313 // Reallocating nullptr should count as a single alloc. |
| 310 void* ptr = MockRealloc(nullptr, kAllocSize); | 314 void* ptr = MockRealloc(nullptr, kAllocSize); |
| 311 ScopedThreadHeapUsage::ThreadAllocatorUsage usage = | 315 ThreadHeapUsage usage = ThreadHeapUsageTracker::CurrentUsage(); |
| 312 ScopedThreadHeapUsage::CurrentUsage(); | |
| 313 EXPECT_EQ(1U, usage.alloc_ops); | 316 EXPECT_EQ(1U, usage.alloc_ops); |
| 314 EXPECT_EQ(kAllocSize, usage.alloc_bytes); | 317 EXPECT_EQ(kAllocSize, usage.alloc_bytes); |
| 315 EXPECT_EQ(0U, usage.alloc_overhead_bytes); | 318 EXPECT_EQ(0U, usage.alloc_overhead_bytes); |
| 316 EXPECT_EQ(0U, usage.free_ops); | 319 EXPECT_EQ(0U, usage.free_ops); |
| 317 EXPECT_EQ(0U, usage.free_bytes); | 320 EXPECT_EQ(0U, usage.free_bytes); |
| 318 EXPECT_EQ(kAllocSize, usage.max_allocated_bytes); | 321 EXPECT_EQ(kAllocSize, usage.max_allocated_bytes); |
| 319 | 322 |
| 320 // Reallocating a valid pointer to a zero size should count as a single | 323 // Reallocating a valid pointer to a zero size should count as a single |
| 321 // free. | 324 // free. |
| 322 ptr = MockRealloc(ptr, 0U); | 325 ptr = MockRealloc(ptr, 0U); |
| 323 | 326 |
| 324 usage = ScopedThreadHeapUsage::CurrentUsage(); | 327 usage_tracker.Stop(false); |
| 325 EXPECT_EQ(1U, usage.alloc_ops); | 328 EXPECT_EQ(1U, usage_tracker.usage().alloc_ops); |
| 326 EXPECT_EQ(kAllocSize, usage.alloc_bytes); | 329 EXPECT_EQ(kAllocSize, usage_tracker.usage().alloc_bytes); |
| 327 EXPECT_EQ(0U, usage.alloc_overhead_bytes); | 330 EXPECT_EQ(0U, usage_tracker.usage().alloc_overhead_bytes); |
| 328 EXPECT_EQ(1U, usage.free_ops); | 331 EXPECT_EQ(1U, usage_tracker.usage().free_ops); |
| 329 EXPECT_EQ(kAllocSize, usage.free_bytes); | 332 EXPECT_EQ(kAllocSize, usage_tracker.usage().free_bytes); |
| 330 EXPECT_EQ(kAllocSize, usage.max_allocated_bytes); | 333 EXPECT_EQ(kAllocSize, usage_tracker.usage().max_allocated_bytes); |
| 331 | 334 |
| 332 // Realloc to zero size may or may not return a nullptr - make sure to | 335 // Realloc to zero size may or may not return a nullptr - make sure to |
| 333 // free the zero-size alloc in the latter case. | 336 // free the zero-size alloc in the latter case. |
| 334 if (ptr != nullptr) | 337 if (ptr != nullptr) |
| 335 MockFree(ptr); | 338 MockFree(ptr); |
| 336 } | 339 } |
| 337 | 340 |
| 338 { | 341 { |
| 339 ScopedThreadHeapUsage scoped_usage; | 342 ThreadHeapUsageTracker usage_tracker; |
| 343 usage_tracker.Start(); | |
| 340 | 344 |
| 341 void* ptr = MockMalloc(kAllocSize); | 345 void* ptr = MockMalloc(kAllocSize); |
| 342 ScopedThreadHeapUsage::ThreadAllocatorUsage usage = | 346 ThreadHeapUsage usage = ThreadHeapUsageTracker::CurrentUsage(); |
| 343 ScopedThreadHeapUsage::CurrentUsage(); | |
| 344 EXPECT_EQ(1U, usage.alloc_ops); | 347 EXPECT_EQ(1U, usage.alloc_ops); |
| 345 | 348 |
| 346 // Now try reallocating a valid pointer to a larger size; this should count | 349 // Now try reallocating a valid pointer to a larger size; this should count |
| 347 // as one free and one alloc. | 350 // as one free and one alloc. |
| 348 const size_t kLargerAllocSize = kAllocSize + 928U; | 351 const size_t kLargerAllocSize = kAllocSize + 928U; |
| 349 ptr = MockRealloc(ptr, kLargerAllocSize); | 352 ptr = MockRealloc(ptr, kLargerAllocSize); |
| 350 | 353 |
| 351 usage = ScopedThreadHeapUsage::CurrentUsage(); | 354 usage_tracker.Stop(false); |
| 352 EXPECT_EQ(2U, usage.alloc_ops); | 355 EXPECT_EQ(2U, usage_tracker.usage().alloc_ops); |
| 353 EXPECT_EQ(kAllocSize + kLargerAllocSize, usage.alloc_bytes); | 356 EXPECT_EQ(kAllocSize + kLargerAllocSize, usage_tracker.usage().alloc_bytes); |
| 354 EXPECT_EQ(0U, usage.alloc_overhead_bytes); | 357 EXPECT_EQ(0U, usage_tracker.usage().alloc_overhead_bytes); |
| 355 EXPECT_EQ(1U, usage.free_ops); | 358 EXPECT_EQ(1U, usage_tracker.usage().free_ops); |
| 356 EXPECT_EQ(kAllocSize, usage.free_bytes); | 359 EXPECT_EQ(kAllocSize, usage_tracker.usage().free_bytes); |
| 357 EXPECT_EQ(kLargerAllocSize, usage.max_allocated_bytes); | 360 EXPECT_EQ(kLargerAllocSize, usage_tracker.usage().max_allocated_bytes); |
| 358 | 361 |
| 359 MockFree(ptr); | 362 MockFree(ptr); |
| 360 } | 363 } |
| 361 } | 364 } |
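
The realloc test encodes a simple accounting convention: `realloc(nullptr, n)` counts as one alloc, `realloc(p, 0)` as one free, and a genuine resize as one free of the old block plus one alloc of the new one. A hypothetical hook with exactly that accounting (the counter names are illustrative, not the CL's):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdlib>

static uint64_t g_alloc_ops = 0;  // illustrative counters
static uint64_t g_free_ops = 0;

// Hypothetical realloc hook showing the accounting the test asserts.
void* ReallocHook(void* address, std::size_t new_size) {
  if (address != nullptr)
    ++g_free_ops;   // the old block goes away (even when new_size == 0)
  if (new_size != 0)
    ++g_alloc_ops;  // a new block comes into being
  return std::realloc(address, new_size);
}
```
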
| 362 | 365 |
| 363 TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) { | 366 TEST_F(ThreadHeapUsageTrackerTest, NestedMaxWorks) { |
| 364 ScopedThreadHeapUsage outer_scoped_usage; | 367 ThreadHeapUsageTracker usage_tracker; |
| 368 usage_tracker.Start(); | |
| 365 | 369 |
| 366 const size_t kOuterAllocSize = 1029U; | 370 const size_t kOuterAllocSize = 1029U; |
| 367 void* ptr = MockMalloc(kOuterAllocSize); | 371 void* ptr = MockMalloc(kOuterAllocSize); |
| 368 MockFree(ptr); | 372 MockFree(ptr); |
| 369 | 373 |
| 370 EXPECT_EQ(kOuterAllocSize, | 374 EXPECT_EQ(kOuterAllocSize, |
| 371 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 375 ThreadHeapUsageTracker::CurrentUsage().max_allocated_bytes); |
| 372 | 376 |
| 373 { | 377 { |
| 374 ScopedThreadHeapUsage inner_scoped_usage; | 378 ThreadHeapUsageTracker inner_usage_tracker; |
| 379 inner_usage_tracker.Start(); | |
| 375 | 380 |
| 376 const size_t kInnerAllocSize = 673U; | 381 const size_t kInnerAllocSize = 673U; |
| 377 ptr = MockMalloc(kInnerAllocSize); | 382 ptr = MockMalloc(kInnerAllocSize); |
| 378 MockFree(ptr); | 383 MockFree(ptr); |
| 379 | 384 |
| 380 EXPECT_EQ(kInnerAllocSize, | 385 inner_usage_tracker.Stop(false); |
| 381 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 386 |
| 387 EXPECT_EQ(kInnerAllocSize, inner_usage_tracker.usage().max_allocated_bytes); | |
| 382 } | 388 } |
| 383 | 389 |
| 384 // The greater, outer allocation size should have been restored. | 390 // The greater, outer allocation size should have been restored. |
| 385 EXPECT_EQ(kOuterAllocSize, | 391 EXPECT_EQ(kOuterAllocSize, |
| 386 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 392 ThreadHeapUsageTracker::CurrentUsage().max_allocated_bytes); |
| 387 | 393 |
| 388 const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U; | 394 const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U; |
| 389 { | 395 { |
| 390 ScopedThreadHeapUsage inner_scoped_usage; | 396 ThreadHeapUsageTracker inner_usage_tracker; |
| 397 inner_usage_tracker.Start(); | |
| 391 | 398 |
| 392 ptr = MockMalloc(kLargerInnerAllocSize); | 399 ptr = MockMalloc(kLargerInnerAllocSize); |
| 393 MockFree(ptr); | 400 MockFree(ptr); |
| 394 | 401 |
| 402 inner_usage_tracker.Stop(false); | |
| 395 EXPECT_EQ(kLargerInnerAllocSize, | 403 EXPECT_EQ(kLargerInnerAllocSize, |
| 396 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 404 inner_usage_tracker.usage().max_allocated_bytes); |
| 397 } | 405 } |
| 398 | 406 |
| 399 // The greater, inner allocation size should have been preserved. | 407 // The greater, inner allocation size should have been preserved. |
| 400 EXPECT_EQ(kLargerInnerAllocSize, | 408 EXPECT_EQ(kLargerInnerAllocSize, |
| 401 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 409 ThreadHeapUsageTracker::CurrentUsage().max_allocated_bytes); |
| 402 | 410 |
| 403 // Now try the case with an outstanding net alloc size when entering the | 411 // Now try the case with an outstanding net alloc size when entering the |
| 404 // inner scope. | 412 // inner scope. |
| 405 void* outer_ptr = MockMalloc(kOuterAllocSize); | 413 void* outer_ptr = MockMalloc(kOuterAllocSize); |
| 406 EXPECT_EQ(kLargerInnerAllocSize, | 414 EXPECT_EQ(kLargerInnerAllocSize, |
| 407 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 415 ThreadHeapUsageTracker::CurrentUsage().max_allocated_bytes); |
| 408 { | 416 { |
| 409 ScopedThreadHeapUsage inner_scoped_usage; | 417 ThreadHeapUsageTracker inner_usage_tracker; |
| 418 inner_usage_tracker.Start(); | |
| 410 | 419 |
| 411 ptr = MockMalloc(kLargerInnerAllocSize); | 420 ptr = MockMalloc(kLargerInnerAllocSize); |
| 412 MockFree(ptr); | 421 MockFree(ptr); |
| 413 | 422 |
| 423 inner_usage_tracker.Stop(false); | |
| 414 EXPECT_EQ(kLargerInnerAllocSize, | 424 EXPECT_EQ(kLargerInnerAllocSize, |
| 415 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 425 inner_usage_tracker.usage().max_allocated_bytes); |
| 416 } | 426 } |
| 417 | 427 |
| 418 // While the inner scope saw only the inner net outstanding allocation size, | 428 // While the inner scope saw only the inner net outstanding allocation size, |
| 419 // the outer scope saw both outstanding at the same time. | 429 // the outer scope saw both outstanding at the same time. |
| 420 EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize, | 430 EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize, |
| 421 ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes); | 431 ThreadHeapUsageTracker::CurrentUsage().max_allocated_bytes); |
| 422 | 432 |
| 423 MockFree(outer_ptr); | 433 MockFree(outer_ptr); |
| 434 | |
| 435 // Test a net-negative scope. | |
| 436 ptr = MockMalloc(kLargerInnerAllocSize); | |
| 437 { | |
| 438 ThreadHeapUsageTracker inner_usage_tracker; | |
| 439 inner_usage_tracker.Start(); | |
| 440 | |
| 441 MockFree(ptr); | |
| 442 | |
| 443 const size_t kInnerAllocSize = 1; | |
| 444 ptr = MockMalloc(kInnerAllocSize); | |
| 445 | |
| 446 inner_usage_tracker.Stop(false); | |
| 447 // Since the scope is still net-negative, the max is clamped at zero. | |
| 448 EXPECT_EQ(0U, inner_usage_tracker.usage().max_allocated_bytes); | |
| 449 } | |
| 424 } | 450 } |
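
The new net-negative block pins down the watermark rule: the high-water mark only moves while the scope holds more allocated than freed bytes. With unsigned counters, that takes an explicit guard; a plausible form, written to match the assertion above rather than quoted from the shim:

```cpp
#include <algorithm>
#include <cstdint>

// Hypothetical watermark update, run on each sized allocation. The
// alloc_bytes >= free_bytes guard is what keeps a net-negative scope (more
// freed than allocated, as in the last block above) clamped at zero instead
// of underflowing to a huge unsigned value.
void UpdateWatermark(uint64_t alloc_bytes, uint64_t free_bytes,
                     uint64_t* max_allocated_bytes) {
  if (alloc_bytes >= free_bytes) {
    *max_allocated_bytes =
        std::max(*max_allocated_bytes, alloc_bytes - free_bytes);
  }
}
```
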
| 425 | 451 |
| 426 TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) { | 452 TEST_F(ThreadHeapUsageTrackerTest, NoStopImpliesInclusive) { |
| 453 ThreadHeapUsageTracker usage_tracker; | |
| 454 usage_tracker.Start(); | |
| 455 | |
| 456 const size_t kOuterAllocSize = 1029U; | |
| 457 void* ptr = MockMalloc(kOuterAllocSize); | |
| 458 MockFree(ptr); | |
| 459 | |
| 460 ThreadHeapUsage usage = ThreadHeapUsageTracker::CurrentUsage(); | |
| 461 EXPECT_EQ(kOuterAllocSize, usage.max_allocated_bytes); | |
| 462 | |
| 463 const size_t kInnerLargerAllocSize = kOuterAllocSize + 673U; | |
| 464 | |
| 465 { | |
| 466 ThreadHeapUsageTracker inner_usage_tracker; | |
| 467 inner_usage_tracker.Start(); | |
| 468 | |
| 469 // Make a larger allocation than the outer scope. | |
| 470 ptr = MockMalloc(kInnerLargerAllocSize); | |
| 471 MockFree(ptr); | |
| 472 | |
| 473 // inner_usage_tracker goes out of scope without a Stop(). | |
| 474 } | |
| 475 | |
| 476 ThreadHeapUsage current = ThreadHeapUsageTracker::CurrentUsage(); | |
| 477 EXPECT_EQ(usage.alloc_ops + 1, current.alloc_ops); | |
| 478 EXPECT_EQ(usage.alloc_bytes + kInnerLargerAllocSize, current.alloc_bytes); | |
| 479 EXPECT_EQ(usage.free_ops + 1, current.free_ops); | |
| 480 EXPECT_EQ(usage.free_bytes + kInnerLargerAllocSize, current.free_bytes); | |
| 481 EXPECT_EQ(kInnerLargerAllocSize, current.max_allocated_bytes); | |
| 482 } | |
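
This test fixes the destructor's default: a tracker that goes out of scope without an explicit `Stop()` behaves like an inclusive stop, so the inner scope's operation counts and its larger watermark flow into the enclosing scope. One way to get that behavior, assuming a hypothetical `stopped_` flag (illustrative, not the CL's implementation):

```cpp
// Illustrative destructor: an un-stopped tracker folds its scope into the
// enclosing one, matching the expectations above. |stopped_| is hypothetical.
ThreadHeapUsageTracker::~ThreadHeapUsageTracker() {
  if (!stopped_)
    Stop(false);  // inclusive by default
}
```
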
| 483 | |
| 484 TEST_F(ThreadHeapUsageTrackerTest, ExclusiveScopesWork) { | |
| 485 ThreadHeapUsageTracker usage_tracker; | |
| 486 usage_tracker.Start(); | |
| 487 | |
| 488 const size_t kOuterAllocSize = 1029U; | |
| 489 void* ptr = MockMalloc(kOuterAllocSize); | |
| 490 MockFree(ptr); | |
| 491 | |
| 492 ThreadHeapUsage usage = ThreadHeapUsageTracker::CurrentUsage(); | |
| 493 EXPECT_EQ(kOuterAllocSize, usage.max_allocated_bytes); | |
| 494 | |
| 495 { | |
| 496 ThreadHeapUsageTracker inner_usage_tracker; | |
| 497 inner_usage_tracker.Start(); | |
| 498 | |
| 499 // Make a larger allocation than the outer scope. | |
| 500 ptr = MockMalloc(kOuterAllocSize + 673U); | |
| 501 MockFree(ptr); | |
| 502 | |
| 503 // This tracker is exclusive; all activity should be private to this scope. | |
| 504 inner_usage_tracker.Stop(true); | |
| 505 } | |
| 506 | |
| 507 ThreadHeapUsage current = ThreadHeapUsageTracker::CurrentUsage(); | |
| 508 EXPECT_EQ(usage.alloc_ops, current.alloc_ops); | |
| 509 EXPECT_EQ(usage.alloc_bytes, current.alloc_bytes); | |
| 510 EXPECT_EQ(usage.alloc_overhead_bytes, current.alloc_overhead_bytes); | |
| 511 EXPECT_EQ(usage.free_ops, current.free_ops); | |
| 512 EXPECT_EQ(usage.free_bytes, current.free_bytes); | |
| 513 EXPECT_EQ(usage.max_allocated_bytes, current.max_allocated_bytes); | |
| 514 } | |
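
`Stop(true)` is the other half of the contract: an exclusive scope restores the enclosing scope's counters as if the inner activity never happened, which is why every counter in `current` matches the pre-scope snapshot above. A self-contained sketch of the fold, written to match the assertions in this and the two preceding tests rather than quoted from the CL; it assumes `Start()` snapshotted the enclosing scope's counters and zeroed the thread-local stats, and it elides the restore of those stats:

```cpp
#include <algorithm>
#include <cstdint>

struct ThreadHeapUsage {  // field subset from the CL
  uint64_t alloc_ops = 0, alloc_bytes = 0, alloc_overhead_bytes = 0;
  uint64_t free_ops = 0, free_bytes = 0, max_allocated_bytes = 0;
};

// |scope| holds this scope's own tally; |outer| holds the counters saved at
// Start(). Exclusive scopes skip the fold entirely.
void StopScope(bool usage_is_exclusive, const ThreadHeapUsage& scope,
               ThreadHeapUsage* outer) {
  if (usage_is_exclusive)
    return;  // the enclosing scope's counters come back untouched

  // Raise the enclosing scope's watermark by this scope's peak on top of
  // whatever the enclosing scope had outstanding at Start()...
  uint64_t outer_net = outer->alloc_bytes >= outer->free_bytes
                           ? outer->alloc_bytes - outer->free_bytes
                           : 0;
  outer->max_allocated_bytes = std::max(
      outer->max_allocated_bytes, outer_net + scope.max_allocated_bytes);
  // ...and fold this scope's operation and byte counts into it.
  outer->alloc_ops += scope.alloc_ops;
  outer->alloc_bytes += scope.alloc_bytes;
  outer->alloc_overhead_bytes += scope.alloc_overhead_bytes;
  outer->free_ops += scope.free_ops;
  outer->free_bytes += scope.free_bytes;
}
```
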
| 515 | |
| 516 TEST_F(ThreadHeapUsageTrackerTest, AllShimFunctionsAreProvided) { | |
| 427 const size_t kAllocSize = 100; | 517 const size_t kAllocSize = 100; |
| 428 void* alloc = MockMalloc(kAllocSize); | 518 void* alloc = MockMalloc(kAllocSize); |
| 429 size_t estimate = MockGetSizeEstimate(alloc); | 519 size_t estimate = MockGetSizeEstimate(alloc); |
| 430 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); | 520 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); |
| 431 MockFree(alloc); | 521 MockFree(alloc); |
| 432 | 522 |
| 433 alloc = MockCalloc(kAllocSize, 1); | 523 alloc = MockCalloc(kAllocSize, 1); |
| 434 estimate = MockGetSizeEstimate(alloc); | 524 estimate = MockGetSizeEstimate(alloc); |
| 435 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); | 525 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); |
| 436 MockFree(alloc); | 526 MockFree(alloc); |
| 437 | 527 |
| 438 alloc = MockAllocAligned(1, kAllocSize); | 528 alloc = MockAllocAligned(1, kAllocSize); |
| 439 estimate = MockGetSizeEstimate(alloc); | 529 estimate = MockGetSizeEstimate(alloc); |
| 440 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); | 530 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); |
| 441 | 531 |
| 442 alloc = MockRealloc(alloc, kAllocSize); | 532 alloc = MockRealloc(alloc, kAllocSize); |
| 443 estimate = MockGetSizeEstimate(alloc); | 533 estimate = MockGetSizeEstimate(alloc); |
| 444 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); | 534 ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize); |
| 445 MockFree(alloc); | 535 MockFree(alloc); |
| 446 } | 536 } |
| 447 | 537 |
| 448 #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) | 538 #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) |
| 449 TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) { | 539 TEST(ThreadHeapUsageShimTest, HooksIntoMallocWhenShimAvailable) { |
| 450 ScopedThreadHeapUsage::Initialize(); | 540 ASSERT_FALSE(ThreadHeapUsageTracker::IsHeapTrackingEnabled()); |
| 451 ScopedThreadHeapUsage::EnableHeapTracking(); | 541 |
| 542 ThreadHeapUsageTracker::EnableHeapTracking(); | |
| 543 | |
| 544 ASSERT_TRUE(ThreadHeapUsageTracker::IsHeapTrackingEnabled()); | |
| 452 | 545 |
| 453 const size_t kAllocSize = 9993; | 546 const size_t kAllocSize = 9993; |
| 454 // This test verifies that the scoped heap data is affected by malloc & | 547 // This test verifies that the scoped heap data is affected by malloc & |
| 455 // free only when the shim is available. | 548 // free only when the shim is available. |
| 456 ScopedThreadHeapUsage scoped_usage; | 549 ThreadHeapUsageTracker usage_tracker; |
| 550 usage_tracker.Start(); | |
| 457 | 551 |
| 458 ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = | 552 ThreadHeapUsage u1 = ThreadHeapUsageTracker::CurrentUsage(); |
| 459 ScopedThreadHeapUsage::CurrentUsage(); | |
| 460 void* ptr = malloc(kAllocSize); | 553 void* ptr = malloc(kAllocSize); |
| 461 // Prevent the compiler from optimizing out the malloc/free pair. | 554 // Prevent the compiler from optimizing out the malloc/free pair. |
| 462 ASSERT_NE(nullptr, ptr); | 555 ASSERT_NE(nullptr, ptr); |
| 463 | 556 |
| 464 ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = | 557 ThreadHeapUsage u2 = ThreadHeapUsageTracker::CurrentUsage(); |
| 465 ScopedThreadHeapUsage::CurrentUsage(); | |
| 466 free(ptr); | 558 free(ptr); |
| 467 ScopedThreadHeapUsage::ThreadAllocatorUsage u3 = | 559 |
| 468 ScopedThreadHeapUsage::CurrentUsage(); | 560 usage_tracker.Stop(false); |
| 561 ThreadHeapUsage u3 = usage_tracker.usage(); | |
| 469 | 562 |
| 470 // Verify that at least one allocation operation was recorded, and that free | 563 // Verify that at least one allocation operation was recorded, and that free |
| 471 // operations are at least monotonically growing. | 564 // operations are at least monotonically growing. |
| 472 EXPECT_LE(0U, u1.alloc_ops); | 565 EXPECT_LE(0U, u1.alloc_ops); |
| 473 EXPECT_LE(u1.alloc_ops + 1, u2.alloc_ops); | 566 EXPECT_LE(u1.alloc_ops + 1, u2.alloc_ops); |
| 474 EXPECT_LE(u1.alloc_ops + 1, u3.alloc_ops); | 567 EXPECT_LE(u1.alloc_ops + 1, u3.alloc_ops); |
| 475 | 568 |
| 476 // Verify that at least the bytes above were recorded. | 569 // Verify that at least the bytes above were recorded. |
| 477 EXPECT_LE(u1.alloc_bytes + kAllocSize, u2.alloc_bytes); | 570 EXPECT_LE(u1.alloc_bytes + kAllocSize, u2.alloc_bytes); |
| 478 | 571 |
| 479 // Verify that at least the one free operation above was recorded. | 572 // Verify that at least the one free operation above was recorded. |
| 480 EXPECT_LE(u2.free_ops + 1, u3.free_ops); | 573 EXPECT_LE(u2.free_ops + 1, u3.free_ops); |
| 481 | 574 |
| 482 TestingScopedThreadHeapUsage::DisableHeapTrackingForTesting(); | 575 TestingThreadHeapUsageTracker::DisableHeapTrackingForTesting(); |
| 576 | |
| 577 ASSERT_FALSE(ThreadHeapUsageTracker::IsHeapTrackingEnabled()); | |
| 483 } | 578 } |
| 484 #endif // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) | 579 #endif // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) |
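
Unlike the per-thread trackers, `EnableHeapTracking()` installs the shim hook process-wide, which is why the test asserts `IsHeapTrackingEnabled()` around it and must disable tracking again before exiting. A typical guarded use, based only on the calls this test makes:

```cpp
#include "base/debug/thread_heap_usage_tracker.h"

void EnsureHeapTrackingEnabled() {
  using base::debug::ThreadHeapUsageTracker;
  if (!ThreadHeapUsageTracker::IsHeapTrackingEnabled())
    ThreadHeapUsageTracker::EnableHeapTracking();  // hooks the shim process-wide
}
```
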
| 485 | 580 |
| 486 } // namespace debug | 581 } // namespace debug |
| 487 } // namespace base | 582 } // namespace base |