| Index: base/debug/scoped_thread_heap_usage_unittest.cc |
| diff --git a/base/debug/scoped_thread_heap_usage_unittest.cc b/base/debug/scoped_thread_heap_usage_unittest.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..59f30ebd2b5441d8155b817ffec40c2e69260f7a |
| --- /dev/null |
| +++ b/base/debug/scoped_thread_heap_usage_unittest.cc |
| @@ -0,0 +1,348 @@ |
| +// Copyright 2016 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include "base/debug/scoped_thread_heap_usage.h" |
| + |
| +#include <map> |
| + |
| +#include "base/allocator/allocator_shim.h" |
| +#include "base/allocator/features.h" |
| +#include "testing/gtest/include/gtest/gtest.h" |
| + |
| +namespace base { |
| +namespace debug { |
| + |
| +namespace { |
| + |
| +class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage { |
| + public: |
| + using ScopedThreadHeapUsage::TearDownForTesting; |
| + using ScopedThreadHeapUsage::GetDispatchForTesting; |
| +}; |
| + |
| +// A fixture class that allows testing the AllocatorDispatch associated with |
chrisha 2016/09/01 20:29:18: allows*
Sigurður Ásgeirsson 2016/09/06 14:58:54: Done.
| +// the ScopedThreadHeapUsage class in isolation against a mocked underlying |
| +// heap implementation. |
| +class ScopedThreadHeapUsageTest : public testing::Test { |
| + public: |
| + using AllocatorDispatch = base::allocator::AllocatorDispatch; |
| + |
| + static const size_t kAllocationPadding = 23; |
| + enum SizeFunctionKind { |
| + EXACT_SIZE_FUNCTION, |
| + PADDING_SIZE_FUNCTION, |
| + ZERO_SIZE_FUNCTION, |
| + }; |
| + |
| + ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) { |
| + EXPECT_EQ(nullptr, g_self); |
| + g_self = this; |
| + } |
| + |
| + ~ScopedThreadHeapUsageTest() override { |
| + EXPECT_EQ(this, g_self); |
| + g_self = nullptr; |
| + } |
| + |
| + void set_size_function_kind(SizeFunctionKind kind) { |
| + size_function_kind_ = kind; |
| + } |
| + |
| + void SetUp() override { |
| + dispatch_under_test_ = |
| + TestingScopedThreadHeapUsage::GetDispatchForTesting(); |
| + ASSERT_EQ(nullptr, dispatch_under_test_->next); |
| + |
| + dispatch_under_test_->next = &g_mock_dispatch; |
| + } |
| + |
| + void TearDown() override { |
| + ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next); |
| + |
| + dispatch_under_test_->next = nullptr; |
| + } |
| + |
| + void* MockMalloc(size_t size) { |
| + return dispatch_under_test_->alloc_function(dispatch_under_test_, size); |
| + } |
| + |
| + void* MockCalloc(size_t n, size_t size) { |
| + return dispatch_under_test_->alloc_zero_initialized_function( |
| + dispatch_under_test_, n, size); |
| + } |
| + |
| + void* MockAllocAligned(size_t alignment, size_t size) { |
| + return dispatch_under_test_->alloc_aligned_function(dispatch_under_test_, |
| + alignment, size); |
| + } |
| + |
| + void* MockRealloc(void* address, size_t size) { |
| + return dispatch_under_test_->realloc_function(dispatch_under_test_, address, |
| + size); |
| + } |
| + |
| + void MockFree(void* address) { |
| + dispatch_under_test_->free_function(dispatch_under_test_, address); |
| + } |
| + |
| + private: |
| + void RecordAlloc(void* address, size_t size) { |
| + if (address != nullptr) |
| + allocation_size_map_[address] = size; |
| + } |
| + |
| + void DeleteAlloc(void* address) { |
| + if (address != nullptr) |
| + EXPECT_EQ(1U, allocation_size_map_.erase(address)); |
| + } |
| + |
| + size_t GetSizeEstimate(void* address) { |
| + auto it = allocation_size_map_.find(address); |
| + if (it == allocation_size_map_.end()) |
| + return 0; |
| + |
| + size_t ret = it->second; |
| + switch (size_function_kind_) { |
| + case EXACT_SIZE_FUNCTION: |
| + break; |
| + case PADDING_SIZE_FUNCTION: |
| + ret += kAllocationPadding; |
| + break; |
| + case ZERO_SIZE_FUNCTION: |
| + ret = 0; |
| + break; |
| + } |
| + |
| + return ret; |
| + } |
| + |
| + static void* OnAllocFn(const AllocatorDispatch* self, size_t size) { |
| + void* ret = malloc(size); |
| + g_self->RecordAlloc(ret, size); |
| + return ret; |
| + } |
| + |
| + static void* OnAllocZeroInitializedFn(const AllocatorDispatch* self, |
| + size_t n, |
| + size_t size) { |
| + void* ret = calloc(n, size); |
| + g_self->RecordAlloc(ret, n * size); |
| + return ret; |
| + } |
| + |
| + static void* OnAllocAlignedFn(const AllocatorDispatch* self, |
| + size_t alignment, |
| + size_t size) { |
| + // This is a cheat as it doesn't return aligned allocations. This has the |
| + // advantage of working for all platforms for this test. |
| + void* ret = malloc(size); |
| + g_self->RecordAlloc(ret, size); |
| + return ret; |
| + } |
| + |
| + static void* OnReallocFn(const AllocatorDispatch* self, |
| + void* address, |
| + size_t size) { |
| + g_self->DeleteAlloc(address); |
| + void* ret = realloc(address, size); |
| + g_self->RecordAlloc(ret, size); |
| + return ret; |
| + } |
| + |
| + static void OnFreeFn(const AllocatorDispatch* self, void* address) { |
| + g_self->DeleteAlloc(address); |
| + free(address); |
| + } |
| + |
| + static size_t OnGetSizeEstimateFn(const AllocatorDispatch* self, |
| + void* address) { |
| + return g_self->GetSizeEstimate(address); |
| + } |
| + |
| + using AllocationSizeMap = std::map<void*, size_t>; |
| + |
| + SizeFunctionKind size_function_kind_; |
| + AllocationSizeMap allocation_size_map_; |
| + AllocatorDispatch* dispatch_under_test_; |
| + |
| + static base::allocator::AllocatorDispatch g_mock_dispatch; |
| + static ScopedThreadHeapUsageTest* g_self; |
| +}; |
| + |
| +ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr; |
| + |
| +base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch = |
| + { |
| + &ScopedThreadHeapUsageTest::OnAllocFn, // alloc_function |
| + &ScopedThreadHeapUsageTest:: |
| + OnAllocZeroInitializedFn, // alloc_zero_initialized_function |
| + &ScopedThreadHeapUsageTest::OnAllocAlignedFn, // alloc_aligned_function |
| + &ScopedThreadHeapUsageTest::OnReallocFn, // realloc_function |
| + &ScopedThreadHeapUsageTest::OnFreeFn, // free_function |
| + &ScopedThreadHeapUsageTest:: |
| + OnGetSizeEstimateFn, // get_size_estimate_function |
| + nullptr, // next |
| +}; |
| + |
| +} // namespace |
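For readers unfamiliar with the shim: base/allocator/allocator_shim.h chains AllocatorDispatch structs together through their `next` pointers, and each hook is expected to forward to the next entry — which is exactly what the fixture exploits when SetUp() splices g_mock_dispatch in behind the dispatch under test. A minimal interposing hook, sketched here for illustration (the counter is hypothetical; the calling convention is the one the MockMalloc helper above exercises):

```cpp
#include <atomic>
#include <cstddef>

#include "base/allocator/allocator_shim.h"

using base::allocator::AllocatorDispatch;

std::atomic<size_t> g_alloc_count{0};  // Hypothetical bookkeeping.

// Tallies the allocation, then forwards to the next dispatch in the chain,
// mirroring how the dispatch under test delegates to g_mock_dispatch.
void* CountingAllocFn(const AllocatorDispatch* self, size_t size) {
  g_alloc_count.fetch_add(1, std::memory_order_relaxed);
  return self->next->alloc_function(self->next, size);
}
```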
| + |
| +TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) { |
| + set_size_function_kind(EXACT_SIZE_FUNCTION); |
| + |
| + ScopedThreadHeapUsage scoped_usage; |
| + |
| + ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now(); |
| + |
| + EXPECT_EQ(0U, u1.alloc_ops); |
| + EXPECT_EQ(0U, u1.alloc_bytes); |
| + EXPECT_EQ(0U, u1.alloc_overhead_bytes); |
| + EXPECT_EQ(0U, u1.free_ops); |
| + EXPECT_EQ(0U, u1.free_bytes); |
| + EXPECT_EQ(0U, u1.max_allocated_bytes); |
| + |
| + const size_t kAllocSize = 1029U; |
| + void* ptr = MockMalloc(kAllocSize); |
| + MockFree(ptr); |
| + |
| + ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now(); |
| + |
| + EXPECT_EQ(1U, u2.alloc_ops); |
| + EXPECT_EQ(kAllocSize, u2.alloc_bytes); |
| + EXPECT_EQ(0U, u2.alloc_overhead_bytes); |
| + EXPECT_EQ(1U, u2.free_ops); |
| + EXPECT_EQ(kAllocSize, u2.free_bytes); |
| + EXPECT_EQ(kAllocSize, u2.max_allocated_bytes); |
| +} |
| + |
| +TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) { |
| + set_size_function_kind(ZERO_SIZE_FUNCTION); |
| + |
| + ScopedThreadHeapUsage scoped_usage; |
| + |
| + ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now(); |
| + EXPECT_EQ(0U, u1.alloc_ops); |
| + EXPECT_EQ(0U, u1.alloc_bytes); |
| + EXPECT_EQ(0U, u1.alloc_overhead_bytes); |
| + EXPECT_EQ(0U, u1.free_ops); |
| + EXPECT_EQ(0U, u1.free_bytes); |
| + EXPECT_EQ(0U, u1.max_allocated_bytes); |
| + |
| + const size_t kAllocSize = 1029U; |
| + void* ptr = MockMalloc(kAllocSize); |
| + MockFree(ptr); |
| + |
| + ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now(); |
| + |
| + // With a get-size function that returns zero, there's no way to get the |
| + // size of an allocation that's being freed, so the shim can't tally freed |
| + // bytes or the high watermark of allocated bytes. |
| + EXPECT_EQ(1U, u2.alloc_ops); |
| + EXPECT_EQ(kAllocSize, u2.alloc_bytes); |
| + EXPECT_EQ(0U, u2.alloc_overhead_bytes); |
| + EXPECT_EQ(1U, u2.free_ops); |
| + EXPECT_EQ(0U, u2.free_bytes); |
| + EXPECT_EQ(0U, u2.max_allocated_bytes); |
| +} |
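The asymmetry this test pins down is inherent to the dispatch interface: an allocation carries its requested size, but a free only carries an address, so the shim must ask the heap how large the block being released is. A hedged sketch of that dependency (the struct and function names here are illustrative stand-ins, not the real implementation in scoped_thread_heap_usage.cc):

```cpp
#include <cstddef>
#include <cstdint>

#include "base/allocator/allocator_shim.h"

using base::allocator::AllocatorDispatch;

struct Usage {  // Hypothetical stand-in for ThreadAllocatorUsage.
  uint64_t free_ops = 0;
  uint64_t free_bytes = 0;
};

// On free, the released size must come from get_size_estimate_function. When
// the estimate is 0 (the ZERO_SIZE_FUNCTION case), free_bytes can't be
// tallied, and the net-outstanding count that feeds max_allocated_bytes
// never moves either.
void TallyFree(const AllocatorDispatch* self, void* address, Usage* usage) {
  size_t size = self->next->get_size_estimate_function(self->next, address);
  usage->free_ops++;
  usage->free_bytes += size;  // Remains 0 when no estimate is available.
  self->next->free_function(self->next, address);
}
```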
| + |
| +TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) { |
| + ScopedThreadHeapUsage outer_scoped_usage; |
| + |
| + const size_t kOuterAllocSize = 1029U; |
| + void* ptr = MockMalloc(kOuterAllocSize); |
| + MockFree(ptr); |
| + |
| + EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + |
| + { |
| + ScopedThreadHeapUsage inner_scoped_usage; |
| + |
| + const size_t kInnerAllocSize = 673U; |
| + ptr = MockMalloc(kInnerAllocSize); |
| + MockFree(ptr); |
| + |
| + EXPECT_EQ(kInnerAllocSize, |
| + ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + } |
| + |
| + // The greater, outer allocation size should have been restored. |
| + EXPECT_EQ(kOuterAllocSize, ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + |
| + const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U; |
| + { |
| + ScopedThreadHeapUsage inner_scoped_usage; |
| + |
| + ptr = MockMalloc(kLargerInnerAllocSize); |
| + MockFree(ptr); |
| + |
| + EXPECT_EQ(kLargerInnerAllocSize, |
| + ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + } |
| + |
| + // The greater, inner allocation size should have been preserved. |
| + EXPECT_EQ(kLargerInnerAllocSize, |
| + ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + |
| + // Now try the case with an outstanding net alloc size when entering the |
| + // inner scope. |
| + void* outer_ptr = MockMalloc(kOuterAllocSize); |
| + EXPECT_EQ(kLargerInnerAllocSize, |
| + ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + { |
| + ScopedThreadHeapUsage inner_scoped_usage; |
| + |
| + ptr = MockMalloc(kLargerInnerAllocSize); |
| + MockFree(ptr); |
| + |
| + EXPECT_EQ(kLargerInnerAllocSize, |
| + ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + } |
| + |
| + // While the inner scope saw only the inner net outstanding allocation size, |
chrisha 2016/09/01 20:29:18: allocation*
Sigurður Ásgeirsson 2016/09/06 14:58:54: Done.
| + // the outer scope saw both outstanding at the same time. |
| + EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize, |
| + ScopedThreadHeapUsage::Now().max_allocated_bytes); |
| + |
| + MockFree(outer_ptr); |
| +} |
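The last expectation encodes the re-basing an exiting inner scope has to perform: its high watermark only becomes comparable with the outer scope's after adding the bytes the outer scope had outstanding when the inner scope was entered. Sketched as arithmetic (the std::max fold is an assumption inferred from the expectations above, not taken from scoped_thread_heap_usage.cc):

```cpp
#include <algorithm>
#include <cstdint>

// Hypothetical fold performed when an inner scope exits.
// outer_net_outstanding: bytes the outer scope had allocated but not yet
// freed at the point the inner scope was entered (kOuterAllocSize above).
uint64_t FoldNestedMax(uint64_t outer_max_before,
                       uint64_t outer_net_outstanding,
                       uint64_t inner_max) {
  return std::max(outer_max_before, outer_net_outstanding + inner_max);
}

// With this test's numbers: FoldNestedMax(1702, 1029, 1702) == 2731, i.e.
// kOuterAllocSize + kLargerInnerAllocSize, matching the final EXPECT_EQ.
```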
| + |
| +namespace { |
| + |
| +class ScopedThreadHeapShimTest : public testing::Test { |
| + public: |
| + void SetUp() override { ScopedThreadHeapUsage::Initialize(); } |
| + |
| + void TearDown() override { |
| + TestingScopedThreadHeapUsage::TearDownForTesting(); |
| + } |
| +}; |
| + |
| +} // namespace |
| + |
| +#if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) |
| +TEST_F(ScopedThreadHeapShimTest, HooksIntoMallocWithShim) { |
| + const size_t kAllocSize = 9993; |
| + // This test verifies that the scoped heap data is affected by malloc & |
| + // free. |
| + ScopedThreadHeapUsage scoped_usage; |
| + |
| + void* ptr = malloc(kAllocSize); |
| + ScopedThreadHeapUsage::ThreadAllocatorUsage u1 = ScopedThreadHeapUsage::Now(); |
| + free(ptr); |
| + ScopedThreadHeapUsage::ThreadAllocatorUsage u2 = ScopedThreadHeapUsage::Now(); |
| + |
| + // Verify that at least one allocation operation was recorded, and that the |
| + // allocation op count grows monotonically. |
| + EXPECT_LE(1U, u1.alloc_ops); |
| + EXPECT_LE(u1.alloc_ops, u2.alloc_ops); |
| + |
| + // Verify that at least the bytes above were recorded. |
| + EXPECT_LE(kAllocSize, u2.alloc_bytes); |
| + |
| + // Verify that at least the one free operation above was recorded. |
chrisha 2016/09/01 20:29:18: above*
Sigurður Ásgeirsson 2016/09/06 14:58:54: Done.
| + EXPECT_LE(u1.free_ops + 1, u2.free_ops); |
| +} |
| +#endif // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM) |
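As a usage note, the pattern this shim test exercises is the one client code would follow: initialize the machinery once, bracket the region of interest with a scope, and snapshot with Now(). A minimal sketch using only the API surface visible in this file (DoExpensiveWork is a hypothetical workload):

```cpp
#include "base/debug/scoped_thread_heap_usage.h"

void MeasureWorkload() {
  // ScopedThreadHeapUsage::Initialize() must already have run, e.g. at
  // process start-up, for the hooks to be in place.
  base::debug::ScopedThreadHeapUsage scoped_usage;

  DoExpensiveWork();  // Hypothetical workload under measurement.

  base::debug::ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
      base::debug::ScopedThreadHeapUsage::Now();
  // usage.alloc_ops, usage.alloc_bytes, usage.free_bytes and
  // usage.max_allocated_bytes now describe this thread's heap activity
  // since scoped_usage was created.
}
```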
| + |
| +} // namespace debug |
| +} // namespace base |