Index: base/debug/scoped_thread_heap_usage_unittest.cc
diff --git a/base/debug/scoped_thread_heap_usage_unittest.cc b/base/debug/scoped_thread_heap_usage_unittest.cc
index da66a32b6306887c7f49b7e4afd93e3695b0ac69..417f1548a643e72aa49bf1ce17f457910cd307a5 100644
--- a/base/debug/scoped_thread_heap_usage_unittest.cc
+++ b/base/debug/scoped_thread_heap_usage_unittest.cc
@@ -15,16 +15,17 @@ namespace debug {
 
 namespace {
 
-class TestingScopedThreadHeapUsage : public ScopedThreadHeapUsage {
+class TestingHeapUsageTracker : public HeapUsageTracker {
  public:
-  using ScopedThreadHeapUsage::DisableHeapTrackingForTesting;
-  using ScopedThreadHeapUsage::GetDispatchForTesting;
+  using HeapUsageTracker::DisableHeapTrackingForTesting;
+  using HeapUsageTracker::GetDispatchForTesting;
+  using HeapUsageTracker::EnsureTLSInitializedForTesting;
 };
 
 // A fixture class that allows testing the AllocatorDispatch associated with
-// the ScopedThreadHeapUsage class in isolation against a mocked underlying
+// the HeapUsageTracker class in isolation against a mocked underlying
 // heap implementation.
-class ScopedThreadHeapUsageTest : public testing::Test {
+class HeapUsageTrackerTest : public testing::Test {
  public:
   using AllocatorDispatch = base::allocator::AllocatorDispatch;
 
@@ -35,12 +36,12 @@ class ScopedThreadHeapUsageTest : public testing::Test {
     ZERO_SIZE_FUNCTION,
   };
 
-  ScopedThreadHeapUsageTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
+  HeapUsageTrackerTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
     EXPECT_EQ(nullptr, g_self);
     g_self = this;
   }
 
-  ~ScopedThreadHeapUsageTest() override {
+  ~HeapUsageTrackerTest() override {
     EXPECT_EQ(this, g_self);
     g_self = nullptr;
   }
@@ -50,10 +51,9 @@ class ScopedThreadHeapUsageTest : public testing::Test {
   }
 
   void SetUp() override {
-    ScopedThreadHeapUsage::Initialize();
+    TestingHeapUsageTracker::EnsureTLSInitializedForTesting();
 
-    dispatch_under_test_ =
-        TestingScopedThreadHeapUsage::GetDispatchForTesting();
+    dispatch_under_test_ = TestingHeapUsageTracker::GetDispatchForTesting();
     ASSERT_EQ(nullptr, dispatch_under_test_->next);
 
     dispatch_under_test_->next = &g_mock_dispatch;
@@ -186,35 +186,33 @@ class ScopedThreadHeapUsageTest : public testing::Test {
   AllocatorDispatch* dispatch_under_test_;
 
   static base::allocator::AllocatorDispatch g_mock_dispatch;
-  static ScopedThreadHeapUsageTest* g_self;
+  static HeapUsageTrackerTest* g_self;
 };
 
-const size_t ScopedThreadHeapUsageTest::kAllocationPadding = 23;
-
-ScopedThreadHeapUsageTest* ScopedThreadHeapUsageTest::g_self = nullptr;
-
-base::allocator::AllocatorDispatch ScopedThreadHeapUsageTest::g_mock_dispatch =
-    {
-        &ScopedThreadHeapUsageTest::OnAllocFn,  // alloc_function
-        &ScopedThreadHeapUsageTest::
-            OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
-        &ScopedThreadHeapUsageTest::OnAllocAlignedFn,  // alloc_aligned_function
-        &ScopedThreadHeapUsageTest::OnReallocFn,  // realloc_function
-        &ScopedThreadHeapUsageTest::OnFreeFn,  // free_function
-        &ScopedThreadHeapUsageTest::
-            OnGetSizeEstimateFn,  // get_size_estimate_function
-        nullptr,  // next
+const size_t HeapUsageTrackerTest::kAllocationPadding = 23;
+
+HeapUsageTrackerTest* HeapUsageTrackerTest::g_self = nullptr;
+
+base::allocator::AllocatorDispatch HeapUsageTrackerTest::g_mock_dispatch = {
+    &HeapUsageTrackerTest::OnAllocFn,  // alloc_function
+    &HeapUsageTrackerTest::
+        OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
+    &HeapUsageTrackerTest::OnAllocAlignedFn,  // alloc_aligned_function
+    &HeapUsageTrackerTest::OnReallocFn,  // realloc_function
+    &HeapUsageTrackerTest::OnFreeFn,  // free_function
+    &HeapUsageTrackerTest::OnGetSizeEstimateFn,  // get_size_estimate_function
+    nullptr,  // next
 };
 
 }  // namespace
 
-TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
+TEST_F(HeapUsageTrackerTest, SimpleUsageWithExactSizeFunction) {
   set_size_function_kind(EXACT_SIZE_FUNCTION);
 
-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();
   EXPECT_EQ(0U, u1.alloc_ops);
   EXPECT_EQ(0U, u1.alloc_bytes);
   EXPECT_EQ(0U, u1.alloc_overhead_bytes);
@@ -227,8 +225,8 @@ TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
   void* ptr = MockMalloc(kAllocSize);
   MockFree(ptr);
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u2 = usage_tracker.usage();
   EXPECT_EQ(1U, u2.alloc_ops);
   EXPECT_EQ(kAllocSize, u2.alloc_bytes);
   EXPECT_EQ(0U, u2.alloc_overhead_bytes);
@@ -238,13 +236,13 @@ TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithExactSizeFunction) {
   EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
 }
 
-TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
+TEST_F(HeapUsageTrackerTest, SimpleUsageWithPaddingSizeFunction) {
   set_size_function_kind(PADDING_SIZE_FUNCTION);
 
-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();
   EXPECT_EQ(0U, u1.alloc_ops);
   EXPECT_EQ(0U, u1.alloc_bytes);
   EXPECT_EQ(0U, u1.alloc_overhead_bytes);
@@ -257,8 +255,8 @@ TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
   void* ptr = MockMalloc(kAllocSize);
   MockFree(ptr);
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u2 = usage_tracker.usage();
   EXPECT_EQ(1U, u2.alloc_ops);
   EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes);
   EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes);
@@ -268,13 +266,13 @@ TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithPaddingSizeFunction) {
   EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes);
 }
 
-TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
+TEST_F(HeapUsageTrackerTest, SimpleUsageWithZeroSizeFunction) {
   set_size_function_kind(ZERO_SIZE_FUNCTION);
 
-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();
   EXPECT_EQ(0U, u1.alloc_ops);
   EXPECT_EQ(0U, u1.alloc_bytes);
   EXPECT_EQ(0U, u1.alloc_overhead_bytes);
@@ -286,8 +284,8 @@ TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
   void* ptr = MockMalloc(kAllocSize);
   MockFree(ptr);
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u2 = usage_tracker.usage();
 
   // With a get-size function that returns zero, there's no way to get the size
   // of an allocation that's being freed, hence the shim can't tally freed bytes
@@ -300,16 +298,16 @@ TEST_F(ScopedThreadHeapUsageTest, SimpleUsageWithZeroSizeFunction) {
   EXPECT_EQ(0U, u2.max_allocated_bytes);
 }
 
-TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
+TEST_F(HeapUsageTrackerTest, ReallocCorrectlyTallied) {
   const size_t kAllocSize = 237U;
 
   {
-    ScopedThreadHeapUsage scoped_usage;
+    HeapUsageTracker usage_tracker;
+    usage_tracker.Start();
 
     // Reallocating nullptr should count as a single alloc.
     void* ptr = MockRealloc(nullptr, kAllocSize);
-    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
-        ScopedThreadHeapUsage::CurrentUsage();
+    ThreadAllocatorUsage usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(1U, usage.alloc_ops);
     EXPECT_EQ(kAllocSize, usage.alloc_bytes);
     EXPECT_EQ(0U, usage.alloc_overhead_bytes);
@@ -321,7 +319,7 @@ TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
     // free.
     ptr = MockRealloc(ptr, 0U);
 
-    usage = ScopedThreadHeapUsage::CurrentUsage();
+    usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(1U, usage.alloc_ops);
     EXPECT_EQ(kAllocSize, usage.alloc_bytes);
     EXPECT_EQ(0U, usage.alloc_overhead_bytes);
@@ -333,14 +331,16 @@ TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
     // free the zero-size alloc in the latter case.
     if (ptr != nullptr)
       MockFree(ptr);
+
+    usage_tracker.Stop(false);
   }
 
   {
-    ScopedThreadHeapUsage scoped_usage;
+    HeapUsageTracker usage_tracker;
+    usage_tracker.Start();
 
     void* ptr = MockMalloc(kAllocSize);
-    ScopedThreadHeapUsage::ThreadAllocatorUsage usage =
-        ScopedThreadHeapUsage::CurrentUsage();
+    ThreadAllocatorUsage usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(1U, usage.alloc_ops);
 
     // Now try reallocating a valid pointer to a larger size, this should count
@@ -348,7 +348,7 @@ TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
     const size_t kLargerAllocSize = kAllocSize + 928U;
     ptr = MockRealloc(ptr, kLargerAllocSize);
 
-    usage = ScopedThreadHeapUsage::CurrentUsage();
+    usage = HeapUsageTracker::CurrentUsage();
     EXPECT_EQ(2U, usage.alloc_ops);
     EXPECT_EQ(kAllocSize + kLargerAllocSize, usage.alloc_bytes);
     EXPECT_EQ(0U, usage.alloc_overhead_bytes);
@@ -357,73 +357,98 @@ TEST_F(ScopedThreadHeapUsageTest, ReallocCorrectlyTallied) {
     EXPECT_EQ(kLargerAllocSize, usage.max_allocated_bytes);
 
     MockFree(ptr);
+
+    usage_tracker.Stop(false);
   }
 }
 
-TEST_F(ScopedThreadHeapUsageTest, NestedMaxWorks) {
-  ScopedThreadHeapUsage outer_scoped_usage;
+TEST_F(HeapUsageTrackerTest, NestedMaxWorks) {
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();
 
   const size_t kOuterAllocSize = 1029U;
   void* ptr = MockMalloc(kOuterAllocSize);
   MockFree(ptr);
   EXPECT_EQ(kOuterAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);
 
   {
-    ScopedThreadHeapUsage inner_scoped_usage;
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();
 
     const size_t kInnerAllocSize = 673U;
     ptr = MockMalloc(kInnerAllocSize);
     MockFree(ptr);
-    EXPECT_EQ(kInnerAllocSize,
-              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+    inner_usage_tracker.Stop(false);
+
+    EXPECT_EQ(kInnerAllocSize, inner_usage_tracker.usage().max_allocated_bytes);
   }
 
   // The greater, outer allocation size should have been restored.
   EXPECT_EQ(kOuterAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);
 
   const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
   {
-    ScopedThreadHeapUsage inner_scoped_usage;
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();
 
     ptr = MockMalloc(kLargerInnerAllocSize);
     MockFree(ptr);
+    inner_usage_tracker.Stop(false);
 
     EXPECT_EQ(kLargerInnerAllocSize,
-              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+              inner_usage_tracker.usage().max_allocated_bytes);
   }
 
   // The greater, inner allocation size should have been preserved.
   EXPECT_EQ(kLargerInnerAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);
 
   // Now try the case with an outstanding net alloc size when entering the
   // inner scope.
   void* outer_ptr = MockMalloc(kOuterAllocSize);
 
   EXPECT_EQ(kLargerInnerAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);
 
   {
-    ScopedThreadHeapUsage inner_scoped_usage;
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();
 
     ptr = MockMalloc(kLargerInnerAllocSize);
     MockFree(ptr);
+    inner_usage_tracker.Stop(false);
 
     EXPECT_EQ(kLargerInnerAllocSize,
-              ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+              inner_usage_tracker.usage().max_allocated_bytes);
   }
 
   // While the inner scope saw only the inner net outstanding allocation size,
   // the outer scope saw both outstanding at the same time.
   EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
-            ScopedThreadHeapUsage::CurrentUsage().max_allocated_bytes);
+            HeapUsageTracker::CurrentUsage().max_allocated_bytes);
 
   MockFree(outer_ptr);
+
+  // Test a net-negative scope.
+  ptr = MockMalloc(kLargerInnerAllocSize);
+  {
+    HeapUsageTracker inner_usage_tracker;
+    inner_usage_tracker.Start();
+
+    MockFree(ptr);
+
+    const size_t kInnerAllocSize = 1;
+    ptr = MockMalloc(kInnerAllocSize);
+
+    inner_usage_tracker.Stop(false);
+    // Since the scope is still net-negative, the max is clamped at zero.
+    EXPECT_EQ(0U, inner_usage_tracker.usage().max_allocated_bytes);
+  }
+
+  MockFree(ptr);
 }
 
-TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) {
+TEST_F(HeapUsageTrackerTest, AllShimFunctionsAreProvided) {
   const size_t kAllocSize = 100;
   void* alloc = MockMalloc(kAllocSize);
   size_t estimate = MockGetSizeEstimate(alloc);
@@ -447,25 +472,28 @@ TEST_F(ScopedThreadHeapUsageTest, AllShimFunctionsAreProvided) {
 
 #if BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)
 TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) {
-  ScopedThreadHeapUsage::Initialize();
-  ScopedThreadHeapUsage::EnableHeapTracking();
+  ASSERT_FALSE(HeapUsageTracker::IsHeapTrackingEnabled());
+
+  HeapUsageTracker::EnableHeapTracking();
+
+  ASSERT_TRUE(HeapUsageTracker::IsHeapTrackingEnabled());
 
   const size_t kAllocSize = 9993;
   // This test verifies that the scoped heap data is affected by malloc &
   // free only when the shim is available.
-  ScopedThreadHeapUsage scoped_usage;
+  HeapUsageTracker usage_tracker;
+  usage_tracker.Start();
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u1 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u1 = HeapUsageTracker::CurrentUsage();
 
   void* ptr = malloc(kAllocSize);
   // Prevent the compiler from optimizing out the malloc/free pair.
   ASSERT_NE(nullptr, ptr);
 
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u2 =
-      ScopedThreadHeapUsage::CurrentUsage();
+  ThreadAllocatorUsage u2 = HeapUsageTracker::CurrentUsage();
   free(ptr);
-  ScopedThreadHeapUsage::ThreadAllocatorUsage u3 =
-      ScopedThreadHeapUsage::CurrentUsage();
+
+  usage_tracker.Stop(false);
+  ThreadAllocatorUsage u3 = usage_tracker.usage();
   // Verify that at least one allocation operation was recorded, and that free
   // operations are at least monotonically growing.
@@ -479,7 +507,9 @@ TEST(ScopedThreadHeapShimTest, HooksIntoMallocWhenShimAvailable) {
   // Verify that at least the one free operation above was recorded.
   EXPECT_LE(u2.free_ops + 1, u3.free_ops);
 
-  TestingScopedThreadHeapUsage::DisableHeapTrackingForTesting();
+  TestingHeapUsageTracker::DisableHeapTrackingForTesting();
+
+  ASSERT_FALSE(HeapUsageTracker::IsHeapTrackingEnabled());
 }
 #endif  // BUILDFLAG(USE_EXPERIMENTAL_ALLOCATOR_SHIM)