Index: tools/android/heap_profiler/heap_profiler_unittest.cc |
diff --git a/tools/android/heap_profiler/heap_profiler_unittest.cc b/tools/android/heap_profiler/heap_profiler_unittest.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..8249c9e421e7c2f9fe1eb28a88a52483d9b7c4b7 |
--- /dev/null |
+++ b/tools/android/heap_profiler/heap_profiler_unittest.cc |
@@ -0,0 +1,456 @@ |
+// Copyright 2014 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
#include <assert.h>
#include <stdint.h>
#include <string.h>

#include <map>

#include "base/compiler_specific.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "tools/android/heap_profiler/heap_profiler.h"
+ |
+namespace { |
+ |
// Test fixture: initializes the heap profiler before each test and verifies
// on teardown that the profiler's internal VMA and stack-trace bookkeeping
// are mutually consistent.
class HeapProfilerTest : public testing::Test {
 public:
  virtual void SetUp() OVERRIDE { heap_profiler_init(&stats_); }

  virtual void TearDown() OVERRIDE {
    CheckVMAvsStacktaceConsistency();
    heap_profiler_cleanup();
  }

 protected:
  // Plain container for a synthetic stack trace fed to heap_profiler_alloc().
  struct StackTrace {
    uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];
    size_t depth;
  };

  // Builds a synthetic |depth|-frame stack trace with frames
  // base, base + 0x10, base + 0x20, ...
  StackTrace GenStackTrace(size_t depth, uintptr_t base) {
    assert(depth <= HEAP_PROFILER_MAX_DEPTH);
    StackTrace st;
    for (size_t i = 0; i < depth; ++i)
      st.frames[i] = base + i * 0x10UL;
    st.depth = depth;
    return st;
  }

  // Scans |stats_.allocs| for a VMA matching exactly [start, end]; fails the
  // test if none is found or if its stack trace / flags do not match.
  void ExpectVMA(uintptr_t start,
                 uintptr_t end,
                 const StackTrace& st,
                 uint32_t flags) {
    for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
      const VMA& vma = stats_.allocs[i];
      if (vma.start != start || vma.end != end)
        continue;
      // Check that the stack traces match.
      for (uint32_t j = 0; j < st.depth; ++j) {
        EXPECT_EQ(st.frames[j], vma.st->frames[j])
            << "Stacktrace not matching @ depth " << j;
      }
      EXPECT_EQ(flags, vma.flags);
      return;
    }

    FAIL() << "Could not find VMA [" << std::hex << start << "," << end << "]";
  }

  // Cross-checks the two internal tables: each live stack-trace entry must be
  // referenced by at least one live VMA, its |alloc_bytes| must equal the sum
  // of the sizes of the VMAs pointing at it, and the cached counters in
  // |stats_| must agree with what is actually stored in the tables.
  void CheckVMAvsStacktaceConsistency() {
    uint32_t allocs_seen = 0;
    uint32_t stack_traces_seen = 0;
    std::map<StacktraceEntry*, uintptr_t> stacktrace_bytes_seen_by_vma;

    for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
      VMA* vma = &stats_.allocs[i];
      // A zeroed [start, end] marks an unused slot in the VMA table.
      if (vma->start == 0 && vma->end == 0)
        continue;
      ++allocs_seen;
      stacktrace_bytes_seen_by_vma[vma->st] += vma->end - vma->start + 1;
    }

    for (uint32_t i = 0; i < stats_.max_stack_traces; ++i) {
      StacktraceEntry* st = &stats_.stack_traces[i];
      // alloc_bytes == 0 marks an unused stack-trace slot.
      if (st->alloc_bytes == 0)
        continue;
      ++stack_traces_seen;
      EXPECT_EQ(1, stacktrace_bytes_seen_by_vma.count(st));
      EXPECT_EQ(stacktrace_bytes_seen_by_vma[st], st->alloc_bytes);
    }

    EXPECT_EQ(allocs_seen, stats_.num_allocs);
    EXPECT_EQ(stack_traces_seen, stats_.num_stack_traces);
  }

  // Profiler state under test; inspected directly by the test cases below.
  HeapStats stats_;
};
+ |
+TEST_F(HeapProfilerTest, SimpleAlloc) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0); |
+ |
+ EXPECT_EQ(2, stats_.num_allocs); |
+ EXPECT_EQ(1, stats_.num_stack_traces); |
+ EXPECT_EQ(1024 + 2048, stats_.total_alloc_bytes); |
+ ExpectVMA(0x1000, 0x13ff, st1, 0); |
+ ExpectVMA(0x2000, 0x27ff, st1, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocMultipleStacks) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(4, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x3000, 32, st1.frames, st1.depth, 0); |
+ |
+ EXPECT_EQ(3, stats_.num_allocs); |
+ EXPECT_EQ(2, stats_.num_stack_traces); |
+ EXPECT_EQ(1024 + 2048 + 32, stats_.total_alloc_bytes); |
+ ExpectVMA(0x1000, 0x13ff, st1, 0); |
+ ExpectVMA(0x2000, 0x27ff, st2, 0); |
+ ExpectVMA(0x3000, 0x301f, st1, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, SimpleAllocAndFree) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_free((void*)0x1000, 1024, NULL); |
+ |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, Realloc) { |
+ StackTrace st1 = GenStackTrace(8, 0); |
+ heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndFreeMultipleStacks) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0); |
+ |
+ heap_profiler_free((void*)0x1000, 1024, NULL); |
+ heap_profiler_free((void*)0x3000, 32, NULL); |
+ |
+ EXPECT_EQ(2, stats_.num_allocs); |
+ EXPECT_EQ(2, stats_.num_stack_traces); |
+ EXPECT_EQ(2048 + 64, stats_.total_alloc_bytes); |
+ ExpectVMA(0x2000, 0x27ff, st1, 0); |
+ ExpectVMA(0x4000, 0x403f, st2, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndFreeAll) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0); |
+ |
+ heap_profiler_free((void*)0x1000, 1024, NULL); |
+ heap_profiler_free((void*)0x2000, 2048, NULL); |
+ heap_profiler_free((void*)0x3000, 32, NULL); |
+ heap_profiler_free((void*)0x4000, 64, NULL); |
+ |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndFreeWithZeroSize) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0); |
+ |
+ heap_profiler_free((void*)0x1000, 0, NULL); |
+ |
+ EXPECT_EQ(1, stats_.num_allocs); |
+ EXPECT_EQ(1, stats_.num_stack_traces); |
+ EXPECT_EQ(2048, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndFreeContiguous) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0); |
+ |
+ heap_profiler_free((void*)0x1000, 8192, NULL); |
+ |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, SparseAllocsOneLargeOuterFree) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ |
+ heap_profiler_alloc((void*)0x1010, 1, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x1400, 2, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x1600, 5, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x9000, 4096, st2.frames, st2.depth, 0); |
+ |
+ heap_profiler_free((void*)0x0800, 8192, NULL); |
+ EXPECT_EQ(1, stats_.num_allocs); |
+ EXPECT_EQ(1, stats_.num_stack_traces); |
+ EXPECT_EQ(4096, stats_.total_alloc_bytes); |
+ ExpectVMA(0x9000, 0x9fff, st2, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, SparseAllocsOneLargePartiallyOverlappingFree) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ StackTrace st3 = GenStackTrace(4, 0x2000); |
+ |
+ // This will be untouched. |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ |
+ // These will be partially freed in one shot (% 64 a bytes "margin"). |
+ heap_profiler_alloc((void*)0x2000, 128, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x2400, 128, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x2f80, 128, st2.frames, st2.depth, 0); |
+ |
+ // This will be untouched. |
+ heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0); |
+ |
+ heap_profiler_free((void*)0x2040, 4096 - 64 - 64, NULL); |
+ EXPECT_EQ(4, stats_.num_allocs); |
+ EXPECT_EQ(3, stats_.num_stack_traces); |
+ EXPECT_EQ(1024 + 64 + 64 + 1024, stats_.total_alloc_bytes); |
+ |
+ ExpectVMA(0x1000, 0x13ff, st1, 0); |
+ ExpectVMA(0x2000, 0x203f, st2, 0); |
+ ExpectVMA(0x2fc0, 0x2fff, st2, 0); |
+ ExpectVMA(0x3000, 0x33ff, st3, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndFreeScattered) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x3000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x4000, 4096, st1.frames, st1.depth, 0); |
+ |
+ heap_profiler_free((void*)0x800, 4096, NULL); |
+ EXPECT_EQ(4, stats_.num_allocs); |
+ EXPECT_EQ(2048 + 4096 + 4096 + 4096, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0x1800, 4096, NULL); |
+ EXPECT_EQ(3, stats_.num_allocs); |
+ EXPECT_EQ(2048 + 4096 + 4096, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0x2800, 4096, NULL); |
+ EXPECT_EQ(2, stats_.num_allocs); |
+ EXPECT_EQ(2048 + 4096, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0x3800, 4096, NULL); |
+ EXPECT_EQ(1, stats_.num_allocs); |
+ EXPECT_EQ(2048, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0x4800, 4096, NULL); |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndOverFreeContiguous) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0); |
+ |
+ heap_profiler_free((void*)0, 16834, NULL); |
+ |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocContiguousAndPunchHole) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0); |
+ |
+ // Punch a 4k hole in the middle of the two contiguous 4k allocs. |
+ heap_profiler_free((void*)0x1800, 4096, NULL); |
+ |
+ EXPECT_EQ(2, stats_.num_allocs); |
+ EXPECT_EQ(2, stats_.num_stack_traces); |
+ EXPECT_EQ(4096, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, AllocAndPartialFree) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(6, 0x1000); |
+ StackTrace st3 = GenStackTrace(7, 0x2000); |
+ StackTrace st4 = GenStackTrace(9, 0x3000); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x2000, 1024, st2.frames, st2.depth, 0); |
+ heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0); |
+ heap_profiler_alloc((void*)0x4000, 1024, st4.frames, st4.depth, 0); |
+ |
+ heap_profiler_free((void*)0x1000, 128, NULL); // Shrink left by 128B. |
+ heap_profiler_free((void*)0x2380, 128, NULL); // Shrink right by 128B. |
+ heap_profiler_free((void*)0x3100, 512, NULL); // 512B hole in the middle. |
+ heap_profiler_free((void*)0x4000, 512, NULL); // Free up the 4th vma... |
+ heap_profiler_free((void*)0x4200, 512, NULL); // ...but do it in two halves. |
+ |
+ EXPECT_EQ(4, stats_.num_allocs); // 1 + 2 + two sides around the hole 3. |
+ EXPECT_EQ(3, stats_.num_stack_traces); // st4 should be gone. |
+ EXPECT_EQ(896 + 896 + 512, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, RandomIndividualAllocAndFrees) { |
+ static const size_t NUM_ST = 128; |
+ static const size_t NUM_OPS = 1000; |
+ |
+ StackTrace st[NUM_ST]; |
+ for (uint32_t i = 0; i < NUM_ST; ++i) |
+ st[i] = GenStackTrace((i % 10) + 2, i * 128); |
+ |
+ for (size_t i = 0; i < NUM_OPS; ++i) { |
+ uintptr_t start = ((i + 7) << 8) & (0xffffff); |
+ size_t size = (start >> 16) & 0x0fff; |
+ if (i & 1) { |
+ StackTrace* s = &st[start % NUM_ST]; |
+ heap_profiler_alloc((void*)start, size, s->frames, s->depth, 0); |
+ } else { |
+ heap_profiler_free((void*)start, size, NULL); |
+ } |
+ CheckVMAvsStacktaceConsistency(); |
+ } |
+} |
+ |
+TEST_F(HeapProfilerTest, RandomAllocAndFreesBatches) { |
+ static const size_t NUM_ST = 128; |
+ static const size_t NUM_ALLOCS = 100; |
+ |
+ StackTrace st[NUM_ST]; |
+ for (size_t i = 0; i < NUM_ST; ++i) |
+ st[i] = GenStackTrace((i % 10) + 2, i * NUM_ST); |
+ |
+ for (int repeat = 0; repeat < 5; ++repeat) { |
+ for (size_t i = 0; i < NUM_ALLOCS; ++i) { |
+ StackTrace* s = &st[i % NUM_ST]; |
+ heap_profiler_alloc( |
+ (void*)(i * 4096), ((i + 1) * 32) % 4097, s->frames, s->depth, 0); |
+ CheckVMAvsStacktaceConsistency(); |
+ } |
+ |
+ for (size_t i = 0; i < NUM_ALLOCS; ++i) { |
+ heap_profiler_free((void*)(i * 1024), ((i + 1) * 64) % 16000, NULL); |
+ CheckVMAvsStacktaceConsistency(); |
+ } |
+ } |
+} |
+ |
+TEST_F(HeapProfilerTest, UnwindStackTooLargeShouldSaturate) { |
+ StackTrace st1 = GenStackTrace(HEAP_PROFILER_MAX_DEPTH, 0x0); |
+ uintptr_t many_frames[100] = {}; |
+ memcpy(many_frames, st1.frames, sizeof(uintptr_t) * st1.depth); |
+ heap_profiler_alloc((void*)0x1000, 1024, many_frames, 100, 0); |
+ ExpectVMA(0x1000, 0x13ff, st1, 0); |
+} |
+ |
+TEST_F(HeapProfilerTest, NoUnwindShouldNotCrashButNoop) { |
+ heap_profiler_alloc((void*)0x1000, 1024, NULL, 0, 0); |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, FreeNonExisting) { |
+ StackTrace st1 = GenStackTrace(5, 0x0); |
+ heap_profiler_free((void*)0x1000, 1024, NULL); |
+ heap_profiler_free((void*)0x1400, 1024, NULL); |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+ heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0); |
+ EXPECT_EQ(1, stats_.num_allocs); |
+ EXPECT_EQ(1024, stats_.total_alloc_bytes); |
+} |
+ |
+TEST_F(HeapProfilerTest, FlagsConsistency) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ uint32_t flags = 0; |
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 42); |
+ heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 142); |
+ |
+ ExpectVMA(0x1000, 0x1fff, st1, 42); |
+ ExpectVMA(0x2000, 0x2fff, st1, 142); |
+ |
+ // Punch a 4k hole in the middle of the two contiguous 4k allocs. |
+ heap_profiler_free((void*)0x1800, 4096, NULL); |
+ |
+ ExpectVMA(0x1000, 0x17ff, st1, 42); |
+ heap_profiler_free((void*)0x1000, 2048, &flags); |
+ EXPECT_EQ(42, flags); |
+ |
+ ExpectVMA(0x2800, 0x2fff, st1, 142); |
+ heap_profiler_free((void*)0x2800, 2048, &flags); |
+ EXPECT_EQ(142, flags); |
+} |
+ |
+TEST_F(HeapProfilerTest, BeConsistentOnOOM) { |
+ static const size_t NUM_ALLOCS = 512 * 1024; |
+ uintptr_t frames[1]; |
+ |
+ for (uintptr_t i = 0; i < NUM_ALLOCS; ++i) { |
+ frames[0] = i; |
+ heap_profiler_alloc((void*)(i * 32), 32, frames, 1, 0); |
+ } |
+ |
+ CheckVMAvsStacktaceConsistency(); |
+ // Check that we're saturating, otherwise this entire test is pointless. |
+ EXPECT_LT(stats_.num_allocs, NUM_ALLOCS); |
+ EXPECT_LT(stats_.num_stack_traces, NUM_ALLOCS); |
+ |
+ for (uintptr_t i = 0; i < NUM_ALLOCS; ++i) |
+ heap_profiler_free((void*)(i * 32), 32, NULL); |
+ |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+} |
+ |
+#ifdef __LP64__ |
+TEST_F(HeapProfilerTest, Test64Bit) { |
+ StackTrace st1 = GenStackTrace(8, 0x0); |
+ StackTrace st2 = GenStackTrace(10, 0x7fffffff70000000LL); |
+ StackTrace st3 = GenStackTrace(10, 0xffffffff70000000LL); |
+ heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0); |
+ heap_profiler_alloc((void*)0x7ffffffffffff000LL, 4096, st2.frames, st2.depth); |
+ heap_profiler_alloc((void*)0xfffffffffffff000LL, 4096, st3.frames, st3.depth); |
+ EXPECT_EQ(3, stats_.num_allocs); |
+ EXPECT_EQ(3, stats_.num_stack_traces); |
+ EXPECT_EQ(4096 + 4096 + 4096, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0x1000, 4096, NULL); |
+ EXPECT_EQ(2, stats_.num_allocs); |
+ EXPECT_EQ(2, stats_.num_stack_traces); |
+ EXPECT_EQ(4096 + 4096, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0x7ffffffffffff000LL, 4096, NULL); |
+ EXPECT_EQ(1, stats_.num_allocs); |
+ EXPECT_EQ(1, stats_.num_stack_traces); |
+ EXPECT_EQ(4096, stats_.total_alloc_bytes); |
+ |
+ heap_profiler_free((void*)0xfffffffffffff000LL, 4096, NULL); |
+ EXPECT_EQ(0, stats_.num_allocs); |
+ EXPECT_EQ(0, stats_.num_stack_traces); |
+ EXPECT_EQ(0, stats_.total_alloc_bytes); |
+} |
+#endif |
+ |
+} // namespace |