Chromium Code Reviews

Side by Side Diff: tools/android/heap_profiler/heap_profiler_unittest.cc

Issue 323893002: [Android] Introduce libheap_profiler for memory profiling. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix gyp Created 6 years, 5 months ago
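For context while reading the diff, the interface this test exercises appears to be roughly the following. This is only a sketch reconstructed from the calls and field accesses in the test itself; the parameter names, field layout, exact integer types, and the HEAP_PROFILER_MAX_DEPTH value are assumptions, not the contents of the actual tools/android/heap_profiler/heap_profiler.h under review.

  // Sketch (assumed, not verbatim) of the heap_profiler.h interface as
  // inferred from this unit test.
  #include <stddef.h>
  #include <stdint.h>

  #define HEAP_PROFILER_MAX_DEPTH 12  // Actual value unknown; assumed here.

  typedef struct {
    uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];  // Call-site addresses.
    size_t alloc_bytes;  // Bytes currently attributed to this stack trace.
  } StacktraceEntry;

  typedef struct {
    uintptr_t start;      // First byte of the tracked region.
    uintptr_t end;        // Last byte, inclusive (ExpectVMA(0x1000, 0x13ff, ...)).
    StacktraceEntry* st;  // Stack trace that created the region.
    uint32_t flags;       // Client flags, reported back by heap_profiler_free().
  } VMA;

  typedef struct {
    uint32_t num_allocs;
    uint32_t max_allocs;
    uint32_t num_stack_traces;
    uint32_t max_stack_traces;
    size_t total_alloc_bytes;
    VMA* allocs;                    // Table of max_allocs entries; unused slots are zeroed.
    StacktraceEntry* stack_traces;  // Table of max_stack_traces entries.
  } HeapStats;

  void heap_profiler_init(HeapStats* stats);
  void heap_profiler_alloc(void* addr, size_t size, uintptr_t* frames,
                           uint32_t depth, uint32_t flags);
  void heap_profiler_free(void* addr, size_t size, uint32_t* old_flags);
  void heap_profiler_cleanup(void);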
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <stdint.h>
6 #include <string.h>
7 #include <map>
8
9 #include "base/compiler_specific.h"
10 #include "testing/gtest/include/gtest/gtest.h"
11 #include "tools/android/heap_profiler/heap_profiler.h"
12
13 namespace {
14
15 class HeapProfilerTest : public testing::Test {
16 public:
17 virtual void SetUp() OVERRIDE { heap_profiler_init(&stats_); }
18
19 virtual void TearDown() OVERRIDE {
20 CheckVMAvsStacktraceConsistency();
21 heap_profiler_cleanup();
22 }
23
24 protected:
25 struct StackTrace {
26 uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];
27 size_t depth;
28 };
29
30 StackTrace GenStackTrace(size_t depth, uintptr_t base) {
31 assert(depth <= HEAP_PROFILER_MAX_DEPTH);
32 StackTrace st;
33 for (size_t i = 0; i < depth; ++i)
34 st.frames[i] = base + i * 0x10UL;
35 st.depth = depth;
36 return st;
37 }
38
39 void ExpectVMA(uintptr_t start,
40 uintptr_t end,
41 const StackTrace& st,
42 uint32_t flags) {
43 for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
44 const VMA& vma = stats_.allocs[i];
45 if (vma.start != start || vma.end != end)
46 continue;
47 // Check that the stack trace matches.
48 for (uint32_t j = 0; j < st.depth; ++j) {
49 EXPECT_EQ(st.frames[j], vma.st->frames[j])
50 << "Stacktrace not matching @ depth " << j;
51 }
52 EXPECT_EQ(flags, vma.flags);
53 return;
54 }
55
56 FAIL() << "Could not find VMA [" << std::hex << start << "," << end << "]";
57 }
58
59 void CheckVMAvsStacktraceConsistency() {
60 uint32_t allocs_seen = 0;
61 uint32_t stack_traces_seen = 0;
62 std::map<StacktraceEntry*, uintptr_t> stacktrace_bytes_seen_by_vma;
63
64 for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
65 VMA* vma = &stats_.allocs[i];
66 if (vma->start == 0 && vma->end == 0)
67 continue;
68 ++allocs_seen;
69 stacktrace_bytes_seen_by_vma[vma->st] += vma->end - vma->start + 1;
70 }
71
72 for (uint32_t i = 0; i < stats_.max_stack_traces; ++i) {
73 StacktraceEntry* st = &stats_.stack_traces[i];
74 if (st->alloc_bytes == 0)
75 continue;
76 ++stack_traces_seen;
77 EXPECT_EQ(1, stacktrace_bytes_seen_by_vma.count(st));
78 EXPECT_EQ(stacktrace_bytes_seen_by_vma[st], st->alloc_bytes);
79 }
80
81 EXPECT_EQ(allocs_seen, stats_.num_allocs);
82 EXPECT_EQ(stack_traces_seen, stats_.num_stack_traces);
83 }
84
85 HeapStats stats_;
86 };
87
88 TEST_F(HeapProfilerTest, SimpleAlloc) {
89 StackTrace st1 = GenStackTrace(8, 0x0);
90 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
91 heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
92
93 EXPECT_EQ(2, stats_.num_allocs);
94 EXPECT_EQ(1, stats_.num_stack_traces);
95 EXPECT_EQ(1024 + 2048, stats_.total_alloc_bytes);
96 ExpectVMA(0x1000, 0x13ff, st1, 0);
97 ExpectVMA(0x2000, 0x27ff, st1, 0);
98 }
99
100 TEST_F(HeapProfilerTest, AllocMultipleStacks) {
101 StackTrace st1 = GenStackTrace(8, 0x0);
102 StackTrace st2 = GenStackTrace(4, 0x1000);
103 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
104 heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0);
105 heap_profiler_alloc((void*)0x3000, 32, st1.frames, st1.depth, 0);
106
107 EXPECT_EQ(3, stats_.num_allocs);
108 EXPECT_EQ(2, stats_.num_stack_traces);
109 EXPECT_EQ(1024 + 2048 + 32, stats_.total_alloc_bytes);
110 ExpectVMA(0x1000, 0x13ff, st1, 0);
111 ExpectVMA(0x2000, 0x27ff, st2, 0);
112 ExpectVMA(0x3000, 0x301f, st1, 0);
113 }
114
115 TEST_F(HeapProfilerTest, SimpleAllocAndFree) {
116 StackTrace st1 = GenStackTrace(8, 0x0);
117 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
118 heap_profiler_free((void*)0x1000, 1024, NULL);
119
120 EXPECT_EQ(0, stats_.num_allocs);
121 EXPECT_EQ(0, stats_.num_stack_traces);
122 EXPECT_EQ(0, stats_.total_alloc_bytes);
123 }
124
125 TEST_F(HeapProfilerTest, Realloc) {
126 StackTrace st1 = GenStackTrace(8, 0);
127 heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0);
128 heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0);
129 }
130
131 TEST_F(HeapProfilerTest, AllocAndFreeMultipleStacks) {
132 StackTrace st1 = GenStackTrace(8, 0x0);
133 StackTrace st2 = GenStackTrace(6, 0x1000);
134 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
135 heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
136 heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0);
137 heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0);
138
139 heap_profiler_free((void*)0x1000, 1024, NULL);
140 heap_profiler_free((void*)0x3000, 32, NULL);
141
142 EXPECT_EQ(2, stats_.num_allocs);
143 EXPECT_EQ(2, stats_.num_stack_traces);
144 EXPECT_EQ(2048 + 64, stats_.total_alloc_bytes);
145 ExpectVMA(0x2000, 0x27ff, st1, 0);
146 ExpectVMA(0x4000, 0x403f, st2, 0);
147 }
148
149 TEST_F(HeapProfilerTest, AllocAndFreeAll) {
150 StackTrace st1 = GenStackTrace(8, 0x0);
151 StackTrace st2 = GenStackTrace(6, 0x1000);
152 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
153 heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
154 heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0);
155 heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0);
156
157 heap_profiler_free((void*)0x1000, 1024, NULL);
158 heap_profiler_free((void*)0x2000, 2048, NULL);
159 heap_profiler_free((void*)0x3000, 32, NULL);
160 heap_profiler_free((void*)0x4000, 64, NULL);
161
162 EXPECT_EQ(0, stats_.num_allocs);
163 EXPECT_EQ(0, stats_.num_stack_traces);
164 EXPECT_EQ(0, stats_.total_alloc_bytes);
165 }
166
167 TEST_F(HeapProfilerTest, AllocAndFreeWithZeroSize) {
168 StackTrace st1 = GenStackTrace(8, 0x0);
169 StackTrace st2 = GenStackTrace(6, 0x1000);
170 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
171 heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0);
172
173 heap_profiler_free((void*)0x1000, 0, NULL);
174
175 EXPECT_EQ(1, stats_.num_allocs);
176 EXPECT_EQ(1, stats_.num_stack_traces);
177 EXPECT_EQ(2048, stats_.total_alloc_bytes);
178 }
179
180 TEST_F(HeapProfilerTest, AllocAndFreeContiguous) {
181 StackTrace st1 = GenStackTrace(8, 0x0);
182 StackTrace st2 = GenStackTrace(6, 0x1000);
183 heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
184 heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);
185
186 heap_profiler_free((void*)0x1000, 8192, NULL);
187
188 EXPECT_EQ(0, stats_.num_allocs);
189 EXPECT_EQ(0, stats_.num_stack_traces);
190 EXPECT_EQ(0, stats_.total_alloc_bytes);
191 }
192
193 TEST_F(HeapProfilerTest, SparseAllocsOneLargeOuterFree) {
194 StackTrace st1 = GenStackTrace(8, 0x0);
195 StackTrace st2 = GenStackTrace(6, 0x1000);
196
197 heap_profiler_alloc((void*)0x1010, 1, st1.frames, st1.depth, 0);
198 heap_profiler_alloc((void*)0x1400, 2, st2.frames, st2.depth, 0);
199 heap_profiler_alloc((void*)0x1600, 5, st1.frames, st1.depth, 0);
200 heap_profiler_alloc((void*)0x9000, 4096, st2.frames, st2.depth, 0);
201
202 heap_profiler_free((void*)0x0800, 8192, NULL);
203 EXPECT_EQ(1, stats_.num_allocs);
204 EXPECT_EQ(1, stats_.num_stack_traces);
205 EXPECT_EQ(4096, stats_.total_alloc_bytes);
206 ExpectVMA(0x9000, 0x9fff, st2, 0);
207 }
208
209 TEST_F(HeapProfilerTest, SparseAllocsOneLargePartiallyOverlappingFree) {
210 StackTrace st1 = GenStackTrace(8, 0x0);
211 StackTrace st2 = GenStackTrace(6, 0x1000);
212 StackTrace st3 = GenStackTrace(4, 0x2000);
213
214 // This will be untouched.
215 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
216
217 // These will be partially freed in one shot, leaving a 64-byte margin on each side.
218 heap_profiler_alloc((void*)0x2000, 128, st2.frames, st2.depth, 0);
219 heap_profiler_alloc((void*)0x2400, 128, st2.frames, st2.depth, 0);
220 heap_profiler_alloc((void*)0x2f80, 128, st2.frames, st2.depth, 0);
221
222 // This will be untouched.
223 heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0);
224
225 heap_profiler_free((void*)0x2040, 4096 - 64 - 64, NULL);
226 EXPECT_EQ(4, stats_.num_allocs);
227 EXPECT_EQ(3, stats_.num_stack_traces);
228 EXPECT_EQ(1024 + 64 + 64 + 1024, stats_.total_alloc_bytes);
229
230 ExpectVMA(0x1000, 0x13ff, st1, 0);
231 ExpectVMA(0x2000, 0x203f, st2, 0);
232 ExpectVMA(0x2fc0, 0x2fff, st2, 0);
233 ExpectVMA(0x3000, 0x33ff, st3, 0);
234 }
235
236 TEST_F(HeapProfilerTest, AllocAndFreeScattered) {
237 StackTrace st1 = GenStackTrace(8, 0x0);
238 heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
239 heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 0);
240 heap_profiler_alloc((void*)0x3000, 4096, st1.frames, st1.depth, 0);
241 heap_profiler_alloc((void*)0x4000, 4096, st1.frames, st1.depth, 0);
242
243 heap_profiler_free((void*)0x800, 4096, NULL);
244 EXPECT_EQ(4, stats_.num_allocs);
245 EXPECT_EQ(2048 + 4096 + 4096 + 4096, stats_.total_alloc_bytes);
246
247 heap_profiler_free((void*)0x1800, 4096, NULL);
248 EXPECT_EQ(3, stats_.num_allocs);
249 EXPECT_EQ(2048 + 4096 + 4096, stats_.total_alloc_bytes);
250
251 heap_profiler_free((void*)0x2800, 4096, NULL);
252 EXPECT_EQ(2, stats_.num_allocs);
253 EXPECT_EQ(2048 + 4096, stats_.total_alloc_bytes);
254
255 heap_profiler_free((void*)0x3800, 4096, NULL);
256 EXPECT_EQ(1, stats_.num_allocs);
257 EXPECT_EQ(2048, stats_.total_alloc_bytes);
258
259 heap_profiler_free((void*)0x4800, 4096, NULL);
260 EXPECT_EQ(0, stats_.num_allocs);
261 EXPECT_EQ(0, stats_.num_stack_traces);
262 EXPECT_EQ(0, stats_.total_alloc_bytes);
263 }
264
265 TEST_F(HeapProfilerTest, AllocAndOverFreeContiguous) {
266 StackTrace st1 = GenStackTrace(8, 0x0);
267 StackTrace st2 = GenStackTrace(6, 0x1000);
268 heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
269 heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);
270
271 heap_profiler_free((void*)0, 16384, NULL);
272
273 EXPECT_EQ(0, stats_.num_allocs);
274 EXPECT_EQ(0, stats_.num_stack_traces);
275 EXPECT_EQ(0, stats_.total_alloc_bytes);
276 }
277
278 TEST_F(HeapProfilerTest, AllocContiguousAndPunchHole) {
279 StackTrace st1 = GenStackTrace(8, 0x0);
280 StackTrace st2 = GenStackTrace(6, 0x1000);
281 heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
282 heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);
283
284 // Punch a 4k hole in the middle of the two contiguous 4k allocs.
285 heap_profiler_free((void*)0x1800, 4096, NULL);
286
287 EXPECT_EQ(2, stats_.num_allocs);
288 EXPECT_EQ(2, stats_.num_stack_traces);
289 EXPECT_EQ(4096, stats_.total_alloc_bytes);
290 }
291
292 TEST_F(HeapProfilerTest, AllocAndPartialFree) {
293 StackTrace st1 = GenStackTrace(8, 0x0);
294 StackTrace st2 = GenStackTrace(6, 0x1000);
295 StackTrace st3 = GenStackTrace(7, 0x2000);
296 StackTrace st4 = GenStackTrace(9, 0x3000);
297 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
298 heap_profiler_alloc((void*)0x2000, 1024, st2.frames, st2.depth, 0);
299 heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0);
300 heap_profiler_alloc((void*)0x4000, 1024, st4.frames, st4.depth, 0);
301
302 heap_profiler_free((void*)0x1000, 128, NULL); // Shrink left by 128B.
303 heap_profiler_free((void*)0x2380, 128, NULL); // Shrink right by 128B.
304 heap_profiler_free((void*)0x3100, 512, NULL); // 512B hole in the middle.
305 heap_profiler_free((void*)0x4000, 512, NULL); // Free up the 4th vma...
306 heap_profiler_free((void*)0x4200, 512, NULL); // ...but do it in two halves.
307
308 EXPECT_EQ(4, stats_.num_allocs); // Allocs 1 and 2, plus the two fragments of 3 around the hole.
309 EXPECT_EQ(3, stats_.num_stack_traces); // st4 should be gone.
310 EXPECT_EQ(896 + 896 + 512, stats_.total_alloc_bytes);
311 }
312
313 TEST_F(HeapProfilerTest, RandomIndividualAllocAndFrees) {
314 static const size_t NUM_ST = 128;
315 static const size_t NUM_OPS = 1000;
316
317 StackTrace st[NUM_ST];
318 for (uint32_t i = 0; i < NUM_ST; ++i)
319 st[i] = GenStackTrace((i % 10) + 2, i * 128);
320
321 for (size_t i = 0; i < NUM_OPS; ++i) {
322 uintptr_t start = ((i + 7) << 8) & (0xffffff);
323 size_t size = (start >> 16) & 0x0fff;
324 if (i & 1) {
325 StackTrace* s = &st[start % NUM_ST];
326 heap_profiler_alloc((void*)start, size, s->frames, s->depth, 0);
327 } else {
328 heap_profiler_free((void*)start, size, NULL);
329 }
330 CheckVMAvsStacktraceConsistency();
331 }
332 }
333
334 TEST_F(HeapProfilerTest, RandomAllocAndFreesBatches) {
335 static const size_t NUM_ST = 128;
336 static const size_t NUM_ALLOCS = 100;
337
338 StackTrace st[NUM_ST];
339 for (size_t i = 0; i < NUM_ST; ++i)
340 st[i] = GenStackTrace((i % 10) + 2, i * NUM_ST);
341
342 for (int repeat = 0; repeat < 5; ++repeat) {
343 for (size_t i = 0; i < NUM_ALLOCS; ++i) {
344 StackTrace* s = &st[i % NUM_ST];
345 heap_profiler_alloc(
346 (void*)(i * 4096), ((i + 1) * 32) % 4097, s->frames, s->depth, 0);
347 CheckVMAvsStacktraceConsistency();
348 }
349
350 for (size_t i = 0; i < NUM_ALLOCS; ++i) {
351 heap_profiler_free((void*)(i * 1024), ((i + 1) * 64) % 16000, NULL);
352 CheckVMAvsStacktraceConsistency();
353 }
354 }
355 }
356
357 TEST_F(HeapProfilerTest, UnwindStackTooLargeShouldSaturate) {
358 StackTrace st1 = GenStackTrace(HEAP_PROFILER_MAX_DEPTH, 0x0);
359 uintptr_t many_frames[100] = {};
360 memcpy(many_frames, st1.frames, sizeof(uintptr_t) * st1.depth);
361 heap_profiler_alloc((void*)0x1000, 1024, many_frames, 100, 0);
362 ExpectVMA(0x1000, 0x13ff, st1, 0);
363 }
364
365 TEST_F(HeapProfilerTest, NoUnwindShouldNotCrashButNoop) {
366 heap_profiler_alloc((void*)0x1000, 1024, NULL, 0, 0);
367 EXPECT_EQ(0, stats_.num_allocs);
368 EXPECT_EQ(0, stats_.num_stack_traces);
369 EXPECT_EQ(0, stats_.total_alloc_bytes);
370 }
371
372 TEST_F(HeapProfilerTest, FreeNonExisting) {
373 StackTrace st1 = GenStackTrace(5, 0x0);
374 heap_profiler_free((void*)0x1000, 1024, NULL);
375 heap_profiler_free((void*)0x1400, 1024, NULL);
376 EXPECT_EQ(0, stats_.num_allocs);
377 EXPECT_EQ(0, stats_.num_stack_traces);
378 EXPECT_EQ(0, stats_.total_alloc_bytes);
379 heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
380 EXPECT_EQ(1, stats_.num_allocs);
381 EXPECT_EQ(1024, stats_.total_alloc_bytes);
382 }
383
384 TEST_F(HeapProfilerTest, FlagsConsistency) {
385 StackTrace st1 = GenStackTrace(8, 0x0);
386 uint32_t flags = 0;
387 heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 42);
388 heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 142);
389
390 ExpectVMA(0x1000, 0x1fff, st1, 42);
391 ExpectVMA(0x2000, 0x2fff, st1, 142);
392
393 // Punch a 4k hole in the middle of the two contiguous 4k allocs.
394 heap_profiler_free((void*)0x1800, 4096, NULL);
395
396 ExpectVMA(0x1000, 0x17ff, st1, 42);
397 heap_profiler_free((void*)0x1000, 2048, &flags);
398 EXPECT_EQ(42, flags);
399
400 ExpectVMA(0x2800, 0x2fff, st1, 142);
401 heap_profiler_free((void*)0x2800, 2048, &flags);
402 EXPECT_EQ(142, flags);
403 }
404
405 TEST_F(HeapProfilerTest, BeConsistentOnOOM) {
406 static const size_t NUM_ALLOCS = 512 * 1024;
407 uintptr_t frames[1];
408
409 for (uintptr_t i = 0; i < NUM_ALLOCS; ++i) {
410 frames[0] = i;
411 heap_profiler_alloc((void*)(i * 32), 32, frames, 1, 0);
412 }
413
414 CheckVMAvsStacktraceConsistency();
415 // Check that we're saturating, otherwise this entire test is pointless.
416 EXPECT_LT(stats_.num_allocs, NUM_ALLOCS);
417 EXPECT_LT(stats_.num_stack_traces, NUM_ALLOCS);
418
419 for (uintptr_t i = 0; i < NUM_ALLOCS; ++i)
420 heap_profiler_free((void*)(i * 32), 32, NULL);
421
422 EXPECT_EQ(0, stats_.num_allocs);
423 EXPECT_EQ(0, stats_.total_alloc_bytes);
424 EXPECT_EQ(0, stats_.num_stack_traces);
425 }
426
427 #ifdef __LP64__
428 TEST_F(HeapProfilerTest, Test64Bit) {
429 StackTrace st1 = GenStackTrace(8, 0x0);
430 StackTrace st2 = GenStackTrace(10, 0x7fffffff70000000LL);
431 StackTrace st3 = GenStackTrace(10, 0xffffffff70000000LL);
432 heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
433 heap_profiler_alloc((void*)0x7ffffffffffff000LL, 4096, st2.frames, st2.depth, 0);
434 heap_profiler_alloc((void*)0xfffffffffffff000LL, 4096, st3.frames, st3.depth, 0);
435 EXPECT_EQ(3, stats_.num_allocs);
436 EXPECT_EQ(3, stats_.num_stack_traces);
437 EXPECT_EQ(4096 + 4096 + 4096, stats_.total_alloc_bytes);
438
439 heap_profiler_free((void*)0x1000, 4096, NULL);
440 EXPECT_EQ(2, stats_.num_allocs);
441 EXPECT_EQ(2, stats_.num_stack_traces);
442 EXPECT_EQ(4096 + 4096, stats_.total_alloc_bytes);
443
444 heap_profiler_free((void*)0x7ffffffffffff000LL, 4096, NULL);
445 EXPECT_EQ(1, stats_.num_allocs);
446 EXPECT_EQ(1, stats_.num_stack_traces);
447 EXPECT_EQ(4096, stats_.total_alloc_bytes);
448
449 heap_profiler_free((void*)0xfffffffffffff000LL, 4096, NULL);
450 EXPECT_EQ(0, stats_.num_allocs);
451 EXPECT_EQ(0, stats_.num_stack_traces);
452 EXPECT_EQ(0, stats_.total_alloc_bytes);
453 }
454 #endif
455
456 } // namespace
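The tests above drive heap_profiler_alloc() and heap_profiler_free() directly with synthetic addresses and stack traces. In a real client these hooks would presumably be fed from allocator or mmap/munmap wrappers, which is also why heap_profiler_free() takes a size and tolerates partial, contiguous, and overlapping ranges. A minimal, hypothetical sketch of such glue follows; capture_backtrace(), profiled_mmap(), and profiled_munmap() are made-up names for illustration and are not part of this CL.

  // Hypothetical glue code, for illustration only.
  #include <stddef.h>
  #include <stdint.h>
  #include <sys/mman.h>

  #include "tools/android/heap_profiler/heap_profiler.h"

  // Assumed helper that unwinds the current call stack; not provided by this CL.
  size_t capture_backtrace(uintptr_t* frames, size_t max_depth);

  void* profiled_mmap(void* addr, size_t length, int prot, int flags,
                      int fd, off_t offset) {
    void* ptr = mmap(addr, length, prot, flags, fd, offset);
    if (ptr != MAP_FAILED) {
      uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];
      size_t depth = capture_backtrace(frames, HEAP_PROFILER_MAX_DEPTH);
      heap_profiler_alloc(ptr, length, frames, depth, 0 /* client flags */);
    }
    return ptr;
  }

  int profiled_munmap(void* addr, size_t length) {
    // Partial unmaps are fine: the profiler shrinks or splits the tracked VMA,
    // exactly as exercised by AllocAndPartialFree above.
    heap_profiler_free(addr, length, NULL /* stored flags not needed */);
    return munmap(addr, length);
  }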