// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review note (bjanakiraman1, 2015/09/01): add more comments on what exactly
// this test is doing. (Simon Que, 2015/09/01: Done.)

#include "components/metrics/leak_detector/leak_detector_impl.h"

#include <math.h>
#include <stdint.h>

#include <complex>
#include <new>
#include <set>
#include <vector>

#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace leak_detector {

21 namespace { | |
22 | |
23 // Makes working with complex numbers easier. | |
24 using Complex = std::complex<double>; | |
25 | |
26 // The mapping location in memory for a fictional executable. | |
27 const uintptr_t kMappingAddr = 0x800000; | |
28 const size_t kMappingSize = 0x200000; | |
29 | |
30 // Some call stacks within the fictional executable. | |
31 // * - outside the mapping range, e.g. JIT code. | |
32 const uintptr_t kRawStack0[] = { | |
33 0x800100, | |
34 0x900000, | |
35 0x880080, | |
36 0x810000, | |
37 }; | |
38 const uintptr_t kRawStack1[] = { | |
39 0x940000, | |
40 0x980000, | |
41 0xdeadbeef, // * | |
42 0x9a0000, | |
43 }; | |
44 const uintptr_t kRawStack2[] = { | |
45 0x8f0d00, | |
46 0x803abc, | |
47 0x9100a0, | |
48 }; | |
49 const uintptr_t kRawStack3[] = { | |
50 0x90fcde, | |
51 0x900df00d, // * | |
52 0x801000, | |
53 0x880088, | |
54 0xdeadcafe, // * | |
55 0x9f0000, | |
56 0x8700a0, | |
57 0x96037c, | |
58 }; | |
59 const uintptr_t kRawStack4[] = { | |
60 0x8c0000, | |
61 0x85d00d, | |
62 0x921337, | |
63 0x780000, // * | |
64 }; | |
65 const uintptr_t kRawStack5[] = { | |
66 0x990000, | |
67 0x888888, | |
68 0x830ac0, | |
69 0x8e0000, | |
70 0xc00000, // * | |
71 }; | |
72 | |
73 // This struct makes it easier to pass call stack info to | |
74 // LeakDetectorImplTest::Alloc(). | |
75 struct TestCallStack { | |
76 size_t depth; | |
77 const void* const* stack; | |
78 }; | |
79 | |
80 const TestCallStack kStack0 = | |
81 { arraysize(kRawStack0), reinterpret_cast<const void* const*>(kRawStack0) }; | |
82 const TestCallStack kStack1 = | |
83 { arraysize(kRawStack1), reinterpret_cast<const void* const*>(kRawStack1) }; | |
84 const TestCallStack kStack2 = | |
85 { arraysize(kRawStack2), reinterpret_cast<const void* const*>(kRawStack2) }; | |
86 const TestCallStack kStack3 = | |
87 { arraysize(kRawStack3), reinterpret_cast<const void* const*>(kRawStack3) }; | |
88 const TestCallStack kStack4 = | |
89 { arraysize(kRawStack4), reinterpret_cast<const void* const*>(kRawStack4) }; | |
90 const TestCallStack kStack5 = | |
91 { arraysize(kRawStack5), reinterpret_cast<const void* const*>(kRawStack5) }; | |
92 | |
93 } // namespace | |
95 class LeakDetectorImplTest : public ::testing::Test { | |
bjanakiraman1
2015/09/01 21:27:27
Add some comments on what methods / classes in the
Simon Que
2015/09/01 23:23:30
Done.
| |
96 public: | |
97 LeakDetectorImplTest() | |
98 : total_num_allocs_(0), | |
99 total_num_frees_(0), | |
100 total_alloced_size_(0), | |
101 next_analysis_total_alloced_size_(kAllocedSizeAnalysisInterval) {} | |
102 | |
103 void SetUp() override { | |
104 CustomAllocator::InitializeForUnitTest(); | |
105 | |
106 const int kSizeSuspicionThreshold = 4; | |
107 const int kCallStackSuspicionThreshold = 4; | |
108 detector_.reset( | |
109 new LeakDetectorImpl(kMappingAddr, | |
110 kMappingSize, | |
111 kSizeSuspicionThreshold, | |
112 kCallStackSuspicionThreshold, | |
113 true /* verbose */)); | |
114 } | |
115 | |
116 void TearDown() override { | |
117 // Free any memory that was leaked by test cases. Do not use Free() because | |
118 // that will try to modify |alloced_ptrs_|. | |
119 for (void* ptr : alloced_ptrs_) | |
120 delete [] reinterpret_cast<char*>(ptr); | |
121 alloced_ptrs_.clear(); | |
122 | |
123 // Must destroy all objects using CustomAllocator before shutting it down. | |
124 detector_.reset(); | |
125 stored_reports_.clear(); | |
126 | |
127 CustomAllocator::Shutdown(); | |
128 } | |
129 | |
130 protected: | |
131 // Alloc and free functions that automatically pass allocation info to | |
bjanakiraman1
2015/09/01 21:27:27
Alloc and free are emulating what tcmalloc hook fu
Simon Que
2015/09/01 23:23:30
Done.
| |
132 // |detector_|. | |
133 void* Alloc(size_t size, const TestCallStack& stack) { | |
134 void* ptr = new char[size]; | |
135 detector_->RecordAlloc(ptr, size, stack.depth, stack.stack); | |
136 | |
137 EXPECT_TRUE(alloced_ptrs_.find(ptr) == alloced_ptrs_.end()); | |
138 alloced_ptrs_.insert(ptr); | |
139 | |
140 ++total_num_allocs_; | |
141 total_alloced_size_ += size; | |
142 if (total_alloced_size_ >= next_analysis_total_alloced_size_) { | |
143 InternalVector<InternalLeakReport> reports; | |
144 detector_->TestForLeaks(false /* do_logging */, &reports); | |
145 for (const InternalLeakReport& report : reports) | |
146 stored_reports_.insert(report); | |
147 | |
148 // Determine when the next leak analysis should occur. | |
149 while (total_alloced_size_ >= next_analysis_total_alloced_size_) | |
150 next_analysis_total_alloced_size_ += kAllocedSizeAnalysisInterval; | |
151 } | |
152 return ptr; | |
153 } | |
154 | |
155 // See comment for Alloc(). | |
156 void Free(void* ptr) { | |
157 auto find_ptr_iter = alloced_ptrs_.find(ptr); | |
158 EXPECT_FALSE(find_ptr_iter == alloced_ptrs_.end()); | |
159 if (find_ptr_iter == alloced_ptrs_.end()) | |
160 return; | |
161 | |
162 alloced_ptrs_.erase(find_ptr_iter); | |
163 detector_->RecordFree(ptr); | |
164 ++total_num_frees_; | |
165 delete [] reinterpret_cast<char*>(ptr); | |
166 } | |
167 | |
168 // TEST CASE: Julia set fractal computation. Pass in has_leak=true to trigger | |
169 // the memory leak. | |
170 void JuliaSet(bool has_leak); | |
171 | |
172 // Instance of the class being tested. | |
173 scoped_ptr<LeakDetectorImpl> detector_; | |
174 | |
175 // Number of pointers allocated and freed so far. | |
176 size_t total_num_allocs_; | |
177 size_t total_num_frees_; | |
178 | |
179 // The interval between consecutive analyses (LeakDetectorImpl::TestForLeaks), | |
180 // in number of bytes allocated. e.g. if |kAllocedSizeAnalysisInterval| = 1024 | |
181 // then call TestForLeaks() every 1024 bytes of allocation that occur. | |
182 static const size_t kAllocedSizeAnalysisInterval = 8192; | |
183 | |
184 // Keeps count of total size allocated by Alloc(). | |
185 size_t total_alloced_size_; | |
186 | |
187 // The cumulative allocation size at which to trigger the TestForLeaks() call. | |
188 size_t next_analysis_total_alloced_size_; | |
189 | |
190 // Stores all pointers to memory allocated by by Alloc() so we can manually | |
191 // free the leaked pointers at the end. | |
bjanakiraman1
2015/09/01 21:27:27
Explain why this is present (duplicate bookkeeping
Simon Que
2015/09/01 23:23:31
Done.
| |
192 std::set<void*> alloced_ptrs_; | |
193 | |
194 // Store leak reports here. Use a set so duplicate reports are not stored. | |
195 std::set<InternalLeakReport> stored_reports_; | |
196 | |
197 private: | |
198 DISALLOW_COPY_AND_ASSIGN(LeakDetectorImplTest); | |
199 }; | |
201 void LeakDetectorImplTest::JuliaSet(bool has_leak) { | |
202 // The center region of the complex plane that is the basis for our Julia set | |
203 // computations is a circle of radius kRadius. | |
204 const static double kRadius = 2; | |
205 | |
206 // To track points in the complex plane, we will use a rectangular grid in the | |
207 // range defined by [-kRadius, kRadius] along both axes. | |
208 const static double kRangeMin = -kRadius; | |
209 const static double kRangeMax = kRadius; | |
210 | |
211 // Divide each axis into intervals, each of which is associated with a point | |
212 // on that axis at its center. | |
213 const static double kIntervalInverse = 64; | |
214 const static double kInterval = 1.0 / kIntervalInverse; | |
215 const static int kNumPoints = (kRangeMax - kRangeMin) / kInterval + 1; | |
216 | |
217 // Contains some useful functions for converting between points on the complex | |
218 // plane and in a gridlike data structure. | |
219 struct ComplexPlane { | |
220 static int GetXGridIndex(const Complex& value) { | |
221 return (value.real() + kInterval / 2 - kRangeMin) / kInterval; | |
222 } | |
223 static int GetYGridIndex(const Complex& value) { | |
224 return (value.imag() + kInterval / 2 - kRangeMin) / kInterval; | |
225 } | |
226 static Complex GetComplexForGridPoint(size_t x, size_t y) { | |
227 return Complex(kRangeMin + x * kInterval, kRangeMin + y * kInterval); | |
228 } | |
229 }; | |
230 | |
231 // Make sure the choice of interval doesn't result in any loss of precision. | |
232 ASSERT_EQ(1.0, kInterval * kIntervalInverse); | |
233 | |
234 // Create a grid for part of the complex plane, with each axis within the | |
235 // range [kRangeMin, kRangeMax]. | |
236 const size_t width = kNumPoints; | |
237 const size_t height = kNumPoints; | |
238 std::vector<Complex*> grid(width * height); | |
239 | |
240 // Initialize an object for each point within the inner circle |z| < kRadius. | |
241 for (size_t i = 0; i < width; ++i) { | |
242 for (size_t j = 0; j < height; ++j) { | |
243 Complex point = ComplexPlane::GetComplexForGridPoint(i, j); | |
244 // Do not store any values outside the inner circle. | |
245 if (abs(point) <= kRadius) { | |
246 grid[i + j * width] = | |
247 new(Alloc(sizeof(Complex), kStack0)) Complex(point); | |
248 } | |
249 } | |
250 } | |
251 EXPECT_LE(alloced_ptrs_.size(), width * height); | |
252 | |
253 // Create a new grid for the result of the transformation. | |
254 std::vector<Complex*> next_grid(width * height, nullptr); | |
255 | |
256 const int kNumIterations = 20; | |
257 for (int n = 0; n < kNumIterations; ++n) { | |
258 for (int i = 0; i < kNumPoints; ++i) { | |
259 for (int j = 0; j < kNumPoints; ++j) { | |
260 if (!grid[i + j * width]) | |
261 continue; | |
262 | |
263 // NOTE: The below code is NOT an efficient way to compute a Julia set. | |
264 // This is only to test the leak detector with some nontrivial code. | |
265 | |
266 // A simple polynomial function for generating Julia sets is: | |
267 // f(z) = z^n + c | |
268 | |
269 // But in this algorithm, we need the inverse: | |
270 // fInv(z) = (z - c)^(1/n) | |
271 | |
272 // Here, let's use n=5 and c=0.544. | |
273 const Complex c(0.544, 0); | |
274 const Complex& z = *grid[i + j * width]; | |
275 | |
276 // This is the principal root. | |
277 Complex root = pow(z - c, 0.2); | |
278 | |
279 // Discard the result if it is too far out from the center of the plane. | |
280 if (abs(root) > kRadius) | |
281 continue; | |
282 | |
283 // The below code only allocates Complex objects of the same size. The | |
284 // leak detector expects various sizes, so increase the allocation size | |
285 // by a different amount at each call site. | |
286 | |
287 // Nth root produces N results. | |
288 // Place all root results on |next_grid|. | |
289 | |
290 // First, place the principal root. | |
291 int next_i = ComplexPlane::GetXGridIndex(root); | |
292 int next_j = ComplexPlane::GetYGridIndex(root); | |
293 if (!next_grid[next_i + next_j * width]) { | |
294 next_grid[next_i + next_j * width] = | |
295 new(Alloc(sizeof(Complex) + 24, kStack1)) Complex(root); | |
296 } | |
297 | |
298 double magnitude = abs(root); | |
299 double angle = arg(root); | |
300 // To generate other roots, rotate the principal root by increments of | |
301 // 1/N of a full circle. | |
302 const double kAngleIncrement = M_PI * 2 / 5; | |
303 | |
304 // Second root. | |
305 root = std::polar(magnitude, angle + kAngleIncrement); | |
306 next_i = ComplexPlane::GetXGridIndex(root); | |
307 next_j = ComplexPlane::GetYGridIndex(root); | |
308 if (!next_grid[next_i + next_j * width]) { | |
309 next_grid[next_i + next_j * width] = | |
310 new(Alloc(sizeof(Complex) + 40, kStack2)) Complex(root); | |
311 } | |
312 | |
313 // In some of the sections below, |has_leak| will trigger a memory leak | |
314 // by overwriting the old Complex pointer value without freeing it. | |
315 // Due to the nature of complex roots being confined to equal sections | |
316 // of the complex plane, each new pointer will displace an old pointer | |
317 // that was allocated from the same line of code. | |
318 | |
319 // Third root. | |
320 root = std::polar(magnitude, angle + kAngleIncrement * 2); | |
321 next_i = ComplexPlane::GetXGridIndex(root); | |
322 next_j = ComplexPlane::GetYGridIndex(root); | |
323 // *** LEAK *** | |
324 if (has_leak || !next_grid[next_i + next_j * width]) { | |
325 next_grid[next_i + next_j * width] = | |
326 new(Alloc(sizeof(Complex) + 40, kStack3)) Complex(root); | |
327 } | |
328 | |
329 // Fourth root. | |
330 root = std::polar(magnitude, angle + kAngleIncrement * 3); | |
331 next_i = ComplexPlane::GetXGridIndex(root); | |
332 next_j = ComplexPlane::GetYGridIndex(root); | |
333 // *** LEAK *** | |
334 if (has_leak || !next_grid[next_i + next_j * width]) { | |
335 next_grid[next_i + next_j * width] = | |
336 new(Alloc(sizeof(Complex) + 52, kStack4)) Complex(root); | |
337 } | |
338 | |
339 // Fifth root. | |
340 root = std::polar(magnitude, angle + kAngleIncrement * 4); | |
341 next_i = ComplexPlane::GetXGridIndex(root); | |
342 next_j = ComplexPlane::GetYGridIndex(root); | |
343 if (!next_grid[next_i + next_j * width]) { | |
344 next_grid[next_i + next_j * width] = | |
345 new(Alloc(sizeof(Complex) + 52, kStack5)) Complex(root); | |
346 } | |
347 } | |
348 } | |
349 | |
350 // Clear the previously allocated points. | |
351 for (Complex*& point : grid) { | |
352 if (point) { | |
353 Free(point); | |
354 point = nullptr; | |
355 } | |
356 } | |
357 | |
358 // Now swap the two grids for the next iteration. | |
359 grid.swap(next_grid); | |
360 } | |
361 | |
362 // Clear the previously allocated points. | |
363 for (Complex*& point : grid) { | |
364 if (point) { | |
365 Free(point); | |
366 point = nullptr; | |
367 } | |
368 } | |
369 } | |
371 TEST_F(LeakDetectorImplTest, CheckTestFramework) { | |
372 EXPECT_EQ(0U, total_num_allocs_); | |
373 EXPECT_EQ(0U, total_num_frees_); | |
374 EXPECT_EQ(0U, alloced_ptrs_.size()); | |
375 | |
376 // Allocate some memory. | |
377 void* ptr0 = Alloc(12, kStack0); | |
378 void* ptr1 = Alloc(16, kStack0); | |
379 void* ptr2 = Alloc(24, kStack0); | |
380 EXPECT_EQ(3U, total_num_allocs_); | |
381 EXPECT_EQ(0U, total_num_frees_); | |
382 EXPECT_EQ(3U, alloced_ptrs_.size()); | |
383 | |
384 // Free one of the pointers. | |
385 Free(ptr1); | |
386 EXPECT_EQ(3U, total_num_allocs_); | |
387 EXPECT_EQ(1U, total_num_frees_); | |
388 EXPECT_EQ(2U, alloced_ptrs_.size()); | |
389 | |
390 // Allocate some more memory. | |
391 void* ptr3 = Alloc(72, kStack1); | |
392 void* ptr4 = Alloc(104, kStack1); | |
393 void* ptr5 = Alloc(96, kStack1); | |
394 void* ptr6 = Alloc(24, kStack1); | |
395 EXPECT_EQ(7U, total_num_allocs_); | |
396 EXPECT_EQ(1U, total_num_frees_); | |
397 EXPECT_EQ(6U, alloced_ptrs_.size()); | |
398 | |
399 // Free more pointers. | |
400 Free(ptr2); | |
401 Free(ptr4); | |
402 Free(ptr6); | |
403 EXPECT_EQ(7U, total_num_allocs_); | |
404 EXPECT_EQ(4U, total_num_frees_); | |
405 EXPECT_EQ(3U, alloced_ptrs_.size()); | |
406 | |
407 // Free remaining memory. | |
408 Free(ptr0); | |
409 Free(ptr3); | |
410 Free(ptr5); | |
411 EXPECT_EQ(7U, total_num_allocs_); | |
412 EXPECT_EQ(7U, total_num_frees_); | |
413 EXPECT_EQ(0U, alloced_ptrs_.size()); | |
414 } | |
416 TEST_F(LeakDetectorImplTest, JuliaSetNoLeak) { | |
417 JuliaSet(false); | |
Simon Que
2015/09/01 21:09:48
Comment on flag meaning.
Simon Que
2015/09/01 23:23:30
Done.
| |
418 | |
419 EXPECT_EQ(total_num_allocs_, total_num_frees_); | |
420 EXPECT_EQ(0U, alloced_ptrs_.size()); | |
421 ASSERT_EQ(0U, stored_reports_.size()); | |
422 } | |
424 TEST_F(LeakDetectorImplTest, JuliaSetWithLeak) { | |
425 JuliaSet(true); | |
426 | |
427 EXPECT_GT(total_num_allocs_, total_num_frees_); | |
428 EXPECT_GT(alloced_ptrs_.size(), 0U); | |
429 ASSERT_EQ(2U, stored_reports_.size()); | |
430 | |
bjanakiraman1
2015/09/01 21:27:27
Explain where KrawStack* comes from.
Simon Que
2015/09/01 23:23:30
Done. No longer directly referencing kRawStack* an
| |
431 // The reports should be stored in order of size. | |
432 const InternalLeakReport& report1 = *stored_reports_.begin(); | |
433 EXPECT_EQ(sizeof(Complex) + 40, report1.alloc_size_bytes); | |
434 EXPECT_EQ(kStack3.depth, report1.call_stack.size()); | |
435 for (size_t i = 0; i < kStack3.depth && i < report1.call_stack.size(); ++i) { | |
436 // The call stack's addresses may not fall within the mapping address. | |
437 // Those that don't will not be converted to mapping offsets. | |
438 if (kRawStack3[i] >= kMappingAddr && | |
439 kRawStack3[i] <= kMappingAddr + kMappingSize) { | |
440 EXPECT_EQ(kRawStack3[i] - kMappingAddr, report1.call_stack[i]); | |
441 } else { | |
442 EXPECT_EQ(kRawStack3[i], report1.call_stack[i]); | |
443 } | |
444 } | |
445 | |
446 const InternalLeakReport& report2 = *(++stored_reports_.begin()); | |
447 EXPECT_EQ(sizeof(Complex) + 52, report2.alloc_size_bytes); | |
448 EXPECT_EQ(kStack4.depth, report2.call_stack.size()); | |
449 for (size_t i = 0; i < kStack4.depth && i < report2.call_stack.size(); ++i) { | |
450 // The call stack's addresses may not fall within the mapping address. | |
451 // Those that don't will not be converted to mapping offsets. | |
452 if (kRawStack4[i] >= kMappingAddr && | |
453 kRawStack4[i] <= kMappingAddr + kMappingSize) { | |
454 EXPECT_EQ(kRawStack4[i] - kMappingAddr, report2.call_stack[i]); | |
455 } else { | |
456 EXPECT_EQ(kRawStack4[i], report2.call_stack[i]); | |
457 } | |
458 } | |
459 } | |

}  // namespace leak_detector