OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "base/profiler/native_stack_sampler.h" | 5 #include "base/profiler/native_stack_sampler.h" |
6 | 6 |
7 #include <objbase.h> | 7 #include <objbase.h> |
8 #include <windows.h> | 8 #include <windows.h> |
9 #include <stddef.h> | 9 #include <stddef.h> |
10 #include <winternl.h> | 10 #include <winternl.h> |
(...skipping 74 matching lines...) | |
85 void RewritePointerIfInOriginalStack(uintptr_t top, uintptr_t bottom, | 85 void RewritePointerIfInOriginalStack(uintptr_t top, uintptr_t bottom, |
86 void* stack_copy, const void** pointer) { | 86 void* stack_copy, const void** pointer) { |
87 const uintptr_t value = reinterpret_cast<uintptr_t>(*pointer); | 87 const uintptr_t value = reinterpret_cast<uintptr_t>(*pointer); |
88 if (value >= bottom && value < top) { | 88 if (value >= bottom && value < top) { |
89 *pointer = reinterpret_cast<const void*>( | 89 *pointer = reinterpret_cast<const void*>( |
90 static_cast<unsigned char*>(stack_copy) + (value - bottom)); | 90 static_cast<unsigned char*>(stack_copy) + (value - bottom)); |
91 } | 91 } |
92 } | 92 } |
93 #endif | 93 #endif |
94 | 94 |
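For orientation, here is a minimal sketch of how a helper like RewritePointerIfInOriginalStack above would be applied to the copied stack. The actual RewritePointersToStackMemory body is elided from this excerpt, so the slot-by-slot loop below and the omission of the |context| register rewriting are assumptions for illustration, not the real implementation.

// Illustrative only: walk each pointer-aligned slot of the copied stack and
// rewrite any value that points back into [bottom, top) of the original stack.
// The real routine also rewrites the non-volatile registers in |context|.
void RewriteStackCopySketch(uintptr_t top, uintptr_t bottom, void* stack_copy) {
  for (uintptr_t offset = 0; offset < top - bottom; offset += sizeof(void*)) {
    const void** slot = reinterpret_cast<const void**>(
        static_cast<unsigned char*>(stack_copy) + offset);
    RewritePointerIfInOriginalStack(top, bottom, stack_copy, slot);
  }
}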
95 void CopyMemoryFromStack(void* to, const void* from, size_t length) | |
96 NO_SANITIZE("address") { | |
97 #if defined(ADDRESS_SANITIZER) | |
98 // The following loop is an inlined version of memcpy. The code must be | |
99 // inlined to avoid instrumentation when building with ASan (AddressSanitizer), | |
100 // which otherwise reports false positives when the profiler walks the stack. | |
101 for (size_t pos = 0; pos < length; ++pos) | |
102 reinterpret_cast<char*>(to)[pos] = reinterpret_cast<const char*>(from)[pos]; | |
103 #else | |
104 std::memcpy(to, from, length); | |
105 #endif | |
106 } | |
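NO_SANITIZE comes from base/compiler_specific.h; a minimal sketch of the macro, assuming the usual Clang attribute spelling (not a quote of the actual header), is:

// Disables the named sanitizer for the annotated function only.
#if defined(__clang__)
#define NO_SANITIZE(what) __attribute__((no_sanitize(what)))
#else
#define NO_SANITIZE(what)
#endif

The copy loop is open-coded rather than calling memcpy because ASan intercepts the library memcpy itself; the attribute on CopyMemoryFromStack only suppresses instrumentation of code generated inside that function.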
107 | |
95 // Rewrites possible pointers to locations within the stack to point to the | 108 // Rewrites possible pointers to locations within the stack to point to the |
96 // corresponding locations in the copy, and rewrites the non-volatile registers | 109 // corresponding locations in the copy, and rewrites the non-volatile registers |
97 // in |context| likewise. This is necessary to handle stack frames with dynamic | 110 // in |context| likewise. This is necessary to handle stack frames with dynamic |
98 // stack allocation, where a pointer to the beginning of the dynamic allocation | 111 // stack allocation, where a pointer to the beginning of the dynamic allocation |
99 // area is stored on the stack and/or in a non-volatile register. | 112 // area is stored on the stack and/or in a non-volatile register. |
100 // | 113 // |
101 // Eager rewriting of anything that looks like a pointer to the stack, as done | 114 // Eager rewriting of anything that looks like a pointer to the stack, as done |
102 // in this function, does not adversely affect the stack unwinding. The only | 115 // in this function, does not adversely affect the stack unwinding. The only |
103 // other values on the stack the unwinding depends on are return addresses, | 116 // other values on the stack the unwinding depends on are return addresses, |
104 // which should not point within the stack memory. The rewriting is guaranteed | 117 // which should not point within the stack memory. The rewriting is guaranteed |
(...skipping 241 matching lines...) | |
346 | 359 |
347 if ((top - bottom) > stack_copy_buffer_size) | 360 if ((top - bottom) > stack_copy_buffer_size) |
348 return; | 361 return; |
349 | 362 |
350 // Dereferencing a pointer in the guard page in a thread that doesn't own | 363 // Dereferencing a pointer in the guard page in a thread that doesn't own |
351 // the stack results in a STATUS_GUARD_PAGE_VIOLATION exception and a crash. | 364 // the stack results in a STATUS_GUARD_PAGE_VIOLATION exception and a crash. |
352 // This occurs very rarely, but reliably over the population. | 365 // This occurs very rarely, but reliably over the population. |
353 if (PointsToGuardPage(bottom)) | 366 if (PointsToGuardPage(bottom)) |
354 return; | 367 return; |
355 | 368 |
356 std::memcpy(stack_copy_buffer, reinterpret_cast<const void*>(bottom), | 369 base::CopyMemoryFromStack( |
Mike Wittman 2016/11/17 19:32:50: nit: no need for base::
etienneb 2016/11/17 19:35:03: Done.
| |
357 top - bottom); | 370 stack_copy_buffer, reinterpret_cast<const void*>(bottom), top - bottom); |
358 } | 371 } |
359 | 372 |
360 if (test_delegate) | 373 if (test_delegate) |
361 test_delegate->OnPreStackWalk(); | 374 test_delegate->OnPreStackWalk(); |
362 | 375 |
363 RewritePointersToStackMemory(top, bottom, &thread_context, stack_copy_buffer); | 376 RewritePointersToStackMemory(top, bottom, &thread_context, stack_copy_buffer); |
364 | 377 |
365 RecordStack(&thread_context, stack); | 378 RecordStack(&thread_context, stack); |
366 } | 379 } |
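PointsToGuardPage, used in the guard-page check above, is outside this excerpt; a plausible sketch based on VirtualQuery (an assumption, not the actual implementation) would be:

// Returns true if |pointer| lies in a PAGE_GUARD region; touching such a page
// from another thread raises STATUS_GUARD_PAGE_VIOLATION, hence the early out.
bool PointsToGuardPageSketch(uintptr_t pointer) {
  MEMORY_BASIC_INFORMATION memory_info;
  const SIZE_T result = ::VirtualQuery(reinterpret_cast<LPCVOID>(pointer),
                                       &memory_info, sizeof(memory_info));
  return result != 0 && (memory_info.Protect & PAGE_GUARD) != 0;
}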
367 | 380 |
(...skipping 10 matching lines...) | |
378 std::vector<StackSamplingProfiler::Module>* modules) override; | 391 std::vector<StackSamplingProfiler::Module>* modules) override; |
379 void RecordStackSample(StackSamplingProfiler::Sample* sample) override; | 392 void RecordStackSample(StackSamplingProfiler::Sample* sample) override; |
380 void ProfileRecordingStopped() override; | 393 void ProfileRecordingStopped() override; |
381 | 394 |
382 private: | 395 private: |
383 enum { | 396 enum { |
384 // Intended to hold the largest stack used by Chrome. The default Win32 | 397 // Intended to hold the largest stack used by Chrome. The default Win32 |
385 // reserved stack size is 1 MB and Chrome Windows threads currently always | 398 // reserved stack size is 1 MB and Chrome Windows threads currently always |
386 // use the default, but this allows for expansion if it occurs. The size | 399 // use the default, but this allows for expansion if it occurs. The size |
387 // beyond the actual stack size consists of unallocated virtual memory pages | 400 // beyond the actual stack size consists of unallocated virtual memory pages |
388 // so carries little cost (just a bit of wated address space). | 401 // so carries little cost (just a bit of wasted address space). |
389 kStackCopyBufferSize = 2 * 1024 * 1024 | 402 kStackCopyBufferSize = 2 * 1024 * 1024 |
390 }; | 403 }; |
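The 2 MB figure is cheap because only the bytes actually copied are ever touched. Assuming the buffer is an ordinary heap allocation held for the sampler's lifetime (an assumption about how the member is declared, roughly as below), pages beyond the live stack size are never faulted in:

// Hypothetical member: allocated once and reused for every sample, so untouched
// tail pages cost only address space and commit charge, not physical memory.
const std::unique_ptr<unsigned char[]> stack_copy_buffer_{
    new unsigned char[kStackCopyBufferSize]};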
391 | 404 |
392 // Attempts to query the module filename, base address, and id for | 405 // Attempts to query the module filename, base address, and id for |
393 // |module_handle|, and store them in |module|. Returns true if it succeeded. | 406 // |module_handle|, and store them in |module|. Returns true if it succeeded. |
394 static bool GetModuleForHandle(HMODULE module_handle, | 407 static bool GetModuleForHandle(HMODULE module_handle, |
395 StackSamplingProfiler::Module* module); | 408 StackSamplingProfiler::Module* module); |
396 | 409 |
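A hedged sketch of what GetModuleForHandle might do; the id field is normally derived from the module's PE debug info (GUID plus age), which is omitted here, and the helper name is illustrative:

bool GetModuleForHandleSketch(HMODULE module_handle,
                              StackSamplingProfiler::Module* module) {
  wchar_t module_name[MAX_PATH];
  // GetModuleFileNameW returns 0 on failure.
  if (!::GetModuleFileNameW(module_handle, module_name, MAX_PATH))
    return false;
  module->filename = base::FilePath(module_name);
  module->base_address = reinterpret_cast<uintptr_t>(module_handle);
  module->id.clear();  // Real code fills this from the PE debug directory.
  return true;
}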
397 // Gets the index for the Module corresponding to |module_handle| in | 410 // Gets the index for the Module corresponding to |module_handle| in |
398 // |modules|, adding it if it's not already present. Returns | 411 // |modules|, adding it if it's not already present. Returns |
(...skipping 133 matching lines...) | |
532 | 545 |
533 if (thread_handle) { | 546 if (thread_handle) { |
534 return std::unique_ptr<NativeStackSampler>(new NativeStackSamplerWin( | 547 return std::unique_ptr<NativeStackSampler>(new NativeStackSamplerWin( |
535 win::ScopedHandle(thread_handle), test_delegate)); | 548 win::ScopedHandle(thread_handle), test_delegate)); |
536 } | 549 } |
537 #endif | 550 #endif |
538 return std::unique_ptr<NativeStackSampler>(); | 551 return std::unique_ptr<NativeStackSampler>(); |
539 } | 552 } |
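Caller-side usage for reference (names illustrative): the factory returns an empty pointer when native sampling isn't supported, so callers must check the result.

std::unique_ptr<NativeStackSampler> sampler =
    NativeStackSampler::Create(thread_id, /* test_delegate = */ nullptr);
if (!sampler) {
  // Native stack sampling unavailable on this platform/build configuration.
}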
540 | 553 |
541 } // namespace base | 554 } // namespace base |