OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "base/profiler/native_stack_sampler.h" | 5 #include "base/profiler/native_stack_sampler.h" |
6 | 6 |
7 #include <objbase.h> | 7 #include <objbase.h> |
8 #include <windows.h> | 8 #include <windows.h> |
9 #include <stddef.h> | 9 #include <stddef.h> |
10 #include <winternl.h> | 10 #include <winternl.h> |
(...skipping 301 matching lines...) | |
312 // IMPORTANT NOTE: No allocations from the default heap may occur in the | 312 // IMPORTANT NOTE: No allocations from the default heap may occur in the |
313 // ScopedSuspendThread scope, including indirectly via use of DCHECK/CHECK or | 313 // ScopedSuspendThread scope, including indirectly via use of DCHECK/CHECK or |
314 // other logging statements. Otherwise this code can deadlock on heap locks in | 314 // other logging statements. Otherwise this code can deadlock on heap locks in |
315 // the default heap acquired by the target thread before it was suspended. | 315 // the default heap acquired by the target thread before it was suspended. |
316 void SuspendThreadAndRecordStack( | 316 void SuspendThreadAndRecordStack( |
317 HANDLE thread_handle, | 317 HANDLE thread_handle, |
318 const void* base_address, | 318 const void* base_address, |
319 void* stack_copy_buffer, | 319 void* stack_copy_buffer, |
320 size_t stack_copy_buffer_size, | 320 size_t stack_copy_buffer_size, |
321 std::vector<RecordedFrame>* stack, | 321 std::vector<RecordedFrame>* stack, |
322 NativeStackSamplerTestDelegate* test_delegate) { | 322 NativeStackSamplerTestDelegate* test_delegate) NO_SANITIZE("address") { |
323 DCHECK(stack->empty()); | 323 DCHECK(stack->empty()); |
324 | 324 |
325 CONTEXT thread_context = {0}; | 325 CONTEXT thread_context = {0}; |
326 thread_context.ContextFlags = CONTEXT_FULL; | 326 thread_context.ContextFlags = CONTEXT_FULL; |
327 // The stack bounds are saved to uintptr_ts for use outside | 327 // The stack bounds are saved to uintptr_ts for use outside |
328 // ScopedSuspendThread, as the thread's memory is not safe to dereference | 328 // ScopedSuspendThread, as the thread's memory is not safe to dereference |
329 // beyond that point. | 329 // beyond that point. |
330 const uintptr_t top = reinterpret_cast<uintptr_t>(base_address); | 330 const uintptr_t top = reinterpret_cast<uintptr_t>(base_address); |
331 uintptr_t bottom = 0u; | 331 uintptr_t bottom = 0u; |
332 | 332 |
(...skipping 13 matching lines...) | |
346 | 346 |
347 if ((top - bottom) > stack_copy_buffer_size) | 347 if ((top - bottom) > stack_copy_buffer_size) |
348 return; | 348 return; |
349 | 349 |
350 // Dereferencing a pointer in the guard page in a thread that doesn't own | 350 // Dereferencing a pointer in the guard page in a thread that doesn't own |
351 // the stack results in a STATUS_GUARD_PAGE_VIOLATION exception and a crash. | 351 // the stack results in a STATUS_GUARD_PAGE_VIOLATION exception and a crash. |
352 // This occurs very rarely, but reliably over the population. | 352 // This occurs very rarely, but reliably over the population. |
353 if (PointsToGuardPage(bottom)) | 353 if (PointsToGuardPage(bottom)) |
354 return; | 354 return; |
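The guard-page check above can be implemented with VirtualQuery. A minimal sketch, assuming PointsToGuardPage has this shape; the real helper elided from this excerpt may differ:

bool PointsToGuardPage(uintptr_t stack_pointer) {
  // VirtualQuery reports the protection of the page containing the address.
  // Touching a PAGE_GUARD page from a thread that doesn't own the stack
  // raises STATUS_GUARD_PAGE_VIOLATION, so the caller bails out first.
  MEMORY_BASIC_INFORMATION memory_info;
  SIZE_T result = ::VirtualQuery(reinterpret_cast<LPCVOID>(stack_pointer),
                                 &memory_info, sizeof(memory_info));
  return result != 0 && (memory_info.Protect & PAGE_GUARD) != 0;
}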
355 | 355 |
356 #if defined(ADDRESS_SANITIZER) | |
Mike Wittman
2016/11/17 01:57:54
nit: can we encapsulate this ifdef in its own function?
etienneb
2016/11/17 19:29:54
Done.
| |
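A sketch of the encapsulation suggested above, moving the copy loop and the sanitizer ifdef into a dedicated helper; the name CopyMemoryFromStack is an assumption here, and the attribute placement follows the one used on SuspendThreadAndRecordStack above:

// Copies |count| bytes from |from| to |dest|, bypassing ASan instrumentation.
void CopyMemoryFromStack(void* dest, const void* from, size_t count)
    NO_SANITIZE("address") {
#if defined(ADDRESS_SANITIZER)
  // Inlined byte copy: an instrumented memcpy would flag reads of another
  // thread's stack as false positives.
  for (size_t pos = 0; pos < count; ++pos)
    reinterpret_cast<char*>(dest)[pos] =
        reinterpret_cast<const char*>(from)[pos];
#else
  std::memcpy(dest, from, count);
#endif
}

The call site below would then reduce to a single line: CopyMemoryFromStack(stack_copy_buffer, reinterpret_cast<const void*>(bottom), top - bottom).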
357 // The following loop is an inlined version of memcpy. The code must be |
358 // inlined to avoid instrumentation when using ASan (AddressSanitizer); |
359 // otherwise the stack profiler reports false positives when walking the stack. |
360 for (size_t pos = 0; pos < top - bottom; ++pos) | |
361 reinterpret_cast<char*>(stack_copy_buffer)[pos] = | |
362 reinterpret_cast<const char*>(bottom)[pos]; | |
363 #else | |
356 std::memcpy(stack_copy_buffer, reinterpret_cast<const void*>(bottom), | 364 std::memcpy(stack_copy_buffer, reinterpret_cast<const void*>(bottom), |
357 top - bottom); | 365 top - bottom); |
366 #endif | |
358 } | 367 } |
359 | 368 |
360 if (test_delegate) | 369 if (test_delegate) |
361 test_delegate->OnPreStackWalk(); | 370 test_delegate->OnPreStackWalk(); |
362 | 371 |
363 RewritePointersToStackMemory(top, bottom, &thread_context, stack_copy_buffer); | 372 RewritePointersToStackMemory(top, bottom, &thread_context, stack_copy_buffer); |
364 | 373 |
365 RecordStack(&thread_context, stack); | 374 RecordStack(&thread_context, stack); |
366 } | 375 } |
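For reference, the ScopedSuspendThread used inside this function (in the lines elided above) is essentially an RAII wrapper over SuspendThread/ResumeThread. A minimal sketch, with the caveat that the real class may carry extra bookkeeping:

class ScopedSuspendThread {
 public:
  explicit ScopedSuspendThread(HANDLE thread_handle)
      : thread_handle_(thread_handle),
        was_successful_(::SuspendThread(thread_handle) !=
                        static_cast<DWORD>(-1)) {}

  // While suspended, the target thread may hold the default heap lock, which
  // is why no default-heap allocation (including DCHECK/CHECK logging) may
  // happen before this destructor runs; see the IMPORTANT NOTE above.
  ~ScopedSuspendThread() {
    if (was_successful_)
      ::ResumeThread(thread_handle_);
  }

  bool was_successful() const { return was_successful_; }

 private:
  HANDLE thread_handle_;
  bool was_successful_;
};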
367 | 376 |
(...skipping 10 matching lines...) | |
378 std::vector<StackSamplingProfiler::Module>* modules) override; | 387 std::vector<StackSamplingProfiler::Module>* modules) override; |
379 void RecordStackSample(StackSamplingProfiler::Sample* sample) override; | 388 void RecordStackSample(StackSamplingProfiler::Sample* sample) override; |
380 void ProfileRecordingStopped() override; | 389 void ProfileRecordingStopped() override; |
381 | 390 |
382 private: | 391 private: |
383 enum { | 392 enum { |
384 // Intended to hold the largest stack used by Chrome. The default Win32 | 393 // Intended to hold the largest stack used by Chrome. The default Win32 |
385 // reserved stack size is 1 MB and Chrome Windows threads currently always | 394 // reserved stack size is 1 MB and Chrome Windows threads currently always |
386 // use the default, but this allows for expansion if it occurs. The size | 395 // use the default, but this allows for expansion if it occurs. The size |
387 // beyond the actual stack size consists of unallocated virtual memory pages | 396 // beyond the actual stack size consists of unallocated virtual memory pages |
388 // so carries little cost (just a bit of wated address space). | 397 // so carries little cost (just a bit of wasted address space). |
389 kStackCopyBufferSize = 2 * 1024 * 1024 | 398 kStackCopyBufferSize = 2 * 1024 * 1024 |
390 }; | 399 }; |
391 | 400 |
392 // Attempts to query the module filename, base address, and id for | 401 // Attempts to query the module filename, base address, and id for |
393 // |module_handle|, and store them in |module|. Returns true if it succeeded. | 402 // |module_handle|, and store them in |module|. Returns true if it succeeded. |
394 static bool GetModuleForHandle(HMODULE module_handle, | 403 static bool GetModuleForHandle(HMODULE module_handle, |
395 StackSamplingProfiler::Module* module); | 404 StackSamplingProfiler::Module* module); |
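A plausible implementation sketch for GetModuleForHandle, relying on the fact that an HMODULE is the module's base address on Windows; GetBuildIDForModule is a hypothetical helper assumed for the id:

bool NativeStackSamplerWin::GetModuleForHandle(
    HMODULE module_handle,
    StackSamplingProfiler::Module* module) {
  wchar_t module_name[MAX_PATH];
  // GetModuleFileNameW returns 0 on failure.
  if (::GetModuleFileNameW(module_handle, module_name, MAX_PATH) == 0)
    return false;
  module->filename = base::FilePath(module_name);
  // An HMODULE is the load (base) address of the module.
  module->base_address = reinterpret_cast<uintptr_t>(module_handle);
  // GetBuildIDForModule is assumed to derive a stable identifier from the
  // module's debug info (e.g. PDB GUID and age).
  module->id = GetBuildIDForModule(module_handle);
  return !module->id.empty();
}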
396 | 405 |
397 // Gets the index for the Module corresponding to |module_handle| in | 406 // Gets the index for the Module corresponding to |module_handle| in |
398 // |modules|, adding it if it's not already present. Returns | 407 // |modules|, adding it if it's not already present. Returns |
(...skipping 133 matching lines...) | |
532 | 541 |
533 if (thread_handle) { | 542 if (thread_handle) { |
534 return std::unique_ptr<NativeStackSampler>(new NativeStackSamplerWin( | 543 return std::unique_ptr<NativeStackSampler>(new NativeStackSamplerWin( |
535 win::ScopedHandle(thread_handle), test_delegate)); | 544 win::ScopedHandle(thread_handle), test_delegate)); |
536 } | 545 } |
537 #endif | 546 #endif |
538 return std::unique_ptr<NativeStackSampler>(); | 547 return std::unique_ptr<NativeStackSampler>(); |
539 } | 548 } |
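A hedged usage sketch of the factory above; the exact Create() signature is not shown in this excerpt, so the thread-id parameter and the null delegate are assumptions:

// Hypothetical call site: fall back gracefully when sampling is unsupported.
std::unique_ptr<NativeStackSampler> sampler =
    NativeStackSampler::Create(thread_id, /* test_delegate = */ nullptr);
if (!sampler)
  return;  // No native sampler available for this platform/configuration.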
540 | 549 |
541 } // namespace base | 550 } // namespace base |