OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "base/profiler/native_stack_sampler.h" | 5 #include "base/profiler/native_stack_sampler.h" |
6 | 6 |
7 #include <objbase.h> | 7 #include <objbase.h> |
8 #include <windows.h> | 8 #include <windows.h> |
9 #include <stddef.h> | 9 #include <stddef.h> |
10 #include <winternl.h> | 10 #include <winternl.h> |
(...skipping 301 matching lines...) | |
312 // IMPORTANT NOTE: No allocations from the default heap may occur in the | 312 // IMPORTANT NOTE: No allocations from the default heap may occur in the |
313 // ScopedSuspendThread scope, including indirectly via use of DCHECK/CHECK or | 313 // ScopedSuspendThread scope, including indirectly via use of DCHECK/CHECK or |
314 // other logging statements. Otherwise this code can deadlock on heap locks in | 314 // other logging statements. Otherwise this code can deadlock on heap locks in |
315 // the default heap acquired by the target thread before it was suspended. | 315 // the default heap acquired by the target thread before it was suspended. |
316 void SuspendThreadAndRecordStack( | 316 void SuspendThreadAndRecordStack( |
317 HANDLE thread_handle, | 317 HANDLE thread_handle, |
318 const void* base_address, | 318 const void* base_address, |
319 void* stack_copy_buffer, | 319 void* stack_copy_buffer, |
320 size_t stack_copy_buffer_size, | 320 size_t stack_copy_buffer_size, |
321 std::vector<RecordedFrame>* stack, | 321 std::vector<RecordedFrame>* stack, |
322 NativeStackSamplerTestDelegate* test_delegate) { | 322 NativeStackSamplerTestDelegate* test_delegate) NO_SANITIZE("address") { |
323 DCHECK(stack->empty()); | 323 DCHECK(stack->empty()); |
324 | 324 |
325 CONTEXT thread_context = {0}; | 325 CONTEXT thread_context = {0}; |
326 thread_context.ContextFlags = CONTEXT_FULL; | 326 thread_context.ContextFlags = CONTEXT_FULL; |
327 // The stack bounds are saved to uintptr_ts for use outside | 327 // The stack bounds are saved to uintptr_ts for use outside |
328 // ScopedSuspendThread, as the thread's memory is not safe to dereference | 328 // ScopedSuspendThread, as the thread's memory is not safe to dereference |
329 // beyond that point. | 329 // beyond that point. |
330 const uintptr_t top = reinterpret_cast<uintptr_t>(base_address); | 330 const uintptr_t top = reinterpret_cast<uintptr_t>(base_address); |
331 uintptr_t bottom = 0u; | 331 uintptr_t bottom = 0u; |
332 | 332 |
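The comments above refer to a ScopedSuspendThread helper, which is instantiated in the elided lines below. As a rough illustration only (hypothetical name, not the elided Chromium code), a scoped-suspend wrapper over the Win32 SuspendThread/ResumeThread pair might look like the sketch below; the no-heap-allocation rule applies for as long as such an object is alive:

    // Hypothetical sketch of a scoped-suspend RAII wrapper, not the real
    // base implementation. Requires <windows.h>.
    class ScopedSuspendThreadSketch {
     public:
      explicit ScopedSuspendThreadSketch(HANDLE thread_handle)
          : thread_handle_(thread_handle),
            was_suspended_(::SuspendThread(thread_handle) !=
                           static_cast<DWORD>(-1)) {}

      // Resuming in the destructor guarantees the target thread is not left
      // suspended on any early return from the sampling code.
      ~ScopedSuspendThreadSketch() {
        if (was_suspended_)
          ::ResumeThread(thread_handle_);
      }

      bool was_successful() const { return was_suspended_; }

     private:
      HANDLE thread_handle_;
      bool was_suspended_;
    };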
(...skipping 13 matching lines...) | |
346 | 346 |
347 if ((top - bottom) > stack_copy_buffer_size) | 347 if ((top - bottom) > stack_copy_buffer_size) |
348 return; | 348 return; |
349 | 349 |
350 // Dereferencing a pointer in the guard page in a thread that doesn't own | 350 // Dereferencing a pointer in the guard page in a thread that doesn't own |
351 // the stack results in a STATUS_GUARD_PAGE_VIOLATION exception and a crash. | 351 // the stack results in a STATUS_GUARD_PAGE_VIOLATION exception and a crash. |
352 // This occurs very rarely, but reliably over the population. | 352 // This occurs very rarely, but reliably over the population. |
353 if (PointsToGuardPage(bottom)) | 353 if (PointsToGuardPage(bottom)) |
354 return; | 354 return; |
355 | 355 |
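PointsToGuardPage() is declared in the elided portion of this file. A minimal sketch of such a check, assuming the usual Win32 approach of querying the page protection with VirtualQuery (hypothetical name; not necessarily the Chromium implementation):

    // Sketch: returns true if the page containing |pointer| has the
    // PAGE_GUARD protection flag set. Requires <windows.h>.
    bool PointsToGuardPageSketch(uintptr_t pointer) {
      MEMORY_BASIC_INFORMATION memory_info;
      SIZE_T result = ::VirtualQuery(reinterpret_cast<LPCVOID>(pointer),
                                     &memory_info, sizeof(memory_info));
      return result != 0 && (memory_info.Protect & PAGE_GUARD) != 0;
    }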
356 std::memcpy(stack_copy_buffer, reinterpret_cast<const void*>(bottom), | 356 // The following loop is an inlined version of memcpy. The code must be |
Mark Mentovai
2016/11/15 16:52:04
Can we keep this if !defined(ADDRESS_SANITIZER)? S
etienneb
2016/11/15 16:55:41
I'm not against it. Typically we try to avoid ifde
| |
 357 top - bottom); | 357 // inlined to avoid instrumentation when using ASAN (AddressSanitizer). The |
 358 // stack profiler otherwise generates false positives when walking the stack. | |
359 for (size_t pos = 0; pos < top - bottom; ++pos) | |
360 reinterpret_cast<char*>(stack_copy_buffer)[pos] = | |
361 reinterpret_cast<const char*>(bottom)[pos]; | |
358 } | 362 } |
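For the review thread above: a sketch of the !defined(ADDRESS_SANITIZER) variant Mark Mentovai suggests, which would keep std::memcpy in non-ASAN builds and restrict the hand-rolled loop to ASAN builds (a sketch of the suggestion, not the patch as written):

    #if defined(ADDRESS_SANITIZER)
      // Byte-by-byte copy avoids ASAN's instrumented memcpy, which flags
      // reads from another thread's stack as errors.
      for (size_t pos = 0; pos < top - bottom; ++pos)
        reinterpret_cast<char*>(stack_copy_buffer)[pos] =
            reinterpret_cast<const char*>(bottom)[pos];
    #else
      std::memcpy(stack_copy_buffer, reinterpret_cast<const void*>(bottom),
                  top - bottom);
    #endif

Either form behaves the same at runtime outside ASAN builds; the tradeoff, as the thread notes, is the extra ifdef versus taking the slower loop everywhere.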
359 | 363 |
360 if (test_delegate) | 364 if (test_delegate) |
361 test_delegate->OnPreStackWalk(); | 365 test_delegate->OnPreStackWalk(); |
362 | 366 |
363 RewritePointersToStackMemory(top, bottom, &thread_context, stack_copy_buffer); | 367 RewritePointersToStackMemory(top, bottom, &thread_context, stack_copy_buffer); |
364 | 368 |
365 RecordStack(&thread_context, stack); | 369 RecordStack(&thread_context, stack); |
366 } | 370 } |
367 | 371 |
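On the NO_SANITIZE("address") annotation added to the declaration at new line 322: in Chromium this macro is provided by base/compiler_specific.h, and is typically defined roughly as below (paraphrased sketch; consult the header for the exact definition):

    // Expands to Clang's no_sanitize attribute so the annotated function is
    // not instrumented by the named sanitizer; a no-op on other compilers.
    #if defined(__clang__)
    #define NO_SANITIZE(what) __attribute__((no_sanitize(what)))
    #else
    #define NO_SANITIZE(what)
    #endif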
(...skipping 10 matching lines...) | |
378 std::vector<StackSamplingProfiler::Module>* modules) override; | 382 std::vector<StackSamplingProfiler::Module>* modules) override; |
379 void RecordStackSample(StackSamplingProfiler::Sample* sample) override; | 383 void RecordStackSample(StackSamplingProfiler::Sample* sample) override; |
380 void ProfileRecordingStopped() override; | 384 void ProfileRecordingStopped() override; |
381 | 385 |
382 private: | 386 private: |
383 enum { | 387 enum { |
384 // Intended to hold the largest stack used by Chrome. The default Win32 | 388 // Intended to hold the largest stack used by Chrome. The default Win32 |
385 // reserved stack size is 1 MB and Chrome Windows threads currently always | 389 // reserved stack size is 1 MB and Chrome Windows threads currently always |
386 // use the default, but this allows for expansion if it occurs. The size | 390 // use the default, but this allows for expansion if it occurs. The size |
387 // beyond the actual stack size consists of unallocated virtual memory pages | 391 // beyond the actual stack size consists of unallocated virtual memory pages |
388 // so carries little cost (just a bit of wated address space). | 392 // so carries little cost (just a bit of wasted address space). |
389 kStackCopyBufferSize = 2 * 1024 * 1024 | 393 kStackCopyBufferSize = 2 * 1024 * 1024 |
390 }; | 394 }; |
391 | 395 |
392 // Attempts to query the module filename, base address, and id for | 396 // Attempts to query the module filename, base address, and id for |
393 // |module_handle|, and store them in |module|. Returns true if it succeeded. | 397 // |module_handle|, and store them in |module|. Returns true if it succeeded. |
394 static bool GetModuleForHandle(HMODULE module_handle, | 398 static bool GetModuleForHandle(HMODULE module_handle, |
395 StackSamplingProfiler::Module* module); | 399 StackSamplingProfiler::Module* module); |
396 | 400 |
397 // Gets the index for the Module corresponding to |module_handle| in | 401 // Gets the index for the Module corresponding to |module_handle| in |
398 // |modules|, adding it if it's not already present. Returns | 402 // |modules|, adding it if it's not already present. Returns |
(...skipping 133 matching lines...) | |
532 | 536 |
533 if (thread_handle) { | 537 if (thread_handle) { |
534 return std::unique_ptr<NativeStackSampler>(new NativeStackSamplerWin( | 538 return std::unique_ptr<NativeStackSampler>(new NativeStackSamplerWin( |
535 win::ScopedHandle(thread_handle), test_delegate)); | 539 win::ScopedHandle(thread_handle), test_delegate)); |
536 } | 540 } |
537 #endif | 541 #endif |
538 return std::unique_ptr<NativeStackSampler>(); | 542 return std::unique_ptr<NativeStackSampler>(); |
539 } | 543 } |
540 | 544 |
541 } // namespace base | 545 } // namespace base |