Index: base/profiler/native_stack_sampler_win.cc

diff --git a/base/profiler/native_stack_sampler_win.cc b/base/profiler/native_stack_sampler_win.cc
index 063374f19d5b1edfd7d758d98e4047a1b8d67bd6..283abdd4c6fbeedb11040c912eb4bcb9f2c18a1b 100644
--- a/base/profiler/native_stack_sampler_win.cc
+++ b/base/profiler/native_stack_sampler_win.cc
@@ -319,7 +319,7 @@ void SuspendThreadAndRecordStack(
     void* stack_copy_buffer,
     size_t stack_copy_buffer_size,
     std::vector<RecordedFrame>* stack,
-    NativeStackSamplerTestDelegate* test_delegate) {
+    NativeStackSamplerTestDelegate* test_delegate) NO_SANITIZE("address") {
   DCHECK(stack->empty());
 
   CONTEXT thread_context = {0};
@@ -353,8 +353,12 @@ void SuspendThreadAndRecordStack(
     if (PointsToGuardPage(bottom))
       return;
 
-    std::memcpy(stack_copy_buffer, reinterpret_cast<const void*>(bottom),
-                top - bottom);
+    // The following loop is an inlined version of memcpy. The code must be
+    // inlined to avoid ASAN (AddressSanitizer) instrumentation, as the stack
+    // profiler otherwise generates false positives when walking the stack.
+    for (size_t pos = 0; pos < top - bottom; ++pos)
+      reinterpret_cast<char*>(stack_copy_buffer)[pos] =
+          reinterpret_cast<const char*>(bottom)[pos];
   }
 
   if (test_delegate)
@@ -385,7 +389,7 @@ class NativeStackSamplerWin : public NativeStackSampler {
     // reserved stack size is 1 MB and Chrome Windows threads currently always
     // use the default, but this allows for expansion if it occurs. The size
     // beyond the actual stack size consists of unallocated virtual memory pages
-    // so carries little cost (just a bit of wated address space).
+    // so carries little cost (just a bit of wasted address space).
     kStackCopyBufferSize = 2 * 1024 * 1024
   };
 
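
A note on the NO_SANITIZE("address") annotation added in the first hunk: NO_SANITIZE is Chromium's macro wrapper (from base/compiler_specific.h) around Clang's no_sanitize function attribute, which tells the compiler not to emit AddressSanitizer checks for accesses compiled inside the annotated function. The snippet below is a minimal sketch of how such a macro is typically defined and applied; it is an illustration under that assumption, not the exact Chromium definition, and ReadByteUnchecked is a hypothetical example function.

    // Sketch only: a NO_SANITIZE-style macro. On Clang it expands to the
    // no_sanitize attribute; on compilers without it, it expands to nothing.
    #if defined(__clang__)
    #define NO_SANITIZE(what) __attribute__((no_sanitize(what)))
    #else
    #define NO_SANITIZE(what)
    #endif

    // No ASAN checks are emitted for loads/stores compiled in this function,
    // so it can deliberately read memory ASAN would otherwise flag.
    // (Hypothetical example, not part of the patch.)
    NO_SANITIZE("address")
    char ReadByteUnchecked(const char* ptr) {
      return *ptr;
    }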
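Why the second hunk hand-rolls the copy instead of calling std::memcpy: under ASAN, memcpy calls are routed through the sanitizer's interceptor, which validates that the source and destination ranges are addressable, and a function-level no_sanitize("address") attribute does not disable that interceptor. The profiler intentionally reads the suspended target thread's stack, including bytes ASAN may have poisoned (e.g. stack red zones), so the interceptor would report false positives; a plain byte loop compiled inside the uninstrumented function carries no such checks. Below is a minimal sketch of the pattern, with hypothetical names and the raw Clang attribute standing in for the NO_SANITIZE macro.

    #include <cstddef>
    #include <cstdint>

    // Sketch only: copy [bottom, top) into |buffer| one byte at a time.
    // Because this function is not ASAN-instrumented, the loads and stores
    // below carry no sanitizer checks, whereas a memcpy() call here would
    // still go through ASAN's interceptor and could report the intentional
    // reads of another thread's stack.
    __attribute__((no_sanitize("address")))
    void CopyStackUnchecked(void* buffer, std::uintptr_t bottom,
                            std::uintptr_t top) {
      for (std::size_t pos = 0; pos < top - bottom; ++pos) {
        reinterpret_cast<char*>(buffer)[pos] =
            reinterpret_cast<const char*>(bottom)[pos];
      }
    }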