Chromium Code Reviews

Index: base/trace_event/heap_profiler_allocation_context_tracker.cc
diff --git a/base/trace_event/heap_profiler_allocation_context_tracker.cc b/base/trace_event/heap_profiler_allocation_context_tracker.cc
index b47dc16eddd02b2565bd18511d0490a17c488056..6b7383e2cbfee98c472317e5cf0028f691cc3caf 100644
--- a/base/trace_event/heap_profiler_allocation_context_tracker.cc
+++ b/base/trace_event/heap_profiler_allocation_context_tracker.cc
@@ -203,30 +203,34 @@ bool AllocationContextTracker::GetContextSnapshot(AllocationContext* ctx) {
       // Backtrace contract requires us to return bottom frames, i.e.
       // from main() and up. Stack unwinding produces top frames, i.e.
       // from this point and up until main(). We request many frames to
       // make sure we reach main(), and then copy bottom portion of them.
-      const void* frames[128];
-      static_assert(arraysize(frames) >= Backtrace::kMaxFrameCount,
-                    "not requesting enough frames to fill Backtrace");
-#if HAVE_TRACE_STACK_FRAME_POINTERS && !defined(OS_NACL)
-      size_t frame_count = debug::TraceStackFramePointers(
-          frames,
-          arraysize(frames),
-          1 /* exclude this function from the trace */ );
-#else
-      size_t frame_count = 0;
-      NOTREACHED();
-#endif
-
-      // Copy frames backwards
-      size_t backtrace_capacity = backtrace_end - backtrace;
-      int32_t top_frame_index = (backtrace_capacity >= frame_count)
-                                    ? 0
-                                    : frame_count - backtrace_capacity;
-      for (int32_t i = frame_count - 1; i >= top_frame_index; --i) {
-        const void* frame = frames[i];
-        *backtrace++ = StackFrame::FromProgramCounter(frame);
-      }
-      break;
+#if !defined(OS_NACL)  // We don't build base/debug/stack_trace.cc for NaCl.
+#if HAVE_TRACE_STACK_FRAME_POINTERS
+      const void* frames[128];
+      static_assert(arraysize(frames) >= Backtrace::kMaxFrameCount,
+                    "not requesting enough frames to fill Backtrace");
+      size_t frame_count = debug::TraceStackFramePointers(
+          frames, arraysize(frames),
+          1 /* exclude this function from the trace */);
+#else   // HAVE_TRACE_STACK_FRAME_POINTERS
+      // Fall-back to capturing the stack with base::debug::StackTrace,
+      // which is likely slower, but more reliable.
+      base::debug::StackTrace stack_trace(Backtrace::kMaxFrameCount);
+      size_t frame_count = 0u;
+      const void* const* frames = stack_trace.Addresses(&frame_count);
dcheng  2017/03/28 06:16:23
Nit: this will capture the constructor for StackTrace ...

Wez  2017/03/28 06:38:21
That is a fair point; it doesn't affect the functionality ...
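As dcheng's nit points out, base::debug::StackTrace starts recording at its own constructor, so the innermost captured entries include the capture machinery itself. For illustration only, a minimal standalone sketch of how a caller could drop a fixed number of leading (innermost) frames; SkipCaptureFrames and frames_to_skip are hypothetical names, not something this CL adds:

#include <cstddef>

// Hypothetical helper (not part of this CL): drop the innermost
// |frames_to_skip| entries, e.g. the frame of the capturing constructor,
// and return a pointer to the remaining frames. |frame_count| is updated
// to the number of frames left.
const void* const* SkipCaptureFrames(const void* const* frames,
                                     size_t* frame_count,
                                     size_t frames_to_skip) {
  if (*frame_count <= frames_to_skip) {
    *frame_count = 0;
    return frames;
  }
  *frame_count -= frames_to_skip;
  return frames + frames_to_skip;  // frames[0] is the innermost frame.
}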
+#endif  // HAVE_TRACE_STACK_FRAME_POINTERS
+
+      // Copy frames backwards
+      size_t backtrace_capacity = backtrace_end - backtrace;
+      int32_t top_frame_index = (backtrace_capacity >= frame_count)
+                                    ? 0
+                                    : frame_count - backtrace_capacity;
+      for (int32_t i = frame_count - 1; i >= top_frame_index; --i) {
+        const void* frame = frames[i];
+        *backtrace++ = StackFrame::FromProgramCounter(frame);
       }
+#endif  // !defined(OS_NACL)
+      break;
+    }
   }
   ctx->backtrace.frame_count = backtrace - std::begin(ctx->backtrace.frames);
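To make the "Copy frames backwards" step concrete: both unwinders above produce frames top-first (frames[0] is the innermost frame, the last entry is nearest main()), while the Backtrace wants the bottom-most portion in main()-first order. Below is a standalone sketch of that copy under the same top-first assumption; CopyBottomFrames is an illustrative name, not Chromium code:

#include <cstddef>
#include <vector>

// Keep at most |capacity| of the bottom-most (main()-side) frames, emitting
// them from main() towards the innermost frame, mirroring the loop in the
// hunk above.
std::vector<const void*> CopyBottomFrames(const void* const* frames,
                                          size_t frame_count,
                                          size_t capacity) {
  std::vector<const void*> backtrace;
  // First (top-most) frame we still have room for.
  size_t top_frame_index =
      (capacity >= frame_count) ? 0 : frame_count - capacity;
  for (size_t i = frame_count; i > top_frame_index; --i)
    backtrace.push_back(frames[i - 1]);  // Last entry is nearest main().
  return backtrace;
}

For example, with frame_count = 5 and capacity = 3 this keeps frames[4], frames[3] and frames[2], i.e. the three frames closest to main(), in that order.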