Index: base/trace_event/heap_profiler_allocation_context_tracker.cc
diff --git a/base/trace_event/heap_profiler_allocation_context_tracker.cc b/base/trace_event/heap_profiler_allocation_context_tracker.cc
index e192bc833fd950507a5756387b84b5fc92e827ed..7a75513d7659338b751093a9ac1b3db127a405ec 100644
--- a/base/trace_event/heap_profiler_allocation_context_tracker.cc
+++ b/base/trace_event/heap_profiler_allocation_context_tracker.cc
@@ -14,7 +14,8 @@
 namespace base {
 namespace trace_event {
-subtle::Atomic32 AllocationContextTracker::capture_enabled_ = 0;
+subtle::Atomic32 AllocationContextTracker::capture_mode_ =
+    static_cast<int32_t>(AllocationContextTracker::CaptureMode::DISABLED);
 namespace {
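
Note on the new representation: capture_mode_ stores an enum class value in a 32-bit atomic word via static_cast. The CaptureMode enum itself is declared in the corresponding header (heap_profiler_allocation_context_tracker.h), which this diff does not touch. Below is a minimal, self-contained sketch of the same pattern, using std::atomic in place of base's subtle::Atomic32; the enumerator names are taken from their uses in this file, and everything else (file name, globals) is illustrative only.

// sketch.cc - standalone illustration, not part of the patch.
#include <atomic>
#include <cstdint>
#include <iostream>

enum class CaptureMode : int32_t { DISABLED, PSEUDO_STACK, NATIVE_STACK };

// A 32-bit word is wide enough to hold any CaptureMode value.
std::atomic<int32_t> g_capture_mode{
    static_cast<int32_t>(CaptureMode::DISABLED)};

void SetCaptureMode(CaptureMode mode) {
  g_capture_mode.store(static_cast<int32_t>(mode), std::memory_order_release);
}

CaptureMode capture_mode() {
  return static_cast<CaptureMode>(
      g_capture_mode.load(std::memory_order_acquire));
}

int main() {
  SetCaptureMode(CaptureMode::PSEUDO_STACK);
  std::cout << (capture_mode() != CaptureMode::DISABLED) << "\n";  // prints 1
  return 0;
}
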
@@ -60,21 +61,21 @@ AllocationContextTracker::~AllocationContextTracker() {}
 // static
 void AllocationContextTracker::SetCurrentThreadName(const char* name) {
-  if (name && capture_enabled()) {
+  if (name && capture_mode() != CaptureMode::DISABLED) {
     GetInstanceForCurrentThread()->thread_name_ = name;
   }
 }
 // static
-void AllocationContextTracker::SetCaptureEnabled(bool enabled) {
+void AllocationContextTracker::SetCaptureMode(CaptureMode mode) {
   // When enabling capturing, also initialize the TLS slot. This does not create
   // a TLS instance yet.
-  if (enabled && !g_tls_alloc_ctx_tracker.initialized())
+  if (mode != CaptureMode::DISABLED && !g_tls_alloc_ctx_tracker.initialized())
     g_tls_alloc_ctx_tracker.Initialize(DestructAllocationContextTracker);
-  // Release ordering ensures that when a thread observes |capture_enabled_| to
-  // be true through an acquire load, the TLS slot has been initialized.
-  subtle::Release_Store(&capture_enabled_, enabled);
+  // Release ordering ensures that when a thread observes |capture_mode_| to
+  // be enabled through an acquire load, the TLS slot has been initialized.
+  subtle::Release_Store(&capture_mode_, static_cast<int32_t>(mode));
 }
 void AllocationContextTracker::PushPseudoStackFrame(
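
The comment in SetCaptureMode above relies on release/acquire pairing: the Release_Store of capture_mode_ publishes the TLS-slot initialization that precedes it, so any thread that acquire-loads an enabled mode is guaranteed to see an initialized slot. A minimal, self-contained sketch of that argument, assuming std::atomic in place of base's subtle::Release_Store/Acquire_Load and a plain bool standing in for the TLS slot (all names below are illustrative):

// sketch.cc - standalone illustration, not part of the patch.
#include <atomic>
#include <cassert>
#include <cstdint>
#include <thread>

bool g_tls_slot_initialized = false;     // stands in for the TLS slot
std::atomic<int32_t> g_capture_mode{0};  // 0 == DISABLED

void EnableCapture() {
  g_tls_slot_initialized = true;  // plain write: "initialize the TLS slot"
  // The release store publishes everything written above: a thread that
  // acquire-loads a non-zero mode also observes the initialization.
  g_capture_mode.store(1, std::memory_order_release);
}

void OnAllocation() {
  if (g_capture_mode.load(std::memory_order_acquire) != 0) {
    // The acquire load observed the release store, so the slot
    // initialization is guaranteed to be visible here.
    assert(g_tls_slot_initialized);
  }
}

int main() {
  std::thread enabler(EnableCapture);
  std::thread allocator(OnAllocation);
  enabler.join();
  allocator.join();
  return 0;
}
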
@@ -129,26 +130,66 @@ AllocationContext AllocationContextTracker::GetContextSnapshot() {
     return ctx;
   }
-  // Fill the backtrace.
-  {
-    auto backtrace = std::begin(ctx.backtrace.frames);
-    auto backtrace_end = std::end(ctx.backtrace.frames);
+  CaptureMode mode = static_cast<CaptureMode>(
+      subtle::NoBarrier_Load(&capture_mode_));
-    // Add the thread name as the first entry
-    if (thread_name_) {
-      *backtrace++ = StackFrame::FromThreadName(thread_name_);
-    }
+  auto backtrace = std::begin(ctx.backtrace.frames);
+  auto backtrace_end = std::end(ctx.backtrace.frames);
-    for (const char* event_name: pseudo_stack_) {
-      if (backtrace == backtrace_end) {
+  // Add the thread name as the first entry
+  if (thread_name_) {
+    *backtrace++ = StackFrame::FromThreadName(thread_name_);
+  }
+
+  switch (mode) {
+    case CaptureMode::DISABLED:
+      {
         break;
       }
-      *backtrace++ = StackFrame::FromTraceEventName(event_name);
-    }
-
-    ctx.backtrace.frame_count = backtrace - std::begin(ctx.backtrace.frames);
+    case CaptureMode::PSEUDO_STACK:
+      {
+        for (const auto& event_name: pseudo_stack_) {
+          if (backtrace == backtrace_end) {
+            break;
+          }
+          *backtrace++ = StackFrame::FromTraceEventName(event_name);
+        }
+        break;
+      }
+#if ENABLE_NATIVE_ALLOCATION_TRACES
+    case CaptureMode::NATIVE_STACK:
+      {
+        // One known frame to skip: this function, GetContextSnapshot().
+        const size_t kKnownFrameCount = 1;
+
+        // The Backtrace contract requires us to return bottom frames, i.e.
+        // from main() and up. Stack unwinding produces top frames, i.e.
+        // from this point and up until main(). We request many frames to
+        // make sure we reach main(), and then copy the bottom portion of them.
+        const void* frames[50];
+        static_assert(arraysize(frames) >= Backtrace::kMaxFrameCount,
+                      "not requesting enough frames to fill Backtrace");
+        size_t frame_count = debug::TraceStackFramePointers(
+            frames,
+            arraysize(frames),
+            kKnownFrameCount);
+
+        // Copy the bottom frames in reverse order, outermost (main) first.
+        size_t backtrace_capacity = backtrace_end - backtrace;
+        ssize_t top_frame_index = (backtrace_capacity >= frame_count) ?
+            0 :
+            frame_count - backtrace_capacity;
+        for (ssize_t i = frame_count - 1; i >= top_frame_index; --i) {
+          const void* frame = frames[i];
+          *backtrace++ = StackFrame::FromProgramCounter(frame);
+        }
+        break;
+      }
+#endif  // ENABLE_NATIVE_ALLOCATION_TRACES
   }
+  ctx.backtrace.frame_count = backtrace - std::begin(ctx.backtrace.frames);
+
   // TODO(ssid): Fix crbug.com/594803 to add file name as 3rd dimension
   // (component name) in the heap profiler and not piggy back on the type name.
   ctx.type_name = task_contexts_.empty() ? nullptr : task_contexts_.back();
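
The index arithmetic in the NATIVE_STACK case (keep only the frames nearest main() when the backtrace is too small, and write them outermost-first) can be exercised in isolation. A minimal sketch with integer frame IDs standing in for program counters and std::vector standing in for the fixed-size arrays; the function name and values are illustrative only:

// sketch.cc - standalone illustration, not part of the patch.
#include <cstddef>
#include <cstdio>
#include <vector>

// |frames| is innermost-first, as a stack unwinder produces it. The result is
// outermost-first (main() and up); when |capacity| is too small, the innermost
// frames are the ones dropped, mirroring top_frame_index in the patch.
std::vector<int> CopyBottomFramesReversed(const std::vector<int>& frames,
                                          size_t capacity) {
  size_t frame_count = frames.size();
  size_t top_frame_index =
      (capacity >= frame_count) ? 0 : frame_count - capacity;
  std::vector<int> bottom_up;
  for (size_t i = frame_count; i > top_frame_index; --i)
    bottom_up.push_back(frames[i - 1]);
  return bottom_up;
}

int main() {
  // Frame 1 is innermost (the allocation site), frame 5 is main().
  std::vector<int> frames = {1, 2, 3, 4, 5};
  // With room for only 3 frames we keep 5, 4, 3 (the ones nearest main()).
  for (int frame : CopyBottomFramesReversed(frames, 3))
    std::printf("%d\n", frame);
  return 0;
}
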