Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/profiler/stack_sampling_profiler.h" | 5 #include "base/profiler/stack_sampling_profiler.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <utility> | 8 #include <utility> |
| 9 | 9 |
| 10 #include "base/bind.h" | 10 #include "base/bind.h" |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
// Runs |callback| with the collected |profiles|, then schedules destruction
// of |object_to_be_deleted| (the AsyncRunner driving this profiling session)
// back on |task_runner| — the original calling thread — so the object is not
// deleted on the sampling thread that invoked this function.
void AsyncRunner::RunCallbackAndDeleteInstance(
    std::unique_ptr<AsyncRunner> object_to_be_deleted,
    const StackSamplingProfiler::CompletedCallback& callback,
    scoped_refptr<SingleThreadTaskRunner> task_runner,
    StackSamplingProfiler::CallStackProfiles profiles) {
  callback.Run(std::move(profiles));
  // Delete the instance on the original calling thread.
  task_runner->DeleteSoon(FROM_HERE, object_to_be_deleted.release());
}
| 84 | 84 |
// Atomically updates |flags|, setting every bit in |set| and clearing every
// bit in |clear|, retrying until the compare-and-swap succeeds. At least one
// of |set|/|clear| must be non-zero and the two masks must not overlap.
void ChangeAtomicFlags(subtle::Atomic32* flags,
                       subtle::Atomic32 set,
                       subtle::Atomic32 clear) {
  DCHECK(set != 0 || clear != 0);
  DCHECK_EQ(0, set & clear);

  subtle::Atomic32 bits = subtle::NoBarrier_Load(flags);
  while (true) {
    // CAS succeeds (returns the expected value) only if no other thread has
    // modified |flags| since |bits| was observed.
    subtle::Atomic32 existing =
        subtle::NoBarrier_CompareAndSwap(flags, bits, (bits | set) & ~clear);
    if (existing == bits)
      break;
    // Lost the race: recompute the new value from the freshly observed bits.
    bits = existing;
  }
}
| 100 | |
| 85 } // namespace | 101 } // namespace |
| 86 | 102 |
| 87 // StackSamplingProfiler::Module ---------------------------------------------- | 103 // StackSamplingProfiler::Module ---------------------------------------------- |
| 88 | 104 |
| 89 StackSamplingProfiler::Module::Module() : base_address(0u) {} | 105 StackSamplingProfiler::Module::Module() : base_address(0u) {} |
| 90 StackSamplingProfiler::Module::Module(uintptr_t base_address, | 106 StackSamplingProfiler::Module::Module(uintptr_t base_address, |
| 91 const std::string& id, | 107 const std::string& id, |
| 92 const FilePath& filename) | 108 const FilePath& filename) |
| 93 : base_address(base_address), id(id), filename(filename) {} | 109 : base_address(base_address), id(id), filename(filename) {} |
| 94 | 110 |
| 95 StackSamplingProfiler::Module::~Module() {} | 111 StackSamplingProfiler::Module::~Module() {} |
| 96 | 112 |
| 97 // StackSamplingProfiler::Frame ----------------------------------------------- | 113 // StackSamplingProfiler::Frame ----------------------------------------------- |
| 98 | 114 |
| 99 StackSamplingProfiler::Frame::Frame(uintptr_t instruction_pointer, | 115 StackSamplingProfiler::Frame::Frame(uintptr_t instruction_pointer, |
| 100 size_t module_index) | 116 size_t module_index) |
| 101 : instruction_pointer(instruction_pointer), module_index(module_index) {} | 117 : instruction_pointer(instruction_pointer), module_index(module_index) {} |
| 102 | 118 |
| 103 StackSamplingProfiler::Frame::~Frame() {} | 119 StackSamplingProfiler::Frame::~Frame() {} |
| 104 | 120 |
| 105 StackSamplingProfiler::Frame::Frame() | 121 StackSamplingProfiler::Frame::Frame() |
| 106 : instruction_pointer(0), module_index(kUnknownModuleIndex) { | 122 : instruction_pointer(0), module_index(kUnknownModuleIndex) { |
| 107 } | 123 } |
| 108 | 124 |
| 125 // StackSamplingProfiler::Sample ---------------------------------------------- | |
| 126 | |
| 127 StackSamplingProfiler::Sample::Sample() {} | |
| 128 | |
| 129 StackSamplingProfiler::Sample::Sample(const Sample& sample) = default; | |
| 130 | |
| 131 StackSamplingProfiler::Sample::~Sample() {} | |
| 132 | |
| 133 StackSamplingProfiler::Sample::Sample(const Frame& frame) { | |
| 134 frames.push_back(std::move(frame)); | |
| 135 } | |
| 136 | |
| 137 StackSamplingProfiler::Sample::Sample(const std::vector<Frame>& frames) | |
| 138 : frames(frames) {} | |
| 139 | |
| 109 // StackSamplingProfiler::CallStackProfile ------------------------------------ | 140 // StackSamplingProfiler::CallStackProfile ------------------------------------ |
| 110 | 141 |
| 111 StackSamplingProfiler::CallStackProfile::CallStackProfile() {} | 142 StackSamplingProfiler::CallStackProfile::CallStackProfile() {} |
| 112 | 143 |
| 113 StackSamplingProfiler::CallStackProfile::CallStackProfile( | 144 StackSamplingProfiler::CallStackProfile::CallStackProfile( |
| 114 CallStackProfile&& other) = default; | 145 CallStackProfile&& other) = default; |
| 115 | 146 |
| 116 StackSamplingProfiler::CallStackProfile::~CallStackProfile() {} | 147 StackSamplingProfiler::CallStackProfile::~CallStackProfile() {} |
| 117 | 148 |
| 118 StackSamplingProfiler::CallStackProfile& | 149 StackSamplingProfiler::CallStackProfile& |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 177 // stop_event_. | 208 // stop_event_. |
| 178 if (stop_event_.TimedWait( | 209 if (stop_event_.TimedWait( |
| 179 std::max(params_.sampling_interval - previous_elapsed_sample_time, | 210 std::max(params_.sampling_interval - previous_elapsed_sample_time, |
| 180 TimeDelta()))) { | 211 TimeDelta()))) { |
| 181 *was_stopped = true; | 212 *was_stopped = true; |
| 182 break; | 213 break; |
| 183 } | 214 } |
| 184 } | 215 } |
| 185 ElapsedTimer sample_timer; | 216 ElapsedTimer sample_timer; |
| 186 profile->samples.push_back(Sample()); | 217 profile->samples.push_back(Sample()); |
| 187 native_sampler_->RecordStackSample(&profile->samples.back()); | 218 Sample& sample = profile->samples.back(); |
| 219 native_sampler_->RecordStackSample(&sample); | |
|
Alexei Svitkine (slow)
2016/11/16 19:10:23
Nit: Remove this change since it doesn't seem necessary.
bcwhite
2016/11/16 23:10:48
Done.
| |
| 188 previous_elapsed_sample_time = sample_timer.Elapsed(); | 220 previous_elapsed_sample_time = sample_timer.Elapsed(); |
| 189 } | 221 } |
| 190 | 222 |
| 191 *elapsed_time = profile_timer.Elapsed(); | 223 *elapsed_time = profile_timer.Elapsed(); |
| 192 profile->profile_duration = *elapsed_time; | 224 profile->profile_duration = *elapsed_time; |
| 193 native_sampler_->ProfileRecordingStopped(); | 225 native_sampler_->ProfileRecordingStopped(); |
| 194 } | 226 } |
| 195 | 227 |
| 196 // In an analogous manner to CollectProfile() and samples exceeding the expected | 228 // In an analogous manner to CollectProfile() and samples exceeding the expected |
| 197 // total sampling time, bursts may also exceed the burst_interval. We adopt the | 229 // total sampling time, bursts may also exceed the burst_interval. We adopt the |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 222 return; | 254 return; |
| 223 } | 255 } |
| 224 } | 256 } |
| 225 | 257 |
| 226 void StackSamplingProfiler::SamplingThread::Stop() { | 258 void StackSamplingProfiler::SamplingThread::Stop() { |
| 227 stop_event_.Signal(); | 259 stop_event_.Signal(); |
| 228 } | 260 } |
| 229 | 261 |
| 230 // StackSamplingProfiler ------------------------------------------------------ | 262 // StackSamplingProfiler ------------------------------------------------------ |
| 231 | 263 |
| 264 subtle::Atomic32 StackSamplingProfiler::process_phases_ = 0; | |
| 265 | |
// Defaults: start immediately, one burst of 300 samples taken 100ms apart
// (i.e. a 30-second profile), with 10s between bursts if more were requested.
StackSamplingProfiler::SamplingParams::SamplingParams()
    : initial_delay(TimeDelta::FromMilliseconds(0)),
      bursts(1),
      burst_interval(TimeDelta::FromMilliseconds(10000)),
      samples_per_burst(300),
      sampling_interval(TimeDelta::FromMilliseconds(100)) {
}
| 239 | 273 |
| 240 StackSamplingProfiler::StackSamplingProfiler( | 274 StackSamplingProfiler::StackSamplingProfiler( |
| 241 PlatformThreadId thread_id, | 275 PlatformThreadId thread_id, |
| (...skipping 23 matching lines...) Expand all Loading... | |
| 265 const CompletedCallback& callback) { | 299 const CompletedCallback& callback) { |
| 266 CHECK(ThreadTaskRunnerHandle::Get()); | 300 CHECK(ThreadTaskRunnerHandle::Get()); |
| 267 AsyncRunner::Run(thread_id, params, callback); | 301 AsyncRunner::Run(thread_id, params, callback); |
| 268 } | 302 } |
| 269 | 303 |
void StackSamplingProfiler::Start() {
  // Without a completion callback there is nowhere to deliver profiles, so
  // starting is a no-op.
  if (completed_callback_.is_null())
    return;

  // Create() may return null (presumably on unsupported platforms — the
  // factory is defined elsewhere); in that case sampling silently does not
  // start. RecordAnnotations is passed so each sample can capture the current
  // process-phase bits.
  std::unique_ptr<NativeStackSampler> native_sampler =
      NativeStackSampler::Create(thread_id_, &RecordAnnotations,
                                 test_delegate_);
  if (!native_sampler)
    return;

  sampling_thread_.reset(new SamplingThread(std::move(native_sampler), params_,
                                            completed_callback_));
  // If the platform thread can't be created, discard the sampling thread
  // object so Stop() and the destructor see a not-started profiler.
  if (!PlatformThread::Create(0, sampling_thread_.get(),
                              &sampling_thread_handle_))
    sampling_thread_.reset();
}
| 285 | 320 |
// Requests the sampling thread stop; no-op if sampling was never started.
void StackSamplingProfiler::Stop() {
  if (sampling_thread_)
    sampling_thread_->Stop();
}
| 290 | 325 |
// static
void StackSamplingProfiler::SetProcessPhase(int phase) {
  // |phase| indexes a single bit of the 32-bit process_phases_ mask.
  DCHECK_LE(0, phase);
  DCHECK_GT(static_cast<int>(sizeof(process_phases_) * 8), phase);
  // Each phase may be recorded only once per process.
  DCHECK_EQ(0, subtle::NoBarrier_Load(&process_phases_) & (1 << phase));
  ChangeAtomicFlags(&process_phases_, 1 << phase, 0);
}
| 333 | |
// static
void StackSamplingProfiler::ResetAnnotationsForTesting() {
  // Clears all recorded process phases so each test starts from a known state.
  subtle::NoBarrier_Store(&process_phases_, 0u);
}
| 338 | |
| 339 // The code inside this method must not do anything that could acquire a mutex, | |
|
Alexei Svitkine (slow)
2016/11/16 19:10:24
Nit: Move this comment inside the method.
bcwhite
2016/11/16 23:10:48
Done.
| |
| 340 // including allocating memory (which includes LOG messages) because that mutex | |
| 341 // could be held by a stopped thread, thus resulting in deadlock. | |
| 342 // static | |
| 343 void StackSamplingProfiler::RecordAnnotations(Sample* sample) { | |
| 344 sample->process_phases = subtle::NoBarrier_Load(&process_phases_); | |
| 345 } | |
| 346 | |
| 291 // StackSamplingProfiler::Frame global functions ------------------------------ | 347 // StackSamplingProfiler::Frame global functions ------------------------------ |
| 292 | 348 |
| 293 bool operator==(const StackSamplingProfiler::Module& a, | 349 bool operator==(const StackSamplingProfiler::Module& a, |
| 294 const StackSamplingProfiler::Module& b) { | 350 const StackSamplingProfiler::Module& b) { |
| 295 return a.base_address == b.base_address && a.id == b.id && | 351 return a.base_address == b.base_address && a.id == b.id && |
| 296 a.filename == b.filename; | 352 a.filename == b.filename; |
| 297 } | 353 } |
| 298 | 354 |
// Two samples are equal when they agree on both the process-phase bits
// recorded at sampling time and the captured stack frames.
bool operator==(const StackSamplingProfiler::Sample& a,
                const StackSamplingProfiler::Sample& b) {
  return a.process_phases == b.process_phases && a.frames == b.frames;
}

bool operator!=(const StackSamplingProfiler::Sample& a,
                const StackSamplingProfiler::Sample& b) {
  return !(a == b);
}
| 364 | |
| 365 bool operator<(const StackSamplingProfiler::Sample& a, | |
| 366 const StackSamplingProfiler::Sample& b) { | |
| 367 if (a.process_phases < b.process_phases) | |
| 368 return true; | |
| 369 if (a.process_phases > b.process_phases) | |
| 370 return false; | |
| 371 | |
| 372 return a.frames < b.frames; | |
| 373 } | |
| 374 | |
| 299 bool operator==(const StackSamplingProfiler::Frame &a, | 375 bool operator==(const StackSamplingProfiler::Frame &a, |
| 300 const StackSamplingProfiler::Frame &b) { | 376 const StackSamplingProfiler::Frame &b) { |
| 301 return a.instruction_pointer == b.instruction_pointer && | 377 return a.instruction_pointer == b.instruction_pointer && |
| 302 a.module_index == b.module_index; | 378 a.module_index == b.module_index; |
| 303 } | 379 } |
| 304 | 380 |
| 305 bool operator<(const StackSamplingProfiler::Frame &a, | 381 bool operator<(const StackSamplingProfiler::Frame &a, |
| 306 const StackSamplingProfiler::Frame &b) { | 382 const StackSamplingProfiler::Frame &b) { |
| 307 return (a.module_index < b.module_index) || | 383 return (a.module_index < b.module_index) || |
| 308 (a.module_index == b.module_index && | 384 (a.module_index == b.module_index && |
| 309 a.instruction_pointer < b.instruction_pointer); | 385 a.instruction_pointer < b.instruction_pointer); |
| 310 } | 386 } |
| 311 | 387 |
| 312 } // namespace base | 388 } // namespace base |
| OLD | NEW |