Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/profiler/stack_sampling_profiler.h" | 5 #include "base/profiler/stack_sampling_profiler.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <map> | 8 #include <map> |
| 9 #include <utility> | 9 #include <utility> |
| 10 | 10 |
| (...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 243 void PerformCollectionTask(int id); | 243 void PerformCollectionTask(int id); |
| 244 void ShutdownTask(int add_events); | 244 void ShutdownTask(int add_events); |
| 245 | 245 |
| 246 // Updates the |next_sample_time| time based on configured parameters. | 246 // Updates the |next_sample_time| time based on configured parameters. |
| 247 // Returns true if there is a next sample or false if sampling is complete. | 247 // Returns true if there is a next sample or false if sampling is complete. |
| 248 bool UpdateNextSampleTime(CollectionContext* collection); | 248 bool UpdateNextSampleTime(CollectionContext* collection); |
| 249 | 249 |
| 250 // Thread: | 250 // Thread: |
| 251 void CleanUp() override; | 251 void CleanUp() override; |
| 252 | 252 |
| 253 // A stack-buffer used by the native sampler for its work. | |
|
Mike Wittman
2017/05/01 20:28:04
Mention that this is shared across all active sampling collections.
bcwhite
2017/05/08 14:00:59
Done.
| |
| 254 std::unique_ptr<NativeStackSampler::StackBuffer> native_buffer_; | |
|
Mike Wittman
2017/05/01 20:28:03
stack_buffer_ is probably a better name.
bcwhite
2017/05/08 14:00:59
Done.
| |
| 255 | |
| 253 // A map of IDs to collection contexts. Because this class is a singleton | 256 // A map of IDs to collection contexts. Because this class is a singleton |
| 254 // that is never destroyed, context objects will never be destructed except | 257 // that is never destroyed, context objects will never be destructed except |
| 255 // by explicit action. Thus, it's acceptable to pass unretained pointers | 258 // by explicit action. Thus, it's acceptable to pass unretained pointers |
| 256 // to these objects when posting tasks. | 259 // to these objects when posting tasks. |
| 257 std::map<int, std::unique_ptr<CollectionContext>> active_collections_; | 260 std::map<int, std::unique_ptr<CollectionContext>> active_collections_; |
| 258 | 261 |
| 259 // State maintained about the current execution (or non-execution) of | 262 // State maintained about the current execution (or non-execution) of |
| 260 // the thread. This state must always be accessed while holding the | 263 // the thread. This state must always be accessed while holding the |
| 261 // lock. A copy of the task-runner is maintained here for use by any | 264 // lock. A copy of the task-runner is maintained here for use by any |
| 262 // calling thread; this is necessary because Thread's accessor for it is | 265 // calling thread; this is necessary because Thread's accessor for it is |
| (...skipping 151 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 414 DCHECK_NE(GetThreadId(), PlatformThread::CurrentId()); | 417 DCHECK_NE(GetThreadId(), PlatformThread::CurrentId()); |
| 415 return thread_execution_state_task_runner_; | 418 return thread_execution_state_task_runner_; |
| 416 } | 419 } |
| 417 | 420 |
| 418 if (thread_execution_state_ == EXITING) { | 421 if (thread_execution_state_ == EXITING) { |
| 419 // The previous instance has only been partially cleaned up. It is necessary | 422 // The previous instance has only been partially cleaned up. It is necessary |
| 420 // to call Stop() before Start(). | 423 // to call Stop() before Start(). |
| 421 Stop(); | 424 Stop(); |
| 422 } | 425 } |
| 423 | 426 |
| 427 DCHECK(!native_buffer_); | |
| 428 native_buffer_ = NativeStackSampler::CreateStackBuffer(); | |
| 429 | |
| 424 // The thread is not running. Start it and get associated runner. The task- | 430 // The thread is not running. Start it and get associated runner. The task- |
| 425 // runner has to be saved for future use because though it can be used from | 431 // runner has to be saved for future use because though it can be used from |
| 426 // any thread, it can be acquired via task_runner() only on the created | 432 // any thread, it can be acquired via task_runner() only on the created |
| 427 // thread and the thread that creates it (i.e. this thread) for thread-safety | 433 // thread and the thread that creates it (i.e. this thread) for thread-safety |
| 428 // reasons which are alleviated in SamplingThread by gating access to it with | 434 // reasons which are alleviated in SamplingThread by gating access to it with |
| 429 // the |thread_execution_state_lock_|. | 435 // the |thread_execution_state_lock_|. |
| 430 Start(); | 436 Start(); |
| 431 thread_execution_state_ = RUNNING; | 437 thread_execution_state_ = RUNNING; |
| 432 thread_execution_state_task_runner_ = Thread::task_runner(); | 438 thread_execution_state_task_runner_ = Thread::task_runner(); |
| 433 | 439 |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 510 profile.sampling_period = collection->params.sampling_interval; | 516 profile.sampling_period = collection->params.sampling_interval; |
| 511 collection->profile_start_time = Time::Now(); | 517 collection->profile_start_time = Time::Now(); |
| 512 collection->native_sampler->ProfileRecordingStarting(&profile.modules); | 518 collection->native_sampler->ProfileRecordingStarting(&profile.modules); |
| 513 } | 519 } |
| 514 | 520 |
| 515 // The currently active profile being captured. | 521 // The currently active profile being captured. |
| 516 CallStackProfile& profile = collection->profiles.back(); | 522 CallStackProfile& profile = collection->profiles.back(); |
| 517 | 523 |
| 518 // Record a single sample. | 524 // Record a single sample. |
| 519 profile.samples.push_back(Sample()); | 525 profile.samples.push_back(Sample()); |
| 520 collection->native_sampler->RecordStackSample(&profile.samples.back()); | 526 collection->native_sampler->RecordStackSample(native_buffer_.get(), |
| 527 &profile.samples.back()); | |
| 521 | 528 |
| 522 // If this is the last sample of a burst, record the total time. | 529 // If this is the last sample of a burst, record the total time. |
| 523 if (collection->sample == collection->params.samples_per_burst - 1) { | 530 if (collection->sample == collection->params.samples_per_burst - 1) { |
| 524 profile.profile_duration = Time::Now() - collection->profile_start_time; | 531 profile.profile_duration = Time::Now() - collection->profile_start_time; |
| 525 collection->native_sampler->ProfileRecordingStopped(); | 532 collection->native_sampler->ProfileRecordingStopped(native_buffer_.get()); |
| 526 } | 533 } |
| 527 } | 534 } |
| 528 | 535 |
| 529 void StackSamplingProfiler::SamplingThread::ScheduleShutdownIfIdle() { | 536 void StackSamplingProfiler::SamplingThread::ScheduleShutdownIfIdle() { |
| 530 DCHECK_EQ(GetThreadId(), PlatformThread::CurrentId()); | 537 DCHECK_EQ(GetThreadId(), PlatformThread::CurrentId()); |
| 531 | 538 |
| 532 if (!active_collections_.empty()) | 539 if (!active_collections_.empty()) |
| 533 return; | 540 return; |
| 534 | 541 |
| 535 int add_events; | 542 int add_events; |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 640 // (again) in order for Stop/Start to be called (again) should more work | 647 // (again) in order for Stop/Start to be called (again) should more work |
| 641 // come in. Holding the |thread_execution_state_lock_| ensures the necessary | 648 // come in. Holding the |thread_execution_state_lock_| ensures the necessary |
| 642 // happens-after with regard to this detach and future Thread API calls. | 649 // happens-after with regard to this detach and future Thread API calls. |
| 643 DetachFromSequence(); | 650 DetachFromSequence(); |
| 644 | 651 |
| 645 // Set the thread_state variable so the thread will be restarted when new | 652 // Set the thread_state variable so the thread will be restarted when new |
| 646 // work comes in. Remove the |thread_execution_state_task_runner_| to avoid | 653 // work comes in. Remove the |thread_execution_state_task_runner_| to avoid |
| 647 // confusion. | 654 // confusion. |
| 648 thread_execution_state_ = EXITING; | 655 thread_execution_state_ = EXITING; |
| 649 thread_execution_state_task_runner_ = nullptr; | 656 thread_execution_state_task_runner_ = nullptr; |
| 657 native_buffer_.reset(); | |
| 650 } | 658 } |
| 651 | 659 |
| 652 bool StackSamplingProfiler::SamplingThread::UpdateNextSampleTime( | 660 bool StackSamplingProfiler::SamplingThread::UpdateNextSampleTime( |
| 653 CollectionContext* collection) { | 661 CollectionContext* collection) { |
| 654 // This will keep a consistent average interval between samples but will | 662 // This will keep a consistent average interval between samples but will |
| 655 // result in constant series of acquisitions, thus nearly locking out the | 663 // result in constant series of acquisitions, thus nearly locking out the |
| 656 // target thread, if the interval is smaller than the time it takes to | 664 // target thread, if the interval is smaller than the time it takes to |
| 657 // actually acquire the sample. Anything sampling that quickly is going | 665 // actually acquire the sample. Anything sampling that quickly is going |
| 658 // to be a problem anyway so don't worry about it. | 666 // to be a problem anyway so don't worry about it. |
| 659 if (++collection->sample < collection->params.samples_per_burst) { | 667 if (++collection->sample < collection->params.samples_per_burst) { |
| (...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 833 } | 841 } |
| 834 | 842 |
| 835 bool operator<(const StackSamplingProfiler::Frame &a, | 843 bool operator<(const StackSamplingProfiler::Frame &a, |
| 836 const StackSamplingProfiler::Frame &b) { | 844 const StackSamplingProfiler::Frame &b) { |
| 837 return (a.module_index < b.module_index) || | 845 return (a.module_index < b.module_index) || |
| 838 (a.module_index == b.module_index && | 846 (a.module_index == b.module_index && |
| 839 a.instruction_pointer < b.instruction_pointer); | 847 a.instruction_pointer < b.instruction_pointer); |
| 840 } | 848 } |
| 841 | 849 |
| 842 } // namespace base | 850 } // namespace base |
| OLD | NEW |