Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <stddef.h> | 5 #include <stddef.h> |
| 6 #include <stdint.h> | 6 #include <stdint.h> |
| 7 | 7 |
| 8 #include <cstdlib> | 8 #include <cstdlib> |
| 9 #include <memory> | 9 #include <memory> |
| 10 #include <utility> | 10 #include <utility> |
| (...skipping 333 matching lines...) | |
| 344 target_thread.SignalThreadToFinish(); | 344 target_thread.SignalThreadToFinish(); |
| 345 | 345 |
| 346 PlatformThread::Join(target_thread_handle); | 346 PlatformThread::Join(target_thread_handle); |
| 347 } | 347 } |
| 348 | 348 |
| 349 template <class Function> | 349 template <class Function> |
| 350 void WithTargetThread(Function function) { | 350 void WithTargetThread(Function function) { |
| 351 WithTargetThread(function, StackConfiguration(StackConfiguration::NORMAL)); | 351 WithTargetThread(function, StackConfiguration(StackConfiguration::NORMAL)); |
| 352 } | 352 } |
| 353 | 353 |
| 354 // Waits for one of multiple samplings to complete. | |
| 355 void CreateProfilers(PlatformThreadId target_thread_id, | |
|
Mike Wittman (2017/03/18 01:38:41): Nice, encapsulating this functionality makes the t…
bcwhite (2017/03/20 21:50:51): Done.
Mike Wittman (2017/03/21 16:50:38): param should be passed as a vector also.
bcwhite (2017/03/22 17:48:54): Done.
| 356 std::unique_ptr<StackSamplingProfiler>* profilers, | |
| 357 std::unique_ptr<WaitableEvent>* completed, | |
| 358 CallStackProfiles* profiles, | |
| 359 SamplingParams* params, | |
|
Mike Wittman (2017/03/18 01:38:41): const
bcwhite (2017/03/20 21:50:51): Done.
| 360 size_t count) { | |
| 361 for (size_t i = 0; i < count; ++i) { | |
| 362 completed[i] = | |
| 363 MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC, | |
| 364 WaitableEvent::InitialState::NOT_SIGNALED); | |
| 365 const StackSamplingProfiler::CompletedCallback callback = | |
| 366 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]), | |
| 367 Unretained(completed[i].get())); | |
| 368 profilers[i] = MakeUnique<StackSamplingProfiler>(target_thread_id, | |
| 369 params[i], callback); | |
| 370 } | |
| 371 } | |
| 372 | |
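The review comments above ask for the profiler, event, and profile collections to be passed as vectors and for the params to be const. As a rough illustration of that shape (not the patch's actual follow-up code), here is a sketch that reuses the file-local SaveProfilesAndSignalEvent helper and the CallStackProfiles alias; the function name and signature are hypothetical.

```cpp
// Hypothetical vector-based variant of CreateProfilers(), sketching the
// shape requested in the review above; names and signature are illustrative.
void CreateProfilersSketch(
    PlatformThreadId target_thread_id,
    const std::vector<SamplingParams>& params,
    std::vector<std::unique_ptr<StackSamplingProfiler>>* profilers,
    std::vector<std::unique_ptr<WaitableEvent>>* completed,
    std::vector<CallStackProfiles>* profiles) {
  profilers->resize(params.size());
  completed->resize(params.size());
  profiles->resize(params.size());
  for (size_t i = 0; i < params.size(); ++i) {
    // Each sampling run gets its own completion event...
    (*completed)[i] =
        MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
                                  WaitableEvent::InitialState::NOT_SIGNALED);
    // ...and a callback that stores the produced profiles and signals it.
    const StackSamplingProfiler::CompletedCallback callback =
        Bind(&SaveProfilesAndSignalEvent, Unretained(&(*profiles)[i]),
             Unretained((*completed)[i].get()));
    (*profilers)[i] = MakeUnique<StackSamplingProfiler>(target_thread_id,
                                                        params[i], callback);
  }
}
```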
| 354 // Captures profiles as specified by |params| on the TargetThread, and returns | 373 // Captures profiles as specified by |params| on the TargetThread, and returns |
| 355 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to | 374 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to |
| 356 // complete. | 375 // complete. |
| 357 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, | 376 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, |
| 358 CallStackProfiles* profiles) { | 377 CallStackProfiles* profiles) { |
| 359 profiles->clear(); | 378 profiles->clear(); |
| 360 | 379 |
| 361 WithTargetThread([¶ms, profiles, | 380 WithTargetThread([¶ms, profiles, |
| 362 profiler_wait_time](PlatformThreadId target_thread_id) { | 381 profiler_wait_time](PlatformThreadId target_thread_id) { |
| 363 WaitableEvent sampling_thread_completed( | 382 WaitableEvent sampling_thread_completed( |
| 364 WaitableEvent::ResetPolicy::MANUAL, | 383 WaitableEvent::ResetPolicy::MANUAL, |
| 365 WaitableEvent::InitialState::NOT_SIGNALED); | 384 WaitableEvent::InitialState::NOT_SIGNALED); |
| 366 const StackSamplingProfiler::CompletedCallback callback = | 385 const StackSamplingProfiler::CompletedCallback callback = |
| 367 Bind(&SaveProfilesAndSignalEvent, Unretained(profiles), | 386 Bind(&SaveProfilesAndSignalEvent, Unretained(profiles), |
| 368 Unretained(&sampling_thread_completed)); | 387 Unretained(&sampling_thread_completed)); |
| 369 StackSamplingProfiler profiler(target_thread_id, params, callback); | 388 StackSamplingProfiler profiler(target_thread_id, params, callback); |
| 370 profiler.Start(); | 389 profiler.Start(); |
| 371 sampling_thread_completed.TimedWait(profiler_wait_time); | 390 sampling_thread_completed.TimedWait(profiler_wait_time); |
| 372 profiler.Stop(); | 391 profiler.Stop(); |
| 373 sampling_thread_completed.Wait(); | 392 sampling_thread_completed.Wait(); |
| 374 }); | 393 }); |
| 375 } | 394 } |
| 376 | 395 |
| 396 // Waits for one of multiple samplings to complete. | |
| 397 size_t WaitForSamplingComplete( | |
| 398 std::vector<std::unique_ptr<WaitableEvent>>* sampling_completed) { | |
| 399 // Map unique_ptrs to something that WaitMany can accept. | |
| 400 std::vector<WaitableEvent*> sampling_completed_rawptrs( | |
| 401 sampling_completed->size()); | |
| 402 std::transform( | |
| 403 sampling_completed->begin(), sampling_completed->end(), | |
| 404 sampling_completed_rawptrs.begin(), | |
| 405 [](const std::unique_ptr<WaitableEvent>& elem) { return elem.get(); }); | |
| 406 // Wait for one profiler to finish. | |
| 407 return WaitableEvent::WaitMany(sampling_completed_rawptrs.data(), | |
| 408 sampling_completed_rawptrs.size()); | |
| 409 } | |
| 410 | |
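For clarity, a minimal usage sketch of the helper above (illustration only, not part of the patch): create two completion events, signal one directly rather than from a sampling callback, and confirm the helper returns the index of the signaled event.

```cpp
// Illustration of how WaitForSamplingComplete() is meant to be called; the
// events are signaled by hand here instead of by profiler callbacks.
void WaitForSamplingCompleteUsageSketch() {
  std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(2);
  for (auto& event : sampling_completed) {
    event = MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
                                      WaitableEvent::InitialState::NOT_SIGNALED);
  }
  sampling_completed[1]->Signal();  // Pretend the second sampling finished.
  size_t completed = WaitForSamplingComplete(&sampling_completed);
  DCHECK_EQ(1u, completed);  // WaitMany reports the signaled event's index.
}
```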
| 377 // If this executable was linked with /INCREMENTAL (the default for non-official | 411 // If this executable was linked with /INCREMENTAL (the default for non-official |
| 378 // debug and release builds on Windows), function addresses do not correspond to | 412 // debug and release builds on Windows), function addresses do not correspond to |
| 379 // function code itself, but instead to instructions in the Incremental Link | 413 // function code itself, but instead to instructions in the Incremental Link |
| 380 // Table that jump to the functions. Checks for a jump instruction and if | 414 // Table that jump to the functions. Checks for a jump instruction and if |
| 381 // present does a little decompilation to find the function's actual starting | 415 // present does a little decompilation to find the function's actual starting |
| 382 // address. | 416 // address. |
| 383 const void* MaybeFixupFunctionAddressForILT(const void* function_address) { | 417 const void* MaybeFixupFunctionAddressForILT(const void* function_address) { |
| 384 #if defined(_WIN64) | 418 #if defined(_WIN64) |
| 385 const unsigned char* opcode = | 419 const unsigned char* opcode = |
| 386 reinterpret_cast<const unsigned char*>(function_address); | 420 reinterpret_cast<const unsigned char*>(function_address); |
| (...skipping 50 matching lines...) | |
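The rest of MaybeFixupFunctionAddressForILT() is elided by the diff viewer above. For reference, a hedged sketch of the usual x64 technique: an Incremental Link Table entry is a 5-byte `jmp rel32` (opcode 0xE9) whose 32-bit signed displacement is relative to the next instruction, so the real function start is the address just past the jump plus that displacement. This illustrates the approach; the elided body may differ in detail.

```cpp
// Sketch of the ILT jump decoding described above (illustrative only; the
// elided body of MaybeFixupFunctionAddressForILT() may differ in detail).
const void* FixupIltJumpSketch(const void* function_address) {
  const unsigned char* opcode =
      reinterpret_cast<const unsigned char*>(function_address);
  if (*opcode == 0xE9) {
    // jmp rel32: the displacement follows the opcode and is relative to the
    // first byte after the 5-byte instruction.
    const int32_t* displacement = reinterpret_cast<const int32_t*>(opcode + 1);
    const unsigned char* next_instruction =
        reinterpret_cast<const unsigned char*>(displacement + 1);
    return next_instruction + *displacement;
  }
  return function_address;  // Not an ILT thunk; use the address as-is.
}
```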
| 437 // asynchronous library loading has completed before walking the stack. If | 471 // asynchronous library loading has completed before walking the stack. If |
| 438 // false, the unloading may still be occurring during the stack walk. | 472 // false, the unloading may still be occurring during the stack walk. |
| 439 void TestLibraryUnload(bool wait_until_unloaded) { | 473 void TestLibraryUnload(bool wait_until_unloaded) { |
| 440 // Test delegate that supports intervening between the copying of the stack | 474 // Test delegate that supports intervening between the copying of the stack |
| 441 // and the walking of the stack. | 475 // and the walking of the stack. |
| 442 class StackCopiedSignaler : public NativeStackSamplerTestDelegate { | 476 class StackCopiedSignaler : public NativeStackSamplerTestDelegate { |
| 443 public: | 477 public: |
| 444 StackCopiedSignaler(WaitableEvent* stack_copied, | 478 StackCopiedSignaler(WaitableEvent* stack_copied, |
| 445 WaitableEvent* start_stack_walk, | 479 WaitableEvent* start_stack_walk, |
| 446 bool wait_to_walk_stack) | 480 bool wait_to_walk_stack) |
| 447 : stack_copied_(stack_copied), start_stack_walk_(start_stack_walk), | 481 : stack_copied_(stack_copied), |
| 448 wait_to_walk_stack_(wait_to_walk_stack) { | 482 start_stack_walk_(start_stack_walk), |
| 449 } | 483 wait_to_walk_stack_(wait_to_walk_stack) {} |
| 450 | 484 |
| 451 void OnPreStackWalk() override { | 485 void OnPreStackWalk() override { |
| 452 stack_copied_->Signal(); | 486 stack_copied_->Signal(); |
| 453 if (wait_to_walk_stack_) | 487 if (wait_to_walk_stack_) |
| 454 start_stack_walk_->Wait(); | 488 start_stack_walk_->Wait(); |
| 455 } | 489 } |
| 456 | 490 |
| 457 private: | 491 private: |
| 458 WaitableEvent* const stack_copied_; | 492 WaitableEvent* const stack_copied_; |
| 459 WaitableEvent* const start_stack_walk_; | 493 WaitableEvent* const start_stack_walk_; |
| (...skipping 253 matching lines...) | |
| 713 reinterpret_cast<const void*>(&TargetThread::CallWithAlloca)) | 747 reinterpret_cast<const void*>(&TargetThread::CallWithAlloca)) |
| 714 << " was not found in stack:\n" | 748 << " was not found in stack:\n" |
| 715 << FormatSampleForDiagnosticOutput(sample, profile.modules); | 749 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 716 | 750 |
| 717 // These frames should be adjacent on the stack. | 751 // These frames should be adjacent on the stack. |
| 718 EXPECT_EQ(1, alloca_frame - end_frame) | 752 EXPECT_EQ(1, alloca_frame - end_frame) |
| 719 << "Stack:\n" | 753 << "Stack:\n" |
| 720 << FormatSampleForDiagnosticOutput(sample, profile.modules); | 754 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 721 } | 755 } |
| 722 | 756 |
| 723 // Checks that the fire-and-forget interface works. | |
| 724 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
| 725 #define MAYBE_StartAndRunAsync StartAndRunAsync | |
| 726 #else | |
| 727 #define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync | |
| 728 #endif | |
| 729 TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) { | |
| 730 // StartAndRunAsync requires the caller to have a message loop. | |
| 731 MessageLoop message_loop; | |
| 732 | |
| 733 SamplingParams params; | |
| 734 params.samples_per_burst = 1; | |
| 735 | |
| 736 CallStackProfiles profiles; | |
| 737 WithTargetThread([¶ms, &profiles](PlatformThreadId target_thread_id) { | |
| 738 WaitableEvent sampling_thread_completed( | |
| 739 WaitableEvent::ResetPolicy::AUTOMATIC, | |
| 740 WaitableEvent::InitialState::NOT_SIGNALED); | |
| 741 const StackSamplingProfiler::CompletedCallback callback = | |
| 742 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | |
| 743 Unretained(&sampling_thread_completed)); | |
| 744 StackSamplingProfiler::StartAndRunAsync(target_thread_id, params, callback); | |
| 745 RunLoop().RunUntilIdle(); | |
| 746 sampling_thread_completed.Wait(); | |
| 747 }); | |
| 748 | |
| 749 ASSERT_EQ(1u, profiles.size()); | |
| 750 } | |
| 751 | |
| 752 // Checks that the expected number of profiles and samples are present in the | 757 // Checks that the expected number of profiles and samples are present in the |
| 753 // call stack profiles produced. | 758 // call stack profiles produced. |
| 754 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 759 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
| 755 #define MAYBE_MultipleProfilesAndSamples MultipleProfilesAndSamples | 760 #define MAYBE_MultipleProfilesAndSamples MultipleProfilesAndSamples |
| 756 #else | 761 #else |
| 757 #define MAYBE_MultipleProfilesAndSamples DISABLED_MultipleProfilesAndSamples | 762 #define MAYBE_MultipleProfilesAndSamples DISABLED_MultipleProfilesAndSamples |
| 758 #endif | 763 #endif |
| 759 TEST(StackSamplingProfilerTest, MAYBE_MultipleProfilesAndSamples) { | 764 TEST(StackSamplingProfilerTest, MAYBE_MultipleProfilesAndSamples) { |
| 760 SamplingParams params; | 765 SamplingParams params; |
| 761 params.burst_interval = params.sampling_interval = | 766 params.burst_interval = params.sampling_interval = |
| (...skipping 91 matching lines...) | |
| 853 }); | 858 }); |
| 854 } | 859 } |
| 855 | 860 |
| 856 // Checks that the same profiler may be run multiple times. | 861 // Checks that the same profiler may be run multiple times. |
| 857 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 862 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
| 858 #define MAYBE_CanRunMultipleTimes CanRunMultipleTimes | 863 #define MAYBE_CanRunMultipleTimes CanRunMultipleTimes |
| 859 #else | 864 #else |
| 860 #define MAYBE_CanRunMultipleTimes DISABLED_CanRunMultipleTimes | 865 #define MAYBE_CanRunMultipleTimes DISABLED_CanRunMultipleTimes |
| 861 #endif | 866 #endif |
| 862 TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleTimes) { | 867 TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleTimes) { |
| 868 StackSamplingProfiler::TestAPI::DisableIdleShutdown(); | |
| 869 | |
| 870 WithTargetThread([](PlatformThreadId target_thread_id) { | |
| 871 SamplingParams params; | |
| 872 params.sampling_interval = TimeDelta::FromMilliseconds(0); | |
| 873 params.samples_per_burst = 1; | |
| 874 | |
| 875 CallStackProfiles profiles; | |
| 876 WaitableEvent sampling_completed(WaitableEvent::ResetPolicy::MANUAL, | |
| 877 WaitableEvent::InitialState::NOT_SIGNALED); | |
| 878 const StackSamplingProfiler::CompletedCallback callback = | |
| 879 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | |
| 880 Unretained(&sampling_completed)); | |
| 881 StackSamplingProfiler profiler(target_thread_id, params, callback); | |
| 882 | |
| 883 // Just start and stop to execute code paths. | |
| 884 profiler.Start(); | |
| 885 profiler.Stop(); | |
| 886 sampling_completed.Wait(); | |
| 887 | |
| 888 // Ensure a second request will run and not block. | |
| 889 sampling_completed.Reset(); | |
| 890 profiles.clear(); | |
| 891 profiler.Start(); | |
| 892 sampling_completed.Wait(); | |
| 893 profiler.Stop(); | |
| 894 ASSERT_EQ(1u, profiles.size()); | |
| 895 }); | |
| 896 } | |
| 897 | |
| 898 // Checks that different profilers may be run. | |
| 899 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
| 900 #define MAYBE_CanRunMultipleProfilers CanRunMultipleProfilers | |
| 901 #else | |
| 902 #define MAYBE_CanRunMultipleProfilers DISABLED_CanRunMultipleProfilers | |
| 903 #endif | |
| 904 TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleProfilers) { | |
| 905 StackSamplingProfiler::TestAPI::DisableIdleShutdown(); | |
| 906 | |
| 863 SamplingParams params; | 907 SamplingParams params; |
| 864 params.sampling_interval = TimeDelta::FromMilliseconds(0); | 908 params.sampling_interval = TimeDelta::FromMilliseconds(0); |
| 865 params.samples_per_burst = 1; | 909 params.samples_per_burst = 1; |
| 866 | 910 |
| 867 std::vector<CallStackProfile> profiles; | 911 std::vector<CallStackProfile> profiles; |
| 868 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); | 912 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); |
| 869 ASSERT_EQ(1u, profiles.size()); | 913 ASSERT_EQ(1u, profiles.size()); |
| 870 | 914 |
| 871 profiles.clear(); | 915 profiles.clear(); |
| 872 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); | 916 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); |
| 873 ASSERT_EQ(1u, profiles.size()); | 917 ASSERT_EQ(1u, profiles.size()); |
| 874 } | 918 } |
| 875 | 919 |
| 876 // Checks that requests to start profiling while another profile is taking place | 920 // Checks that additional requests will restart a stopped profiler. |
| 877 // are ignored. | 921 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
| 878 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 922 #define MAYBE_WillRestartSampler WillRestartSampler |
|
Mike Wittman (2017/03/18 01:38:41): WillRestartSamplerAfterIdleShutdown would be a better name.
bcwhite (2017/03/20 21:50:51): Done.
| 879 #define MAYBE_ConcurrentProfiling ConcurrentProfiling | 923 #else |
| 880 #else | 924 #define MAYBE_WillRestartSampler DISABLED_WillRestartSampler |
| 881 #define MAYBE_ConcurrentProfiling DISABLED_ConcurrentProfiling | 925 #endif |
| 882 #endif | 926 TEST(StackSamplingProfilerTest, MAYBE_WillRestartSampler) { |
| 883 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling) { | 927 StackSamplingProfiler::TestAPI::DisableIdleShutdown(); |
| 928 | |
| 929 SamplingParams params; | |
| 930 params.sampling_interval = TimeDelta::FromMilliseconds(0); | |
| 931 params.samples_per_burst = 1; | |
| 932 | |
| 933 std::vector<CallStackProfile> profiles; | |
| 934 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); | |
| 935 ASSERT_EQ(1u, profiles.size()); | |
| 936 | |
| 937 // Capture thread should still be running at this point. | |
| 938 ASSERT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning()); | |
| 939 | |
| 940 // Initiate an "idle" shutdown. The task will be run immediately but on | |
| 941 // another thread so wait for it to complete. | |
| 942 StackSamplingProfiler::TestAPI::InitiateSamplingThreadIdleShutdown(); | |
| 943 while (StackSamplingProfiler::TestAPI::IsSamplingThreadRunning()) | |
|
Mike Wittman (2017/03/18 01:38:41): Thinking about this in terms of the underlying Sta…
bcwhite (2017/03/20 21:50:51): But then you wouldn't be able to tell that the thr…
Mike Wittman (2017/03/21 16:50:38): That's true. But whether the thread stopped after…
bcwhite (2017/03/22 17:48:54): Done.
| 944 PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(1)); | |
| 945 | |
| 946 // Ensure another capture will start the sampling thread and run. | |
| 947 profiles.clear(); | |
| 948 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); | |
| 949 ASSERT_EQ(1u, profiles.size()); | |
| 950 EXPECT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning()); | |
| 951 } | |
| 952 | |
| 953 // Checks that it's safe to stop a task after it's completed and the sampling | |
| 954 // thread has shut down for being idle. | |
| 955 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
| 956 #define MAYBE_StopAfterIdle StopAfterIdle | |
|
Mike Wittman (2017/03/18 01:38:41): StopAfterIdleShutdown would be a better name.
bcwhite (2017/03/20 21:50:51): Done.
| 957 #else | |
| 958 #define MAYBE_StopAfterIdle DISABLED_StopAfterIdle | |
| 959 #endif | |
| 960 TEST(StackSamplingProfilerTest, MAYBE_StopAfterIdle) { | |
| 961 StackSamplingProfiler::TestAPI::DisableIdleShutdown(); | |
| 962 | |
| 963 WithTargetThread([](PlatformThreadId target_thread_id) { | |
| 964 SamplingParams params; | |
| 965 params.sampling_interval = TimeDelta::FromMilliseconds(0); | |
| 966 params.samples_per_burst = 1; | |
| 967 | |
| 968 CallStackProfiles profiles; | |
| 969 WaitableEvent sampling_completed(WaitableEvent::ResetPolicy::MANUAL, | |
| 970 WaitableEvent::InitialState::NOT_SIGNALED); | |
| 971 const StackSamplingProfiler::CompletedCallback callback = | |
| 972 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | |
| 973 Unretained(&sampling_completed)); | |
| 974 StackSamplingProfiler profiler(target_thread_id, params, callback); | |
| 975 | |
| 976 // Let it run and then stop due to being idle. | |
| 977 profiler.Start(); | |
| 978 sampling_completed.Wait(); | |
| 979 StackSamplingProfiler::TestAPI::InitiateSamplingThreadIdleShutdown(); | |
| 980 while (StackSamplingProfiler::TestAPI::IsSamplingThreadRunning()) | |
|
Mike Wittman (2017/03/18 01:38:41): I don't think we need this for the same reason as…
| 981 PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(1)); | |
| 982 | |
| 983 // Ensure it's still safe to stop. | |
| 984 profiler.Stop(); | |
| 985 EXPECT_FALSE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning()); | |
| 986 }); | |
| 987 } | |
| 988 | |
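The two tests above wait for the idle shutdown to take effect by polling TestAPI::IsSamplingThreadRunning() in a sleep loop (the review thread above questions whether that wait is needed at all). If the pattern stays, it could be factored into a small helper; a sketch using only calls already present in this file, with a hypothetical name:

```cpp
// Sketch of a helper wrapping the idle-shutdown polling loop used by the
// tests above (illustration only).
void ShutDownSamplingThreadWhenIdle() {
  StackSamplingProfiler::TestAPI::InitiateSamplingThreadIdleShutdown();
  // The shutdown task runs on the sampling thread, so poll until that thread
  // has actually exited.
  while (StackSamplingProfiler::TestAPI::IsSamplingThreadRunning())
    PlatformThread::Sleep(TimeDelta::FromMilliseconds(1));
}
```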
| 989 // Checks that synchronized multiple sampling requests execute in parallel. | |
| 990 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
| 991 #define MAYBE_ConcurrentProfiling_InSync ConcurrentProfiling_InSync | |
| 992 #else | |
| 993 #define MAYBE_ConcurrentProfiling_InSync DISABLED_ConcurrentProfiling_InSync | |
| 994 #endif | |
| 995 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_InSync) { | |
| 884 WithTargetThread([](PlatformThreadId target_thread_id) { | 996 WithTargetThread([](PlatformThreadId target_thread_id) { |
| 885 SamplingParams params[2]; | 997 SamplingParams params[2]; |
| 998 | |
| 999 // Providing an initial delay makes it more likely that both will be | |
| 1000 // scheduled before either starts to run. Once started, samples will | |
| 1001 // run at their scheduled, interleaved times regardless of whatever | |
|
Mike Wittman (2017/03/18 01:38:41): This doesn't make sense to me. How can the samples…
bcwhite (2017/03/20 21:50:51): Fixed wording.
| 1002 // interval the thread wakes up. | |
| 886 params[0].initial_delay = TimeDelta::FromMilliseconds(10); | 1003 params[0].initial_delay = TimeDelta::FromMilliseconds(10); |
| 887 params[0].sampling_interval = TimeDelta::FromMilliseconds(0); | 1004 params[0].sampling_interval = TimeDelta::FromMilliseconds(1); |
| 888 params[0].samples_per_burst = 1; | 1005 params[0].samples_per_burst = 9; |
| 889 | 1006 |
| 890 params[1].sampling_interval = TimeDelta::FromMilliseconds(0); | 1007 params[1].initial_delay = TimeDelta::FromMilliseconds(11); |
| 891 params[1].samples_per_burst = 1; | 1008 params[1].sampling_interval = TimeDelta::FromMilliseconds(1); |
| 1009 params[1].samples_per_burst = 8; | |
| 892 | 1010 |
| 893 CallStackProfiles profiles[2]; | 1011 CallStackProfiles profiles[2]; |
| 894 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(2); | 1012 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed( |
| 895 std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(2); | 1013 arraysize(params)); |
| 896 for (int i = 0; i < 2; ++i) { | 1014 std::vector<std::unique_ptr<StackSamplingProfiler>> profiler( |
| 897 sampling_completed[i] = | 1015 arraysize(params)); |
| 898 MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC, | 1016 CreateProfilers(target_thread_id, &profiler[0], &sampling_completed[0], |
| 899 WaitableEvent::InitialState::NOT_SIGNALED); | 1017 profiles, params, arraysize(params)); |
| 900 const StackSamplingProfiler::CompletedCallback callback = | |
| 901 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]), | |
| 902 Unretained(sampling_completed[i].get())); | |
| 903 profiler[i] = MakeUnique<StackSamplingProfiler>(target_thread_id, | |
| 904 params[i], callback); | |
| 905 } | |
| 906 | 1018 |
| 907 profiler[0]->Start(); | 1019 profiler[0]->Start(); |
| 908 profiler[1]->Start(); | 1020 profiler[1]->Start(); |
| 909 | 1021 |
| 910 std::vector<WaitableEvent*> sampling_completed_rawptrs( | |
| 911 sampling_completed.size()); | |
| 912 std::transform( | |
| 913 sampling_completed.begin(), sampling_completed.end(), | |
| 914 sampling_completed_rawptrs.begin(), | |
| 915 [](const std::unique_ptr<WaitableEvent>& elem) { return elem.get(); }); | |
| 916 // Wait for one profiler to finish. | 1022 // Wait for one profiler to finish. |
| 917 size_t completed_profiler = | 1023 size_t completed_profiler = WaitForSamplingComplete(&sampling_completed); |
| 918 WaitableEvent::WaitMany(sampling_completed_rawptrs.data(), 2); | |
| 919 EXPECT_EQ(1u, profiles[completed_profiler].size()); | 1024 EXPECT_EQ(1u, profiles[completed_profiler].size()); |
| 920 | 1025 |
| 921 size_t other_profiler = 1 - completed_profiler; | 1026 size_t other_profiler = 1 - completed_profiler; |
| 922 // Give the other profiler a chance to run and observe that it hasn't. | 1027 // Wait for the other profiler to finish. |
| 923 EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait( | |
| 924 TimeDelta::FromMilliseconds(25))); | |
| 925 | |
| 926 // Start the other profiler again and it should run. | |
| 927 profiler[other_profiler]->Start(); | |
| 928 sampling_completed[other_profiler]->Wait(); | 1028 sampling_completed[other_profiler]->Wait(); |
| 929 EXPECT_EQ(1u, profiles[other_profiler].size()); | 1029 EXPECT_EQ(1u, profiles[other_profiler].size()); |
| 930 }); | 1030 |
| 931 } | 1031 // Ensure each got the correct number of samples. |
| 932 | 1032 EXPECT_EQ(9u, profiles[0][0].samples.size()); |
| 1033 EXPECT_EQ(8u, profiles[1][0].samples.size()); | |
| 1034 }); | |
| 1035 } | |
| 1036 | |
| 1037 // Checks that several mixed sampling requests execute in parallel. | |
| 1038 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
| 1039 #define MAYBE_ConcurrentProfiling_Mixed ConcurrentProfiling_Mixed | |
| 1040 #else | |
| 1041 #define MAYBE_ConcurrentProfiling_Mixed DISABLED_ConcurrentProfiling_Mixed | |
| 1042 #endif | |
| 1043 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_Mixed) { | |
| 1044 WithTargetThread([](PlatformThreadId target_thread_id) { | |
| 1045 SamplingParams params[3]; | |
| 1046 params[0].initial_delay = TimeDelta::FromMilliseconds(8); | |
| 1047 params[0].sampling_interval = TimeDelta::FromMilliseconds(4); | |
| 1048 params[0].samples_per_burst = 10; | |
| 1049 | |
| 1050 params[1].initial_delay = TimeDelta::FromMilliseconds(9); | |
| 1051 params[1].sampling_interval = TimeDelta::FromMilliseconds(3); | |
| 1052 params[1].samples_per_burst = 10; | |
| 1053 | |
| 1054 params[2].initial_delay = TimeDelta::FromMilliseconds(10); | |
| 1055 params[2].sampling_interval = TimeDelta::FromMilliseconds(2); | |
| 1056 params[2].samples_per_burst = 10; | |
| 1057 | |
| 1058 CallStackProfiles profiles[arraysize(params)]; | |
| 1059 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed( | |
| 1060 arraysize(params)); | |
| 1061 std::vector<std::unique_ptr<StackSamplingProfiler>> profiler( | |
| 1062 arraysize(params)); | |
| 1063 CreateProfilers(target_thread_id, &profiler[0], &sampling_completed[0], | |
| 1064 profiles, params, arraysize(params)); | |
| 1065 | |
| 1066 for (size_t i = 0; i < profiler.size(); ++i) | |
| 1067 profiler[i]->Start(); | |
| 1068 | |
| 1069 // Wait for one profiler to finish. | |
| 1070 size_t completed_profiler = WaitForSamplingComplete(&sampling_completed); | |
| 1071 EXPECT_EQ(1u, profiles[completed_profiler].size()); | |
| 1072 // Stop and destroy all profilers, always in the same order. Don't crash. | |
| 1073 for (size_t i = 0; i < profiler.size(); ++i) | |
| 1074 profiler[i]->Stop(); | |
| 1075 for (size_t i = 0; i < profiler.size(); ++i) | |
| 1076 profiler[i].reset(); | |
| 1077 }); | |
| 1078 } | |
| 1079 | |
| 1080 // Checks that sampling requests execute in a staggered manner. | |
| 1081 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
| 1082 #define MAYBE_ConcurrentProfiling_Staggered ConcurrentProfiling_Staggered | |
| 1083 #else | |
| 1084 #define MAYBE_ConcurrentProfiling_Staggered \ | |
| 1085 DISABLED_ConcurrentProfiling_Staggered | |
| 1086 #endif | |
| 1087 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_Staggered) { | |
| 1088 WithTargetThread([](PlatformThreadId target_thread_id) { | |
| 1089 SamplingParams params[3]; | |
| 1090 params[0].initial_delay = TimeDelta::FromMilliseconds(10); | |
| 1091 params[0].sampling_interval = TimeDelta::FromMilliseconds(10); | |
| 1092 params[0].samples_per_burst = 1; | |
| 1093 | |
| 1094 params[1].initial_delay = TimeDelta::FromMilliseconds(5); | |
| 1095 params[1].sampling_interval = TimeDelta::FromMilliseconds(10); | |
| 1096 params[1].samples_per_burst = 2; | |
| 1097 | |
| 1098 params[2].initial_delay = TimeDelta::FromMilliseconds(0); | |
| 1099 params[2].sampling_interval = TimeDelta::FromMilliseconds(10); | |
| 1100 params[2].samples_per_burst = 3; | |
| 1101 | |
| 1102 CallStackProfiles profiles[arraysize(params)]; | |
| 1103 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed( | |
| 1104 arraysize(params)); | |
| 1105 std::vector<std::unique_ptr<StackSamplingProfiler>> profiler( | |
| 1106 arraysize(params)); | |
| 1107 CreateProfilers(target_thread_id, &profiler[0], &sampling_completed[0], | |
| 1108 profiles, params, arraysize(params)); | |
| 1109 | |
| 1110 profiler[0]->Start(); | |
|
Mike Wittman (2017/03/18 01:38:41): How does the ordering of the Start and Stop calls…
bcwhite (2017/03/20 21:50:51): They don't. It's three different sampling paramet…
Mike Wittman (2017/03/21 16:50:38): In that case, if there's still a motivation for th…
bcwhite (2017/03/22 17:48:54): No motivation, per se. If you don't feel that st…
Mike Wittman (2017/03/23 22:18:31): Removing SGTM. Any specific behaviors exercised by…
bcwhite (2017/03/27 17:52:43): Done.
| 1111 profiler[1]->Start(); | |
| 1112 sampling_completed[0]->Wait(); | |
| 1113 profiler[2]->Start(); | |
| 1114 profiler[0]->Stop(); | |
| 1115 profiler[1]->Stop(); | |
| 1116 sampling_completed[1]->Wait(); | |
| 1117 sampling_completed[2]->Wait(); | |
| 1118 EXPECT_EQ(1u, profiles[0].size()); | |
| 1119 EXPECT_EQ(1u, profiles[1].size()); | |
| 1120 EXPECT_EQ(1u, profiles[2].size()); | |
| 1121 }); | |
| 1122 } | |
| 1123 | |
| 933 // Checks that a stack that runs through another library produces a stack with | 1124 // Checks that a stack that runs through another library produces a stack with |
| 934 // the expected functions. | 1125 // the expected functions. |
| 935 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 1126 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
| 936 #define MAYBE_OtherLibrary OtherLibrary | 1127 #define MAYBE_OtherLibrary OtherLibrary |
| 937 #else | 1128 #else |
| 938 #define MAYBE_OtherLibrary DISABLED_OtherLibrary | 1129 #define MAYBE_OtherLibrary DISABLED_OtherLibrary |
| 939 #endif | 1130 #endif |
| 940 TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) { | 1131 TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) { |
| 941 SamplingParams params; | 1132 SamplingParams params; |
| 942 params.sampling_interval = TimeDelta::FromMilliseconds(0); | 1133 params.sampling_interval = TimeDelta::FromMilliseconds(0); |
| (...skipping 74 matching lines...) | |
| 1017 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 1208 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
| 1018 #define MAYBE_UnloadedLibrary UnloadedLibrary | 1209 #define MAYBE_UnloadedLibrary UnloadedLibrary |
| 1019 #else | 1210 #else |
| 1020 #define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary | 1211 #define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary |
| 1021 #endif | 1212 #endif |
| 1022 TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) { | 1213 TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) { |
| 1023 TestLibraryUnload(true); | 1214 TestLibraryUnload(true); |
| 1024 } | 1215 } |
| 1025 | 1216 |
| 1026 } // namespace base | 1217 } // namespace base |