Side by Side Diff: base/profiler/stack_sampling_profiler_unittest.cc

Issue 2554123002: Support parallel captures from the StackSamplingProfiler. (Closed)
Patch Set: addressed review comments by wittman (created 3 years, 9 months ago)
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <stddef.h>
#include <stdint.h>

#include <cstdlib>
#include <memory>
#include <utility>
(...skipping 356 matching lines...)
        Bind(&SaveProfilesAndSignalEvent, Unretained(profiles),
             Unretained(&sampling_thread_completed));
    StackSamplingProfiler profiler(target_thread_id, params, callback);
    profiler.Start();
    sampling_thread_completed.TimedWait(profiler_wait_time);
    profiler.Stop();
    sampling_thread_completed.Wait();
  });
}

+// Waits for one of multiple samplings to complete.
+size_t WaitForSamplingComplete(
+    std::vector<std::unique_ptr<WaitableEvent>>* sampling_completed) {
+  // Map unique_ptrs to something that WaitMany can accept.
+  std::vector<WaitableEvent*> sampling_completed_rawptrs(
+      sampling_completed->size());
+  std::transform(
+      sampling_completed->begin(), sampling_completed->end(),
+      sampling_completed_rawptrs.begin(),
+      [](const std::unique_ptr<WaitableEvent>& elem) { return elem.get(); });
+  // Wait for one profiler to finish.
+  return WaitableEvent::WaitMany(sampling_completed_rawptrs.data(),
+                                 sampling_completed_rawptrs.size());
+}
+
// If this executable was linked with /INCREMENTAL (the default for non-official
// debug and release builds on Windows), function addresses do not correspond to
// function code itself, but instead to instructions in the Incremental Link
// Table that jump to the functions. Checks for a jump instruction and if
// present does a little decompilation to find the function's actual starting
// address.
const void* MaybeFixupFunctionAddressForILT(const void* function_address) {
#if defined(_WIN64)
  const unsigned char* opcode =
      reinterpret_cast<const unsigned char*>(function_address);
(...skipping 50 matching lines...)
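
The elided body checks whether the address points at a jump instruction and, if so, follows it to the real function. A minimal standalone sketch of that idea (illustrative only, not the patch's code; it assumes the ILT thunk is a 5-byte rel32 JMP, opcode 0xE9, and the helper name is made up):

    #include <cstdint>
    #include <cstring>

    // Hypothetical helper: if |function_address| points at a "jmp rel32"
    // thunk, return the jump target; otherwise return the address unchanged.
    const void* ResolveJumpThunk(const void* function_address) {
      const unsigned char* opcode =
          static_cast<const unsigned char*>(function_address);
      if (*opcode == 0xE9) {  // x86-64 JMP rel32.
        int32_t displacement;
        std::memcpy(&displacement, opcode + 1, sizeof(displacement));
        // Target = address of the next (5-byte) instruction plus displacement.
        return opcode + 5 + displacement;
      }
      return function_address;
    }
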
// asynchronous library unloading has completed before walking the stack. If
// false, the unloading may still be occurring during the stack walk.
void TestLibraryUnload(bool wait_until_unloaded) {
  // Test delegate that supports intervening between the copying of the stack
  // and the walking of the stack.
  class StackCopiedSignaler : public NativeStackSamplerTestDelegate {
   public:
    StackCopiedSignaler(WaitableEvent* stack_copied,
                        WaitableEvent* start_stack_walk,
                        bool wait_to_walk_stack)
-        : stack_copied_(stack_copied), start_stack_walk_(start_stack_walk),
-          wait_to_walk_stack_(wait_to_walk_stack) {
-    }
+        : stack_copied_(stack_copied),
+          start_stack_walk_(start_stack_walk),
+          wait_to_walk_stack_(wait_to_walk_stack) {}

    void OnPreStackWalk() override {
      stack_copied_->Signal();
      if (wait_to_walk_stack_)
        start_stack_walk_->Wait();
    }

   private:
    WaitableEvent* const stack_copied_;
    WaitableEvent* const start_stack_walk_;
(...skipping 253 matching lines...)
                  reinterpret_cast<const void*>(&TargetThread::CallWithAlloca))
      << " was not found in stack:\n"
      << FormatSampleForDiagnosticOutput(sample, profile.modules);

  // These frames should be adjacent on the stack.
  EXPECT_EQ(1, alloca_frame - end_frame)
      << "Stack:\n"
      << FormatSampleForDiagnosticOutput(sample, profile.modules);
}

-// Checks that the fire-and-forget interface works.
-#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
-#define MAYBE_StartAndRunAsync StartAndRunAsync
-#else
-#define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync
-#endif
-TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) {
-  // StartAndRunAsync requires the caller to have a message loop.
-  MessageLoop message_loop;
-
-  SamplingParams params;
-  params.samples_per_burst = 1;
-
-  CallStackProfiles profiles;
-  WithTargetThread([&params, &profiles](PlatformThreadId target_thread_id) {
-    WaitableEvent sampling_thread_completed(
-        WaitableEvent::ResetPolicy::AUTOMATIC,
-        WaitableEvent::InitialState::NOT_SIGNALED);
-    const StackSamplingProfiler::CompletedCallback callback =
-        Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles),
-             Unretained(&sampling_thread_completed));
-    StackSamplingProfiler::StartAndRunAsync(target_thread_id, params, callback);
-    RunLoop().RunUntilIdle();
-    sampling_thread_completed.Wait();
-  });
-
-  ASSERT_EQ(1u, profiles.size());
-}
-
// Checks that the expected number of profiles and samples are present in the
// call stack profiles produced.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_MultipleProfilesAndSamples MultipleProfilesAndSamples
#else
#define MAYBE_MultipleProfilesAndSamples DISABLED_MultipleProfilesAndSamples
#endif
TEST(StackSamplingProfilerTest, MAYBE_MultipleProfilesAndSamples) {
  SamplingParams params;
  params.burst_interval = params.sampling_interval =
(...skipping 91 matching lines...)
  });
}

// Checks that the same profiler may be run multiple times.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_CanRunMultipleTimes CanRunMultipleTimes
#else
#define MAYBE_CanRunMultipleTimes DISABLED_CanRunMultipleTimes
#endif
TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleTimes) {
+  StackSamplingProfiler::TestAPI::DisableIdleShutdown();
+
+  WithTargetThread([](PlatformThreadId target_thread_id) {
+    SamplingParams params;
+    params.sampling_interval = TimeDelta::FromMilliseconds(0);
+    params.samples_per_burst = 1;
+
+    CallStackProfiles profiles;
+    WaitableEvent sampling_completed(WaitableEvent::ResetPolicy::MANUAL,
+                                     WaitableEvent::InitialState::NOT_SIGNALED);
+    const StackSamplingProfiler::CompletedCallback callback =
+        Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles),
+             Unretained(&sampling_completed));
+    StackSamplingProfiler profiler(target_thread_id, params, callback);
+
+    // Just start and stop to execute code paths.
+    profiler.Start();
+    profiler.Stop();
+    sampling_completed.Wait();
+
+    // Ensure a second request will run and not block.
+    sampling_completed.Reset();
+    profiles.clear();
+    profiler.Start();
+    sampling_completed.Wait();
+    profiler.Stop();
+    ASSERT_EQ(1u, profiles.size());
+  });
+}
+
+// Checks that different profilers may be run.
+#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
+#define MAYBE_CanRunMultipleProfilers CanRunMultipleProfilers
+#else
+#define MAYBE_CanRunMultipleProfilers DISABLED_CanRunMultipleProfilers
+#endif
+TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleProfilers) {
+  StackSamplingProfiler::TestAPI::DisableIdleShutdown();
+
  SamplingParams params;
  params.sampling_interval = TimeDelta::FromMilliseconds(0);
  params.samples_per_burst = 1;

  std::vector<CallStackProfile> profiles;
  CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
  ASSERT_EQ(1u, profiles.size());

  profiles.clear();
  CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
  ASSERT_EQ(1u, profiles.size());
}

-// Checks that requests to start profiling while another profile is taking place
-// are ignored.
+// Checks that additional requests will restart a stopped profiler.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
-#define MAYBE_ConcurrentProfiling ConcurrentProfiling
+#define MAYBE_WillRestartSampler WillRestartSampler
#else
-#define MAYBE_ConcurrentProfiling DISABLED_ConcurrentProfiling
+#define MAYBE_WillRestartSampler DISABLED_WillRestartSampler
#endif
-TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling) {
+TEST(StackSamplingProfilerTest, MAYBE_WillRestartSampler) {
+  StackSamplingProfiler::TestAPI::DisableIdleShutdown();
+
+  SamplingParams params;
+  params.sampling_interval = TimeDelta::FromMilliseconds(0);
+  params.samples_per_burst = 1;
+
+  std::vector<CallStackProfile> profiles;
+  CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
+  ASSERT_EQ(1u, profiles.size());
+
+  // Capture thread should still be running at this point.
+  ASSERT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
+
+  // Initiate an "idle" shutdown. The task will be run immediately but on
+  // another thread so wait for it to complete.
+  StackSamplingProfiler::TestAPI::InitiateSamplingThreadIdleShutdown();
+  while (StackSamplingProfiler::TestAPI::IsSamplingThreadRunning())
+    PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(100));
+
+  // Ensure another capture will start the sampling thread and run.
+  profiles.clear();
+  CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
+  ASSERT_EQ(1u, profiles.size());
+  EXPECT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
+}
+
+// Checks that synchronized multiple sampling requests execute in parallel.
+#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
+#define MAYBE_ConcurrentProfiling_InSync ConcurrentProfiling_InSync
+#else
+#define MAYBE_ConcurrentProfiling_InSync DISABLED_ConcurrentProfiling_InSync
+#endif
+TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_InSync) {
Mike Wittman 2017/03/14 18:57:34 General comments on testing this functionality, no
bcwhite 2017/03/16 15:56:25 GetOrCreateTaskRunnerForAdd: - state==RUNNING: tes
Mike Wittman 2017/03/18 01:38:41 Relying on existing tests is OK if the test is obv
bcwhite 2017/03/20 21:50:51 I've disabled the idle shutdown in those tests. 6
Mike Wittman 2017/03/21 16:50:38 To repeat: Relying on existing tests is OK if the
bcwhite 2017/03/22 17:48:54 This IS the dedicated test! StopAfterIdleShutdown
Mike Wittman 2017/03/23 22:18:30 This is still pretty subtle and could use some eve
bcwhite 2017/03/27 17:52:43 Done.
Mike Wittman 2017/03/28 19:32:01 Please split this out into a dedicated test. It's
bcwhite 2017/03/29 14:56:57 That would be pretty much the same as StopDuringIn
Mike Wittman 2017/03/30 16:18:38 Yes. We might as well replace StopDuringInterSampl
bcwhite 2017/03/30 18:54:50 Done.
  WithTargetThread([](PlatformThreadId target_thread_id) {
    SamplingParams params[2];
    params[0].initial_delay = TimeDelta::FromMilliseconds(10);
-    params[0].sampling_interval = TimeDelta::FromMilliseconds(0);
-    params[0].samples_per_burst = 1;
+    params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
+    params[0].samples_per_burst = 9;

-    params[1].sampling_interval = TimeDelta::FromMilliseconds(0);
-    params[1].samples_per_burst = 1;
+    params[1].initial_delay = TimeDelta::FromMilliseconds(11);
+    params[1].sampling_interval = TimeDelta::FromMilliseconds(1);
+    params[1].samples_per_burst = 8;

    CallStackProfiles profiles[2];
-    std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(2);
-    std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(2);
-    for (int i = 0; i < 2; ++i) {
+    std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(
+        arraysize(params));
+    std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(
+        arraysize(params));
+    for (size_t i = 0; i < arraysize(params); ++i) {
      sampling_completed[i] =
          MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
                                    WaitableEvent::InitialState::NOT_SIGNALED);
      const StackSamplingProfiler::CompletedCallback callback =
          Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]),
               Unretained(sampling_completed[i].get()));
      profiler[i] = MakeUnique<StackSamplingProfiler>(target_thread_id,
                                                      params[i], callback);
    }

    profiler[0]->Start();
    profiler[1]->Start();

-    std::vector<WaitableEvent*> sampling_completed_rawptrs(
-        sampling_completed.size());
-    std::transform(
-        sampling_completed.begin(), sampling_completed.end(),
-        sampling_completed_rawptrs.begin(),
-        [](const std::unique_ptr<WaitableEvent>& elem) { return elem.get(); });
    // Wait for one profiler to finish.
-    size_t completed_profiler =
-        WaitableEvent::WaitMany(sampling_completed_rawptrs.data(), 2);
+    size_t completed_profiler = WaitForSamplingComplete(&sampling_completed);
    EXPECT_EQ(1u, profiles[completed_profiler].size());

    size_t other_profiler = 1 - completed_profiler;
-    // Give the other profiler a chance to run and observe that it hasn't.
-    EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait(
-        TimeDelta::FromMilliseconds(25)));
-
-    // Start the other profiler again and it should run.
-    profiler[other_profiler]->Start();
+    // Wait for the other profiler to finish.
    sampling_completed[other_profiler]->Wait();
    EXPECT_EQ(1u, profiles[other_profiler].size());
+
+    // Ensure each got the correct number of samples.
+    EXPECT_EQ(9u, profiles[0][0].samples.size());
+    EXPECT_EQ(8u, profiles[1][0].samples.size());
  });
}

+// Checks that interleaved multiple sampling requests execute in parallel.
+#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
+#define MAYBE_ConcurrentProfiling_Interleaved ConcurrentProfiling_Interleaved
+#else
+#define MAYBE_ConcurrentProfiling_Interleaved \
+  DISABLED_ConcurrentProfiling_Interleaved
+#endif
+TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_Interleaved) {
+  WithTargetThread([](PlatformThreadId target_thread_id) {
+    SamplingParams params[2];
+    params[0].initial_delay = TimeDelta::FromMilliseconds(1);
+    params[0].sampling_interval = TimeDelta::FromMilliseconds(2);
+    params[0].samples_per_burst = 10;
+
+    params[1].initial_delay = TimeDelta::FromMilliseconds(2);
+    params[1].sampling_interval = TimeDelta::FromMilliseconds(2);
+    params[1].samples_per_burst = 10;
+
+    CallStackProfiles profiles[2];
+    std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(
+        arraysize(params));
+    std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(
+        arraysize(params));
+    for (size_t i = 0; i < arraysize(params); ++i) {
+      sampling_completed[i] =
+          MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
+                                    WaitableEvent::InitialState::NOT_SIGNALED);
+      const StackSamplingProfiler::CompletedCallback callback =
+          Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]),
+               Unretained(sampling_completed[i].get()));
+      profiler[i] = MakeUnique<StackSamplingProfiler>(target_thread_id,
+                                                      params[i], callback);
+    }
+
+    for (size_t i = 0; i < profiler.size(); ++i)
+      profiler[i]->Start();
+
+    // Wait for one profiler to finish.
+    size_t completed_profiler = WaitForSamplingComplete(&sampling_completed);
+    EXPECT_EQ(1u, profiles[completed_profiler].size());
+    // Stop and destroy all profilers, always in the some order. Don't crash.
Mike Wittman 2017/03/14 18:57:33 nit: same
bcwhite 2017/03/16 15:56:25 Done.
+    for (size_t i = 0; i < profiler.size(); ++i)
+      profiler[i]->Stop();
+    for (size_t i = 0; i < profiler.size(); ++i)
+      profiler[i].reset();
+  });
+}
+
+// Checks that several mixed sampling requests execute in parallel.
+#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
+#define MAYBE_ConcurrentProfiling_Mixed ConcurrentProfiling_Mixed
+#else
+#define MAYBE_ConcurrentProfiling_Mixed DISABLED_ConcurrentProfiling_Mixed
+#endif
+TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_Mixed) {
+  WithTargetThread([](PlatformThreadId target_thread_id) {
+    SamplingParams params[3];
+    params[0].initial_delay = TimeDelta::FromMilliseconds(8);
+    params[0].sampling_interval = TimeDelta::FromMilliseconds(4);
+    params[0].samples_per_burst = 10;
+
+    params[1].initial_delay = TimeDelta::FromMilliseconds(9);
+    params[1].sampling_interval = TimeDelta::FromMilliseconds(3);
+    params[1].samples_per_burst = 10;
+
+    params[2].initial_delay = TimeDelta::FromMilliseconds(10);
+    params[2].sampling_interval = TimeDelta::FromMilliseconds(2);
+    params[2].samples_per_burst = 10;
+
+    CallStackProfiles profiles[arraysize(params)];
+    std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(
+        arraysize(params));
+    std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(
+        arraysize(params));
+    for (size_t i = 0; i < arraysize(params); ++i) {
+      sampling_completed[i] =
+          MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
+                                    WaitableEvent::InitialState::NOT_SIGNALED);
+      const StackSamplingProfiler::CompletedCallback callback =
+          Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]),
+               Unretained(sampling_completed[i].get()));
+      profiler[i] = MakeUnique<StackSamplingProfiler>(target_thread_id,
+                                                      params[i], callback);
+    }
+
+    for (size_t i = 0; i < profiler.size(); ++i)
+      profiler[i]->Start();
+
+    // Wait for one profiler to finish.
+    size_t completed_profiler = WaitForSamplingComplete(&sampling_completed);
+    EXPECT_EQ(1u, profiles[completed_profiler].size());
+    // Stop and destroy all profilers, always in the some order. Don't crash.
Mike Wittman 2017/03/14 18:57:34 nit: same
bcwhite 2017/03/16 15:56:25 Done.
+    for (size_t i = 0; i < profiler.size(); ++i)
+      profiler[i].reset();
+  });
+}
+
+// Checks that sampling requests execute in a staggered manner.
+#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
+#define MAYBE_ConcurrentProfiling_Staggered ConcurrentProfiling_Staggered
+#else
+#define MAYBE_ConcurrentProfiling_Staggered \
+  DISABLED_ConcurrentProfiling_Staggered
+#endif
+TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_Staggered) {
+  WithTargetThread([](PlatformThreadId target_thread_id) {
+    SamplingParams params[3];
+    params[0].initial_delay = TimeDelta::FromMilliseconds(10);
+    params[0].sampling_interval = TimeDelta::FromMilliseconds(10);
+    params[0].samples_per_burst = 1;
+
+    params[1].initial_delay = TimeDelta::FromMilliseconds(5);
+    params[1].sampling_interval = TimeDelta::FromMilliseconds(10);
+    params[1].samples_per_burst = 2;
+
+    params[2].initial_delay = TimeDelta::FromMilliseconds(0);
+    params[2].sampling_interval = TimeDelta::FromMilliseconds(10);
+    params[2].samples_per_burst = 3;
+
+    CallStackProfiles profiles[arraysize(params)];
+    std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(
+        arraysize(params));
+    std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(
+        arraysize(params));
+    for (size_t i = 0; i < arraysize(params); ++i) {
+      sampling_completed[i] =
+          MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
+                                    WaitableEvent::InitialState::NOT_SIGNALED);
+      const StackSamplingProfiler::CompletedCallback callback =
+          Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]),
+               Unretained(sampling_completed[i].get()));
+      profiler[i] = MakeUnique<StackSamplingProfiler>(target_thread_id,
+                                                      params[i], callback);
+    }
+
+    profiler[0]->Start();
+    profiler[1]->Start();
+    sampling_completed[0]->Wait();
+    EXPECT_FALSE(sampling_completed[1]->IsSignaled());
+    profiler[2]->Start();
+    profiler[0]->Stop();
+    profiler[1]->Stop();
+    sampling_completed[1]->Wait();
+    EXPECT_FALSE(sampling_completed[2]->IsSignaled());
+    sampling_completed[2]->Wait();
+    EXPECT_EQ(1u, profiles[0].size());
+    EXPECT_EQ(1u, profiles[1].size());
+    EXPECT_EQ(1u, profiles[2].size());
+  });
+}
+
// Checks that a stack that runs through another library produces a stack with
// the expected functions.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_OtherLibrary OtherLibrary
#else
#define MAYBE_OtherLibrary DISABLED_OtherLibrary
#endif
TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) {
  SamplingParams params;
  params.sampling_interval = TimeDelta::FromMilliseconds(0);
(...skipping 74 matching lines...)
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_UnloadedLibrary UnloadedLibrary
#else
#define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary
#endif
TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) {
  TestLibraryUnload(true);
}

}  // namespace base