Chromium Code Reviews

Side by Side Diff: base/profiler/stack_sampling_profiler_unittest.cc

Issue 2554123002: Support parallel captures from the StackSamplingProfiler. (Closed)
Patch Set: more tests; improved tests Created 3 years, 9 months ago
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <stddef.h> 5 #include <stddef.h>
6 #include <stdint.h> 6 #include <stdint.h>
7 7
8 #include <cstdlib> 8 #include <cstdlib>
9 #include <memory> 9 #include <memory>
10 #include <utility> 10 #include <utility>
11 #include <vector> 11 #include <vector>
12 12
13 #include "base/atomicops.h"
13 #include "base/bind.h" 14 #include "base/bind.h"
14 #include "base/compiler_specific.h" 15 #include "base/compiler_specific.h"
15 #include "base/macros.h" 16 #include "base/macros.h"
16 #include "base/memory/ptr_util.h" 17 #include "base/memory/ptr_util.h"
17 #include "base/message_loop/message_loop.h"
18 #include "base/native_library.h" 18 #include "base/native_library.h"
19 #include "base/path_service.h" 19 #include "base/path_service.h"
20 #include "base/profiler/native_stack_sampler.h" 20 #include "base/profiler/native_stack_sampler.h"
21 #include "base/profiler/stack_sampling_profiler.h" 21 #include "base/profiler/stack_sampling_profiler.h"
22 #include "base/run_loop.h" 22 #include "base/run_loop.h"
23 #include "base/scoped_native_library.h" 23 #include "base/scoped_native_library.h"
24 #include "base/strings/stringprintf.h" 24 #include "base/strings/stringprintf.h"
25 #include "base/strings/utf_string_conversions.h" 25 #include "base/strings/utf_string_conversions.h"
26 #include "base/synchronization/waitable_event.h" 26 #include "base/synchronization/waitable_event.h"
27 #include "base/threading/platform_thread.h" 27 #include "base/threading/platform_thread.h"
(...skipping 316 matching lines...)
344 target_thread.SignalThreadToFinish(); 344 target_thread.SignalThreadToFinish();
345 345
346 PlatformThread::Join(target_thread_handle); 346 PlatformThread::Join(target_thread_handle);
347 } 347 }
348 348
349 template <class Function> 349 template <class Function>
350 void WithTargetThread(Function function) { 350 void WithTargetThread(Function function) {
351 WithTargetThread(function, StackConfiguration(StackConfiguration::NORMAL)); 351 WithTargetThread(function, StackConfiguration(StackConfiguration::NORMAL));
352 } 352 }
353 353
354 // Creates a profiler for each entry in |params|. If |delegates| is provided,
355 // the vector must be pre-populated and of the same size as |params|.
356 void CreateProfilers(
357 PlatformThreadId target_thread_id,
358 const std::vector<SamplingParams>& params,
359 std::vector<std::unique_ptr<NativeStackSamplerTestDelegate>>* delegates,
360 std::vector<CallStackProfiles>* profiles,
361 std::vector<std::unique_ptr<StackSamplingProfiler>>* profilers,
362 std::vector<std::unique_ptr<WaitableEvent>>* completed) {
363 ASSERT_TRUE(!params.empty());
364 ASSERT_TRUE(profiles->empty());
365 ASSERT_TRUE(profilers->empty());
366 ASSERT_TRUE(completed->empty());
367 if (delegates)
368 ASSERT_EQ(params.size(), delegates->size());
369
370 // Vectors have to be appropriately sized in advance so that the addresses of
371 // values don't change.
372 const size_t count = params.size();
373 profiles->reserve(count);
374 profilers->reserve(count);
375 completed->reserve(count);
376
377 for (size_t i = 0; i < count; ++i) {
378 profiles->push_back(CallStackProfiles());
379 completed->push_back(
380 MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC,
381 WaitableEvent::InitialState::NOT_SIGNALED));
382 const StackSamplingProfiler::CompletedCallback callback =
383 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles->back()),
384 Unretained(completed->back().get()));
385 if (delegates) {
Mike Wittman 2017/03/28 19:32:12 shorter: profilers->push_back(MakeUnique<StackSamp
bcwhite 2017/03/29 14:56:58 Done.
386 profilers->push_back(MakeUnique<StackSamplingProfiler>(
387 target_thread_id, params[i], callback, delegates->at(i).get()));
Mike Wittman 2017/03/28 19:32:02 (*delegates)[i].get() vector<>::at() is no differ
bcwhite 2017/03/29 14:56:59 at() is required when working with const vectors (
Mike Wittman 2017/03/30 16:18:38 The const change is good. at() is not required. ve
bcwhite 2017/03/30 18:54:51 Right. It's maps that don't allow [] with const.
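For context on the exchange above: at() is optional for const vectors but required for const maps, because std::map's operator[] may insert and therefore has no const overload. A minimal, hypothetical illustration, not part of the patch:

  #include <map>
  #include <vector>

  int ReadElements(const std::vector<int>& v, const std::map<int, int>& m) {
    int from_vector = v[0];  // OK: std::vector provides a const operator[].
    // int bad = m[0];       // Does not compile: std::map has no const operator[].
    int from_map = m.at(0);  // OK: at() has const overloads on both containers.
    return from_vector + from_map;
  }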
388 } else {
389 profilers->push_back(MakeUnique<StackSamplingProfiler>(
390 target_thread_id, params[i], callback));
391 }
392 }
393 }
394
354 // Captures profiles as specified by |params| on the TargetThread, and returns 395 // Captures profiles as specified by |params| on the TargetThread, and returns
355 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to 396 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to
356 // complete. 397 // complete.
357 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, 398 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time,
358 CallStackProfiles* profiles) { 399 CallStackProfiles* profiles) {
359 profiles->clear(); 400 profiles->clear();
360 401
361 WithTargetThread([&params, profiles, 402 WithTargetThread([&params, profiles,
362 profiler_wait_time](PlatformThreadId target_thread_id) { 403 profiler_wait_time](PlatformThreadId target_thread_id) {
363 WaitableEvent sampling_thread_completed( 404 WaitableEvent sampling_thread_completed(
364 WaitableEvent::ResetPolicy::MANUAL, 405 WaitableEvent::ResetPolicy::MANUAL,
365 WaitableEvent::InitialState::NOT_SIGNALED); 406 WaitableEvent::InitialState::NOT_SIGNALED);
366 const StackSamplingProfiler::CompletedCallback callback = 407 const StackSamplingProfiler::CompletedCallback callback =
367 Bind(&SaveProfilesAndSignalEvent, Unretained(profiles), 408 Bind(&SaveProfilesAndSignalEvent, Unretained(profiles),
368 Unretained(&sampling_thread_completed)); 409 Unretained(&sampling_thread_completed));
369 StackSamplingProfiler profiler(target_thread_id, params, callback); 410 StackSamplingProfiler profiler(target_thread_id, params, callback);
370 profiler.Start(); 411 profiler.Start();
371 sampling_thread_completed.TimedWait(profiler_wait_time); 412 sampling_thread_completed.TimedWait(profiler_wait_time);
372 profiler.Stop(); 413 profiler.Stop();
373 sampling_thread_completed.Wait(); 414 sampling_thread_completed.Wait();
374 }); 415 });
375 } 416 }
376 417
418 // Waits for one of multiple samplings to complete and returns its index.
419 size_t WaitForSamplingComplete(
420 std::vector<std::unique_ptr<WaitableEvent>>* sampling_completed) {
421 // Map unique_ptrs to something that WaitMany can accept.
422 std::vector<WaitableEvent*> sampling_completed_rawptrs(
423 sampling_completed->size());
424 std::transform(
425 sampling_completed->begin(), sampling_completed->end(),
426 sampling_completed_rawptrs.begin(),
427 [](const std::unique_ptr<WaitableEvent>& elem) { return elem.get(); });
428 // Wait for one profiler to finish.
429 return WaitableEvent::WaitMany(sampling_completed_rawptrs.data(),
430 sampling_completed_rawptrs.size());
431 }
432
377 // If this executable was linked with /INCREMENTAL (the default for non-official 433 // If this executable was linked with /INCREMENTAL (the default for non-official
378 // debug and release builds on Windows), function addresses do not correspond to 434 // debug and release builds on Windows), function addresses do not correspond to
379 // function code itself, but instead to instructions in the Incremental Link 435 // function code itself, but instead to instructions in the Incremental Link
380 // Table that jump to the functions. Checks for a jump instruction and if 436 // Table that jump to the functions. Checks for a jump instruction and if
381 // present does a little decompilation to find the function's actual starting 437 // present does a little decompilation to find the function's actual starting
382 // address. 438 // address.
383 const void* MaybeFixupFunctionAddressForILT(const void* function_address) { 439 const void* MaybeFixupFunctionAddressForILT(const void* function_address) {
384 #if defined(_WIN64) 440 #if defined(_WIN64)
385 const unsigned char* opcode = 441 const unsigned char* opcode =
386 reinterpret_cast<const unsigned char*>(function_address); 442 reinterpret_cast<const unsigned char*>(function_address);
(...skipping 50 matching lines...)
437 // asynchronous library unloading has completed before walking the stack. If 493 // asynchronous library unloading has completed before walking the stack. If
438 // false, the unloading may still be occurring during the stack walk. 494 // false, the unloading may still be occurring during the stack walk.
439 void TestLibraryUnload(bool wait_until_unloaded) { 495 void TestLibraryUnload(bool wait_until_unloaded) {
440 // Test delegate that supports intervening between the copying of the stack 496 // Test delegate that supports intervening between the copying of the stack
441 // and the walking of the stack. 497 // and the walking of the stack.
442 class StackCopiedSignaler : public NativeStackSamplerTestDelegate { 498 class StackCopiedSignaler : public NativeStackSamplerTestDelegate {
443 public: 499 public:
444 StackCopiedSignaler(WaitableEvent* stack_copied, 500 StackCopiedSignaler(WaitableEvent* stack_copied,
445 WaitableEvent* start_stack_walk, 501 WaitableEvent* start_stack_walk,
446 bool wait_to_walk_stack) 502 bool wait_to_walk_stack)
447 : stack_copied_(stack_copied), start_stack_walk_(start_stack_walk), 503 : stack_copied_(stack_copied),
448 wait_to_walk_stack_(wait_to_walk_stack) { 504 start_stack_walk_(start_stack_walk),
449 } 505 wait_to_walk_stack_(wait_to_walk_stack) {}
450 506
451 void OnPreStackWalk() override { 507 void OnPreStackWalk() override {
452 stack_copied_->Signal(); 508 stack_copied_->Signal();
453 if (wait_to_walk_stack_) 509 if (wait_to_walk_stack_)
454 start_stack_walk_->Wait(); 510 start_stack_walk_->Wait();
455 } 511 }
456 512
457 private: 513 private:
458 WaitableEvent* const stack_copied_; 514 WaitableEvent* const stack_copied_;
459 WaitableEvent* const start_stack_walk_; 515 WaitableEvent* const start_stack_walk_;
(...skipping 120 matching lines...)
580 } // namespace 636 } // namespace
581 637
582 // Checks that the basic expected information is present in a sampled call stack 638 // Checks that the basic expected information is present in a sampled call stack
583 // profile. 639 // profile.
584 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 640 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
585 #define MAYBE_Basic Basic 641 #define MAYBE_Basic Basic
586 #else 642 #else
587 #define MAYBE_Basic DISABLED_Basic 643 #define MAYBE_Basic DISABLED_Basic
588 #endif 644 #endif
589 TEST(StackSamplingProfilerTest, MAYBE_Basic) { 645 TEST(StackSamplingProfilerTest, MAYBE_Basic) {
646 StackSamplingProfiler::TestAPI::Reset();
Mike Wittman 2017/03/28 19:32:07 Why do only some of the tests use Reset()?
bcwhite 2017/03/29 14:56:58 It's only necessary for tests that deal with the s
Mike Wittman 2017/03/30 16:18:38 If we need to clean up state to make the test runs
bcwhite 2017/03/30 18:54:51 Done.
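One way to make this cleanup uniform is a test fixture whose SetUp() resets profiler state before every test. A sketch only, assuming the TestAPI::Reset() introduced by this patch; tests would then use TEST_F instead of TEST:

  class StackSamplingProfilerTestFixture : public testing::Test {
   protected:
    void SetUp() override { StackSamplingProfiler::TestAPI::Reset(); }
  };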
590 StackSamplingProfiler::ResetAnnotationsForTesting(); 647 StackSamplingProfiler::ResetAnnotationsForTesting();
591 648
592 SamplingParams params; 649 SamplingParams params;
593 params.sampling_interval = TimeDelta::FromMilliseconds(0); 650 params.sampling_interval = TimeDelta::FromMilliseconds(0);
594 params.samples_per_burst = 1; 651 params.samples_per_burst = 1;
595 652
596 std::vector<CallStackProfile> profiles; 653 std::vector<CallStackProfile> profiles;
597 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); 654 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
598 655
599 // Check that the profile and samples sizes are correct, and the module 656 // Check that the profile and samples sizes are correct, and the module
(...skipping 113 matching lines...) Expand 10 before | Expand all | Expand 10 after
713 reinterpret_cast<const void*>(&TargetThread::CallWithAlloca)) 770 reinterpret_cast<const void*>(&TargetThread::CallWithAlloca))
714 << " was not found in stack:\n" 771 << " was not found in stack:\n"
715 << FormatSampleForDiagnosticOutput(sample, profile.modules); 772 << FormatSampleForDiagnosticOutput(sample, profile.modules);
716 773
717 // These frames should be adjacent on the stack. 774 // These frames should be adjacent on the stack.
718 EXPECT_EQ(1, alloca_frame - end_frame) 775 EXPECT_EQ(1, alloca_frame - end_frame)
719 << "Stack:\n" 776 << "Stack:\n"
720 << FormatSampleForDiagnosticOutput(sample, profile.modules); 777 << FormatSampleForDiagnosticOutput(sample, profile.modules);
721 } 778 }
722 779
723 // Checks that the fire-and-forget interface works.
724 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
725 #define MAYBE_StartAndRunAsync StartAndRunAsync
726 #else
727 #define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync
728 #endif
729 TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) {
730 // StartAndRunAsync requires the caller to have a message loop.
731 MessageLoop message_loop;
732
733 SamplingParams params;
734 params.samples_per_burst = 1;
735
736 CallStackProfiles profiles;
737 WithTargetThread([&params, &profiles](PlatformThreadId target_thread_id) {
738 WaitableEvent sampling_thread_completed(
739 WaitableEvent::ResetPolicy::AUTOMATIC,
740 WaitableEvent::InitialState::NOT_SIGNALED);
741 const StackSamplingProfiler::CompletedCallback callback =
742 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles),
743 Unretained(&sampling_thread_completed));
744 StackSamplingProfiler::StartAndRunAsync(target_thread_id, params, callback);
745 RunLoop().RunUntilIdle();
746 sampling_thread_completed.Wait();
747 });
748
749 ASSERT_EQ(1u, profiles.size());
750 }
751
752 // Checks that the expected number of profiles and samples are present in the 780 // Checks that the expected number of profiles and samples are present in the
753 // call stack profiles produced. 781 // call stack profiles produced.
754 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 782 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
755 #define MAYBE_MultipleProfilesAndSamples MultipleProfilesAndSamples 783 #define MAYBE_MultipleProfilesAndSamples MultipleProfilesAndSamples
756 #else 784 #else
757 #define MAYBE_MultipleProfilesAndSamples DISABLED_MultipleProfilesAndSamples 785 #define MAYBE_MultipleProfilesAndSamples DISABLED_MultipleProfilesAndSamples
758 #endif 786 #endif
759 TEST(StackSamplingProfilerTest, MAYBE_MultipleProfilesAndSamples) { 787 TEST(StackSamplingProfilerTest, MAYBE_MultipleProfilesAndSamples) {
760 SamplingParams params; 788 SamplingParams params;
761 params.burst_interval = params.sampling_interval = 789 params.burst_interval = params.sampling_interval =
762 TimeDelta::FromMilliseconds(0); 790 TimeDelta::FromMilliseconds(0);
763 params.bursts = 2; 791 params.bursts = 2;
764 params.samples_per_burst = 3; 792 params.samples_per_burst = 3;
765 793
766 std::vector<CallStackProfile> profiles; 794 std::vector<CallStackProfile> profiles;
767 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); 795 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
768 796
769 ASSERT_EQ(2u, profiles.size()); 797 ASSERT_EQ(2u, profiles.size());
770 EXPECT_EQ(3u, profiles[0].samples.size()); 798 EXPECT_EQ(3u, profiles[0].samples.size());
771 EXPECT_EQ(3u, profiles[1].samples.size()); 799 EXPECT_EQ(3u, profiles[1].samples.size());
772 } 800 }
773 801
802 // Checks that a profiler can stop/destruct without ever having started.
803 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
804 #define MAYBE_StopWithoutStarting StopWithoutStarting
805 #else
806 #define MAYBE_StopWithoutStarting DISABLED_StopWithoutStarting
807 #endif
808 TEST(StackSamplingProfilerTest, MAYBE_StopWithoutStarting) {
809 StackSamplingProfiler::TestAPI::Reset();
810
811 WithTargetThread([](PlatformThreadId target_thread_id) {
812 SamplingParams params;
813 params.sampling_interval = TimeDelta::FromMilliseconds(0);
814 params.samples_per_burst = 1;
815
816 CallStackProfiles profiles;
817 WaitableEvent sampling_completed(WaitableEvent::ResetPolicy::MANUAL,
818 WaitableEvent::InitialState::NOT_SIGNALED);
819 const StackSamplingProfiler::CompletedCallback callback =
820 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles),
821 Unretained(&sampling_completed));
822 StackSamplingProfiler profiler(target_thread_id, params, callback);
823
824 profiler.Stop(); // Constructed but never started.
825 EXPECT_FALSE(sampling_completed.IsSignaled());
826 });
827 }
828
829 // Checks that it's okay to stop a profiler before it finishes.
830 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
831 #define MAYBE_StopSafely StopSafely
832 #else
833 #define MAYBE_StopSafely DISABLED_StopSafely
834 #endif
835 TEST(StackSamplingProfilerTest, MAYBE_StopSafely) {
836 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
837
838 // Test delegate that counts samples.
839 class SampleRecordedCounter : public NativeStackSamplerTestDelegate {
840 public:
841 SampleRecordedCounter() {}
842
843 void OnPreStackWalk() override {
844 subtle::NoBarrier_AtomicIncrement(&count_, 1);
845 }
846
847 int Get() { return subtle::NoBarrier_Load(&count_); }
848
849 private:
850 subtle::AtomicWord count_ = 0;
Mike Wittman 2017/03/28 19:32:02 This should use locks rather than atomic ops. From
bcwhite 2017/03/29 14:56:58 I'm relatively well versed in the subtleties of at
Mike Wittman 2017/03/30 16:18:38 You may be but readers generally won't, and we opt
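A lock-based version of the counter above, sketched along the lines suggested here; it assumes base/synchronization/lock.h is included and is not necessarily the code that landed:

  class SampleRecordedCounter : public NativeStackSamplerTestDelegate {
   public:
    SampleRecordedCounter() = default;

    void OnPreStackWalk() override {
      AutoLock lock(lock_);
      ++count_;
    }

    size_t Get() const {
      AutoLock lock(lock_);
      return count_;
    }

   private:
    mutable Lock lock_;
    size_t count_ = 0;
  };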
851 };
852
853 WithTargetThread([](PlatformThreadId target_thread_id) {
854 std::vector<SamplingParams> params(2);
855
856 // Providing an initial delay makes it more likely that both will be
857 // scheduled before either starts to run. Once started, samples will
858 // run ordered by their scheduled, interleaved times regardless of
859 // the interval at which the thread wakes up.
860 params[0].initial_delay = TimeDelta::FromMilliseconds(10);
861 params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
862 params[0].samples_per_burst = 100000;
863
864 params[1].initial_delay = TimeDelta::FromMilliseconds(10);
865 params[1].sampling_interval = TimeDelta::FromMilliseconds(1);
866 params[1].samples_per_burst = 100000;
867
868 std::vector<CallStackProfiles> profiles;
869 std::vector<std::unique_ptr<SampleRecordedCounter>> samples_recorded;
Mike Wittman 2017/03/28 19:32:02 std::vector<std::unique_ptr<NativeStackSamplerTest
bcwhite 2017/03/29 14:56:58 I tried many different ways this was the only one
Mike Wittman 2017/03/30 16:18:38 Casting to SampleRecordedCounter* on use is what's
bcwhite 2017/03/30 18:54:51 Done.
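The shape being asked for, roughly: store the delegates as the base type so CreateProfilers() can take the vector directly, and downcast only at the point of use. A sketch, not the exact code that landed:

  std::vector<std::unique_ptr<NativeStackSamplerTestDelegate>> samples_recorded;
  for (size_t i = 0; i < params.size(); ++i)
    samples_recorded.push_back(MakeUnique<SampleRecordedCounter>());
  CreateProfilers(target_thread_id, params, &samples_recorded, &profiles,
                  &profilers, &sampling_completed);

  // Later, where the count is read:
  auto* counter0 =
      static_cast<SampleRecordedCounter*>(samples_recorded[0].get());
  while (counter0->Get() == 0)
    PlatformThread::Sleep(TimeDelta::FromMilliseconds(1));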
870 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
871 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
872 for (size_t i = 0; i < params.size(); ++i)
873 samples_recorded.push_back(MakeUnique<SampleRecordedCounter>());
874 CreateProfilers(
875 target_thread_id, params,
876 reinterpret_cast<
877 std::vector<std::unique_ptr<NativeStackSamplerTestDelegate>>*>(
878 &samples_recorded),
879 &profiles, &profilers, &sampling_completed);
880
881 profilers[0]->Start();
882 profilers[1]->Start();
883
884 // Wait for both to start accumulating samples.
Mike Wittman 2017/03/28 19:32:06 It seems like using WaitableEvents in the test del
bcwhite 2017/03/29 14:56:58 Done.
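The suggestion here would replace the polling loop below with an event that the delegate signals on the first recorded sample. A sketch only, with the event owned by the test body:

  class SampleRecordedEvent : public NativeStackSamplerTestDelegate {
   public:
    explicit SampleRecordedEvent(WaitableEvent* sample_recorded)
        : sample_recorded_(sample_recorded) {}

    void OnPreStackWalk() override { sample_recorded_->Signal(); }

   private:
    WaitableEvent* const sample_recorded_;
  };

  // In the test, sample_recorded[i]->Wait() then replaces the sleep loop below.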
885 while (samples_recorded[0]->Get() == 0 || samples_recorded[1]->Get() == 0)
886 PlatformThread::Sleep(TimeDelta::FromMilliseconds(1));
887
888 // Ensure that the first sampler can be safely stopped while the second
889 // continues to run. The stopped first profiler will still have a
890 // PerformCollectionTask pending that will do nothing when executed because
891 // the collection will have been removed by Stop().
892 profilers[0]->Stop();
893 sampling_completed[0]->Wait();
894 int count0 = samples_recorded[0]->Get();
895 int count1 = samples_recorded[1]->Get();
896
897 // Waiting for the second sampler to collect a couple samples ensures that
898 // the pending PerformCollectionTask for the first has executed because
899 // tasks are always ordered by their next scheduled time.
900 while (samples_recorded[1]->Get() < count1 + 2)
901 PlatformThread::Sleep(TimeDelta::FromMilliseconds(1));
902
903 // Ensure that the first profiler didn't do anything since it was stopped.
904 EXPECT_EQ(count0, samples_recorded[0]->Get());
905 });
906 }
907
774 // Checks that no call stack profiles are captured if the profiling is stopped 908 // Checks that no call stack profiles are captured if the profiling is stopped
775 // during the initial delay. 909 // during the initial delay.
776 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 910 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
777 #define MAYBE_StopDuringInitialDelay StopDuringInitialDelay 911 #define MAYBE_StopDuringInitialDelay StopDuringInitialDelay
778 #else 912 #else
779 #define MAYBE_StopDuringInitialDelay DISABLED_StopDuringInitialDelay 913 #define MAYBE_StopDuringInitialDelay DISABLED_StopDuringInitialDelay
780 #endif 914 #endif
781 TEST(StackSamplingProfilerTest, MAYBE_StopDuringInitialDelay) { 915 TEST(StackSamplingProfilerTest, MAYBE_StopDuringInitialDelay) {
782 SamplingParams params; 916 SamplingParams params;
783 params.initial_delay = TimeDelta::FromSeconds(60); 917 params.initial_delay = TimeDelta::FromSeconds(60);
(...skipping 69 matching lines...)
853 }); 987 });
854 } 988 }
855 989
856 // Checks that the same profiler may be run multiple times. 990 // Checks that the same profiler may be run multiple times.
857 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 991 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
858 #define MAYBE_CanRunMultipleTimes CanRunMultipleTimes 992 #define MAYBE_CanRunMultipleTimes CanRunMultipleTimes
859 #else 993 #else
860 #define MAYBE_CanRunMultipleTimes DISABLED_CanRunMultipleTimes 994 #define MAYBE_CanRunMultipleTimes DISABLED_CanRunMultipleTimes
861 #endif 995 #endif
862 TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleTimes) { 996 TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleTimes) {
997 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
998
999 WithTargetThread([](PlatformThreadId target_thread_id) {
1000 SamplingParams params;
1001 params.sampling_interval = TimeDelta::FromMilliseconds(0);
1002 params.samples_per_burst = 1;
1003
1004 CallStackProfiles profiles;
1005 WaitableEvent sampling_completed(WaitableEvent::ResetPolicy::MANUAL,
1006 WaitableEvent::InitialState::NOT_SIGNALED);
1007 const StackSamplingProfiler::CompletedCallback callback =
1008 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles),
1009 Unretained(&sampling_completed));
1010 StackSamplingProfiler profiler(target_thread_id, params, callback);
1011
1012 // Just start and stop to execute code paths.
1013 profiler.Start();
1014 profiler.Stop();
1015 sampling_completed.Wait();
1016
1017 // Ensure a second request will run and not block.
1018 sampling_completed.Reset();
1019 profiles.clear();
1020 profiler.Start();
1021 sampling_completed.Wait();
1022 profiler.Stop();
1023 ASSERT_EQ(1u, profiles.size());
1024 });
1025 }
1026
1027 // Checks that different profilers may be run, one after the other.
1028 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1029 #define MAYBE_CanRunMultipleProfilers CanRunMultipleProfilers
1030 #else
1031 #define MAYBE_CanRunMultipleProfilers DISABLED_CanRunMultipleProfilers
1032 #endif
1033 TEST(StackSamplingProfilerTest, MAYBE_CanRunMultipleProfilers) {
1034 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1035
863 SamplingParams params; 1036 SamplingParams params;
864 params.sampling_interval = TimeDelta::FromMilliseconds(0); 1037 params.sampling_interval = TimeDelta::FromMilliseconds(0);
865 params.samples_per_burst = 1; 1038 params.samples_per_burst = 1;
866 1039
867 std::vector<CallStackProfile> profiles; 1040 std::vector<CallStackProfile> profiles;
868 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); 1041 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
869 ASSERT_EQ(1u, profiles.size()); 1042 ASSERT_EQ(1u, profiles.size());
870 1043
871 profiles.clear(); 1044 profiles.clear();
872 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles); 1045 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
873 ASSERT_EQ(1u, profiles.size()); 1046 ASSERT_EQ(1u, profiles.size());
874 } 1047 }
875 1048
876 // Checks that requests to start profiling while another profile is taking place 1049 // Checks that a sampler can be started while another is running.
877 // are ignored. 1050 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
878 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 1051 #define MAYBE_MultipleStart MultipleStart
879 #define MAYBE_ConcurrentProfiling ConcurrentProfiling 1052 #else
880 #else 1053 #define MAYBE_MultipleStart DISABLED_MultipleStart
881 #define MAYBE_ConcurrentProfiling DISABLED_ConcurrentProfiling 1054 #endif
882 #endif 1055 TEST(StackSamplingProfilerTest, MAYBE_MultipleStart) {
883 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling) { 1056 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
884 WithTargetThread([](PlatformThreadId target_thread_id) { 1057
885 SamplingParams params[2]; 1058 WithTargetThread([](PlatformThreadId target_thread_id) {
1059 std::vector<SamplingParams> params(2);
1060
1061 // Providing an initial delay makes it more likely that both will be
1062 // scheduled before either starts to run. Once started, samples will
1063 // run ordered by their scheduled, interleaved times regardless of
1064 // whatever interval the thread wakes up. Thus, total execution time
1065 // will be 10ms (delay) + 10x1ms (sampling) + 1/2 timer minimum interval.
Mike Wittman 2017/03/28 19:32:05 This comment is no longer relevant and can be remo
bcwhite 2017/03/29 14:56:58 Done.
1066 params[0].initial_delay = TimeDelta::FromDays(1);
Mike Wittman 2017/03/28 19:32:09 AVeryLongTimeDelta() is the established way to say
bcwhite 2017/03/29 14:56:58 Done.
1067 params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
Mike Wittman 2017/03/28 19:32:11 This parameter can be removed.
bcwhite 2017/03/29 14:56:59 Done.
1068 params[0].samples_per_burst = 1;
1069
1070 params[1].initial_delay = TimeDelta::FromMilliseconds(0);
Mike Wittman 2017/03/28 19:32:02 This line can be removed since this is the default
bcwhite 2017/03/29 14:56:59 Done.
1071 params[1].sampling_interval = TimeDelta::FromMilliseconds(1);
1072 params[1].samples_per_burst = 1;
1073
1074 std::vector<CallStackProfiles> profiles;
1075 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
1076 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
1077 CreateProfilers(target_thread_id, params, nullptr, &profiles, &profilers,
1078 &sampling_completed);
1079
1080 profilers[0]->Start();
1081 profilers[1]->Start();
Mike Wittman 2017/03/28 19:32:08 We should wait on the second profiler to finish an
bcwhite 2017/03/29 14:56:58 Done.
1082 EXPECT_FALSE(sampling_completed[0]->IsSignaled());
Mike Wittman 2017/03/28 19:32:08 This should be removed since what it's checking is
bcwhite 2017/03/29 14:56:58 Done.
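The follow-up being asked for would look roughly like this, after profilers[1]->Start() and in place of the EXPECT_FALSE above (a sketch, not the exact code that landed):

  sampling_completed[1]->Wait();
  EXPECT_EQ(1u, profiles[1].size());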
1083 });
1084 }
1085
1086 // Checks that the sampling thread can shut down.
1087 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1088 #define MAYBE_SamplerIdleShutdown SamplerIdleShutdown
1089 #else
1090 #define MAYBE_SamplerIdleShutdown DISABLED_SamplerIdleShutdown
1091 #endif
1092 TEST(StackSamplingProfilerTest, MAYBE_SamplerIdleShutdown) {
1093 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1094
1095 SamplingParams params;
1096 params.sampling_interval = TimeDelta::FromMilliseconds(0);
1097 params.samples_per_burst = 1;
1098
1099 std::vector<CallStackProfile> profiles;
1100 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
1101 ASSERT_EQ(1u, profiles.size());
1102
1103 // Capture thread should still be running at this point.
1104 ASSERT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1105
1106 // Initiate an "idle" shutdown and ensure it happens. Idle-shutdown was
1107 // disabled above so the test will fail due to a timeout if it does not
1108 // exit.
1109 StackSamplingProfiler::TestAPI::PerformSamplingThreadIdleShutdown(false);
1110
1111 // While the shutdown has been initiated, the actual exit of the thread still
1112 // happens asynchronously. Watch until the thread actually exits. This test
1113 // will time-out in the case of failure.
1114 while (StackSamplingProfiler::TestAPI::IsSamplingThreadRunning())
1115 PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(1));
1116 }
1117
1118 // Checks that additional requests will restart a stopped profiler.
1119 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1120 #define MAYBE_WillRestartSamplerAfterIdleShutdown \
1121 WillRestartSamplerAfterIdleShutdown
1122 #else
1123 #define MAYBE_WillRestartSamplerAfterIdleShutdown \
1124 DISABLED_WillRestartSamplerAfterIdleShutdown
1125 #endif
1126 TEST(StackSamplingProfilerTest, MAYBE_WillRestartSamplerAfterIdleShutdown) {
1127 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1128
1129 SamplingParams params;
1130 params.sampling_interval = TimeDelta::FromMilliseconds(0);
1131 params.samples_per_burst = 1;
1132
1133 std::vector<CallStackProfile> profiles;
1134 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
1135 ASSERT_EQ(1u, profiles.size());
1136
1137 // Capture thread should still be running at this point.
1138 ASSERT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1139
1140 // Post a ShutdownTask on the sampling thread which, when executed, will
1141 // mark the thread as EXITING and begin shutdown of the thread.
1142 StackSamplingProfiler::TestAPI::PerformSamplingThreadIdleShutdown(false);
1143
1144 // Ensure another capture will start the sampling thread and run.
1145 profiles.clear();
1146 CaptureProfiles(params, AVeryLongTimeDelta(), &profiles);
1147 ASSERT_EQ(1u, profiles.size());
1148 EXPECT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1149 }
1150
1151 // Checks that it's safe to stop a task after it's completed and the sampling
1152 // thread has shut down for being idle.
1153 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1154 #define MAYBE_StopAfterIdleShutdown StopAfterIdleShutdown
1155 #else
1156 #define MAYBE_StopAfterIdleShutdown DISABLED_StopAfterIdleShutdown
1157 #endif
1158 TEST(StackSamplingProfilerTest, MAYBE_StopAfterIdleShutdown) {
1159 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1160
1161 WithTargetThread([](PlatformThreadId target_thread_id) {
1162 std::vector<SamplingParams> params(1);
Mike Wittman 2017/03/28 19:32:04 There's no need for vectors here since there's onl
bcwhite 2017/03/29 14:56:58 CreateProfilers() takes a vector so this allows co
Mike Wittman 2017/03/30 16:18:38 Yes, but we already have several instances where s
bcwhite 2017/03/30 18:54:51 Usually where it's being passed to CaptureProfiles
Mike Wittman 2017/03/31 01:38:21 OK, sounds reasonable.
1163
1164 params[0].initial_delay = TimeDelta::FromMilliseconds(0);
1165 params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
1166 params[0].samples_per_burst = 1;
1167
1168 std::vector<CallStackProfiles> profiles;
1169 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
1170 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
1171 CreateProfilers(target_thread_id, params, nullptr, &profiles, &profilers,
1172 &sampling_completed);
1173
1174 profilers[0]->Start();
1175 sampling_completed[0]->Wait();
1176
1177 // Capture thread should still be running at this point.
1178 ASSERT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1179
1180 // Perform an idle shutdown.
1181 StackSamplingProfiler::TestAPI::PerformSamplingThreadIdleShutdown(false);
1182
1183 // Stop should be safe though it's impossible to know at this moment if the
1184 // sampling thread has completely exited or will just "stop soon".
1185 profilers[0]->Stop();
1186 });
1187 }
1188
1189 // Checks that profilers can run both before and after the sampling thread has
1190 // started.
1191 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1192 #define MAYBE_ProfileBeforeAndAfterSamplingThreadRunning \
1193 ProfileBeforeAndAfterSamplingThreadRunning
1194 #else
1195 #define MAYBE_ProfileBeforeAndAfterSamplingThreadRunning \
1196 DISABLED_ProfileBeforeAndAfterSamplingThreadRunning
1197 #endif
1198 TEST(StackSamplingProfilerTest,
1199 MAYBE_ProfileBeforeAndAfterSamplingThreadRunning) {
1200 StackSamplingProfiler::TestAPI::Reset();
1201 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1202
1203 WithTargetThread([](PlatformThreadId target_thread_id) {
1204 std::vector<SamplingParams> params(2);
1205
1206 params[0].initial_delay = TimeDelta::FromDays(1);
1207 params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
1208 params[0].samples_per_burst = 1;
1209
1210 params[1].initial_delay = TimeDelta::FromMilliseconds(0);
1211 params[1].sampling_interval = TimeDelta::FromMilliseconds(1);
1212 params[1].samples_per_burst = 1;
1213
1214 std::vector<CallStackProfiles> profiles;
1215 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
1216 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
1217 CreateProfilers(target_thread_id, params, nullptr, &profiles, &profilers,
1218 &sampling_completed);
1219
1220 // First profiler is started when there has never been a sampling thread.
1221 EXPECT_FALSE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1222 profilers[0]->Start();
1223 // Second profiler is started when sampling thread is already running.
1224 EXPECT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1225 profilers[1]->Start();
1226
1227 // Only the second profiler should finish before test times out.
1228 size_t completed_profiler = WaitForSamplingComplete(&sampling_completed);
1229 EXPECT_EQ(1U, completed_profiler);
1230 });
1231 }
1232
1233 // Checks that an idle-shutdown task will abort if a new profiler starts
1234 // between when it was posted and when it runs.
1235 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1236 #define MAYBE_IdleShutdownAbort IdleShutdownAbort
1237 #else
1238 #define MAYBE_IdleShutdownAbort DISABLED_IdleShutdownAbort
1239 #endif
1240 TEST(StackSamplingProfilerTest, MAYBE_IdleShutdownAbort) {
1241 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1242
1243 WithTargetThread([](PlatformThreadId target_thread_id) {
1244 std::vector<SamplingParams> params(1);
1245
1246 params[0].initial_delay = TimeDelta::FromMilliseconds(0);
1247 params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
1248 params[0].samples_per_burst = 1;
1249
1250 std::vector<CallStackProfiles> profiles;
1251 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
1252 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
1253 CreateProfilers(target_thread_id, params, nullptr, &profiles, &profilers,
1254 &sampling_completed);
1255
1256 profilers[0]->Start();
1257 sampling_completed[0]->Wait();
1258
1259 // Perform an idle shutdown but simulate that a new capture is started
1260 // before it can actually run.
1261 StackSamplingProfiler::TestAPI::PerformSamplingThreadIdleShutdown(true);
1262
1263 // Though the shutdown-task has been executed, any actual exit of the
Mike Wittman 2017/03/28 19:32:02 Since we can't reliably validate that the thread w
bcwhite 2017/03/29 14:56:58 If the sampling thread did stop (when it shouldn't
Mike Wittman 2017/03/30 16:18:38 It's still worth checking this to prevent future r
bcwhite 2017/03/30 18:54:51 That is either WillRestartSamplerAfterIdleShutdown
Mike Wittman 2017/03/31 01:38:21 Neither of those tests exercise the shutdown abort
bcwhite 2017/03/31 13:57:56 Done.
1264 // thread is asynchronous so there is no way to detect that it *didn't* exit
1265 // except to wait a reasonable amount of time and then check. Since the
1266 // thread was just running ("perform" blocked until it was), it should
1267 // finish almost immediately and without any waiting for tasks or events.
1268 PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(200));
1269 EXPECT_TRUE(StackSamplingProfiler::TestAPI::IsSamplingThreadRunning());
1270 });
1271 }
1272
1273 // Checks that multiple synchronized sampling requests execute in parallel.
1274 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1275 #define MAYBE_ConcurrentProfiling_InSync ConcurrentProfiling_InSync
1276 #else
1277 #define MAYBE_ConcurrentProfiling_InSync DISABLED_ConcurrentProfiling_InSync
1278 #endif
1279 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_InSync) {
1280 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1281
1282 WithTargetThread([](PlatformThreadId target_thread_id) {
1283 std::vector<SamplingParams> params(2);
1284
1285 // Providing an initial delay makes it more likely that both will be
1286 // scheduled before either starts to run. Once started, samples will
1287 // run ordered by their scheduled, interleaved times regardless of
1288 // the interval at which the thread wakes up. Thus, total execution time
1289 // will be 10ms (delay) + 10x1ms (sampling) + 1/2 timer minimum interval.
886 params[0].initial_delay = TimeDelta::FromMilliseconds(10); 1290 params[0].initial_delay = TimeDelta::FromMilliseconds(10);
887 params[0].sampling_interval = TimeDelta::FromMilliseconds(0); 1291 params[0].sampling_interval = TimeDelta::FromMilliseconds(1);
888 params[0].samples_per_burst = 1; 1292 params[0].samples_per_burst = 9;
889 1293
890 params[1].sampling_interval = TimeDelta::FromMilliseconds(0); 1294 params[1].initial_delay = TimeDelta::FromMilliseconds(11);
891 params[1].samples_per_burst = 1; 1295 params[1].sampling_interval = TimeDelta::FromMilliseconds(1);
892 1296 params[1].samples_per_burst = 8;
893 CallStackProfiles profiles[2]; 1297
894 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed(2); 1298 std::vector<CallStackProfiles> profiles;
895 std::vector<std::unique_ptr<StackSamplingProfiler>> profiler(2); 1299 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
896 for (int i = 0; i < 2; ++i) { 1300 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
897 sampling_completed[i] = 1301 CreateProfilers(target_thread_id, params, nullptr, &profiles, &profilers,
898 MakeUnique<WaitableEvent>(WaitableEvent::ResetPolicy::AUTOMATIC, 1302 &sampling_completed);
899 WaitableEvent::InitialState::NOT_SIGNALED); 1303
900 const StackSamplingProfiler::CompletedCallback callback = 1304 profilers[0]->Start();
901 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles[i]), 1305 profilers[1]->Start();
902 Unretained(sampling_completed[i].get())); 1306
903 profiler[i] = MakeUnique<StackSamplingProfiler>(target_thread_id,
904 params[i], callback);
905 }
906
907 profiler[0]->Start();
908 profiler[1]->Start();
909
910 std::vector<WaitableEvent*> sampling_completed_rawptrs(
911 sampling_completed.size());
912 std::transform(
913 sampling_completed.begin(), sampling_completed.end(),
914 sampling_completed_rawptrs.begin(),
915 [](const std::unique_ptr<WaitableEvent>& elem) { return elem.get(); });
916 // Wait for one profiler to finish. 1307 // Wait for one profiler to finish.
917 size_t completed_profiler = 1308 size_t completed_profiler = WaitForSamplingComplete(&sampling_completed);
918 WaitableEvent::WaitMany(sampling_completed_rawptrs.data(), 2); 1309 ASSERT_EQ(1u, profiles[completed_profiler].size());
1310
1311 size_t other_profiler = 1 - completed_profiler;
1312 // Wait for the other profiler to finish.
1313 sampling_completed[other_profiler]->Wait();
1314 ASSERT_EQ(1u, profiles[other_profiler].size());
1315
1316 // Ensure each got the correct number of samples.
1317 EXPECT_EQ(9u, profiles[0][0].samples.size());
1318 EXPECT_EQ(8u, profiles[1][0].samples.size());
1319 });
1320 }
1321
1322 // Checks that several mixed sampling requests execute in parallel.
1323 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1324 #define MAYBE_ConcurrentProfiling_Mixed ConcurrentProfiling_Mixed
1325 #else
1326 #define MAYBE_ConcurrentProfiling_Mixed DISABLED_ConcurrentProfiling_Mixed
1327 #endif
1328 TEST(StackSamplingProfilerTest, MAYBE_ConcurrentProfiling_Mixed) {
1329 StackSamplingProfiler::TestAPI::DisableIdleShutdown();
1330
1331 WithTargetThread([](PlatformThreadId target_thread_id) {
1332 std::vector<SamplingParams> params(3);
1333
1334 params[0].initial_delay = TimeDelta::FromMilliseconds(8);
1335 params[0].sampling_interval = TimeDelta::FromMilliseconds(4);
1336 params[0].samples_per_burst = 10;
1337
1338 params[1].initial_delay = TimeDelta::FromMilliseconds(9);
1339 params[1].sampling_interval = TimeDelta::FromMilliseconds(3);
1340 params[1].samples_per_burst = 10;
1341
1342 params[2].initial_delay = TimeDelta::FromMilliseconds(10);
1343 params[2].sampling_interval = TimeDelta::FromMilliseconds(2);
1344 params[2].samples_per_burst = 10;
1345
1346 std::vector<CallStackProfiles> profiles;
1347 std::vector<std::unique_ptr<WaitableEvent>> sampling_completed;
1348 std::vector<std::unique_ptr<StackSamplingProfiler>> profilers;
1349 CreateProfilers(target_thread_id, params, nullptr, &profiles, &profilers,
1350 &sampling_completed);
1351
1352 for (size_t i = 0; i < profilers.size(); ++i)
1353 profilers[i]->Start();
1354
1355 // Wait for one profiler to finish.
1356 size_t completed_profiler = WaitForSamplingComplete(&sampling_completed);
919 EXPECT_EQ(1u, profiles[completed_profiler].size()); 1357 EXPECT_EQ(1u, profiles[completed_profiler].size());
920 1358 // Stop and destroy all profilers, always in the same order. Don't crash.
921 size_t other_profiler = 1 - completed_profiler; 1359 for (size_t i = 0; i < profilers.size(); ++i)
922 // Give the other profiler a chance to run and observe that it hasn't. 1360 profilers[i]->Stop();
923 EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait( 1361 for (size_t i = 0; i < profilers.size(); ++i)
924 TimeDelta::FromMilliseconds(25))); 1362 profilers[i].reset();
925 1363 });
926 // Start the other profiler again and it should run. 1364 }
927 profiler[other_profiler]->Start(); 1365
928 sampling_completed[other_profiler]->Wait();
929 EXPECT_EQ(1u, profiles[other_profiler].size());
930 });
931 }
932
933 // Checks that a stack that runs through another library produces a stack with 1366 // Checks that a stack that runs through another library produces a stack with
934 // the expected functions. 1367 // the expected functions.
935 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 1368 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
936 #define MAYBE_OtherLibrary OtherLibrary 1369 #define MAYBE_OtherLibrary OtherLibrary
937 #else 1370 #else
938 #define MAYBE_OtherLibrary DISABLED_OtherLibrary 1371 #define MAYBE_OtherLibrary DISABLED_OtherLibrary
939 #endif 1372 #endif
940 TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) { 1373 TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) {
941 SamplingParams params; 1374 SamplingParams params;
942 params.sampling_interval = TimeDelta::FromMilliseconds(0); 1375 params.sampling_interval = TimeDelta::FromMilliseconds(0);
(...skipping 74 matching lines...)
1017 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) 1450 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
1018 #define MAYBE_UnloadedLibrary UnloadedLibrary 1451 #define MAYBE_UnloadedLibrary UnloadedLibrary
1019 #else 1452 #else
1020 #define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary 1453 #define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary
1021 #endif 1454 #endif
1022 TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) { 1455 TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) {
1023 TestLibraryUnload(true); 1456 TestLibraryUnload(true);
1024 } 1457 }
1025 1458
1026 } // namespace base 1459 } // namespace base