OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <cstdlib> | 5 #include <cstdlib> |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/compiler_specific.h" | 8 #include "base/compiler_specific.h" |
9 #include "base/memory/scoped_vector.h" | 9 #include "base/memory/scoped_vector.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
| 11 #include "base/native_library.h" |
11 #include "base/path_service.h" | 12 #include "base/path_service.h" |
| 13 #include "base/profiler/native_stack_sampler.h" |
12 #include "base/profiler/stack_sampling_profiler.h" | 14 #include "base/profiler/stack_sampling_profiler.h" |
13 #include "base/run_loop.h" | 15 #include "base/run_loop.h" |
| 16 #include "base/scoped_native_library.h" |
14 #include "base/strings/stringprintf.h" | 17 #include "base/strings/stringprintf.h" |
| 18 #include "base/strings/utf_string_conversions.h" |
15 #include "base/synchronization/waitable_event.h" | 19 #include "base/synchronization/waitable_event.h" |
16 #include "base/threading/platform_thread.h" | 20 #include "base/threading/platform_thread.h" |
17 #include "base/time/time.h" | 21 #include "base/time/time.h" |
18 #include "build/build_config.h" | 22 #include "build/build_config.h" |
19 #include "testing/gtest/include/gtest/gtest.h" | 23 #include "testing/gtest/include/gtest/gtest.h" |
20 | 24 |
21 #if defined(OS_WIN) | 25 #if defined(OS_WIN) |
22 #include <intrin.h> | 26 #include <intrin.h> |
23 #include <malloc.h> | 27 #include <malloc.h> |
| 28 #include <windows.h> |
24 #else | 29 #else |
25 #include <alloca.h> | 30 #include <alloca.h> |
26 #endif | 31 #endif |
27 | 32 |
28 // STACK_SAMPLING_PROFILER_SUPPORTED is used to conditionally enable the tests | 33 // STACK_SAMPLING_PROFILER_SUPPORTED is used to conditionally enable the tests |
29 // below for supported platforms (currently Win x64). | 34 // below for supported platforms (currently Win x64). |
30 #if defined(_WIN64) | 35 #if defined(_WIN64) |
31 #define STACK_SAMPLING_PROFILER_SUPPORTED 1 | 36 #define STACK_SAMPLING_PROFILER_SUPPORTED 1 |
32 #endif | 37 #endif |
33 | 38 |
34 #if defined(OS_WIN) | 39 #if defined(OS_WIN) |
35 #pragma intrinsic(_ReturnAddress) | 40 #pragma intrinsic(_ReturnAddress) |
36 #endif | 41 #endif |
37 | 42 |
38 namespace base { | 43 namespace base { |
39 | 44 |
40 using SamplingParams = StackSamplingProfiler::SamplingParams; | 45 using SamplingParams = StackSamplingProfiler::SamplingParams; |
41 using Frame = StackSamplingProfiler::Frame; | 46 using Frame = StackSamplingProfiler::Frame; |
42 using Module = StackSamplingProfiler::Module; | 47 using Module = StackSamplingProfiler::Module; |
43 using Sample = StackSamplingProfiler::Sample; | 48 using Sample = StackSamplingProfiler::Sample; |
44 using CallStackProfile = StackSamplingProfiler::CallStackProfile; | 49 using CallStackProfile = StackSamplingProfiler::CallStackProfile; |
45 using CallStackProfiles = StackSamplingProfiler::CallStackProfiles; | 50 using CallStackProfiles = StackSamplingProfiler::CallStackProfiles; |
46 | 51 |
47 namespace { | 52 namespace { |
48 | 53 |
49 // Configuration for whether to allocate dynamic stack memory. | 54 // Configuration for the frames that appear on the stack. |
50 enum DynamicStackAllocationConfig { USE_ALLOCA, NO_ALLOCA }; | 55 struct StackConfiguration { |
| 56 enum Config { NORMAL, WITH_ALLOCA, WITH_OTHER_LIBRARY }; |
| 57 |
| 58 explicit StackConfiguration(Config config) |
| 59 : StackConfiguration(config, nullptr) { |
| 60 EXPECT_NE(config, WITH_OTHER_LIBRARY); |
| 61 } |
| 62 |
| 63 StackConfiguration(Config config, NativeLibrary library) |
| 64 : config(config), library(library) { |
| 65 EXPECT_TRUE(config != WITH_OTHER_LIBRARY || library); |
| 66 } |
| 67 |
| 68 Config config; |
| 69 |
| 70 // Only used if config == WITH_OTHER_LIBRARY. |
| 71 NativeLibrary library; |
| 72 }; |
51 | 73 |
52 // Signature for a target function that is expected to appear in the stack. See | 74 // Signature for a target function that is expected to appear in the stack. See |
53 // SignalAndWaitUntilSignaled() below. The return value should be a program | 75 // SignalAndWaitUntilSignaled() below. The return value should be a program |
54 // counter pointer near the end of the function. | 76 // counter pointer near the end of the function. |
55 using TargetFunction = const void*(*)(WaitableEvent*, WaitableEvent*); | 77 using TargetFunction = const void*(*)(WaitableEvent*, WaitableEvent*, |
| 78 const StackConfiguration*); |
56 | 79 |
57 // A thread to target for profiling, whose stack is guaranteed to contain | 80 // A thread to target for profiling, whose stack is guaranteed to contain |
58 // SignalAndWaitUntilSignaled() when coordinated with the main thread. | 81 // SignalAndWaitUntilSignaled() when coordinated with the main thread. |
59 class TargetThread : public PlatformThread::Delegate { | 82 class TargetThread : public PlatformThread::Delegate { |
60 public: | 83 public: |
61 TargetThread(DynamicStackAllocationConfig allocation_config); | 84 TargetThread(const StackConfiguration& stack_config); |
62 | 85 |
63 // PlatformThread::Delegate: | 86 // PlatformThread::Delegate: |
64 void ThreadMain() override; | 87 void ThreadMain() override; |
65 | 88 |
66 // Waits for the thread to have started and be executing in | 89 // Waits for the thread to have started and be executing in |
67 // SignalAndWaitUntilSignaled(). | 90 // SignalAndWaitUntilSignaled(). |
68 void WaitForThreadStart(); | 91 void WaitForThreadStart(); |
69 | 92 |
70 // Allows the thread to return from SignalAndWaitUntilSignaled() and finish | 93 // Allows the thread to return from SignalAndWaitUntilSignaled() and finish |
71 // execution. | 94 // execution. |
72 void SignalThreadToFinish(); | 95 void SignalThreadToFinish(); |
73 | 96 |
74 // This function is guaranteed to be executing between calls to | 97 // This function is guaranteed to be executing between calls to |
75 // WaitForThreadStart() and SignalThreadToFinish() when invoked with | 98 // WaitForThreadStart() and SignalThreadToFinish() when invoked with |
76 // |thread_started_event_| and |finish_event_|. Returns a program counter | 99 // |thread_started_event_| and |finish_event_|. Returns a program counter |
77 // value near the end of the function. May be invoked with null WaitableEvents | 100 // value near the end of the function. May be invoked with null WaitableEvents |
78 // to just return the program counter. | 101 // to just return the program counter. |
79 // | 102 // |
80 // This function is static so that we can get a straightforward address | 103 // This function is static so that we can get a straightforward address |
81 // for it in one of the tests below, rather than dealing with the complexity | 104 // for it in one of the tests below, rather than dealing with the complexity |
82 // of a member function pointer representation. | 105 // of a member function pointer representation. |
83 static const void* SignalAndWaitUntilSignaled( | 106 static const void* SignalAndWaitUntilSignaled( |
84 WaitableEvent* thread_started_event, | 107 WaitableEvent* thread_started_event, |
85 WaitableEvent* finish_event); | 108 WaitableEvent* finish_event, |
| 109 const StackConfiguration* stack_config); |
86 | 110 |
87 // Works like SignalAndWaitUntilSignaled() but additionally allocates memory | 111 // Calls into SignalAndWaitUntilSignaled() after allocating memory on the |
88 // on the stack with alloca. Note that this must be a separate function from | 112 // stack with alloca. |
89 // SignalAndWaitUntilSignaled because on Windows x64 the compiler sets up | 113 static const void* CallWithAlloca(WaitableEvent* thread_started_event, |
90 // dynamic frame handling whenever alloca appears in a function, even if only | 114 WaitableEvent* finish_event, |
91 // conditionally invoked. | 115 const StackConfiguration* stack_config); |
92 static const void* SignalAndWaitUntilSignaledWithAlloca( | 116 |
| 117 // Calls into SignalAndWaitUntilSignaled() via a function in |
| 118 // base_profiler_test_support_library. |
| 119 static const void* CallThroughOtherLibrary( |
93 WaitableEvent* thread_started_event, | 120 WaitableEvent* thread_started_event, |
94 WaitableEvent* finish_event); | 121 WaitableEvent* finish_event, |
| 122 const StackConfiguration* stack_config); |
95 | 123 |
96 PlatformThreadId id() const { return id_; } | 124 PlatformThreadId id() const { return id_; } |
97 | 125 |
98 private: | 126 private: |
| 127 struct TargetFunctionArgs { |
| 128 WaitableEvent* thread_started_event; |
| 129 WaitableEvent* finish_event; |
| 130 const StackConfiguration* stack_config; |
| 131 }; |
| 132 |
| 133 // Callback function to be provided when calling through the other library. |
| 134 static void OtherLibraryCallback(void *arg); |
| 135 |
99 // Returns the current program counter, or a value very close to it. | 136 // Returns the current program counter, or a value very close to it. |
100 static const void* GetProgramCounter(); | 137 static const void* GetProgramCounter(); |
101 | 138 |
102 WaitableEvent thread_started_event_; | 139 WaitableEvent thread_started_event_; |
103 WaitableEvent finish_event_; | 140 WaitableEvent finish_event_; |
104 PlatformThreadId id_; | 141 PlatformThreadId id_; |
105 const DynamicStackAllocationConfig allocation_config_; | 142 const StackConfiguration stack_config_; |
106 | 143 |
107 DISALLOW_COPY_AND_ASSIGN(TargetThread); | 144 DISALLOW_COPY_AND_ASSIGN(TargetThread); |
108 }; | 145 }; |
109 | 146 |
110 TargetThread::TargetThread(DynamicStackAllocationConfig allocation_config) | 147 TargetThread::TargetThread(const StackConfiguration& stack_config) |
111 : thread_started_event_(false, false), finish_event_(false, false), | 148 : thread_started_event_(false, false), finish_event_(false, false), |
112 id_(0), allocation_config_(allocation_config) {} | 149 id_(0), stack_config_(stack_config) {} |
113 | 150 |
114 void TargetThread::ThreadMain() { | 151 void TargetThread::ThreadMain() { |
115 id_ = PlatformThread::CurrentId(); | 152 id_ = PlatformThread::CurrentId(); |
116 if (allocation_config_ == USE_ALLOCA) { | 153 switch (stack_config_.config) { |
117 SignalAndWaitUntilSignaledWithAlloca(&thread_started_event_, | 154 case StackConfiguration::NORMAL: |
118 &finish_event_); | 155 SignalAndWaitUntilSignaled(&thread_started_event_, &finish_event_, |
119 } else { | 156 &stack_config_); |
120 SignalAndWaitUntilSignaled(&thread_started_event_, &finish_event_); | 157 break; |
| 158 |
| 159 case StackConfiguration::WITH_ALLOCA: |
| 160 CallWithAlloca(&thread_started_event_, &finish_event_, &stack_config_); |
| 161 break; |
| 162 |
| 163 case StackConfiguration::WITH_OTHER_LIBRARY: |
| 164 CallThroughOtherLibrary(&thread_started_event_, &finish_event_, |
| 165 &stack_config_); |
| 166 break; |
121 } | 167 } |
122 } | 168 } |
123 | 169 |
124 void TargetThread::WaitForThreadStart() { | 170 void TargetThread::WaitForThreadStart() { |
125 thread_started_event_.Wait(); | 171 thread_started_event_.Wait(); |
126 } | 172 } |
127 | 173 |
128 void TargetThread::SignalThreadToFinish() { | 174 void TargetThread::SignalThreadToFinish() { |
129 finish_event_.Signal(); | 175 finish_event_.Signal(); |
130 } | 176 } |
131 | 177 |
132 // static | 178 // static |
133 // Disable inlining for this function so that it gets its own stack frame. | 179 // Disable inlining for this function so that it gets its own stack frame. |
134 NOINLINE const void* TargetThread::SignalAndWaitUntilSignaled( | 180 NOINLINE const void* TargetThread::SignalAndWaitUntilSignaled( |
135 WaitableEvent* thread_started_event, | 181 WaitableEvent* thread_started_event, |
136 WaitableEvent* finish_event) { | 182 WaitableEvent* finish_event, |
| 183 const StackConfiguration* stack_config) { |
137 if (thread_started_event && finish_event) { | 184 if (thread_started_event && finish_event) { |
138 thread_started_event->Signal(); | 185 thread_started_event->Signal(); |
139 finish_event->Wait(); | 186 finish_event->Wait(); |
140 } | 187 } |
141 | 188 |
142 // Volatile to prevent a tail call to GetProgramCounter(). | 189 // Volatile to prevent a tail call to GetProgramCounter(). |
143 const void* volatile program_counter = GetProgramCounter(); | 190 const void* volatile program_counter = GetProgramCounter(); |
144 return program_counter; | 191 return program_counter; |
145 } | 192 } |
146 | 193 |
147 // static | 194 // static |
148 // Disable inlining for this function so that it gets its own stack frame. | 195 // Disable inlining for this function so that it gets its own stack frame. |
149 NOINLINE const void* TargetThread::SignalAndWaitUntilSignaledWithAlloca( | 196 NOINLINE const void* TargetThread::CallWithAlloca( |
150 WaitableEvent* thread_started_event, | 197 WaitableEvent* thread_started_event, |
151 WaitableEvent* finish_event) { | 198 WaitableEvent* finish_event, |
| 199 const StackConfiguration* stack_config) { |
152 const size_t alloca_size = 100; | 200 const size_t alloca_size = 100; |
153 // Memset to 0 to generate a clean failure. | 201 // Memset to 0 to generate a clean failure. |
154 std::memset(alloca(alloca_size), 0, alloca_size); | 202 std::memset(alloca(alloca_size), 0, alloca_size); |
155 | 203 |
156 if (thread_started_event && finish_event) { | 204 SignalAndWaitUntilSignaled(thread_started_event, finish_event, stack_config); |
157 thread_started_event->Signal(); | 205 |
158 finish_event->Wait(); | 206 // Volatile to prevent a tail call to GetProgramCounter(). |
| 207 const void* volatile program_counter = GetProgramCounter(); |
| 208 return program_counter; |
| 209 } |
| 210 |
| 211 // static |
| 212 NOINLINE const void* TargetThread::CallThroughOtherLibrary( |
| 213 WaitableEvent* thread_started_event, |
| 214 WaitableEvent* finish_event, |
| 215 const StackConfiguration* stack_config) { |
| 216 if (stack_config) { |
| 217 // A function whose arguments are a function accepting void*, and a void*. |
| 218 using InvokeCallbackFunction = void(*)(void (*)(void*), void*); |
| 219 EXPECT_TRUE(stack_config->library); |
| 220 InvokeCallbackFunction function = reinterpret_cast<InvokeCallbackFunction>( |
| 221 GetFunctionPointerFromNativeLibrary(stack_config->library, |
| 222 "InvokeCallbackFunction")); |
| 223 EXPECT_TRUE(function); |
| 224 |
| 225 TargetFunctionArgs args = { |
| 226 thread_started_event, |
| 227 finish_event, |
| 228 stack_config |
| 229 }; |
| 230 (*function)(&OtherLibraryCallback, &args); |
159 } | 231 } |
160 | 232 |
161 // Volatile to prevent a tail call to GetProgramCounter(). | 233 // Volatile to prevent a tail call to GetProgramCounter(). |
162 const void* volatile program_counter = GetProgramCounter(); | 234 const void* volatile program_counter = GetProgramCounter(); |
163 return program_counter; | 235 return program_counter; |
164 } | 236 } |
165 | 237 |
166 // static | 238 // static |
| 239 void TargetThread::OtherLibraryCallback(void *arg) { |
| 240 const TargetFunctionArgs* args = static_cast<TargetFunctionArgs*>(arg); |
| 241 SignalAndWaitUntilSignaled(args->thread_started_event, args->finish_event, |
| 242 args->stack_config); |
| 243 // Prevent tail call. |
| 244 volatile int i = 0; |
| 245 ALLOW_UNUSED_LOCAL(i); |
| 246 } |
| 247 |
| 248 // static |
167 // Disable inlining for this function so that it gets its own stack frame. | 249 // Disable inlining for this function so that it gets its own stack frame. |
168 NOINLINE const void* TargetThread::GetProgramCounter() { | 250 NOINLINE const void* TargetThread::GetProgramCounter() { |
169 #if defined(OS_WIN) | 251 #if defined(OS_WIN) |
170 return _ReturnAddress(); | 252 return _ReturnAddress(); |
171 #else | 253 #else |
172 return __builtin_return_address(0); | 254 return __builtin_return_address(0); |
173 #endif | 255 #endif |
174 } | 256 } |
175 | 257 |
| 258 // Loads the other library, which defines a function to be called in the |
| 259 // WITH_OTHER_LIBRARY configuration. |
| 260 NativeLibrary LoadOtherLibrary() { |
| 261 // The lambda gymnastics works around the fact that we can't use ASSERT_* |
| 262 // macros in a function returning non-null. |
| 263 const auto load = [](NativeLibrary* library) { |
| 264 FilePath other_library_path; |
| 265 ASSERT_TRUE(PathService::Get(DIR_EXE, &other_library_path)); |
| 266 other_library_path = other_library_path.Append(FilePath::FromUTF16Unsafe( |
| 267 GetNativeLibraryName(ASCIIToUTF16( |
| 268 "base_profiler_test_support_library")))); |
| 269 NativeLibraryLoadError load_error; |
| 270 *library = LoadNativeLibrary(other_library_path, &load_error); |
| 271 ASSERT_TRUE(*library) << "error loading " << other_library_path.value() |
| 272 << ": " << load_error.ToString(); |
| 273 }; |
| 274 |
| 275 NativeLibrary library; |
| 276 load(&library); |
| 277 return library; |
| 278 } |
| 279 |
| 280 // Unloads |library| and returns when it has completed unloading. Unloading a |
| 281 // library is asynchronous on Windows, so simply calling UnloadNativeLibrary() |
| 282 // is insufficient to ensure it's been unloaded. |
| 283 void SynchronousUnloadNativeLibrary(NativeLibrary library) { |
| 284 UnloadNativeLibrary(library); |
| 285 #if defined(OS_WIN) |
| 286 // NativeLibrary is a typedef for HMODULE, which is actually the base address |
| 287 // of the module. |
| 288 uintptr_t module_base_address = reinterpret_cast<uintptr_t>(library); |
| 289 HMODULE module_handle; |
| 290 // Keep trying to get the module handle until the call fails. |
| 291 while (::GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | |
| 292 GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, |
| 293 reinterpret_cast<LPCTSTR>(module_base_address), |
| 294 &module_handle) || |
| 295 ::GetLastError() != ERROR_MOD_NOT_FOUND) { |
| 296 PlatformThread::Sleep(TimeDelta::FromMilliseconds(1)); |
| 297 } |
| 298 #else |
| 299 NOTIMPLEMENTED(); |
| 300 #endif |
| 301 } |
176 | 302 |
177 // Called on the profiler thread when complete, to collect profiles. | 303 // Called on the profiler thread when complete, to collect profiles. |
178 void SaveProfiles(CallStackProfiles* profiles, | 304 void SaveProfiles(CallStackProfiles* profiles, |
179 const CallStackProfiles& pending_profiles) { | 305 const CallStackProfiles& pending_profiles) { |
180 *profiles = pending_profiles; | 306 *profiles = pending_profiles; |
181 } | 307 } |
182 | 308 |
183 // Called on the profiler thread when complete. Collects profiles produced by | 309 // Called on the profiler thread when complete. Collects profiles produced by |
184 // the profiler, and signals an event to allow the main thread to know that that | 310 // the profiler, and signals an event to allow the main thread to know that that |
185 // the profiler is done. | 311 // the profiler is done. |
186 void SaveProfilesAndSignalEvent(CallStackProfiles* profiles, | 312 void SaveProfilesAndSignalEvent(CallStackProfiles* profiles, |
187 WaitableEvent* event, | 313 WaitableEvent* event, |
188 const CallStackProfiles& pending_profiles) { | 314 const CallStackProfiles& pending_profiles) { |
189 *profiles = pending_profiles; | 315 *profiles = pending_profiles; |
190 event->Signal(); | 316 event->Signal(); |
191 } | 317 } |
192 | 318 |
193 // Executes the function with the target thread running and executing within | 319 // Executes the function with the target thread running and executing within |
194 // SignalAndWaitUntilSignaled() or SignalAndWaitUntilSignaledWithAlloca(), | 320 // SignalAndWaitUntilSignaled(). Performs all necessary target thread startup |
195 // depending on the value of |allocation_config|. Performs all necessary target | 321 // and shutdown work before and afterward. |
196 // thread startup and shutdown work before and afterward. | |
197 template <class Function> | 322 template <class Function> |
198 void WithTargetThread(Function function, | 323 void WithTargetThread(Function function, |
199 DynamicStackAllocationConfig allocation_config) { | 324 const StackConfiguration& stack_config) { |
200 TargetThread target_thread(allocation_config); | 325 TargetThread target_thread(stack_config); |
201 PlatformThreadHandle target_thread_handle; | 326 PlatformThreadHandle target_thread_handle; |
202 EXPECT_TRUE(PlatformThread::Create(0, &target_thread, &target_thread_handle)); | 327 EXPECT_TRUE(PlatformThread::Create(0, &target_thread, &target_thread_handle)); |
203 | 328 |
204 target_thread.WaitForThreadStart(); | 329 target_thread.WaitForThreadStart(); |
205 | 330 |
206 function(target_thread.id()); | 331 function(target_thread.id()); |
207 | 332 |
208 target_thread.SignalThreadToFinish(); | 333 target_thread.SignalThreadToFinish(); |
209 | 334 |
210 PlatformThread::Join(target_thread_handle); | 335 PlatformThread::Join(target_thread_handle); |
211 } | 336 } |
212 | 337 |
213 template <class Function> | 338 template <class Function> |
214 void WithTargetThread(Function function) { | 339 void WithTargetThread(Function function) { |
215 WithTargetThread(function, NO_ALLOCA); | 340 WithTargetThread(function, StackConfiguration(StackConfiguration::NORMAL)); |
216 } | 341 } |
217 | 342 |
218 // Captures profiles as specified by |params| on the TargetThread, and returns | 343 // Captures profiles as specified by |params| on the TargetThread, and returns |
219 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to | 344 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to |
220 // complete. | 345 // complete. |
221 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, | 346 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, |
222 CallStackProfiles* profiles) { | 347 CallStackProfiles* profiles) { |
223 profiles->clear(); | 348 profiles->clear(); |
224 | 349 |
225 WithTargetThread([¶ms, profiles, profiler_wait_time]( | 350 WithTargetThread([¶ms, profiles, profiler_wait_time]( |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
261 // Searches through the frames in |sample|, returning an iterator to the first | 386 // Searches through the frames in |sample|, returning an iterator to the first |
262 // frame that has an instruction pointer within |target_function|. Returns | 387 // frame that has an instruction pointer within |target_function|. Returns |
263 // sample.end() if no such frames are found. | 388 // sample.end() if no such frames are found. |
264 Sample::const_iterator FindFirstFrameWithinFunction( | 389 Sample::const_iterator FindFirstFrameWithinFunction( |
265 const Sample& sample, | 390 const Sample& sample, |
266 TargetFunction target_function) { | 391 TargetFunction target_function) { |
267 uintptr_t function_start = reinterpret_cast<uintptr_t>( | 392 uintptr_t function_start = reinterpret_cast<uintptr_t>( |
268 MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | 393 MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
269 target_function))); | 394 target_function))); |
270 uintptr_t function_end = | 395 uintptr_t function_end = |
271 reinterpret_cast<uintptr_t>(target_function(nullptr, nullptr)); | 396 reinterpret_cast<uintptr_t>(target_function(nullptr, nullptr, nullptr)); |
272 for (auto it = sample.begin(); it != sample.end(); ++it) { | 397 for (auto it = sample.begin(); it != sample.end(); ++it) { |
273 if ((it->instruction_pointer >= function_start) && | 398 if ((it->instruction_pointer >= function_start) && |
274 (it->instruction_pointer <= function_end)) | 399 (it->instruction_pointer <= function_end)) |
275 return it; | 400 return it; |
276 } | 401 } |
277 return sample.end(); | 402 return sample.end(); |
278 } | 403 } |
279 | 404 |
280 // Formats a sample into a string that can be output for test diagnostics. | 405 // Formats a sample into a string that can be output for test diagnostics. |
281 std::string FormatSampleForDiagnosticOutput( | 406 std::string FormatSampleForDiagnosticOutput( |
282 const Sample& sample, | 407 const Sample& sample, |
283 const std::vector<Module>& modules) { | 408 const std::vector<Module>& modules) { |
284 std::string output; | 409 std::string output; |
285 for (const Frame& frame: sample) { | 410 for (const Frame& frame: sample) { |
286 output += StringPrintf( | 411 output += StringPrintf( |
287 "0x%p %s\n", reinterpret_cast<const void*>(frame.instruction_pointer), | 412 "0x%p %s\n", reinterpret_cast<const void*>(frame.instruction_pointer), |
288 modules[frame.module_index].filename.AsUTF8Unsafe().c_str()); | 413 modules[frame.module_index].filename.AsUTF8Unsafe().c_str()); |
289 } | 414 } |
290 return output; | 415 return output; |
291 } | 416 } |
292 | 417 |
293 // Returns a duration that is longer than the test timeout. We would use | 418 // Returns a duration that is longer than the test timeout. We would use |
294 // TimeDelta::Max() but https://crbug.com/465948. | 419 // TimeDelta::Max() but https://crbug.com/465948. |
295 TimeDelta AVeryLongTimeDelta() { return TimeDelta::FromDays(1); } | 420 TimeDelta AVeryLongTimeDelta() { return TimeDelta::FromDays(1); } |
296 | 421 |
| 422 // Tests the scenario where the library is unloaded after copying the stack, but |
| 423 // before walking it. If |wait_until_unloaded| is true, ensures that the |
| 424 // asynchronous library loading has completed before walking the stack. If |
| 425 // false, the unloading may still be occurring during the stack walk. |
| 426 void TestLibraryUnload(bool wait_until_unloaded) { |
| 427 // Test delegate that supports intervening between the copying of the stack |
| 428 // and the walking of the stack. |
| 429 class StackCopiedSignaler : public NativeStackSamplerTestDelegate { |
| 430 public: |
| 431 StackCopiedSignaler(WaitableEvent* stack_copied, |
| 432 WaitableEvent* start_stack_walk, |
| 433 bool wait_to_walk_stack) |
| 434 : stack_copied_(stack_copied), start_stack_walk_(start_stack_walk), |
| 435 wait_to_walk_stack_(wait_to_walk_stack) { |
| 436 } |
| 437 |
| 438 void OnPreStackWalk() override { |
| 439 stack_copied_->Signal(); |
| 440 if (wait_to_walk_stack_) |
| 441 start_stack_walk_->Wait(); |
| 442 } |
| 443 |
| 444 private: |
| 445 WaitableEvent* const stack_copied_; |
| 446 WaitableEvent* const start_stack_walk_; |
| 447 const bool wait_to_walk_stack_; |
| 448 }; |
| 449 |
| 450 SamplingParams params; |
| 451 params.sampling_interval = TimeDelta::FromMilliseconds(0); |
| 452 params.samples_per_burst = 1; |
| 453 |
| 454 NativeLibrary other_library = LoadOtherLibrary(); |
| 455 TargetThread target_thread(StackConfiguration( |
| 456 StackConfiguration::WITH_OTHER_LIBRARY, |
| 457 other_library)); |
| 458 |
| 459 PlatformThreadHandle target_thread_handle; |
| 460 EXPECT_TRUE(PlatformThread::Create(0, &target_thread, &target_thread_handle)); |
| 461 |
| 462 target_thread.WaitForThreadStart(); |
| 463 |
| 464 WaitableEvent sampling_thread_completed(true, false); |
| 465 std::vector<CallStackProfile> profiles; |
| 466 const StackSamplingProfiler::CompletedCallback callback = |
| 467 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), |
| 468 Unretained(&sampling_thread_completed)); |
| 469 WaitableEvent stack_copied(true, false); |
| 470 WaitableEvent start_stack_walk(true, false); |
| 471 StackCopiedSignaler test_delegate(&stack_copied, &start_stack_walk, |
| 472 wait_until_unloaded); |
| 473 StackSamplingProfiler profiler(target_thread.id(), params, callback, |
| 474 &test_delegate); |
| 475 |
| 476 profiler.Start(); |
| 477 |
| 478 // Wait for the stack to be copied and the target thread to be resumed. |
| 479 stack_copied.Wait(); |
| 480 |
| 481 // Cause the target thread to finish, so that it's no longer executing code in |
| 482 // the library we're about to unload. |
| 483 target_thread.SignalThreadToFinish(); |
| 484 PlatformThread::Join(target_thread_handle); |
| 485 |
| 486 // Unload the library now that it's not being used. |
| 487 if (wait_until_unloaded) |
| 488 SynchronousUnloadNativeLibrary(other_library); |
| 489 else |
| 490 UnloadNativeLibrary(other_library); |
| 491 |
| 492 // Let the stack walk commence after unloading the library, if we're waiting |
| 493 // on that event. |
| 494 start_stack_walk.Signal(); |
| 495 |
| 496 // Wait for the sampling thread to complete and fill out |profiles|. |
| 497 sampling_thread_completed.Wait(); |
| 498 |
| 499 // Look up the sample. |
| 500 ASSERT_EQ(1u, profiles.size()); |
| 501 const CallStackProfile& profile = profiles[0]; |
| 502 ASSERT_EQ(1u, profile.samples.size()); |
| 503 const Sample& sample = profile.samples[0]; |
| 504 |
| 505 // Check that the stack contains a frame for |
| 506 // TargetThread::SignalAndWaitUntilSignaled(). |
| 507 Sample::const_iterator end_frame = FindFirstFrameWithinFunction( |
| 508 sample, |
| 509 &TargetThread::SignalAndWaitUntilSignaled); |
| 510 ASSERT_TRUE(end_frame != sample.end()) |
| 511 << "Function at " |
| 512 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
| 513 &TargetThread::SignalAndWaitUntilSignaled)) |
| 514 << " was not found in stack:\n" |
| 515 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 516 |
| 517 if (wait_until_unloaded) { |
| 518 // The stack should look like this, resulting in two frames between |
| 519 // SignalAndWaitUntilSignaled and the last frame, which should be the one in |
| 520 // the now-unloaded library: |
| 521 // |
| 522 // ... WaitableEvent and system frames ... |
| 523 // TargetThread::SignalAndWaitUntilSignaled |
| 524 // TargetThread::OtherLibraryCallback |
| 525 // InvokeCallbackFunction (in other library, now unloaded) |
| 526 EXPECT_EQ(2, (sample.end() - 1) - end_frame) |
| 527 << "Stack:\n" |
| 528 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 529 } else { |
| 530 // We didn't wait for the asynchonous unloading to complete, so the results |
| 531 // are non-deterministic: if the library finished unloading we should have |
| 532 // the same stack as |wait_until_unloaded|, if not we should have the full |
| 533 // stack. The important thing is that we should not crash. |
| 534 |
| 535 if ((sample.end() - 1) - end_frame == 2) { |
| 536 // This is the same case as |wait_until_unloaded|. |
| 537 return; |
| 538 } |
| 539 |
| 540 // Check that the stack contains a frame for |
| 541 // TargetThread::CallThroughOtherLibrary(). |
| 542 Sample::const_iterator other_library_frame = FindFirstFrameWithinFunction( |
| 543 sample, |
| 544 &TargetThread::CallThroughOtherLibrary); |
| 545 ASSERT_TRUE(other_library_frame != sample.end()) |
| 546 << "Function at " |
| 547 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
| 548 &TargetThread::CallThroughOtherLibrary)) |
| 549 << " was not found in stack:\n" |
| 550 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 551 |
| 552 // The stack should look like this, resulting in three frames between |
| 553 // SignalAndWaitUntilSignaled and CallThroughOtherLibrary: |
| 554 // |
| 555 // ... WaitableEvent and system frames ... |
| 556 // TargetThread::SignalAndWaitUntilSignaled |
| 557 // TargetThread::OtherLibraryCallback |
| 558 // InvokeCallbackFunction (in other library) |
| 559 // TargetThread::CallThroughOtherLibrary |
| 560 EXPECT_EQ(3, other_library_frame - end_frame) |
| 561 << "Stack:\n" |
| 562 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 563 } |
| 564 } |
| 565 |
297 } // namespace | 566 } // namespace |
298 | 567 |
299 // Checks that the basic expected information is present in a sampled call stack | 568 // Checks that the basic expected information is present in a sampled call stack |
300 // profile. | 569 // profile. |
301 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 570 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
302 #define MAYBE_Basic Basic | 571 #define MAYBE_Basic Basic |
303 #else | 572 #else |
304 #define MAYBE_Basic DISABLED_Basic | 573 #define MAYBE_Basic DISABLED_Basic |
305 #endif | 574 #endif |
306 TEST(StackSamplingProfilerTest, MAYBE_Basic) { | 575 TEST(StackSamplingProfilerTest, MAYBE_Basic) { |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
355 std::vector<CallStackProfile> profiles; | 624 std::vector<CallStackProfile> profiles; |
356 WithTargetThread([¶ms, &profiles]( | 625 WithTargetThread([¶ms, &profiles]( |
357 PlatformThreadId target_thread_id) { | 626 PlatformThreadId target_thread_id) { |
358 WaitableEvent sampling_thread_completed(true, false); | 627 WaitableEvent sampling_thread_completed(true, false); |
359 const StackSamplingProfiler::CompletedCallback callback = | 628 const StackSamplingProfiler::CompletedCallback callback = |
360 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | 629 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), |
361 Unretained(&sampling_thread_completed)); | 630 Unretained(&sampling_thread_completed)); |
362 StackSamplingProfiler profiler(target_thread_id, params, callback); | 631 StackSamplingProfiler profiler(target_thread_id, params, callback); |
363 profiler.Start(); | 632 profiler.Start(); |
364 sampling_thread_completed.Wait(); | 633 sampling_thread_completed.Wait(); |
365 }, USE_ALLOCA); | 634 }, StackConfiguration(StackConfiguration::WITH_ALLOCA)); |
366 | 635 |
367 // Look up the sample. | 636 // Look up the sample. |
368 ASSERT_EQ(1u, profiles.size()); | 637 ASSERT_EQ(1u, profiles.size()); |
369 const CallStackProfile& profile = profiles[0]; | 638 const CallStackProfile& profile = profiles[0]; |
370 ASSERT_EQ(1u, profile.samples.size()); | 639 ASSERT_EQ(1u, profile.samples.size()); |
371 const Sample& sample = profile.samples[0]; | 640 const Sample& sample = profile.samples[0]; |
372 | 641 |
373 // Check that the stack contains a frame for | 642 // Check that the stack contains a frame for |
374 // TargetThread::SignalAndWaitUntilSignaledWithAlloca(). | 643 // TargetThread::SignalAndWaitUntilSignaled(). |
375 Sample::const_iterator loc = FindFirstFrameWithinFunction( | 644 Sample::const_iterator end_frame = FindFirstFrameWithinFunction( |
376 sample, | 645 sample, |
377 &TargetThread::SignalAndWaitUntilSignaledWithAlloca); | 646 &TargetThread::SignalAndWaitUntilSignaled); |
378 ASSERT_TRUE(loc != sample.end()) | 647 ASSERT_TRUE(end_frame != sample.end()) |
379 << "Function at " | 648 << "Function at " |
380 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | 649 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
381 &TargetThread::SignalAndWaitUntilSignaledWithAlloca)) | 650 &TargetThread::SignalAndWaitUntilSignaled)) |
382 << " was not found in stack:\n" | 651 << " was not found in stack:\n" |
383 << FormatSampleForDiagnosticOutput(sample, profile.modules); | 652 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 653 |
| 654 // Check that the stack contains a frame for TargetThread::CallWithAlloca(). |
| 655 Sample::const_iterator alloca_frame = FindFirstFrameWithinFunction( |
| 656 sample, |
| 657 &TargetThread::CallWithAlloca); |
| 658 ASSERT_TRUE(alloca_frame != sample.end()) |
| 659 << "Function at " |
| 660 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
| 661 &TargetThread::CallWithAlloca)) |
| 662 << " was not found in stack:\n" |
| 663 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 664 |
| 665 // These frames should be adjacent on the stack. |
| 666 EXPECT_EQ(1, alloca_frame - end_frame) |
| 667 << "Stack:\n" |
| 668 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
384 } | 669 } |
385 | 670 |
386 // Checks that the fire-and-forget interface works. | 671 // Checks that the fire-and-forget interface works. |
387 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 672 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
388 #define MAYBE_StartAndRunAsync StartAndRunAsync | 673 #define MAYBE_StartAndRunAsync StartAndRunAsync |
389 #else | 674 #else |
390 #define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync | 675 #define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync |
391 #endif | 676 #endif |
392 TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) { | 677 TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) { |
393 // StartAndRunAsync requires the caller to have a message loop. | 678 // StartAndRunAsync requires the caller to have a message loop. |
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
576 EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait( | 861 EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait( |
577 TimeDelta::FromMilliseconds(25))); | 862 TimeDelta::FromMilliseconds(25))); |
578 | 863 |
579 // Start the other profiler again and it should run. | 864 // Start the other profiler again and it should run. |
580 profiler[other_profiler]->Start(); | 865 profiler[other_profiler]->Start(); |
581 sampling_completed[other_profiler]->Wait(); | 866 sampling_completed[other_profiler]->Wait(); |
582 EXPECT_EQ(1u, profiles[other_profiler].size()); | 867 EXPECT_EQ(1u, profiles[other_profiler].size()); |
583 }); | 868 }); |
584 } | 869 } |
585 | 870 |
| 871 // Checks that a stack that runs through another library produces a stack with |
| 872 // the expected functions. |
| 873 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
| 874 #define MAYBE_OtherLibrary OtherLibrary |
| 875 #else |
| 876 #define MAYBE_OtherLibrary DISABLED_OtherLibrary |
| 877 #endif |
| 878 TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) { |
| 879 SamplingParams params; |
| 880 params.sampling_interval = TimeDelta::FromMilliseconds(0); |
| 881 params.samples_per_burst = 1; |
| 882 |
| 883 std::vector<CallStackProfile> profiles; |
| 884 { |
| 885 ScopedNativeLibrary other_library(LoadOtherLibrary()); |
| 886 WithTargetThread([¶ms, &profiles]( |
| 887 PlatformThreadId target_thread_id) { |
| 888 WaitableEvent sampling_thread_completed(true, false); |
| 889 const StackSamplingProfiler::CompletedCallback callback = |
| 890 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), |
| 891 Unretained(&sampling_thread_completed)); |
| 892 StackSamplingProfiler profiler(target_thread_id, params, callback); |
| 893 profiler.Start(); |
| 894 sampling_thread_completed.Wait(); |
| 895 }, StackConfiguration(StackConfiguration::WITH_OTHER_LIBRARY, |
| 896 other_library.get())); |
| 897 } |
| 898 |
| 899 // Look up the sample. |
| 900 ASSERT_EQ(1u, profiles.size()); |
| 901 const CallStackProfile& profile = profiles[0]; |
| 902 ASSERT_EQ(1u, profile.samples.size()); |
| 903 const Sample& sample = profile.samples[0]; |
| 904 |
| 905 // Check that the stack contains a frame for |
| 906 // TargetThread::CallThroughOtherLibrary(). |
| 907 Sample::const_iterator other_library_frame = FindFirstFrameWithinFunction( |
| 908 sample, |
| 909 &TargetThread::CallThroughOtherLibrary); |
| 910 ASSERT_TRUE(other_library_frame != sample.end()) |
| 911 << "Function at " |
| 912 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
| 913 &TargetThread::CallThroughOtherLibrary)) |
| 914 << " was not found in stack:\n" |
| 915 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 916 |
| 917 // Check that the stack contains a frame for |
| 918 // TargetThread::SignalAndWaitUntilSignaled(). |
| 919 Sample::const_iterator end_frame = FindFirstFrameWithinFunction( |
| 920 sample, |
| 921 &TargetThread::SignalAndWaitUntilSignaled); |
| 922 ASSERT_TRUE(end_frame != sample.end()) |
| 923 << "Function at " |
| 924 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
| 925 &TargetThread::SignalAndWaitUntilSignaled)) |
| 926 << " was not found in stack:\n" |
| 927 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 928 |
| 929 // The stack should look like this, resulting in three frames between |
| 930 // SignalAndWaitUntilSignaled and CallThroughOtherLibrary: |
| 931 // |
| 932 // ... WaitableEvent and system frames ... |
| 933 // TargetThread::SignalAndWaitUntilSignaled |
| 934 // TargetThread::OtherLibraryCallback |
| 935 // InvokeCallbackFunction (in other library) |
| 936 // TargetThread::CallThroughOtherLibrary |
| 937 EXPECT_EQ(3, other_library_frame - end_frame) |
| 938 << "Stack:\n" << FormatSampleForDiagnosticOutput(sample, profile.modules); |
| 939 } |
| 940 |
// Checks that a stack that runs through a library that is unloading produces a
// stack, and doesn't crash.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_UnloadingLibrary UnloadingLibrary
#else
#define MAYBE_UnloadingLibrary DISABLED_UnloadingLibrary
#endif
TEST(StackSamplingProfilerTest, MAYBE_UnloadingLibrary) {
  // false == don't wait for the asynchronous unload to finish before sampling,
  // so the library may still be in the process of unloading. The captured
  // stack is therefore non-deterministic; the essential check is no crash.
  TestLibraryUnload(false);
}
| 951 |
// Checks that a stack that runs through a library that has been unloaded
// produces a stack, and doesn't crash.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_UnloadedLibrary UnloadedLibrary
#else
#define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary
#endif
TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) {
  // true == wait until the library has completely unloaded before sampling,
  // so the stack deterministically ends at the frames below the unloaded
  // module.
  TestLibraryUnload(true);
}
| 962 |
586 } // namespace base | 963 } // namespace base |
OLD | NEW |