OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <cstdlib> | 5 #include <cstdlib> |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/compiler_specific.h" | 8 #include "base/compiler_specific.h" |
9 #include "base/memory/scoped_vector.h" | 9 #include "base/memory/scoped_vector.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
11 #include "base/native_library.h" | |
11 #include "base/path_service.h" | 12 #include "base/path_service.h" |
13 #include "base/profiler/native_stack_sampler.h" | |
12 #include "base/profiler/stack_sampling_profiler.h" | 14 #include "base/profiler/stack_sampling_profiler.h" |
13 #include "base/run_loop.h" | 15 #include "base/run_loop.h" |
16 #include "base/scoped_native_library.h" | |
14 #include "base/strings/stringprintf.h" | 17 #include "base/strings/stringprintf.h" |
18 #include "base/strings/utf_string_conversions.h" | |
15 #include "base/synchronization/waitable_event.h" | 19 #include "base/synchronization/waitable_event.h" |
16 #include "base/threading/platform_thread.h" | 20 #include "base/threading/platform_thread.h" |
17 #include "base/time/time.h" | 21 #include "base/time/time.h" |
18 #include "build/build_config.h" | 22 #include "build/build_config.h" |
19 #include "testing/gtest/include/gtest/gtest.h" | 23 #include "testing/gtest/include/gtest/gtest.h" |
20 | 24 |
21 #if defined(OS_WIN) | 25 #if defined(OS_WIN) |
22 #include <intrin.h> | 26 #include <intrin.h> |
23 #include <malloc.h> | 27 #include <malloc.h> |
28 #include <windows.h> | |
24 #else | 29 #else |
25 #include <alloca.h> | 30 #include <alloca.h> |
26 #endif | 31 #endif |
27 | 32 |
28 // STACK_SAMPLING_PROFILER_SUPPORTED is used to conditionally enable the tests | 33 // STACK_SAMPLING_PROFILER_SUPPORTED is used to conditionally enable the tests |
29 // below for supported platforms (currently Win x64). | 34 // below for supported platforms (currently Win x64). |
30 #if defined(_WIN64) | 35 #if defined(_WIN64) |
31 #define STACK_SAMPLING_PROFILER_SUPPORTED 1 | 36 #define STACK_SAMPLING_PROFILER_SUPPORTED 1 |
32 #endif | 37 #endif |
33 | 38 |
34 #if defined(OS_WIN) | 39 #if defined(OS_WIN) |
35 #pragma intrinsic(_ReturnAddress) | 40 #pragma intrinsic(_ReturnAddress) |
36 #endif | 41 #endif |
37 | 42 |
38 namespace base { | 43 namespace base { |
39 | 44 |
40 using SamplingParams = StackSamplingProfiler::SamplingParams; | 45 using SamplingParams = StackSamplingProfiler::SamplingParams; |
41 using Frame = StackSamplingProfiler::Frame; | 46 using Frame = StackSamplingProfiler::Frame; |
42 using Module = StackSamplingProfiler::Module; | 47 using Module = StackSamplingProfiler::Module; |
43 using Sample = StackSamplingProfiler::Sample; | 48 using Sample = StackSamplingProfiler::Sample; |
44 using CallStackProfile = StackSamplingProfiler::CallStackProfile; | 49 using CallStackProfile = StackSamplingProfiler::CallStackProfile; |
45 using CallStackProfiles = StackSamplingProfiler::CallStackProfiles; | 50 using CallStackProfiles = StackSamplingProfiler::CallStackProfiles; |
46 | 51 |
47 namespace { | 52 namespace { |
48 | 53 |
49 // Configuration for whether to allocate dynamic stack memory. | 54 // Configuration for the frames that appear on the stack. |
50 enum DynamicStackAllocationConfig { USE_ALLOCA, NO_ALLOCA }; | 55 struct StackConfiguration { |
56 enum Config { NORMAL, WITH_ALLOCA, WITH_OTHER_LIBRARY }; | |
57 | |
58 explicit StackConfiguration(Config config) | |
59 : StackConfiguration(config, nullptr) { | |
60 EXPECT_NE(config, WITH_OTHER_LIBRARY); | |
61 } | |
62 | |
63 StackConfiguration(Config config, NativeLibrary library) | |
64 : config(config), library(library) { | |
65 EXPECT_TRUE(config != WITH_OTHER_LIBRARY || library); | |
66 } | |
67 | |
68 Config config; | |
69 | |
70 // Only used if config == WITH_OTHER_LIBRARY. | |
71 NativeLibrary library; | |
72 }; | |
51 | 73 |
52 // Signature for a target function that is expected to appear in the stack. See | 74 // Signature for a target function that is expected to appear in the stack. See |
53 // SignalAndWaitUntilSignaled() below. The return value should be a program | 75 // SignalAndWaitUntilSignaled() below. The return value should be a program |
54 // counter pointer near the end of the function. | 76 // counter pointer near the end of the function. |
55 using TargetFunction = const void*(*)(WaitableEvent*, WaitableEvent*); | 77 using TargetFunction = const void*(*)(WaitableEvent*, WaitableEvent*, |
78 const StackConfiguration*); | |
56 | 79 |
57 // A thread to target for profiling, whose stack is guaranteed to contain | 80 // A thread to target for profiling, whose stack is guaranteed to contain |
58 // SignalAndWaitUntilSignaled() when coordinated with the main thread. | 81 // SignalAndWaitUntilSignaled() when coordinated with the main thread. |
59 class TargetThread : public PlatformThread::Delegate { | 82 class TargetThread : public PlatformThread::Delegate { |
60 public: | 83 public: |
61 TargetThread(DynamicStackAllocationConfig allocation_config); | 84 TargetThread(const StackConfiguration& stack_config); |
62 | 85 |
63 // PlatformThread::Delegate: | 86 // PlatformThread::Delegate: |
64 void ThreadMain() override; | 87 void ThreadMain() override; |
65 | 88 |
66 // Waits for the thread to have started and be executing in | 89 // Waits for the thread to have started and be executing in |
67 // SignalAndWaitUntilSignaled(). | 90 // SignalAndWaitUntilSignaled(). |
68 void WaitForThreadStart(); | 91 void WaitForThreadStart(); |
69 | 92 |
70 // Allows the thread to return from SignalAndWaitUntilSignaled() and finish | 93 // Allows the thread to return from SignalAndWaitUntilSignaled() and finish |
71 // execution. | 94 // execution. |
72 void SignalThreadToFinish(); | 95 void SignalThreadToFinish(); |
73 | 96 |
74 // This function is guaranteed to be executing between calls to | 97 // This function is guaranteed to be executing between calls to |
75 // WaitForThreadStart() and SignalThreadToFinish() when invoked with | 98 // WaitForThreadStart() and SignalThreadToFinish() when invoked with |
76 // |thread_started_event_| and |finish_event_|. Returns a program counter | 99 // |thread_started_event_| and |finish_event_|. Returns a program counter |
77 // value near the end of the function. May be invoked with null WaitableEvents | 100 // value near the end of the function. May be invoked with null WaitableEvents |
78 // to just return the program counter. | 101 // to just return the program counter. |
79 // | 102 // |
80 // This function is static so that we can get a straightforward address | 103 // This function is static so that we can get a straightforward address |
81 // for it in one of the tests below, rather than dealing with the complexity | 104 // for it in one of the tests below, rather than dealing with the complexity |
82 // of a member function pointer representation. | 105 // of a member function pointer representation. |
83 static const void* SignalAndWaitUntilSignaled( | 106 static const void* SignalAndWaitUntilSignaled( |
84 WaitableEvent* thread_started_event, | 107 WaitableEvent* thread_started_event, |
85 WaitableEvent* finish_event); | 108 WaitableEvent* finish_event, |
109 const StackConfiguration* stack_config); | |
86 | 110 |
87 // Works like SignalAndWaitUntilSignaled() but additionally allocates memory | 111 // Calls into SignalAndWaitUntilSignaled() after allocating memory on the |
88 // on the stack with alloca. Note that this must be a separate function from | 112 // stack with alloca. |
89 // SignalAndWaitUntilSignaled because on Windows x64 the compiler sets up | 113 static const void* CallWithAlloca(WaitableEvent* thread_started_event, |
90 // dynamic frame handling whenever alloca appears in a function, even if only | 114 WaitableEvent* finish_event, |
91 // conditionally invoked. | 115 const StackConfiguration* stack_config); |
92 static const void* SignalAndWaitUntilSignaledWithAlloca( | 116 |
117 // Calls into SignalAndWaitUntilSignaled() via a function in | |
118 // base_profiler_test_support_library. | |
119 static const void* CallThroughOtherLibrary( | |
93 WaitableEvent* thread_started_event, | 120 WaitableEvent* thread_started_event, |
94 WaitableEvent* finish_event); | 121 WaitableEvent* finish_event, |
122 const StackConfiguration* stack_config); | |
95 | 123 |
96 PlatformThreadId id() const { return id_; } | 124 PlatformThreadId id() const { return id_; } |
97 | 125 |
98 private: | 126 private: |
127 struct TargetFunctionArgs { | |
128 WaitableEvent* thread_started_event; | |
129 WaitableEvent* finish_event; | |
130 const StackConfiguration* stack_config; | |
131 }; | |
132 | |
133 // Callback function to be provided when calling through the other library. | |
134 static void OtherLibraryCallback(void *arg); | |
135 | |
99 // Returns the current program counter, or a value very close to it. | 136 // Returns the current program counter, or a value very close to it. |
100 static const void* GetProgramCounter(); | 137 static const void* GetProgramCounter(); |
101 | 138 |
102 WaitableEvent thread_started_event_; | 139 WaitableEvent thread_started_event_; |
103 WaitableEvent finish_event_; | 140 WaitableEvent finish_event_; |
104 PlatformThreadId id_; | 141 PlatformThreadId id_; |
105 const DynamicStackAllocationConfig allocation_config_; | 142 const StackConfiguration stack_config_; |
106 | 143 |
107 DISALLOW_COPY_AND_ASSIGN(TargetThread); | 144 DISALLOW_COPY_AND_ASSIGN(TargetThread); |
108 }; | 145 }; |
109 | 146 |
110 TargetThread::TargetThread(DynamicStackAllocationConfig allocation_config) | 147 TargetThread::TargetThread(const StackConfiguration& stack_config) |
111 : thread_started_event_(false, false), finish_event_(false, false), | 148 : thread_started_event_(false, false), finish_event_(false, false), |
112 id_(0), allocation_config_(allocation_config) {} | 149 id_(0), stack_config_(stack_config) {} |
113 | 150 |
114 void TargetThread::ThreadMain() { | 151 void TargetThread::ThreadMain() { |
115 id_ = PlatformThread::CurrentId(); | 152 id_ = PlatformThread::CurrentId(); |
116 if (allocation_config_ == USE_ALLOCA) { | 153 switch (stack_config_.config) { |
117 SignalAndWaitUntilSignaledWithAlloca(&thread_started_event_, | 154 case StackConfiguration::NORMAL: |
118 &finish_event_); | 155 SignalAndWaitUntilSignaled(&thread_started_event_, &finish_event_, |
119 } else { | 156 &stack_config_); |
120 SignalAndWaitUntilSignaled(&thread_started_event_, &finish_event_); | 157 break; |
158 | |
159 case StackConfiguration::WITH_ALLOCA: | |
160 CallWithAlloca(&thread_started_event_, &finish_event_, &stack_config_); | |
161 break; | |
162 | |
163 case StackConfiguration::WITH_OTHER_LIBRARY: | |
164 CallThroughOtherLibrary(&thread_started_event_, &finish_event_, | |
165 &stack_config_); | |
166 break; | |
121 } | 167 } |
122 } | 168 } |
123 | 169 |
124 void TargetThread::WaitForThreadStart() { | 170 void TargetThread::WaitForThreadStart() { |
125 thread_started_event_.Wait(); | 171 thread_started_event_.Wait(); |
126 } | 172 } |
127 | 173 |
128 void TargetThread::SignalThreadToFinish() { | 174 void TargetThread::SignalThreadToFinish() { |
129 finish_event_.Signal(); | 175 finish_event_.Signal(); |
130 } | 176 } |
131 | 177 |
132 // static | 178 // static |
133 // Disable inlining for this function so that it gets its own stack frame. | 179 // Disable inlining for this function so that it gets its own stack frame. |
134 NOINLINE const void* TargetThread::SignalAndWaitUntilSignaled( | 180 NOINLINE const void* TargetThread::SignalAndWaitUntilSignaled( |
135 WaitableEvent* thread_started_event, | 181 WaitableEvent* thread_started_event, |
136 WaitableEvent* finish_event) { | 182 WaitableEvent* finish_event, |
183 const StackConfiguration* stack_config) { | |
137 if (thread_started_event && finish_event) { | 184 if (thread_started_event && finish_event) { |
138 thread_started_event->Signal(); | 185 thread_started_event->Signal(); |
139 finish_event->Wait(); | 186 finish_event->Wait(); |
140 } | 187 } |
141 | 188 |
142 // Volatile to prevent a tail call to GetProgramCounter(). | 189 // Volatile to prevent a tail call to GetProgramCounter(). |
143 const void* volatile program_counter = GetProgramCounter(); | 190 const void* volatile program_counter = GetProgramCounter(); |
144 return program_counter; | 191 return program_counter; |
145 } | 192 } |
146 | 193 |
147 // static | 194 // static |
148 // Disable inlining for this function so that it gets its own stack frame. | 195 // Disable inlining for this function so that it gets its own stack frame. |
149 NOINLINE const void* TargetThread::SignalAndWaitUntilSignaledWithAlloca( | 196 NOINLINE const void* TargetThread::CallWithAlloca( |
150 WaitableEvent* thread_started_event, | 197 WaitableEvent* thread_started_event, |
151 WaitableEvent* finish_event) { | 198 WaitableEvent* finish_event, |
199 const StackConfiguration* stack_config) { | |
152 const size_t alloca_size = 100; | 200 const size_t alloca_size = 100; |
153 // Memset to 0 to generate a clean failure. | 201 // Memset to 0 to generate a clean failure. |
154 std::memset(alloca(alloca_size), 0, alloca_size); | 202 std::memset(alloca(alloca_size), 0, alloca_size); |
155 | 203 |
156 if (thread_started_event && finish_event) { | 204 SignalAndWaitUntilSignaled(thread_started_event, finish_event, stack_config); |
157 thread_started_event->Signal(); | 205 |
158 finish_event->Wait(); | 206 // Volatile to prevent a tail call to GetProgramCounter(). |
207 const void* volatile program_counter = GetProgramCounter(); | |
208 return program_counter; | |
209 } | |
210 | |
211 // static | |
212 NOINLINE const void* TargetThread::CallThroughOtherLibrary( | |
213 WaitableEvent* thread_started_event, | |
214 WaitableEvent* finish_event, | |
215 const StackConfiguration* stack_config) { | |
216 if (stack_config) { | |
217 // A function whose arguments are a function accepting void*, and a void*. | |
218 using InvokeCallbackFunction = void(*)(void (*)(void*), void*); | |
219 EXPECT_TRUE(stack_config->library); | |
220 InvokeCallbackFunction function = reinterpret_cast<InvokeCallbackFunction>( | |
221 GetFunctionPointerFromNativeLibrary(stack_config->library, | |
222 "InvokeCallbackFunction")); | |
223 EXPECT_TRUE(function); | |
224 | |
225 TargetFunctionArgs args = { | |
226 thread_started_event, | |
227 finish_event, | |
228 stack_config | |
229 }; | |
230 (*function)(&OtherLibraryCallback, &args); | |
159 } | 231 } |
160 | 232 |
161 // Volatile to prevent a tail call to GetProgramCounter(). | 233 // Volatile to prevent a tail call to GetProgramCounter(). |
162 const void* volatile program_counter = GetProgramCounter(); | 234 const void* volatile program_counter = GetProgramCounter(); |
163 return program_counter; | 235 return program_counter; |
164 } | 236 } |
165 | 237 |
166 // static | 238 // static |
239 void TargetThread::OtherLibraryCallback(void *arg) { | |
240 const TargetFunctionArgs* args = static_cast<TargetFunctionArgs*>(arg); | |
241 SignalAndWaitUntilSignaled(args->thread_started_event, args->finish_event, | |
242 args->stack_config); | |
243 // Prevent tail call. | |
244 volatile int i = 0; | |
245 i = 1; | |
246 } | |
247 | |
248 // static | |
167 // Disable inlining for this function so that it gets its own stack frame. | 249 // Disable inlining for this function so that it gets its own stack frame. |
168 NOINLINE const void* TargetThread::GetProgramCounter() { | 250 NOINLINE const void* TargetThread::GetProgramCounter() { |
169 #if defined(OS_WIN) | 251 #if defined(OS_WIN) |
170 return _ReturnAddress(); | 252 return _ReturnAddress(); |
171 #else | 253 #else |
172 return __builtin_return_address(0); | 254 return __builtin_return_address(0); |
173 #endif | 255 #endif |
174 } | 256 } |
175 | 257 |
258 // Loads the other library, which defines a function to be called in the | |
259 // WITH_OTHER_LIBRARY configuration. | |
260 NativeLibrary LoadOtherLibrary() { | |
261 // The lambda gymnastics works around the fact that we can't use ASSERT_* | |
262 // macros in a function returning non-null. | |
263 const auto load = [](NativeLibrary* library) { | |
264 FilePath other_library_path; | |
265 ASSERT_TRUE(PathService::Get(DIR_EXE, &other_library_path)); | |
266 other_library_path = other_library_path.Append(GetNativeLibraryName( | |
267 ASCIIToUTF16("base_profiler_test_support_library"))); | |
268 NativeLibraryLoadError load_error; | |
269 *library = LoadNativeLibrary(other_library_path, &load_error); | |
270 ASSERT_TRUE(*library) << "error loading " << other_library_path.value() | |
271 << ": " << load_error.ToString(); | |
272 }; | |
273 | |
274 NativeLibrary library; | |
275 load(&library); | |
276 return library; | |
277 } | |
278 | |
279 // Unloads |library| and returns when it has completed unloading. Unloading a | |
280 // library is asynchronous on Windows, so simply calling UnloadNativeLibrary() | |
281 // is insufficient to ensure it's been unloaded. | |
282 void SynchronousUnloadNativeLibrary(NativeLibrary library) { | |
283 UnloadNativeLibrary(library); | |
284 #if defined(OS_WIN) | |
285 // NativeLibrary is a typedef for HMODULE, which is actually the base address | |
286 // of the module. | |
287 uintptr_t module_base_address = reinterpret_cast<uintptr_t>(library); | |
288 HMODULE module_handle; | |
289 // Keep trying to get the module handle until the call fails. | |
290 while (::GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | | |
291 GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, | |
292 reinterpret_cast<LPCTSTR>(module_base_address), | |
293 &module_handle) || | |
294 ::GetLastError() != ERROR_MOD_NOT_FOUND) { | |
295 PlatformThread::YieldCurrentThread(); | |
brucedawson
2015/10/30 00:16:00
This maps to Sleep(0) which, even in test code, ma
Mike Wittman
2015/10/30 17:08:07
Done.
| |
296 } | |
297 #else | |
298 NOTIMPLEMENTED(); | 
299 #endif | |
300 } | |
176 | 301 |
177 // Called on the profiler thread when complete, to collect profiles. | 302 // Called on the profiler thread when complete, to collect profiles. |
178 void SaveProfiles(CallStackProfiles* profiles, | 303 void SaveProfiles(CallStackProfiles* profiles, |
179 const CallStackProfiles& pending_profiles) { | 304 const CallStackProfiles& pending_profiles) { |
180 *profiles = pending_profiles; | 305 *profiles = pending_profiles; |
181 } | 306 } |
182 | 307 |
183 // Called on the profiler thread when complete. Collects profiles produced by | 308 // Called on the profiler thread when complete. Collects profiles produced by |
184 // the profiler, and signals an event to allow the main thread to know that | 309 // the profiler, and signals an event to allow the main thread to know that |
185 // the profiler is done. | 310 // the profiler is done. |
186 void SaveProfilesAndSignalEvent(CallStackProfiles* profiles, | 311 void SaveProfilesAndSignalEvent(CallStackProfiles* profiles, |
187 WaitableEvent* event, | 312 WaitableEvent* event, |
188 const CallStackProfiles& pending_profiles) { | 313 const CallStackProfiles& pending_profiles) { |
189 *profiles = pending_profiles; | 314 *profiles = pending_profiles; |
190 event->Signal(); | 315 event->Signal(); |
191 } | 316 } |
192 | 317 |
193 // Executes the function with the target thread running and executing within | 318 // Executes the function with the target thread running and executing within |
194 // SignalAndWaitUntilSignaled() or SignalAndWaitUntilSignaledWithAlloca(), | 319 // SignalAndWaitUntilSignaled(). Performs all necessary target thread startup |
195 // depending on the value of |allocation_config|. Performs all necessary target | 320 // and shutdown work before and afterward. |
196 // thread startup and shutdown work before and afterward. | |
197 template <class Function> | 321 template <class Function> |
198 void WithTargetThread(Function function, | 322 void WithTargetThread(Function function, |
199 DynamicStackAllocationConfig allocation_config) { | 323 const StackConfiguration& stack_config) { |
200 TargetThread target_thread(allocation_config); | 324 TargetThread target_thread(stack_config); |
201 PlatformThreadHandle target_thread_handle; | 325 PlatformThreadHandle target_thread_handle; |
202 EXPECT_TRUE(PlatformThread::Create(0, &target_thread, &target_thread_handle)); | 326 EXPECT_TRUE(PlatformThread::Create(0, &target_thread, &target_thread_handle)); |
203 | 327 |
204 target_thread.WaitForThreadStart(); | 328 target_thread.WaitForThreadStart(); |
205 | 329 |
206 function(target_thread.id()); | 330 function(target_thread.id()); |
207 | 331 |
208 target_thread.SignalThreadToFinish(); | 332 target_thread.SignalThreadToFinish(); |
209 | 333 |
210 PlatformThread::Join(target_thread_handle); | 334 PlatformThread::Join(target_thread_handle); |
211 } | 335 } |
212 | 336 |
213 template <class Function> | 337 template <class Function> |
214 void WithTargetThread(Function function) { | 338 void WithTargetThread(Function function) { |
215 WithTargetThread(function, NO_ALLOCA); | 339 WithTargetThread(function, StackConfiguration(StackConfiguration::NORMAL)); |
216 } | 340 } |
217 | 341 |
218 // Captures profiles as specified by |params| on the TargetThread, and returns | 342 // Captures profiles as specified by |params| on the TargetThread, and returns |
219 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to | 343 // them in |profiles|. Waits up to |profiler_wait_time| for the profiler to |
220 // complete. | 344 // complete. |
221 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, | 345 void CaptureProfiles(const SamplingParams& params, TimeDelta profiler_wait_time, |
222 CallStackProfiles* profiles) { | 346 CallStackProfiles* profiles) { |
223 profiles->clear(); | 347 profiles->clear(); |
224 | 348 |
225 WithTargetThread([¶ms, profiles, profiler_wait_time]( | 349 WithTargetThread([¶ms, profiles, profiler_wait_time]( |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
261 // Searches through the frames in |sample|, returning an iterator to the first | 385 // Searches through the frames in |sample|, returning an iterator to the first |
262 // frame that has an instruction pointer within |target_function|. Returns | 386 // frame that has an instruction pointer within |target_function|. Returns |
263 // sample.end() if no such frames are found. | 387 // sample.end() if no such frames are found. |
264 Sample::const_iterator FindFirstFrameWithinFunction( | 388 Sample::const_iterator FindFirstFrameWithinFunction( |
265 const Sample& sample, | 389 const Sample& sample, |
266 TargetFunction target_function) { | 390 TargetFunction target_function) { |
267 uintptr_t function_start = reinterpret_cast<uintptr_t>( | 391 uintptr_t function_start = reinterpret_cast<uintptr_t>( |
268 MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | 392 MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
269 target_function))); | 393 target_function))); |
270 uintptr_t function_end = | 394 uintptr_t function_end = |
271 reinterpret_cast<uintptr_t>(target_function(nullptr, nullptr)); | 395 reinterpret_cast<uintptr_t>(target_function(nullptr, nullptr, nullptr)); |
272 for (auto it = sample.begin(); it != sample.end(); ++it) { | 396 for (auto it = sample.begin(); it != sample.end(); ++it) { |
273 if ((it->instruction_pointer >= function_start) && | 397 if ((it->instruction_pointer >= function_start) && |
274 (it->instruction_pointer <= function_end)) | 398 (it->instruction_pointer <= function_end)) |
275 return it; | 399 return it; |
276 } | 400 } |
277 return sample.end(); | 401 return sample.end(); |
278 } | 402 } |
279 | 403 |
280 // Formats a sample into a string that can be output for test diagnostics. | 404 // Formats a sample into a string that can be output for test diagnostics. |
281 std::string FormatSampleForDiagnosticOutput( | 405 std::string FormatSampleForDiagnosticOutput( |
282 const Sample& sample, | 406 const Sample& sample, |
283 const std::vector<Module>& modules) { | 407 const std::vector<Module>& modules) { |
284 std::string output; | 408 std::string output; |
285 for (const Frame& frame: sample) { | 409 for (const Frame& frame: sample) { |
286 output += StringPrintf( | 410 output += StringPrintf( |
287 "0x%p %s\n", reinterpret_cast<const void*>(frame.instruction_pointer), | 411 "0x%p %s\n", reinterpret_cast<const void*>(frame.instruction_pointer), |
288 modules[frame.module_index].filename.AsUTF8Unsafe().c_str()); | 412 modules[frame.module_index].filename.AsUTF8Unsafe().c_str()); |
289 } | 413 } |
290 return output; | 414 return output; |
291 } | 415 } |
292 | 416 |
293 // Returns a duration that is longer than the test timeout. We would use | 417 // Returns a duration that is longer than the test timeout. We would use |
294 // TimeDelta::Max() but https://crbug.com/465948. | 418 // TimeDelta::Max() but https://crbug.com/465948. |
295 TimeDelta AVeryLongTimeDelta() { return TimeDelta::FromDays(1); } | 419 TimeDelta AVeryLongTimeDelta() { return TimeDelta::FromDays(1); } |
296 | 420 |
421 // Tests the scenario where the library is unloaded after copying the stack, but | |
brucedawson
2015/10/30 00:16:00
Have you confirmed that this test reliably crashes
Mike Wittman
2015/10/30 17:08:07
Yes, for |wait_until_unloaded| == true and for |wa
| |
422 // before walking it. If |wait_until_unloaded| is true, ensures that the | |
423 // asynchronous library unloading has completed before walking the stack. If | 
424 // false, the unloading may still be occurring during the stack walk. | |
425 void TestLibraryUnload(bool wait_until_unloaded) { | |
426 // Test delegate that supports intervening between the copying of the stack | |
427 // and the walking of the stack. | |
428 class StackCopiedSignaler : public NativeStackSamplerTestDelegate { | |
429 public: | |
430 StackCopiedSignaler(WaitableEvent* stack_copied, | |
431 WaitableEvent* start_stack_walk, | |
432 bool wait_to_walk_stack) | |
433 : stack_copied_(stack_copied), start_stack_walk_(start_stack_walk), | |
434 wait_to_walk_stack_(wait_to_walk_stack) { | |
435 } | |
436 | |
437 void OnPreStackWalk() override { | |
438 stack_copied_->Signal(); | |
439 if (wait_to_walk_stack_) | |
440 start_stack_walk_->Wait(); | |
441 } | |
442 | |
443 private: | |
444 WaitableEvent* const stack_copied_; | |
445 WaitableEvent* const start_stack_walk_; | |
446 const bool wait_to_walk_stack_; | |
447 }; | |
448 | |
449 SamplingParams params; | |
450 params.sampling_interval = TimeDelta::FromMilliseconds(0); | |
451 params.samples_per_burst = 1; | |
452 | |
453 NativeLibrary other_library = LoadOtherLibrary(); | |
454 TargetThread target_thread(StackConfiguration( | |
455 StackConfiguration::WITH_OTHER_LIBRARY, | |
456 other_library)); | |
457 | |
458 PlatformThreadHandle target_thread_handle; | |
459 EXPECT_TRUE(PlatformThread::Create(0, &target_thread, &target_thread_handle)); | |
460 | |
461 target_thread.WaitForThreadStart(); | |
462 | |
463 WaitableEvent sampling_thread_completed(true, false); | |
464 std::vector<CallStackProfile> profiles; | |
465 const StackSamplingProfiler::CompletedCallback callback = | |
466 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | |
467 Unretained(&sampling_thread_completed)); | |
468 WaitableEvent stack_copied(true, false); | |
469 WaitableEvent start_stack_walk(true, false); | |
470 StackCopiedSignaler test_delegate(&stack_copied, &start_stack_walk, | |
471 wait_until_unloaded); | |
472 StackSamplingProfiler profiler(target_thread.id(), params, callback, | |
473 &test_delegate); | |
474 | |
475 profiler.Start(); | |
476 | |
477 // Wait for the stack to be copied and the target thread to be resumed. | |
478 stack_copied.Wait(); | |
479 | |
480 // Cause the target thread to finish, so that it's no longer executing code in | |
481 // the library we're about to unload. | |
482 target_thread.SignalThreadToFinish(); | |
483 PlatformThread::Join(target_thread_handle); | |
484 | |
485 // Unload the library now that it's not being used. | |
486 if (wait_until_unloaded) | |
487 SynchronousUnloadNativeLibrary(other_library); | |
488 else | |
489 UnloadNativeLibrary(other_library); | |
490 | |
491 // Let the stack walk commence after unloading the library, if we're waiting | |
492 // on that event. | |
493 start_stack_walk.Signal(); | |
494 | |
495 // Wait for the sampling thread to complete and fill out |profiles|. | |
496 sampling_thread_completed.Wait(); | |
497 | |
498 // Look up the sample. | |
499 ASSERT_EQ(1u, profiles.size()); | |
500 const CallStackProfile& profile = profiles[0]; | |
501 ASSERT_EQ(1u, profile.samples.size()); | |
502 const Sample& sample = profile.samples[0]; | |
503 | |
504 // Check that the stack contains a frame for | |
505 // TargetThread::SignalAndWaitUntilSignaled(). | |
506 Sample::const_iterator end_frame = FindFirstFrameWithinFunction( | |
507 sample, | |
508 &TargetThread::SignalAndWaitUntilSignaled); | |
509 ASSERT_TRUE(end_frame != sample.end()) | |
510 << "Function at " | |
511 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | |
512 &TargetThread::SignalAndWaitUntilSignaled)) | |
513 << " was not found in stack:\n" | |
514 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
515 | |
516 if (wait_until_unloaded) { | |
517 // The stack should look like this, resulting in two frames between | |
518 // SignalAndWaitUntilSignaled and the last frame, which should be the one in | |
519 // the now-unloaded library: | |
520 // | |
521 // ... WaitableEvent and system frames ... | |
522 // TargetThread::SignalAndWaitUntilSignaled | |
523 // TargetThread::OtherLibraryCallback | |
524 // InvokeCallbackFunction (in other library, now unloaded) | |
525 EXPECT_EQ(2, (sample.end() - 1) - end_frame) | |
526 << "Stack:\n" | |
527 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
528 } else { | |
529 // We didn't wait for the asynchronous unloading to complete, so the results | 
530 // are non-deterministic: if the library finished unloading we should have | |
531 // the same stack as |wait_until_unloaded|, if not we should have the full | |
532 // stack. The important thing is that we should not crash. | |
533 | |
534 if ((sample.end() - 1) - end_frame == 2) { | |
535 // This is the same case as |wait_until_unloaded|. | |
536 return; | |
537 } | |
538 | |
539 // Check that the stack contains a frame for | |
540 // TargetThread::CallThroughOtherLibrary(). | |
541 Sample::const_iterator other_library_frame = FindFirstFrameWithinFunction( | |
542 sample, | |
543 &TargetThread::CallThroughOtherLibrary); | |
544 ASSERT_TRUE(other_library_frame != sample.end()) | |
545 << "Function at " | |
546 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | |
547 &TargetThread::CallThroughOtherLibrary)) | |
548 << " was not found in stack:\n" | |
549 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
550 | |
551 // The stack should look like this, resulting in three frames between | |
552 // SignalAndWaitUntilSignaled and CallThroughOtherLibrary: | |
553 // | |
554 // ... WaitableEvent and system frames ... | |
555 // TargetThread::SignalAndWaitUntilSignaled | |
556 // TargetThread::OtherLibraryCallback | |
557 // InvokeCallbackFunction (in other library) | |
558 // TargetThread::CallThroughOtherLibrary | |
559 EXPECT_EQ(3, other_library_frame - end_frame) | |
560 << "Stack:\n" | |
561 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
562 } | |
563 } | |
564 | |
297 } // namespace | 565 } // namespace |
298 | 566 |
299 // Checks that the basic expected information is present in a sampled call stack | 567 // Checks that the basic expected information is present in a sampled call stack |
300 // profile. | 568 // profile. |
301 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 569 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
302 #define MAYBE_Basic Basic | 570 #define MAYBE_Basic Basic |
303 #else | 571 #else |
304 #define MAYBE_Basic DISABLED_Basic | 572 #define MAYBE_Basic DISABLED_Basic |
305 #endif | 573 #endif |
306 TEST(StackSamplingProfilerTest, MAYBE_Basic) { | 574 TEST(StackSamplingProfilerTest, MAYBE_Basic) { |
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
355 std::vector<CallStackProfile> profiles; | 623 std::vector<CallStackProfile> profiles; |
356 WithTargetThread([¶ms, &profiles]( | 624 WithTargetThread([¶ms, &profiles]( |
357 PlatformThreadId target_thread_id) { | 625 PlatformThreadId target_thread_id) { |
358 WaitableEvent sampling_thread_completed(true, false); | 626 WaitableEvent sampling_thread_completed(true, false); |
359 const StackSamplingProfiler::CompletedCallback callback = | 627 const StackSamplingProfiler::CompletedCallback callback = |
360 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | 628 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), |
361 Unretained(&sampling_thread_completed)); | 629 Unretained(&sampling_thread_completed)); |
362 StackSamplingProfiler profiler(target_thread_id, params, callback); | 630 StackSamplingProfiler profiler(target_thread_id, params, callback); |
363 profiler.Start(); | 631 profiler.Start(); |
364 sampling_thread_completed.Wait(); | 632 sampling_thread_completed.Wait(); |
365 }, USE_ALLOCA); | 633 }, StackConfiguration(StackConfiguration::WITH_ALLOCA)); |
366 | 634 |
367 // Look up the sample. | 635 // Look up the sample. |
368 ASSERT_EQ(1u, profiles.size()); | 636 ASSERT_EQ(1u, profiles.size()); |
369 const CallStackProfile& profile = profiles[0]; | 637 const CallStackProfile& profile = profiles[0]; |
370 ASSERT_EQ(1u, profile.samples.size()); | 638 ASSERT_EQ(1u, profile.samples.size()); |
371 const Sample& sample = profile.samples[0]; | 639 const Sample& sample = profile.samples[0]; |
372 | 640 |
373 // Check that the stack contains a frame for | 641 // Check that the stack contains a frame for |
374 // TargetThread::SignalAndWaitUntilSignaledWithAlloca(). | 642 // TargetThread::SignalAndWaitUntilSignaled(). |
375 Sample::const_iterator loc = FindFirstFrameWithinFunction( | 643 Sample::const_iterator end_frame = FindFirstFrameWithinFunction( |
376 sample, | 644 sample, |
377 &TargetThread::SignalAndWaitUntilSignaledWithAlloca); | 645 &TargetThread::SignalAndWaitUntilSignaled); |
378 ASSERT_TRUE(loc != sample.end()) | 646 ASSERT_TRUE(end_frame != sample.end()) |
379 << "Function at " | 647 << "Function at " |
380 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | 648 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( |
381 &TargetThread::SignalAndWaitUntilSignaledWithAlloca)) | 649 &TargetThread::SignalAndWaitUntilSignaled)) |
382 << " was not found in stack:\n" | 650 << " was not found in stack:\n" |
383 << FormatSampleForDiagnosticOutput(sample, profile.modules); | 651 << FormatSampleForDiagnosticOutput(sample, profile.modules); |
652 | |
653 // Check that the stack contains a frame for TargetThread::CallWithAlloca(). | |
654 Sample::const_iterator alloca_frame = FindFirstFrameWithinFunction( | |
655 sample, | |
656 &TargetThread::CallWithAlloca); | |
657 ASSERT_TRUE(alloca_frame != sample.end()) | |
658 << "Function at " | |
659 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | |
660 &TargetThread::CallWithAlloca)) | |
661 << " was not found in stack:\n" | |
662 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
663 | |
664 // These frames should be adjacent on the stack. | |
665 EXPECT_EQ(1, alloca_frame - end_frame) | |
666 << "Stack:\n" | |
667 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
384 } | 668 } |
385 | 669 |
386 // Checks that the fire-and-forget interface works. | 670 // Checks that the fire-and-forget interface works. |
387 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | 671 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) |
388 #define MAYBE_StartAndRunAsync StartAndRunAsync | 672 #define MAYBE_StartAndRunAsync StartAndRunAsync |
389 #else | 673 #else |
390 #define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync | 674 #define MAYBE_StartAndRunAsync DISABLED_StartAndRunAsync |
391 #endif | 675 #endif |
392 TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) { | 676 TEST(StackSamplingProfilerTest, MAYBE_StartAndRunAsync) { |
393 // StartAndRunAsync requires the caller to have a message loop. | 677 // StartAndRunAsync requires the caller to have a message loop. |
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
576 EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait( | 860 EXPECT_FALSE(sampling_completed[other_profiler]->TimedWait( |
577 TimeDelta::FromMilliseconds(25))); | 861 TimeDelta::FromMilliseconds(25))); |
578 | 862 |
579 // Start the other profiler again and it should run. | 863 // Start the other profiler again and it should run. |
580 profiler[other_profiler]->Start(); | 864 profiler[other_profiler]->Start(); |
581 sampling_completed[other_profiler]->Wait(); | 865 sampling_completed[other_profiler]->Wait(); |
582 EXPECT_EQ(1u, profiles[other_profiler].size()); | 866 EXPECT_EQ(1u, profiles[other_profiler].size()); |
583 }); | 867 }); |
584 } | 868 } |
585 | 869 |
870 // Checks that a stack that runs through another library produces a stack with | |
871 // the expected functions. | |
872 #if defined(STACK_SAMPLING_PROFILER_SUPPORTED) | |
873 #define MAYBE_OtherLibrary OtherLibrary | |
874 #else | |
875 #define MAYBE_OtherLibrary DISABLED_OtherLibrary | |
876 #endif | |
877 TEST(StackSamplingProfilerTest, MAYBE_OtherLibrary) { | |
878 SamplingParams params; | |
879 params.sampling_interval = TimeDelta::FromMilliseconds(0); | |
880 params.samples_per_burst = 1; | |
881 | |
882 std::vector<CallStackProfile> profiles; | |
883 { | |
884 ScopedNativeLibrary other_library(LoadOtherLibrary()); | |
885 WithTargetThread([¶ms, &profiles]( | |
886 PlatformThreadId target_thread_id) { | |
887 WaitableEvent sampling_thread_completed(true, false); | |
888 const StackSamplingProfiler::CompletedCallback callback = | |
889 Bind(&SaveProfilesAndSignalEvent, Unretained(&profiles), | |
890 Unretained(&sampling_thread_completed)); | |
891 StackSamplingProfiler profiler(target_thread_id, params, callback); | |
892 profiler.Start(); | |
893 sampling_thread_completed.Wait(); | |
894 }, StackConfiguration(StackConfiguration::WITH_OTHER_LIBRARY, | |
895 other_library.get())); | |
896 } | |
897 | |
898 // Look up the sample. | |
899 ASSERT_EQ(1u, profiles.size()); | |
900 const CallStackProfile& profile = profiles[0]; | |
901 ASSERT_EQ(1u, profile.samples.size()); | |
902 const Sample& sample = profile.samples[0]; | |
903 | |
904 // Check that the stack contains a frame for | |
905 // TargetThread::CallThroughOtherLibrary(). | |
906 Sample::const_iterator other_library_frame = FindFirstFrameWithinFunction( | |
907 sample, | |
908 &TargetThread::CallThroughOtherLibrary); | |
909 ASSERT_TRUE(other_library_frame != sample.end()) | |
910 << "Function at " | |
911 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | |
912 &TargetThread::CallThroughOtherLibrary)) | |
913 << " was not found in stack:\n" | |
914 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
915 | |
916 // Check that the stack contains a frame for | |
917 // TargetThread::SignalAndWaitUntilSignaled(). | |
918 Sample::const_iterator end_frame = FindFirstFrameWithinFunction( | |
919 sample, | |
920 &TargetThread::SignalAndWaitUntilSignaled); | |
921 ASSERT_TRUE(end_frame != sample.end()) | |
922 << "Function at " | |
923 << MaybeFixupFunctionAddressForILT(reinterpret_cast<const void*>( | |
924 &TargetThread::SignalAndWaitUntilSignaled)) | |
925 << " was not found in stack:\n" | |
926 << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
927 | |
928 // The stack should look like this, resulting in three frames between | |
929 // SignalAndWaitUntilSignaled and CallThroughOtherLibrary: | |
930 // | |
931 // ... WaitableEvent and system frames ... | |
932 // TargetThread::SignalAndWaitUntilSignaled | |
933 // TargetThread::OtherLibraryCallback | |
934 // InvokeCallbackFunction (in other library) | |
935 // TargetThread::CallThroughOtherLibrary | |
936 EXPECT_EQ(3, other_library_frame - end_frame) | |
937 << "Stack:\n" << FormatSampleForDiagnosticOutput(sample, profile.modules); | |
938 } | |
939 | |
// Checks that a stack that runs through a library that is unloading produces a
// stack, and doesn't crash.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_UnloadingLibrary UnloadingLibrary
#else
#define MAYBE_UnloadingLibrary DISABLED_UnloadingLibrary
#endif
TEST(StackSamplingProfilerTest, MAYBE_UnloadingLibrary) {
  // false: don't wait for the asynchronous unload to finish before sampling,
  // so sampling races the unload. The resulting stack is non-deterministic;
  // the essential expectation is that sampling does not crash.
  TestLibraryUnload(false);
}
950 | |
// Checks that a stack that runs through a library that has been unloaded
// produces a stack, and doesn't crash.
#if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
#define MAYBE_UnloadedLibrary UnloadedLibrary
#else
#define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary
#endif
TEST(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) {
  // true: wait until the library has fully unloaded before sampling, so the
  // sampled stack deterministically ends at the frame that called into the
  // now-unloaded library.
  TestLibraryUnload(true);
}
961 | |
586 } // namespace base | 962 } // namespace base |
OLD | NEW |