Chromium Code Reviews

Side by Side Diff: chrome/test/perf/rendering/latency_tests.cc

Issue 8883005: Input latency performance test that uses tracing. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: no change - update and merge Created 9 years ago
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/command_line.h"
6 #include "base/file_util.h"
7 #include "base/memory/scoped_ptr.h"
8 #include "base/path_service.h"
9 #include "base/string_number_conversions.h"
10 #include "base/stringprintf.h"
11 #include "base/test/test_switches.h"
12 #include "base/test/trace_event_analyzer.h"
13 #include "base/threading/platform_thread.h"
14 #include "base/timer.h"
15 #include "base/version.h"
16 #include "chrome/browser/ui/browser.h"
17 #include "chrome/browser/ui/tab_contents/tab_contents_wrapper.h"
18 #include "chrome/common/chrome_paths.h"
19 #include "chrome/common/chrome_switches.h"
20 #include "chrome/test/base/tracing.h"
21 #include "chrome/test/base/ui_test_utils.h"
22 #include "chrome/test/perf/browser_perf_test.h"
23 #include "chrome/test/perf/perf_test.h"
24 #include "content/browser/renderer_host/render_view_host.h"
25 #include "content/browser/tab_contents/tab_contents.h"
26 #include "content/public/common/content_switches.h"
27 #include "third_party/WebKit/Source/WebKit/chromium/public/WebInputEvent.h"
28 #include "testing/gtest/include/gtest/gtest.h"
29
30 // Run with --vmodule=latency_tests=1 to print verbose latency info.
31
32 // How is latency measured?
33 //
34 // The test injects mouse moves many times per frame from the browser via
35 // RenderWidgetHost. Each input has a unique x coordinate. When the javascript
36 // handler receives the input, it stores the coordinate for later use in the
37 // requestAnimationFrame callback. In RAF, the test paints using the x
38 // coordinate as a color (in software, it sets the color of a table; in webgl,
39 // it executes a glClearColor). Trace events emit the color when it is picked up
40 // by either UpdateRect for software or gles2_cmd_decoder/glClear for webgl.
41 //
42 // Each UpdateRect (software) or SwapBuffers (webgl) is considered to be a frame
43 // boundary that will be used to measure latency in number of frames. Starting
44 // from a frame boundary Y, the test first determines what mouse x coordinate
45 // was represented by the color at that frame boundary. Then, the test walks
46 // backward through the trace events to find the input event matching that
47 // x coordinate. Then, the test finds the nearest frame boundary X to the input
48 // event (may be before or after). The number of frame boundaries is then
49 // counted between X and Y to determine the input latency.
50 //
51 // By injecting mouse moves many times per frame, we reduce flakiness in the
52 // finding of the nearest frame boundary.
53 //
54 // This test only measures the latency introduced by chrome code -- it does not
55 // measure latency introduced by mouse drivers or the GL driver or the OS window
56 // manager. The actual latency seen by a user is more than what is reported by
57 // this test.
58 //
59 // Current modes:
60 // - Software RAF
61 // - WebGL RAF
62
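For illustration, the walk-back-and-count scheme described above can be condensed into a small self-contained sketch (not part of this patch): the Event struct, LatencyInFrames() and the toy timeline are hypothetical stand-ins for the trace events that CalculateLatency() below recovers via TraceAnalyzer queries.

#include <cstdio>
#include <vector>

struct Event {
  enum Type { kInput, kFrameBoundary } type;
  int x;  // Mouse x coordinate for inputs; decoded color for frame boundaries.
};

// Returns the latency, in frames, for the frame boundary at index |y|.
// Error handling is omitted; a matching input is assumed to exist.
int LatencyInFrames(const std::vector<Event>& events, size_t y) {
  // Walk backward from boundary Y to the input whose x matches Y's color.
  size_t input = y;
  while (input > 0 && !(events[input].type == Event::kInput &&
                        events[input].x == events[y].x))
    --input;
  // Find the frame boundary X nearest to that input (before or after it).
  size_t before = input;
  size_t after = input;
  while (before > 0 && events[before].type != Event::kFrameBoundary)
    --before;
  while (after < y && events[after].type != Event::kFrameBoundary)
    ++after;
  size_t x = (input - before <= after - input) ? before : after;
  // Count frame boundaries after X up to and including Y: that is the latency.
  int latency = 0;
  for (size_t i = x + 1; i <= y; ++i) {
    if (events[i].type == Event::kFrameBoundary)
      ++latency;
  }
  return latency;
}

int main() {
  // Toy timeline: the input with x == 2 arrives just before the boundary at
  // index 3 but is not presented until the boundary at index 5, so the
  // measured latency is 1 frame.
  std::vector<Event> events = {
      {Event::kFrameBoundary, 0}, {Event::kInput, 1}, {Event::kInput, 2},
      {Event::kFrameBoundary, 1}, {Event::kInput, 3},
      {Event::kFrameBoundary, 2}};
  printf("latency = %d frame(s)\n", LatencyInFrames(events, 5));
  return 0;
}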
63 namespace {
64
65 using namespace trace_analyzer;
66
67 enum LatencyTestMode {
68 kWebGL,
69 kSoftware
70 };
71
72 enum LatencyTestFlags {
73 kInputHeavy = 1 << 0,
74 kInputDirty = 1 << 1,
75 kRafHeavy = 1 << 2,
76 kPaintHeavy = 1 << 3
77 };
78
79 const int kWebGLCanvasWidth = 10;
80 const int kNumFrames = 80;
81 const int kInputsPerFrame = 16;
82 // Magic number to identify certain glClear events.
83 const int kClearColorGreen = 137;
84 const int kMouseY = 5;
85
86 // Don't analyze begin frames that may be inaccurate. Latencies can be as high
87 // as 5 frames or so, so skip the first 6 frames to get more accurate results.
88 const int kIgnoreBeginFrames = 6;
89 // Don't analyze end frames that may be inaccurate.
90 const int kIgnoreEndFrames = 4;
91 // Minimum frames to produce an answer.
92 const int kMinimumFramesForAnalysis = 5;
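For scale: with kNumFrames = 80, trimming 6 begin frames and 4 end frames leaves roughly 70 latency samples per run, comfortably above the kIgnoreEndFrames + kMinimumFramesForAnalysis bound that CalculateLatency() enforces before reporting a mean.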
93
94 class LatencyTest : public BrowserPerfTest {
95 public:
96 LatencyTest() :
97 query_instant_(Query(EVENT_PHASE) ==
98 Query::Phase(TRACE_EVENT_PHASE_INSTANT)),
99 // These queries are initialized in RunTest.
100 query_swaps_(Query::Bool(false)),
101 query_inputs_(Query::Bool(false)),
102 query_blits_(Query::Bool(false)),
103 query_clears_(Query::Bool(false)),
104 mouse_x_(0),
105 tab_width_(0),
106 delay_time_us_(0),
107 num_frames_(0),
108 verbose_(false),
109 test_flags_(0) {}
110
111 virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE;
112
113 std::vector<int> GetAllBehaviors();
114
115 // Run test with specified |mode| and |behaviors|.
116 // |mode| can be webgl or software.
117 // |behaviors| is a list of combinations of LatencyTestFlags.
118 void RunTest(LatencyTestMode mode, const std::vector<int>& behaviors);
119
120 private:
121 void RunTestInternal(const std::string& test_url,
122 bool send_inputs,
123 int input_delay_us);
124
125 double CalculateLatency();
126
127 std::string GetModeString() {
128 switch (mode_) {
129 case kWebGL:
130 return "webgl";
131 case kSoftware:
132 return "software";
133 }
134 }
135
136 std::string GetTraceName(int flags);
137
138 std::string GetUrlModeString(int flags);
139
140 std::string GetUrl(int flags);
141
142 int GetMeanFrameTimeMicros() const;
143
144 void SendInput();
145
146 void PrintEvents(const TraceEventVector& events);
147
148 // Path to html file.
149 FilePath test_path_;
150
151 // Query INSTANT events.
152 Query query_instant_;
153
154 // Query "swaps" which is SwapBuffers for GL and UpdateRect for software.
155 Query query_swaps_;
156
157 // Query mouse input entry events in browser process (ForwardMouseEvent).
158 Query query_inputs_;
159
160 // Query GL blits for the WebGL canvas -- represents the compositor consuming
161 // the WebGL contents for display.
162 Query query_blits_;
163
164 // Query glClear calls with mouse coordinate as clear color.
165 Query query_clears_;
166
167 // For searching trace data.
168 scoped_ptr<TraceAnalyzer> analyzer_;
169
170 // Current mouse x coordinate for injecting events.
171 int mouse_x_;
172
173 // Width of window containing our tab.
174 int tab_width_;
175
176 // Timer for injecting mouse events periodically.
177 base::RepeatingTimer<LatencyTest> timer_;
178
179 // Mode: webgl or software
Paweł Hajdan Jr. 2011/12/22 08:08:39 nit: Dot at the end.
jbates 2011/12/22 19:39:34 Done.
180 LatencyTestMode mode_;
181
182 // Delay time for javascript test code. Typically 2 x frame duration. Used
183 // to spin-wait in the javascript input handler and requestAnimationFrame.
184 int delay_time_us_;
185
186 // Number of frames to render from the html test code.
187 int num_frames_;
188
189 // Map from test flags combination to the calculated mean latency.
190 std::map<int, double> latencies_;
191
192 // Whether to print more verbose output.
193 bool verbose_;
194
195 // Current test flags combination, determining the behavior of the test.
196 int test_flags_;
197 };
198
199 void LatencyTest::SetUpCommandLine(CommandLine* command_line) {
200 BrowserPerfTest::SetUpCommandLine(command_line);
201 // This enables DOM automation for tab contents.
202 EnableDOMAutomation();
203 if (CommandLine::ForCurrentProcess()->
204 HasSwitch(switches::kEnableThreadedCompositing)) {
205 command_line->AppendSwitch(switches::kEnableThreadedCompositing);
206 }
207 // Default behavior is to thumbnail the tab after 0.5 seconds, causing
208 // a nasty frame hitch and disturbing the latency test. Fix that:
209 command_line->AppendSwitch(switches::kEnableInBrowserThumbnailing);
210 command_line->AppendSwitch(switches::kDisableBackgroundNetworking);
211 }
212
213 std::vector<int> LatencyTest::GetAllBehaviors() {
214 std::vector<int> behaviors;
215 int max_behaviors = kInputHeavy | kInputDirty | kRafHeavy | kPaintHeavy;
216 for (int i = 0; i <= max_behaviors; ++i)
217 behaviors.push_back(i);
218 return behaviors;
219 }
220
221 void LatencyTest::RunTest(LatencyTestMode mode,
222 const std::vector<int>& behaviors) {
223 mode_ = mode;
224 verbose_ = (logging::GetVlogLevel("latency_tests") > 0);
225
226 // Construct queries for searching trace events via TraceAnalyzer.
227 if (mode_ == kWebGL) {
228 query_swaps_ = query_instant_ &&
229 Query(EVENT_NAME) == Query::String("SwapBuffers") &&
230 Query(EVENT_ARG, "width") != Query::Int(kWebGLCanvasWidth);
231 } else if (mode_ == kSoftware) {
232 // Software updates need to have x=0 and y=0 to contain the input color.
233 query_swaps_ = query_instant_ &&
234 Query(EVENT_NAME) == Query::String("UpdateRect") &&
235 Query(EVENT_ARG, "x+y") == Query::Int(0);
236 }
237 query_inputs_ = query_instant_ &&
238 Query(EVENT_NAME) == Query::String("MouseEventBegin");
239 query_blits_ = query_instant_ &&
240 Query(EVENT_NAME) == Query::String("DoBlit") &&
241 Query(EVENT_ARG, "width") == Query::Int(kWebGLCanvasWidth);
242 query_clears_ = query_instant_ &&
243 Query(EVENT_NAME) == Query::String("DoClear") &&
244 Query(EVENT_ARG, "green") == Query::Int(kClearColorGreen);
245 Query query_width_swaps = query_swaps_;
246 if (mode_ == kSoftware) {
247 query_width_swaps = query_instant_ &&
248 Query(EVENT_NAME) == Query::String("UpdateRectWidth") &&
249 Query(EVENT_ARG, "width") > Query::Int(kWebGLCanvasWidth);
250 }
251
252 // Set path to test html.
253 ASSERT_TRUE(PathService::Get(chrome::DIR_TEST_DATA, &test_path_));
254 test_path_ = test_path_.Append(FILE_PATH_LITERAL("perf"));
255 test_path_ = test_path_.Append(FILE_PATH_LITERAL("latency_suite.html"));
256 ASSERT_TRUE(file_util::PathExists(test_path_))
257 << "Missing test file: " << test_path_.value();
258
259 // Run once with defaults to measure the frame times.
260 delay_time_us_ = 0;
261 // kNumFrames may be very high, but we only need a few frames to measure
262 // average frame times.
263 num_frames_ = 30;
264 int initial_flags = 0;
265 if (mode_ == kSoftware) {
266 // For the first run, run software with kPaintHeavy (which toggles the
267 // background color every frame) to force an update each RAF. Otherwise it
268 // won't trigger an UpdateRect each frame and we won't be able to measure
269 // framerate, because there are no inputs during the first run.
270 initial_flags = static_cast<int>(kPaintHeavy);
271 }
272 RunTestInternal(GetUrl(initial_flags), false, 0);
273
274 // Get width of tab so that we know the limit of x coordinates for the
275 // injected mouse inputs.
276 const TraceEvent* swap_event = analyzer_->FindOneEvent(query_width_swaps);
277 ASSERT_TRUE(swap_event);
278 tab_width_ = swap_event->GetKnownArgAsInt("width");
279 // Keep printf output clean by limiting input coords to three digits:
280 tab_width_ = (tab_width_ < 1000) ? tab_width_ : 999;
281 // Sanity check the tab_width -- it should be more than 100 pixels.
282 EXPECT_GT(tab_width_, 100);
283
284 int mean_frame_time_us = GetMeanFrameTimeMicros();
285 if (verbose_)
286 printf("Mean frame time micros = %d\n", mean_frame_time_us);
287 // Delay time is 2x the average frame time.
288 delay_time_us_ = 2 * mean_frame_time_us;
289 // Calculate delay time between inputs based on the measured frame time.
290 // This prevents flooding the browser with more events than we need if the
291 // test is running very slow (such as on a VM).
292 int delay_us = mean_frame_time_us / kInputsPerFrame;
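For example, at roughly 60 fps the measured mean frame time is about 16,700 us, so delay_us comes out near 1,040 us; RunTestInternal() then rounds that down to a 1 ms timer period, which still produces on the order of kInputsPerFrame (16) inputs per frame.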
293
294 // Reset num_frames_ for actual test runs.
295 num_frames_ = kNumFrames;
296
297 // Run input latency test with each requested behavior.
298 for (size_t i = 0; i < behaviors.size(); ++i) {
299 test_flags_ = behaviors[i];
300 std::string url = GetUrl(test_flags_);
301 printf("=============================================================\n");
302 if (verbose_)
303 printf("Mode: %s\n", GetUrlModeString(i).c_str());
304 printf("URL: %s\n", url.c_str());
305
306 // Do the actual test with input events.
307 RunTestInternal(url, true, delay_us);
308 latencies_[test_flags_] = CalculateLatency();
309 }
310
311 // Print summary if more than 1 behavior was tested in this run. This is only
312 // for manual test runs for human-readable results, not for perf bots.
313 if (behaviors.size() > 1) {
314 printf("#############################################################\n");
315 printf("## %s\n", GetModeString().c_str());
316 if (verbose_) {
317 printf("Latency, behavior:\n");
318 for (size_t i = 0; i < behaviors.size(); ++i) {
319 printf("%.1f, %s%s%s%s\n", latencies_[behaviors[i]],
320 (i & kInputHeavy) ? "InputHeavy " : "",
321 (i & kInputDirty) ? "InputDirty " : "",
322 (i & kRafHeavy) ? "RafHeavy " : "",
323 (i & kPaintHeavy) ? "PaintHeavy " : "");
324 }
325 }
326 printf("Latencies for tests: ");
327 for (size_t i = 0; i < behaviors.size(); ++i) {
328 printf("%.1f%s", latencies_[behaviors[i]],
329 (i < behaviors.size() - 1) ? ", " : "");
330 }
331 printf("\n");
332 printf("#############################################################\n");
333 }
334 }
335
336 void LatencyTest::RunTestInternal(const std::string& test_url,
337 bool send_inputs,
338 int input_delay_us) {
339 mouse_x_ = 0;
340
341 ASSERT_TRUE(tracing::BeginTracing("test_gpu,test_latency"));
342
343 ui_test_utils::NavigateToURLWithDisposition(
344 browser(), GURL(test_url), CURRENT_TAB,
345 ui_test_utils::BROWSER_TEST_NONE);
346
347 // Start sending mouse inputs.
348 if (send_inputs) {
349 // Round input_delay_us down to the nearest millisecond. The rounding in timer
350 // code rounds up from us to ms, so we need to do our own rounding here.
351 int input_delay_ms = input_delay_us / 1000;
352 input_delay_ms = (input_delay_ms <= 0) ? 1 : input_delay_ms;
353 timer_.Start(FROM_HERE, base::TimeDelta::FromMilliseconds(input_delay_ms),
354 this, &LatencyTest::SendInput);
355 }
356
357 // Wait for message indicating the test has finished running.
358 ui_test_utils::DOMMessageQueue message_queue;
359 ASSERT_TRUE(message_queue.WaitForMessage(NULL));
360
361 timer_.Stop();
362
363 std::string json_events;
364 ASSERT_TRUE(tracing::EndTracing(&json_events));
365
366 analyzer_.reset(TraceAnalyzer::Create(json_events));
367 analyzer_->AssociateBeginEndEvents();
368 analyzer_->MergeAssociatedEventArgs();
369 }
370
371 double LatencyTest::CalculateLatency() {
372 TraceEventVector events;
373 if (mode_ == kWebGL) {
374 // Search for three types of events in WebGL mode:
375 // - onscreen swaps.
376 // - DoClear calls that contain the mouse x coordinate.
377 // - mouse events.
378 analyzer_->FindEvents(query_swaps_ || query_inputs_ ||
379 query_blits_ || query_clears_, &events);
380 } else if (mode_ == kSoftware) {
381 analyzer_->FindEvents(query_swaps_ || query_inputs_, &events);
382 } else {
383 NOTREACHED() << "invalid mode";
384 }
385
386 if (verbose_)
387 PrintEvents(events);
388
389 int swap_count = 0;
390 size_t previous_blit_pos = 0;
391 swap_count = 0;
392 std::vector<int> latencies;
393 printf("Measured latency (in number of frames) for each frame:\n");
394 for (size_t i = 0; i < events.size(); ++i) {
395 if (query_swaps_.Evaluate(*events[i])) {
396 // Compositor context swap buffers.
397 ++swap_count;
398 // Don't analyze first few swaps, because they are filling the rendering
399 // pipeline and may be unstable.
400 if (swap_count > kIgnoreBeginFrames) {
401 int mouse_x = 0;
402 if (mode_ == kWebGL) {
403 // Trace backwards through the events to find the input event that
404 // matches the glClear that was presented by this SwapBuffers.
405
406 // Step 1: Find the last blit (which will be the WebGL blit).
407 size_t blit_pos = 0;
408 EXPECT_TRUE(FindLastOf(events, query_blits_, i, &blit_pos));
409 // Skip this SwapBuffers if the blit has already been consumed by a
410 // previous SwapBuffers. This means the current frame did not receive
411 // an update from WebGL.
412 EXPECT_GT(blit_pos, previous_blit_pos);
413 if (blit_pos == previous_blit_pos) {
414 if (verbose_)
415 printf(" %03d: ERROR\n", swap_count);
416 else
417 printf(" ERROR");
418 continue;
419 }
420 previous_blit_pos = blit_pos;
421
422 // Step 2: find the last clear before the WebGL blit. This will be the
423 // value of the latest mouse input that has affected this swap.
424 size_t clear_pos = 0;
425 EXPECT_TRUE(FindLastOf(events, query_clears_, blit_pos, &clear_pos));
426 mouse_x = events[clear_pos]->GetKnownArgAsInt("red");
427 } else if (mode_ == kSoftware) {
428 // The software path gets the mouse_x directly from the DIB colors.
429 mouse_x = events[i]->GetKnownArgAsInt("color");
430 }
431
432 // Step 3: Find the mouse input event matching that x coordinate.
433 size_t input_pos = 0;
434 Query query_mouse_event = query_inputs_ &&
435 Query(EVENT_ARG, "x") == Query::Int(mouse_x);
436 EXPECT_TRUE(FindLastOf(events, query_mouse_event, i, &input_pos));
437
438 // Step 4: Find the nearest onscreen SwapBuffers to this input event.
439 size_t closest_swap = 0;
440 size_t second_closest_swap = 0;
441 EXPECT_TRUE(FindClosest(events, query_swaps_, input_pos,
442 &closest_swap, &second_closest_swap));
443 int latency = CountMatches(events, query_swaps_, closest_swap, i);
444 latencies.push_back(latency);
445 if (verbose_)
446 printf(" %03d: %d\n", swap_count, latency);
447 else
448 printf(" %d", latency);
449 }
450 }
451 }
452 printf("\n");
453
454 size_t ignoreEndFrames = static_cast<size_t>(kIgnoreEndFrames);
455 bool haveEnoughFrames = latencies.size() >
456 ignoreEndFrames + static_cast<size_t>(kMinimumFramesForAnalysis);
457 EXPECT_TRUE(haveEnoughFrames);
458 if (!haveEnoughFrames)
459 return 0.0;
460
461 double mean_latency = 0.0;
462 // Skip last few frames, because they may be unreliable.
463 size_t num_consider = latencies.size() - ignoreEndFrames;
464 for (size_t i = 0; i < num_consider; ++i)
465 mean_latency += static_cast<double>(latencies[i]);
466 mean_latency /= static_cast<double>(num_consider);
467 printf("Mean latency = %f\n", mean_latency);
468
469 double mean_error = 0.0;
470 for (size_t i = 0; i < num_consider; ++i) {
471 double offset = fabs(mean_latency - static_cast<double>(latencies[i]));
472 mean_error = (offset > mean_error) ? offset : mean_error;
473 }
474
475 std::string trace_name = GetTraceName(test_flags_);
476 std::string mean_and_error = base::StringPrintf("%f,%f", mean_latency,
477 mean_error);
478 perf_test::PrintResultMeanAndError(GetModeString(), "", trace_name,
479 mean_and_error, "frames", true);
480 return mean_latency;
481 }
482
483 std::string LatencyTest::GetTraceName(int flags) {
484 if (flags == 0)
485 return "simple";
486 std::string name;
487 if (flags & kInputHeavy)
488 name += "ih";
489 if (flags & kInputDirty)
490 name += std::string(name.empty() ? "" : "_") + "id";
491 if (flags & kRafHeavy)
492 name += std::string(name.empty() ? "" : "_") + "rh";
493 if (flags & kPaintHeavy)
494 name += std::string(name.empty() ? "" : "_") + "ph";
495 return name;
496 }
497
498 std::string LatencyTest::GetUrlModeString(int flags) {
499 std::string mode = "&mode=" + GetModeString();
500 if (flags & kInputHeavy)
501 mode += "&inputHeavy";
502 if (flags & kInputDirty)
503 mode += "&inputDirty";
504 if (flags & kRafHeavy)
505 mode += "&rafHeavy";
506 if (flags & kPaintHeavy)
507 mode += "&paintHeavy";
508 return mode;
509 }
510
511 std::string LatencyTest::GetUrl(int flags) {
512 std::string test_url =
513 net::FilePathToFileURL(test_path_).possibly_invalid_spec();
514 test_url += "?numFrames=" + base::IntToString(num_frames_);
515 test_url += "&canvasWidth=" + base::IntToString(kWebGLCanvasWidth);
516 test_url += "&clearColorGreen=" + base::IntToString(kClearColorGreen);
517 test_url += "&delayTimeMS=" + base::IntToString(delay_time_us_ / 1000);
518 test_url += "&y=" + base::IntToString(kMouseY);
519 return test_url + GetUrlModeString(flags);
520 }
521
522 int LatencyTest::GetMeanFrameTimeMicros() const {
523 TraceEventVector events;
524 // Search for compositor swaps (or UpdateRects in the software path).
525 analyzer_->FindEvents(query_swaps_, &events);
526 RateStats stats;
527 bool success = GetRateStats(events, &stats);
528 // ASSERT_TRUE doesn't work in methods that return non-void.
529 EXPECT_TRUE(success);
530 CHECK(success);
Paweł Hajdan Jr. 2011/12/22 08:08:39 Are you sure you want to crash the test? This is a
jbates 2011/12/22 19:39:34 Done. (Changed to void method with ASSERT_TRUE.)
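For readers following along, a minimal sketch of the pattern jbates describes (the landed change may differ in detail): gtest ASSERT_* macros are only usable in functions returning void, so the helper returns its result through an out parameter instead of crashing via CHECK.

// Hypothetical reshaping of GetMeanFrameTimeMicros() -- illustration only.
void LatencyTest::GetMeanFrameTimeMicros(int* mean_frame_time_us) const {
  TraceEventVector events;
  // Search for compositor swaps (or UpdateRects in the software path).
  analyzer_->FindEvents(query_swaps_, &events);
  RateStats stats;
  ASSERT_TRUE(GetRateStats(events, &stats));  // Aborts this test on failure.
  // Check that the number of swaps is close to kNumFrames.
  EXPECT_LT(num_frames_ - num_frames_ / 4, static_cast<int>(events.size()));
  *mean_frame_time_us = static_cast<int>(stats.mean_us);
}

The caller in RunTest() would then pass a pointer, typically wrapped in ASSERT_NO_FATAL_FAILURE so the fatal failure also stops the calling test body.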
531
532 // Check that the number of swaps is close to kNumFrames.
533 EXPECT_LT(num_frames_ - num_frames_ / 4, static_cast<int>(events.size()));
534 return static_cast<int>(stats.mean_us);
535 }
536
537 void LatencyTest::SendInput() {
538 RenderViewHost* rvh = browser()->GetSelectedTabContentsWrapper()->
539 tab_contents()->render_manager_for_testing()->current_host();
540 WebKit::WebMouseEvent mouse_event;
541 mouse_event.movementX = 1;
542 mouse_x_ += mouse_event.movementX;
543 // Wrap mouse_x_ when it's near the edge of the tab.
544 if (mouse_x_ > tab_width_ - 5)
545 mouse_x_ = 1;
546 mouse_event.x = mouse_event.windowX = mouse_x_;
547 // Set y coordinate to be a few pixels down from the top of the window,
548 // so that it is between the top and bottom of the canvas.
549 mouse_event.y = mouse_event.windowY = kMouseY;
550 mouse_event.type = WebKit::WebInputEvent::MouseMove;
551 TRACE_EVENT_INSTANT1("test_latency", "MouseEventBegin", "x", mouse_x_);
552 rvh->ForwardMouseEvent(mouse_event);
553 }
554
555 void LatencyTest::PrintEvents(const TraceEventVector& events) {
556 bool is_software = (mode_ == kSoftware);
557 int swap_count = 0;
558 for (size_t i = 0; i < events.size(); ++i) {
559 if (events[i]->name == "MouseEventBegin") {
560 printf("%03d ", events[i]->GetKnownArgAsInt("x"));
561 } else if (events[i]->name == "DoClear") {
562 printf("Clr%03d ", events[i]->GetKnownArgAsInt("red"));
563 } else if (events[i]->name == "DoBlit") {
564 // WebGL context swap buffers.
565 printf("BLT ");
566 } else if (events[i]->name == "SwapBuffers") {
567 // Compositor context swap buffers.
568 ++swap_count;
569 printf("|\nframe %03d: ", swap_count + 1);
570 } else if (is_software && events[i]->name == "UpdateRect") {
571 ++swap_count;
572 printf("(%d)|\nframe %03d: ",
573 events[i]->GetKnownArgAsInt("color"), swap_count + 1);
574 }
575 }
576 printf("\n");
577 }
578
579 ////////////////////////////////////////////////////////////////////////////////
580 /// Tests
581
582 // For manual testing only, run all input latency tests and print summary.
583 IN_PROC_BROWSER_TEST_F(LatencyTest, DISABLED_LatencyWebGLAll) {
584 RunTest(kWebGL, GetAllBehaviors());
585 }
586
587 // For manual testing only, run all input latency tests and print summary.
588 IN_PROC_BROWSER_TEST_F(LatencyTest, DISABLED_LatencySoftwareAll) {
589 RunTest(kSoftware, GetAllBehaviors());
590 }
591
592 // Define a latency test for WebGL and Software.
593 // |prefix| can optionally be DISABLED_ or FLAKY_, etc.
594 #define LATENCY_TEST(prefix, name, behavior) \
Paweł Hajdan Jr. 2011/12/22 08:08:39 Why a macro? I think gtest has parametrized tests,
jbates 2011/12/22 19:39:34 Done.
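For context, the gtest value-parameterized alternative the reviewer refers to has roughly the shape sketched below. It is illustrative only: plain TEST_P does not hook into the IN_PROC_BROWSER_TEST_F plumbing, so an in-process browser test would need an equivalent parameterized browser-test macro, and the LatencyBehaviorTest fixture name is hypothetical.

class LatencyBehaviorTest : public LatencyTest,
                            public ::testing::WithParamInterface<int> {};

TEST_P(LatencyBehaviorTest, WebGL) {
  RunTest(kWebGL, std::vector<int>(1, GetParam()));
}

TEST_P(LatencyBehaviorTest, Software) {
  RunTest(kSoftware, std::vector<int>(1, GetParam()));
}

// One instantiation per behavior combination currently spelled out below.
INSTANTIATE_TEST_CASE_P(
    Behaviors, LatencyBehaviorTest,
    ::testing::Values(0,
                      kInputHeavy,
                      kInputHeavy | kInputDirty | kRafHeavy,
                      kInputDirty | kPaintHeavy,
                      kInputDirty | kRafHeavy | kPaintHeavy,
                      kInputDirty | kInputHeavy | kRafHeavy | kPaintHeavy));

One trade-off worth noting: the perf trace names on the bots come from GetTraceName(), so a parameterized version would still need to derive those suffixes from the flags rather than from gtest's numeric parameter index.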
595 IN_PROC_BROWSER_TEST_F(LatencyTest, prefix ## LatencyWebGL ## name) { \
596 std::vector<int> behaviors; \
597 behaviors.push_back(behavior); \
598 RunTest(kWebGL, behaviors); \
599 } \
600 IN_PROC_BROWSER_TEST_F(LatencyTest, prefix ## LatencySoftware ## name) { \
601 std::vector<int> behaviors; \
602 behaviors.push_back(behavior); \
603 RunTest(kSoftware, behaviors); \
604 }
605
606 LATENCY_TEST(, _simple, 0);
607 LATENCY_TEST(, _ih, kInputHeavy);
608 LATENCY_TEST(, _ih_id_rh, kInputHeavy | kInputDirty | kRafHeavy);
609 LATENCY_TEST(, _id_ph, kInputDirty | kPaintHeavy);
610 LATENCY_TEST(, _id_rh_ph, kInputDirty | kRafHeavy | kPaintHeavy);
611 LATENCY_TEST(, _id_ih_rh_ph, kInputDirty | kInputHeavy | kRafHeavy |
612 kPaintHeavy);
613
614 } // namespace anonymous
Paweł Hajdan Jr. 2011/12/22 08:08:39 nit: We usually just write "// namespace".
jbates 2011/12/22 19:39:34 Done.