Chromium Code Reviews

| Index: chrome/test/perf/rendering/latency_tests.cc |
| diff --git a/chrome/test/perf/rendering/latency_tests.cc b/chrome/test/perf/rendering/latency_tests.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..a98b86fe8330ca34b6e8c91ee4f4030994786489 |
| --- /dev/null |
| +++ b/chrome/test/perf/rendering/latency_tests.cc |
| @@ -0,0 +1,539 @@ |
| +// Copyright (c) 2011 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include "base/command_line.h" |
| +#include "base/file_util.h" |
| +#include "base/memory/scoped_ptr.h" |
| +#include "base/path_service.h" |
| +#include "base/string_number_conversions.h" |
| +#include "base/stringprintf.h" |
| +#include "base/test/test_switches.h" |
| +#include "base/test/trace_event_analyzer.h" |
| +#include "base/threading/platform_thread.h" |
| +#include "base/timer.h" |
| +#include "base/version.h" |
| +#include "chrome/browser/ui/browser.h" |
| +#include "chrome/browser/ui/tab_contents/tab_contents_wrapper.h" |
| +#include "chrome/common/chrome_paths.h" |
| +#include "chrome/common/chrome_switches.h" |
| +#include "chrome/test/base/tracing.h" |
| +#include "chrome/test/base/ui_test_utils.h" |
| +#include "chrome/test/perf/browser_perf_test.h" |
| +#include "chrome/test/perf/perf_test.h" |
| +#include "content/browser/renderer_host/render_view_host.h" |
| +#include "content/browser/tab_contents/tab_contents.h" |
| +#include "content/public/common/content_switches.h" |
| +#include "third_party/WebKit/Source/WebKit/chromium/public/WebInputEvent.h" |
| +#include "testing/gtest/include/gtest/gtest.h" |
| + |
| +// Run with --vmodule=latency_tests=1 to print verbose latency info. |
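| +// For example (illustrative only; the exact test binary/target name may |
| +// differ in your checkout): |
| +//   performance_browser_tests --gtest_filter=LatencyTest.WebGLLatencySelect \ |
| +//       --vmodule=latency_tests=1 |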
| + |
| +// How is latency measured? |
| +// |
| +// The test injects mouse moves many times per frame from the browser via |
| +// RenderWidgetHost. Each input has a unique x coordinate. When the javascript |
| +// handler receives the input, it stores the coordinate for later use in the |
| +// requestAnimationFrame callback. In RAF, the test paints using the x |
| +// coordinate as a color (in software, it sets the color of a table; in webgl, |
| +// it executes a glClearColor). Trace events emit the color when it is picked up |
| +// by either UpdateRect for software or gles2_cmd_decoder/glClear for webgl. |
| +// |
| +// Each UpdateRect (software) or SwapBuffers (webgl) is considered to be a frame |
| +// boundary that will be used to measure latency in number of frames. Starting |
| +// from a frame boundary Y, the test first determines what mouse x coordinate |
| +// was represented by the color at that frame boundary. Then, the test walks |
| +// backward through the trace events to find the input event matching that |
| +// x coordinate. Then, the test finds the nearest frame boundary X to the input |
| +// event (may be before or after). The number of frame boundaries is then |
| +// counted between X and Y to determine the input latency. |
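| +// |
| +// Illustrative example (hypothetical trace, oldest event first): |
| +//   ... frameX, input(x=40), frameA, frameY(color encodes x=40) ... |
| +// The color at frameY maps back to the input with x=40; the nearest frame |
| +// boundary to that input happens to be frameX; counting frame boundaries |
| +// from X up to (but not including) Y -- frameX and frameA -- gives a |
| +// latency of 2 frames. |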
| +// |
| +// By injecting mouse moves many times per frame, we reduce flakiness in |
| +// finding the nearest frame boundary. |
| +// |
| +// This test only measures the latency introduced by chrome code -- it does not |
| +// measure latency introduced by mouse drivers or the GL driver or the OS window |
| +// manager. The actual latency seen by a user is more than what is reported by |
| +// this test. |
| +// |
| +// Current modes: |
| +// - Software RAF |
| +// - WebGL RAF |
| + |
| +namespace { |
| + |
| +using namespace trace_analyzer; |
| + |
| +enum LatencyTestFlags { |
| + kInputHeavy = 1 << 0, |
| + kInputDirty = 1 << 1, |
| + kRafHeavy = 1 << 2, |
| + kPaintHeavy = 1 << 3 |
| +}; |
| + |
| +int kWebGLCanvasWidth = 10; |
nduca (2011/12/21 20:24:23): should these be const? you've got them named as if they were constants.
jbates (2011/12/22 02:41:17): Done.
| +int kNumFrames = 60; |
| +int kInputsPerFrame = 16; |
| +// Magic number to identify certain glClear events. |
| +int kClearColorGreen = 137; |
| +int kMouseY = 5; |
| + |
| +class LatencyTest : public BrowserPerfTest { |
| + public: |
| + LatencyTest() : |
| + query_instant_(Query(EVENT_PHASE) == |
| + Query::Phase(TRACE_EVENT_PHASE_INSTANT)), |
nduca (2011/12/21 20:24:23): you might make a note that these will be initialized later.
jbates (2011/12/22 02:41:17): Done. (Sadly no default constructor on Query)
| + query_swaps_(Query::Bool(false)), |
| + query_inputs_(Query::Bool(false)), |
| + query_blits_(Query::Bool(false)), |
| + query_clears_(Query::Bool(false)), |
| + mouse_x_(0), |
| + tab_width_(0), |
| + delay_time_ms_(0), |
| + num_frames_(0), |
| + verbose_(false), |
| + test_flags_(0) {} |
| + |
| + virtual void SetUpCommandLine(CommandLine* command_line) { |
Paweł Hajdan Jr. (2011/12/21 09:50:29): nit: OVERRIDE
jbates (2011/12/21 19:34:18): Done.
| + BrowserPerfTest::SetUpCommandLine(command_line); |
| + // This enables DOM automation for tab contents. |
| + EnableDOMAutomation(); |
| +#ifdef TEST_THREADED_COMPOSITING_LATENCY |
Paweł Hajdan Jr. (2011/12/21 09:50:29): Why is this pre-processor controlled? I don't see…
jbates (2011/12/21 19:34:18): Done.
| + command_line->AppendSwitch(switches::kEnableThreadedCompositing); |
| +#endif |
| + // Default behavior is to thumbnail the tab after 0.5 seconds, causing |
| + // a nasty frame hitch and disturbing the latency test. Fix that: |
| + command_line->AppendSwitch(switches::kEnableInBrowserThumbnailing); |
| + command_line->AppendSwitch(switches::kDisableBackgroundNetworking); |
| + } |
| + |
| + // Run test with specified |mode| and |behaviors|. |
| + // |mode| can be webgl or software. |
| + // |behaviors| is a list of combinations of LatencyTestFlags. |
| + void RunTest(const std::string& mode, const std::vector<int>& behaviors); |
| + |
| + std::vector<int> GetAllBehaviors() { |
| + std::vector<int> behaviors; |
| + int max_behaviors = kInputHeavy | kInputDirty | kRafHeavy | kPaintHeavy; |
| + for (int i = 0; i <= max_behaviors; ++i) |
| + behaviors.push_back(i); |
| + return behaviors; |
| + } |
| + |
| +private: |
Paweł Hajdan Jr. (2011/12/21 09:50:29): nit: Indent +1.
jbates (2011/12/21 19:34:18): Done.
| + void RunTestInternal(const std::string& test_url, |
| + bool send_inputs, |
| + int input_delay_ms); |
| + |
| + double CalculateLatency(); |
| + |
| + std::string GetTraceName(int flags) { |
| + if (flags == 0) |
| + return "simple"; |
| + std::string name; |
| + if (flags & kInputHeavy) |
| + name += "ih"; |
| + if (flags & kInputDirty) |
| + name += std::string(name.empty()? "" : "_") + "id"; |
| + if (flags & kRafHeavy) |
| + name += std::string(name.empty()? "" : "_") + "rh"; |
| + if (flags & kPaintHeavy) |
| + name += std::string(name.empty()? "" : "_") + "ph"; |
| + return name; |
| + } |
| + |
| + std::string GetUrlModeString(int flags) { |
| + std::string mode = "&mode=" + mode_; |
| + if (flags & kInputHeavy) |
| + mode += "&inputHeavy=true"; |
| + if (flags & kInputDirty) |
| + mode += "&inputDirty=true"; |
| + if (flags & kRafHeavy) |
| + mode += "&rafHeavy=true"; |
| + if (flags & kPaintHeavy) |
| + mode += "&paintHeavy=true"; |
| + return mode; |
| + } |
| + |
| + std::string GetUrl(int flags) { |
| + std::string test_url = |
| + net::FilePathToFileURL(test_path_).possibly_invalid_spec(); |
| + test_url += "?numFrames=" + base::IntToString(num_frames_); |
| + test_url += "&canvasWidth=" + base::IntToString(kWebGLCanvasWidth); |
| + test_url += "&clearColorGreen=" + base::IntToString(kClearColorGreen); |
| + test_url += "&delayTimeMS=" + base::IntToString(delay_time_ms_); |
| + test_url += "&y=" + base::IntToString(kMouseY); |
| + return test_url + GetUrlModeString(flags); |
| + } |
| + |
| + int GetMeanFrameTime() const { |
| + TraceEventVector events; |
| + // Search for compositor swaps (or UpdateRects in the software path). |
| + analyzer_->FindEvents(query_swaps_, &events); |
| + RateStats stats; |
| + bool success = GetRateStats(events, &stats); |
| + EXPECT_TRUE(success); |
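| + // If rate stats are unavailable, fall back to 16000 microseconds per |
| + // frame (roughly 60 fps) so the rest of the test can still proceed. |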
| + if (!success) |
| + return 16000; |
nduca (2011/12/21 20:24:23): Comment to explain why this magic number is used?
jbates (2011/12/22 02:41:17): Done.
| + |
| + // Check that the number of swaps is close to kNumFrames. |
| + EXPECT_LT(num_frames_ - num_frames_/4, static_cast<int>(events.size())); |
Paweł Hajdan Jr. (2011/12/21 09:50:29): nit: Spaces around "/".
jbates (2011/12/21 19:34:18): Done.
| + return static_cast<int>(stats.mean_us); |
| + } |
| + |
| + void SendInput() { |
| + RenderViewHost* rvh = browser()->GetSelectedTabContentsWrapper()-> |
| + tab_contents()->render_view_host(); |
| + WebKit::WebMouseEvent mouse_event; |
| + mouse_event.movementX = 1; |
| + mouse_x_ += mouse_event.movementX; |
| + // Wrap mouse_x_ when it's near the edge of the tab. |
| + if (mouse_x_ > tab_width_ - 5) |
| + mouse_x_ = 1; |
| + mouse_event.x = mouse_event.windowX = mouse_x_; |
| + // Set the y coordinate to kMouseY, a few pixels down from the top of the |
| + // window, so that it is between the top and bottom of the canvas. |
| + mouse_event.y = mouse_event.windowY = kMouseY; |
| + mouse_event.type = WebKit::WebInputEvent::MouseMove; |
| + TRACE_EVENT_INSTANT1("test_latency", "MouseEventBegin", "x", mouse_x_); |
| + rvh->ForwardMouseEvent(mouse_event); |
nduca (2011/12/21 20:24:23): /me wonders out loud whether this will cleanly extend…
jbates (2011/12/22 02:41:17): Yeah, it seems like it would be a trivial update to…
| + } |
| + |
| + void PrintEvents(const TraceEventVector& events) { |
| + bool is_software = (mode_ == "software"); |
| + // Print trace data. |
nduca (2011/12/21 20:24:23): comment doesn't add a lot of value methinks
jbates (2011/12/22 02:41:17): Done.
| + int swap_count = 0; |
| + for (size_t i = 0; i < events.size(); ++i) { |
| + if (events[i]->name == "MouseEventBegin") { |
| + printf("%03d ", events[i]->GetKnownArgAsInt("x")); |
| + } |
| + else if (events[i]->name == "DoClear") { |
Paweł Hajdan Jr. (2011/12/21 09:50:29): nit: move this to the previous line, same below, i…
jbates (2011/12/21 19:34:18): Done.
| + printf("Clr%03d ", events[i]->GetKnownArgAsInt("red")); |
| + } |
| + else if (events[i]->name == "DoBlit") { |
| + // WebGL context swap buffers. |
| + printf("BLT "); |
| + } |
| + else if (events[i]->name == "SwapBuffers") { |
| + // Compositor context swap buffers. |
| + ++swap_count; |
| + printf("|\nframe %03d: ", swap_count + 1); |
| + } |
| + else if (is_software && events[i]->name == "UpdateRect") { |
| + ++swap_count; |
| + printf("(%d)|\nframe %03d: ", |
| + events[i]->GetKnownArgAsInt("color"), swap_count + 1); |
| + } |
| + } |
| + printf("\n"); |
| + } |
| + |
| + FilePath test_path_; |
Paweł Hajdan Jr. (2011/12/21 09:50:29): nit: Please comment member variables.
jbates (2011/12/21 19:34:18): Done.
| + Query query_instant_; |
| + Query query_swaps_; |
| + Query query_inputs_; |
| + Query query_blits_; |
| + Query query_clears_; |
| + scoped_ptr<TraceAnalyzer> analyzer_; |
| + int mouse_x_; |
| + int tab_width_; |
| + base::RepeatingTimer<LatencyTest> timer_; |
| + std::string mode_; |
| + int delay_time_ms_; |
| + int num_frames_; |
| + std::map<int, double> latencies_; |
| + bool verbose_; |
| + int test_flags_; |
| +}; |
| + |
| +void LatencyTest::RunTest(const std::string& mode, |
nduca (2011/12/21 20:24:23): odd that you're using a mode string but an enum for the flags.
jbates (2011/12/22 02:41:17): Done.
| + const std::vector<int>& behaviors) { |
nduca (2011/12/21 20:24:23): Also, a bit confused that some functions are implemented…
jbates (2011/12/22 02:41:17): Moved them out (to make the changes easier to follow).
| + mode_ = mode; |
| + verbose_ = (logging::GetVlogLevel("latency_tests") > 0); |
| + |
| + // Construct queries for searching trace events via TraceAnalyzer. |
| + int initial_flags = 0; |
| + if (mode_ == "webgl") { |
| + query_swaps_ = query_instant_ && |
| + Query(EVENT_NAME) == Query::String("SwapBuffers") && |
| + Query(EVENT_ARG, "width") != Query::Int(kWebGLCanvasWidth); |
| + } else if (mode_ == "software") { |
| + // For the initial calibration run in software mode, force kPaintHeavy so |
| + // that the page still paints every frame even though no input events are |
| + // sent to trigger paints. |
nduca (2011/12/21 20:24:23): Not sure I follow this. Also, this feels out of place…
jbates (2011/12/22 02:41:17): Done.
| + initial_flags = static_cast<int>(kPaintHeavy); |
| + // Software updates need to have x=0 and y=0 to contain the input color. |
| + query_swaps_ = query_instant_ && |
| + Query(EVENT_NAME) == Query::String("UpdateRect") && |
| + Query(EVENT_ARG, "x+y") == Query::Int(0); |
| + } |
| + query_inputs_ = query_instant_ && |
| + Query(EVENT_NAME) == Query::String("MouseEventBegin"); |
| + query_blits_ = query_instant_ && |
| + Query(EVENT_NAME) == Query::String("DoBlit") && |
| + Query(EVENT_ARG, "width") == Query::Int(kWebGLCanvasWidth); |
| + query_clears_ = query_instant_ && |
| + Query(EVENT_NAME) == Query::String("DoClear") && |
| + Query(EVENT_ARG, "green") == Query::Int(kClearColorGreen); |
| + Query query_width_swaps = query_swaps_; |
| + if (mode_ == "software") { |
| + query_width_swaps = query_instant_ && |
| + Query(EVENT_NAME) == Query::String("UpdateRectWidth") && |
| + Query(EVENT_ARG, "width") > Query::Int(kWebGLCanvasWidth); |
| + } |
| + |
| + // Set path to test html. |
| + PathService::Get(chrome::DIR_TEST_DATA, &test_path_); |
Paweł Hajdan Jr. (2011/12/21 09:50:29): Check the return value.
jbates (2011/12/21 19:34:18): Done.
| + test_path_ = test_path_.Append(FILE_PATH_LITERAL("perf")); |
| + test_path_ = test_path_.Append(FILE_PATH_LITERAL("latency_suite.html")); |
| + ASSERT_TRUE(file_util::PathExists(test_path_)) |
| + << "Missing test file: " << test_path_.value(); |
| + |
| + // Run once with defaults to measure the frame times. |
| + delay_time_ms_ = 0; |
| + // kNumFrames may be very high, but we only need a few frames to measure |
| + // average frame times. |
| + num_frames_ = 30; |
| + RunTestInternal(GetUrl(initial_flags), false, 0); |
| + |
| + // Get width of tab so that we know the limit of x coordinates for the |
| + // injected mouse inputs. |
| + const TraceEvent* swap_event = analyzer_->FindOneEvent(query_width_swaps); |
| + ASSERT_TRUE(swap_event); |
| + tab_width_ = swap_event->GetKnownArgAsInt("width"); |
| + // Keep printf output clean by limiting input coords to three digits: |
| + tab_width_ = (tab_width_ < 1000) ? tab_width_ : 999; |
| + // Sanity check the tab_width -- it should be more than 100 pixels. |
| + EXPECT_GT(tab_width_, 100); |
| + |
| + int mean_frame_time_us = GetMeanFrameTime(); |
nduca (2011/12/21 20:24:23): why are we mixing micros and ms?
jbates (2011/12/22 02:41:17): Done.
| + if (verbose_) |
| + printf("Mean frame time micros = %d\n", mean_frame_time_us); |
| + // Delay time is 2x the average frame time. |
| + delay_time_ms_ = 2 * mean_frame_time_us / 1000; |
| + // Calculate delay time between inputs based on the measured frame time. |
| + // This prevents flooding the browser with more events than we need if the |
| + // test is running very slow (such as on a VM). |
| + int delay_us = mean_frame_time_us / kInputsPerFrame; |
| + // Convert delay_us to whole milliseconds, rounding down. |
| + int delay_ms = delay_us / 1000; |
| + delay_ms = (delay_ms <= 0) ? 1 : delay_ms; |
| + |
| + // Reset num_frames_ for actual test runs. |
| + num_frames_ = kNumFrames; |
| + |
| + // Run input latency test with each requested behavior. |
nduca (2011/12/21 20:24:23): This behaviors vs TEST_ distinction reminds me of…
jbates (2011/12/22 02:41:17): Done.
| + for (size_t i = 0; i < behaviors.size(); ++i) { |
| + // Sleep to allow the rendering pipeline to flush. |
| + base::PlatformThread::Sleep(200); |
Paweł Hajdan Jr. (2011/12/21 09:50:29): Shouldn't you rather wait for an event or something?
jbates (2011/12/21 19:34:18): You're right, this was left-over from local testing.
| + |
| + test_flags_ = behaviors[i]; |
| + std::string url = GetUrl(test_flags_); |
| + printf("=============================================================\n"); |
| + if (verbose_) |
| + printf("Mode: %s\n", GetUrlModeString(i).c_str()); |
| + printf("URL: %s\n", url.c_str()); |
| + |
| + // Do the actual test with input events. |
| + RunTestInternal(url, true, delay_ms); |
| + latencies_[test_flags_] = CalculateLatency(); |
| + } |
| + |
| + // Print summary if more than 1 behavior was tested in this run. |
nduca (2011/12/21 20:24:23): Is this for human consumption, or for consumption by the bots?
jbates (2011/12/22 02:41:17): For human consumption, added a comment.
| + if (behaviors.size() > 1) { |
| + printf("#############################################################\n"); |
| + printf("## %s\n", mode_.c_str()); |
| + if (verbose_) { |
| + printf("Latency, behavior:\n"); |
| + for (size_t i = 0; i < behaviors.size(); ++i) { |
| + printf("%.1f, %s%s%s%s\n", latencies_[behaviors[i]], |
| + (i & kInputHeavy) ? "InputHeavy " : "", |
| + (i & kInputDirty) ? "InputDirty " : "", |
| + (i & kRafHeavy) ? "RafHeavy " : "", |
| + (i & kPaintHeavy) ? "PaintHeavy " : ""); |
| + } |
| + } |
| + printf("Latencies for tests: "); |
| + for (size_t i = 0; i < behaviors.size(); ++i) { |
| + printf("%.1f%s", latencies_[behaviors[i]], |
| + (i < behaviors.size() - 1) ? ", " : ""); |
| + } |
| + printf("\n"); |
| + printf("#############################################################\n"); |
| + } |
| +} |
| + |
| +void LatencyTest::RunTestInternal(const std::string& test_url, |
| + bool send_inputs, |
| + int input_delay_ms) { |
| + mouse_x_ = 0; |
| + |
| + ASSERT_TRUE(tracing::BeginTracing("test_gpu,test_latency")); |
| + |
| + ui_test_utils::NavigateToURLWithDisposition( |
| + browser(), GURL(test_url), CURRENT_TAB, |
| + ui_test_utils::BROWSER_TEST_NONE); |
| + |
| + // Start sending mouse inputs. |
| + if (send_inputs) { |
| + timer_.Start(FROM_HERE, base::TimeDelta::FromMilliseconds(input_delay_ms), |
| + this, &LatencyTest::SendInput); |
| + } |
| + |
| + // Wait for message indicating the test has finished running. |
| + ui_test_utils::DOMMessageQueue message_queue; |
| + ASSERT_TRUE(message_queue.WaitForMessage(NULL)); |
nduca (2011/12/21 20:24:23): Do we need any error handling here? Or is the idea…
jbates (2011/12/22 02:41:17): The trace analysis does lots of error checking. Le…
| + |
| + timer_.Stop(); |
| + |
| + std::string json_events; |
| + ASSERT_TRUE(tracing::EndTracing(&json_events)); |
| + |
| + analyzer_.reset(TraceAnalyzer::Create(json_events)); |
| + analyzer_->AssociateBeginEndEvents(); |
| + analyzer_->MergeAssociatedEventArgs(); |
| +} |
| + |
| +double LatencyTest::CalculateLatency() { |
| + TraceEventVector events; |
| + if (mode_ == "webgl") { |
| + // Search for three types of events in WebGL mode: |
| + // - onscreen swaps. |
| + // - DoClear calls that contain the mouse x coordinate. |
| + // - mouse events. |
| + analyzer_->FindEvents(query_swaps_ || query_inputs_ || |
| + query_blits_ || query_clears_, &events); |
| + } |
| + else if (mode_ == "software") { |
Paweł Hajdan Jr. (2011/12/21 09:50:29): nit: Move to the previous line.
jbates (2011/12/21 19:34:18): Done.
| + analyzer_->FindEvents(query_swaps_ || query_inputs_, &events); |
| + } else { |
| + CHECK(0) << "invalid mode"; |
Paweł Hajdan Jr. (2011/12/21 09:50:29): Oh, a CHECK? We usually do a NOTREACHED() in those cases.
jbates (2011/12/21 19:34:18): Done.
| + } |
| + |
| + if (verbose_) |
| + PrintEvents(events); |
| + |
| + int swap_count = 0; |
| + size_t previous_blit_pos = 0; |
| + std::vector<int> latencies; |
| + printf("Measured latency (in number of frames) for each frame:\n"); |
nduca (2011/12/21 20:24:23): /me is confused about the output of this test. You…
jbates (2011/12/22 02:41:17): The only stuff that is parsed by the perf bots comes…
| + for (size_t i = 0; i < events.size(); ++i) { |
nduca (2011/12/21 20:24:23): I'm a little surprised to see an actual loop in use…
jbates (2011/12/22 02:41:17): I think it's just the nature of this test. It needs…
| + if (query_swaps_.Evaluate(*events[i])) { |
| + // Compositor context swap buffers. |
| + ++swap_count; |
| + // Don't analyze first few swaps, because they are filling the rendering |
| + // pipeline and may be unstable. Latencies can be as high as 5 frames or |
| + // so, so skip the first 6 frames to get more accurate results. |
| + if (swap_count > 6) { |
| + int mouse_x = 0; |
| + if (mode_ == "webgl") { |
| + // Trace backwards through the events to find the input event that |
| + // matches the glClear that was presented by this SwapBuffers. |
| + |
| + // Step 1: Find the last blit (which will be the WebGL blit). |
| + size_t blit_pos = 0; |
| + EXPECT_TRUE(FindLastOf(events, query_blits_, i, &blit_pos)); |
| + // Skip this SwapBuffers if the blit has already been consumed by a |
| + // previous SwapBuffers. This means the current frame did not receive |
| + // an update from WebGL. |
| + EXPECT_GT(blit_pos, previous_blit_pos); |
| + if (blit_pos == previous_blit_pos) { |
| + if (verbose_) |
| + printf(" %03d: ERROR\n", swap_count); |
| + else |
| + printf(" ERROR"); |
| + continue; |
| + } |
| + previous_blit_pos = blit_pos; |
| + |
| + // Step 2: Find the last clear before the WebGL blit. This will be the |
| + // value of the latest mouse input that has affected this swap. |
| + size_t clear_pos = 0; |
| + EXPECT_TRUE(FindLastOf(events, query_clears_, blit_pos, &clear_pos)); |
| + mouse_x = events[clear_pos]->GetKnownArgAsInt("red"); |
| + } else if (mode_ == "software") { |
| + // The software path gets the mouse_x directly from the DIB colors. |
| + mouse_x = events[i]->GetKnownArgAsInt("color"); |
| + } |
| + |
| + // Step 3: Find the corresponding mouse input. |
| + size_t input_pos = 0; |
| + Query query_mouse_event = query_inputs_ && |
| + Query(EVENT_ARG, "x") == Query::Int(mouse_x); |
| + EXPECT_TRUE(FindLastOf(events, query_mouse_event, i, &input_pos)); |
| + |
| + // Step 4: Find the nearest onscreen SwapBuffers to this input event. |
| + size_t closest_swap = 0; |
| + size_t second_closest_swap = 0; |
| + EXPECT_TRUE(FindClosest(events, query_swaps_, input_pos, |
| + &closest_swap, &second_closest_swap)); |
| + int latency = CountMatches(events, query_swaps_, closest_swap, i); |
| + latencies.push_back(latency); |
| + if (verbose_) |
| + printf(" %03d: %d\n", swap_count, latency); |
| + else |
| + printf(" %d", latency); |
| + } |
| + } |
| + } |
| + printf("\n"); |
| + |
| + EXPECT_GT(latencies.size(), size_t(2)); |
| + if (latencies.size() <= size_t(2)) |
| + return 0.0; |
| + |
| + double mean_latency = 0.0; |
| + // Skip the last couple of latency samples, because they could be unreliable. |
| + size_t num_consider = latencies.size() - 2; |
| + for (size_t i = 0; i < num_consider; ++i) |
| + mean_latency += static_cast<double>(latencies[i]); |
| + mean_latency /= static_cast<double>(num_consider); |
| + printf("Mean latency = %f\n", mean_latency); |
| + |
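| + // Note: the reported error is the maximum absolute deviation of any |
| + // considered latency sample from the mean. |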
| + double mean_error = 0.0; |
| + for (size_t i = 0; i < num_consider; ++i) { |
| + double offset = fabs(mean_latency - static_cast<double>(latencies[i])); |
| + mean_error = (offset > mean_error) ? offset : mean_error; |
| + } |
| + |
| + std::string trace_name = GetTraceName(test_flags_); |
| + std::string mean_and_error = base::StringPrintf("%f,%f", mean_latency, |
| + mean_error); |
| + perf_test::PrintResultMeanAndError(mode_, "", trace_name, mean_and_error, |
| + "frames", true); |
| + return mean_latency; |
| +} |
| + |
| +// For manual testing only, run all input latency tests and print summary. |
| +IN_PROC_BROWSER_TEST_F(LatencyTest, DISABLED_WebGLLatencyAll) { |
| + RunTest("webgl", GetAllBehaviors()); |
| +} |
| + |
| +// For manual testing only, run all input latency tests and print summary. |
| +IN_PROC_BROWSER_TEST_F(LatencyTest, DISABLED_SoftwareLatencyAll) { |
| + RunTest("software", GetAllBehaviors()); |
| +} |
| + |
| +IN_PROC_BROWSER_TEST_F(LatencyTest, WebGLLatencySelect) { |
| + std::vector<int> behaviors; |
| + behaviors.push_back(0); |
| + behaviors.push_back(kInputHeavy); |
| + behaviors.push_back(kInputHeavy | kInputDirty | kRafHeavy); |
| + behaviors.push_back(kInputDirty | kPaintHeavy); |
| + behaviors.push_back(kInputDirty | kRafHeavy | kPaintHeavy); |
| + behaviors.push_back(kInputDirty | kInputHeavy | kRafHeavy | kPaintHeavy); |
| + RunTest("webgl", behaviors); |
| +} |
| + |
| +IN_PROC_BROWSER_TEST_F(LatencyTest, SoftwareLatencySelect) { |
| + std::vector<int> behaviors; |
| + behaviors.push_back(0); |
| + behaviors.push_back(kInputHeavy); |
| + behaviors.push_back(kInputHeavy | kInputDirty | kRafHeavy); |
| + behaviors.push_back(kInputDirty | kPaintHeavy); |
| + behaviors.push_back(kInputDirty | kRafHeavy | kPaintHeavy); |
| + behaviors.push_back(kInputDirty | kInputHeavy | kRafHeavy | kPaintHeavy); |
| + RunTest("software", behaviors); |
| +} |
| + |
| +} // namespace anonymous |