// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <map>

#include "base/command_line.h"
#include "base/file_util.h"
#include "base/path_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/utf_string_conversions.h"
#include "base/test/test_timeouts.h"
#include "base/test/trace_event_analyzer.h"
#include "chrome/common/chrome_paths.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/test/automation/automation_proxy.h"
#include "chrome/test/automation/tab_proxy.h"
#include "chrome/test/perf/perf_test.h"
#include "chrome/test/ui/javascript_test_util.h"
#include "chrome/test/ui/ui_perf_test.h"
#include "net/base/net_util.h"
#include "testing/perf/perf_test.h"
#include "ui/gl/gl_implementation.h"
#include "ui/gl/gl_switches.h"

#if defined(OS_WIN)
#include "base/win/windows_version.h"
#endif

namespace {

enum FrameRateTestFlags {
  kUseGpu             = 1 << 0, // Only execute test if --enable-gpu, and verify
                                // that the test ran on the GPU. This is
                                // required for tests that run on the GPU.
  kForceGpuComposited = 1 << 1, // Force the test to use the compositor.
  kDisableVsync       = 1 << 2, // Do not limit framerate to vertical refresh
                                // when on the GPU, nor to 60hz when not on the
                                // GPU.
  kUseReferenceBuild  = 1 << 3, // Run test using the reference chrome build.
  kInternal           = 1 << 4, // Test uses internal test data.
  kHasRedirect        = 1 << 5, // Test page contains an HTML redirect.
  kIsGpuCanvasTest    = 1 << 6  // Test uses GPU accelerated canvas features.
};

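// Loads a JavaScript-driven frame-rate benchmark page, drives it through DOM
// automation, and reports the measured milliseconds-per-frame. Parameterized
// by a bitmask of FrameRateTestFlags.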
class FrameRateTest
  : public UIPerfTest
  , public ::testing::WithParamInterface<int> {
 public:
  FrameRateTest() {
    show_window_ = true;
    dom_automation_enabled_ = true;
  }

  bool HasFlag(FrameRateTestFlags flag) const {
    return (GetParam() & flag) == flag;
  }

  bool IsGpuAvailable() const {
    return CommandLine::ForCurrentProcess()->HasSwitch("enable-gpu");
  }

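  // Builds a suffix such as "_comp_gpu" from the enabled flags; it is
  // appended to the perf trace name so results from different configurations
  // are reported separately.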
  std::string GetSuffixForTestFlags() {
    std::string suffix;
    if (HasFlag(kForceGpuComposited))
      suffix += "_comp";
    if (HasFlag(kUseGpu))
      suffix += "_gpu";
    if (HasFlag(kDisableVsync))
      suffix += "_novsync";
    if (HasFlag(kUseReferenceBuild))
      suffix += "_ref";
    return suffix;
  }

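  // Returns the directory containing the named test page: internal tests live
  // under perf/frame_rate/private/, public ones under
  // perf/frame_rate/content/.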
  virtual base::FilePath GetDataPath(const std::string& name) {
    // Make sure the test data is checked out.
    base::FilePath test_path;
    PathService::Get(chrome::DIR_TEST_DATA, &test_path);
    test_path = test_path.Append(FILE_PATH_LITERAL("perf"));
    test_path = test_path.Append(FILE_PATH_LITERAL("frame_rate"));
    if (HasFlag(kInternal)) {
      test_path = test_path.Append(FILE_PATH_LITERAL("private"));
    } else {
      test_path = test_path.Append(FILE_PATH_LITERAL("content"));
    }
    test_path = test_path.AppendASCII(name);
    return test_path;
  }

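  // Translates the test flags into browser launch arguments before the base
  // UIPerfTest fixture starts the browser.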
  virtual void SetUp() {
    if (HasFlag(kUseReferenceBuild))
      UseReferenceBuild();

    // Turn on chrome.Interval to get higher-resolution timestamps on frames.
    launch_arguments_.AppendSwitch(switches::kEnableBenchmarking);

    // Some of the tests may launch http requests through JSON or AJAX, which
    // causes a security error (cross domain request) when the page is loaded
    // from the local file system (file://). The following switch fixes that
    // error.
    launch_arguments_.AppendSwitch(switches::kAllowFileAccessFromFiles);

    if (!HasFlag(kUseGpu)) {
      launch_arguments_.AppendSwitch(switches::kDisableAcceleratedCompositing);
      launch_arguments_.AppendSwitch(switches::kDisableExperimentalWebGL);
      launch_arguments_.AppendSwitch(switches::kDisableAccelerated2dCanvas);
    }

    if (HasFlag(kDisableVsync))
      launch_arguments_.AppendSwitch(switches::kDisableGpuVsync);

    UIPerfTest::SetUp();
  }

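  // Returns true if the trace contains a SwapBuffers event whose GLImpl
  // argument names a real GL implementation (desktop GL or EGL/GLES2), i.e.
  // the page actually rendered on the GPU.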
  bool DidRunOnGpu(const std::string& json_events) {
    using trace_analyzer::Query;
    using trace_analyzer::TraceAnalyzer;

    // Check the trace for GPU acceleration.
    scoped_ptr<TraceAnalyzer> analyzer(TraceAnalyzer::Create(json_events));

    gfx::GLImplementation gl_impl = gfx::kGLImplementationNone;
    const trace_analyzer::TraceEvent* gpu_event = analyzer->FindFirstOf(
        Query::EventNameIs("SwapBuffers") &&
        Query::EventHasNumberArg("GLImpl"));
    if (gpu_event)
      gl_impl = static_cast<gfx::GLImplementation>(
          gpu_event->GetKnownArgAsInt("GLImpl"));
    return (gl_impl == gfx::kGLImplementationDesktopGL ||
            gl_impl == gfx::kGLImplementationEGLGLES2);
  }

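  // Navigates to <name>/test.html, waits for the page to report
  // __initialized, optionally forces GPU compositing, runs the benchmark via
  // __start_all(), waits for it to finish, and reports the mean and sigma of
  // milliseconds-per-frame.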
  void RunTest(const std::string& name) {
#if defined(USE_AURA)
    if (!HasFlag(kUseGpu)) {
      printf("Test skipped, Aura always runs with GPU\n");
      return;
    }
#endif
#if defined(OS_WIN)
    if (HasFlag(kUseGpu) && HasFlag(kIsGpuCanvasTest) &&
        base::win::OSInfo::GetInstance()->version() == base::win::VERSION_XP) {
      // crbug.com/128208
      LOG(WARNING) << "Test skipped: GPU canvas tests do not run on XP.";
      return;
    }
#endif

    if (HasFlag(kUseGpu) && !IsGpuAvailable()) {
      printf("Test skipped: requires gpu. Pass --enable-gpu on the command "
             "line if use of GPU is desired.\n");
      return;
    }

    // Verify flag combinations.
    ASSERT_TRUE(HasFlag(kUseGpu) || !HasFlag(kForceGpuComposited));
    ASSERT_TRUE(!HasFlag(kUseGpu) || IsGpuAvailable());

    base::FilePath test_path = GetDataPath(name);
    ASSERT_TRUE(base::DirectoryExists(test_path))
        << "Missing test directory: " << test_path.value();

    test_path = test_path.Append(FILE_PATH_LITERAL("test.html"));

    scoped_refptr<TabProxy> tab(GetActiveTab());
    ASSERT_TRUE(tab.get());

    // TODO(jbates): remove this check when ref builds are updated.
    if (!HasFlag(kUseReferenceBuild))
      ASSERT_TRUE(automation()->BeginTracing("test_gpu"));

    if (HasFlag(kHasRedirect)) {
      // If the test file is known to contain an HTML redirect, we must block
      // until the second navigation is complete and reacquire the active tab
      // in order to avoid a race condition.
      // If the following assertion is triggered due to a timeout, it is
      // possible that the current test does not redirect and therefore should
      // not have the kHasRedirect flag turned on.
      ASSERT_EQ(AUTOMATION_MSG_NAVIGATION_SUCCESS,
                tab->NavigateToURLBlockUntilNavigationsComplete(
                    net::FilePathToFileURL(test_path), 2));
      tab = GetActiveTab();
      ASSERT_TRUE(tab.get());
    } else {
      ASSERT_EQ(AUTOMATION_MSG_NAVIGATION_SUCCESS,
                tab->NavigateToURL(net::FilePathToFileURL(test_path)));
    }

    // Block until initialization completes.
    // If the following assertion fails intermittently, it could be due to a
    // race condition caused by an HTML redirect. If that is the case, verify
    // that the kHasRedirect flag is enabled for the current test.
    ASSERT_TRUE(WaitUntilJavaScriptCondition(
        tab.get(),
        std::wstring(),
        L"window.domAutomationController.send(__initialized);",
        TestTimeouts::large_test_timeout()));

    if (HasFlag(kForceGpuComposited)) {
      ASSERT_TRUE(tab->NavigateToURLAsync(
          GURL("javascript:__make_body_composited();")));
    }

    // Start the tests.
    ASSERT_TRUE(tab->NavigateToURLAsync(GURL("javascript:__start_all();")));

    // Block until the tests complete.
    ASSERT_TRUE(WaitUntilJavaScriptCondition(
        tab.get(),
        std::wstring(),
        L"window.domAutomationController.send(!__running_all);",
        TestTimeouts::large_test_timeout()));

    // TODO(jbates): remove this check when ref builds are updated.
    if (!HasFlag(kUseReferenceBuild)) {
      std::string json_events;
      ASSERT_TRUE(automation()->EndTracing(&json_events));

      bool did_run_on_gpu = DidRunOnGpu(json_events);
      bool expect_gpu = HasFlag(kUseGpu);
      EXPECT_EQ(expect_gpu, did_run_on_gpu);
    }

    // Read out the results.
    std::wstring json;
    ASSERT_TRUE(tab->ExecuteAndExtractString(
        std::wstring(),
        L"window.domAutomationController.send("
        L"JSON.stringify(__calc_results_total()));",
        &json));

    std::map<std::string, std::string> results;
    ASSERT_TRUE(JsonDictionaryToMap(base::WideToUTF8(json), &results));

    ASSERT_TRUE(results.find("mean") != results.end());
    ASSERT_TRUE(results.find("sigma") != results.end());
    ASSERT_TRUE(results.find("gestures") != results.end());
    ASSERT_TRUE(results.find("means") != results.end());
    ASSERT_TRUE(results.find("sigmas") != results.end());

    std::string trace_name = "interval" + GetSuffixForTestFlags();
    printf("GESTURES %s: %s= [%s] [%s] [%s]\n", name.c_str(),
           trace_name.c_str(),
           results["gestures"].c_str(),
           results["means"].c_str(),
           results["sigmas"].c_str());

    std::string mean_and_error = results["mean"] + "," + results["sigma"];
    perf_test::PrintResultMeanAndError(name,
                                       std::string(),
                                       trace_name,
                                       mean_and_error,
                                       "milliseconds-per-frame",
                                       true);

    // Navigate back to the NTP so that we can quit without timing out during
    // the wait-for-idle stage in the test framework.
    EXPECT_EQ(tab->GoBack(), AUTOMATION_MSG_NAVIGATION_SUCCESS);
  }
};

// Must use a different class name to avoid test instantiation conflicts
// with FrameRateTest. An alias is good enough. The alias names must match
// the pattern FrameRate*Test* for them to get picked up by the test bots.
typedef FrameRateTest FrameRateCompositingTest;

// Tests that trigger compositing with a -webkit-transform: translateZ(0).
#define FRAME_RATE_TEST_WITH_AND_WITHOUT_ACCELERATED_COMPOSITING(content) \
  TEST_P(FrameRateCompositingTest, content) { \
    RunTest(#content); \
  }

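// The compositing tests run in four configurations: software rendering,
// forced GPU compositing, and the same two against the reference build.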
INSTANTIATE_TEST_CASE_P(, FrameRateCompositingTest, ::testing::Values(
    0,
    kUseGpu | kForceGpuComposited,
    kUseReferenceBuild,
    kUseReferenceBuild | kUseGpu | kForceGpuComposited));

FRAME_RATE_TEST_WITH_AND_WITHOUT_ACCELERATED_COMPOSITING(blank);
FRAME_RATE_TEST_WITH_AND_WITHOUT_ACCELERATED_COMPOSITING(googleblog);

typedef FrameRateTest FrameRateNoVsyncCanvasInternalTest;

// Tests for animated 2D canvas content with and without disabling vsync
#define INTERNAL_FRAME_RATE_TEST_CANVAS_WITH_AND_WITHOUT_NOVSYNC(content) \
  TEST_P(FrameRateNoVsyncCanvasInternalTest, content) { \
    RunTest(#content); \
  }

INSTANTIATE_TEST_CASE_P(, FrameRateNoVsyncCanvasInternalTest, ::testing::Values(
    kInternal | kHasRedirect,
    kIsGpuCanvasTest | kInternal | kHasRedirect | kUseGpu,
    kIsGpuCanvasTest | kInternal | kHasRedirect | kUseGpu | kDisableVsync,
    kUseReferenceBuild | kInternal | kHasRedirect,
    kIsGpuCanvasTest | kUseReferenceBuild | kInternal | kHasRedirect | kUseGpu,
    kIsGpuCanvasTest | kUseReferenceBuild | kInternal | kHasRedirect | kUseGpu |
        kDisableVsync));

INTERNAL_FRAME_RATE_TEST_CANVAS_WITH_AND_WITHOUT_NOVSYNC(fishbowl)

typedef FrameRateTest FrameRateGpuCanvasInternalTest;

// Tests for animated 2D canvas content that run only with GPU acceleration.
// These tests are run both with and without vsync.
#define INTERNAL_FRAME_RATE_TEST_CANVAS_GPU(content) \
  TEST_P(FrameRateGpuCanvasInternalTest, content) { \
    RunTest(#content); \
  }

INSTANTIATE_TEST_CASE_P(, FrameRateGpuCanvasInternalTest, ::testing::Values(
    kIsGpuCanvasTest | kInternal | kHasRedirect | kUseGpu,
    kIsGpuCanvasTest | kInternal | kHasRedirect | kUseGpu | kDisableVsync,
    kIsGpuCanvasTest | kUseReferenceBuild | kInternal | kHasRedirect | kUseGpu,
    kIsGpuCanvasTest | kUseReferenceBuild | kInternal | kHasRedirect | kUseGpu |
        kDisableVsync));

INTERNAL_FRAME_RATE_TEST_CANVAS_GPU(fireflies)
INTERNAL_FRAME_RATE_TEST_CANVAS_GPU(FishIE)
INTERNAL_FRAME_RATE_TEST_CANVAS_GPU(speedreading)

}  // namespace