Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3160)

Unified Diff: content/test/gpu/gpu_tests/maps_integration_test.py

Issue 2618983004: Port Maps test to browser_test_runner harness. (Closed)
Patch Set: Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « content/test/gpu/gpu_tests/maps.py ('k') | content/test/gpu/run_gpu_test.py » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: content/test/gpu/gpu_tests/maps_integration_test.py
diff --git a/content/test/gpu/gpu_tests/maps_integration_test.py b/content/test/gpu/gpu_tests/maps_integration_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3fb80b494bcef2578f629c0c3058a12840c6036
--- /dev/null
+++ b/content/test/gpu/gpu_tests/maps_integration_test.py
@@ -0,0 +1,140 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+
+from gpu_tests import cloud_storage_integration_test_base
+from gpu_tests import maps_expectations
+from gpu_tests import path_util
+
+import py_utils
+from py_utils import cloud_storage
+
# Directory holding the recorded WPR archive (maps_004.wpr.updated) and the
# pixel-expectation JSON files consumed by MapsIntegrationTest below.
data_path = os.path.join(path_util.GetChromiumSrcDir(), 'content', 'test',
                         'gpu', 'page_sets', 'data')
+
class MapsIntegrationTest(
    cloud_storage_integration_test_base.CloudStorageIntegrationTestBase):
  """Google Maps pixel tests.

  Note: the WPR for this test was recorded from the smoothness.maps
  benchmark's similar page. The Maps team gave us a build of their test. The
  only modification to the test was to config.js, where the width and height
  query args were set to 800 by 600. The WPR was recorded with:

  tools/perf/record_wpr smoothness_maps --browser=system

  This produced maps_???.wpr and maps.json, which were copied from
  tools/perf/page_sets/data into content/test/gpu/page_sets/data.

  It's worth noting that telemetry no longer allows replaying a URL that
  refers to localhost. If the recording was created for the localhost URL,
  one can update the host name by running:

  web-page-replay/httparchive.py remap-host maps_004.wpr \
      localhost:10020 map-test

  (web-page-replay/ can be found in third_party/catapult/telemetry/third_party/)

  After updating the host name in the WPR archive, please remember to update
  the host URL in content/test/gpu/gpu_tests/maps_integration_test.py as well.

  To upload the maps_???.wpr to cloud storage, one would run:

  depot_tools/upload_to_google_storage.py --bucket=chromium-telemetry \
      maps_???.wpr

  The same sha1 file and json file need to be copied into both of these
  directories in any CL which updates the recording.
  """

  @classmethod
  def Name(cls):
    """Returns the short name used to select this suite on the command line."""
    return 'maps'

  @classmethod
  def _CreateExpectations(cls):
    # Expectations object describing configurations known to fail or flake.
    return maps_expectations.MapsExpectations()

  @classmethod
  def setUpClass(cls):
    # BUG FIX: the original called super(cls, MapsIntegrationTest).setUpClass()
    # with the arguments reversed. That form only works by coincidence when
    # cls is exactly MapsIntegrationTest and raises TypeError for any
    # subclass; the canonical Python 2 spelling is super(ThisClass, cls).
    super(MapsIntegrationTest, cls).setUpClass()
    cls.SetBrowserOptions(cls._finder_options)
    # Replay the recorded page from the WPR archive; the archive itself is
    # fetched from the public cloud storage bucket on demand.
    cls.StartWPRServer(os.path.join(data_path, 'maps_004.wpr.updated'),
                       cloud_storage.PUBLIC_BUCKET)
    cls.StartBrowser()

  @classmethod
  def tearDownClass(cls):
    # BUG FIX: same reversed super() arguments as in setUpClass; corrected to
    # super(MapsIntegrationTest, cls).
    super(MapsIntegrationTest, cls).tearDownClass()
    cls.StopWPRServer()

  @classmethod
  def GenerateGpuTests(cls, options):
    """Yields (test_name, url, args) tuples; args[0] is the expectations file."""
    cls.SetParsedCommandLineOptions(options)
    yield ('Maps_maps_004',
           'http://map-test/performance.html',
           ('maps_004_expectations.json'))

  def _ReadPixelExpectations(self, expectations_file):
    """Loads and returns the pixel-expectation JSON stored under data_path."""
    expectations_path = os.path.join(data_path, expectations_file)
    with open(expectations_path, 'r') as f:
      json_contents = json.load(f)
    return json_contents

  def _SpinWaitOnRAF(self, iterations, timeout=60):
    """Blocks until |iterations| requestAnimationFrame callbacks have fired.

    Gives the page a few frames to settle before a screenshot is taken.
    Raises py_utils' timeout error if the callbacks do not complete within
    |timeout| seconds.
    """
    tab = self.tab
    waitScript = r"""
      window.__spinWaitOnRAFDone = false;
      var iterationsLeft = %d;

      function spin() {
        iterationsLeft--;
        if (iterationsLeft == 0) {
          window.__spinWaitOnRAFDone = true;
          return;
        }
        window.requestAnimationFrame(spin);
      }
      window.requestAnimationFrame(spin);
    """ % iterations

    def IsWaitComplete():
      return tab.EvaluateJavaScript('window.__spinWaitOnRAFDone')

    tab.ExecuteJavaScript(waitScript)
    py_utils.WaitFor(IsWaitComplete, timeout)

  def RunActualGpuTest(self, url, *args):
    """Navigates to |url|, captures a screenshot and validates pixel samples.

    args[0] is the name of the pixel-expectations JSON file (see
    GenerateGpuTests). Fails the test if screenshots are unsupported or the
    capture returns nothing.
    """
    tab = self.tab
    pixel_expectations_file = args[0]
    action_runner = tab.action_runner
    action_runner.Navigate(url)
    # The page sets window.testDone when the map has finished rendering.
    action_runner.WaitForJavaScriptCondition(
        'window.testDone', timeout_in_seconds=180)

    # TODO(kbr): This should not be necessary, but it's not clear if the test
    # is failing on the bots in its absence. Remove once we can verify that
    # it's safe to do so.
    self._SpinWaitOnRAF(3)

    if not tab.screenshot_supported:
      self.fail('Browser does not support screenshot capture')
    screenshot = tab.Screenshot(5)
    if screenshot is None:
      self.fail('Could not capture screenshot')

    dpr = tab.EvaluateJavaScript('window.devicePixelRatio')
    # Parenthesized print works identically under Python 2 and Python 3 for a
    # single argument; the original bare print statement was Python-2-only.
    print('Maps\' devicePixelRatio is ' + str(dpr))
    # Even though the Maps test uses a fixed devicePixelRatio so that
    # it fetches all of the map tiles at the same resolution, on two
    # different devices with the same devicePixelRatio (a Retina
    # MacBook Pro and a Nexus 9), different scale factors of the final
    # screenshot are observed. Hack around this by specifying a scale
    # factor for these bots in the test expectations. This relies on
    # the test-machine-name argument being specified on the command
    # line.
    expected = self._ReadPixelExpectations(pixel_expectations_file)
    self._ValidateScreenshotSamples(tab, url, screenshot, expected, dpr)
« no previous file with comments | « content/test/gpu/gpu_tests/maps.py ('k') | content/test/gpu/run_gpu_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698