| OLD | NEW |
| (Empty) | |
| 1 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. |
| 4 |
| 5 import json |
| 6 import os |
| 7 |
| 8 from gpu_tests import cloud_storage_integration_test_base |
| 9 from gpu_tests import maps_expectations |
| 10 from gpu_tests import path_util |
| 11 |
| 12 import py_utils |
| 13 from py_utils import cloud_storage |
| 14 |
# Directory containing the recorded WPR archives and the pixel-expectation
# JSON files used by this test.
data_path = os.path.join(
    path_util.GetChromiumSrcDir(),
    'content', 'test', 'gpu', 'page_sets', 'data')
| 17 |
class MapsIntegrationTest(
    cloud_storage_integration_test_base.CloudStorageIntegrationTestBase):
  """Google Maps pixel tests.

  Note: the WPR for this test was recorded from the smoothness.maps
  benchmark's similar page. The Maps team gave us a build of their test. The
  only modification to the test was to config.js, where the width and height
  query args were set to 800 by 600. The WPR was recorded with:

  tools/perf/record_wpr smoothness_maps --browser=system

  This produced maps_???.wpr and maps.json, which were copied from
  tools/perf/page_sets/data into content/test/gpu/page_sets/data.

  It's worth noting that telemetry no longer allows replaying a URL that
  refers to localhost. If the recording was created for the localhost URL, one
  can update the host name by running:

  web-page-replay/httparchive.py remap-host maps_004.wpr \
  localhost:10020 map-test

  (web-page-replay/ can be found in third_party/catapult/telemetry/third_party/)

  After updating the host name in the WPR archive, please remember to update
  the host URL in content/test/gpu/gpu_tests/maps_integration_test.py as well.

  To upload the maps_???.wpr to cloud storage, one would run:

  depot_tools/upload_to_google_storage.py --bucket=chromium-telemetry \
  maps_???.wpr

  The same sha1 file and json file need to be copied into both of these
  directories in any CL which updates the recording.
  """

  @classmethod
  def Name(cls):
    """Returns the test suite's name, as used on the command line."""
    return 'maps'

  @classmethod
  def _CreateExpectations(cls):
    """Returns the expectations object governing pass/fail/skip per bot."""
    return maps_expectations.MapsExpectations()

  @classmethod
  def setUpClass(cls):
    # BUG FIX: the super() arguments were reversed --
    # super(cls, MapsIntegrationTest) -- which only works by coincidence when
    # cls is exactly MapsIntegrationTest and raises TypeError for any
    # subclass. The conventional Python 2 form is super(ThisClass, cls).
    super(MapsIntegrationTest, cls).setUpClass()
    cls.SetBrowserOptions(cls._finder_options)
    # Serve the pre-recorded Maps page via Web Page Replay; the archive is
    # downloaded from the public cloud storage bucket if not present locally.
    cls.StartWPRServer(os.path.join(data_path, 'maps_004.wpr.updated'),
                       cloud_storage.PUBLIC_BUCKET)
    cls.StartBrowser()

  @classmethod
  def tearDownClass(cls):
    # Same argument-order fix as in setUpClass above.
    super(MapsIntegrationTest, cls).tearDownClass()
    cls.StopWPRServer()

  @classmethod
  def GenerateGpuTests(cls, options):
    """Yields (test_name, url, args) tuples describing each test case.

    args[0] is the name of the pixel-expectations JSON file under data_path.
    """
    cls.SetParsedCommandLineOptions(options)
    yield ('Maps_maps_004',
           'http://map-test/performance.html',
           'maps_004_expectations.json')

  def _ReadPixelExpectations(self, expectations_file):
    """Loads and returns the parsed pixel expectations JSON.

    Args:
      expectations_file: file name (not path) of the expectations JSON,
          resolved relative to data_path.
    """
    expectations_path = os.path.join(data_path, expectations_file)
    with open(expectations_path, 'r') as f:
      json_contents = json.load(f)
    return json_contents

  def _SpinWaitOnRAF(self, iterations, timeout=60):
    """Blocks until `iterations` requestAnimationFrame callbacks have fired.

    Used to make sure the page has produced several frames before a
    screenshot is captured.

    Args:
      iterations: number of rAF callbacks to wait for.
      timeout: seconds to wait before py_utils.WaitFor raises.
    """
    tab = self.tab
    waitScript = r"""
      window.__spinWaitOnRAFDone = false;
      var iterationsLeft = %d;

      function spin() {
        iterationsLeft--;
        if (iterationsLeft == 0) {
          window.__spinWaitOnRAFDone = true;
          return;
        }
        window.requestAnimationFrame(spin);
      }
      window.requestAnimationFrame(spin);
    """ % iterations

    def IsWaitComplete():
      return tab.EvaluateJavaScript('window.__spinWaitOnRAFDone')

    tab.ExecuteJavaScript(waitScript)
    py_utils.WaitFor(IsWaitComplete, timeout)

  def RunActualGpuTest(self, url, *args):
    """Navigates to the Maps test page, screenshots it, and validates pixels.

    Args:
      url: the replayed Maps test page URL.
      *args: args[0] is the pixel-expectations file name, as produced by
          GenerateGpuTests.
    """
    tab = self.tab
    pixel_expectations_file = args[0]
    action_runner = tab.action_runner
    action_runner.Navigate(url)
    action_runner.WaitForJavaScriptCondition(
        'window.testDone', timeout_in_seconds=180)

    # TODO(kbr): This should not be necessary, but it's not clear if the test
    # is failing on the bots in its absence. Remove once we can verify that
    # it's safe to do so.
    self._SpinWaitOnRAF(3)

    if not tab.screenshot_supported:
      self.fail('Browser does not support screenshot capture')
    screenshot = tab.Screenshot(5)
    if screenshot is None:
      self.fail('Could not capture screenshot')

    dpr = tab.EvaluateJavaScript('window.devicePixelRatio')
    # Single-argument parenthesized print is valid in both Python 2 and 3;
    # the original used a Python-2-only print statement.
    print('Maps\' devicePixelRatio is ' + str(dpr))
    # Even though the Maps test uses a fixed devicePixelRatio so that
    # it fetches all of the map tiles at the same resolution, on two
    # different devices with the same devicePixelRatio (a Retina
    # MacBook Pro and a Nexus 9), different scale factors of the final
    # screenshot are observed. Hack around this by specifying a scale
    # factor for these bots in the test expectations. This relies on
    # the test-machine-name argument being specified on the command
    # line.
    expected = self._ReadPixelExpectations(pixel_expectations_file)
    self._ValidateScreenshotSamples(tab, url, screenshot, expected, dpr)
| OLD | NEW |