#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

| 6 """Jerkiness performance test for video playback. |  | 
| 7 |  | 
| 8 Uses jerky tool, (http://go/jerky), to record a jerkiness metric for videos |  | 
| 9 sensitive to jerkiness. |  | 
| 10 |  | 
| 11 Jerkiness is defined as a percentage of the average on screen frame time by the |  | 
| 12 formula below.  Where smoothed_frame_time[i] represents a frame's on screen time |  | 
| 13 plus amortized measurement gap error (time taken to capture each frame). |  | 
| 14 |  | 
| 15 sqrt(average((avg_frame_time - smoothed_frame_time[i])^2, i=m..n)) |  | 
| 16 ------------------------------------------------------------------ |  | 
| 17                           avg_frame_time |  | 
| 18 |  | 
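For example, at a nominal 30 fps the average frame time is ~33.3 ms; if the
smoothed frame times alternate between 16.7 ms and 50 ms, the RMS deviation is
~16.7 ms and the jerkiness is roughly 50%.
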
Currently, only the Linux binaries are checked in for this test since we only
have a Linux performance bot.  The current binary is a custom build with some
fixes from veganjerky (http://go/veganjerky) for timing, waiting, and stdout
flushing.

TODO(dalecurtis): Move Jerky tool sources into the Chromium tree.

TODO(dalecurtis): Jerky tool uses a questionable method for determining torn
frames; determine whether it is actually worth recording.
"""

import glob
import logging
import os
import re
import subprocess
import tempfile

import pyauto_media
import pyauto
import pyauto_utils

# HTML test path; relative to src/chrome/test/data.
_TEST_HTML_PATH = os.path.join('media', 'html', 'media_jerky.html')

# Path under data path for test files.
_TEST_MEDIA_PATH = os.path.join('pyauto_private', 'media', 'birds')

# Path to Jerky tool executable.
_JERKY_PATH = os.path.join('pyauto_private', 'media', 'tools', 'jerky')

# Regular expression for extracting jerkiness percentage.  Sample line:
#   using 1:9 title 'test.log (35.36% jerky, 0 teared frames)' lw 2 with lines
_JERKY_LOG_REGEX = re.compile(
    r'\((\d{0,3}\.?\d{0,2})% jerky, (\d+) teared frames\)')

# Regular expression for extracting computed fps.  Sample line:
#   INFO: 33797 us per frame => 29.6 fps.
_JERKY_LOG_FPS_REGEX = re.compile(r' => (\d+\.\d+) fps')

# Minimum and maximum number of iterations for each test.  Due to timing issues
# the jerky tool may not always calculate the fps correctly.  When invalid
# results are detected, the test is rerun up to the maximum number of times set
# below.
_JERKY_ITERATIONS_MIN = 3
_JERKY_ITERATIONS_MAX = 10

# The media files used for testing.  Each entry represents a tuple of (filename,
# width, height, fps).  The width and height are used to create a calibration
# pattern for jerky tool.  The fps is used to ensure Jerky tool computed a valid
# result.
_TEST_VIDEOS = [('birds540.webm', 960, 540, 29.9)]


def GetTempFilename():
  """Returns an absolute path to an empty temp file."""
  f, path = tempfile.mkstemp(prefix='jerky_tmp')
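  # mkstemp() opens the file; close the OS-level handle immediately since
  # callers only need the path.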
  os.close(f)
  return path


class MediaJerkyPerfTest(pyauto.PyUITest):
  """PyAuto test container.  See file doc string for more information."""

  def StartJerkyCapture(self):
    """Starts jerky tool in capture mode and waits until it's ready to capture.

    Returns:
      A tuple of the jerky process and an absolute path to the capture log.
    """
    jerky_log = GetTempFilename()
    logging.debug('Logging data to %s', jerky_log)
    process = subprocess.Popen(
        [os.path.join(self.DataDir(), _JERKY_PATH),
         'capture', '--log', jerky_log],
        stdout=subprocess.PIPE)

    # Set the jerky tool process to soft-realtime w/ round-robin scheduling.
    subprocess.check_call(['sudo', 'chrt', '-r', '-p', str(process.pid)])
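    # Note: this assumes the bot allows passwordless sudo for chrt;
    # check_call() raises if the command fails.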

    # Wait for jerky tool to signal that it's ready to capture.
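    # readline() returns an empty string at EOF, so the loop also ends (and
    # the test fails below) if jerky exits without becoming ready.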
    line = True
    while line:
      line = process.stdout.readline()
      if 'Waiting for calibration pattern to disappear' in line:
        return process, jerky_log
    self.fail('Failed to launch Jerky tool.')

  def AnalyzeJerkyCapture(self, jerky_log):
    """Run jerky analyze on the specified log and return various metrics.

    Once analyze has completed, the jerky_log and associated outputs will be
    removed.

    Args:
      jerky_log: Absolute path to the capture log.

    Returns:
      Tuple of fps, jerkiness, and torn frames.
    """
    results_log_base = GetTempFilename()
    process = subprocess.Popen(
        [os.path.join(self.DataDir(), _JERKY_PATH),
         'analyze', '--ref', jerky_log, '--out', results_log_base],
        stdout=subprocess.PIPE)

    # Wait for process to end w/o risking deadlock.
    stdout = process.communicate()[0]
    self.assertEquals(process.returncode, 0)

    # Scrape out the calculated FPS.
    fps_match = None
    for line in stdout.splitlines():
      fps_match = _JERKY_LOG_FPS_REGEX.search(line)
      if fps_match:
        break

    # Open *.error.gnuplot and scrape out jerkiness.
    jerky_match = None
    with open('%s.error.gnuplot' % results_log_base) as results:
      for line in results:
        jerky_match = _JERKY_LOG_REGEX.search(line)
        if jerky_match:
          break

    # Clean up all the temp and results files jerky spits out.
    for log in glob.glob('%s*' % results_log_base) + [jerky_log]:
      os.unlink(log)

    if fps_match and jerky_match:
      return (float(fps_match.group(1)), float(jerky_match.group(1)),
              int(jerky_match.group(2)))
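    # Scraping failed; return None values so the caller can discard the run.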
    return None, None, None

  def testMediaJerkyPerformance(self):
    """Launches Jerky tool and records jerkiness for HTML5 videos.

    For each video, the test starts up jerky tool, then plays the video until
    Jerky tool has collected enough information.  Next the capture log is
    analyzed using Jerky's analyze command.  If the computed fps matches the
    expected fps, the jerkiness metric is recorded.

    The test will run up to _JERKY_ITERATIONS_MAX times in an attempt to get at
    least _JERKY_ITERATIONS_MIN valid values.  The results are recorded under
    the 'jerkiness' variable for graphing on the bots.
    """
    self.NavigateToURL(self.GetFileURLForDataPath(_TEST_HTML_PATH))

    # Xvfb on the bots is restricted to 1024x768 at present.  Ensure we're
    # using all of the real estate we can.  Jerky tool needs a clear picture
    # of every frame, so we can't clip the video in any way.
    self.SetWindowDimensions(0, 0, 1024, 768)

    for name, width, height, expected_fps in _TEST_VIDEOS:
      jerkiness = []
      torn_frames = []
      file_url = self.GetFileURLForDataPath(
          os.path.join(_TEST_MEDIA_PATH, name))

      # Initialize the calibration area for Jerky tool.
      self.assertTrue(self.ExecuteJavascript(
          'initializeTest(%d, %d);' % (width, height)))

      runs_left = _JERKY_ITERATIONS_MIN
      runs_total = 0
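      # Rerun until we have _JERKY_ITERATIONS_MIN valid samples or have used
      # up all _JERKY_ITERATIONS_MAX attempts.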
      while runs_left > 0 and runs_total < _JERKY_ITERATIONS_MAX:
        runs_total += 1
        logging.info('Running Jerky perf test #%d for %s.', runs_total, name)

        # Start up Jerky tool in capture mode.
        jerky_process, jerky_log = self.StartJerkyCapture()

        # Start playback of the test video.
        self.assertTrue(self.ExecuteJavascript("startTest('%s');" % file_url))

        # Wait for jerky tool to finish if it hasn't already.
        self.assertTrue(jerky_process.wait() == 0)

        # Stop playback of the test video so the next run can cleanly find the
        # calibration zone.
        self.assertTrue(self.ExecuteJavascript('stopTest();'))

        # Analyze the results.
        jerky_fps, jerky_percent, jerky_torn_frames = self.AnalyzeJerkyCapture(
            jerky_log)
        if (jerky_fps is None or jerky_percent is None or
            jerky_torn_frames is None):
          logging.error('No metrics recorded for this run.')
          continue

        # Ensure the results for this run are valid.
        if jerky_fps != expected_fps:
          logging.error(
              'Invalid fps detected (actual: %f, expected: %f, jerkiness: %f). '
              'Discarding results for this run.', jerky_fps, expected_fps,
              jerky_percent)
          continue

        jerkiness.append(jerky_percent)
        torn_frames.append(jerky_torn_frames)
        runs_left -= 1

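      # Report all valid jerkiness samples so the bots can graph the results.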
      pyauto_utils.PrintPerfResult('jerkiness', name, jerkiness, '%')


if __name__ == '__main__':
  pyauto_media.Main()