OLD | NEW |
(Empty) | |
| 1 #!/usr/bin/python |
| 2 |
| 3 """ |
| 4 Copyright 2014 Google Inc. |
| 5 |
| 6 Use of this source code is governed by a BSD-style license that can be |
| 7 found in the LICENSE file. |
| 8 |
| 9 Download actual GM results for a particular builder. |
| 10 """ |
| 11 |
| 12 # System-level imports |
| 13 import httplib |
| 14 import logging |
| 15 import optparse |
| 16 import os |
| 17 import posixpath |
| 18 import re |
| 19 import urllib2 |
| 20 |
| 21 # Must fix up PYTHONPATH before importing from within Skia |
| 22 import rs_fixpypath # pylint: disable=W0611 |
| 23 |
| 24 # Imports from within Skia |
| 25 from py.utils import gs_utils |
| 26 from py.utils import url_utils |
| 27 import buildbot_globals |
| 28 import gm_json |
| 29 |
| 30 |
# Google Cloud Storage bucket holding per-builder actual-results.json files;
# the name comes from the buildbot global config, not hard-coded here.
GM_SUMMARIES_BUCKET = buildbot_globals.Get('gm_summaries_bucket')
# Public HTTP front-end for that bucket; used as the default root when
# fetching summaries without gsutil credentials.
DEFAULT_ACTUALS_BASE_URL = (
    'http://storage.googleapis.com/%s' % GM_SUMMARIES_BUCKET)
# Summary filename each builder writes within its directory.
DEFAULT_JSON_FILENAME = 'actual-results.json'
| 35 |
| 36 |
| 37 class Download(object): |
| 38 |
| 39 def __init__(self, actuals_base_url=DEFAULT_ACTUALS_BASE_URL, |
| 40 json_filename=DEFAULT_JSON_FILENAME, |
| 41 gm_actuals_root_url=gm_json.GM_ACTUALS_ROOT_HTTP_URL): |
| 42 """ |
| 43 Args: |
| 44 actuals_base_url: URL pointing at the root directory |
| 45 containing all actual-results.json files, e.g., |
| 46 http://domain.name/path/to/dir OR |
| 47 file:///absolute/path/to/localdir |
| 48 json_filename: The JSON filename to read from within each directory. |
| 49 gm_actuals_root_url: Base URL under which the actually-generated-by-bots |
| 50 GM images are stored. |
| 51 """ |
| 52 self._actuals_base_url = actuals_base_url |
| 53 self._json_filename = json_filename |
| 54 self._gm_actuals_root_url = gm_actuals_root_url |
| 55 self._image_filename_re = re.compile(gm_json.IMAGE_FILENAME_PATTERN) |
| 56 |
| 57 def fetch(self, builder_name, dest_dir): |
| 58 """ Downloads actual GM results for a particular builder. |
| 59 |
| 60 Args: |
| 61 builder_name: which builder to download results of |
| 62 dest_dir: path to directory where the image files will be written; |
| 63 if the directory does not exist yet, it will be created |
| 64 |
| 65 TODO(epoger): Display progress info. Right now, it can take a long time |
| 66 to download all of the results, and there is no indication of progress. |
| 67 |
| 68 TODO(epoger): Download multiple images in parallel to speed things up. |
| 69 """ |
| 70 json_url = posixpath.join(self._actuals_base_url, builder_name, |
| 71 self._json_filename) |
| 72 json_contents = urllib2.urlopen(json_url).read() |
| 73 results_dict = gm_json.LoadFromString(json_contents) |
| 74 |
| 75 actual_results_dict = results_dict[gm_json.JSONKEY_ACTUALRESULTS] |
| 76 for result_type in sorted(actual_results_dict.keys()): |
| 77 results_of_this_type = actual_results_dict[result_type] |
| 78 if not results_of_this_type: |
| 79 continue |
| 80 for image_name in sorted(results_of_this_type.keys()): |
| 81 (test, config) = self._image_filename_re.match(image_name).groups() |
| 82 (hash_type, hash_digest) = results_of_this_type[image_name] |
| 83 source_url = gm_json.CreateGmActualUrl( |
| 84 test_name=test, hash_type=hash_type, hash_digest=hash_digest, |
| 85 gm_actuals_root_url=self._gm_actuals_root_url) |
| 86 dest_path = os.path.join(dest_dir, config, test + '.png') |
| 87 url_utils.copy_contents(source_url=source_url, dest_path=dest_path, |
| 88 create_subdirs_if_needed=True) |
| 89 |
| 90 |
def get_builders_list(summaries_bucket=GM_SUMMARIES_BUCKET):
  """ Returns the list of builders we have actual results for.

  Args:
    summaries_bucket: Google Cloud Storage bucket containing the summary
        JSON files
  """
  # Bug fix: list the caller-supplied bucket.  Previously this passed the
  # module-level GM_SUMMARIES_BUCKET, silently ignoring the parameter.
  dirs, _ = gs_utils.GSUtils().list_bucket_contents(bucket=summaries_bucket)
  return dirs
| 100 |
| 101 |
class ActualLocation(object):
  """Value object recording where one builder's actuals JSON lives.

  Holds a Google Storage bucket name, the file path within that bucket,
  and the file's generation number (may be None when unknown, e.g. for
  tip-of-tree summaries).
  """

  def __init__(self, bucket, path, generation):
    # Simple data holder; no validation is performed.
    self.bucket, self.path, self.generation = bucket, path, generation
| 107 |
| 108 |
class TipOfTreeActuals(object):
  """Locates the most recent actuals summaries in the GM summaries bucket."""

  def __init__(self, summaries_bucket=GM_SUMMARIES_BUCKET,
               json_filename=DEFAULT_JSON_FILENAME):
    """
    Args:
      summaries_bucket: URL pointing at the root directory
          containing all actual-results.json files, e.g.,
          http://domain.name/path/to/dir OR
          file:///absolute/path/to/localdir
      json_filename: The JSON filename to read from within each directory.
    """
    self._json_filename = json_filename
    self._summaries_bucket = summaries_bucket

  def description(self):
    """Returns a one-line human-readable description of this source."""
    return 'gm_summaries_bucket %s' % (self._summaries_bucket,)

  def get_builders(self):
    """ Returns the list of builders we have actual results for.
    {builder:string -> ActualLocation}
    """
    # Tip-of-tree summaries have no pinned generation, hence None.
    return dict(
        (builder, ActualLocation(self._summaries_bucket,
                                 "%s/%s" % (builder, self._json_filename),
                                 None))
        for builder in get_builders_list(self._summaries_bucket))
| 138 |
| 139 |
class RietveldIssueActuals(object):
  """Locates actuals uploaded by the tryjobs of a rietveld code-review issue."""

  def __init__(self, issue, json_filename=DEFAULT_JSON_FILENAME):
    """
    Args:
      issue: The rietveld issue from which to obtain actuals.
      json_filename: The JSON filename to read from within each directory.
    """
    self._issue = issue
    self._json_filename = json_filename

  def description(self):
    """Returns a one-line human-readable description of this source."""
    return 'rietveld issue %s' % (self._issue,)

  def get_builders(self):
    """ Returns the actuals for the given rietveld issue's tryjobs.
    {builder:string -> ActualLocation}

    e.g.
    {'Test-Android-Xoom-Tegra2-Arm7-Release': (
        'chromium-skia-gm-summaries',
        'Test-Android-Xoom-Tegra2-Arm7-Release-Trybot/actual-results.json',
        '1415041165535000')}
    """
    result = dict()
    # Matches the "Created: gs://bucket/path#generation" line that the
    # 'Upload GM Results' build step writes to its stdio log.
    json_filename_re = re.compile(
        r'Created: gs://([^/]+)/((?:[^/]+/)+%s)#(\d+)'
        % re.escape(self._json_filename))
    codereview_api_url = 'https://codereview.chromium.org/api'
    upload_gm_step_url = '/steps/Upload GM Results/logs/stdio'

    logging.info('Fetching issue %s ...' % (self._issue,))
    json_issue_url = '%s/%s' % (codereview_api_url, self._issue)
    json_issue_data = urllib2.urlopen(json_issue_url).read()
    issue_dict = gm_json.LoadFromString(json_issue_data)

    patchsets = issue_dict.get("patchsets", [])
    # Bug fix: guard BEFORE indexing.  Previously patchsets[-1] raised
    # IndexError on an issue with no patchsets, making the warning below
    # unreachable.
    if not patchsets:
      logging.warning('No patchsets for rietveld issue %s.' % (self._issue,))
      return result
    # Only the most recent patchset's tryjobs are considered.
    patchset = patchsets[-1]

    logging.info('Fetching issue %s patch %s...' % (self._issue, patchset))
    json_patchset_url = '%s/%s/%s' % (codereview_api_url, self._issue, patchset)
    json_patchset_data = urllib2.urlopen(json_patchset_url).read()
    patchset_dict = gm_json.LoadFromString(json_patchset_data)

    # try_job_results is ordered reverse chronologically
    try_job_results = patchset_dict.get('try_job_results', [])
    for try_job_result in try_job_results:
      try_builder = try_job_result.get('builder', '<bad builder>')
      if not try_builder.endswith('-Trybot'):
        logging.warning('Builder %s is not a trybot?' % (try_builder,))
        continue
      builder = try_builder[:-len('-Trybot')]
      # Reverse-chronological order means the first hit per builder is the
      # newest run; skip older duplicates.
      if builder in result:
        continue

      logging.info('Fetching issue %s patch %s try %s...' %
                   (self._issue, patchset, try_builder))
      build_url = try_job_result.get('url', '<bad url>')
      # NOTE(review): rietveld appears to report url=None (key present,
      # value null) for tryjobs that have not started, so the '<bad url>'
      # default above never triggers this branch -- confirm against the
      # codereview API.
      if build_url is None:
        logging.warning('Builder %s has not started.' % (try_builder,))
        continue
      gm_upload_output_url = build_url + urllib2.quote(upload_gm_step_url)
      logging.info('Fetching %s ...' % (gm_upload_output_url,))

      # Tryjobs might not produce the step, but don't let that fail everything.
      gm_upload_output = None
      try:
        gm_upload_output = urllib2.urlopen(gm_upload_output_url).read()
      except (urllib2.HTTPError, urllib2.URLError, httplib.HTTPException) as e:
        logging.warning(e)
      except Exception:
        logging.exception('Error opening %s .' % (gm_upload_output_url,))
      if not gm_upload_output:
        logging.warning('Could not fetch %s .' % (gm_upload_output_url,))
        continue

      json_filename_match = json_filename_re.search(gm_upload_output)
      if json_filename_match:
        logging.info('Found issue %s patch %s try %s result gs://%s/%s#%s .' %
                     (self._issue, patchset, builder,
                      json_filename_match.group(1),
                      json_filename_match.group(2),
                      json_filename_match.group(3)))
        result[builder] = ActualLocation(json_filename_match.group(1),
                                         json_filename_match.group(2),
                                         json_filename_match.group(3))
      else:
        logging.warning('Did not find %s for issue %s patch %s try %s.' %
                        (self._json_filename, self._issue, patchset,
                         try_builder))

    return result
| 233 |
| 234 |
| 235 def main(): |
| 236 parser = optparse.OptionParser() |
| 237 required_params = [] |
| 238 parser.add_option('--actuals-base-url', |
| 239 action='store', type='string', |
| 240 default=DEFAULT_ACTUALS_BASE_URL, |
| 241 help=('Base URL from which to read files containing JSON ' |
| 242 'summaries of actual GM results; defaults to ' |
| 243 '"%default".')) |
| 244 required_params.append('builder') |
| 245 # TODO(epoger): Before https://codereview.chromium.org/309653005 , when this |
| 246 # tool downloaded the JSON summaries from skia-autogen, it had the ability |
| 247 # to get results as of a specific revision number. We should add similar |
| 248 # functionality when retrieving the summaries from Google Storage. |
| 249 parser.add_option('--builder', |
| 250 action='store', type='string', |
| 251 help=('REQUIRED: Which builder to download results for. ' |
| 252 'To see a list of builders, run with the ' |
| 253 '--list-builders option set.')) |
| 254 required_params.append('dest_dir') |
| 255 parser.add_option('--dest-dir', |
| 256 action='store', type='string', |
| 257 help=('REQUIRED: Directory where all images should be ' |
| 258 'written. If this directory does not exist yet, it ' |
| 259 'will be created.')) |
| 260 parser.add_option('--json-filename', |
| 261 action='store', type='string', |
| 262 default=DEFAULT_JSON_FILENAME, |
| 263 help=('JSON summary filename to read for each builder; ' |
| 264 'defaults to "%default".')) |
| 265 parser.add_option('--list-builders', action='store_true', |
| 266 help=('List all available builders.')) |
| 267 (params, remaining_args) = parser.parse_args() |
| 268 |
| 269 if params.list_builders: |
| 270 print '\n'.join(get_builders_list()) |
| 271 return |
| 272 |
| 273 # Make sure all required options were set, |
| 274 # and that there were no items left over in the command line. |
| 275 for required_param in required_params: |
| 276 if not getattr(params, required_param): |
| 277 raise Exception('required option \'%s\' was not set' % required_param) |
| 278 if len(remaining_args) is not 0: |
| 279 raise Exception('extra items specified in the command line: %s' % |
| 280 remaining_args) |
| 281 |
| 282 downloader = Download(actuals_base_url=params.actuals_base_url) |
| 283 downloader.fetch(builder_name=params.builder, |
| 284 dest_dir=params.dest_dir) |
| 285 |
| 286 |
| 287 |
# Entry point when run as a script; importing this module has no side
# effects beyond constant definitions.
if __name__ == '__main__':
  main()
OLD | NEW |