Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(108)

Side by Side Diff: gm/rebaseline_server/download_actuals.py

Issue 688353003: Add support for rebaselining from trybots. (Closed) Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: Created 6 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | gm/rebaseline_server/server.py » ('j') | gm/rebaseline_server/server.py » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/python 1 #!/usr/bin/python
2 2
3 """ 3 """
4 Copyright 2014 Google Inc. 4 Copyright 2014 Google Inc.
5 5
6 Use of this source code is governed by a BSD-style license that can be 6 Use of this source code is governed by a BSD-style license that can be
7 found in the LICENSE file. 7 found in the LICENSE file.
8 8
9 Download actual GM results for a particular builder. 9 Download actual GM results for a particular builder.
10 """ 10 """
11 11
12 # System-level imports 12 # System-level imports
13 import httplib
14 import logging
13 import optparse 15 import optparse
14 import os 16 import os
15 import posixpath 17 import posixpath
16 import re 18 import re
19 import traceback
17 import urllib2 20 import urllib2
18 21
19 # Must fix up PYTHONPATH before importing from within Skia 22 # Must fix up PYTHONPATH before importing from within Skia
20 import rs_fixpypath # pylint: disable=W0611 23 import rs_fixpypath # pylint: disable=W0611
21 24
22 # Imports from within Skia 25 # Imports from within Skia
23 from py.utils import gs_utils 26 from py.utils import gs_utils
24 from py.utils import url_utils 27 from py.utils import url_utils
25 import buildbot_globals 28 import buildbot_globals
26 import gm_json 29 import gm_json
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
def get_builders_list(summaries_bucket=GM_SUMMARIES_BUCKET):
  """ Returns the list of builders we have actual results for.

  Args:
    summaries_bucket: Google Cloud Storage bucket containing the summary
        JSON files
  """
  # Use the summaries_bucket argument; previously the module-level constant
  # GM_SUMMARIES_BUCKET was used directly, silently ignoring the parameter.
  dirs, _ = gs_utils.GSUtils().list_bucket_contents(bucket=summaries_bucket)
  return dirs
98 101
def get_rietveld_actuals(issue, json_filename):
  """ Returns the actuals for the given rietveld issue's tryjobs.
  {non_try_builder:string -> [ bucket:string, path:string, generation:string ]}

  e.g.
  {'Test-Android-Xoom-Tegra2-Arm7-Release': (
      'chromium-skia-gm-summaries',
      'Test-Android-Xoom-Tegra2-Arm7-Release-Trybot/actual-results.json',
      '1415041165535000')}

  Args:
    issue: the rietveld issue number to query.
    json_filename: basename of the JSON summary file each tryjob uploads,
        e.g. 'actual-results.json'.
  """
  result = dict()
  # Matches the "Created: gs://bucket/path#generation" line written to the
  # 'Upload GM Results' buildbot step's stdio log.
  json_filename_re = re.compile(
      r'^Created: gs://([^/]+)/((?:[^/]+/)+%s)#(\d+)$'
      % re.escape(json_filename), re.MULTILINE)
  codereview_api_url = 'https://codereview.chromium.org/api'
  upload_gm_results_step_url = '/steps/Upload GM Results/logs/stdio'

  logging.info('Fetching issue %s ...' % (issue,))
  json_issue_url = '%s/%s' % (codereview_api_url, issue)
  json_issue_data = urllib2.urlopen(json_issue_url).read()
  issue_dict = gm_json.LoadFromString(json_issue_data)

  patchsets = issue_dict.get("patchsets", [])
  # Check for the empty case BEFORE indexing: patchsets[-1] on an empty list
  # would raise IndexError rather than reach the warning below.
  if not patchsets:
    logging.warning('No patchsets for rietveld issue %s.' % (issue,))
    return result
  patchset = patchsets[-1]

  logging.info('Fetching issue %s patch %s...' % (issue, patchset))
  json_patchset_url = '%s/%s/%s' % (codereview_api_url, issue, patchset)
  json_patchset_data = urllib2.urlopen(json_patchset_url).read()
  patchset_dict = gm_json.LoadFromString(json_patchset_data)

  # try_job_results is ordered reverse chronologically, so the first result
  # recorded for a builder wins and older runs are skipped via the
  # 'non_try_builder in result' check below.
  try_job_results = patchset_dict.get('try_job_results', [])
  for try_job_result in try_job_results:
    builder = try_job_result.get('builder', '<bad builder>')
    non_try_builder = builder
    if non_try_builder.endswith('-Trybot'):
      non_try_builder = non_try_builder[:-len('-Trybot')]
    if non_try_builder in result:
      continue

    logging.info('Fetching issue %s patch %s try %s...' %
                 (issue, patchset, builder))
    build_url = try_job_result.get('url', '<bad url>')
    gm_upload_output_url = build_url + urllib2.quote(upload_gm_results_step_url)
    logging.info('Fetching %s ...' % (gm_upload_output_url,))

    # Tryjobs might not produce the step, but don't let that fail everything.
    gm_upload_output = None
    try:
      gm_upload_output = urllib2.urlopen(gm_upload_output_url).read()
    except urllib2.HTTPError as e:
      logging.warning('HTTPError: ' + str(e.code))
    except urllib2.URLError as e:
      logging.warning('URLError: ' + str(e.reason))
    except httplib.HTTPException:
      logging.warning('HTTPException')
    except Exception:
      # Best-effort fetch: log the full traceback and move on.
      logging.warning('generic exception: ' + traceback.format_exc())
    if not gm_upload_output:
      logging.warning('Could not fetch %s .' % (gm_upload_output_url,))
      continue

    json_filename_match = json_filename_re.search(gm_upload_output)
    if json_filename_match:
      logging.info('Found issue %s patch %s try %s result gs://%s/%s#%s .' %
                   (issue, patchset, non_try_builder,
                    json_filename_match.group(1),
                    json_filename_match.group(2),
                    json_filename_match.group(3)))
      result[non_try_builder] = [json_filename_match.group(1),
                                 json_filename_match.group(2),
                                 json_filename_match.group(3)]
    else:
      logging.warning('Did not find %s for issue %s patch %s try %s.' %
                      (json_filename, issue, patchset, builder))

  return result
99 181
100 def main(): 182 def main():
101 parser = optparse.OptionParser() 183 parser = optparse.OptionParser()
102 required_params = [] 184 required_params = []
103 parser.add_option('--actuals-base-url', 185 parser.add_option('--actuals-base-url',
104 action='store', type='string', 186 action='store', type='string',
105 default=DEFAULT_ACTUALS_BASE_URL, 187 default=DEFAULT_ACTUALS_BASE_URL,
106 help=('Base URL from which to read files containing JSON ' 188 help=('Base URL from which to read files containing JSON '
107 'summaries of actual GM results; defaults to ' 189 'summaries of actual GM results; defaults to '
108 '"%default".')) 190 '"%default".'))
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
145 remaining_args) 227 remaining_args)
146 228
147 downloader = Download(actuals_base_url=params.actuals_base_url) 229 downloader = Download(actuals_base_url=params.actuals_base_url)
148 downloader.fetch(builder_name=params.builder, 230 downloader.fetch(builder_name=params.builder,
149 dest_dir=params.dest_dir) 231 dest_dir=params.dest_dir)
150 232
151 233
152 234
153 if __name__ == '__main__': 235 if __name__ == '__main__':
154 main() 236 main()
OLDNEW
« no previous file with comments | « no previous file | gm/rebaseline_server/server.py » ('j') | gm/rebaseline_server/server.py » ('J')

Powered by Google App Engine
This is Rietveld 408576698