OLD | NEW |
---|---|
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 | 2 |
3 """ | 3 """ |
4 Copyright 2014 Google Inc. | 4 Copyright 2014 Google Inc. |
5 | 5 |
6 Use of this source code is governed by a BSD-style license that can be | 6 Use of this source code is governed by a BSD-style license that can be |
7 found in the LICENSE file. | 7 found in the LICENSE file. |
8 | 8 |
9 Download actual GM results for a particular builder. | 9 Download actual GM results for a particular builder. |
10 """ | 10 """ |
11 | 11 |
12 # System-level imports | 12 # System-level imports |
13 import httplib | |
14 import logging | |
13 import optparse | 15 import optparse |
14 import os | 16 import os |
15 import posixpath | 17 import posixpath |
16 import re | 18 import re |
17 import urllib2 | 19 import urllib2 |
18 | 20 |
19 # Must fix up PYTHONPATH before importing from within Skia | 21 # Must fix up PYTHONPATH before importing from within Skia |
20 import rs_fixpypath # pylint: disable=W0611 | 22 import rs_fixpypath # pylint: disable=W0611 |
21 | 23 |
22 # Imports from within Skia | 24 # Imports from within Skia |
(...skipping 67 matching lines...) | |
90 """ Returns the list of builders we have actual results for. | 92 """ Returns the list of builders we have actual results for. |
91 | 93 |
92 Args: | 94 Args: |
93 summaries_bucket: Google Cloud Storage bucket containing the summary | 95 summaries_bucket: Google Cloud Storage bucket containing the summary |
94 JSON files | 96 JSON files |
95 """ | 97 """ |
96 dirs, _ = gs_utils.GSUtils().list_bucket_contents(bucket=GM_SUMMARIES_BUCKET) | 98 dirs, _ = gs_utils.GSUtils().list_bucket_contents(bucket=GM_SUMMARIES_BUCKET) |
97 return dirs | 99 return dirs |
98 | 100 |
99 | 101 |
102 class ActualLocation(object): | |
103 def __init__(self, bucket, path, generation): | |
104 self.bucket = bucket | |
105 self.path = path | |
106 self.generation = generation | |
107 | |
108 | |
109 class TipOfTreeActuals(object): | |
110 def __init__(self, summaries_bucket=GM_SUMMARIES_BUCKET, | |
111 json_filename=DEFAULT_JSON_FILENAME): | |
112 """ | |
113 Args: | |
114 summaries_bucket: URL pointing at the root directory | |
115 containing all actual-results.json files, e.g., | |
116 http://domain.name/path/to/dir OR | |
117 file:///absolute/path/to/localdir | |
118 json_filename: The JSON filename to read from within each directory. | |
119 """ | |
120 self._json_filename = json_filename | |
121 self._summaries_bucket = summaries_bucket | |
122 | |
123 def description(self): | |
124 return 'gm_summaries_bucket %s' % (self._summaries_bucket,) | |
125 | |
126 def get_builders(self): | |
127 """ Returns the list of builders we have actual results for. | |
128 {builder:string -> ActualLocation} | |
129 """ | |
130 dirs, _ = get_builders_list(self._summaries_bucket) | |
bungeman-skia
2014/11/11 20:33:06
Need to remove the ', _', as this used to call 'list_bucket_contents' directly; 'get_builders_list' now returns only the list of directories.
| |
131 result = dict() | |
132 for builder in dirs: | |
133 result[builder] = ActualLocation( | |
134 self._summaries_bucket, | |
135 "%s/%s" % (builder, self._json_filename), | |
136 None) | |
137 return result | |
138 | |
139 | |
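Reviewer context, not part of the patch: a minimal usage sketch of the TipOfTreeActuals path above, in the same Python 2 style as the file. It assumes GM_SUMMARIES_BUCKET and DEFAULT_JSON_FILENAME are defined in the elided lines, and that the ', _' unpacking flagged in the comment above is fixed so get_builders() actually runs.

    # Sketch only; builder names and output are illustrative.
    actuals = TipOfTreeActuals()            # reads GM_SUMMARIES_BUCKET
    locations = actuals.get_builders()      # {builder: ActualLocation}
    for builder, loc in sorted(locations.items()):
        # Tip-of-tree entries carry no pinned generation (None).
        print '%s -> gs://%s/%s' % (builder, loc.bucket, loc.path)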
140 class RietveldIssueActuals(object): | |
141 def __init__(self, issue, json_filename=DEFAULT_JSON_FILENAME): | |
142 """ | |
143 Args: | |
144 issue: The rietveld issue from which to obtain actuals. | |
145 json_filename: The JSON filename to read from within each directory. | |
146 """ | |
147 self._issue = issue | |
148 self._json_filename = json_filename | |
149 | |
150 def description(self): | |
151 return 'rietveld issue %s' % (self._issue,) | |
152 | |
153 def get_builders(self): | |
154 """ Returns the actuals for the given rietveld issue's tryjobs. | |
155 {builder:string -> ActualLocation} | |
156 | |
157 e.g. | |
158 {'Test-Android-Xoom-Tegra2-Arm7-Release': ( | |
159 'chromium-skia-gm-summaries', | |
160 'Test-Android-Xoom-Tegra2-Arm7-Release-Trybot/actual-results.json', | |
161 '1415041165535000')} | |
162 """ | |
163 result = dict() | |
164 json_filename_re = re.compile( | |
165 '^Created: gs://([^/]+)/((?:[^/]+/)+%s)#(\d+)$' | |
166 % re.escape(self._json_filename), re.MULTILINE) | |
167 codereview_api_url = 'https://codereview.chromium.org/api' | |
168 upload_gm_step_url = '/steps/Upload GM Results/logs/stdio' | |
169 | |
170 logging.info('Fetching issue %s ...' % (self._issue,)) | |
171 json_issue_url = '%s/%s' % (codereview_api_url, self._issue) | |
172 json_issue_data = urllib2.urlopen(json_issue_url).read() | |
173 issue_dict = gm_json.LoadFromString(json_issue_data) | |
174 | |
175 patchsets = issue_dict.get("patchsets", []) | |
176 patchset = patchsets[-1] | |
177 if not patchset: | |
178 logging.warning('No patchsets for rietveld issue %s.' % (self._issue,)) | |
179 return result | |
180 | |
181 logging.info('Fetching issue %s patch %s...' % (self._issue, patchset)) | |
182 json_patchset_url = '%s/%s/%s' % (codereview_api_url, self._issue, patchset) | |
183 json_patchset_data = urllib2.urlopen(json_patchset_url).read() | |
184 patchset_dict = gm_json.LoadFromString(json_patchset_data) | |
185 | |
186 # try_job_results is ordered reverse chronologically | |
187 try_job_results = patchset_dict.get('try_job_results', []) | |
188 for try_job_result in try_job_results: | |
189 try_builder = try_job_result.get('builder', '<bad builder>') | |
190 if not try_builder.endswith('-Trybot'): | |
191 logging.warning('Builder %s is not a trybot?' % (try_builder,)) | |
192 continue | |
193 builder = try_builder[:-len('-Trybot')] | |
194 if builder in result: | |
195 continue | |
196 | |
197 logging.info('Fetching issue %s patch %s try %s...' % | |
198 (self._issue, patchset, try_builder)) | |
199 build_url = try_job_result.get('url', '<bad url>') | |
200 gm_upload_output_url = build_url + urllib2.quote(upload_gm_step_url) | |
201 logging.info('Fetching %s ...' % (gm_upload_output_url,)) | |
202 | |
203 # Tryjobs might not produce the step, but don't let that fail everything. | |
204 gm_upload_output = None | |
205 try: | |
206 gm_upload_output = urllib2.urlopen(gm_upload_output_url).read() | |
207 except (urllib2.HTTPError, urllib2.URLError, httplib.HTTPException) as e: | |
208 logging.warning(e) | |
209 except Exception: | |
210 logging.exception('Error opening %s .' % (gm_upload_output_url,)) | |
211 if not gm_upload_output: | |
212 logging.warning('Could not fetch %s .' % (gm_upload_output_url,)) | |
213 continue | |
214 | |
215 json_filename_match = json_filename_re.search(gm_upload_output) | |
216 if json_filename_match: | |
217 logging.info('Found issue %s patch %s try %s result gs://%s/%s#%s .' % | |
218 (self._issue, patchset, builder, | |
219 json_filename_match.group(1), | |
220 json_filename_match.group(2), | |
221 json_filename_match.group(3))) | |
222 result[builder] = ActualLocation(json_filename_match.group(1), | |
223 json_filename_match.group(2), | |
224 json_filename_match.group(3)) | |
225 else: | |
226 logging.warning('Did not find %s for issue %s patch %s try %s.' % | |
227 (self._json_filename, self._issue, patchset, try_builder)) | |
228 | |
229 return result | |
230 | |
231 | |
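A small standalone check of the 'Created: gs://...' log parsing used in get_builders() above. The regex is copied verbatim from the patch; the sample log line is hypothetical and mirrors the docstring example.

    import re
    json_filename = 'actual-results.json'
    json_filename_re = re.compile(
        '^Created: gs://([^/]+)/((?:[^/]+/)+%s)#(\d+)$'
        % re.escape(json_filename), re.MULTILINE)
    sample = ('Created: gs://chromium-skia-gm-summaries/'
              'Test-Android-Xoom-Tegra2-Arm7-Release-Trybot/actual-results.json'
              '#1415041165535000')
    match = json_filename_re.search(sample)
    print match.groups()
    # ('chromium-skia-gm-summaries',
    #  'Test-Android-Xoom-Tegra2-Arm7-Release-Trybot/actual-results.json',
    #  '1415041165535000')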
100 def main(): | 232 def main(): |
101 parser = optparse.OptionParser() | 233 parser = optparse.OptionParser() |
102 required_params = [] | 234 required_params = [] |
103 parser.add_option('--actuals-base-url', | 235 parser.add_option('--actuals-base-url', |
104 action='store', type='string', | 236 action='store', type='string', |
105 default=DEFAULT_ACTUALS_BASE_URL, | 237 default=DEFAULT_ACTUALS_BASE_URL, |
106 help=('Base URL from which to read files containing JSON ' | 238 help=('Base URL from which to read files containing JSON ' |
107 'summaries of actual GM results; defaults to ' | 239 'summaries of actual GM results; defaults to ' |
108 '"%default".')) | 240 '"%default".')) |
109 required_params.append('builder') | 241 required_params.append('builder') |
(...skipping 35 matching lines...) | |
145 remaining_args) | 277 remaining_args) |
146 | 278 |
147 downloader = Download(actuals_base_url=params.actuals_base_url) | 279 downloader = Download(actuals_base_url=params.actuals_base_url) |
148 downloader.fetch(builder_name=params.builder, | 280 downloader.fetch(builder_name=params.builder, |
149 dest_dir=params.dest_dir) | 281 dest_dir=params.dest_dir) |
150 | 282 |
151 | 283 |
152 | 284 |
153 if __name__ == '__main__': | 285 if __name__ == '__main__': |
154 main() | 286 main() |
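For reviewers trying this locally, a hypothetical invocation; the script name and the --dest-dir flag spelling are inferred from the options visible above (params.builder, params.dest_dir), not confirmed by this diff.

    python download_actuals.py \
        --builder Test-Android-Xoom-Tegra2-Arm7-Release \
        --dest-dir /tmp/actuals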