OLD | NEW |
1 #! /usr/bin/env python | 1 #! /usr/bin/env python |
2 # | 2 # |
3 # Copyright 2017 The Chromium Authors. All rights reserved. | 3 # Copyright 2017 The Chromium Authors. All rights reserved. |
4 # Use of this source code is governed by a BSD-style license that can be | 4 # Use of this source code is governed by a BSD-style license that can be |
5 # found in the LICENSE file. | 5 # found in the LICENSE file. |
6 | 6 |
7 import argparse | 7 import argparse |
8 import collections | 8 import collections |
9 import json | 9 import json |
10 import tempfile | 10 import tempfile |
11 import time | |
12 import os | 11 import os |
13 import subprocess | |
14 import sys | 12 import sys |
15 | 13 |
16 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) | 14 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) |
17 BASE_DIR = os.path.abspath(os.path.join( | 15 BASE_DIR = os.path.abspath(os.path.join( |
18 CURRENT_DIR, '..', '..', '..', '..', '..')) | 16 CURRENT_DIR, '..', '..', '..', '..', '..')) |
| 17 |
| 18 sys.path.append(os.path.join(BASE_DIR, 'build', 'android')) |
| 19 from pylib.utils import google_storage_helper # pylint: disable=import-error |
| 20 |
19 sys.path.append(os.path.join(BASE_DIR, 'third_party')) | 21 sys.path.append(os.path.join(BASE_DIR, 'third_party')) |
20 import jinja2 # pylint: disable=import-error | 22 import jinja2 # pylint: disable=import-error |
21 JINJA_ENVIRONMENT = jinja2.Environment( | 23 JINJA_ENVIRONMENT = jinja2.Environment( |
22 loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), | 24 loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), |
23 autoescape=True) | 25 autoescape=True) |
24 | 26 |
25 | 27 |
26 def cell(data, html_class='center'): | 28 def cell(data, html_class='center'): |
27 """Formats table cell data for processing in jinja template.""" | 29 """Formats table cell data for processing in jinja template.""" |
28 return { | 30 return { |
(...skipping 211 matching lines...)
240 if footer_row[FAIL_COUNT_INDEX]['data'] > 0: | 242 if footer_row[FAIL_COUNT_INDEX]['data'] > 0: |
241 footer_row[FAIL_COUNT_INDEX]['class'] += ' failure' | 243 footer_row[FAIL_COUNT_INDEX]['class'] += ' failure' |
242 else: | 244 else: |
243 footer_row[FAIL_COUNT_INDEX]['class'] += ' success' | 245 footer_row[FAIL_COUNT_INDEX]['class'] += ' success' |
244 | 246 |
245 return (header_row, | 247 return (header_row, |
246 [[suite_row] for suite_row in suite_row_dict.values()], | 248 [[suite_row] for suite_row in suite_row_dict.values()], |
247 footer_row) | 249 footer_row) |
248 | 250 |
249 | 251 |
250 def results_to_html(results_dict, cs_base_url, bucket, server_url): | 252 def results_to_html(results_dict, cs_base_url): |
251 """Convert list of test results into html format.""" | 253 """Convert list of test results into html format.""" |
252 | 254 |
253 test_rows_header, test_rows = create_test_table(results_dict, cs_base_url) | 255 test_rows_header, test_rows = create_test_table(results_dict, cs_base_url) |
254 suite_rows_header, suite_rows, suite_row_footer = create_suite_table( | 256 suite_rows_header, suite_rows, suite_row_footer = create_suite_table( |
255 results_dict) | 257 results_dict) |
256 | 258 |
257 suite_table_values = { | 259 suite_table_values = { |
258 'table_id': 'suite-table', | 260 'table_id': 'suite-table', |
259 'table_headers': suite_rows_header, | 261 'table_headers': suite_rows_header, |
260 'table_row_blocks': suite_rows, | 262 'table_row_blocks': suite_rows, |
261 'table_footer': suite_row_footer, | 263 'table_footer': suite_row_footer, |
262 } | 264 } |
263 | 265 |
264 test_table_values = { | 266 test_table_values = { |
265 'table_id': 'test-table', | 267 'table_id': 'test-table', |
266 'table_headers': test_rows_header, | 268 'table_headers': test_rows_header, |
267 'table_row_blocks': test_rows, | 269 'table_row_blocks': test_rows, |
268 } | 270 } |
269 | 271 |
270 main_template = JINJA_ENVIRONMENT.get_template( | 272 main_template = JINJA_ENVIRONMENT.get_template( |
271 os.path.join('template', 'main.html')) | 273 os.path.join('template', 'main.html')) |
272 return main_template.render( # pylint: disable=no-member | 274 return main_template.render( # pylint: disable=no-member |
273 {'tb_values': [suite_table_values, test_table_values], | 275 {'tb_values': [suite_table_values, test_table_values]}) |
274 'bucket': bucket, 'server_url': server_url}) | |
275 | 276 |
276 | 277 |
277 def result_details(json_path, cs_base_url, bucket, server_url): | 278 def result_details(json_path, cs_base_url): |
278 """Get result details from json path and then convert results to html.""" | 279 """Get result details from json path and then convert results to html.""" |
279 | 280 |
280 with open(json_path) as json_file: | 281 with open(json_path) as json_file: |
281 json_object = json.loads(json_file.read()) | 282 json_object = json.loads(json_file.read()) |
282 | 283 |
283 if not 'per_iteration_data' in json_object: | 284 if not 'per_iteration_data' in json_object: |
284 return 'Error: json file missing per_iteration_data.' | 285 return 'Error: json file missing per_iteration_data.' |
285 | 286 |
286 results_dict = collections.defaultdict(list) | 287 results_dict = collections.defaultdict(list) |
287 for testsuite_run in json_object['per_iteration_data']: | 288 for testsuite_run in json_object['per_iteration_data']: |
288 for test, test_runs in testsuite_run.iteritems(): | 289 for test, test_runs in testsuite_run.iteritems(): |
289 results_dict[test].extend(test_runs) | 290 results_dict[test].extend(test_runs) |
290 return results_to_html(results_dict, cs_base_url, bucket, server_url) | 291 return results_to_html(results_dict, cs_base_url) |
291 | 292 |
292 | 293 |
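Context for the hunk above: result_details() only requires that the JSON file contain a per_iteration_data list, where each entry maps a test name to the list of runs for that iteration; runs of the same test across iterations are merged into one results_dict entry. A minimal sketch of such an input is below; the test name and the per-run 'status' key are illustrative assumptions, not values taken from this change.

    # Hypothetical minimal input for result_details(); only the
    # 'per_iteration_data' key is required by the code above.
    import json
    import tempfile

    minimal_results = {
        'per_iteration_data': [
            {'org.chromium.FooTest#testBar': [{'status': 'SUCCESS'}]},
            {'org.chromium.FooTest#testBar': [{'status': 'FAILURE'}]},
        ]
    }

    with tempfile.NamedTemporaryFile(suffix='.json', delete=False) as temp_json:
        temp_json.write(json.dumps(minimal_results))
    # result_details(temp_json.name, 'http://cs.chromium.org') would merge both
    # runs of testBar into a single results_dict entry before rendering HTML.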
293 def upload_to_google_bucket(html, test_name, builder_name, build_number, | 294 def upload_to_google_bucket(html, test_name, builder_name, build_number, |
294 bucket, server_url, content_type): | 295 bucket): |
295 with tempfile.NamedTemporaryFile(suffix='.html') as temp_file: | 296 with tempfile.NamedTemporaryFile(suffix='.html') as temp_file: |
296 temp_file.write(html) | 297 temp_file.write(html) |
297 temp_file.flush() | 298 temp_file.flush() |
298 dest = 'html/%s_%s_%s_%s.html' % ( | |
299 test_name, builder_name, build_number, | |
300 time.strftime('%Y_%m_%d_T%H_%M_%S')) | |
301 gsutil_path = os.path.join(BASE_DIR, 'third_party', 'catapult', | |
302 'third_party', 'gsutil', 'gsutil.py') | |
303 subprocess.check_call([ | |
304 sys.executable, gsutil_path, '-h', "Content-Type:%s" % content_type, | |
305 'cp', temp_file.name, 'gs://%s/%s' % (bucket, dest)]) | |
306 | 299 |
307 return '%s/%s/%s' % (server_url, bucket, dest) | 300 return google_storage_helper.upload( |
| 301 name=google_storage_helper.unique_name( |
| 302 '%s_%s_%s' % (test_name, builder_name, build_number), |
| 303 suffix='.html'), |
| 304 filepath=temp_file.name, |
| 305 bucket='%s/html' % bucket, |
| 306 content_type='text/html', |
| 307 authenticated_link=True) |
308 | 308 |
309 | 309 |
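The rewritten upload_to_google_bucket() delegates to pylib's google_storage_helper instead of shelling out to gsutil.py: unique_name() supplies the timestamp suffix the old code built by hand, and authenticated_link=True presumably yields a link equivalent to the old server_url-based one. A sketch of a call with the new signature; the builder, build number, and bucket values are made-up placeholders, not names from this change.

    # Illustrative call only; all argument values below are placeholders.
    link = upload_to_google_bucket(
        html='<html><body>results</body></html>',
        test_name='chrome_public_test_apk',
        builder_name='android-nexus5-marshmallow',
        build_number='1234',
        bucket='example-result-details-bucket')
    print link  # link returned by google_storage_helper.upload()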
310 def main(): | 310 def main(): |
311 parser = argparse.ArgumentParser() | 311 parser = argparse.ArgumentParser() |
312 parser.add_argument('--json-file', help='Path of json file.') | 312 parser.add_argument('--json-file', help='Path of json file.') |
313 parser.add_argument('--cs-base-url', help='Base url for code search.', | 313 parser.add_argument('--cs-base-url', help='Base url for code search.', |
314 default='http://cs.chromium.org') | 314 default='http://cs.chromium.org') |
315 parser.add_argument('--bucket', help='Google storage bucket.', required=True) | 315 parser.add_argument('--bucket', help='Google storage bucket.', required=True) |
316 parser.add_argument('--builder-name', help='Builder name.') | 316 parser.add_argument('--builder-name', help='Builder name.') |
317 parser.add_argument('--build-number', help='Build number.') | 317 parser.add_argument('--build-number', help='Build number.') |
318 parser.add_argument('--test-name', help='The name of the test.', | 318 parser.add_argument('--test-name', help='The name of the test.', |
319 required=True) | 319 required=True) |
320 parser.add_argument('--server-url', help='The url of the server.', | |
321 default='https://storage.cloud.google.com') | |
322 parser.add_argument( | |
323 '--content-type', | |
324 help=('Content type, which is used to determine ' | |
325 'whether to download the file, or view in browser.'), | |
326 default='text/html', | |
327 choices=['text/html', 'application/octet-stream']) | |
328 parser.add_argument( | 320 parser.add_argument( |
329 '-o', '--output-json', | 321 '-o', '--output-json', |
330 help='(Swarming Merge Script API)' | 322 help='(Swarming Merge Script API)' |
331 ' Output JSON file to create.') | 323 ' Output JSON file to create.') |
332 parser.add_argument( | 324 parser.add_argument( |
333 '--build-properties', | 325 '--build-properties', |
334 help='(Swarming Merge Script API) ' | 326 help='(Swarming Merge Script API) ' |
335 'Build property JSON file provided by recipes.') | 327 'Build property JSON file provided by recipes.') |
336 parser.add_argument( | 328 parser.add_argument( |
337 '--summary-json', | 329 '--summary-json', |
(...skipping 40 matching lines...)
378 if args.positional: | 370 if args.positional: |
379 if not len(args.positional) == 1: | 371 if not len(args.positional) == 1: |
380 raise parser.error('More than 1 json file specified.') | 372 raise parser.error('More than 1 json file specified.') |
381 json_file = args.positional[0] | 373 json_file = args.positional[0] |
382 elif args.json_file: | 374 elif args.json_file: |
383 json_file = args.json_file | 375 json_file = args.json_file |
384 | 376 |
385 if not os.path.exists(json_file): | 377 if not os.path.exists(json_file): |
386 raise IOError('--json-file %s not found.' % json_file) | 378 raise IOError('--json-file %s not found.' % json_file) |
387 | 379 |
388 result_html_string = result_details(json_file, args.cs_base_url, | 380 result_html_string = result_details(json_file, args.cs_base_url) |
389 args.bucket, args.server_url) | |
390 result_details_link = upload_to_google_bucket( | 381 result_details_link = upload_to_google_bucket( |
391 result_html_string.encode('UTF-8'), | 382 result_html_string.encode('UTF-8'), |
392 args.test_name, builder_name, | 383 args.test_name, builder_name, |
393 build_number, args.bucket, | 384 build_number, args.bucket) |
394 args.server_url, args.content_type) | |
395 | 385 |
396 if args.output_json: | 386 if args.output_json: |
397 with open(json_file) as original_json_file: | 387 with open(json_file) as original_json_file: |
398 json_object = json.load(original_json_file) | 388 json_object = json.load(original_json_file) |
399 json_object['links'] = {'result_details': result_details_link} | 389 json_object['links'] = {'result_details': result_details_link} |
400 with open(args.output_json, 'w') as f: | 390 with open(args.output_json, 'w') as f: |
401 json.dump(json_object, f) | 391 json.dump(json_object, f) |
402 else: | 392 else: |
403 print result_details_link | 393 print result_details_link |
404 | 394 |
405 if __name__ == '__main__': | 395 if __name__ == '__main__': |
406 sys.exit(main()) | 396 sys.exit(main()) |
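When --output-json is passed, the script writes the original results JSON back out with a links.result_details entry added, as shown in main() above. A minimal sketch of how a downstream consumer might read that link back; 'output.json' is a placeholder path, not something defined in this change.

    # Hypothetical consumer of the merged JSON written via --output-json.
    import json

    with open('output.json') as f:
        merged = json.load(f)
    # The upload link added by this script lives under 'links'.
    result_details_url = merged.get('links', {}).get('result_details')
    if result_details_url:
        print result_details_url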