Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 #! /usr/bin/env python | |
|
jbudorick
2017/06/19 13:33:42
Name this file differently. _test.py implies that
BigBossZhiling
2017/06/19 18:24:06
Done.
| |
| 2 # | |
| 3 # Copyright 2017 The Chromium Authors. All rights reserved. | |
| 4 # Use of this source code is governed by a BSD-style license that can be | |
| 5 # found in the LICENSE file. | |
| 6 | |
| 7 import argparse | |
| 8 import json | |
| 9 import os | |
| 10 import sys | |
| 11 | |
| 12 | |
def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Args:
    summary_json: Path to the swarming summary JSON file listing all shards.
    jsons_to_merge: List of paths to per-shard output.json files.

  Returns:
    Dict with the merged test output. Shards that produced no usable result
    are recorded in 'missing_shards', and 'UNRELIABLE_RESULTS' is added to
    'global_tags' when any shard is missing.

  Raises:
    Exception: If summary_json cannot be read or parsed.
  """
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    raise Exception('Summary json cannot be loaded.')

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
      'links': set()
  }
  for index, result in enumerate(summary['shards']):
    if result is not None:
      # NOTE: this code path doesn't trigger convert_to_old_format() in
      # client/swarming.py, which means the state enum is saved in its string
      # name form, not in the number form.
      state = result.get('state')
      if state == u'BOT_DIED':
        sys.stderr.write(
            'Shard #%d had a Swarming internal failure\n' % index)
      elif state == u'EXPIRED':
        sys.stderr.write('There wasn\'t enough capacity to run your test\n')
      elif state == u'TIMED_OUT':
        # BUGFIX: the original adjacent string literals concatenated into
        # "...allocated timeEither it ran..."; add the missing separator.
        sys.stderr.write(
            'Test runtime exceeded allocated time. '
            'Either it ran for too long (hard timeout) or it didn\'t produce '
            'I/O for an extended period of time (I/O timeout)\n')
      elif state == u'COMPLETED':
        json_data, err_msg = load_shard_json(index, jsons_to_merge)
        if json_data:
          # Set-like fields.
          for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
            # BUGFIX: default belongs inside .get(); the original passed []
            # as a second iterable to set.update(), so a missing key made
            # .get() return None and update(None, []) raised TypeError.
            merged[key].update(json_data.get(key, []))

          # 'per_iteration_data' is a list of dicts. Dicts should be merged
          # together, not the 'per_iteration_data' list itself.
          merged['per_iteration_data'] = merge_list_of_dicts(
              merged['per_iteration_data'],
              json_data.get('per_iteration_data', []))
          continue
        else:
          sys.stderr.write(
              'Task ran but no result was found: %s\n' % err_msg)
      else:
        sys.stderr.write('Invalid Swarming task state: %s\n' % state)
    # Reached for every shard that did not `continue` above: absent result,
    # abnormal state, or missing/invalid output.
    merged['missing_shards'].append(index)

  # If some shards are missing, make it known. Continue parsing anyway. Step
  # should be red anyway, since swarming.py return non-zero exit code in that
  # case.
  if merged['missing_shards']:
    as_str = ', '.join(str(shard) for shard in merged['missing_shards'])
    sys.stderr.write('some shards did not complete: %s\n' % as_str)
    # Not all tests run, combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to jsonish dict: sets are not JSON-serializable.
  for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
    merged[key] = sorted(merged[key])
  return merged
| 82 | |
| 83 | |
| 84 OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024 # 100 MB | |
|
BigBossZhiling
2017/06/19 05:23:28
Not sure whether we need this logic here. It is in
| |
| 85 | |
| 86 | |
def load_shard_json(index, jsons_to_merge):
  """Reads the JSON output produced by the specified shard.

  Args:
    index: The index of the shard to load data for.
    jsons_to_merge: List of candidate json file paths to search.

  Returns:
    A (contents, error) tuple, exactly one element of which is non-None:
    the deserialized contents of the shard's output.json on success, or a
    human-readable error string on failure.
  """
  # A shard's output lives at .../<shard index>/output.json.
  candidates = [
      path for path in jsons_to_merge
      if (os.path.basename(path) == 'output.json'
          and os.path.basename(os.path.dirname(path)) == str(index))]

  if not candidates:
    sys.stderr.write('shard %s test output missing\n' % index)
    return (None, 'shard %s test output was missing' % index)
  if len(candidates) > 1:
    sys.stderr.write('duplicate test output for shard %s\n' % index)
    return (None, 'shard %s test output was duplicated' % index)

  shard_json_path = candidates[0]

  try:
    # Refuse oversized files before attempting to parse them.
    size = os.stat(shard_json_path).st_size
    if size > OUTPUT_JSON_SIZE_LIMIT:
      sys.stderr.write('output.json is %d bytes. Max size is %d\n' % (
          size, OUTPUT_JSON_SIZE_LIMIT))
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(shard_json_path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    sys.stderr.write(
        'Missing or invalid gtest JSON file: %s\n' % shard_json_path)
    sys.stderr.write('%s: %s\n' % (type(e).__name__, e))

  return (None, 'shard %s test output was missing or invalid' % index)
| 127 | |
| 128 | |
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc.

  Entries in `right` win over same-keyed entries in `left`; the shorter
  list is treated as padded with empty dicts.
  """
  merged = []
  for i in range(max(len(left), len(right))):
    combined = {}
    if i < len(left):
      combined.update(left[i])
    if i < len(right):
      combined.update(right[i])
    merged.append(combined)
  return merged
| 139 | |
| 140 | |
def merge_test(
    output_json, summary_json, jsons_to_merge):
  """Merges all shard results and writes the combined JSON to output_json.

  Args:
    output_json: Path to write the merged results to.
    summary_json: Path to the swarming summary JSON file.
    jsons_to_merge: List of per-shard output.json paths.

  Returns:
    0, suitable for use as a process exit code.
  """
  output = merge_shard_results(summary_json, jsons_to_merge)
  # BUGFIX: json.dump emits text, so open in text mode. 'wb' fails under
  # Python 3 (TypeError: a bytes-like object is required) and, since
  # json.dump writes no newlines here, 'w' produces identical bytes in
  # Python 2.
  with open(output_json, 'w') as f:
    json.dump(output, f)

  return 0
| 149 | |
| 150 | |
def main(raw_args):
  """Parses the command line and merges shard results into one JSON file.

  Args:
    raw_args: Command-line arguments, excluding the program name.

  Returns:
    The exit code from merge_test (0 on success).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--summary-json')
  parser.add_argument('-o', '--output-json', required=True)
  parser.add_argument('jsons_to_merge', nargs='*')
  parsed = parser.parse_args(raw_args)
  return merge_test(
      parsed.output_json, parsed.summary_json, parsed.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| OLD | NEW |