OLD | NEW |
1 import json | 1 import json |
2 | 2 |
3 from recipe_engine import recipe_test_api | 3 from recipe_engine import recipe_test_api |
4 | 4 |
5 from .util import GTestResults, TestResults | 5 from .util import GTestResults, TestResults |
6 | 6 |
7 class TestUtilsTestApi(recipe_test_api.RecipeTestApi): | 7 class TestUtilsTestApi(recipe_test_api.RecipeTestApi): |
@recipe_test_api.placeholder_step_data
def test_results(self, test_results, retcode=None):
  """Returns placeholder step data carrying *test_results* as JSON.

  Args:
    test_results: a TestResults instance; its as_jsonish() form becomes
        the step's json output.
    retcode: optional return code to attach to the placeholder data.
  """
  jsonish = test_results.as_jsonish()
  return self.m.json.output(jsonish, retcode)
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
105 | 105 |
106 retcode = None if passing else 1 | 106 retcode = None if passing else 1 |
107 return self.raw_gtest_output(canned_jsonish, retcode) | 107 return self.raw_gtest_output(canned_jsonish, retcode) |
108 | 108 |
def raw_gtest_output(self, jsonish, retcode):
  """Wraps *jsonish* in a GTestResults placeholder with the given retcode.

  Args:
    jsonish: raw jsonish gtest results fed to GTestResults.
    retcode: return code stored on the resulting step data.
  """
  results = GTestResults(jsonish)
  step_data = self.gtest_results(results)
  step_data.retcode = retcode
  return step_data
114 | 114 |
def generate_simplified_json_results(self, shards, isolated_script_passing,
                                     valid):
  """Builds canned per-shard results in the simplified jsonish format.

  Args:
    shards: number of shard result dicts to generate.
    isolated_script_passing: if True every generated test is listed under
        'successes'; otherwise every test is listed under 'failures'.
    valid: value copied verbatim into each shard's 'valid' field.

  Returns:
    A list of `shards` dicts, each with 'valid', 'successes', 'failures'
    and 'times' keys; shard i contains tests test<2i+1> and test<2i+2>.
  """
  per_shard_results = []
  # 'range' instead of Python-2-only 'xrange': iteration is identical on
  # Python 2 and this also runs under Python 3.
  for i in range(shards):
    jsonish_results = {}
    jsonish_results['valid'] = valid
    # Keep shard 0's results equivalent to the old code to minimize
    # expectation diffs.
    idx = 1 + (2 * i)
    tests_run = ['test%d.Test%d' % (idx, idx),
                 'test%d.Test%d' % (idx + 1, idx + 1)]
    if isolated_script_passing:
      jsonish_results['failures'] = []
      jsonish_results['successes'] = tests_run
    else:
      jsonish_results['failures'] = tests_run
      jsonish_results['successes'] = []
    jsonish_results['times'] = {t: 0.1 for t in tests_run}
    per_shard_results.append(jsonish_results)
  return per_shard_results
| 135 |
def generate_json_test_results(self, shards, isolated_script_passing,
                               valid):
  """Builds canned per-shard results in the full JSON test results format.

  Args:
    shards: number of shard result dicts to generate.
    isolated_script_passing: if True, shard i gets passing tests (plus one
        expected failure) and PASS count 2; otherwise both tests have
        unexpected outcomes and FAIL count 2.
    valid: when falsy, 'path_delimiter' is removed from each shard so the
        results read as invalid to consumers.

  Returns:
    A list of `shards` version-3 JSON-test-results dicts.
  """
  per_shard_results = []
  # 'range' instead of Python-2-only 'xrange': iteration is identical on
  # Python 2 and this also runs under Python 3.
  for i in range(shards):
    jsonish_results = {
        'interrupted': False,
        'path_delimiter': '.',
        'version': 3,
        # Vary per shard so shards are distinguishable in expectations.
        'seconds_since_epoch': 14000000 + i,
        'num_failures_by_type': {
            'FAIL': 0,
            'PASS': 0
        }
    }
    if not valid:
      # Dropping a required key makes the blob invalid on purpose.
      del jsonish_results['path_delimiter']
    idx = 1 + (2 * i)
    if isolated_script_passing:
      tests_run = {
          'test_common': {
              'Test%d' % idx: {
                  'expected': 'PASS',
                  'actual': 'PASS',
              },
          },
          'test%d' % idx: {
              'Test%d' % idx: {
                  'expected': 'PASS',
                  'actual': 'PASS',
              },
              # Expected failure: FAIL both expected and actual.
              'Test%d' % (idx + 1): {
                  'expected': 'FAIL',
                  'actual': 'FAIL',
              },
          }
      }
      jsonish_results['num_failures_by_type']['PASS'] = 2
    else:
      # Unexpected outcomes in both directions (flake-like data).
      tests_run = {
          'test%d' % idx: {
              'Test%d' % idx: {
                  'expected': 'FAIL',
                  'actual': 'PASS',
              },
              'Test%d' % (idx + 1): {
                  'expected': 'PASS',
                  'actual': 'FAIL',
              },
          }
      }
      jsonish_results['num_failures_by_type']['FAIL'] = 2
    jsonish_results['tests'] = tests_run
    per_shard_results.append(jsonish_results)
  return per_shard_results
| 191 |
| 192 def canned_isolated_script_output(self, passing, is_win, swarming=False, |
| 193 shards=1, swarming_internal_failure=False, |
| 194 isolated_script_passing=True, valid=True, |
| 195 missing_shards=[], |
| 196 empty_shards=[], |
| 197 use_json_test_format=False, |
| 198 output_chartjson=False): |
| 199 """Produces a test results' compatible json for isolated script tests. """ |
| 200 per_shard_results = [] |
| 201 per_shard_chartjson_results = [] |
| 202 for i in xrange(shards): |
| 203 chartjsonish_results = {} |
| 204 idx = 1 + (2 * i) |
140 chartjsonish_results['dummy'] = 'dummy%d' % i | 205 chartjsonish_results['dummy'] = 'dummy%d' % i |
141 chartjsonish_results['charts'] = {'entry%d' % idx: 'chart%d' % idx, | 206 chartjsonish_results['charts'] = {'entry%d' % idx: 'chart%d' % idx, |
142 'entry%d' % (idx + 1): 'chart%d' % (idx + 1)} | 207 'entry%d' % (idx + 1): 'chart%d' % (idx + 1)} |
143 per_shard_results.append(jsonish_results) | |
144 per_shard_chartjson_results.append(chartjsonish_results) | 208 per_shard_chartjson_results.append(chartjsonish_results) |
| 209 if use_json_test_format: |
| 210 per_shard_results = self.generate_json_test_results( |
| 211 shards, isolated_script_passing, valid) |
| 212 else: |
| 213 per_shard_results = self.generate_simplified_json_results( |
| 214 shards, isolated_script_passing, valid) |
145 if swarming: | 215 if swarming: |
146 jsonish_shards = [] | 216 jsonish_shards = [] |
147 files_dict = {} | 217 files_dict = {} |
148 for i in xrange(shards): | 218 for i in xrange(shards): |
149 jsonish_shards.append({ | 219 jsonish_shards.append({ |
150 'failure': not passing, | 220 'failure': not passing, |
151 'internal_failure': swarming_internal_failure, | 221 'internal_failure': swarming_internal_failure, |
152 'exit_code': '1' if not passing or swarming_internal_failure else '0' | 222 'exit_code': '1' if not passing or swarming_internal_failure else '0' |
153 }) | 223 }) |
154 swarming_path = str(i) | 224 swarming_path = str(i) |
(...skipping 16 matching lines...) Expand all Loading... |
171 if not chartjson_output_missing and output_chartjson: | 241 if not chartjson_output_missing and output_chartjson: |
172 files_dict[chartjson_swarming_path] = \ | 242 files_dict[chartjson_swarming_path] = \ |
173 '' if chartjson_output_empty \ | 243 '' if chartjson_output_empty \ |
174 else json.dumps(per_shard_chartjson_results[i]) | 244 else json.dumps(per_shard_chartjson_results[i]) |
175 | 245 |
176 jsonish_summary = {'shards': jsonish_shards} | 246 jsonish_summary = {'shards': jsonish_shards} |
177 files_dict['summary.json'] = json.dumps(jsonish_summary) | 247 files_dict['summary.json'] = json.dumps(jsonish_summary) |
178 return self.m.raw_io.output_dir(files_dict) | 248 return self.m.raw_io.output_dir(files_dict) |
179 else: | 249 else: |
180 return self.m.json.output(per_shard_results[0]) | 250 return self.m.json.output(per_shard_results[0]) |
OLD | NEW |