OLD | NEW |
---|---|
1 # Copyright (C) 2010 Google Inc. All rights reserved. | 1 # Copyright (C) 2010 Google Inc. All rights reserved. |
2 # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged | 2 # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged |
3 # | 3 # |
4 # Redistribution and use in source and binary forms, with or without | 4 # Redistribution and use in source and binary forms, with or without |
5 # modification, are permitted provided that the following conditions are | 5 # modification, are permitted provided that the following conditions are |
6 # met: | 6 # met: |
7 # | 7 # |
8 # * Redistributions of source code must retain the above copyright | 8 # * Redistributions of source code must retain the above copyright |
9 # notice, this list of conditions and the following disclaimer. | 9 # notice, this list of conditions and the following disclaimer. |
10 # * Redistributions in binary form must reproduce the above | 10 # * Redistributions in binary form must reproduce the above |
(...skipping 195 matching lines...) | |
206 else: | 206 else: |
207 should_retry_failures = self._options.retry_failures | 207 should_retry_failures = self._options.retry_failures |
208 | 208 |
209 enabled_pixel_tests_in_retry = False | 209 enabled_pixel_tests_in_retry = False |
210 try: | 210 try: |
211 self._start_servers(tests_to_run) | 211 self._start_servers(tests_to_run) |
212 | 212 |
213 initial_results = self._run_tests(tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations, | 213 initial_results = self._run_tests(tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations, |
214 int(self._options.child_processes), retrying=False) | 214 int(self._options.child_processes), retrying=False) |
215 | 215 |
216 # Don't retry failures when interrupted by user or failures limit exception. | |
217 should_retry_failures = should_retry_failures and not (initial_results.interrupted or initial_results.keyboard_interrupted) | |
218 | |
216 tests_to_retry = self._tests_to_retry(initial_results) | 219 tests_to_retry = self._tests_to_retry(initial_results) |
217 if should_retry_failures and tests_to_retry and not initial_results.interrupted: | 220 if should_retry_failures and tests_to_retry: |
218 enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed() | 221 enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed() |
219 | 222 |
220 _log.info('') | 223 _log.info('') |
221 _log.info("Retrying %d unexpected failure(s) ..." % len(tests_to_retry)) | 224 _log.info("Retrying %d unexpected failure(s) ..." % len(tests_to_retry)) |
222 _log.info('') | 225 _log.info('') |
223 retry_results = self._run_tests(tests_to_retry, tests_to_skip=set(), repeat_each=1, iterations=1, | 226 retry_results = self._run_tests(tests_to_retry, tests_to_skip=set(), repeat_each=1, iterations=1, |
224 num_workers=1, retrying=True) | 227 num_workers=1, retrying=True) |
225 | 228 |
226 if enabled_pixel_tests_in_retry: | 229 if enabled_pixel_tests_in_retry: |
227 self._options.pixel_tests = False | 230 self._options.pixel_tests = False |
(...skipping 14 matching lines...) Expand all Loading... | |
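
Note: the hunk above moves the interruption check out of the retry condition and folds it into should_retry_failures, so that both a user interrupt and the failures-limit stop suppress the retry pass. A minimal sketch of that gating follows; the Results class is a hypothetical stand-in for the real initial_results object, not part of the patch.

    # Sketch of the retry gating introduced in the hunk above.
    class Results(object):
        def __init__(self, interrupted=False, keyboard_interrupted=False):
            self.interrupted = interrupted                    # e.g. failures limit reached
            self.keyboard_interrupted = keyboard_interrupted  # user pressed Ctrl-C

    def should_retry(retry_failures_option, initial_results):
        # Don't retry failures when interrupted by user or by the failures limit.
        return retry_failures_option and not (initial_results.interrupted or
                                              initial_results.keyboard_interrupted)

    assert should_retry(True, Results())
    assert not should_retry(True, Results(keyboard_interrupted=True))
    assert not should_retry(True, Results(interrupted=True))
    assert not should_retry(False, Results())
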
242 summarized_full_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry) | 245 summarized_full_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry) |
243 summarized_failing_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry, only_include_failing=True) | 246 summarized_failing_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry, only_include_failing=True) |
244 | 247 |
245 exit_code = summarized_failing_results['num_regressions'] | 248 exit_code = summarized_failing_results['num_regressions'] |
246 if not self._options.dry_run: | 249 if not self._options.dry_run: |
247 self._write_json_files(summarized_full_results, summarized_failing_results, initial_results) | 250 self._write_json_files(summarized_full_results, summarized_failing_results, initial_results) |
248 self._upload_json_files() | 251 self._upload_json_files() |
249 | 252 |
250 results_path = self._filesystem.join(self._results_directory, "results.html") | 253 results_path = self._filesystem.join(self._results_directory, "results.html") |
251 self._copy_results_html_file(results_path) | 254 self._copy_results_html_file(results_path) |
252 if self._options.show_results and (exit_code or (self._options.full_results_html and initial_results.total_failures)): | 255 if initial_results.keyboard_interrupted: |
253 self._port.show_results_html_file(results_path) | 256 exit_code = test_run_results.TestRunResults.INTERRUPTED_EXIT_STATUS |
Dirk Pranke
2013/09/13 05:37:31
change this to test_run_results.INTERRUPTED_EXIT_STATUS
r.kasibhatla
2013/09/13 13:43:35
Done.
| |
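
Note: the exchange above asks for the interrupted exit status to be read from the module rather than through the TestRunResults class. A sketch of the two spellings, under the assumption that the constant is exposed at module level in test_run_results.py; the numeric value below is a placeholder, not the real one.

    _INTERRUPTED = -1  # assumed placeholder; the real value lives in test_run_results.py

    # Module-level spelling the reviewer asks for:
    INTERRUPTED_EXIT_STATUS = _INTERRUPTED

    class TestRunResults(object):
        # Class-level spelling the patch currently uses:
        INTERRUPTED_EXIT_STATUS = _INTERRUPTED

    # Before (current patch):  exit_code = test_run_results.TestRunResults.INTERRUPTED_EXIT_STATUS
    # After (review request):  exit_code = test_run_results.INTERRUPTED_EXIT_STATUS
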
254 | 257 else: |
255 self._printer.print_results(time.time() - start_time, initial_results, summarized_failing_results) | 258 if self._options.show_results and (exit_code or (self._options.full_results_html and initial_results.total_failures)): |
259 self._port.show_results_html_file(results_path) | |
260 self._printer.print_results(time.time() - start_time, initial_results, summarized_failing_results) | |
256 return test_run_results.RunDetails(exit_code, summarized_full_results, summarized_failing_results, initial_results, retry_results, enabled_pixel_tests_in_retry) | 261 return test_run_results.RunDetails(exit_code, summarized_full_results, summarized_failing_results, initial_results, retry_results, enabled_pixel_tests_in_retry) |
257 | 262 |
258 def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations, num_workers, retrying): | 263 def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations, num_workers, retrying): |
259 | 264 |
260 test_inputs = [] | 265 test_inputs = [] |
261 for _ in xrange(iterations): | 266 for _ in xrange(iterations): |
262 for test in tests_to_run: | 267 for test in tests_to_run: |
263 for _ in xrange(repeat_each): | 268 for _ in xrange(repeat_each): |
264 test_inputs.append(self._test_input_for_file(test)) | 269 test_inputs.append(self._test_input_for_file(test)) |
265 return self._runner.run_tests(self._expectations, test_inputs, tests_to_skip, num_workers, retrying) | 270 return self._runner.run_tests(self._expectations, test_inputs, tests_to_skip, num_workers, retrying) |
(...skipping 149 matching lines...) | |
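
Note: the _run_tests hunk above expands the test list before handing it to the runner: every test is queued repeat_each times within each of iterations passes, so each name yields iterations * repeat_each inputs. A small self-contained sketch of that expansion, with plain strings standing in for the TestInput objects built by self._test_input_for_file():

    # Sketch of the input expansion done by _run_tests above.
    def expand_test_inputs(tests_to_run, repeat_each, iterations):
        test_inputs = []
        for _ in range(iterations):           # outer loop: whole-suite iterations
            for test in tests_to_run:
                for _ in range(repeat_each):  # inner loop: back-to-back repeats of one test
                    test_inputs.append(test)
        return test_inputs

    inputs = expand_test_inputs(['fast/a.html', 'fast/b.html'], repeat_each=2, iterations=3)
    assert len(inputs) == 2 * 2 * 3
    assert inputs[:4] == ['fast/a.html', 'fast/a.html', 'fast/b.html', 'fast/b.html']
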
415 return int(worker_name.split('/')[1]) if worker_name else -1 | 420 return int(worker_name.split('/')[1]) if worker_name else -1 |
416 | 421 |
417 stats = {} | 422 stats = {} |
418 for result in initial_results.results_by_name.values(): | 423 for result in initial_results.results_by_name.values(): |
419 if result.type != test_expectations.SKIP: | 424 if result.type != test_expectations.SKIP: |
420 stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))} | 425 stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))} |
421 stats_trie = {} | 426 stats_trie = {} |
422 for name, value in stats.iteritems(): | 427 for name, value in stats.iteritems(): |
423 json_results_generator.add_path_to_trie(name, value, stats_trie) | 428 json_results_generator.add_path_to_trie(name, value, stats_trie) |
424 return stats_trie | 429 return stats_trie |
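
Note: the final hunk records per-test timing stats keyed by test name and then folds them into a trie keyed on path components. The sketch below is an illustrative reimplementation of that trie insertion; the real helper lives in json_results_generator and may differ in detail.

    # Illustrative trie insertion: split the test name on '/' and nest dicts.
    def add_path_to_trie(path, value, trie):
        directory, _, rest = path.partition('/')
        if not rest:
            trie[directory] = value        # leaf: store the stats entry
        else:
            trie.setdefault(directory, {})
            add_path_to_trie(rest, value, trie[directory])

    stats_trie = {}
    add_path_to_trie('fast/js/a.html', {'results': (0, 1, 1234, 5, 7)}, stats_trie)
    add_path_to_trie('fast/js/b.html', {'results': (1, 2, 1235, 6, 9)}, stats_trie)
    assert stats_trie['fast']['js']['a.html']['results'][2] == 1234
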