| OLD | NEW |
| 1 # Copyright (C) 2010 Google Inc. All rights reserved. | 1 # Copyright (C) 2010 Google Inc. All rights reserved. |
| 2 # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged | 2 # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged |
| 3 # | 3 # |
| 4 # Redistribution and use in source and binary forms, with or without | 4 # Redistribution and use in source and binary forms, with or without |
| 5 # modification, are permitted provided that the following conditions are | 5 # modification, are permitted provided that the following conditions are |
| 6 # met: | 6 # met: |
| 7 # | 7 # |
| 8 # * Redistributions of source code must retain the above copyright | 8 # * Redistributions of source code must retain the above copyright |
| 9 # notice, this list of conditions and the following disclaimer. | 9 # notice, this list of conditions and the following disclaimer. |
| 10 # * Redistributions in binary form must reproduce the above | 10 # * Redistributions in binary form must reproduce the above |
| 11 # copyright notice, this list of conditions and the following disclaimer | 11 # copyright notice, this list of conditions and the following disclaimer |
| 12 # in the documentation and/or other materials provided with the | 12 # in the documentation and/or other materials provided with the |
| 13 # distribution. | 13 # distribution. |
| 14 # * Neither the name of Google Inc. nor the names of its | 14 # * Neither the name of Google Inc. nor the names of its |
| 15 # contributors may be used to endorse or promote products derived from | 15 # contributors may be used to endorse or promote products derived from |
| 16 # this software without specific prior written permission. | 16 # this software without specific prior written permission. |
| 17 # | 17 # |
| 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 29 | |
| 30 """ | 29 """ |
| 31 The Manager runs a series of tests (TestType interface) against a set | 30 The Manager runs a series of tests (TestType interface) against a set |
| 32 of test files. If a test file fails a TestType, it returns a list of TestFailure | 31 of test files. If a test file fails a TestType, it returns a list of TestFailure |
| 33 objects to the Manager. The Manager then aggregates the TestFailures to | 32 objects to the Manager. The Manager then aggregates the TestFailures to |
| 34 create a final report. | 33 create a final report. |
| 35 """ | 34 """ |
| 36 | 35 |
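The docstring above describes the control flow: each test run produces TestFailure objects, and the Manager folds them into a single report. A minimal sketch of that aggregation step, using hypothetical names rather than the webkitpy API:

```python
class TestFailure(object):
    """Stand-in for webkitpy's TestFailure: one failure for one test."""

    def __init__(self, test_name, message):
        self.test_name = test_name
        self.message = message


def aggregate_failures(results_by_test):
    # results_by_test: dict mapping test name -> list of TestFailure objects.
    # Tests with no failures are omitted from the final report.
    return {name: [f.message for f in failures]
            for name, failures in results_by_test.items()
            if failures}
```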
| 37 import datetime | 36 import datetime |
| 38 import json | 37 import json |
| 39 import logging | 38 import logging |
| (...skipping 13 matching lines...) |
| 53 from webkitpy.tool import grammar | 52 from webkitpy.tool import grammar |
| 54 | 53 |
| 55 _log = logging.getLogger(__name__) | 54 _log = logging.getLogger(__name__) |
| 56 | 55 |
| 57 # Builder base URL where we have the archived test results. | 56 # Builder base URL where we have the archived test results. |
| 58 BUILDER_BASE_URL = "http://build.chromium.org/buildbot/layout_test_results/" | 57 BUILDER_BASE_URL = "http://build.chromium.org/buildbot/layout_test_results/" |
| 59 | 58 |
| 60 TestExpectations = test_expectations.TestExpectations | 59 TestExpectations = test_expectations.TestExpectations |
| 61 | 60 |
| 62 | 61 |
| 63 | |
| 64 class Manager(object): | 62 class Manager(object): |
| 65 """A class for managing running a series of tests on a series of layout | 63 """A class for managing running a series of tests on a series of layout |
| 66 test files.""" | 64 test files.""" |
| 67 | 65 |
| 68 def __init__(self, port, options, printer): | 66 def __init__(self, port, options, printer): |
| 69 """Initialize test runner data structures. | 67 """Initialize test runner data structures. |
| 70 | 68 |
| 71 Args: | 69 Args: |
| 72 port: an object implementing port-specific functionality | 70 port: an object implementing port-specific functionality |
| 73 options: a dictionary of command line options | 71 options: a dictionary of command line options |
| 74 printer: a Printer object to record updates to. | 72 printer: a Printer object to record updates to. |
| 75 """ | 73 """ |
| 76 self._port = port | 74 self._port = port |
| 77 self._filesystem = port.host.filesystem | 75 self._filesystem = port.host.filesystem |
| 78 self._options = options | 76 self._options = options |
| 79 self._printer = printer | 77 self._printer = printer |
| 80 self._expectations = None | 78 self._expectations = None |
| 81 | 79 |
| 82 self.HTTP_SUBDIR = 'http' + port.TEST_PATH_SEPARATOR | 80 self.HTTP_SUBDIR = 'http' + port.TEST_PATH_SEPARATOR |
| 83 self.INSPECTOR_SUBDIR = 'inspector' + port.TEST_PATH_SEPARATOR | 81 self.INSPECTOR_SUBDIR = 'inspector' + port.TEST_PATH_SEPARATOR |
| 84 self.PERF_SUBDIR = 'perf' | 82 self.PERF_SUBDIR = 'perf' |
| 85 self.WEBSOCKET_SUBDIR = 'websocket' + port.TEST_PATH_SEPARATOR | 83 self.WEBSOCKET_SUBDIR = 'websocket' + port.TEST_PATH_SEPARATOR |
| 86 self.VIRTUAL_HTTP_SUBDIR = port.TEST_PATH_SEPARATOR.join([ | 84 self.VIRTUAL_HTTP_SUBDIR = port.TEST_PATH_SEPARATOR.join(['virtual', 'stable', 'http']) |
| 87 'virtual', 'stable', 'http']) | |
| 88 self.LAYOUT_TESTS_DIRECTORY = 'LayoutTests' | 85 self.LAYOUT_TESTS_DIRECTORY = 'LayoutTests' |
| 89 self.ARCHIVED_RESULTS_LIMIT = 25 | 86 self.ARCHIVED_RESULTS_LIMIT = 25 |
| 90 self._http_server_started = False | 87 self._http_server_started = False |
| 91 self._wptserve_started = False | 88 self._wptserve_started = False |
| 92 self._websockets_server_started = False | 89 self._websockets_server_started = False |
| 93 | 90 |
| 94 self._results_directory = self._port.results_directory() | 91 self._results_directory = self._port.results_directory() |
| 95 self._finder = LayoutTestFinder(self._port, self._options) | 92 self._finder = LayoutTestFinder(self._port, self._options) |
| 96 self._runner = LayoutTestRunner(self._options, self._port, self._printer, self._results_directory, self._test_is_slow) | 93 self._runner = LayoutTestRunner(self._options, self._port, self._printer, self._results_directory, self._test_is_slow) |
| 97 | 94 |
| 98 def _collect_tests(self, args): | 95 def _collect_tests(self, args): |
| 99 return self._finder.find_tests(args, test_list=self._options.test_list, | 96 return self._finder.find_tests(args, test_list=self._options.test_list, fastest_percentile=self._options.fastest) |
| 100 fastest_percentile=self._options.fastest) | |
| 101 | 97 |
| 102 def _is_http_test(self, test): | 98 def _is_http_test(self, test): |
| 103 return ( | 99 return (test.startswith(self.HTTP_SUBDIR) or self._is_websocket_test(test) or self.VIRTUAL_HTTP_SUBDIR in test) |
| 104 test.startswith(self.HTTP_SUBDIR) or | |
| 105 self._is_websocket_test(test) or | |
| 106 self.VIRTUAL_HTTP_SUBDIR in test | |
| 107 ) | |
| 108 | 100 |
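The classification above is purely path-based. A runnable sketch, assuming '/' as the port's TEST_PATH_SEPARATOR (the real separator comes from the Port object):

```python
HTTP_SUBDIR = 'http/'
WEBSOCKET_SUBDIR = 'websocket/'
VIRTUAL_HTTP_SUBDIR = 'virtual/stable/http'


def is_http_test(test):
    # A test needs the HTTP server if it lives under http/, is a
    # websocket test, or is part of the virtual http suite.
    return (test.startswith(HTTP_SUBDIR) or
            WEBSOCKET_SUBDIR in test or
            VIRTUAL_HTTP_SUBDIR in test)


assert is_http_test('http/tests/xmlhttprequest/basic.html')
assert is_http_test('virtual/stable/http/tests/foo.html')
assert not is_http_test('fast/css/color.html')
```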
| 109 def _is_inspector_test(self, test): | 101 def _is_inspector_test(self, test): |
| 110 return self.INSPECTOR_SUBDIR in test | 102 return self.INSPECTOR_SUBDIR in test |
| 111 | 103 |
| 112 def _is_websocket_test(self, test): | 104 def _is_websocket_test(self, test): |
| 113 if self._port.is_wpt_enabled() and self._port.is_wpt_test(test): | 105 if self._port.is_wpt_enabled() and self._port.is_wpt_test(test): |
| 114 return False | 106 return False |
| 115 | 107 |
| 116 return self.WEBSOCKET_SUBDIR in test | 108 return self.WEBSOCKET_SUBDIR in test |
| 117 | 109 |
| (...skipping 11 matching lines...) |
| 129 return tests_to_run, tests_to_skip | 121 return tests_to_run, tests_to_skip |
| 130 | 122 |
| 131 # Create a sorted list of test files so the subset chunk, | 123 # Create a sorted list of test files so the subset chunk, |
| 132 # if used, contains alphabetically consecutive tests. | 124 # if used, contains alphabetically consecutive tests. |
| 133 if self._options.order == 'natural': | 125 if self._options.order == 'natural': |
| 134 tests_to_run.sort(key=self._port.test_key) | 126 tests_to_run.sort(key=self._port.test_key) |
| 135 elif self._options.order == 'random': | 127 elif self._options.order == 'random': |
| 136 random.shuffle(tests_to_run) | 128 random.shuffle(tests_to_run) |
| 137 elif self._options.order == 'random-seeded': | 129 elif self._options.order == 'random-seeded': |
| 138 rnd = random.Random() | 130 rnd = random.Random() |
| 139 rnd.seed(4) # http://xkcd.com/221/ | 131 rnd.seed(4) # http://xkcd.com/221/ |
| 140 rnd.shuffle(tests_to_run) | 132 rnd.shuffle(tests_to_run) |
| 141 | 133 |
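The 'random-seeded' order is worth a note: because it shuffles through a private random.Random instance with a fixed seed, the order is reproducible across runs and machines. A small demonstration:

```python
import random

tests = ['fast/a.html', 'fast/b.html', 'http/tests/c.html']


def seeded_shuffle(items, seed=4):
    # A dedicated Random instance leaves the global random state
    # untouched and makes the shuffle deterministic.
    rnd = random.Random()
    rnd.seed(seed)
    shuffled = list(items)
    rnd.shuffle(shuffled)
    return shuffled


assert seeded_shuffle(tests) == seeded_shuffle(tests)  # same seed, same order
```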
| 142 tests_to_run, tests_in_other_chunks = self._finder.split_into_chunks(tests_to_run) | 134 tests_to_run, tests_in_other_chunks = self._finder.split_into_chunks(tests_to_run) |
| 143 self._expectations.add_extra_skipped_tests(tests_in_other_chunks) | 135 self._expectations.add_extra_skipped_tests(tests_in_other_chunks) |
| 144 tests_to_skip.update(tests_in_other_chunks) | 136 tests_to_skip.update(tests_in_other_chunks) |
| 145 | 137 |
| 146 return tests_to_run, tests_to_skip | 138 return tests_to_run, tests_to_skip |
| 147 | 139 |
| 148 def _test_input_for_file(self, test_file): | 140 def _test_input_for_file(self, test_file): |
| 149 return TestInput(test_file, | 141 return TestInput( |
| 142 test_file, |
| 150 self._options.slow_time_out_ms if self._test_is_slow(test_file) else self._options.time_out_ms, | 143 self._options.slow_time_out_ms if self._test_is_slow(test_file) else self._options.time_out_ms, |
| 151 self._test_requires_lock(test_file), | 144 self._test_requires_lock(test_file), |
| 152 should_add_missing_baselines=(self._options.new_test_results and not self._test_is_expected_missing(test_file))) | 145 should_add_missing_baselines=(self._options.new_test_results and not self._test_is_expected_missing(test_file))) |
| 153 | 146 |
| 154 def _test_requires_lock(self, test_file): | 147 def _test_requires_lock(self, test_file): |
| 155 """Return True if the test needs to be locked when | 148 """Return True if the test needs to be locked when |
| 156 running multiple copies of NRWTs. Perf tests are locked | 149 running multiple copies of NRWTs. Perf tests are locked |
| 157 because heavy load caused by running other tests in parallel | 150 because heavy load caused by running other tests in parallel |
| 158 might cause some of them to time out.""" | 151 might cause some of them to time out.""" |
| 159 return self._is_http_test(test_file) or self._is_perf_test(test_file) | 152 return self._is_http_test(test_file) or self._is_perf_test(test_file) |
| 160 | 153 |
| 161 def _test_is_expected_missing(self, test_file): | 154 def _test_is_expected_missing(self, test_file): |
| 162 expectations = self._expectations.model().get_expectations(test_file) | 155 expectations = self._expectations.model().get_expectations(test_file) |
| 163 return test_expectations.MISSING in expectations or test_expectations.NEEDS_REBASELINE in expectations or test_expectations.NEEDS_MANUAL_REBASELINE in expectations | 156 return test_expectations.MISSING in expectations or test_expectations.NEEDS_REBASELINE in expectations or test_expectations.NEEDS_MANUAL_REBASELINE in expectations |
| 164 | 157 |
| 165 def _test_is_slow(self, test_file): | 158 def _test_is_slow(self, test_file): |
| 166 return test_expectations.SLOW in self._expectations.model().get_expectations(test_file) | 159 return test_expectations.SLOW in self._expectations.model().get_expectations(test_file) |
| 167 | 160 |
| 168 def needs_servers(self, test_names): | 161 def needs_servers(self, test_names): |
| 169 return any(self._test_requires_lock(test_name) for test_name in test_names) | 162 return any(self._test_requires_lock(test_name) for test_name in test_names) |
| 170 | 163 |
| 171 def _rename_results_folder(self): | 164 def _rename_results_folder(self): |
| 172 try: | 165 try: |
| 173 timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html")))) | 166 timestamp = time.strftime( |
| 167 "%Y-%m-%d-%H-%M-%S", |
| 168 time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html")))) |
| 174 except (IOError, OSError), e: | 169 except (IOError, OSError), e: |
| 175 # It might be possible that results.html was not generated in previous run, because the test | 170 # It might be possible that results.html was not generated in previous run, because the test |
| 176 # run was interrupted even before testing started. In those cases, don't archive the folder. | 171 # run was interrupted even before testing started. In those cases, don't archive the folder. |
| 177 # Simply overwrite the current folder contents with new results. | 172 # Simply overwrite the current folder contents with new results. |
| 178 import errno | 173 import errno |
| 179 if e.errno == errno.EEXIST or e.errno == errno.ENOENT: | 174 if e.errno == errno.EEXIST or e.errno == errno.ENOENT: |
| 180 self._printer.write_update("No results.html file found in previo
us run, skipping it.") | 175 self._printer.write_update("No results.html file found in previo
us run, skipping it.") |
| 181 return None | 176 return None |
| 182 archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp)) | 177 archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp)) |
| 183 archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name) | 178 archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name) |
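The archiving scheme above derives the folder suffix from the mtime of results.html. A self-contained sketch of the same idea using the standard library directly (the real code goes through webkitpy's FileSystem wrapper):

```python
import os
import time


def archived_results_name(results_directory):
    # e.g. "layout-test-results_2016-01-05-12-30-59"; raises OSError if
    # results.html does not exist, which the caller treats as
    # "nothing to archive".
    mtime = os.path.getmtime(os.path.join(results_directory, 'results.html'))
    timestamp = time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(mtime))
    return os.path.basename(results_directory) + '_' + timestamp
```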
| (...skipping 81 matching lines...) |
| 265 should_retry_failures = len(paths) < len(test_names) | 260 should_retry_failures = len(paths) < len(test_names) |
| 266 else: | 261 else: |
| 267 should_retry_failures = self._options.retry_failures | 262 should_retry_failures = self._options.retry_failures |
| 268 | 263 |
| 269 enabled_pixel_tests_in_retry = False | 264 enabled_pixel_tests_in_retry = False |
| 270 try: | 265 try: |
| 271 self._start_servers(tests_to_run) | 266 self._start_servers(tests_to_run) |
| 272 | 267 |
| 273 num_workers = self._port.num_workers(int(self._options.child_processes)) | 268 num_workers = self._port.num_workers(int(self._options.child_processes)) |
| 274 | 269 |
| 275 initial_results = self._run_tests( | 270 initial_results = self._run_tests(tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations, |
| 276 tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations, | 271 num_workers) |
| 277 num_workers) | |
| 278 | 272 |
| 279 # Don't retry failures when interrupted by user or failures limit exception. | 273 # Don't retry failures when interrupted by user or failures limit exception. |
| 280 should_retry_failures = should_retry_failures and not (initial_results.interrupted or initial_results.keyboard_interrupted) | 274 should_retry_failures = should_retry_failures and not (initial_results.interrupted or |
| 275 initial_results.keyboard_interrupted) |
| 281 | 276 |
| 282 tests_to_retry = self._tests_to_retry(initial_results) | 277 tests_to_retry = self._tests_to_retry(initial_results) |
| 283 all_retry_results = [] | 278 all_retry_results = [] |
| 284 if should_retry_failures and tests_to_retry: | 279 if should_retry_failures and tests_to_retry: |
| 285 enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed
() | 280 enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed
() |
| 286 | 281 |
| 287 for retry_attempt in xrange(1, self._options.num_retries + 1): | 282 for retry_attempt in xrange(1, self._options.num_retries + 1): |
| 288 if not tests_to_retry: | 283 if not tests_to_retry: |
| 289 break | 284 break |
| 290 | 285 |
| 291 _log.info('') | 286 _log.info('') |
| 292 _log.info('Retrying %s, attempt %d of %d...' % | 287 _log.info('Retrying %s, attempt %d of %d...' % (grammar.pluralize('unexpected failure', len(tests_to_retry)), |
| 293 (grammar.pluralize('unexpected failure', len(tests_to_retry)), | 288 retry_attempt, self._options.num_retries)) |
| 294 retry_attempt, self._options.num_retries)) | |
| 295 | 289 |
| 296 retry_results = self._run_tests(tests_to_retry, | 290 retry_results = self._run_tests(tests_to_retry, |
| 297 tests_to_skip=set(), | 291 tests_to_skip=set(), |
| 298 repeat_each=1, | 292 repeat_each=1, |
| 299 iterations=1, | 293 iterations=1, |
| 300 num_workers=num_workers, | 294 num_workers=num_workers, |
| 301 retry_attempt=retry_attempt) | 295 retry_attempt=retry_attempt) |
| 302 all_retry_results.append(retry_results) | 296 all_retry_results.append(retry_results) |
| 303 | 297 |
| 304 tests_to_retry = self._tests_to_retry(retry_results) | 298 tests_to_retry = self._tests_to_retry(retry_results) |
| 305 | 299 |
| 306 if enabled_pixel_tests_in_retry: | 300 if enabled_pixel_tests_in_retry: |
| 307 self._options.pixel_tests = False | 301 self._options.pixel_tests = False |
| 308 finally: | 302 finally: |
| 309 self._stop_servers() | 303 self._stop_servers() |
| 310 self._clean_up_run() | 304 self._clean_up_run() |
| 311 | 305 |
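The retry loop above narrows the retried set on each pass: only tests that still fail unexpectedly are run again. A condensed sketch of that policy, with a hypothetical run_once callable standing in for _run_tests plus _tests_to_retry:

```python
def run_with_retries(run_once, tests, num_retries):
    # run_once(tests) runs the given tests and returns the names of the
    # tests that still failed unexpectedly.
    all_retry_results = []
    tests_to_retry = run_once(tests)
    for retry_attempt in range(1, num_retries + 1):
        if not tests_to_retry:
            break  # everything passed on an earlier attempt
        tests_to_retry = run_once(tests_to_retry)
        all_retry_results.append(tests_to_retry)
    return all_retry_results
```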
| 312 # Some crash logs can take a long time to be written out so look | 306 # Some crash logs can take a long time to be written out so look |
| 313 # for new logs after the test run finishes. | 307 # for new logs after the test run finishes. |
| 314 self._printer.write_update("looking for new crash logs") | 308 self._printer.write_update("looking for new crash logs") |
| 315 self._look_for_new_crash_logs(initial_results, start_time) | 309 self._look_for_new_crash_logs(initial_results, start_time) |
| 316 for retry_attempt_results in all_retry_results: | 310 for retry_attempt_results in all_retry_results: |
| 317 self._look_for_new_crash_logs(retry_attempt_results, start_time) | 311 self._look_for_new_crash_logs(retry_attempt_results, start_time) |
| 318 | 312 |
| 319 _log.debug("summarizing results") | 313 _log.debug("summarizing results") |
| 320 summarized_full_results = test_run_results.summarize_results( | 314 summarized_full_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, |
| 321 self._port, self._expectations, initial_results, all_retry_results, | 315 all_retry_results, enabled_pixel_tests_in_retry) |
| 322 enabled_pixel_tests_in_retry) | 316 summarized_failing_results = test_run_results.summarize_results(self._port, |
| 323 summarized_failing_results = test_run_results.summarize_results( | 317 self._expectations, |
| 324 self._port, self._expectations, initial_results, all_retry_results, | 318 initial_results, |
| 325 enabled_pixel_tests_in_retry, only_include_failing=True) | 319 all_retry_results, |
| 320 enabled_pixel_tests_in_retry, |
| 321 only_include_failing=True) |
| 326 | 322 |
| 327 exit_code = summarized_failing_results['num_regressions'] | 323 exit_code = summarized_failing_results['num_regressions'] |
| 328 if exit_code > test_run_results.MAX_FAILURES_EXIT_STATUS: | 324 if exit_code > test_run_results.MAX_FAILURES_EXIT_STATUS: |
| 329 _log.warning('num regressions (%d) exceeds max exit status (%d)' % | 325 _log.warning('num regressions (%d) exceeds max exit status (%d)' % |
| 330 (exit_code, test_run_results.MAX_FAILURES_EXIT_STATUS)) | 326 (exit_code, test_run_results.MAX_FAILURES_EXIT_STATUS)) |
| 331 exit_code = test_run_results.MAX_FAILURES_EXIT_STATUS | 327 exit_code = test_run_results.MAX_FAILURES_EXIT_STATUS |
| 332 | 328 |
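Capping the exit code matters because shell exit statuses are a single byte, and values above MAX_FAILURES_EXIT_STATUS are reserved for special conditions such as interruption and early exit. A one-line sketch of the clamp, with an assumed constant value:

```python
MAX_FAILURES_EXIT_STATUS = 101  # assumed value; the real constant lives in test_run_results


def exit_code_for(num_regressions):
    # Report the regression count directly, but never exceed the cap.
    return min(num_regressions, MAX_FAILURES_EXIT_STATUS)


assert exit_code_for(3) == 3
assert exit_code_for(5000) == MAX_FAILURES_EXIT_STATUS
```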
| 333 if not self._options.dry_run: | 329 if not self._options.dry_run: |
| 334 self._write_json_files(summarized_full_results, summarized_failing_results, initial_results, running_all_tests) | 330 self._write_json_files(summarized_full_results, summarized_failing_results, initial_results, running_all_tests) |
| 335 | 331 |
| 336 if self._options.write_full_results_to: | 332 if self._options.write_full_results_to: |
| 337 self._filesystem.copyfile(self._filesystem.join(self._results_directory, "full_results.json"), | 333 self._filesystem.copyfile( |
| 338 self._options.write_full_results_to) | 334 self._filesystem.join(self._results_directory, "full_results.json"), self._options.write_full_results_to) |
| 339 | 335 |
| 340 self._upload_json_files() | 336 self._upload_json_files() |
| 341 | 337 |
| 342 results_path = self._filesystem.join(self._results_directory, "results.html") | 338 results_path = self._filesystem.join(self._results_directory, "results.html") |
| 343 self._copy_results_html_file(results_path) | 339 self._copy_results_html_file(results_path) |
| 344 if initial_results.keyboard_interrupted: | 340 if initial_results.keyboard_interrupted: |
| 345 exit_code = test_run_results.INTERRUPTED_EXIT_STATUS | 341 exit_code = test_run_results.INTERRUPTED_EXIT_STATUS |
| 346 else: | 342 else: |
| 347 if initial_results.interrupted: | 343 if initial_results.interrupted: |
| 348 exit_code = test_run_results.EARLY_EXIT_STATUS | 344 exit_code = test_run_results.EARLY_EXIT_STATUS |
| 349 if self._options.show_results and (exit_code or (self._options.full_results_html and initial_results.total_failures)): | 345 if self._options.show_results and (exit_code or |
| 346 (self._options.full_results_html and initial_results.total_failures)): |
| 350 self._port.show_results_html_file(results_path) | 347 self._port.show_results_html_file(results_path) |
| 351 self._printer.print_results(time.time() - start_time, initial_results, summarized_failing_results) | 348 self._printer.print_results(time.time() - start_time, initial_results, summarized_failing_results) |
| 352 | 349 |
| 353 self._check_for_stale_w3c_dir() | 350 self._check_for_stale_w3c_dir() |
| 354 | 351 |
| 355 return test_run_results.RunDetails( | 352 return test_run_results.RunDetails(exit_code, summarized_full_results, summarized_failing_results, initial_results, |
| 356 exit_code, summarized_full_results, summarized_failing_results, | 353 all_retry_results, enabled_pixel_tests_in_retry) |
| 357 initial_results, all_retry_results, enabled_pixel_tests_in_retry) | |
| 358 | 354 |
| 359 def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations, | 355 def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations, num_workers, retry_attempt=0): |
| 360 num_workers, retry_attempt=0): | |
| 361 | 356 |
| 362 test_inputs = [] | 357 test_inputs = [] |
| 363 for _ in xrange(iterations): | 358 for _ in xrange(iterations): |
| 364 for test in tests_to_run: | 359 for test in tests_to_run: |
| 365 for _ in xrange(repeat_each): | 360 for _ in xrange(repeat_each): |
| 366 test_inputs.append(self._test_input_for_file(test)) | 361 test_inputs.append(self._test_input_for_file(test)) |
| 367 return self._runner.run_tests(self._expectations, test_inputs, | 362 return self._runner.run_tests(self._expectations, test_inputs, tests_to_skip, num_workers, retry_attempt) |
| 368 tests_to_skip, num_workers, retry_attempt) | |
| 369 | 363 |
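The nesting order in _run_tests determines how repeats interleave: iterations repeats the whole suite, while repeat_each repeats each individual test back-to-back. A worked example:

```python
tests = ['a', 'b']
iterations, repeat_each = 2, 3

test_inputs = []
for _ in range(iterations):
    for test in tests:
        for _ in range(repeat_each):
            test_inputs.append(test)

# Each test repeats back-to-back, then the whole suite repeats.
assert test_inputs == ['a', 'a', 'a', 'b', 'b', 'b'] * 2
```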
| 370 def _start_servers(self, tests_to_run): | 364 def _start_servers(self, tests_to_run): |
| 371 if self._port.is_wpt_enabled() and any(self._port.is_wpt_test(test) for test in tests_to_run): | 365 if self._port.is_wpt_enabled() and any(self._port.is_wpt_test(test) for test in tests_to_run): |
| 372 self._printer.write_update('Starting WPTServe ...') | 366 self._printer.write_update('Starting WPTServe ...') |
| 373 self._port.start_wptserve() | 367 self._port.start_wptserve() |
| 374 self._wptserve_started = True | 368 self._wptserve_started = True |
| 375 | 369 |
| 376 if self._port.requires_http_server() or any((self._is_http_test(test) or self._is_inspector_test(test)) for test in tests_to_run): | 370 if self._port.requires_http_server() or any((self._is_http_test(test) or self._is_inspector_test(test)) |
| 371 for test in tests_to_run): |
| 377 self._printer.write_update('Starting HTTP server ...') | 372 self._printer.write_update('Starting HTTP server ...') |
| 378 self._port.start_http_server(additional_dirs={}, number_of_drivers=self._options.max_locked_shards) | 373 self._port.start_http_server(additional_dirs={}, number_of_drivers=self._options.max_locked_shards) |
| 379 self._http_server_started = True | 374 self._http_server_started = True |
| 380 | 375 |
| 381 if any(self._is_websocket_test(test) for test in tests_to_run): | 376 if any(self._is_websocket_test(test) for test in tests_to_run): |
| 382 self._printer.write_update('Starting WebSocket server ...') | 377 self._printer.write_update('Starting WebSocket server ...') |
| 383 self._port.start_websocket_server() | 378 self._port.start_websocket_server() |
| 384 self._websockets_server_started = True | 379 self._websockets_server_started = True |
| 385 | 380 |
| 386 def _stop_servers(self): | 381 def _stop_servers(self): |
| (...skipping 82 matching lines...) |
| 469 if self._filesystem.isdir(file_path) and self._results_directory in file_path: | 464 if self._filesystem.isdir(file_path) and self._results_directory in file_path: |
| 470 results_directories.append(file_path) | 465 results_directories.append(file_path) |
| 471 self._delete_dirs(results_directories) | 466 self._delete_dirs(results_directories) |
| 472 | 467 |
| 473 # Port specific clean-up. | 468 # Port specific clean-up. |
| 474 self._port.clobber_old_port_specific_results() | 469 self._port.clobber_old_port_specific_results() |
| 475 | 470 |
| 476 def _tests_to_retry(self, run_results): | 471 def _tests_to_retry(self, run_results): |
| 477 # TODO(ojan): This should also check that result.type != test_expectations.MISSING since retrying missing expectations is silly. | 472 # TODO(ojan): This should also check that result.type != test_expectations.MISSING since retrying missing expectations is silly. |
| 478 # But that's a bit tricky since we only consider the last retry attempt for the count of unexpected regressions. | 473 # But that's a bit tricky since we only consider the last retry attempt for the count of unexpected regressions. |
| 479 return [result.test_name for result in run_results.unexpected_results_by_name.values() if result.type != test_expectations.PASS] | 474 return [result.test_name |
| 475 for result in run_results.unexpected_results_by_name.values() if result.type != test_expectations.PASS] |
| 480 | 476 |
| 481 def _write_json_files(self, summarized_full_results, summarized_failing_results, initial_results, running_all_tests): | 477 def _write_json_files(self, summarized_full_results, summarized_failing_results, initial_results, running_all_tests): |
| 482 _log.debug("Writing JSON files in %s." % self._results_directory) | 478 _log.debug("Writing JSON files in %s." % self._results_directory) |
| 483 | 479 |
| 484 # FIXME: Upload stats.json to the server and delete times_ms. | 480 # FIXME: Upload stats.json to the server and delete times_ms. |
| 485 times_trie = json_results_generator.test_timings_trie(initial_results.results_by_name.values()) | 481 times_trie = json_results_generator.test_timings_trie(initial_results.results_by_name.values()) |
| 486 times_json_path = self._filesystem.join(self._results_directory, "times_ms.json") | 482 times_json_path = self._filesystem.join(self._results_directory, "times_ms.json") |
| 487 json_results_generator.write_json(self._filesystem, times_trie, times_json_path) | 483 json_results_generator.write_json(self._filesystem, times_trie, times_json_path) |
| 488 | 484 |
| 489 # Save out the times data so we can use it for --fastest in the future. | 485 # Save out the times data so we can use it for --fastest in the future. |
| (...skipping 17 matching lines...) |
| 507 | 503 |
| 508 def _upload_json_files(self): | 504 def _upload_json_files(self): |
| 509 if not self._options.test_results_server: | 505 if not self._options.test_results_server: |
| 510 return | 506 return |
| 511 | 507 |
| 512 if not self._options.master_name: | 508 if not self._options.master_name: |
| 513 _log.error("--test-results-server was set, but --master-name was not
. Not uploading JSON files.") | 509 _log.error("--test-results-server was set, but --master-name was not
. Not uploading JSON files.") |
| 514 return | 510 return |
| 515 | 511 |
| 516 _log.debug("Uploading JSON files for builder: %s", self._options.builder
_name) | 512 _log.debug("Uploading JSON files for builder: %s", self._options.builder
_name) |
| 517 attrs = [("builder", self._options.builder_name), | 513 attrs = [("builder", self._options.builder_name), ("testtype", self._opt
ions.step_name), |
| 518 ("testtype", self._options.step_name), | |
| 519 ("master", self._options.master_name)] | 514 ("master", self._options.master_name)] |
| 520 | 515 |
| 521 files = [(file, self._filesystem.join(self._results_directory, file)) for file in ["failing_results.json", "full_results.json", "times_ms.json"]] | 516 files = [(file, self._filesystem.join(self._results_directory, file)) |
| 517 for file in ["failing_results.json", "full_results.json", "times_ms.json"]] |
| 522 | 518 |
| 523 url = "http://%s/testfile/upload" % self._options.test_results_server | 519 url = "http://%s/testfile/upload" % self._options.test_results_server |
| 524 # Set uploading timeout in case appengine server is having problems. | 520 # Set uploading timeout in case appengine server is having problems. |
| 525 # 120 seconds are more than enough to upload test results. | 521 # 120 seconds are more than enough to upload test results. |
| 526 uploader = FileUploader(url, 120) | 522 uploader = FileUploader(url, 120) |
| 527 try: | 523 try: |
| 528 response = uploader.upload_as_multipart_form_data(self._filesystem, files, attrs) | 524 response = uploader.upload_as_multipart_form_data(self._filesystem, files, attrs) |
| 529 if response: | 525 if response: |
| 530 if response.code == 200: | 526 if response.code == 200: |
| 531 _log.debug("JSON uploaded.") | 527 _log.debug("JSON uploaded.") |
| (...skipping 12 matching lines...) |
| 544 if self._filesystem.exists(results_file): | 540 if self._filesystem.exists(results_file): |
| 545 self._filesystem.copyfile(results_file, destination_path) | 541 self._filesystem.copyfile(results_file, destination_path) |
| 546 | 542 |
| 547 def _stats_trie(self, initial_results): | 543 def _stats_trie(self, initial_results): |
| 548 def _worker_number(worker_name): | 544 def _worker_number(worker_name): |
| 549 return int(worker_name.split('/')[1]) if worker_name else -1 | 545 return int(worker_name.split('/')[1]) if worker_name else -1 |
| 550 | 546 |
| 551 stats = {} | 547 stats = {} |
| 552 for result in initial_results.results_by_name.values(): | 548 for result in initial_results.results_by_name.values(): |
| 553 if result.type != test_expectations.SKIP: | 549 if result.type != test_expectations.SKIP: |
| 554 stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))} | 550 stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int( |
| 551 result.test_run_time * 1000), int(result.total_run_time * 1000))} |
| 555 stats_trie = {} | 552 stats_trie = {} |
| 556 for name, value in stats.iteritems(): | 553 for name, value in stats.iteritems(): |
| 557 json_results_generator.add_path_to_trie(name, value, stats_trie) | 554 json_results_generator.add_path_to_trie(name, value, stats_trie) |
| 558 return stats_trie | 555 return stats_trie |
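_stats_trie converts flat test names into a nested dict keyed by path component. A minimal re-implementation of the trie insertion, shown here as a hypothetical stand-in for json_results_generator.add_path_to_trie:

```python
def add_path_to_trie(path, value, trie):
    # Split off the first path component; recurse until only the leaf
    # (the file name) remains, then store the value there.
    directory, _, rest = path.partition('/')
    if not rest:
        trie[directory] = value
        return
    trie.setdefault(directory, {})
    add_path_to_trie(rest, value, trie[directory])


trie = {}
add_path_to_trie('fast/css/color.html', {'results': (0, 1, 1234, 5, 7)}, trie)
assert trie == {'fast': {'css': {'color.html': {'results': (0, 1, 1234, 5, 7)}}}}
```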