OLD | NEW |
---|---|
1 # Copyright (C) 2010 Google Inc. All rights reserved. | 1 # Copyright (C) 2010 Google Inc. All rights reserved. |
2 # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged | 2 # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged |
3 # | 3 # |
4 # Redistribution and use in source and binary forms, with or without | 4 # Redistribution and use in source and binary forms, with or without |
5 # modification, are permitted provided that the following conditions are | 5 # modification, are permitted provided that the following conditions are |
6 # met: | 6 # met: |
7 # | 7 # |
8 # * Redistributions of source code must retain the above copyright | 8 # * Redistributions of source code must retain the above copyright |
9 # notice, this list of conditions and the following disclaimer. | 9 # notice, this list of conditions and the following disclaimer. |
10 # * Redistributions in binary form must reproduce the above | 10 # * Redistributions in binary form must reproduce the above |
(...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
148 | 148 |
149 def _test_is_slow(self, test_file): | 149 def _test_is_slow(self, test_file): |
150 return test_expectations.SLOW in self._expectations.model().get_expectations(test_file) | 150 return test_expectations.SLOW in self._expectations.model().get_expectations(test_file) |
151 | 151 |
152 def needs_servers(self, test_names): | 152 def needs_servers(self, test_names): |
153 return any(self._test_requires_lock(test_name) for test_name in test_names) | 153 return any(self._test_requires_lock(test_name) for test_name in test_names) |
154 | 154 |
155 def _rename_results_folder(self): | 155 def _rename_results_folder(self): |
156 try: | 156 try: |
157 timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html")))) | 157 timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html")))) |
158 except OSError, e: | 158 except (IOError, OSError), e: |
159 # It might be possible that results.html was not generated in previous run, because the test | 159 # It might be possible that results.html was not generated in previous run, because the test |
160 # run was interrupted even before testing started. In those cases, don't archive the folder. | 160 # run was interrupted even before testing started. In those cases, don't archive the folder. |
161 # Simply override the current folder contents with new results. | 161 # Simply override the current folder contents with new results. |
162 import errno | 162 import errno |
163 if e.errno == errno.EEXIST: | 163 if e.errno == errno.EEXIST or e.errno == errno.ENOENT: |
164 _log.warning("No results.html file found in previous run, skipping it.") | 164 _log.warning("No results.html file found in previous run, skipping it.") |
165 return None | 165 return None |
166 archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp)) | 166 archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp)) |
167 archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name) | 167 archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name) |
168 self._filesystem.move(self._results_directory, archived_path) | 168 self._filesystem.move(self._results_directory, archived_path) |
169 | 169 |
170 def _clobber_old_archived_results(self): | 170 def _delete_dirs(self, dir_list): |
171 for dir in dir_list: | |
172 self._filesystem.rmtree(dir) | |
173 | |
174 def _limit_archived_results_count(self): | |
171 results_directory_path = self._filesystem.dirname(self._results_directory) | 175 results_directory_path = self._filesystem.dirname(self._results_directory) |
172 file_list = self._filesystem.listdir(results_directory_path) | 176 file_list = self._filesystem.listdir(results_directory_path) |
173 results_directories = [] | 177 results_directories = [] |
174 for dir in file_list: | 178 for dir in file_list: |
175 file_path = self._filesystem.join(results_directory_path, dir) | 179 file_path = self._filesystem.join(results_directory_path, dir) |
176 if self._filesystem.isdir(file_path): | 180 if self._filesystem.isdir(file_path) and self._results_directory in file_path: |
177 results_directories.append(file_path) | 181 results_directories.append(file_path) |
178 results_directories.sort(key=lambda x: self._filesystem.mtime(x)) | 182 results_directories.sort(key=lambda x: self._filesystem.mtime(x)) |
179 self._printer.write_update("Clobbering old archived results in %s" % results_directory_path) | 183 self._printer.write_update("Clobbering excess archived results in %s" % results_directory_path) |
180 for dir in results_directories[:-self.ARCHIVED_RESULTS_LIMIT]: | 184 self._delete_dirs(results_directories[:-self.ARCHIVED_RESULTS_LIMIT]) |
181 self._filesystem.rmtree(dir) | |
182 | 185 |
183 def _set_up_run(self, test_names): | 186 def _set_up_run(self, test_names): |
184 self._printer.write_update("Checking build ...") | 187 self._printer.write_update("Checking build ...") |
185 if self._options.build: | 188 if self._options.build: |
186 exit_code = self._port.check_build(self.needs_servers(test_names), self._printer) | 189 exit_code = self._port.check_build(self.needs_servers(test_names), self._printer) |
187 if exit_code: | 190 if exit_code: |
188 _log.error("Build check failed") | 191 _log.error("Build check failed") |
189 return exit_code | 192 return exit_code |
190 | 193 |
191 # This must be started before we check the system dependencies, | 194 # This must be started before we check the system dependencies, |
192 # since the helper may do things to make the setup correct. | 195 # since the helper may do things to make the setup correct. |
193 if self._options.pixel_tests: | 196 if self._options.pixel_tests: |
194 self._printer.write_update("Starting pixel test helper ...") | 197 self._printer.write_update("Starting pixel test helper ...") |
195 self._port.start_helper() | 198 self._port.start_helper() |
196 | 199 |
197 # Check that the system dependencies (themes, fonts, ...) are correct. | 200 # Check that the system dependencies (themes, fonts, ...) are correct. |
198 if not self._options.nocheck_sys_deps: | 201 if not self._options.nocheck_sys_deps: |
199 self._printer.write_update("Checking system dependencies ...") | 202 self._printer.write_update("Checking system dependencies ...") |
200 exit_code = self._port.check_sys_deps(self.needs_servers(test_names)) | 203 exit_code = self._port.check_sys_deps(self.needs_servers(test_names)) |
201 if exit_code: | 204 if exit_code: |
202 self._port.stop_helper() | 205 self._port.stop_helper() |
203 return exit_code | 206 return exit_code |
204 | 207 |
205 if self._options.enable_versioned_results and self._filesystem.exists(self._results_directory): | 208 if self._options.clobber_old_results: |
206 if self._options.clobber_old_results: | 209 self._clobber_old_results() |
207 _log.warning("Flag --enable_versioned_results overrides --clobber-old-results.") | 210 elif self._filesystem.exists(self._results_directory): |
208 self._clobber_old_archived_results() | 211 _log.error('exists') |
212 self._limit_archived_results_count() | |
209 # Rename the existing results folder for archiving. | 213 # Rename the existing results folder for archiving. |
210 self._rename_results_folder() | 214 self._rename_results_folder() |
211 elif self._options.clobber_old_results: | |
212 self._clobber_old_results() | |
213 | 215 |
214 # Create the output directory if it doesn't already exist. | 216 # Create the output directory if it doesn't already exist. |
215 self._port.host.filesystem.maybe_make_directory(self._results_directory) | 217 self._port.host.filesystem.maybe_make_directory(self._results_directory) |
216 | 218 |
217 self._port.setup_test_run() | 219 self._port.setup_test_run() |
218 return test_run_results.OK_EXIT_STATUS | 220 return test_run_results.OK_EXIT_STATUS |
219 | 221 |
220 def run(self, args): | 222 def run(self, args): |
221 """Run the tests and return a RunDetails object with the results.""" | 223 """Run the tests and return a RunDetails object with the results.""" |
222 start_time = time.time() | 224 start_time = time.time() |
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
388 writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test) | 390 writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test) |
389 writer.copy_sample_file(sample_file) | 391 writer.copy_sample_file(sample_file) |
390 | 392 |
391 crash_logs = self._port.look_for_new_crash_logs(crashed_processes, start _time) | 393 crash_logs = self._port.look_for_new_crash_logs(crashed_processes, start _time) |
392 if crash_logs: | 394 if crash_logs: |
393 for test, crash_log in crash_logs.iteritems(): | 395 for test, crash_log in crash_logs.iteritems(): |
394 writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test) | 396 writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test) |
395 writer.write_crash_log(crash_log) | 397 writer.write_crash_log(crash_log) |
396 | 398 |
397 def _clobber_old_results(self): | 399 def _clobber_old_results(self): |
398 # Just clobber the actual test results directories since the other | 400 results_directory_path = self._filesystem.dirname(self._results_directory) |
Dirk Pranke
2014/09/05 21:07:31
Looking at this again, I'd probably call this some
patro
2014/09/06 06:50:55
Done.
| |
399 # files in the results directory are explicitly used for cross-run | 401 self._printer.write_update("Clobbering old results in %s" % results_directory_path) |
400 # tracking. | 402 if not self._filesystem.exists(results_directory_path): |
401 self._printer.write_update("Clobbering old results in %s" % | 403 return |
402 self._results_directory) | 404 file_list = self._filesystem.listdir(results_directory_path) |
403 layout_tests_dir = self._port.layout_tests_dir() | 405 results_directories = [] |
404 possible_dirs = self._port.test_dirs() | 406 for dir in file_list: |
405 for dirname in possible_dirs: | 407 file_path = self._filesystem.join(results_directory_path, dir) |
406 if self._filesystem.isdir(self._filesystem.join(layout_tests_dir, dirname)): | 408 if self._filesystem.isdir(file_path) and self._results_directory in file_path: |
407 self._filesystem.rmtree(self._filesystem.join(self._results_directory, dirname)) | 409 results_directories.append(file_path) |
410 self._delete_dirs(results_directories) | |
408 | 411 |
409 # Port specific clean-up. | 412 # Port specific clean-up. |
410 self._port.clobber_old_port_specific_results() | 413 self._port.clobber_old_port_specific_results() |
411 | 414 |
412 def _tests_to_retry(self, run_results): | 415 def _tests_to_retry(self, run_results): |
413 return [result.test_name for result in run_results.unexpected_results_by_name.values() if result.type != test_expectations.PASS] | 416 return [result.test_name for result in run_results.unexpected_results_by_name.values() if result.type != test_expectations.PASS] |
414 | 417 |
415 def _write_json_files(self, summarized_full_results, summarized_failing_results, initial_results): | 418 def _write_json_files(self, summarized_full_results, summarized_failing_results, initial_results): |
416 _log.debug("Writing JSON files in %s." % self._results_directory) | 419 _log.debug("Writing JSON files in %s." % self._results_directory) |
417 | 420 |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
477 return int(worker_name.split('/')[1]) if worker_name else -1 | 480 return int(worker_name.split('/')[1]) if worker_name else -1 |
478 | 481 |
479 stats = {} | 482 stats = {} |
480 for result in initial_results.results_by_name.values(): | 483 for result in initial_results.results_by_name.values(): |
481 if result.type != test_expectations.SKIP: | 484 if result.type != test_expectations.SKIP: |
482 stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))} | 485 stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))} |
483 stats_trie = {} | 486 stats_trie = {} |
484 for name, value in stats.iteritems(): | 487 for name, value in stats.iteritems(): |
485 json_results_generator.add_path_to_trie(name, value, stats_trie) | 488 json_results_generator.add_path_to_trie(name, value, stats_trie) |
486 return stats_trie | 489 return stats_trie |
OLD | NEW |