Index: third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/views/printing.py
diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/views/printing.py b/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/views/printing.py
index c6d163c2adfb10bcdca3a6732ad85ffba279d779..ba085aed90e8e7407037f4da4687e7c821b2cf27 100644
--- a/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/views/printing.py
+++ b/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/views/printing.py
@@ -25,7 +25,6 @@
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
 """Package that handles non-debug, non-file output for run-webkit-tests."""
 
 import math
@@ -36,21 +35,32 @@ from webkitpy.layout_tests.models import test_expectations
 from webkitpy.layout_tests.models.test_expectations import TestExpectations, TestExpectationParser
 from webkitpy.layout_tests.views.metered_stream import MeteredStream
 
-
 NUM_SLOW_TESTS_TO_LOG = 10
 
 
 def print_options():
     return [
-        optparse.make_option('--debug-rwt-logging', action='store_true', default=False,
+        optparse.make_option('--debug-rwt-logging',
+                             action='store_true',
+                             default=False,
                              help='print timestamps and debug information for run-webkit-tests itself'),
-        optparse.make_option('--details', action='store_true', default=False,
+        optparse.make_option('--details',
+                             action='store_true',
+                             default=False,
                              help='print detailed results for every test'),
-        optparse.make_option('-q', '--quiet', action='store_true', default=False,
+        optparse.make_option('-q',
+                             '--quiet',
+                             action='store_true',
+                             default=False,
                              help='run quietly (errors, warnings, and progress only)'),
-        optparse.make_option('--timing', action='store_true', default=False,
+        optparse.make_option('--timing',
+                             action='store_true',
+                             default=False,
                              help='display test times (summary plus per-test w/ --verbose)'),
-        optparse.make_option('-v', '--verbose', action='store_true', default=False,
+        optparse.make_option('-v',
+                             '--verbose',
+                             action='store_true',
+                             default=False,
                              help='print a summarized result for every test (one line per test)'),
     ]
@@ -63,7 +73,9 @@ class Printer(object):
         self.num_tests = 0
         self._port = port
         self._options = options
-        self._meter = MeteredStream(regular_output, options.debug_rwt_logging, logger=logger,
+        self._meter = MeteredStream(regular_output,
+                                    options.debug_rwt_logging,
+                                    logger=logger,
                                     number_of_columns=self._port.host.platform.terminal_width())
         self._running_tests = []
         self._completed_tests = []
@@ -95,7 +107,7 @@ class Printer(object):
             self._print_default("Pixel tests disabled")
 
         self._print_default("Regular timeout: %s, slow test timeout: %s" %
-                           (self._options.time_out_ms, self._options.slow_time_out_ms))
+                            (self._options.time_out_ms, self._options.slow_time_out_ms))
 
         self._print_default('Command line: ' + ' '.join(self._port.driver_cmd_line()))
         self._print_default('')
@@ -130,8 +142,7 @@ class Printer(object):
         # We use a fancy format string in order to print the data out in a
         # nicely-aligned table.
-        fmtstr = ("Expect: %%5d %%-8s (%%%dd now, %%%dd wontfix)"
-                  % (self._num_digits(now), self._num_digits(wontfix)))
+        fmtstr = ("Expect: %%5d %%-8s (%%%dd now, %%%dd wontfix)" % (self._num_digits(now), self._num_digits(wontfix)))
         self._print_debug(fmtstr % (len(tests), result_type_str, len(tests & now), len(tests & wontfix)))
 
     def _num_digits(self, num):
@@ -165,7 +176,8 @@ class Printer(object):
             cuml_time += result.total_run_time
 
         for worker_name in stats:
-            self._print_debug("    %10s: %5d tests, %6.2f secs" % (worker_name, stats[worker_name]['num_tests'], stats[worker_name]['total_time']))
+            self._print_debug("    %10s: %5d tests, %6.2f secs" %
+                              (worker_name, stats[worker_name]['num_tests'], stats[worker_name]['total_time']))
         self._print_debug("   %6.2f cumulative, %6.2f optimal" % (cuml_time, cuml_time / num_workers))
         self._print_debug("")
@@ -176,7 +188,9 @@ class Printer(object):
 
     def _print_individual_test_times(self, run_results):
         # Reverse-sort by the time spent in the driver.
-        individual_test_timings = sorted(run_results.results_by_name.values(), key=lambda result: result.test_run_time, reverse=True)
+        individual_test_timings = sorted(run_results.results_by_name.values(),
+                                         key=lambda result: result.test_run_time,
+                                         reverse=True)
         num_printed = 0
         slow_tests = []
         timeout_or_crash_tests = []
@@ -190,8 +204,7 @@ class Printer(object):
 
             if test_name in run_results.failures_by_name:
                 result = run_results.results_by_name[test_name].type
-                if (result == test_expectations.TIMEOUT or
-                        result == test_expectations.CRASH):
+                if (result == test_expectations.TIMEOUT or result == test_expectations.CRASH):
                     is_timeout_crash_or_slow = True
                     timeout_or_crash_tests.append(test_tuple)
@@ -202,7 +215,7 @@ class Printer(object):
         self._print_debug("")
         if unexpected_slow_tests:
             self._print_test_list_timing("%s slowest tests that are not marked as SLOW and did not timeout/crash:" %
-                                        NUM_SLOW_TESTS_TO_LOG, unexpected_slow_tests)
+                                         NUM_SLOW_TESTS_TO_LOG, unexpected_slow_tests)
             self._print_debug("")
 
         if slow_tests:
@@ -302,7 +315,8 @@ class Printer(object):
         expected_summary_str = ''
         if run_results.expected_failures > 0:
-            expected_summary_str = " (%d passed, %d didn't)" % (expected - run_results.expected_failures, run_results.expected_failures)
+            expected_summary_str = " (%d passed, %d didn't)" % (expected - run_results.expected_failures,
+                                                                run_results.expected_failures)
 
         summary = ''
         if unexpected == 0:
@@ -312,9 +326,11 @@ class Printer(object):
                 else:
                     summary = "The test ran as expected%s%s." % (expected_summary_str, timing_summary)
             else:
-                summary = "%s ran as expected%s%s%s." % (grammar.pluralize('test', expected), expected_summary_str, incomplete_str, timing_summary)
+                summary = "%s ran as expected%s%s%s." % (grammar.pluralize('test', expected), expected_summary_str, incomplete_str,
+                                                         timing_summary)
         else:
-            summary = "%s ran as expected%s, %d didn't%s%s:" % (grammar.pluralize('test', expected), expected_summary_str, unexpected, incomplete_str, timing_summary)
+            summary = "%s ran as expected%s, %d didn't%s%s:" % (grammar.pluralize('test', expected), expected_summary_str,
+                                                                unexpected, incomplete_str, timing_summary)
 
         self._print_quiet(summary)
         self._print_quiet("")
@@ -351,8 +367,7 @@ class Printer(object):
         self.num_completed += 1
         test_name = result.test_name
-        result_message = self._result_message(result.type, result.failures, expected,
-                                              self._options.timing, result.test_run_time)
+        result_message = self._result_message(result.type, result.failures, expected, self._options.timing, result.test_run_time)
 
         if self._options.details:
             self._print_test_trace(result, exp_str, got_str)
@@ -397,7 +412,7 @@ class Printer(object):
             self._print_default('  ref: %s' % self._port.relative_test_filename(filename))
         else:
             for extension in ('.txt', '.png', '.wav'):
-                    self._print_baseline(test_name, extension)
+                self._print_baseline(test_name, extension)
 
         self._print_default('  exp: %s' % exp_str)
         self._print_default('  got: %s' % got_str)