Chromium Code Reviews

Index: content/test/gpu/gpu_tests/gpu_test_base.py
diff --git a/content/test/gpu/gpu_tests/gpu_test_base.py b/content/test/gpu/gpu_tests/gpu_test_base.py
index b935998653db6e9f8d4021b643d102f48daac3d0..fe08874832ab1e3a92bc99287da34e12c7e7efd5 100644
--- a/content/test/gpu/gpu_tests/gpu_test_base.py
+++ b/content/test/gpu/gpu_tests/gpu_test_base.py
@@ -5,20 +5,18 @@
 import logging
 
 from telemetry import benchmark as benchmark_module
-from telemetry.core import exceptions
 from telemetry.page import page as page_module
 from telemetry.page import page_test
 from telemetry.page import shared_page_state
 from telemetry.testing import fakes
-from telemetry.value import skip
 
-import exception_formatter
-import gpu_test_expectations
-
-"""Base classes for all GPU tests in this directory. Implements
-support for per-page test expectations."""
+from gpu_tests import exception_formatter
+from gpu_tests import gpu_test_expectations
 
 class TestBase(benchmark_module.Benchmark):
+  """Base classes for all GPU tests in this directory. Implements
+  support for per-page test expectations."""
+
   def __init__(self, max_failures=None):
     super(TestBase, self).__init__(max_failures=max_failures)
     self._cached_expectations = None
@@ -52,24 +50,26 @@ class ValidatorBase(page_test.PageTest):
         needs_browser_restart_after_each_page,
       clear_cache_before_each_run=clear_cache_before_each_run)
 
+  def ValidateAndMeasurePage(self, page, tab, result):
Ken Russell
2015/10/21 22:12:08
Again, unfortunate that useless code is required t…

Corentin Wallez
2015/10/21 23:07:25
Acknowledged.
+    pass
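A minimal standalone sketch of why the no-op override above is needed, assuming page_test.PageTest declares ValidateAndMeasurePage as a must-override hook (for example by raising NotImplementedError), which would be what forces the "useless" code the comment refers to; the class names below are made up:

class FakePageTest(object):
  def ValidateAndMeasurePage(self, page, tab, results):
    raise NotImplementedError

class FakeValidator(FakePageTest):
  def ValidateAndMeasurePage(self, page, tab, results):
    # Nothing to validate or measure; the override exists only to satisfy the
    # base class contract.
    pass

FakeValidator().ValidateAndMeasurePage(None, None, None)  # runs without error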
 
 def _CanRunOnBrowser(browser_info, page):
   expectations = page.GetExpectations()
   return expectations.GetExpectationForPage(
     browser_info.browser, page) != 'skip'
 
-def RunStoryWithRetries(cls, shared_page_state, results):
-  page = shared_page_state.current_page
+def RunStoryWithRetries(cls, shared_state, results):
Ken Russell
2015/10/21 22:12:08
Consider importing shared_page_state as shared_pag…

Corentin Wallez
2015/10/21 23:07:25
Done.
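The truncated suggestion above appears to be about the parameter name shadowing the shared_page_state module imported at the top of the file; the patch addresses it by renaming the parameter to shared_state. A sketch of the aliasing alternative, assuming telemetry is importable; the alias name is illustrative, not taken from the CL:

from telemetry.page import shared_page_state as shared_page_state_module

def RunStoryWithRetries(cls, shared_page_state, results):
  # With the module aliased, a parameter called shared_page_state no longer
  # hides it; the module stays reachable as shared_page_state_module.
  page = shared_page_state.current_page
  return page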
+  page = shared_state.current_page
   expectations = page.GetExpectations()
   expectation = 'pass'
   if expectations:
     expectation = expectations.GetExpectationForPage(
-      shared_page_state.browser, page)
+      shared_state.browser, page)
   if expectation == 'skip':
     raise Exception(
       'Skip expectations should have been handled in CanRunOnBrowser')
   try:
-    super(cls, shared_page_state).RunStory(results)
+    super(cls, shared_state).RunStory(results)
   except Exception:
     if expectation == 'pass':
       raise
@@ -79,12 +79,12 @@ def RunStoryWithRetries(cls, shared_page_state, results):
       return
     if expectation != 'flaky':
       logging.warning(
-        'Unknown expectation %s while handling exception for %s' %
-        (expectation, page.display_name))
+        'Unknown expectation %s while handling exception for %s',
+        expectation, page.display_name)
       raise
     # Flaky tests are handled here.
     num_retries = expectations.GetFlakyRetriesForPage(
-      shared_page_state.browser, page)
+      shared_state.browser, page)
     if not num_retries:
       # Re-raise the exception.
       raise
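The hunk above replaces eager %-formatting with logging's deferred formatting: the format string and its arguments are passed separately, so interpolation only happens when the record is actually emitted. A small standalone illustration with made-up values:

import logging

expectation = 'flaky'
display_name = 'Pixel_WebGLGreenTriangle'
# The logger performs the %s interpolation itself, and only if the record is
# handled, instead of building the message string up front.
logging.warning('Unknown expectation %s while handling exception for %s',
                expectation, display_name)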
@@ -92,7 +92,7 @@
     for ii in xrange(0, num_retries):
       print 'FLAKY TEST FAILURE, retrying: ' + page.display_name
       try:
-        super(cls, shared_page_state).RunStory(results)
+        super(cls, shared_state).RunStory(results)
         break
       except Exception:
         # Squelch any exceptions from any but the last retry.
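The diff is cut off after the squelching comment; the surrounding code indicates the loop retries a flaky story up to num_retries times and lets only the last failure propagate. A standalone sketch of that pattern, where the re-raise condition is an assumption since the closing lines are not shown:

def RunWithRetries(run_story, num_retries):
  for ii in xrange(0, num_retries):
    try:
      run_story()
      break
    except Exception:
      # Squelch any exceptions from any but the last retry.
      if ii == num_retries - 1:
        raise

RunWithRetries(lambda: None, 3)  # succeeds on the first attempt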