Chromium Code Reviews

Unified Diff: tools/chrome_proxy/live_tests/chrome_proxy_measurements.py

Issue 1098253004: Move top_20 tests to a separate suite (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 8 months ago
Index: tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
diff --git a/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py b/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
new file mode 100644
index 0000000000000000000000000000000000000000..b90e12b8c6c7bff2c092494c6dd690461c644dec
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
@@ -0,0 +1,118 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import logging
+import urlparse
+
+from integration_tests import chrome_proxy_metrics as metrics
sclittle 2015/04/23 01:07:50 IIUC you're actually importing the integration_tests copy of chrome_proxy_metrics here; shouldn't this suite use the live_tests one?
bustamante 2015/04/23 02:26:09 Yeah, Done.
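A minimal sketch of the import implied by the "Done" above, assuming this CL also adds a chrome_proxy_metrics module under live_tests (the exact module path is an assumption, not read from the patch):

# Assumed replacement for the integration_tests import discussed above;
# the live_tests module path is a guess based on the review comment.
from live_tests import chrome_proxy_metrics as metrics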
+from metrics import loading
+from telemetry.core import exceptions
+from telemetry.page import page_test
+
+class ChromeProxyLatency(page_test.PageTest):
sclittle 2015/04/23 01:07:50 Could these latency and data savings tests be removed?
bustamante 2015/04/23 02:26:09 Yeah I think we should remove them, or add a condition so they only run when needed.
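For context, measurement classes like these are consumed by Telemetry benchmark definitions in a sibling chrome_proxy_benchmark.py; if the latency and data-saving tests stay, removing or gating them happens at that registration layer. A hedged sketch of such a registration, where the class name, page-set module, and benchmark name are assumptions rather than contents of this CL:

# Hypothetical live_tests benchmark registration; deleting or conditionally
# skipping the test would be done here, not in the measurement class itself.
from telemetry import benchmark

from live_tests import chrome_proxy_measurements as measurements
from live_tests import pagesets  # assumed top-20 page set module


class ChromeProxyLatencyTop20(benchmark.Benchmark):
  """Data reduction proxy latency against the top-20 live pages (sketch)."""
  test = measurements.ChromeProxyLatency
  page_set = pagesets.Top20PageSet

  @classmethod
  def Name(cls):
    return 'chrome_proxy_benchmark.latency.top_20'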
+  """Chrome proxy latency measurement."""
+
+  def __init__(self, *args, **kwargs):
+    super(ChromeProxyLatency, self).__init__(*args, **kwargs)
+    self._metrics = metrics.ChromeProxyMetric()
+
+  def CustomizeBrowserOptions(self, options):
+    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+  def WillNavigateToPage(self, page, tab):
+    tab.ClearCache(force=True)
+
+  def ValidateAndMeasurePage(self, page, tab, results):
+    # Wait for the load event.
+    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+    self._metrics.AddResultsForLatency(tab, results)
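The metric object does the actual reporting; purely to illustrate the contract (this is not the real chrome_proxy_metrics code), a latency result can be derived from the navigation timing API roughly as follows:

# Illustration only: roughly what a latency metric can compute once
# performance.timing.loadEventStart is non-zero.
from telemetry.value import scalar

def AddResultsForLatency(tab, results):
  load_ms = tab.EvaluateJavaScript(
      'performance.timing.loadEventStart - performance.timing.navigationStart')
  results.AddValue(scalar.ScalarValue(
      results.current_page, 'latency', 'ms', load_ms))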
+
+
+class ChromeProxyDataSaving(page_test.PageTest):
+  """Chrome proxy data saving measurement."""
+  def __init__(self, *args, **kwargs):
+    super(ChromeProxyDataSaving, self).__init__(*args, **kwargs)
+    self._metrics = metrics.ChromeProxyMetric()
+
+  def CustomizeBrowserOptions(self, options):
+    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+  def WillNavigateToPage(self, page, tab):
+    tab.ClearCache(force=True)
+    self._metrics.Start(page, tab)
+
+  def ValidateAndMeasurePage(self, page, tab, results):
+    # Wait for the load event.
+    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+    self._metrics.Stop(page, tab)
+    self._metrics.AddResultsForDataSaving(tab, results)
+
+
+class ChromeProxyValidation(page_test.PageTest):
sclittle 2015/04/23 01:07:50 Maybe this file could import the old measurements.py instead of duplicating this class?
bustamante 2015/04/23 02:26:09 The header test that used this got removed, so we don't need this here anymore.
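The alternative floated above would amount to a one-line reuse instead of a copy, assuming the old module stays importable from this suite (hypothetical, not what this patch set does):

# Hypothetical alternative to duplicating the base class below:
from integration_tests.chrome_proxy_measurements import ChromeProxyValidation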
+  """Base class for all chrome proxy correctness measurements."""
+
+  # Value of the extra via header. |None| if no extra via header is expected.
+  extra_via_header = None
+
+  def __init__(self, restart_after_each_page=False):
+    super(ChromeProxyValidation, self).__init__(
+        needs_browser_restart_after_each_page=restart_after_each_page)
+    self._metrics = metrics.ChromeProxyMetric()
+    self._page = None
+    # Whether a timeout exception is expected during the test.
+    self._expect_timeout = False
+
+  def CustomizeBrowserOptions(self, options):
+    # Enable the chrome proxy (data reduction proxy).
+    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+  def WillNavigateToPage(self, page, tab):
+    tab.ClearCache(force=True)
+    assert self._metrics
+    self._metrics.Start(page, tab)
+
+  def ValidateAndMeasurePage(self, page, tab, results):
+    self._page = page
+    # Wait for the load event.
+    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+    assert self._metrics
+    self._metrics.Stop(page, tab)
+    if ChromeProxyValidation.extra_via_header:
+      self._metrics.AddResultsForExtraViaHeader(
+          tab, results, ChromeProxyValidation.extra_via_header)
+    self.AddResults(tab, results)
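Note that the check above reads ChromeProxyValidation.extra_via_header (the base-class attribute) rather than self.extra_via_header, which suggests the hook is meant to be set once by the harness rather than per subclass. A hedged sketch of such a hook, with an invented flag name:

# Hypothetical harness-side hook (flag name is illustrative only): inject any
# extra Via header the proxy under test is expected to add.
def ProcessCommandLineArgs(parser, args):
  if args.extra_chrome_proxy_via_header:
    ChromeProxyValidation.extra_via_header = args.extra_chrome_proxy_via_header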
+
+  def AddResults(self, tab, results):
+    raise NotImplementedError
+
+  def StopBrowserAfterPage(self, browser, page):  # pylint: disable=W0613
+    if hasattr(page, 'restart_after') and page.restart_after:
+      return True
+    return False
+
+  def RunNavigateSteps(self, page, tab):
+    # The redirect from safebrowsing causes a timeout. Ignore that.
+    try:
+      super(ChromeProxyValidation, self).RunNavigateSteps(page, tab)
+      if self._expect_timeout:
+        raise metrics.ChromeProxyMetricException, (
+            'Timeout was expected, but did not occur')
+    except exceptions.TimeoutException as e:
+      if self._expect_timeout:
+        logging.warning('Navigation timeout on page %s',
+                        page.name if page.name else page.url)
+      else:
+        raise e
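Nothing in the portion of the file shown here ever sets _expect_timeout to True; a subclass exercising that branch might look like the following (purely illustrative, not part of this patch):

# Hypothetical subclass that expects navigation to time out, e.g. for a page
# the proxy is expected to block; illustrative only.
class ChromeProxyExpectTimeoutExample(ChromeProxyValidation):

  def WillNavigateToPage(self, page, tab):
    super(ChromeProxyExpectTimeoutExample, self).WillNavigateToPage(page, tab)
    self._expect_timeout = True

  def AddResults(self, tab, results):
    # The timeout itself (or its absence) is the pass/fail signal.
    pass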
+
+
+class ChromeProxyHeaders(ChromeProxyValidation):
sclittle 2015/04/23 01:07:50 Does it really make sense to run the header validation against live sites?
bustamante 2015/04/23 02:26:09 Yeah I agree, I'll remove the test for now, and later we can add it back if it turns out to be needed.
+  """Correctness measurement for response headers."""
+
+  def __init__(self):
+    super(ChromeProxyHeaders, self).__init__(restart_after_each_page=True)
+
+  def AddResults(self, tab, results):
+    self._metrics.AddResultsForHeaderValidation(tab, results)
+
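As for what the header validation asserts, the gist (the real logic lives in chrome_proxy_metrics and inspects Telemetry's network events) is that proxied responses must carry the data reduction proxy's Via header, conventionally '1.1 Chrome-Compression-Proxy':

# Simplified illustration of the Via-header check, not the real metric code.
CHROME_PROXY_VIA = '1.1 Chrome-Compression-Proxy'

def HasChromeProxyViaHeader(response_headers):
  """Returns True if the Via header lists the Chrome compression proxy."""
  via_values = [v.strip() for v in response_headers.get('Via', '').split(',')]
  return CHROME_PROXY_VIA in via_values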
