Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(51)

Side by Side Diff: tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py

Issue 397483002: Move chrome_proxy tests from under tools/perf to tools/chrome_proxy/integration_tests. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2014 The Chromium Authors. All rights reserved. 1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import logging 6 import logging
7 import os 7 import os
8 8
9 from integration_tests import network_metrics
9 from telemetry.page import page_measurement 10 from telemetry.page import page_measurement
10 from metrics import network
11 from telemetry.value import scalar
12 11
13 12
class ChromeProxyMetricException(page_measurement.MeasurementFailure):
  """Raised when a Chrome proxy integration expectation is not met."""
  pass
16 15
17 16
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
30 29
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Returns the proxy info object exposed by chrome://net-internals.

  Navigates |tab| to |url|, injects the companion chrome_proxy_metrics.js
  helper, waits for the page load to start, and evaluates
  window.__getChromeProxyInfo() to collect the browser's proxy state.
  """
  tab.Navigate(url)
  script_path = os.path.join(os.path.dirname(__file__),
                             'chrome_proxy_metrics.js')
  with open(script_path) as script_file:
    helper_js = script_file.read()
  tab.ExecuteJavaScript(helper_js)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  return tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
39 39
40 40
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Checks that |retry_time| lies in [low, high + grace_seconds).

  The upper bound is extended by |grace_seconds| to tolerate clock skew
  between setting the retry time and reading it back.
  """
  upper_bound = high + datetime.timedelta(seconds=grace_seconds)
  return low <= retry_time < upper_bound
44 44
45 45
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)
50 50
51 def ShouldHaveChromeProxyViaHeader(self): 51 def ShouldHaveChromeProxyViaHeader(self):
52 resp = self.response 52 resp = self.response
53 # Ignore https and data url 53 # Ignore https and data url
54 if resp.url.startswith('https') or resp.url.startswith('data:'): 54 if resp.url.startswith('https') or resp.url.startswith('data:'):
55 return False 55 return False
56 # Ignore 304 Not Modified and cache hit. 56 # Ignore 304 Not Modified and cache hit.
(...skipping 24 matching lines...) Expand all
81 81
82 def IsSafebrowsingResponse(self): 82 def IsSafebrowsingResponse(self):
83 if (self.response.status == 307 and 83 if (self.response.status == 307 and
84 self.response.GetHeader('X-Malware-Url') == '1' and 84 self.response.GetHeader('X-Malware-Url') == '1' and
85 self.IsValidByViaHeader() and 85 self.IsValidByViaHeader() and
86 self.response.GetHeader('Location') == self.response.url): 86 self.response.GetHeader('Location') == self.response.url):
87 return True 87 return True
88 return False 88 return False
89 89
90 90
class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    # Enable data-saving computation in the base NetworkMetric.
    self.compute_data_saving = True
    # Proxy endpoints this metric expects the browser to be using.
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
    }
(...skipping 15 matching lines...) Expand all
117 117
118 super(ChromeProxyMetric, self).AddResults(tab, results) 118 super(ChromeProxyMetric, self).AddResults(tab, results)
119 for resp in self.IterResponses(tab): 119 for resp in self.IterResponses(tab):
120 if resp.response.served_from_cache: 120 if resp.response.served_from_cache:
121 resources_from_cache += 1 121 resources_from_cache += 1
122 if resp.HasChromeProxyViaHeader(): 122 if resp.HasChromeProxyViaHeader():
123 resources_via_proxy += 1 123 resources_via_proxy += 1
124 else: 124 else:
125 resources_direct += 1 125 resources_direct += 1
126 126
127 results.AddValue(scalar.ScalarValue( 127 results.Add('resources_via_proxy', 'count', resources_via_proxy)
128 results.current_page, 'resources_via_proxy', 'count', 128 results.Add('resources_from_cache', 'count', resources_from_cache)
129 resources_via_proxy)) 129 results.Add('resources_direct', 'count', resources_direct)
130 results.AddValue(scalar.ScalarValue(
131 results.current_page, 'resources_from_cache', 'count',
132 resources_from_cache))
133 results.AddValue(scalar.ScalarValue(
134 results.current_page, 'resources_direct', 'count', resources_direct))
135 130
136 def AddResultsForHeaderValidation(self, tab, results): 131 def AddResultsForHeaderValidation(self, tab, results):
137 via_count = 0 132 via_count = 0
138 bypass_count = 0 133 bypass_count = 0
139 for resp in self.IterResponses(tab): 134 for resp in self.IterResponses(tab):
140 if resp.IsValidByViaHeader(): 135 if resp.IsValidByViaHeader():
141 via_count += 1 136 via_count += 1
142 elif tab and self.IsProxyBypassed(tab): 137 elif tab and self.IsProxyBypassed(tab):
143 logging.warning('Proxy bypassed for %s', resp.response.url) 138 logging.warning('Proxy bypassed for %s', resp.response.url)
144 bypass_count += 1 139 bypass_count += 1
145 else: 140 else:
146 r = resp.response 141 r = resp.response
147 raise ChromeProxyMetricException, ( 142 raise ChromeProxyMetricException, (
148 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( 143 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
149 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) 144 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
150 results.AddValue(scalar.ScalarValue( 145 results.Add('checked_via_header', 'count', via_count)
151 results.current_page, 'checked_via_header', 'count', via_count)) 146 results.Add('request_bypassed', 'count', bypass_count)
152 results.AddValue(scalar.ScalarValue(
153 results.current_page, 'request_bypassed', 'count', bypass_count))
154 147
155 def IsProxyBypassed(self, tab): 148 def IsProxyBypassed(self, tab):
156 """ Returns True if all configured proxies are bypassed.""" 149 """ Returns True if all configured proxies are bypassed."""
157 info = GetProxyInfoFromNetworkInternals(tab) 150 info = GetProxyInfoFromNetworkInternals(tab)
158 if not info['enabled']: 151 if not info['enabled']:
159 raise ChromeProxyMetricException, ( 152 raise ChromeProxyMetricException, (
160 'Chrome proxy should be enabled. proxy info: %s' % info) 153 'Chrome proxy should be enabled. proxy info: %s' % info)
161 154
162 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort() 155 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort()
163 proxies = [self.effective_proxies['proxy'], 156 proxies = [self.effective_proxies['proxy'],
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
210 if tab: 203 if tab:
211 info = GetProxyInfoFromNetworkInternals(tab) 204 info = GetProxyInfoFromNetworkInternals(tab)
212 if not info['enabled']: 205 if not info['enabled']:
213 raise ChromeProxyMetricException, ( 206 raise ChromeProxyMetricException, (
214 'Chrome proxy should be enabled. proxy info: %s' % info) 207 'Chrome proxy should be enabled. proxy info: %s' % info)
215 self.VerifyBadProxies( 208 self.VerifyBadProxies(
216 info['badProxies'], 209 info['badProxies'],
217 [self.effective_proxies['proxy'], 210 [self.effective_proxies['proxy'],
218 self.effective_proxies['fallback']]) 211 self.effective_proxies['fallback']])
219 212
220 results.AddValue(scalar.ScalarValue( 213 results.Add('bypass', 'count', bypass_count)
221 results.current_page, 'bypass', 'count', bypass_count))
222 214
223 def AddResultsForSafebrowsing(self, tab, results): 215 def AddResultsForSafebrowsing(self, tab, results):
224 count = 0 216 count = 0
225 safebrowsing_count = 0 217 safebrowsing_count = 0
226 for resp in self.IterResponses(tab): 218 for resp in self.IterResponses(tab):
227 count += 1 219 count += 1
228 if resp.IsSafebrowsingResponse(): 220 if resp.IsSafebrowsingResponse():
229 safebrowsing_count += 1 221 safebrowsing_count += 1
230 else: 222 else:
231 r = resp.response 223 r = resp.response
232 raise ChromeProxyMetricException, ( 224 raise ChromeProxyMetricException, (
233 '%s: Not a valid safe browsing response.\n' 225 '%s: Not a valid safe browsing response.\n'
234 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( 226 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
235 r.url, r.status, r.status_text, r.headers)) 227 r.url, r.status, r.status_text, r.headers))
236 if count == safebrowsing_count: 228 if count == safebrowsing_count:
237 results.AddValue(scalar.ScalarValue( 229 results.Add('safebrowsing', 'boolean', True)
238 results.current_page, 'safebrowsing', 'boolean', True))
239 else: 230 else:
240 raise ChromeProxyMetricException, ( 231 raise ChromeProxyMetricException, (
241 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( 232 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
242 count, safebrowsing_count)) 233 count, safebrowsing_count))
243 234
244 def AddResultsForHTTPFallback( 235 def AddResultsForHTTPFallback(
245 self, tab, results, expected_proxies=None, expected_bad_proxies=None): 236 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
246 info = GetProxyInfoFromNetworkInternals(tab) 237 info = GetProxyInfoFromNetworkInternals(tab)
247 if not 'enabled' in info or not info['enabled']: 238 if not 'enabled' in info or not info['enabled']:
248 raise ChromeProxyMetricException, ( 239 raise ChromeProxyMetricException, (
(...skipping 12 matching lines...) Expand all
261 str(proxies), str(expected_proxies))) 252 str(proxies), str(expected_proxies)))
262 253
263 bad_proxies = [] 254 bad_proxies = []
264 if 'badProxies' in info and info['badProxies']: 255 if 'badProxies' in info and info['badProxies']:
265 bad_proxies = [p['proxy'] for p in info['badProxies'] 256 bad_proxies = [p['proxy'] for p in info['badProxies']
266 if 'proxy' in p and p['proxy']] 257 if 'proxy' in p and p['proxy']]
267 if bad_proxies != expected_bad_proxies: 258 if bad_proxies != expected_bad_proxies:
268 raise ChromeProxyMetricException, ( 259 raise ChromeProxyMetricException, (
269 'Wrong bad proxies (%s). Expect: "%s"' % ( 260 'Wrong bad proxies (%s). Expect: "%s"' % (
270 str(bad_proxies), str(expected_bad_proxies))) 261 str(bad_proxies), str(expected_bad_proxies)))
271 results.AddValue(scalar.ScalarValue( 262 results.Add('http_fallback', 'boolean', True)
272 results.current_page, 'http_fallback', 'boolean', True))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698