Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(57)

Side by Side Diff: tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py

Issue 553543004: Update metrics verification for dev-proxy. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: verification for dev proxy Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2014 The Chromium Authors. All rights reserved. 1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import logging 6 import logging
7 import os 7 import os
8 8
9 from integration_tests import network_metrics 9 from integration_tests import network_metrics
10 from telemetry.page import page_test 10 from telemetry.page import page_test
11 from telemetry.value import scalar 11 from telemetry.value import scalar
12 12
13 13
14 class ChromeProxyMetricException(page_test.MeasurementFailure): 14 class ChromeProxyMetricException(page_test.MeasurementFailure):
15 pass 15 pass
16 16
17 17
18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' 18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' 19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'
20 20
21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' 21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS 22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
23 PROXY_DEV_SETTING_HTTPS_WITH_SCHEME = 'http://proxy-dev.googlezip.net:80'
23 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' 24 PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
24 PROXY_SETTING_DIRECT = 'direct://' 25 PROXY_SETTING_DIRECT = 'direct://'
25 26
26 # The default Chrome Proxy bypass time is a range from one to five minutes. 27 # The default Chrome Proxy bypass time is a range from one to five minutes.
27 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. 28 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
28 DEFAULT_BYPASS_MIN_SECONDS = 60 29 DEFAULT_BYPASS_MIN_SECONDS = 60
29 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 30 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
30 31
31 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): 32 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
32 tab.Navigate(url) 33 tab.Navigate(url)
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
90 91
91 92
92 class ChromeProxyMetric(network_metrics.NetworkMetric): 93 class ChromeProxyMetric(network_metrics.NetworkMetric):
93 """A Chrome proxy timeline metric.""" 94 """A Chrome proxy timeline metric."""
94 95
95 def __init__(self): 96 def __init__(self):
96 super(ChromeProxyMetric, self).__init__() 97 super(ChromeProxyMetric, self).__init__()
97 self.compute_data_saving = True 98 self.compute_data_saving = True
98 self.effective_proxies = { 99 self.effective_proxies = {
99 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, 100 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
101 "proxy-dev": PROXY_DEV_SETTING_HTTPS_WITH_SCHEME,
100 "fallback": PROXY_SETTING_HTTP, 102 "fallback": PROXY_SETTING_HTTP,
101 "direct": PROXY_SETTING_DIRECT, 103 "direct": PROXY_SETTING_DIRECT,
102 } 104 }
103 105
104 def SetEvents(self, events): 106 def SetEvents(self, events):
105 """Used for unittest.""" 107 """Used for unittest."""
106 self._events = events 108 self._events = events
107 109
108 def ResponseFromEvent(self, event): 110 def ResponseFromEvent(self, event):
109 return ChromeProxyResponse(event) 111 return ChromeProxyResponse(event)
(...skipping 23 matching lines...) Expand all
133 resources_from_cache)) 135 resources_from_cache))
134 results.AddValue(scalar.ScalarValue( 136 results.AddValue(scalar.ScalarValue(
135 results.current_page, 'resources_direct', 'count', resources_direct)) 137 results.current_page, 'resources_direct', 'count', resources_direct))
136 138
137 def AddResultsForHeaderValidation(self, tab, results): 139 def AddResultsForHeaderValidation(self, tab, results):
138 via_count = 0 140 via_count = 0
139 bypass_count = 0 141 bypass_count = 0
140 for resp in self.IterResponses(tab): 142 for resp in self.IterResponses(tab):
141 if resp.IsValidByViaHeader(): 143 if resp.IsValidByViaHeader():
142 via_count += 1 144 via_count += 1
143 elif tab and self.IsProxyBypassed(tab):
144 logging.warning('Proxy bypassed for %s', resp.response.url)
145 bypass_count += 1
146 else: 145 else:
147 r = resp.response 146 bypassed, _ = self.IsProxyBypassed(tab)
148 raise ChromeProxyMetricException, ( 147 if tab and bypassed:
149 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( 148 logging.warning('Proxy bypassed for %s', resp.response.url)
150 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) 149 bypass_count += 1
150 else:
151 r = resp.response
152 raise ChromeProxyMetricException, (
153 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
154 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
151 results.AddValue(scalar.ScalarValue( 155 results.AddValue(scalar.ScalarValue(
152 results.current_page, 'checked_via_header', 'count', via_count)) 156 results.current_page, 'checked_via_header', 'count', via_count))
153 results.AddValue(scalar.ScalarValue( 157 results.AddValue(scalar.ScalarValue(
154 results.current_page, 'request_bypassed', 'count', bypass_count)) 158 results.current_page, 'request_bypassed', 'count', bypass_count))
155 159
156 def IsProxyBypassed(self, tab): 160 def IsProxyBypassed(self, tab):
157 """ Returns True if all configured proxies are bypassed.""" 161 """ Returns True if all configured proxies are bypassed."""
158 info = GetProxyInfoFromNetworkInternals(tab) 162 info = GetProxyInfoFromNetworkInternals(tab)
159 if not info['enabled']: 163 if not info['enabled']:
160 raise ChromeProxyMetricException, ( 164 raise ChromeProxyMetricException, (
161 'Chrome proxy should be enabled. proxy info: %s' % info) 165 'Chrome proxy should be enabled. proxy info: %s' % info)
162 166
163 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort() 167 bad_proxies = [str(p['proxy']) for p in info['badProxies']]
168 bad_proxies.sort()
164 proxies = [self.effective_proxies['proxy'], 169 proxies = [self.effective_proxies['proxy'],
165 self.effective_proxies['fallback']].sort() 170 self.effective_proxies['fallback']]
166 return bad_proxies == proxies 171 proxies.sort()
172 proxies_dev = [self.effective_proxies['proxy-dev'],
173 self.effective_proxies['fallback']]
bengr 2014/09/09 19:11:49 indentation is off.
bolian 2014/09/09 20:59:12 Done.
174 proxies_dev.sort()
175 if bad_proxies == proxies:
176 return True, proxies
177 elif bad_proxies == proxies_dev:
178 return True, proxies_dev
179 return False, []
167 180
168 @staticmethod 181 @staticmethod
169 def VerifyBadProxies( 182 def VerifyBadProxies(
170 badProxies, expected_proxies, 183 badProxies, expected_proxies,
171 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, 184 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
172 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): 185 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
173 """Verify the bad proxy list and their retry times are expected. """ 186 """Verify the bad proxy list and their retry times are expected. """
174 if not badProxies or (len(badProxies) != len(expected_proxies)): 187 if not badProxies or (len(badProxies) != len(expected_proxies)):
175 return False 188 return False
176 189
(...skipping 29 matching lines...) Expand all
206 raise ChromeProxyMetricException, ( 219 raise ChromeProxyMetricException, (
207 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( 220 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
208 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) 221 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
209 bypass_count += 1 222 bypass_count += 1
210 223
211 if tab: 224 if tab:
212 info = GetProxyInfoFromNetworkInternals(tab) 225 info = GetProxyInfoFromNetworkInternals(tab)
213 if not info['enabled']: 226 if not info['enabled']:
214 raise ChromeProxyMetricException, ( 227 raise ChromeProxyMetricException, (
215 'Chrome proxy should be enabled. proxy info: %s' % info) 228 'Chrome proxy should be enabled. proxy info: %s' % info)
216 self.VerifyBadProxies( 229 _, expected_bad_proxies = self.IsProxyBypassed(tab)
217 info['badProxies'], 230 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
218 [self.effective_proxies['proxy'],
219 self.effective_proxies['fallback']])
220 231
221 results.AddValue(scalar.ScalarValue( 232 results.AddValue(scalar.ScalarValue(
222 results.current_page, 'bypass', 'count', bypass_count)) 233 results.current_page, 'bypass', 'count', bypass_count))
223 234
224 def AddResultsForSafebrowsing(self, tab, results): 235 def AddResultsForSafebrowsing(self, tab, results):
225 count = 0 236 count = 0
226 safebrowsing_count = 0 237 safebrowsing_count = 0
227 for resp in self.IterResponses(tab): 238 for resp in self.IterResponses(tab):
228 count += 1 239 count += 1
229 if resp.IsSafebrowsingResponse(): 240 if resp.IsSafebrowsingResponse():
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
264 bad_proxies = [] 275 bad_proxies = []
265 if 'badProxies' in info and info['badProxies']: 276 if 'badProxies' in info and info['badProxies']:
266 bad_proxies = [p['proxy'] for p in info['badProxies'] 277 bad_proxies = [p['proxy'] for p in info['badProxies']
267 if 'proxy' in p and p['proxy']] 278 if 'proxy' in p and p['proxy']]
268 if bad_proxies != expected_bad_proxies: 279 if bad_proxies != expected_bad_proxies:
269 raise ChromeProxyMetricException, ( 280 raise ChromeProxyMetricException, (
270 'Wrong bad proxies (%s). Expect: "%s"' % ( 281 'Wrong bad proxies (%s). Expect: "%s"' % (
271 str(bad_proxies), str(expected_bad_proxies))) 282 str(bad_proxies), str(expected_bad_proxies)))
272 results.AddValue(scalar.ScalarValue( 283 results.AddValue(scalar.ScalarValue(
273 results.current_page, 'http_fallback', 'boolean', True)) 284 results.current_page, 'http_fallback', 'boolean', True))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698