Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(340)

Side by Side Diff: tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py

Issue 555373002: Revert of Update metrics verification for dev-proxy. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2014 The Chromium Authors. All rights reserved. 1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import logging 6 import logging
7 import os 7 import os
8 8
9 from integration_tests import network_metrics 9 from integration_tests import network_metrics
10 from telemetry.page import page_test 10 from telemetry.page import page_test
11 from telemetry.value import scalar 11 from telemetry.value import scalar
12 12
13 13
14 class ChromeProxyMetricException(page_test.MeasurementFailure): 14 class ChromeProxyMetricException(page_test.MeasurementFailure):
15 pass 15 pass
16 16
17 17
18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' 18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' 19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'
20 20
21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' 21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS 22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
23 PROXY_DEV_SETTING_HTTPS_WITH_SCHEME = 'http://proxy-dev.googlezip.net:80'
24 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' 23 PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
25 PROXY_SETTING_DIRECT = 'direct://' 24 PROXY_SETTING_DIRECT = 'direct://'
26 25
27 # The default Chrome Proxy bypass time is a range from one to five minutes. 26 # The default Chrome Proxy bypass time is a range from one to five minutes.
28 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. 27 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
29 DEFAULT_BYPASS_MIN_SECONDS = 60 28 DEFAULT_BYPASS_MIN_SECONDS = 60
30 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 29 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
31 30
32 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): 31 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
33 tab.Navigate(url) 32 tab.Navigate(url)
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
91 90
92 91
93 class ChromeProxyMetric(network_metrics.NetworkMetric): 92 class ChromeProxyMetric(network_metrics.NetworkMetric):
94 """A Chrome proxy timeline metric.""" 93 """A Chrome proxy timeline metric."""
95 94
96 def __init__(self): 95 def __init__(self):
97 super(ChromeProxyMetric, self).__init__() 96 super(ChromeProxyMetric, self).__init__()
98 self.compute_data_saving = True 97 self.compute_data_saving = True
99 self.effective_proxies = { 98 self.effective_proxies = {
100 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, 99 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
101 "proxy-dev": PROXY_DEV_SETTING_HTTPS_WITH_SCHEME,
102 "fallback": PROXY_SETTING_HTTP, 100 "fallback": PROXY_SETTING_HTTP,
103 "direct": PROXY_SETTING_DIRECT, 101 "direct": PROXY_SETTING_DIRECT,
104 } 102 }
105 103
106 def SetEvents(self, events): 104 def SetEvents(self, events):
107 """Used for unittest.""" 105 """Used for unittest."""
108 self._events = events 106 self._events = events
109 107
110 def ResponseFromEvent(self, event): 108 def ResponseFromEvent(self, event):
111 return ChromeProxyResponse(event) 109 return ChromeProxyResponse(event)
(...skipping 23 matching lines...) Expand all
135 resources_from_cache)) 133 resources_from_cache))
136 results.AddValue(scalar.ScalarValue( 134 results.AddValue(scalar.ScalarValue(
137 results.current_page, 'resources_direct', 'count', resources_direct)) 135 results.current_page, 'resources_direct', 'count', resources_direct))
138 136
139 def AddResultsForHeaderValidation(self, tab, results): 137 def AddResultsForHeaderValidation(self, tab, results):
140 via_count = 0 138 via_count = 0
141 bypass_count = 0 139 bypass_count = 0
142 for resp in self.IterResponses(tab): 140 for resp in self.IterResponses(tab):
143 if resp.IsValidByViaHeader(): 141 if resp.IsValidByViaHeader():
144 via_count += 1 142 via_count += 1
143 elif tab and self.IsProxyBypassed(tab):
144 logging.warning('Proxy bypassed for %s', resp.response.url)
145 bypass_count += 1
145 else: 146 else:
146 bypassed, _ = self.IsProxyBypassed(tab) 147 r = resp.response
147 if tab and bypassed: 148 raise ChromeProxyMetricException, (
148 logging.warning('Proxy bypassed for %s', resp.response.url) 149 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
149 bypass_count += 1 150 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
150 else:
151 r = resp.response
152 raise ChromeProxyMetricException, (
153 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
154 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
155 results.AddValue(scalar.ScalarValue( 151 results.AddValue(scalar.ScalarValue(
156 results.current_page, 'checked_via_header', 'count', via_count)) 152 results.current_page, 'checked_via_header', 'count', via_count))
157 results.AddValue(scalar.ScalarValue( 153 results.AddValue(scalar.ScalarValue(
158 results.current_page, 'request_bypassed', 'count', bypass_count)) 154 results.current_page, 'request_bypassed', 'count', bypass_count))
159 155
160 def IsProxyBypassed(self, tab): 156 def IsProxyBypassed(self, tab):
161 """ Returns True if all configured proxies are bypassed.""" 157 """ Returns True if all configured proxies are bypassed."""
162 info = GetProxyInfoFromNetworkInternals(tab) 158 info = GetProxyInfoFromNetworkInternals(tab)
163 if not info['enabled']: 159 if not info['enabled']:
164 raise ChromeProxyMetricException, ( 160 raise ChromeProxyMetricException, (
165 'Chrome proxy should be enabled. proxy info: %s' % info) 161 'Chrome proxy should be enabled. proxy info: %s' % info)
166 162
167 bad_proxies = [str(p['proxy']) for p in info['badProxies']] 163 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort()
168 bad_proxies.sort()
169 proxies = [self.effective_proxies['proxy'], 164 proxies = [self.effective_proxies['proxy'],
170 self.effective_proxies['fallback']] 165 self.effective_proxies['fallback']].sort()
171 proxies.sort() 166 return bad_proxies == proxies
172 proxies_dev = [self.effective_proxies['proxy-dev'],
173 self.effective_proxies['fallback']]
174 proxies_dev.sort()
175 if bad_proxies == proxies:
176 return True, proxies
177 elif bad_proxies == proxies_dev:
178 return True, proxies_dev
179 return False, []
180 167
181 @staticmethod 168 @staticmethod
182 def VerifyBadProxies( 169 def VerifyBadProxies(
183 badProxies, expected_proxies, 170 badProxies, expected_proxies,
184 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, 171 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
185 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): 172 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
186 """Verify the bad proxy list and their retry times are expected. """ 173 """Verify the bad proxy list and their retry times are expected. """
187 if not badProxies or (len(badProxies) != len(expected_proxies)): 174 if not badProxies or (len(badProxies) != len(expected_proxies)):
188 return False 175 return False
189 176
(...skipping 29 matching lines...) Expand all
219 raise ChromeProxyMetricException, ( 206 raise ChromeProxyMetricException, (
220 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( 207 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
221 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) 208 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
222 bypass_count += 1 209 bypass_count += 1
223 210
224 if tab: 211 if tab:
225 info = GetProxyInfoFromNetworkInternals(tab) 212 info = GetProxyInfoFromNetworkInternals(tab)
226 if not info['enabled']: 213 if not info['enabled']:
227 raise ChromeProxyMetricException, ( 214 raise ChromeProxyMetricException, (
228 'Chrome proxy should be enabled. proxy info: %s' % info) 215 'Chrome proxy should be enabled. proxy info: %s' % info)
229 _, expected_bad_proxies = self.IsProxyBypassed(tab) 216 self.VerifyBadProxies(
230 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies) 217 info['badProxies'],
218 [self.effective_proxies['proxy'],
219 self.effective_proxies['fallback']])
231 220
232 results.AddValue(scalar.ScalarValue( 221 results.AddValue(scalar.ScalarValue(
233 results.current_page, 'bypass', 'count', bypass_count)) 222 results.current_page, 'bypass', 'count', bypass_count))
234 223
235 def AddResultsForSafebrowsing(self, tab, results): 224 def AddResultsForSafebrowsing(self, tab, results):
236 count = 0 225 count = 0
237 safebrowsing_count = 0 226 safebrowsing_count = 0
238 for resp in self.IterResponses(tab): 227 for resp in self.IterResponses(tab):
239 count += 1 228 count += 1
240 if resp.IsSafebrowsingResponse(): 229 if resp.IsSafebrowsingResponse():
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
275 bad_proxies = [] 264 bad_proxies = []
276 if 'badProxies' in info and info['badProxies']: 265 if 'badProxies' in info and info['badProxies']:
277 bad_proxies = [p['proxy'] for p in info['badProxies'] 266 bad_proxies = [p['proxy'] for p in info['badProxies']
278 if 'proxy' in p and p['proxy']] 267 if 'proxy' in p and p['proxy']]
279 if bad_proxies != expected_bad_proxies: 268 if bad_proxies != expected_bad_proxies:
280 raise ChromeProxyMetricException, ( 269 raise ChromeProxyMetricException, (
281 'Wrong bad proxies (%s). Expect: "%s"' % ( 270 'Wrong bad proxies (%s). Expect: "%s"' % (
282 str(bad_proxies), str(expected_bad_proxies))) 271 str(bad_proxies), str(expected_bad_proxies)))
283 results.AddValue(scalar.ScalarValue( 272 results.AddValue(scalar.ScalarValue(
284 results.current_page, 'http_fallback', 'boolean', True)) 273 results.current_page, 'http_fallback', 'boolean', True))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698