OLD | NEW |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import datetime | 5 import datetime |
6 import logging | 6 import logging |
7 import os | 7 import os |
8 | 8 |
9 from integration_tests import network_metrics | 9 from integration_tests import network_metrics |
10 from telemetry.page import page_test | 10 from telemetry.page import page_test |
11 from telemetry.value import scalar | 11 from telemetry.value import scalar |
12 | 12 |
13 | 13 |
14 class ChromeProxyMetricException(page_test.MeasurementFailure): | 14 class ChromeProxyMetricException(page_test.MeasurementFailure): |
15 pass | 15 pass |
16 | 16 |
17 | 17 |
18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' | 18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' |
19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' | 19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' |
20 | 20 |
21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' | 21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' |
22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS | 22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS |
| 23 PROXY_DEV_SETTING_HTTPS_WITH_SCHEME = 'http://proxy-dev.googlezip.net:80' |
23 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' | 24 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' |
24 PROXY_SETTING_DIRECT = 'direct://' | 25 PROXY_SETTING_DIRECT = 'direct://' |
25 | 26 |
26 # The default Chrome Proxy bypass time is a range from one to five minutes. | 27 # The default Chrome Proxy bypass time is a range from one to five minutes. |
27 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. | 28 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. |
28 DEFAULT_BYPASS_MIN_SECONDS = 60 | 29 DEFAULT_BYPASS_MIN_SECONDS = 60 |
29 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 | 30 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 |
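These two constants bound the bypass window that VerifyBadProxies (further down; its body is partly elided in this diff) checks retry times against. A minimal sketch of such a check, assuming the net-internals bad-proxy entry exposes its retry expiry under a 'bad_until' key (a hypothetical field name used here only for illustration):

# Sketch only, not the elided VerifyBadProxies body: check that a bad-proxy
# entry's retry expiry falls inside the default one-to-five-minute window.
def _RetryTimeInBypassWindow(bad_proxy_entry, now=None):
  now = now or datetime.datetime.now()
  retry_time = datetime.datetime.fromtimestamp(bad_proxy_entry['bad_until'])  # hypothetical key
  low = now + datetime.timedelta(seconds=DEFAULT_BYPASS_MIN_SECONDS)
  high = now + datetime.timedelta(seconds=DEFAULT_BYPASS_MAX_SECONDS)
  return low <= retry_time <= high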
30 | 31 |
31 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): | 32 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): |
32 tab.Navigate(url) | 33 tab.Navigate(url) |
(...skipping 57 matching lines...)
90 | 91 |
91 | 92 |
92 class ChromeProxyMetric(network_metrics.NetworkMetric): | 93 class ChromeProxyMetric(network_metrics.NetworkMetric): |
93 """A Chrome proxy timeline metric.""" | 94 """A Chrome proxy timeline metric.""" |
94 | 95 |
95 def __init__(self): | 96 def __init__(self): |
96 super(ChromeProxyMetric, self).__init__() | 97 super(ChromeProxyMetric, self).__init__() |
97 self.compute_data_saving = True | 98 self.compute_data_saving = True |
98 self.effective_proxies = { | 99 self.effective_proxies = { |
99 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, | 100 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, |
| 101 "proxy-dev": PROXY_DEV_SETTING_HTTPS_WITH_SCHEME, |
100 "fallback": PROXY_SETTING_HTTP, | 102 "fallback": PROXY_SETTING_HTTP, |
101 "direct": PROXY_SETTING_DIRECT, | 103 "direct": PROXY_SETTING_DIRECT, |
102 } | 104 } |
103 | 105 |
104 def SetEvents(self, events): | 106 def SetEvents(self, events): |
105 """Used for unittest.""" | 107 """Used for unittest.""" |
106 self._events = events | 108 self._events = events |
107 | 109 |
108 def ResponseFromEvent(self, event): | 110 def ResponseFromEvent(self, event): |
109 return ChromeProxyResponse(event) | 111 return ChromeProxyResponse(event) |
(...skipping 23 matching lines...)
133 resources_from_cache)) | 135 resources_from_cache)) |
134 results.AddValue(scalar.ScalarValue( | 136 results.AddValue(scalar.ScalarValue( |
135 results.current_page, 'resources_direct', 'count', resources_direct)) | 137 results.current_page, 'resources_direct', 'count', resources_direct)) |
136 | 138 |
137 def AddResultsForHeaderValidation(self, tab, results): | 139 def AddResultsForHeaderValidation(self, tab, results): |
138 via_count = 0 | 140 via_count = 0 |
139 bypass_count = 0 | 141 bypass_count = 0 |
140 for resp in self.IterResponses(tab): | 142 for resp in self.IterResponses(tab): |
141 if resp.IsValidByViaHeader(): | 143 if resp.IsValidByViaHeader(): |
142 via_count += 1 | 144 via_count += 1 |
143 elif tab and self.IsProxyBypassed(tab): | |
144 logging.warning('Proxy bypassed for %s', resp.response.url) | |
145 bypass_count += 1 | |
146 else: | 145 else: |
147 r = resp.response | 146 bypassed, _ = self.IsProxyBypassed(tab) |
148 raise ChromeProxyMetricException, ( | 147 if tab and bypassed: |
149 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( | 148 logging.warning('Proxy bypassed for %s', resp.response.url) |
150 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 149 bypass_count += 1 |
| 150 else: |
| 151 r = resp.response |
| 152 raise ChromeProxyMetricException, ( |
| 153 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( |
| 154 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
151 results.AddValue(scalar.ScalarValue( | 155 results.AddValue(scalar.ScalarValue( |
152 results.current_page, 'checked_via_header', 'count', via_count)) | 156 results.current_page, 'checked_via_header', 'count', via_count)) |
153 results.AddValue(scalar.ScalarValue( | 157 results.AddValue(scalar.ScalarValue( |
154 results.current_page, 'request_bypassed', 'count', bypass_count)) | 158 results.current_page, 'request_bypassed', 'count', bypass_count)) |
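IsValidByViaHeader comes from ChromeProxyResponse, whose definition is in the elided portion above. A rough sketch of the kind of check it presumably performs, using the Via constants defined at the top of the file (the real implementation may differ in detail):

# Hedged sketch of a Via-header check; 'response' is assumed to expose
# GetHeader() like the telemetry network response objects used above.
def _HasChromeProxyVia(response):
  via = response.GetHeader('Via') or ''
  return (CHROME_PROXY_VIA_HEADER in via or
          CHROME_PROXY_VIA_HEADER_DEPRECATED in via)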
155 | 159 |
156 def IsProxyBypassed(self, tab): | 160 def IsProxyBypassed(self, tab): |
157 """ Returns True if all configured proxies are bypassed.""" | 161 """ Returns True if all configured proxies are bypassed.""" |
| 162 if not tab: |
| 163 return False, [] |
| 164 |
158 info = GetProxyInfoFromNetworkInternals(tab) | 165 info = GetProxyInfoFromNetworkInternals(tab) |
159 if not info['enabled']: | 166 if not info['enabled']: |
160 raise ChromeProxyMetricException, ( | 167 raise ChromeProxyMetricException, ( |
161 'Chrome proxy should be enabled. proxy info: %s' % info) | 168 'Chrome proxy should be enabled. proxy info: %s' % info) |
162 | 169 |
163 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort() | 170 bad_proxies = [str(p['proxy']) for p in info['badProxies']] |
| 171 bad_proxies.sort() |
164 proxies = [self.effective_proxies['proxy'], | 172 proxies = [self.effective_proxies['proxy'], |
165 self.effective_proxies['fallback']].sort() | 173 self.effective_proxies['fallback']] |
166 return bad_proxies == proxies | 174 proxies.sort() |
| 175 proxies_dev = [self.effective_proxies['proxy-dev'], |
| 176 self.effective_proxies['fallback']] |
| 177 proxies_dev.sort() |
| 178 if bad_proxies == proxies: |
| 179 return True, proxies |
| 180 elif bad_proxies == proxies_dev: |
| 181 return True, proxies_dev |
| 182 return False, [] |
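With this change IsProxyBypassed returns a (bypassed, bad_proxy_list) tuple rather than a bare boolean, so callers unpack both values, as the updated AddResultsForBypass below does. A usage sketch, assuming 'metric' is a ChromeProxyMetric, 'tab' is a telemetry tab, and 'info' came from GetProxyInfoFromNetworkInternals(tab):

# Unpack the new two-value return and feed the matched proxy list to
# VerifyBadProxies as the expected bad proxies.
bypassed, expected_bad_proxies = metric.IsProxyBypassed(tab)
if bypassed:
  metric.VerifyBadProxies(info['badProxies'], expected_bad_proxies)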
167 | 183 |
168 @staticmethod | 184 @staticmethod |
169 def VerifyBadProxies( | 185 def VerifyBadProxies( |
170 badProxies, expected_proxies, | 186 badProxies, expected_proxies, |
171 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, | 187 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, |
172 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): | 188 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): |
173 """Verify the bad proxy list and their retry times are expected. """ | 189 """Verify the bad proxy list and their retry times are expected. """ |
174 if not badProxies or (len(badProxies) != len(expected_proxies)): | 190 if not badProxies or (len(badProxies) != len(expected_proxies)): |
175 return False | 191 return False |
176 | 192 |
(...skipping 29 matching lines...)
206 raise ChromeProxyMetricException, ( | 222 raise ChromeProxyMetricException, ( |
207 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( | 223 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( |
208 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 224 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
209 bypass_count += 1 | 225 bypass_count += 1 |
210 | 226 |
211 if tab: | 227 if tab: |
212 info = GetProxyInfoFromNetworkInternals(tab) | 228 info = GetProxyInfoFromNetworkInternals(tab) |
213 if not info['enabled']: | 229 if not info['enabled']: |
214 raise ChromeProxyMetricException, ( | 230 raise ChromeProxyMetricException, ( |
215 'Chrome proxy should be enabled. proxy info: %s' % info) | 231 'Chrome proxy should be enabled. proxy info: %s' % info) |
216 self.VerifyBadProxies( | 232 _, expected_bad_proxies = self.IsProxyBypassed(tab) |
217 info['badProxies'], | 233 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies) |
218 [self.effective_proxies['proxy'], | |
219 self.effective_proxies['fallback']]) | |
220 | 234 |
221 results.AddValue(scalar.ScalarValue( | 235 results.AddValue(scalar.ScalarValue( |
222 results.current_page, 'bypass', 'count', bypass_count)) | 236 results.current_page, 'bypass', 'count', bypass_count)) |
223 | 237 |
224 def AddResultsForSafebrowsing(self, tab, results): | 238 def AddResultsForSafebrowsing(self, tab, results): |
225 count = 0 | 239 count = 0 |
226 safebrowsing_count = 0 | 240 safebrowsing_count = 0 |
227 for resp in self.IterResponses(tab): | 241 for resp in self.IterResponses(tab): |
228 count += 1 | 242 count += 1 |
229 if resp.IsSafebrowsingResponse(): | 243 if resp.IsSafebrowsingResponse(): |
(...skipping 34 matching lines...)
264 bad_proxies = [] | 278 bad_proxies = [] |
265 if 'badProxies' in info and info['badProxies']: | 279 if 'badProxies' in info and info['badProxies']: |
266 bad_proxies = [p['proxy'] for p in info['badProxies'] | 280 bad_proxies = [p['proxy'] for p in info['badProxies'] |
267 if 'proxy' in p and p['proxy']] | 281 if 'proxy' in p and p['proxy']] |
268 if bad_proxies != expected_bad_proxies: | 282 if bad_proxies != expected_bad_proxies: |
269 raise ChromeProxyMetricException, ( | 283 raise ChromeProxyMetricException, ( |
270 'Wrong bad proxies (%s). Expect: "%s"' % ( | 284 'Wrong bad proxies (%s). Expect: "%s"' % ( |
271 str(bad_proxies), str(expected_bad_proxies))) | 285 str(bad_proxies), str(expected_bad_proxies))) |
272 results.AddValue(scalar.ScalarValue( | 286 results.AddValue(scalar.ScalarValue( |
273 results.current_page, 'http_fallback', 'boolean', True)) | 287 results.current_page, 'http_fallback', 'boolean', True)) |