OLD | NEW |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import datetime | 5 import datetime |
6 import logging | 6 import logging |
7 import os | 7 import os |
8 | 8 |
9 from integration_tests import network_metrics | 9 from integration_tests import network_metrics |
10 from telemetry.page import page_test | 10 from telemetry.page import page_test |
(...skipping 151 matching lines...) |
162 r = resp.response | 162 r = resp.response |
163 if resp.response.status != 200: | 163 if resp.response.status != 200: |
164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' % | 164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' % |
165 (r.url, r.status)) | 165 (r.url, r.status)) |
166 if not resp.IsValidByViaHeader(): | 166 if not resp.IsValidByViaHeader(): |
167 raise ChromeProxyMetricException, ('%s: Response missing via header' % | 167 raise ChromeProxyMetricException, ('%s: Response missing via header' % |
168 (r.url)) | 168 (r.url)) |
169 results.AddValue(scalar.ScalarValue( | 169 results.AddValue(scalar.ScalarValue( |
170 results.current_page, 'version_test', 'count', 1)) | 170 results.current_page, 'version_test', 'count', 1)) |
171 | 171 |
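The version test above requires every response to be a 200 that carries the proxy's Via header. IsValidByViaHeader() is defined on the response wrapper earlier in this file; as a minimal sketch (and an assumption, since the exact header value does not appear in this hunk), the check presumably looks for the data-reduction proxy's 'Chrome-Compression-Proxy' entry:

def has_chrome_proxy_via_header(headers):
  # Assumed Via entry added by the data-reduction proxy; the value is illustrative.
  via = headers.get('Via', '')
  return any('Chrome-Compression-Proxy' in entry for entry in via.split(','))

print(has_chrome_proxy_via_header({'Via': '1.1 Chrome-Compression-Proxy'}))  # True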
| 172 def ProxyListForDev(self, proxies): |
| 173 return [self.effective_proxies['proxy-dev'] |
| 174 if proxy == self.effective_proxies['proxy'] |
| 175 else proxy for proxy in proxies] |
| 176 |
172 | 177 |
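ProxyListForDev (moved up from later in the file; the removed copy appears below) substitutes the proxy-dev origin for the standard HTTPS proxy origin while leaving every other entry, such as the fallback, untouched. A small standalone illustration with hypothetical origins (the real values come from self.effective_proxies):

# Hypothetical origins for illustration only.
effective_proxies = {
    'proxy': 'https://proxy.example.com:443',
    'proxy-dev': 'https://proxy-dev.example.com:443',
    'fallback': 'http://compress.example.com:80',
}

def proxy_list_for_dev(proxies):
  return [effective_proxies['proxy-dev']
          if proxy == effective_proxies['proxy']
          else proxy for proxy in proxies]

print(proxy_list_for_dev([effective_proxies['proxy'],
                          effective_proxies['fallback']]))
# ['https://proxy-dev.example.com:443', 'http://compress.example.com:80']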
173 def IsProxyBypassed(self, tab): | 178 def IsProxyBypassed(self, tab): |
174 """Get whether all configured proxies are bypassed. | 179 """Get whether all configured proxies are bypassed. |
175 | 180 |
176 Returns: | 181 Returns: |
177 A tuple of the form (boolean, string list). If all configured proxies | 182 A tuple of the form (boolean, string list). If all configured proxies |
178 are bypassed, then the return value will be (True, bypassed proxies). | 183 are bypassed, then the return value will be (True, bypassed proxies). |
179 Otherwise, the return value will be (False, empty list). | 184 Otherwise, the return value will be (False, empty list). |
180 """ | 185 """ |
181 if not tab: | 186 if not tab: |
182 return False, [] | 187 return False, [] |
183 | 188 |
184 info = GetProxyInfoFromNetworkInternals(tab) | 189 info = GetProxyInfoFromNetworkInternals(tab) |
185 if not info['enabled']: | 190 if not info['enabled']: |
186 raise ChromeProxyMetricException, ( | 191 raise ChromeProxyMetricException, ( |
187 'Chrome proxy should be enabled. proxy info: %s' % info) | 192 'Chrome proxy should be enabled. proxy info: %s' % info) |
188 | 193 |
189 bad_proxies = [str(p['proxy']) for p in info['badProxies']] | 194 bad_proxies = [str(p['proxy']) for p in info['badProxies']] |
190 bad_proxies.sort() | 195 bad_proxies.sort() |
191 proxies = [self.effective_proxies['proxy'], | 196 proxies = [self.effective_proxies['proxy'], |
192 self.effective_proxies['fallback']] | 197 self.effective_proxies['fallback']] |
193 proxies.sort() | 198 proxies.sort() |
194 proxies_dev = [self.effective_proxies['proxy-dev'], | 199 proxies_dev = self.ProxyListForDev(proxies) |
195 self.effective_proxies['fallback']] | |
196 proxies_dev.sort() | 200 proxies_dev.sort() |
197 if bad_proxies == proxies: | 201 if bad_proxies == proxies: |
198 return True, proxies | 202 return True, proxies |
199 elif bad_proxies == proxies_dev: | 203 elif bad_proxies == proxies_dev: |
200 return True, proxies_dev | 204 return True, proxies_dev |
201 return False, [] | 205 return False, [] |
202 | 206 |
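IsProxyBypassed decides the bypass state by sorting the badProxies list reported by net-internals and comparing it against the sorted list of configured proxies (or their proxy-dev variant). A toy, self-contained illustration of that comparison using hypothetical origins:

bad_proxies = sorted(['https://proxy.example.com:443',
                      'http://compress.example.com:80'])
configured = sorted(['https://proxy.example.com:443',
                     'http://compress.example.com:80'])
# When the two sorted lists match, the method returns (True, configured);
# otherwise it returns (False, []).
print(bad_proxies == configured)  # True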
203 @staticmethod | 207 def VerifyBadProxies(self, bad_proxies, expected_bad_proxies): |
204 def VerifyBadProxies( | 208 """Verify the bad proxy list and their retry times are expected. |
205 badProxies, expected_proxies, | |
206 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, | |
207 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): | |
208 """Verify the bad proxy list and their retry times are expected. """ | |
209 if not badProxies or (len(badProxies) != len(expected_proxies)): | |
210 return False | |
211 | 209 |
212 # Check all expected proxies. | 210 Args: |
213 proxies = [p['proxy'] for p in badProxies] | 211 bad_proxies: the list of actual bad proxies and their retry times. |
214 expected_proxies.sort() | 212 expected_bad_proxies: a list of dictionaries in the form: |
215 proxies.sort() | |
216 if not expected_proxies == proxies: | |
217 raise ChromeProxyMetricException, ( | |
218 'Bad proxies: got %s want %s' % ( | |
219 str(badProxies), str(expected_proxies))) | |
220 | 213 |
221 # Check retry time | 214 {'proxy': <proxy origin>, |
222 for p in badProxies: | 215 'retry_seconds_low': <minimum bypass duration in seconds>, |
| 216 'retry_seconds_high': <maximum bypass duration in seconds>} |
| 217 |
| 218 If an element in the list is missing either the 'retry_seconds_low' |
| 219 entry or the 'retry_seconds_high' entry, the default bypass minimum |
| 220 and maximum durations respectively will be used for that element. |
| 221 """ |
| 222 if not bad_proxies: |
| 223 bad_proxies = [] |
| 224 |
| 225 # Check that each of the proxy origins and retry times match. |
| 226 for bad_proxy, expected_bad_proxy in map(None, bad_proxies, |
| 227 expected_bad_proxies): |
| 228 # Check if the proxy origins match, allowing for the proxy-dev origin in |
| 229 # the place of the HTTPS proxy origin. |
| 230 if (bad_proxy['proxy'] != expected_bad_proxy['proxy'] and |
| 231 bad_proxy['proxy'] != expected_bad_proxy['proxy'].replace( |
| 232 self.effective_proxies['proxy'], |
| 233 self.effective_proxies['proxy-dev'])): |
| 234 raise ChromeProxyMetricException, ( |
| 235 'Actual and expected bad proxies should match: %s vs. %s' % ( |
| 236 str(bad_proxy), str(expected_bad_proxy))) |
| 237 |
| 238 # Check that the retry times match. |
| 239 retry_seconds_low = expected_bad_proxy.get('retry_seconds_low', |
| 240 DEFAULT_BYPASS_MIN_SECONDS) |
| 241 retry_seconds_high = expected_bad_proxy.get('retry_seconds_high', |
| 242 DEFAULT_BYPASS_MAX_SECONDS) |
223 retry_time_low = (datetime.datetime.now() + | 243 retry_time_low = (datetime.datetime.now() + |
224 datetime.timedelta(seconds=retry_seconds_low)) | 244 datetime.timedelta(seconds=retry_seconds_low)) |
225 retry_time_high = (datetime.datetime.now() + | 245 retry_time_high = (datetime.datetime.now() + |
226 datetime.timedelta(seconds=retry_seconds_high)) | 246 datetime.timedelta(seconds=retry_seconds_high)) |
227 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000) | 247 got_retry_time = datetime.datetime.fromtimestamp( |
| 248 int(bad_proxy['retry'])/1000) |
228 if not ProxyRetryTimeInRange( | 249 if not ProxyRetryTimeInRange( |
229 got_retry_time, retry_time_low, retry_time_high): | 250 got_retry_time, retry_time_low, retry_time_high): |
230 raise ChromeProxyMetricException, ( | 251 raise ChromeProxyMetricException, ( |
231 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % ( | 252 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % ( |
232 p['proxy'], str(got_retry_time), str(retry_time_low), | 253 bad_proxy['proxy'], str(got_retry_time), str(retry_time_low), |
233 str(retry_time_high))) | 254 str(retry_time_high))) |
234 return True | |
235 | 255 |
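The retry check converts each expected bypass duration into an absolute [low, high] datetime window and requires the recorded retry time to fall inside it. ProxyRetryTimeInRange is defined elsewhere in this file; a hedged sketch, assuming it is a plain inclusive range test:

import datetime

def proxy_retry_time_in_range(retry_time, low, high):
  # Assumed behaviour of ProxyRetryTimeInRange: inclusive range membership.
  return low <= retry_time <= high

now = datetime.datetime.now()
print(proxy_retry_time_in_range(now + datetime.timedelta(seconds=90),
                                now + datetime.timedelta(seconds=60),
                                now + datetime.timedelta(seconds=300)))  # True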
236 def VerifyAllProxiesBypassed(self, tab): | 256 def VerifyAllProxiesBypassed(self, tab): |
237 if tab: | 257 if tab: |
238 info = GetProxyInfoFromNetworkInternals(tab) | 258 info = GetProxyInfoFromNetworkInternals(tab) |
239 if not info['enabled']: | 259 if not info['enabled']: |
240 raise ChromeProxyMetricException, ( | 260 raise ChromeProxyMetricException, ( |
241 'Chrome proxy should be enabled. proxy info: %s' % info) | 261 'Chrome proxy should be enabled. proxy info: %s' % info) |
242 is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab) | 262 is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab) |
243 if not is_bypassed: | 263 if not is_bypassed: |
244 raise ChromeProxyMetricException, ( | 264 raise ChromeProxyMetricException, ( |
245 'Chrome proxy should be bypassed. proxy info: %s' % info) | 265 'Chrome proxy should be bypassed. proxy info: %s' % info) |
246 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies) | 266 self.VerifyBadProxies(info['badProxies'], |
| 267 [{'proxy': p} for p in expected_bad_proxies]) |
247 | 268 |
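Since VerifyBadProxies now takes a list of dictionaries rather than bare origin strings, VerifyAllProxiesBypassed wraps each bypassed origin in a one-key dict, leaving the retry bounds at their defaults. A small illustration with hypothetical origins:

bypassed = ['http://compress.example.com:80',
            'https://proxy.example.com:443']
print([{'proxy': p} for p in bypassed])
# [{'proxy': 'http://compress.example.com:80'},
#  {'proxy': 'https://proxy.example.com:443'}]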
248 def AddResultsForBypass(self, tab, results): | 269 def AddResultsForBypass(self, tab, results): |
249 bypass_count = 0 | 270 bypass_count = 0 |
250 for resp in self.IterResponses(tab): | 271 for resp in self.IterResponses(tab): |
251 if resp.HasChromeProxyViaHeader(): | 272 if resp.HasChromeProxyViaHeader(): |
252 r = resp.response | 273 r = resp.response |
253 raise ChromeProxyMetricException, ( | 274 raise ChromeProxyMetricException, ( |
254 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( | 275 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( |
255 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 276 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
256 bypass_count += 1 | 277 bypass_count += 1 |
257 | 278 |
258 self.VerifyAllProxiesBypassed(tab) | 279 self.VerifyAllProxiesBypassed(tab) |
259 results.AddValue(scalar.ScalarValue( | 280 results.AddValue(scalar.ScalarValue( |
260 results.current_page, 'bypass', 'count', bypass_count)) | 281 results.current_page, 'bypass', 'count', bypass_count)) |
261 | 282 |
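Each metric reports its counters through Telemetry's results object as page-scoped scalar values; scalar is presumably imported from telemetry.value near the top of the file, outside this hunk. A minimal sketch of the reporting pattern:

from telemetry.value import scalar  # assumed import path for this file

def report_bypass_count(results, bypass_count):
  # One 'bypass' scalar (unit: count) attached to the page under test.
  results.AddValue(scalar.ScalarValue(
      results.current_page, 'bypass', 'count', bypass_count))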
| 283 def AddResultsForFallback(self, tab, results): |
| 284 via_proxy_count = 0 |
| 285 bypass_count = 0 |
| 286 for resp in self.IterResponses(tab): |
| 287 if resp.HasChromeProxyViaHeader(): |
| 288 via_proxy_count += 1 |
| 289 elif resp.ShouldHaveChromeProxyViaHeader(): |
| 290 bypass_count += 1 |
| 291 |
| 292 if bypass_count != 1: |
| 293 raise ChromeProxyMetricException, ( |
| 294 'Only the triggering response should have bypassed all proxies.') |
| 295 |
| 296 info = GetProxyInfoFromNetworkInternals(tab) |
| 297 if not 'enabled' in info or not info['enabled']: |
| 298 raise ChromeProxyMetricException, ( |
| 299 'Chrome proxy should be enabled. proxy info: %s' % info) |
| 300 self.VerifyBadProxies(info['badProxies'], |
| 301 [{'proxy': self.effective_proxies['proxy']}]) |
| 302 |
| 303 results.AddValue(scalar.ScalarValue( |
| 304 results.current_page, 'via_proxy', 'count', via_proxy_count)) |
| 305 results.AddValue(scalar.ScalarValue( |
| 306 results.current_page, 'bypass', 'count', bypass_count)) |
| 307 |
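AddResultsForFallback encodes the fallback invariant: exactly one eligible response (the one that triggered the fallback) is missing the Via header, every other response is still proxied, and only the primary proxy lands on the bad-proxy list. A toy, runnable illustration of the counting step:

# True = response carried the proxy Via header (toy data).
via_flags = [True, True, False, True]
via_proxy_count = sum(1 for flag in via_flags if flag)
bypass_count = sum(1 for flag in via_flags if not flag)
assert bypass_count == 1, 'Only the triggering response should have bypassed.'
print('%d %d' % (via_proxy_count, bypass_count))  # 3 1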
262 def AddResultsForCorsBypass(self, tab, results): | 308 def AddResultsForCorsBypass(self, tab, results): |
263 eligible_response_count = 0 | 309 eligible_response_count = 0 |
264 bypass_count = 0 | 310 bypass_count = 0 |
265 bypasses = {} | 311 bypasses = {} |
266 for resp in self.IterResponses(tab): | 312 for resp in self.IterResponses(tab): |
267 logging.warn('got a resource %s' % (resp.response.url)) | 313 logging.warn('got a resource %s' % (resp.response.url)) |
268 | 314 |
269 for resp in self.IterResponses(tab): | 315 for resp in self.IterResponses(tab): |
270 if resp.ShouldHaveChromeProxyViaHeader(): | 316 if resp.ShouldHaveChromeProxyViaHeader(): |
271 eligible_response_count += 1 | 317 eligible_response_count += 1 |
(...skipping 73 matching lines...) |
345 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( | 391 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( |
346 r.url, r.status, r.status_text, r.headers)) | 392 r.url, r.status, r.status_text, r.headers)) |
347 if count == safebrowsing_count: | 393 if count == safebrowsing_count: |
348 results.AddValue(scalar.ScalarValue( | 394 results.AddValue(scalar.ScalarValue( |
349 results.current_page, 'safebrowsing', 'boolean', True)) | 395 results.current_page, 'safebrowsing', 'boolean', True)) |
350 else: | 396 else: |
351 raise ChromeProxyMetricException, ( | 397 raise ChromeProxyMetricException, ( |
352 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( | 398 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( |
353 count, safebrowsing_count)) | 399 count, safebrowsing_count)) |
354 | 400 |
355 def ProxyListForDev(self, proxies): | |
356 return [self.effective_proxies['proxy-dev'] | |
357 if proxy == self.effective_proxies['proxy'] | |
358 else proxy for proxy in proxies] | |
359 | |
360 def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies): | 401 def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies): |
361 info = GetProxyInfoFromNetworkInternals(tab) | 402 info = GetProxyInfoFromNetworkInternals(tab) |
362 if not 'enabled' in info or not info['enabled']: | 403 if not 'enabled' in info or not info['enabled']: |
363 raise ChromeProxyMetricException, ( | 404 raise ChromeProxyMetricException, ( |
364 'Chrome proxy should be enabled. proxy info: %s' % info) | 405 'Chrome proxy should be enabled. proxy info: %s' % info) |
365 proxies = info['proxies'] | 406 proxies = info['proxies'] |
366 if (proxies != expected_proxies and | 407 if (proxies != expected_proxies and |
367 proxies != self.ProxyListForDev(expected_proxies)): | 408 proxies != self.ProxyListForDev(expected_proxies)): |
368 raise ChromeProxyMetricException, ( | 409 raise ChromeProxyMetricException, ( |
369 'Wrong effective proxies (%s). Expect: "%s"' % ( | 410 'Wrong effective proxies (%s). Expect: "%s"' % ( |
(...skipping 18 matching lines...) |
388 expected_bad_proxies = [] | 429 expected_bad_proxies = [] |
389 | 430 |
390 self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies) | 431 self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies) |
391 results.AddValue(scalar.ScalarValue( | 432 results.AddValue(scalar.ScalarValue( |
392 results.current_page, 'http_fallback', 'boolean', True)) | 433 results.current_page, 'http_fallback', 'boolean', True)) |
393 | 434 |
394 def AddResultsForHTTPToDirectFallback(self, tab, results): | 435 def AddResultsForHTTPToDirectFallback(self, tab, results): |
395 self.VerifyAllProxiesBypassed(tab) | 436 self.VerifyAllProxiesBypassed(tab) |
396 results.AddValue(scalar.ScalarValue( | 437 results.AddValue(scalar.ScalarValue( |
397 results.current_page, 'direct_fallback', 'boolean', True)) | 438 results.current_page, 'direct_fallback', 'boolean', True)) |
| 439 |
| 440 def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies): |
| 441 """Verify results for an explicit bypass test. |
| 442 |
| 443 Args: |
| 444 tab: the tab for the test. |
| 445 results: the results object to add the results values to. |
| 446 expected_bad_proxies: A list of dictionary objects representing |
| 447 expected bad proxies and their expected retry time windows. |
| 448 See the definition of VerifyBadProxies for details. |
| 449 """ |
| 450 info = GetProxyInfoFromNetworkInternals(tab) |
| 451 if not 'enabled' in info or not info['enabled']: |
| 452 raise ChromeProxyMetricException, ( |
| 453 'Chrome proxy should be enabled. proxy info: %s' % info) |
| 454 self.VerifyBadProxies(info['badProxies'], |
| 455 expected_bad_proxies) |
| 456 results.AddValue(scalar.ScalarValue( |
| 457 results.current_page, 'explicit_bypass', 'boolean', True)) |
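A hypothetical call site for AddResultsForExplicitBypass, e.g. from a test whose page is expected to put the primary proxy on the bad-proxy list for a specific window; the origin comes from effective_proxies and the retry window values here are purely illustrative:

# Hypothetical usage; metric, tab and results come from the Telemetry run.
expected_bad_proxies = [{
    'proxy': metric.effective_proxies['proxy'],
    'retry_seconds_low': 20,
    'retry_seconds_high': 21,
}]
metric.AddResultsForExplicitBypass(tab, results, expected_bad_proxies)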