Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import datetime | |
| 6 import logging | 5 import logging |
| 7 import os | 6 import time |
| 8 | 7 |
| 9 from integration_tests import network_metrics | 8 from integration_tests import network_metrics |
| 10 from telemetry.core import util | |
| 11 from telemetry.page import page_test | 9 from telemetry.page import page_test |
| 12 from telemetry.value import scalar | 10 from telemetry.value import scalar |
| 13 | 11 |
| 14 | 12 |
| 15 class ChromeProxyMetricException(page_test.MeasurementFailure): | 13 class ChromeProxyMetricException(page_test.MeasurementFailure): |
| 16 pass | 14 pass |
| 17 | 15 |
| 18 | 16 |
| 19 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' | 17 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' |
| 20 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' | 18 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' |
| 21 | 19 |
| 22 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' | |
| 23 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS | |
| 24 PROXY_DEV_SETTING_HTTP = 'proxy-xt.googlezip.net:80' | |
| 25 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' | |
| 26 PROXY_SETTING_DIRECT = 'direct://' | |
| 27 | |
| 28 # The default Chrome Proxy bypass time is a range from one to five minutes. | |
| 29 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. | |
| 30 DEFAULT_BYPASS_MIN_SECONDS = 60 | |
| 31 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 | |
| 32 | |
| 33 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): | |
| 34 tab.Navigate(url) | |
| 35 with open(os.path.join(os.path.dirname(__file__), | |
| 36 'chrome_proxy_metrics.js')) as f: | |
| 37 js = f.read() | |
| 38 tab.ExecuteJavaScript(js) | |
| 39 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) | |
| 40 | |
| 41 # Sometimes, the proxy information on net_internals#proxy is slow to come up. | |
| 42 # In order to prevent this from causing tests to flake frequently, wait for | |
| 43 # up to 10 seconds for this information to appear. | |
| 44 def IsDataReductionProxyEnabled(): | |
| 45 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()') | |
| 46 return info['enabled'] | |
| 47 | |
| 48 util.WaitFor(IsDataReductionProxyEnabled, 10) | |
| 49 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()') | |
| 50 return info | |
| 51 | |
| 52 | |
| 53 def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30): | |
| 54 return (retry_time >= low - datetime.timedelta(seconds=grace_seconds) and | |
| 55 (retry_time < high + datetime.timedelta(seconds=grace_seconds))) | |
| 56 | |
| 57 | 20 |
| 58 class ChromeProxyResponse(network_metrics.HTTPResponse): | 21 class ChromeProxyResponse(network_metrics.HTTPResponse): |
| 59 """ Represents an HTTP response from a timeline event.""" | 22 """ Represents an HTTP response from a timeline event.""" |
| 60 def __init__(self, event): | 23 def __init__(self, event): |
| 61 super(ChromeProxyResponse, self).__init__(event) | 24 super(ChromeProxyResponse, self).__init__(event) |
| 62 | 25 |
| 63 def ShouldHaveChromeProxyViaHeader(self): | 26 def ShouldHaveChromeProxyViaHeader(self): |
| 64 resp = self.response | 27 resp = self.response |
| 65 # Ignore https and data url | 28 # Ignore https and data url |
| 66 if resp.url.startswith('https') or resp.url.startswith('data:'): | 29 if resp.url.startswith('https') or resp.url.startswith('data:'): |
| (...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 119 return kvp[1].strip() | 82 return kvp[1].strip() |
| 120 return None | 83 return None |
| 121 | 84 |
| 122 | 85 |
| 123 class ChromeProxyMetric(network_metrics.NetworkMetric): | 86 class ChromeProxyMetric(network_metrics.NetworkMetric): |
| 124 """A Chrome proxy timeline metric.""" | 87 """A Chrome proxy timeline metric.""" |
| 125 | 88 |
| 126 def __init__(self): | 89 def __init__(self): |
| 127 super(ChromeProxyMetric, self).__init__() | 90 super(ChromeProxyMetric, self).__init__() |
| 128 self.compute_data_saving = True | 91 self.compute_data_saving = True |
| 129 self.effective_proxies = { | |
| 130 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, | |
| 131 "proxy-dev": PROXY_DEV_SETTING_HTTP, | |
| 132 "fallback": PROXY_SETTING_HTTP, | |
| 133 "direct": PROXY_SETTING_DIRECT, | |
| 134 } | |
| 135 | 92 |
| 136 def SetEvents(self, events): | 93 def SetEvents(self, events): |
| 137 """Used for unittest.""" | 94 """Used for unittest.""" |
| 138 self._events = events | 95 self._events = events |
| 139 | 96 |
| 140 def ResponseFromEvent(self, event): | 97 def ResponseFromEvent(self, event): |
| 141 return ChromeProxyResponse(event) | 98 return ChromeProxyResponse(event) |
| 142 | 99 |
| 143 def AddResults(self, tab, results): | 100 def AddResults(self, tab, results): |
| 144 raise NotImplementedError | 101 raise NotImplementedError |
| (...skipping 16 matching lines...) Expand all Loading... | |
| 161 results.current_page, 'resources_via_proxy', 'count', | 118 results.current_page, 'resources_via_proxy', 'count', |
| 162 resources_via_proxy)) | 119 resources_via_proxy)) |
| 163 results.AddValue(scalar.ScalarValue( | 120 results.AddValue(scalar.ScalarValue( |
| 164 results.current_page, 'resources_from_cache', 'count', | 121 results.current_page, 'resources_from_cache', 'count', |
| 165 resources_from_cache)) | 122 resources_from_cache)) |
| 166 results.AddValue(scalar.ScalarValue( | 123 results.AddValue(scalar.ScalarValue( |
| 167 results.current_page, 'resources_direct', 'count', resources_direct)) | 124 results.current_page, 'resources_direct', 'count', resources_direct)) |
| 168 | 125 |
| 169 def AddResultsForHeaderValidation(self, tab, results): | 126 def AddResultsForHeaderValidation(self, tab, results): |
| 170 via_count = 0 | 127 via_count = 0 |
| 128 | |
| 171 for resp in self.IterResponses(tab): | 129 for resp in self.IterResponses(tab): |
| 172 if resp.IsValidByViaHeader(): | 130 if resp.IsValidByViaHeader(): |
| 173 via_count += 1 | 131 via_count += 1 |
| 174 else: | 132 else: |
| 175 r = resp.response | 133 r = resp.response |
| 176 raise ChromeProxyMetricException, ( | 134 raise ChromeProxyMetricException, ( |
| 177 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( | 135 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( |
| 178 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 136 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
| 179 results.AddValue(scalar.ScalarValue( | 137 results.AddValue(scalar.ScalarValue( |
| 180 results.current_page, 'checked_via_header', 'count', via_count)) | 138 results.current_page, 'checked_via_header', 'count', via_count)) |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 226 raise ChromeProxyMetricException, ( | 184 raise ChromeProxyMetricException, ( |
| 227 '%s: Response missing via header. Only "%s" clients should ' | 185 '%s: Response missing via header. Only "%s" clients should ' |
| 228 'bypass for this page, but this client is "%s".' % ( | 186 'bypass for this page, but this client is "%s".' % ( |
| 229 resp.response.url, bypass_for_client_type, client_type)) | 187 resp.response.url, bypass_for_client_type, client_type)) |
| 230 | 188 |
| 231 results.AddValue(scalar.ScalarValue( | 189 results.AddValue(scalar.ScalarValue( |
| 232 results.current_page, 'via', 'count', via_count)) | 190 results.current_page, 'via', 'count', via_count)) |
| 233 results.AddValue(scalar.ScalarValue( | 191 results.AddValue(scalar.ScalarValue( |
| 234 results.current_page, 'bypass', 'count', bypass_count)) | 192 results.current_page, 'bypass', 'count', bypass_count)) |
| 235 | 193 |
| 236 def ProxyListForDev(self, proxies): | |
| 237 return [self.effective_proxies['proxy-dev'] | |
| 238 if proxy == self.effective_proxies['proxy'] | |
| 239 else proxy for proxy in proxies] | |
| 240 | |
| 241 def IsProxyBypassed(self, tab): | |
| 242 """Get whether all configured proxies are bypassed. | |
| 243 | |
| 244 Returns: | |
| 245 A tuple of the form (boolean, string list). If all configured proxies | |
| 246 are bypassed, then the return value will be (True, bypassed proxies). | |
| 247 Otherwise, the return value will be (False, empty list). | |
| 248 """ | |
| 249 if not tab: | |
| 250 return False, [] | |
| 251 | |
| 252 info = GetProxyInfoFromNetworkInternals(tab) | |
| 253 if not info['enabled']: | |
| 254 raise ChromeProxyMetricException, ( | |
| 255 'Chrome proxy should be enabled. proxy info: %s' % info) | |
| 256 if not info['badProxies']: | |
| 257 return False, [] | |
| 258 | |
| 259 bad_proxies = [str(p['proxy']) for p in info['badProxies']] | |
| 260 # Expect all but the "direct://" proxy to be bad. | |
| 261 expected_bad_proxies = info['proxies'][:-1] | |
| 262 if set(bad_proxies) == set(expected_bad_proxies): | |
| 263 return True, expected_bad_proxies | |
| 264 return False, [] | |
| 265 | |
| 266 def VerifyBadProxies(self, bad_proxies, expected_bad_proxies): | |
| 267 """Verify the bad proxy list and their retry times are expected. | |
| 268 | |
| 269 Args: | |
| 270 bad_proxies: the list of actual bad proxies and their retry times. | |
| 271 expected_bad_proxies: a list of dictionaries in the form: | |
| 272 | |
| 273 {'proxy': <proxy origin>, | |
| 274 'retry_seconds_low': <minimum bypass duration in seconds>, | |
| 275 'retry_seconds_high': <maximum bypass duration in seconds>} | |
| 276 | |
| 277 If an element in the list is missing either the 'retry_seconds_low' | |
| 278 entry or the 'retry_seconds_high' entry, the default bypass minimum | |
| 279 and maximum durations respectively will be used for that element. | |
| 280 """ | |
| 281 if not bad_proxies: | |
| 282 bad_proxies = [] | |
| 283 if len(bad_proxies) != len(expected_bad_proxies): | |
| 284 raise ChromeProxyMetricException, ( | |
| 285 'Actual and expected bad proxy lists should match: %s vs. %s' % ( | |
| 286 str(bad_proxies), str(expected_bad_proxies))) | |
| 287 | |
| 288 # Check that each of the proxy origins and retry times match. | |
| 289 for expected_bad_proxy in expected_bad_proxies: | |
| 290 # Find a matching actual bad proxy origin, allowing for the proxy-dev | |
| 291 # origin in the place of the HTTPS proxy origin. | |
| 292 bad_proxy = None | |
| 293 for actual_proxy in bad_proxies: | |
| 294 if (expected_bad_proxy['proxy'] == actual_proxy['proxy'] or ( | |
| 295 self.effective_proxies['proxy-dev'] == actual_proxy['proxy'] and | |
| 296 self.effective_proxies['proxy'] == expected_bad_proxy['proxy'])): | |
| 297 bad_proxy = actual_proxy | |
| 298 break | |
| 299 if not bad_proxy: | |
| 300 raise ChromeProxyMetricException, ( | |
| 301 'No match for expected bad proxy %s - actual and expected bad ' | |
| 302 'proxies should match: %s vs. %s' % (expected_bad_proxy['proxy'], | |
| 303 str(bad_proxies), | |
| 304 str(expected_bad_proxies))) | |
| 305 | |
| 306 # Check that the retry times match. | |
| 307 retry_seconds_low = expected_bad_proxy.get('retry_seconds_low', | |
| 308 DEFAULT_BYPASS_MIN_SECONDS) | |
| 309 retry_seconds_high = expected_bad_proxy.get('retry_seconds_high', | |
| 310 DEFAULT_BYPASS_MAX_SECONDS) | |
| 311 retry_time_low = (datetime.datetime.now() + | |
| 312 datetime.timedelta(seconds=retry_seconds_low)) | |
| 313 retry_time_high = (datetime.datetime.now() + | |
| 314 datetime.timedelta(seconds=retry_seconds_high)) | |
| 315 got_retry_time = datetime.datetime.fromtimestamp( | |
| 316 int(bad_proxy['retry'])/1000) | |
| 317 if not ProxyRetryTimeInRange( | |
| 318 got_retry_time, retry_time_low, retry_time_high): | |
| 319 raise ChromeProxyMetricException, ( | |
| 320 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % ( | |
| 321 bad_proxy['proxy'], str(got_retry_time), str(retry_time_low), | |
| 322 str(retry_time_high))) | |
| 323 | |
| 324 def VerifyAllProxiesBypassed(self, tab): | |
| 325 """Verify that all proxies are bypassed for 1 to 5 minutes.""" | |
| 326 if tab: | |
| 327 info = GetProxyInfoFromNetworkInternals(tab) | |
| 328 if not info['enabled']: | |
| 329 raise ChromeProxyMetricException, ( | |
| 330 'Chrome proxy should be enabled. proxy info: %s' % info) | |
| 331 is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab) | |
| 332 if not is_bypassed: | |
| 333 raise ChromeProxyMetricException, ( | |
| 334 'Chrome proxy should be bypassed. proxy info: %s' % info) | |
| 335 self.VerifyBadProxies(info['badProxies'], | |
| 336 [{'proxy': p} for p in expected_bad_proxies]) | |
| 337 | |
| 338 def AddResultsForBypass(self, tab, results): | 194 def AddResultsForBypass(self, tab, results): |
| 339 bypass_count = 0 | 195 bypass_count = 0 |
| 196 | |
| 340 for resp in self.IterResponses(tab): | 197 for resp in self.IterResponses(tab): |
| 341 if resp.HasChromeProxyViaHeader(): | 198 if resp.HasChromeProxyViaHeader(): |
| 342 r = resp.response | 199 r = resp.response |
| 343 raise ChromeProxyMetricException, ( | 200 raise ChromeProxyMetricException, ( |
| 344 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( | 201 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( |
| 345 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 202 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
| 346 bypass_count += 1 | 203 bypass_count += 1 |
| 347 | 204 |
| 348 results.AddValue(scalar.ScalarValue( | 205 results.AddValue(scalar.ScalarValue( |
| 349 results.current_page, 'bypass', 'count', bypass_count)) | 206 results.current_page, 'bypass', 'count', bypass_count)) |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 382 'At least one response should be bypassed. ' | 239 'At least one response should be bypassed. ' |
| 383 '(eligible_response_count=%d, bypass_count=%d)\n' % ( | 240 '(eligible_response_count=%d, bypass_count=%d)\n' % ( |
| 384 eligible_response_count, bypass_count)) | 241 eligible_response_count, bypass_count)) |
| 385 | 242 |
| 386 results.AddValue(scalar.ScalarValue( | 243 results.AddValue(scalar.ScalarValue( |
| 387 results.current_page, 'cors_bypass', 'count', bypass_count)) | 244 results.current_page, 'cors_bypass', 'count', bypass_count)) |
| 388 | 245 |
| 389 def AddResultsForBlockOnce(self, tab, results): | 246 def AddResultsForBlockOnce(self, tab, results): |
| 390 eligible_response_count = 0 | 247 eligible_response_count = 0 |
| 391 bypass_count = 0 | 248 bypass_count = 0 |
| 249 | |
| 392 for resp in self.IterResponses(tab): | 250 for resp in self.IterResponses(tab): |
| 393 if resp.ShouldHaveChromeProxyViaHeader(): | 251 if resp.ShouldHaveChromeProxyViaHeader(): |
| 394 eligible_response_count += 1 | 252 eligible_response_count += 1 |
| 395 if not resp.HasChromeProxyViaHeader(): | 253 if not resp.HasChromeProxyViaHeader(): |
| 396 bypass_count += 1 | 254 bypass_count += 1 |
| 397 | 255 |
| 398 if eligible_response_count <= 1: | 256 if eligible_response_count <= 1: |
| 399 raise ChromeProxyMetricException, ( | 257 raise ChromeProxyMetricException, ( |
| 400 'There should be more than one DRP eligible response ' | 258 'There should be more than one DRP eligible response ' |
| 401 '(eligible_response_count=%d, bypass_count=%d)\n' % ( | 259 '(eligible_response_count=%d, bypass_count=%d)\n' % ( |
| 402 eligible_response_count, bypass_count)) | 260 eligible_response_count, bypass_count)) |
| 403 elif bypass_count != 1: | 261 elif bypass_count != 1: |
| 404 raise ChromeProxyMetricException, ( | 262 raise ChromeProxyMetricException, ( |
| 405 'Exactly one response should be bypassed. ' | 263 'Exactly one response should be bypassed. ' |
| 406 '(eligible_response_count=%d, bypass_count=%d)\n' % ( | 264 '(eligible_response_count=%d, bypass_count=%d)\n' % ( |
| 407 eligible_response_count, bypass_count)) | 265 eligible_response_count, bypass_count)) |
| 408 else: | 266 else: |
| 409 results.AddValue(scalar.ScalarValue( | 267 results.AddValue(scalar.ScalarValue( |
| 410 results.current_page, 'eligible_responses', 'count', | 268 results.current_page, 'eligible_responses', 'count', |
| 411 eligible_response_count)) | 269 eligible_response_count)) |
| 412 results.AddValue(scalar.ScalarValue( | 270 results.AddValue(scalar.ScalarValue( |
| 413 results.current_page, 'bypass', 'count', bypass_count)) | 271 results.current_page, 'bypass', 'count', bypass_count)) |
| 414 | 272 |
| 415 def AddResultsForSafebrowsing(self, tab, results): | 273 def AddResultsForSafebrowsing(self, tab, results): |
| 416 count = 0 | 274 count = 0 |
| 417 safebrowsing_count = 0 | 275 safebrowsing_count = 0 |
| 276 | |
| 418 for resp in self.IterResponses(tab): | 277 for resp in self.IterResponses(tab): |
| 419 count += 1 | 278 count += 1 |
| 420 if resp.IsSafebrowsingResponse(): | 279 if resp.IsSafebrowsingResponse(): |
| 421 safebrowsing_count += 1 | 280 safebrowsing_count += 1 |
| 422 else: | 281 else: |
| 423 r = resp.response | 282 r = resp.response |
| 424 raise ChromeProxyMetricException, ( | 283 raise ChromeProxyMetricException, ( |
| 425 '%s: Not a valid safe browsing response.\n' | 284 '%s: Not a valid safe browsing response.\n' |
| 426 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( | 285 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( |
| 427 r.url, r.status, r.status_text, r.headers)) | 286 r.url, r.status, r.status_text, r.headers)) |
| 428 if count == safebrowsing_count: | 287 if count == safebrowsing_count: |
| 429 results.AddValue(scalar.ScalarValue( | 288 results.AddValue(scalar.ScalarValue( |
| 430 results.current_page, 'safebrowsing', 'boolean', True)) | 289 results.current_page, 'safebrowsing', 'boolean', True)) |
| 431 else: | 290 else: |
| 432 raise ChromeProxyMetricException, ( | 291 raise ChromeProxyMetricException, ( |
| 433 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( | 292 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( |
| 434 count, safebrowsing_count)) | 293 count, safebrowsing_count)) |
| 435 | 294 |
| 436 def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies): | 295 def AddResultsForHTTPFallback(self, tab, results): |
| 437 info = GetProxyInfoFromNetworkInternals(tab) | 296 via_fallback_count = 0 |
| 438 if not 'enabled' in info or not info['enabled']: | |
| 439 raise ChromeProxyMetricException, ( | |
| 440 'Chrome proxy should be enabled. proxy info: %s' % info) | |
| 441 proxies = info['proxies'] | |
| 442 if (set(proxies) != set(expected_proxies) and | |
| 443 set(proxies) != set(self.ProxyListForDev(expected_proxies))): | |
| 444 raise ChromeProxyMetricException, ( | |
| 445 'Wrong effective proxies (%s). Expect: "%s"' % ( | |
| 446 str(proxies), str(expected_proxies))) | |
| 447 | 297 |
| 448 bad_proxies = [] | 298 for resp in self.IterResponses(tab): |
| 449 if 'badProxies' in info and info['badProxies']: | 299 if resp.ShouldHaveChromeProxyViaHeader(): |
| 450 bad_proxies = [p['proxy'] for p in info['badProxies'] | 300 # All responses should have come through the HTTP fallback proxy, which |
| 451 if 'proxy' in p and p['proxy']] | 301 # means that they should have the via header, and if a remote port is |
| 452 if (set(bad_proxies) != set(expected_bad_proxies) and | 302 # defined, it should be port 80. |
| 453 set(bad_proxies) != set(self.ProxyListForDev(expected_bad_proxies))): | 303 if (not resp.HasChromeProxyViaHeader() or |
| 454 raise ChromeProxyMetricException, ( | 304 (resp.remote_port and resp.remote_port != 80)): |
| 455 'Wrong bad proxies (%s). Expect: "%s"' % ( | 305 r = resp.response |
| 456 str(bad_proxies), str(expected_bad_proxies))) | 306 raise ChromeProxyMetricException, ( |
| 307 '%s: Should have come through the fallback proxy.\n' | |
| 308 'Reponse: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % ( | |
| 309 r.url, str(resp.remote_port), r.status, r.status_text, | |
| 310 r.headers)) | |
| 311 via_fallback_count += 1 | |
| 457 | 312 |
| 458 def AddResultsForHTTPFallback( | |
| 459 self, tab, results, expected_proxies=None, expected_bad_proxies=None): | |
| 460 if not expected_proxies: | |
| 461 expected_proxies = [self.effective_proxies['fallback'], | |
| 462 self.effective_proxies['direct']] | |
| 463 if not expected_bad_proxies: | |
| 464 expected_bad_proxies = [] | |
| 465 | |
| 466 # TODO(sclittle): Remove this dependency on net-internals#proxy once an | |
| 467 # alternative method of verifying that Chrome is on the fallback proxy | |
| 468 # exists. | |
| 469 self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies) | |
| 470 results.AddValue(scalar.ScalarValue( | 313 results.AddValue(scalar.ScalarValue( |
| 471 results.current_page, 'http_fallback', 'boolean', True)) | 314 results.current_page, 'via_fallback', 'count', via_fallback_count)) |
| 472 | 315 |
| 473 def AddResultsForHTTPToDirectFallback(self, tab, results): | 316 def AddResultsForHTTPToDirectFallback(self, tab, results): |
| 317 via_fallback_count = 0 | |
| 474 bypass_count = 0 | 318 bypass_count = 0 |
| 475 for resp in self.IterResponses(tab): | 319 responses = self.IterResponses(tab) |
| 320 | |
| 321 # The very first response should be through the HTTP fallback proxy. | |
| 322 fallback_resp = next(responses, None) | |
| 323 if not fallback_resp: | |
| 324 raise ChromeProxyMetricException, 'There should be at least one response.' | |
| 325 elif (not fallback_resp.HasChromeProxyViaHeader() or | |
| 326 fallback_resp.remote_port != 80): | |
| 327 r = fallback_resp.response | |
| 328 raise ChromeProxyMetricException, ( | |
| 329 'Response for %s should have come through the fallback proxy.\n' | |
| 330 'Reponse: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % ( | |
| 331 r.url, str(fallback_resp.remote_port), r.status, r.status_text, | |
| 332 r.headers)) | |
| 333 else: | |
| 334 via_fallback_count += 1 | |
| 335 | |
| 336 # All other responses should have been bypassed. | |
| 337 for resp in responses: | |
| 476 if resp.HasChromeProxyViaHeader(): | 338 if resp.HasChromeProxyViaHeader(): |
| 477 r = resp.response | 339 r = resp.response |
| 478 raise ChromeProxyMetricException, ( | 340 raise ChromeProxyMetricException, ( |
| 479 'Response for %s should not have via header. ' | 341 'Response for %s should not have via header.\n' |
| 480 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( | 342 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( |
| 481 r.url, r.status, r.status_text, r.headers)) | 343 r.url, r.status, r.status_text, r.headers)) |
| 482 else: | 344 else: |
| 483 bypass_count += 1 | 345 bypass_count += 1 |
| 484 | 346 |
| 485 results.AddValue(scalar.ScalarValue( | 347 results.AddValue(scalar.ScalarValue( |
| 348 results.current_page, 'via_fallback', 'count', via_fallback_count)) | |
| 349 results.AddValue(scalar.ScalarValue( | |
| 486 results.current_page, 'bypass', 'count', bypass_count)) | 350 results.current_page, 'bypass', 'count', bypass_count)) |
| 487 | 351 |
| 488 def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies): | 352 def AddResultsForReenableAfterBypass( |
| 489 """Verify results for an explicit bypass test. | 353 self, tab, results, bypass_seconds_low, bypass_seconds_high): |
| 354 """Verify results for a re-enable after bypass test. | |
| 490 | 355 |
| 491 Args: | 356 Args: |
| 492 tab: the tab for the test. | 357 tab: the tab for the test. |
| 493 results: the results object to add the results values to. | 358 results: the results object to add the results values to. |
| 494 expected_bad_proxies: A list of dictionary objects representing | 359 bypass_seconds_low: the minimum duration of the bypass. |
| 495 expected bad proxies and their expected retry time windows. | 360 bypass_seconds_high: the maximum duration of the bypass. |
| 496 See the definition of VerifyBadProxies for details. | |
| 497 """ | 361 """ |
| 498 info = GetProxyInfoFromNetworkInternals(tab) | 362 bypass_count = 0 |
| 499 if not 'enabled' in info or not info['enabled']: | 363 via_count = 0 |
| 500 raise ChromeProxyMetricException, ( | 364 |
| 501 'Chrome proxy should be enabled. proxy info: %s' % info) | 365 for resp in self.IterResponses(tab): |
| 502 # TODO(sclittle): Remove this dependency on net-internals#proxy once an | 366 if resp.HasChromeProxyViaHeader(): |
| 503 # alternative method of verifying that Chrome is on the fallback proxy | 367 r = resp.response |
| 504 # exists. | 368 raise ChromeProxyMetricException, ( |
| 505 self.VerifyBadProxies(info['badProxies'], | 369 'Response for %s should not have via header.\n' |
| 506 expected_bad_proxies) | 370 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( |
| 371 r.url, r.status, r.status_text, r.headers)) | |
| 372 else: | |
| 373 bypass_count += 1 | |
| 374 | |
| 375 # Wait until 30 seconds before the bypass should expire, and fetch a page. | |
| 376 # It should not have the via header because the proxy should still be | |
| 377 # bypassed. | |
| 378 time.sleep(bypass_seconds_low - 30) | |
| 379 | |
| 380 tab.ClearCache(force=True) | |
| 381 before_metrics = ChromeProxyMetric() | |
| 382 before_metrics.Start(results.current_page, tab) | |
| 383 tab.Navigate('http://chromeproxy-test.appspot.com/default') | |
| 384 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10) | |
| 385 before_metrics.Stop(results.current_page, tab) | |
| 386 | |
| 387 for resp in before_metrics.IterResponses(tab): | |
| 388 if resp.HasChromeProxyViaHeader(): | |
| 389 r = resp.response | |
| 390 raise ChromeProxyMetricException, ( | |
| 391 'Response for %s should not have via header; proxy should still ' | |
| 392 'be bypassed.\nReponse: status=(%d, %s)\nHeaders:\n %s' % ( | |
| 393 r.url, r.status, r.status_text, r.headers)) | |
| 394 else: | |
| 395 bypass_count += 1 | |
| 396 | |
| 397 # Wait until 30 seconds after the bypass should expire, and fetch a page. It | |
|
bengr
2015/01/15 00:06:09
Is there no way to get around such delays?
sclittle
2015/01/15 00:44:35
See my comment in chrome_proxy_measurements.py.
| |
| 398 # should have the via header since the proxy should no longer be bypassed. | |
| 399 time.sleep((bypass_seconds_high + 30) - (bypass_seconds_low - 30)) | |
| 400 | |
| 401 tab.ClearCache(force=True) | |
| 402 after_metrics = ChromeProxyMetric() | |
| 403 after_metrics.Start(results.current_page, tab) | |
| 404 tab.Navigate('http://chromeproxy-test.appspot.com/default') | |
| 405 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10) | |
| 406 after_metrics.Stop(results.current_page, tab) | |
| 407 | |
| 408 for resp in after_metrics.IterResponses(tab): | |
| 409 if not resp.HasChromeProxyViaHeader(): | |
| 410 r = resp.response | |
| 411 raise ChromeProxyMetricException, ( | |
| 412 'Response for %s should have via header; proxy should no longer ' | |
| 413 'be bypassed.\nReponse: status=(%d, %s)\nHeaders:\n %s' % ( | |
| 414 r.url, r.status, r.status_text, r.headers)) | |
| 415 else: | |
| 416 via_count += 1 | |
| 417 | |
| 507 results.AddValue(scalar.ScalarValue( | 418 results.AddValue(scalar.ScalarValue( |
| 508 results.current_page, 'explicit_bypass', 'boolean', True)) | 419 results.current_page, 'bypass', 'count', bypass_count)) |
| 420 results.AddValue(scalar.ScalarValue( | |
| 421 results.current_page, 'via', 'count', via_count)) | |
| OLD | NEW |