| OLD | NEW |
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import datetime | 5 import datetime |
| 6 import logging | 6 import logging |
| 7 import os | 7 import os |
| 8 | 8 |
| 9 from integration_tests import network_metrics |
| 9 from telemetry.page import page_measurement | 10 from telemetry.page import page_measurement |
| 10 from metrics import network | |
| 11 from telemetry.value import scalar | 11 from telemetry.value import scalar |
| 12 | 12 |
| 13 | 13 |
| 14 class ChromeProxyMetricException(page_measurement.MeasurementFailure): | 14 class ChromeProxyMetricException(page_measurement.MeasurementFailure): |
| 15 pass | 15 pass |
| 16 | 16 |
| 17 | 17 |
| 18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' | 18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy' |
| 19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' | 19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy' |
| 20 | 20 |
| 21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' | 21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443' |
| 22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS | 22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS |
| 23 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' | 23 PROXY_SETTING_HTTP = 'compress.googlezip.net:80' |
| 24 PROXY_SETTING_DIRECT = 'direct://' | 24 PROXY_SETTING_DIRECT = 'direct://' |
| 25 | 25 |
| 26 # The default Chrome Proxy bypass time is a range from one to five minutes. | 26 # The default Chrome Proxy bypass time is a range from one to five minutes. |
| 27 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. | 27 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc. |
| 28 DEFAULT_BYPASS_MIN_SECONDS = 60 | 28 DEFAULT_BYPASS_MIN_SECONDS = 60 |
| 29 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 | 29 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60 |
| 30 | 30 |
| 31 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): | 31 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'): |
| 32 tab.Navigate(url) | 32 tab.Navigate(url) |
| 33 with open(os.path.join(os.path.dirname(__file__), 'chrome_proxy.js')) as f: | 33 with open(os.path.join(os.path.dirname(__file__), |
| 34 'chrome_proxy_metrics.js')) as f: |
| 34 js = f.read() | 35 js = f.read() |
| 35 tab.ExecuteJavaScript(js) | 36 tab.ExecuteJavaScript(js) |
| 36 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) | 37 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) |
| 37 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()') | 38 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()') |
| 38 return info | 39 return info |
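A minimal usage sketch for GetProxyInfoFromNetworkInternals, assuming a Telemetry tab is already open; only the 'badProxies'/'proxy' fields are taken from the verification code near the end of this diff, everything else is illustrative:

    info = GetProxyInfoFromNetworkInternals(tab)
    # 'badProxies' is the same field the bad-proxy verification below reads.
    bad_proxies = [p['proxy'] for p in info.get('badProxies', [])
                   if p.get('proxy')]
    logging.info('Bad proxies reported by net-internals: %s', bad_proxies)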
| 39 | 40 |
| 40 | 41 |
| 41 def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30): | 42 def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30): |
| 42 return (retry_time >= low and | 43 return (retry_time >= low and |
| 43 (retry_time < high + datetime.timedelta(seconds=grace_seconds))) | 44 (retry_time < high + datetime.timedelta(seconds=grace_seconds))) |
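For illustration, a hedged example of checking that a proxy retry time falls in the default one-to-five-minute bypass window defined above (the local variables here are assumptions, not part of the metric code):

    now = datetime.datetime.now()
    low = now + datetime.timedelta(seconds=DEFAULT_BYPASS_MIN_SECONDS)
    high = now + datetime.timedelta(seconds=DEFAULT_BYPASS_MAX_SECONDS)
    retry_time = now + datetime.timedelta(seconds=90)  # e.g. parsed from net-internals
    assert ProxyRetryTimeInRange(retry_time, low, high)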
| 44 | 45 |
| 45 | 46 |
| 46 class ChromeProxyResponse(network.HTTPResponse): | 47 class ChromeProxyResponse(network_metrics.HTTPResponse): |
| 47 """ Represents an HTTP response from a timeleine event.""" | 48 """ Represents an HTTP response from a timeleine event.""" |
| 48 def __init__(self, event): | 49 def __init__(self, event): |
| 49 super(ChromeProxyResponse, self).__init__(event) | 50 super(ChromeProxyResponse, self).__init__(event) |
| 50 | 51 |
| 51 def ShouldHaveChromeProxyViaHeader(self): | 52 def ShouldHaveChromeProxyViaHeader(self): |
| 52 resp = self.response | 53 resp = self.response |
| 53 # Ignore https and data URLs. | 54 # Ignore https and data URLs. |
| 54 if resp.url.startswith('https') or resp.url.startswith('data:'): | 55 if resp.url.startswith('https') or resp.url.startswith('data:'): |
| 55 return False | 56 return False |
| 56 # Ignore 304 Not Modified and cache hit. | 57 # Ignore 304 Not Modified and cache hit. |
| (...skipping 24 matching lines...) |
| 81 | 82 |
| 82 def IsSafebrowsingResponse(self): | 83 def IsSafebrowsingResponse(self): |
| 83 if (self.response.status == 307 and | 84 if (self.response.status == 307 and |
| 84 self.response.GetHeader('X-Malware-Url') == '1' and | 85 self.response.GetHeader('X-Malware-Url') == '1' and |
| 85 self.IsValidByViaHeader() and | 86 self.IsValidByViaHeader() and |
| 86 self.response.GetHeader('Location') == self.response.url): | 87 self.response.GetHeader('Location') == self.response.url): |
| 87 return True | 88 return True |
| 88 return False | 89 return False |
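As a rough sketch, the same safebrowsing check expressed over a plain dict of response headers (hypothetical helper; the Via comparison only approximates the IsValidByViaHeader logic elided above):

    def looks_like_safebrowsing(status, headers, url):
        return (status == 307 and
                headers.get('X-Malware-Url') == '1' and
                CHROME_PROXY_VIA_HEADER in headers.get('Via', '') and
                headers.get('Location') == url)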
| 89 | 90 |
| 90 | 91 |
| 91 class ChromeProxyMetric(network.NetworkMetric): | 92 class ChromeProxyMetric(network_metrics.NetworkMetric): |
| 92 """A Chrome proxy timeline metric.""" | 93 """A Chrome proxy timeline metric.""" |
| 93 | 94 |
| 94 def __init__(self): | 95 def __init__(self): |
| 95 super(ChromeProxyMetric, self).__init__() | 96 super(ChromeProxyMetric, self).__init__() |
| 96 self.compute_data_saving = True | 97 self.compute_data_saving = True |
| 97 self.effective_proxies = { | 98 self.effective_proxies = { |
| 98 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, | 99 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME, |
| 99 "fallback": PROXY_SETTING_HTTP, | 100 "fallback": PROXY_SETTING_HTTP, |
| 100 "direct": PROXY_SETTING_DIRECT, | 101 "direct": PROXY_SETTING_DIRECT, |
| 101 } | 102 } |
| (...skipping 161 matching lines...) |
| 263 bad_proxies = [] | 264 bad_proxies = [] |
| 264 if 'badProxies' in info and info['badProxies']: | 265 if 'badProxies' in info and info['badProxies']: |
| 265 bad_proxies = [p['proxy'] for p in info['badProxies'] | 266 bad_proxies = [p['proxy'] for p in info['badProxies'] |
| 266 if 'proxy' in p and p['proxy']] | 267 if 'proxy' in p and p['proxy']] |
| 267 if bad_proxies != expected_bad_proxies: | 268 if bad_proxies != expected_bad_proxies: |
| 268 raise ChromeProxyMetricException, ( | 269 raise ChromeProxyMetricException, ( |
| 269 'Wrong bad proxies (%s). Expect: "%s"' % ( | 270 'Wrong bad proxies (%s). Expect: "%s"' % ( |
| 270 str(bad_proxies), str(expected_bad_proxies))) | 271 str(bad_proxies), str(expected_bad_proxies))) |
| 271 results.AddValue(scalar.ScalarValue( | 272 results.AddValue(scalar.ScalarValue( |
| 272 results.current_page, 'http_fallback', 'boolean', True)) | 273 results.current_page, 'http_fallback', 'boolean', True)) |
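A hedged sketch of how a test might build expected_bad_proxies before running this verification, using the effective_proxies map from the metric's constructor (whether the net-internals entries carry a scheme, and which proxies a given scenario marks bad, are assumptions here):

    metric = ChromeProxyMetric()
    # After an HTTPS-proxy fallback, the primary proxy is expected to be
    # marked bad (scenario-dependent; shown only as an assumption).
    expected_bad_proxies = [metric.effective_proxies['proxy']]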