OLD | NEW |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import datetime | 5 import datetime |
| 6 import logging |
6 import os | 7 import os |
7 | 8 |
8 from telemetry.page import page_measurement | 9 from telemetry.page import page_measurement |
9 from metrics import network | 10 from metrics import network |
10 | 11 |
11 | 12 |
12 class ChromeProxyMetricException(page_measurement.MeasurementFailure): | 13 class ChromeProxyMetricException(page_measurement.MeasurementFailure): |
13 pass | 14 pass |
14 | 15 |
15 | 16 |
(...skipping 28 matching lines...) Expand all Loading... |
44 class ChromeProxyResponse(network.HTTPResponse): | 45 class ChromeProxyResponse(network.HTTPResponse): |
45 """ Represents an HTTP response from a timeline event.""" | 46 """ Represents an HTTP response from a timeline event.""" |
46 def __init__(self, event): | 47 def __init__(self, event): |
47 super(ChromeProxyResponse, self).__init__(event) | 48 super(ChromeProxyResponse, self).__init__(event) |
48 | 49 |
49 def ShouldHaveChromeProxyViaHeader(self): | 50 def ShouldHaveChromeProxyViaHeader(self): |
50 resp = self.response | 51 resp = self.response |
51 # Ignore https and data url | 52 # Ignore https and data url |
52 if resp.url.startswith('https') or resp.url.startswith('data:'): | 53 if resp.url.startswith('https') or resp.url.startswith('data:'): |
53 return False | 54 return False |
54 # Ignore 304 Not Modified. | 55 # Ignore 304 Not Modified and cache hit. |
55 if resp.status == 304: | 56 if resp.status == 304 or resp.served_from_cache: |
| 57 return False |
| 58 # Ignore invalid responses that don't have any header. Log a warning. |
| 59 if not resp.headers: |
| 60 logging.warning('response for %s does not have any header ' |
| 61 '(refer=%s, status=%s)', |
| 62 resp.url, resp.GetHeader('Referer'), resp.status) |
56 return False | 63 return False |
57 return True | 64 return True |
58 | 65 |
59 def HasChromeProxyViaHeader(self): | 66 def HasChromeProxyViaHeader(self): |
60 via_header = self.response.GetHeader('Via') | 67 via_header = self.response.GetHeader('Via') |
61 if not via_header: | 68 if not via_header: |
62 return False | 69 return False |
63 vias = [v.strip(' ') for v in via_header.split(',')] | 70 vias = [v.strip(' ') for v in via_header.split(',')] |
64 # The Via header is valid if it is the old format or the new format | 71 # The Via header is valid if it is the old format or the new format |
65 # with 4-character version prefix, for example, | 72 # with 4-character version prefix, for example, |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
115 resources_via_proxy += 1 | 122 resources_via_proxy += 1 |
116 else: | 123 else: |
117 resources_direct += 1 | 124 resources_direct += 1 |
118 | 125 |
119 results.Add('resources_via_proxy', 'count', resources_via_proxy) | 126 results.Add('resources_via_proxy', 'count', resources_via_proxy) |
120 results.Add('resources_from_cache', 'count', resources_from_cache) | 127 results.Add('resources_from_cache', 'count', resources_from_cache) |
121 results.Add('resources_direct', 'count', resources_direct) | 128 results.Add('resources_direct', 'count', resources_direct) |
122 | 129 |
123 def AddResultsForHeaderValidation(self, tab, results): | 130 def AddResultsForHeaderValidation(self, tab, results): |
124 via_count = 0 | 131 via_count = 0 |
| 132 bypass_count = 0 |
125 for resp in self.IterResponses(tab): | 133 for resp in self.IterResponses(tab): |
126 if resp.IsValidByViaHeader(): | 134 if resp.IsValidByViaHeader(): |
127 via_count += 1 | 135 via_count += 1 |
| 136 elif tab and self.IsProxyBypassed(tab): |
| 137 logging.warning('Proxy bypassed for %s', resp.response.url) |
| 138 bypass_count += 1 |
128 else: | 139 else: |
129 r = resp.response | 140 r = resp.response |
130 raise ChromeProxyMetricException, ( | 141 raise ChromeProxyMetricException, ( |
131 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( | 142 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( |
132 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 143 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
133 results.Add('checked_via_header', 'count', via_count) | 144 results.Add('checked_via_header', 'count', via_count) |
| 145 results.Add('request_bypassed', 'count', bypass_count) |
| 146 |
| 147 def IsProxyBypassed(self, tab): |
| 148 """ Returns True if all configured proxies are bypassed.""" |
| 149 info = GetProxyInfoFromNetworkInternals(tab) |
| 150 if not info['enabled']: |
| 151 raise ChromeProxyMetricException, ( |
| 152 'Chrome proxy should be enabled. proxy info: %s' % info) |
| 153 |
| 154 # list.sort() returns None; use sorted() so the comparison below is valid. |
| 155 bad_proxies = sorted(str(p['proxy']) for p in info['badProxies']) |
| 156 proxies = sorted([self.effective_proxies['proxy'], |
| 157 self.effective_proxies['fallback']]) |
| 158 return bad_proxies == proxies |
134 | 158 |
135 @staticmethod | 159 @staticmethod |
136 def VerifyBadProxies( | 160 def VerifyBadProxies( |
137 badProxies, expected_proxies, | 161 badProxies, expected_proxies, |
138 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, | 162 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS, |
139 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): | 163 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS): |
140 """Verify the bad proxy list and their retry times are expected. """ | 164 """Verify the bad proxy list and their retry times are expected. """ |
141 if not badProxies or (len(badProxies) != len(expected_proxies)): | 165 if not badProxies or (len(badProxies) != len(expected_proxies)): |
142 return False | 166 return False |
143 | 167 |
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
228 | 252 |
229 bad_proxies = [] | 253 bad_proxies = [] |
230 if 'badProxies' in info and info['badProxies']: | 254 if 'badProxies' in info and info['badProxies']: |
231 bad_proxies = [p['proxy'] for p in info['badProxies'] | 255 bad_proxies = [p['proxy'] for p in info['badProxies'] |
232 if 'proxy' in p and p['proxy']] | 256 if 'proxy' in p and p['proxy']] |
233 if bad_proxies != expected_bad_proxies: | 257 if bad_proxies != expected_bad_proxies: |
234 raise ChromeProxyMetricException, ( | 258 raise ChromeProxyMetricException, ( |
235 'Wrong bad proxies (%s). Expect: "%s"' % ( | 259 'Wrong bad proxies (%s). Expect: "%s"' % ( |
236 str(bad_proxies), str(expected_bad_proxies))) | 260 str(bad_proxies), str(expected_bad_proxies))) |
237 results.Add('http_fallback', 'boolean', True) | 261 results.Add('http_fallback', 'boolean', True) |
OLD | NEW |