Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(4)

Side by Side Diff: tools/perf/metrics/chrome_proxy.py

Issue 397483002: Move chrome_proxy tests from under tools/perf to tools/chrome_proxy/integration_tests. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: addressed comments and sync'ed Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « tools/perf/metrics/chrome_proxy.js ('k') | tools/perf/metrics/chrome_proxy_unittest.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5 import datetime
6 import logging
7 import os
8
9 from telemetry.page import page_measurement
10 from metrics import network
11 from telemetry.value import scalar
12
13
class ChromeProxyMetricException(page_measurement.MeasurementFailure):
  """Raised when an expected Chrome proxy behavior is not observed."""
  pass
16
17
# Via header values identifying a response served by the Chrome proxy:
# the current format (appears after a 4-character version prefix, e.g.
# "1.1 Chrome-Compression-Proxy") and the deprecated full-string format.
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Proxy settings the browser is expected to be configured with.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
30
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Reads the browser's proxy state from the net-internals page.

  Navigates |tab| to |url|, injects the sibling chrome_proxy.js helper,
  waits for the page load to start, and returns whatever
  window.__getChromeProxyInfo() reports.
  """
  tab.Navigate(url)
  script_path = os.path.join(os.path.dirname(__file__), 'chrome_proxy.js')
  with open(script_path) as script_file:
    tab.ExecuteJavaScript(script_file.read())
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  return tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
39
40
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Returns True if retry_time falls in [low, high + grace_seconds).

  The upper bound gets a grace window to absorb clock skew between when
  the bypass happened and when the check runs; the lower bound does not.
  """
  upper_bound = high + datetime.timedelta(seconds=grace_seconds)
  return low <= retry_time < upper_bound
44
45
class ChromeProxyResponse(network.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)

  def ShouldHaveChromeProxyViaHeader(self):
    """Returns True if this response is expected to carry the proxy Via header."""
    resp = self.response
    # https and data URLs do not go through the proxy.
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Neither do 304 Not Modified responses or cache hits.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # A response with no headers at all is invalid; warn and skip it.
    if not resp.headers:
      logging.warning('response for %s does not any have header '
                      '(refer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasChromeProxyViaHeader(self):
    """Returns True if the Via header identifies the Chrome proxy."""
    via_value = self.response.GetHeader('Via')
    if not via_value:
      return False
    entries = [entry.strip(' ') for entry in via_value.split(',')]
    # The Via header is valid in either the deprecated full-string format
    # or the new format with a 4-character version prefix, for example
    # "1.1 Chrome-Compression-Proxy".
    if CHROME_PROXY_VIA_HEADER_DEPRECATED in entries:
      return True
    return any(entry[4:] == CHROME_PROXY_VIA_HEADER for entry in entries)

  def IsValidByViaHeader(self):
    """A response is valid if it has the Via header whenever one is expected."""
    if not self.ShouldHaveChromeProxyViaHeader():
      return True
    return self.HasChromeProxyViaHeader()

  def IsSafebrowsingResponse(self):
    """Returns True for a proxy safebrowsing redirect (307 back to itself)."""
    resp = self.response
    return (resp.status == 307 and
            resp.GetHeader('X-Malware-Url') == '1' and
            self.IsValidByViaHeader() and
            resp.GetHeader('Location') == resp.url)
89
90
class ChromeProxyMetric(network.NetworkMetric):
  """A Chrome proxy timeline metric.

  Counts proxy-related responses for a page and verifies Chrome proxy
  behavior (Via headers, bypass, HTTP fallback, safebrowsing) against the
  proxy state reported by chrome://net-internals.
  """

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    self.compute_data_saving = True
    # The proxies the browser is expected to be configured with.
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
    }

  def SetEvents(self, events):
    """Used for unittest."""
    self._events = events

  def ResponseFromEvent(self, event):
    """Wraps a timeline event in a ChromeProxyResponse."""
    return ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    # Subclasses must pick one of the AddResultsFor* implementations below.
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
    """Reports counts of responses served via proxy, from cache, or direct.

    Note: a cache hit is additionally classified as via-proxy or direct
    from its (cached) Via header, so the three counts may overlap.
    """
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForHeaderValidation(self, tab, results):
    """Reports counts of responses with a valid Via header or a proxy bypass.

    Raises:
      ChromeProxyMetricException: if a response lacks a valid Via header
          while the proxy is not bypassed.
    """
    via_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        via_count += 1
      elif tab and self.IsProxyBypassed(tab):
        logging.warning('Proxy bypassed for %s', resp.response.url)
        bypass_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'checked_via_header', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_bypassed', 'count', bypass_count))

  def IsProxyBypassed(self, tab):
    """Returns True if all configured proxies are bypassed.

    Raises:
      ChromeProxyMetricException: if the Chrome proxy is not enabled.
    """
    info = GetProxyInfoFromNetworkInternals(tab)
    if not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)

    # Compare sorted copies of the two lists. (The previous code compared
    # the return values of two in-place .sort() calls, which are both None,
    # so the comparison was always True regardless of the proxy state.)
    bad_proxies = sorted(str(p['proxy']) for p in info['badProxies'])
    expected_proxies = sorted([self.effective_proxies['proxy'],
                               self.effective_proxies['fallback']])
    return bad_proxies == expected_proxies

  @staticmethod
  def VerifyBadProxies(
      badProxies, expected_proxies,
      retry_seconds_low=DEFAULT_BYPASS_MIN_SECONDS,
      retry_seconds_high=DEFAULT_BYPASS_MAX_SECONDS):
    """Verifies the bad proxy list and their retry times are expected.

    Args:
      badProxies: the 'badProxies' list from the net-internals proxy info;
          each entry is a dict with 'proxy' and 'retry' (a millisecond
          timestamp) keys.
      expected_proxies: list of proxy strings expected to be bad.
      retry_seconds_low: lower bound on the expected retry delay from now.
      retry_seconds_high: upper bound on the expected retry delay from now.

    Returns:
      True when the proxies match and every retry time is in range; False
      when badProxies is empty or the list lengths differ.

    Raises:
      ChromeProxyMetricException: on a proxy-list or retry-time mismatch.
    """
    if not badProxies or (len(badProxies) != len(expected_proxies)):
      return False

    # Check all expected proxies. Compare sorted copies so the caller's
    # lists are not mutated as a side effect.
    proxies = sorted(p['proxy'] for p in badProxies)
    if sorted(expected_proxies) != proxies:
      raise ChromeProxyMetricException(
          'Bad proxies: got %s want %s' % (
              str(badProxies), str(expected_proxies)))

    # Check retry times. The bounds do not depend on the loop variable, so
    # compute them once up front.
    retry_time_low = (datetime.datetime.now() +
                      datetime.timedelta(seconds=retry_seconds_low))
    retry_time_high = (datetime.datetime.now() +
                       datetime.timedelta(seconds=retry_seconds_high))
    for p in badProxies:
      got_retry_time = datetime.datetime.fromtimestamp(int(p['retry']) / 1000)
      if not ProxyRetryTimeInRange(
          got_retry_time, retry_time_low, retry_time_high):
        raise ChromeProxyMetricException(
            'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
                p['proxy'], str(got_retry_time), str(retry_time_low),
                str(retry_time_high)))
    return True

  def AddResultsForBypass(self, tab, results):
    """Reports the number of responses that bypassed the proxy.

    Raises:
      ChromeProxyMetricException: if any response still carries the proxy
          Via header, or the bad-proxy list does not match expectations.
    """
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      bypass_count += 1

    if tab:
      info = GetProxyInfoFromNetworkInternals(tab)
      if not info['enabled']:
        raise ChromeProxyMetricException(
            'Chrome proxy should be enabled. proxy info: %s' % info)
      self.VerifyBadProxies(
          info['badProxies'],
          [self.effective_proxies['proxy'],
           self.effective_proxies['fallback']])

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForSafebrowsing(self, tab, results):
    """Reports whether every response was a safebrowsing redirect.

    Raises:
      ChromeProxyMetricException: if any response is not a valid
          safebrowsing response.
    """
    count = 0
    safebrowsing_count = 0
    for resp in self.IterResponses(tab):
      count += 1
      if resp.IsSafebrowsingResponse():
        safebrowsing_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Not a valid safe browsing response.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
    if count == safebrowsing_count:
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'safebrowsing', 'boolean', True))
    else:
      raise ChromeProxyMetricException(
          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
              count, safebrowsing_count))

  def AddResultsForHTTPFallback(
      self, tab, results, expected_proxies=None, expected_bad_proxies=None):
    """Verifies the effective and bad proxy lists after an HTTP fallback.

    Args:
      expected_proxies: effective proxies to expect; defaults to the
          fallback proxy followed by direct.
      expected_bad_proxies: bad proxies to expect; defaults to none.

    Raises:
      ChromeProxyMetricException: if the proxy is disabled or either list
          does not match expectations.
    """
    info = GetProxyInfoFromNetworkInternals(tab)
    if not 'enabled' in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)

    if not expected_proxies:
      expected_proxies = [self.effective_proxies['fallback'],
                          self.effective_proxies['direct']]
    if not expected_bad_proxies:
      expected_bad_proxies = []

    proxies = info['proxies']
    if proxies != expected_proxies:
      raise ChromeProxyMetricException(
          'Wrong effective proxies (%s). Expect: "%s"' % (
              str(proxies), str(expected_proxies)))

    bad_proxies = []
    if 'badProxies' in info and info['badProxies']:
      bad_proxies = [p['proxy'] for p in info['badProxies']
                     if 'proxy' in p and p['proxy']]
    if bad_proxies != expected_bad_proxies:
      raise ChromeProxyMetricException(
          'Wrong bad proxies (%s). Expect: "%s"' % (
              str(bad_proxies), str(expected_bad_proxies)))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'http_fallback', 'boolean', True))
OLDNEW
« no previous file with comments | « tools/perf/metrics/chrome_proxy.js ('k') | tools/perf/metrics/chrome_proxy_unittest.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698