Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(34)

Side by Side Diff: tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py

Issue 655703002: Added integration tests for falling back from the HTTP proxy to direct. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Addressed comments Created 6 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2014 The Chromium Authors. All rights reserved. 1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import logging 6 import logging
7 import os 7 import os
8 8
9 from integration_tests import network_metrics 9 from integration_tests import network_metrics
10 from telemetry.page import page_test 10 from telemetry.page import page_test
(...skipping 23 matching lines...) Expand all
34 with open(os.path.join(os.path.dirname(__file__), 34 with open(os.path.join(os.path.dirname(__file__),
35 'chrome_proxy_metrics.js')) as f: 35 'chrome_proxy_metrics.js')) as f:
36 js = f.read() 36 js = f.read()
37 tab.ExecuteJavaScript(js) 37 tab.ExecuteJavaScript(js)
38 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) 38 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
39 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()') 39 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
40 return info 40 return info
41 41
42 42
43 def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30): 43 def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
44 return (retry_time >= low and 44 return (retry_time >= low - datetime.timedelta(seconds=grace_seconds) and
45 (retry_time < high + datetime.timedelta(seconds=grace_seconds))) 45 (retry_time < high + datetime.timedelta(seconds=grace_seconds)))
46 46
47 47
48 class ChromeProxyResponse(network_metrics.HTTPResponse): 48 class ChromeProxyResponse(network_metrics.HTTPResponse):
 49   """ Represents an HTTP response from a timeline event.""" 49   """ Represents an HTTP response from a timeline event."""
50 def __init__(self, event): 50 def __init__(self, event):
51 super(ChromeProxyResponse, self).__init__(event) 51 super(ChromeProxyResponse, self).__init__(event)
52 52
53 def ShouldHaveChromeProxyViaHeader(self): 53 def ShouldHaveChromeProxyViaHeader(self):
54 resp = self.response 54 resp = self.response
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after
164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' % 164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' %
165 (r.url, r.status)) 165 (r.url, r.status))
166 if not resp.IsValidByViaHeader(): 166 if not resp.IsValidByViaHeader():
167 raise ChromeProxyMetricException, ('%s: Response missing via header' % 167 raise ChromeProxyMetricException, ('%s: Response missing via header' %
168 (r.url)) 168 (r.url))
169 results.AddValue(scalar.ScalarValue( 169 results.AddValue(scalar.ScalarValue(
170 results.current_page, 'version_test', 'count', 1)) 170 results.current_page, 'version_test', 'count', 1))
171 171
172 172
173 def IsProxyBypassed(self, tab): 173 def IsProxyBypassed(self, tab):
174 """ Returns True if all configured proxies are bypassed.""" 174 """Get whether all configured proxies are bypassed.
175
176 Returns:
177 A tuple of the form (boolean, string list). If all configured proxies
178 are bypassed, then the return value will be (True, bypassed proxies).
179 Otherwise, the return value will be (False, empty list).
180 """
175 if not tab: 181 if not tab:
176 return False, [] 182 return False, []
177 183
178 info = GetProxyInfoFromNetworkInternals(tab) 184 info = GetProxyInfoFromNetworkInternals(tab)
179 if not info['enabled']: 185 if not info['enabled']:
180 raise ChromeProxyMetricException, ( 186 raise ChromeProxyMetricException, (
181 'Chrome proxy should be enabled. proxy info: %s' % info) 187 'Chrome proxy should be enabled. proxy info: %s' % info)
182 188
183 bad_proxies = [str(p['proxy']) for p in info['badProxies']] 189 bad_proxies = [str(p['proxy']) for p in info['badProxies']]
184 bad_proxies.sort() 190 bad_proxies.sort()
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
220 datetime.timedelta(seconds=retry_seconds_high)) 226 datetime.timedelta(seconds=retry_seconds_high))
221 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000) 227 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
222 if not ProxyRetryTimeInRange( 228 if not ProxyRetryTimeInRange(
223 got_retry_time, retry_time_low, retry_time_high): 229 got_retry_time, retry_time_low, retry_time_high):
224 raise ChromeProxyMetricException, ( 230 raise ChromeProxyMetricException, (
225 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % ( 231 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
226 p['proxy'], str(got_retry_time), str(retry_time_low), 232 p['proxy'], str(got_retry_time), str(retry_time_low),
227 str(retry_time_high))) 233 str(retry_time_high)))
228 return True 234 return True
229 235
236 def VerifyAllProxiesBypassed(self, tab):
237 if tab:
238 info = GetProxyInfoFromNetworkInternals(tab)
239 if not info['enabled']:
240 raise ChromeProxyMetricException, (
241 'Chrome proxy should be enabled. proxy info: %s' % info)
242 is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab)
243 if not is_bypassed:
244 raise ChromeProxyMetricException, (
245 'Chrome proxy should be bypassed. proxy info: %s' % info)
246 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
247
230 def AddResultsForBypass(self, tab, results): 248 def AddResultsForBypass(self, tab, results):
231 bypass_count = 0 249 bypass_count = 0
232 for resp in self.IterResponses(tab): 250 for resp in self.IterResponses(tab):
233 if resp.HasChromeProxyViaHeader(): 251 if resp.HasChromeProxyViaHeader():
234 r = resp.response 252 r = resp.response
235 raise ChromeProxyMetricException, ( 253 raise ChromeProxyMetricException, (
236 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( 254 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
237 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) 255 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
238 bypass_count += 1 256 bypass_count += 1
239 257
240 if tab: 258 self.VerifyAllProxiesBypassed(tab)
241 info = GetProxyInfoFromNetworkInternals(tab)
242 if not info['enabled']:
243 raise ChromeProxyMetricException, (
244 'Chrome proxy should be enabled. proxy info: %s' % info)
245 _, expected_bad_proxies = self.IsProxyBypassed(tab)
246 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
247
248 results.AddValue(scalar.ScalarValue( 259 results.AddValue(scalar.ScalarValue(
249 results.current_page, 'bypass', 'count', bypass_count)) 260 results.current_page, 'bypass', 'count', bypass_count))
250 261
251 def AddResultsForCorsBypass(self, tab, results): 262 def AddResultsForCorsBypass(self, tab, results):
252 eligible_response_count = 0 263 eligible_response_count = 0
253 bypass_count = 0 264 bypass_count = 0
254 bypasses = {} 265 bypasses = {}
255 for resp in self.IterResponses(tab): 266 for resp in self.IterResponses(tab):
256 logging.warn('got a resource %s' % (resp.response.url)) 267 logging.warn('got a resource %s' % (resp.response.url))
257 268
(...skipping 17 matching lines...) Expand all
275 raise ChromeProxyMetricException, ( 286 raise ChromeProxyMetricException, (
276 '%s: Got a 502 without a subsequent 200' % (url)) 287 '%s: Got a 502 without a subsequent 200' % (url))
277 elif bypasses[url] > 1: 288 elif bypasses[url] > 1:
278 raise ChromeProxyMetricException, ( 289 raise ChromeProxyMetricException, (
279 '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url])) 290 '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url]))
280 if bypass_count == 0: 291 if bypass_count == 0:
281 raise ChromeProxyMetricException, ( 292 raise ChromeProxyMetricException, (
282 'At least one response should be bypassed. ' 293 'At least one response should be bypassed. '
283 '(eligible_response_count=%d, bypass_count=%d)\n' % ( 294 '(eligible_response_count=%d, bypass_count=%d)\n' % (
284 eligible_response_count, bypass_count)) 295 eligible_response_count, bypass_count))
285 if tab:
286 info = GetProxyInfoFromNetworkInternals(tab)
287 if not info['enabled']:
288 raise ChromeProxyMetricException, (
289 'Chrome proxy should be enabled. proxy info: %s' % info)
290 _, expected_bad_proxies = self.IsProxyBypassed(tab)
291 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
292 296
297 self.VerifyAllProxiesBypassed(tab)
293 results.AddValue(scalar.ScalarValue( 298 results.AddValue(scalar.ScalarValue(
294 results.current_page, 'cors_bypass', 'count', bypass_count)) 299 results.current_page, 'cors_bypass', 'count', bypass_count))
295 300
296 def AddResultsForBlockOnce(self, tab, results): 301 def AddResultsForBlockOnce(self, tab, results):
297 eligible_response_count = 0 302 eligible_response_count = 0
298 bypass_count = 0 303 bypass_count = 0
299 for resp in self.IterResponses(tab): 304 for resp in self.IterResponses(tab):
300 if resp.ShouldHaveChromeProxyViaHeader(): 305 if resp.ShouldHaveChromeProxyViaHeader():
301 eligible_response_count += 1 306 eligible_response_count += 1
302 if not resp.HasChromeProxyViaHeader(): 307 if not resp.HasChromeProxyViaHeader():
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
 340           'Response: status=(%d, %s)\nHeaders:\n %s' % ( 345           'Response: status=(%d, %s)\nHeaders:\n %s' % (
341 r.url, r.status, r.status_text, r.headers)) 346 r.url, r.status, r.status_text, r.headers))
342 if count == safebrowsing_count: 347 if count == safebrowsing_count:
343 results.AddValue(scalar.ScalarValue( 348 results.AddValue(scalar.ScalarValue(
344 results.current_page, 'safebrowsing', 'boolean', True)) 349 results.current_page, 'safebrowsing', 'boolean', True))
345 else: 350 else:
346 raise ChromeProxyMetricException, ( 351 raise ChromeProxyMetricException, (
347 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( 352 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
348 count, safebrowsing_count)) 353 count, safebrowsing_count))
349 354
350 def AddResultsForHTTPFallback( 355 def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies):
351 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
352 info = GetProxyInfoFromNetworkInternals(tab) 356 info = GetProxyInfoFromNetworkInternals(tab)
353 if not 'enabled' in info or not info['enabled']: 357 if not 'enabled' in info or not info['enabled']:
354 raise ChromeProxyMetricException, ( 358 raise ChromeProxyMetricException, (
355 'Chrome proxy should be enabled. proxy info: %s' % info) 359 'Chrome proxy should be enabled. proxy info: %s' % info)
356 360
357 if not expected_proxies:
358 expected_proxies = [self.effective_proxies['fallback'],
359 self.effective_proxies['direct']]
360 if not expected_bad_proxies:
361 expected_bad_proxies = []
362
363 proxies = info['proxies'] 361 proxies = info['proxies']
364 if proxies != expected_proxies: 362 if proxies != expected_proxies:
365 raise ChromeProxyMetricException, ( 363 raise ChromeProxyMetricException, (
366 'Wrong effective proxies (%s). Expect: "%s"' % ( 364 'Wrong effective proxies (%s). Expect: "%s"' % (
367 str(proxies), str(expected_proxies))) 365 str(proxies), str(expected_proxies)))
368 366
369 bad_proxies = [] 367 bad_proxies = []
370 if 'badProxies' in info and info['badProxies']: 368 if 'badProxies' in info and info['badProxies']:
371 bad_proxies = [p['proxy'] for p in info['badProxies'] 369 bad_proxies = [p['proxy'] for p in info['badProxies']
372 if 'proxy' in p and p['proxy']] 370 if 'proxy' in p and p['proxy']]
373 if bad_proxies != expected_bad_proxies: 371 if bad_proxies != expected_bad_proxies:
374 raise ChromeProxyMetricException, ( 372 raise ChromeProxyMetricException, (
375 'Wrong bad proxies (%s). Expect: "%s"' % ( 373 'Wrong bad proxies (%s). Expect: "%s"' % (
376 str(bad_proxies), str(expected_bad_proxies))) 374 str(bad_proxies), str(expected_bad_proxies)))
375
376 def AddResultsForHTTPFallback(
377 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
378 if not expected_proxies:
379 expected_proxies = [self.effective_proxies['fallback'],
380 self.effective_proxies['direct']]
381 if not expected_bad_proxies:
382 expected_bad_proxies = []
383
384 self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies)
377 results.AddValue(scalar.ScalarValue( 385 results.AddValue(scalar.ScalarValue(
378 results.current_page, 'http_fallback', 'boolean', True)) 386 results.current_page, 'http_fallback', 'boolean', True))
387
388 def AddResultsForHTTPToDirectFallback(self, tab, results):
389 self.VerifyAllProxiesBypassed(tab)
390 results.AddValue(scalar.ScalarValue(
391 results.current_page, 'direct_fallback', 'boolean', True))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698