OLD | NEW |
---|---|
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import datetime | 5 import datetime |
6 import logging | 6 import logging |
7 import os | 7 import os |
8 | 8 |
9 from integration_tests import network_metrics | 9 from integration_tests import network_metrics |
10 from telemetry.page import page_test | 10 from telemetry.page import page_test |
(...skipping 153 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' % | 164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' % |
165 (r.url, r.status)) | 165 (r.url, r.status)) |
166 if not resp.IsValidByViaHeader(): | 166 if not resp.IsValidByViaHeader(): |
167 raise ChromeProxyMetricException, ('%s: Response missing via header' % | 167 raise ChromeProxyMetricException, ('%s: Response missing via header' % |
168 (r.url)) | 168 (r.url)) |
169 results.AddValue(scalar.ScalarValue( | 169 results.AddValue(scalar.ScalarValue( |
170 results.current_page, 'version_test', 'count', 1)) | 170 results.current_page, 'version_test', 'count', 1)) |
171 | 171 |
172 | 172 |
173 def IsProxyBypassed(self, tab): | 173 def IsProxyBypassed(self, tab): |
174 """ Returns True if all configured proxies are bypassed.""" | 174 """ Returns True if all configured proxies are bypassed.""" |
bolian
2014/10/14 06:00:49
Can you update the doc to reflect the 2nd return value?
sclittle
2014/10/14 17:36:45
Done.
| |
175 if not tab: | 175 if not tab: |
176 return False, [] | 176 return False, [] |
177 | 177 |
178 info = GetProxyInfoFromNetworkInternals(tab) | 178 info = GetProxyInfoFromNetworkInternals(tab) |
179 if not info['enabled']: | 179 if not info['enabled']: |
180 raise ChromeProxyMetricException, ( | 180 raise ChromeProxyMetricException, ( |
181 'Chrome proxy should be enabled. proxy info: %s' % info) | 181 'Chrome proxy should be enabled. proxy info: %s' % info) |
182 | 182 |
183 bad_proxies = [str(p['proxy']) for p in info['badProxies']] | 183 bad_proxies = [str(p['proxy']) for p in info['badProxies']] |
184 bad_proxies.sort() | 184 bad_proxies.sort() |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
220 datetime.timedelta(seconds=retry_seconds_high)) | 220 datetime.timedelta(seconds=retry_seconds_high)) |
221 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000) | 221 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000) |
222 if not ProxyRetryTimeInRange( | 222 if not ProxyRetryTimeInRange( |
223 got_retry_time, retry_time_low, retry_time_high): | 223 got_retry_time, retry_time_low, retry_time_high): |
224 raise ChromeProxyMetricException, ( | 224 raise ChromeProxyMetricException, ( |
225 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % ( | 225 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % ( |
226 p['proxy'], str(got_retry_time), str(retry_time_low), | 226 p['proxy'], str(got_retry_time), str(retry_time_low), |
227 str(retry_time_high))) | 227 str(retry_time_high))) |
228 return True | 228 return True |
229 | 229 |
230 def VerifyAllProxiesBypassed(self, tab): | |
231 if tab: | |
232 info = GetProxyInfoFromNetworkInternals(tab) | |
233 if not info['enabled']: | |
234 raise ChromeProxyMetricException, ( | |
235 'Chrome proxy should be enabled. proxy info: %s' % info) | |
236 _, expected_bad_proxies = self.IsProxyBypassed(tab) | |
bolian
2014/10/14 06:00:49
I think you want to verify the first return value
sclittle
2014/10/14 17:36:45
Done.
| |
237 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies) | |
238 | |
230 def AddResultsForBypass(self, tab, results): | 239 def AddResultsForBypass(self, tab, results): |
231 bypass_count = 0 | 240 bypass_count = 0 |
232 for resp in self.IterResponses(tab): | 241 for resp in self.IterResponses(tab): |
233 if resp.HasChromeProxyViaHeader(): | 242 if resp.HasChromeProxyViaHeader(): |
234 r = resp.response | 243 r = resp.response |
235 raise ChromeProxyMetricException, ( | 244 raise ChromeProxyMetricException, ( |
236 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( | 245 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % ( |
237 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) | 246 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) |
238 bypass_count += 1 | 247 bypass_count += 1 |
239 | 248 |
240 if tab: | 249 self.VerifyAllProxiesBypassed(tab) |
241 info = GetProxyInfoFromNetworkInternals(tab) | |
242 if not info['enabled']: | |
243 raise ChromeProxyMetricException, ( | |
244 'Chrome proxy should be enabled. proxy info: %s' % info) | |
245 _, expected_bad_proxies = self.IsProxyBypassed(tab) | |
246 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies) | |
247 | |
248 results.AddValue(scalar.ScalarValue( | 250 results.AddValue(scalar.ScalarValue( |
249 results.current_page, 'bypass', 'count', bypass_count)) | 251 results.current_page, 'bypass', 'count', bypass_count)) |
250 | 252 |
251 def AddResultsForCorsBypass(self, tab, results): | 253 def AddResultsForCorsBypass(self, tab, results): |
252 eligible_response_count = 0 | 254 eligible_response_count = 0 |
253 bypass_count = 0 | 255 bypass_count = 0 |
254 bypasses = {} | 256 bypasses = {} |
255 for resp in self.IterResponses(tab): | 257 for resp in self.IterResponses(tab): |
256 logging.warn('got a resource %s' % (resp.response.url)) | 258 logging.warn('got a resource %s' % (resp.response.url)) |
257 | 259 |
(...skipping 17 matching lines...) Expand all Loading... | |
275 raise ChromeProxyMetricException, ( | 277 raise ChromeProxyMetricException, ( |
276 '%s: Got a 502 without a subsequent 200' % (url)) | 278 '%s: Got a 502 without a subsequent 200' % (url)) |
277 elif bypasses[url] > 1: | 279 elif bypasses[url] > 1: |
278 raise ChromeProxyMetricException, ( | 280 raise ChromeProxyMetricException, ( |
279 '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url])) | 281 '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url])) |
280 if bypass_count == 0: | 282 if bypass_count == 0: |
281 raise ChromeProxyMetricException, ( | 283 raise ChromeProxyMetricException, ( |
282 'At least one response should be bypassed. ' | 284 'At least one response should be bypassed. ' |
283 '(eligible_response_count=%d, bypass_count=%d)\n' % ( | 285 '(eligible_response_count=%d, bypass_count=%d)\n' % ( |
284 eligible_response_count, bypass_count)) | 286 eligible_response_count, bypass_count)) |
285 if tab: | |
286 info = GetProxyInfoFromNetworkInternals(tab) | |
287 if not info['enabled']: | |
288 raise ChromeProxyMetricException, ( | |
289 'Chrome proxy should be enabled. proxy info: %s' % info) | |
290 _, expected_bad_proxies = self.IsProxyBypassed(tab) | |
291 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies) | |
292 | 287 |
288 self.VerifyAllProxiesBypassed(tab) | |
293 results.AddValue(scalar.ScalarValue( | 289 results.AddValue(scalar.ScalarValue( |
294 results.current_page, 'cors_bypass', 'count', bypass_count)) | 290 results.current_page, 'cors_bypass', 'count', bypass_count)) |
295 | 291 |
296 def AddResultsForBlockOnce(self, tab, results): | 292 def AddResultsForBlockOnce(self, tab, results): |
297 eligible_response_count = 0 | 293 eligible_response_count = 0 |
298 bypass_count = 0 | 294 bypass_count = 0 |
299 for resp in self.IterResponses(tab): | 295 for resp in self.IterResponses(tab): |
300 if resp.ShouldHaveChromeProxyViaHeader(): | 296 if resp.ShouldHaveChromeProxyViaHeader(): |
301 eligible_response_count += 1 | 297 eligible_response_count += 1 |
302 if not resp.HasChromeProxyViaHeader(): | 298 if not resp.HasChromeProxyViaHeader(): |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
340 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( | 336 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( |
341 r.url, r.status, r.status_text, r.headers)) | 337 r.url, r.status, r.status_text, r.headers)) |
342 if count == safebrowsing_count: | 338 if count == safebrowsing_count: |
343 results.AddValue(scalar.ScalarValue( | 339 results.AddValue(scalar.ScalarValue( |
344 results.current_page, 'safebrowsing', 'boolean', True)) | 340 results.current_page, 'safebrowsing', 'boolean', True)) |
345 else: | 341 else: |
346 raise ChromeProxyMetricException, ( | 342 raise ChromeProxyMetricException, ( |
347 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( | 343 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( |
348 count, safebrowsing_count)) | 344 count, safebrowsing_count)) |
349 | 345 |
350 def AddResultsForHTTPFallback( | 346 def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies): |
351 self, tab, results, expected_proxies=None, expected_bad_proxies=None): | |
352 info = GetProxyInfoFromNetworkInternals(tab) | 347 info = GetProxyInfoFromNetworkInternals(tab) |
353 if not 'enabled' in info or not info['enabled']: | 348 if not 'enabled' in info or not info['enabled']: |
354 raise ChromeProxyMetricException, ( | 349 raise ChromeProxyMetricException, ( |
355 'Chrome proxy should be enabled. proxy info: %s' % info) | 350 'Chrome proxy should be enabled. proxy info: %s' % info) |
356 | 351 |
357 if not expected_proxies: | |
358 expected_proxies = [self.effective_proxies['fallback'], | |
359 self.effective_proxies['direct']] | |
360 if not expected_bad_proxies: | |
361 expected_bad_proxies = [] | |
362 | |
363 proxies = info['proxies'] | 352 proxies = info['proxies'] |
364 if proxies != expected_proxies: | 353 if proxies != expected_proxies: |
365 raise ChromeProxyMetricException, ( | 354 raise ChromeProxyMetricException, ( |
366 'Wrong effective proxies (%s). Expect: "%s"' % ( | 355 'Wrong effective proxies (%s). Expect: "%s"' % ( |
367 str(proxies), str(expected_proxies))) | 356 str(proxies), str(expected_proxies))) |
368 | 357 |
369 bad_proxies = [] | 358 bad_proxies = [] |
370 if 'badProxies' in info and info['badProxies']: | 359 if 'badProxies' in info and info['badProxies']: |
371 bad_proxies = [p['proxy'] for p in info['badProxies'] | 360 bad_proxies = [p['proxy'] for p in info['badProxies'] |
372 if 'proxy' in p and p['proxy']] | 361 if 'proxy' in p and p['proxy']] |
373 if bad_proxies != expected_bad_proxies: | 362 if bad_proxies != expected_bad_proxies: |
374 raise ChromeProxyMetricException, ( | 363 raise ChromeProxyMetricException, ( |
375 'Wrong bad proxies (%s). Expect: "%s"' % ( | 364 'Wrong bad proxies (%s). Expect: "%s"' % ( |
376 str(bad_proxies), str(expected_bad_proxies))) | 365 str(bad_proxies), str(expected_bad_proxies))) |
366 | |
367 def AddResultsForHTTPFallback( | |
368 self, tab, results, expected_proxies=None, expected_bad_proxies=None): | |
369 if not expected_proxies: | |
370 expected_proxies = [self.effective_proxies['fallback'], | |
371 self.effective_proxies['direct']] | |
372 if not expected_bad_proxies: | |
373 expected_bad_proxies = [] | |
374 | |
375 self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies) | |
377 results.AddValue(scalar.ScalarValue( | 376 results.AddValue(scalar.ScalarValue( |
378 results.current_page, 'http_fallback', 'boolean', True)) | 377 results.current_page, 'http_fallback', 'boolean', True)) |
378 | |
379 def AddResultsForHTTPToDirectFallback(self, tab, results): | |
380 self.VerifyAllProxiesBypassed(tab) | |
381 results.AddValue(scalar.ScalarValue( | |
382 results.current_page, 'direct_fallback', 'boolean', True)) | |
OLD | NEW |