Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(78)

Side by Side Diff: tools/perf/metrics/chrome_proxy.py

Issue 382593002: Update chrome_proxy to use results.AddValue(...) (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2014 The Chromium Authors. All rights reserved. 1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import logging 6 import logging
7 import os 7 import os
8 8
9 from telemetry.page import page_measurement 9 from telemetry.page import page_measurement
10 from metrics import network 10 from metrics import network
11 from telemetry.value import scalar
11 12
12 13
class ChromeProxyMetricException(page_measurement.MeasurementFailure):
  """Raised when a Chrome-Proxy expectation is violated during measurement.

  Subclassing MeasurementFailure lets the telemetry harness report these as
  measurement failures rather than harness errors.
  """
  pass
15 16
16 17
# Via-header value the data-reduction proxy appends to responses it serves;
# its presence is how we prove a response actually went through the proxy.
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
# Older proxy deployments used this full "1.1 ..." Via entry instead.
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Host:port of the HTTPS data-reduction proxy endpoint.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after
116 117
117 super(ChromeProxyMetric, self).AddResults(tab, results) 118 super(ChromeProxyMetric, self).AddResults(tab, results)
118 for resp in self.IterResponses(tab): 119 for resp in self.IterResponses(tab):
119 if resp.response.served_from_cache: 120 if resp.response.served_from_cache:
120 resources_from_cache += 1 121 resources_from_cache += 1
121 if resp.HasChromeProxyViaHeader(): 122 if resp.HasChromeProxyViaHeader():
122 resources_via_proxy += 1 123 resources_via_proxy += 1
123 else: 124 else:
124 resources_direct += 1 125 resources_direct += 1
125 126
126 results.Add('resources_via_proxy', 'count', resources_via_proxy) 127 results.AddValue(scalar.ScalarValue(
127 results.Add('resources_from_cache', 'count', resources_from_cache) 128 results.current_page, 'resources_via_proxy', 'count',
128 results.Add('resources_direct', 'count', resources_direct) 129 resources_via_proxy))
130 results.AddValue(scalar.ScalarValue(
131 results.current_page, 'resources_from_cache', 'count',
132 resources_from_cache))
133 results.AddValue(scalar.ScalarValue(
134 results.current_page, 'resources_direct', 'count', resources_direct))
129 135
130 def AddResultsForHeaderValidation(self, tab, results): 136 def AddResultsForHeaderValidation(self, tab, results):
131 via_count = 0 137 via_count = 0
132 bypass_count = 0 138 bypass_count = 0
133 for resp in self.IterResponses(tab): 139 for resp in self.IterResponses(tab):
134 if resp.IsValidByViaHeader(): 140 if resp.IsValidByViaHeader():
135 via_count += 1 141 via_count += 1
136 elif tab and self.IsProxyBypassed(tab): 142 elif tab and self.IsProxyBypassed(tab):
137 logging.warning('Proxy bypassed for %s', resp.response.url) 143 logging.warning('Proxy bypassed for %s', resp.response.url)
138 bypass_count += 1 144 bypass_count += 1
139 else: 145 else:
140 r = resp.response 146 r = resp.response
141 raise ChromeProxyMetricException, ( 147 raise ChromeProxyMetricException, (
142 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % ( 148 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
143 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status)) 149 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
144 results.Add('checked_via_header', 'count', via_count) 150 results.AddValue(scalar.ScalarValue(
145 results.Add('request_bypassed', 'count', bypass_count) 151 results.current_page, 'checked_via_header', 'count', via_count))
152 results.AddValue(scalar.ScalarValue(
153 results.current_page, 'request_bypassed', 'count', bypass_count))
146 154
147 def IsProxyBypassed(self, tab): 155 def IsProxyBypassed(self, tab):
148 """ Returns True if all configured proxies are bypassed.""" 156 """ Returns True if all configured proxies are bypassed."""
149 info = GetProxyInfoFromNetworkInternals(tab) 157 info = GetProxyInfoFromNetworkInternals(tab)
150 if not info['enabled']: 158 if not info['enabled']:
151 raise ChromeProxyMetricException, ( 159 raise ChromeProxyMetricException, (
152 'Chrome proxy should be enabled. proxy info: %s' % info) 160 'Chrome proxy should be enabled. proxy info: %s' % info)
153 161
154 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort() 162 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort()
155 proxies = [self.effective_proxies['proxy'], 163 proxies = [self.effective_proxies['proxy'],
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
202 if tab: 210 if tab:
203 info = GetProxyInfoFromNetworkInternals(tab) 211 info = GetProxyInfoFromNetworkInternals(tab)
204 if not info['enabled']: 212 if not info['enabled']:
205 raise ChromeProxyMetricException, ( 213 raise ChromeProxyMetricException, (
206 'Chrome proxy should be enabled. proxy info: %s' % info) 214 'Chrome proxy should be enabled. proxy info: %s' % info)
207 self.VerifyBadProxies( 215 self.VerifyBadProxies(
208 info['badProxies'], 216 info['badProxies'],
209 [self.effective_proxies['proxy'], 217 [self.effective_proxies['proxy'],
210 self.effective_proxies['fallback']]) 218 self.effective_proxies['fallback']])
211 219
212 results.Add('bypass', 'count', bypass_count) 220 results.AddValue(scalar.ScalarValue(
221 results.current_page, 'bypass', 'count', bypass_count))
213 222
214 def AddResultsForSafebrowsing(self, tab, results): 223 def AddResultsForSafebrowsing(self, tab, results):
215 count = 0 224 count = 0
216 safebrowsing_count = 0 225 safebrowsing_count = 0
217 for resp in self.IterResponses(tab): 226 for resp in self.IterResponses(tab):
218 count += 1 227 count += 1
219 if resp.IsSafebrowsingResponse(): 228 if resp.IsSafebrowsingResponse():
220 safebrowsing_count += 1 229 safebrowsing_count += 1
221 else: 230 else:
222 r = resp.response 231 r = resp.response
223 raise ChromeProxyMetricException, ( 232 raise ChromeProxyMetricException, (
224 '%s: Not a valid safe browsing response.\n' 233 '%s: Not a valid safe browsing response.\n'
225 'Reponse: status=(%d, %s)\nHeaders:\n %s' % ( 234 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
226 r.url, r.status, r.status_text, r.headers)) 235 r.url, r.status, r.status_text, r.headers))
227 if count == safebrowsing_count: 236 if count == safebrowsing_count:
228 results.Add('safebrowsing', 'boolean', True) 237 results.AddValue(scalar.ScalarValue(
238 results.current_page, 'safebrowsing', 'boolean', True))
229 else: 239 else:
230 raise ChromeProxyMetricException, ( 240 raise ChromeProxyMetricException, (
231 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % ( 241 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
232 count, safebrowsing_count)) 242 count, safebrowsing_count))
233 243
234 def AddResultsForHTTPFallback( 244 def AddResultsForHTTPFallback(
235 self, tab, results, expected_proxies=None, expected_bad_proxies=None): 245 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
236 info = GetProxyInfoFromNetworkInternals(tab) 246 info = GetProxyInfoFromNetworkInternals(tab)
237 if not 'enabled' in info or not info['enabled']: 247 if not 'enabled' in info or not info['enabled']:
238 raise ChromeProxyMetricException, ( 248 raise ChromeProxyMetricException, (
(...skipping 12 matching lines...) Expand all
251 str(proxies), str(expected_proxies))) 261 str(proxies), str(expected_proxies)))
252 262
253 bad_proxies = [] 263 bad_proxies = []
254 if 'badProxies' in info and info['badProxies']: 264 if 'badProxies' in info and info['badProxies']:
255 bad_proxies = [p['proxy'] for p in info['badProxies'] 265 bad_proxies = [p['proxy'] for p in info['badProxies']
256 if 'proxy' in p and p['proxy']] 266 if 'proxy' in p and p['proxy']]
257 if bad_proxies != expected_bad_proxies: 267 if bad_proxies != expected_bad_proxies:
258 raise ChromeProxyMetricException, ( 268 raise ChromeProxyMetricException, (
259 'Wrong bad proxies (%s). Expect: "%s"' % ( 269 'Wrong bad proxies (%s). Expect: "%s"' % (
260 str(bad_proxies), str(expected_bad_proxies))) 270 str(bad_proxies), str(expected_bad_proxies)))
261 results.Add('http_fallback', 'boolean', True) 271 results.AddValue(scalar.ScalarValue(
272 results.current_page, 'http_fallback', 'boolean', True))
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698