OLD | NEW |
---|---|
1 #! /usr/bin/python | 1 #! /usr/bin/python |
2 # Copyright 2015 The Chromium Authors. All rights reserved. | 2 # Copyright 2015 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 import argparse | 6 import argparse |
7 import cgi | 7 import cgi |
8 import json | 8 import json |
9 import logging | 9 import logging |
10 import os | 10 import os |
11 import subprocess | 11 import subprocess |
12 import sys | 12 import sys |
13 import tempfile | 13 import tempfile |
14 import time | 14 import time |
15 | 15 |
16 _SRC_DIR = os.path.abspath(os.path.join( | 16 _SRC_DIR = os.path.abspath(os.path.join( |
17 os.path.dirname(__file__), '..', '..', '..')) | 17 os.path.dirname(__file__), '..', '..', '..')) |
18 | 18 |
19 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil')) | 19 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil')) |
20 from devil.android import device_utils | 20 from devil.android import device_utils |
21 from devil.android.sdk import intent | 21 from devil.android.sdk import intent |
22 | 22 |
23 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) | 23 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) |
24 import devil_chromium | 24 import devil_chromium |
25 from pylib import constants | 25 from pylib import constants |
26 | 26 |
27 import log_parser | 27 import device_setup |
28 import log_requests | |
29 import loading_model | 28 import loading_model |
29 import loading_trace |
30 import trace_recorder |
30 | 31 |
31 | 32 |
32 # TODO(mattcary): logging.info isn't that useful; we need something finer | 33 # TODO(mattcary): logging.info isn't that useful, as the whole (tools) world |
33 # grained. For now we just do logging.warning. | 34 # uses logging info; we need to introduce logging modules to get finer-grained |
35 # output. For now we just do logging.warning. | |
34 | 36 |
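[Editor's aside on the TODO above, not part of this change: a minimal sketch of the finer-grained, per-module logging it alludes to, assuming plain stdlib logging; the 'loading.analyze' logger name is hypothetical.]

import logging

_logger = logging.getLogger('loading.analyze')  # hypothetical per-module logger

def _Example():
  # Root stays at WARNING, so logging.info() calls from the rest of the tools
  # world remain quiet; only this tool's 'loading.*' loggers are turned up.
  logging.basicConfig(level=logging.WARNING)
  logging.getLogger('loading').setLevel(logging.INFO)
  _logger.info('visible: loading.analyze inherits INFO from loading')
  logging.info('suppressed: root logger is still at WARNING')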
35 | 37 |
36 # TODO(mattcary): probably we want this piped in through a flag. | 38 # TODO(mattcary): probably we want this piped in through a flag. |
37 CHROME = constants.PACKAGE_INFO['chrome'] | 39 CHROME = constants.PACKAGE_INFO['chrome'] |
38 | 40 |
39 | 41 |
40 def _SetupAndGetDevice(): | |
41 """Gets an android device, set up the way we like it. | |
42 | |
43 Returns: | |
44 An instance of DeviceUtils for the first device found. | |
45 """ | |
46 device = device_utils.DeviceUtils.HealthyDevices()[0] | |
47 device.EnableRoot() | |
48 device.KillAll(CHROME.package, quiet=True) | |
49 return device | |
50 | |
51 | |
52 def _LoadPage(device, url): | 42 def _LoadPage(device, url): |
53 """Load a page on chrome on our device. | 43 """Load a page on chrome on our device. |
54 | 44 |
55 Args: | 45 Args: |
56 device: an AdbWrapper for the device on which to load the page. | 46 device: an AdbWrapper for the device on which to load the page. |
57 url: url as a string to load. | 47 url: url as a string to load. |
58 """ | 48 """ |
59 load_intent = intent.Intent( | 49 load_intent = intent.Intent( |
60 package=CHROME.package, activity=CHROME.activity, data=url) | 50 package=CHROME.package, activity=CHROME.activity, data=url) |
61 logging.warning('Loading ' + url) | 51 logging.warning('Loading ' + url) |
(...skipping 30 matching lines...)
92 <html> | 82 <html> |
93 <head> | 83 <head> |
94 <title>%s</title> | 84 <title>%s</title> |
95 """ % title) | 85 """ % title) |
96 for info in graph.ResourceInfo(): | 86 for info in graph.ResourceInfo(): |
97 output.append('<link rel="prefetch" href="%s">\n' % info.Url()) | 87 output.append('<link rel="prefetch" href="%s">\n' % info.Url()) |
98 output.append("""</head> | 88 output.append("""</head> |
99 <body>%s</body> | 89 <body>%s</body> |
100 </html> | 90 </html> |
101 """ % title) | 91 """ % title) |
102 | |
103 return '\n'.join(output) | 92 return '\n'.join(output) |
104 | 93 |
105 | 94 |
106 def _LogRequests(url, clear_cache=True, local=False): | 95 def _LogRequests(url, clear_cache=True, local=False): |
Benoit L 2016/01/21 14:14:38: Can this move into trace_recorder?
mattcary 2016/01/21 16:11:35: I think I already essentially did that by breaking
Benoit L 2016/01/21 16:20:03: Acknowledged.
107 """Log requests for a web page. | 96 """Log requests for a web page. |
108 | 97 |
109 TODO(mattcary): loading.log_requests probably needs to be refactored as we're | |
110 using private methods, also there's ugliness like _ResponseDataToJson return a | |
111 json.dumps that we immediately json.loads. | |
112 | |
113 Args: | 98 Args: |
114 url: url to log as string. | 99 url: url to log as string. |
115 clear_cache: optional flag to clear the cache. | 100 clear_cache: optional flag to clear the cache. |
116 local: log from local (desktop) chrome session. | 101 local: log from local (desktop) chrome session. |
117 | 102 |
118 Returns: | 103 Returns: |
119 JSON of logged information (ie, a dict that describes JSON). | 104 JSON dict of logged information (ie, a dict that describes JSON). |
120 """ | 105 """ |
121 device = _SetupAndGetDevice() if not local else None | 106 device = device_setup.GetFirstDevice() if not local else None |
122 request_logger = log_requests.AndroidRequestsLogger(device) | 107 with device_setup.DeviceConnection(device) as connection: |
123 logging.warning('Logging %scached %s' % ('un' if clear_cache else '', url)) | 108 logging.warning('Logging %scached %s' % ('un' if clear_cache else '', url)) |
124 response_data = request_logger.LogPageLoad( | 109 if clear_cache: |
125 url, clear_cache, 'chrome') | 110 connection.ClearCache() |
126 return json.loads(log_requests._ResponseDataToJson(response_data)) | 111 trace = trace_recorder.MonitorUrl(connection, url) |
112 return trace.ToJsonDict() | |
127 | 113 |
128 | 114 |
129 def _FullFetch(url, json_output, prefetch, local, prefetch_delay_seconds): | 115 def _FullFetch(url, json_output, prefetch, local, prefetch_delay_seconds): |
130 """Do a full fetch with optional prefetching.""" | 116 """Do a full fetch with optional prefetching.""" |
131 if not url.startswith('http'): | 117 if not url.startswith('http'): |
132 url = 'http://' + url | 118 url = 'http://' + url |
133 logging.warning('Cold fetch') | 119 logging.warning('Cold fetch') |
134 cold_data = _LogRequests(url, local=local) | 120 cold_data = _LogRequests(url, local=local) |
135 assert cold_data, 'Cold fetch failed to produce data. Check your phone.' | 121 assert cold_data, 'Cold fetch failed to produce data. Check your phone.' |
136 if prefetch: | 122 if prefetch: |
137 assert not local | 123 assert not local |
138 logging.warning('Generating prefetch') | 124 logging.warning('Generating prefetch') |
139 prefetch_html = _GetPrefetchHtml(_ProcessJson(cold_data), name=url) | 125 prefetch_html = _GetPrefetchHtml(_ProcessJson(cold_data), name=url) |
140 tmp = tempfile.NamedTemporaryFile() | 126 tmp = tempfile.NamedTemporaryFile() |
141 tmp.write(prefetch_html) | 127 tmp.write(prefetch_html) |
142 tmp.flush() | 128 tmp.flush() |
143 # We hope that the tmpfile name is unique enough for the device. | 129 # We hope that the tmpfile name is unique enough for the device. |
144 target = os.path.join('/sdcard/Download', os.path.basename(tmp.name)) | 130 target = os.path.join('/sdcard/Download', os.path.basename(tmp.name)) |
145 device = _SetupAndGetDevice() | 131 device = device_setup.GetFirstDevice() |
146 device.adb.Push(tmp.name, target) | 132 device.adb.Push(tmp.name, target) |
147 logging.warning('Pushed prefetch %s to device at %s' % (tmp.name, target)) | 133 logging.warning('Pushed prefetch %s to device at %s' % (tmp.name, target)) |
148 _LoadPage(device, 'file://' + target) | 134 _LoadPage(device, 'file://' + target) |
149 time.sleep(prefetch_delay_seconds) | 135 time.sleep(prefetch_delay_seconds) |
150 logging.warning('Warm fetch') | 136 logging.warning('Warm fetch') |
151 warm_data = _LogRequests(url, clear_cache=False) | 137 warm_data = _LogRequests(url, clear_cache=False) |
152 with open(json_output, 'w') as f: | 138 with open(json_output, 'w') as f: |
153 _WriteJson(f, warm_data) | 139 _WriteJson(f, warm_data) |
154 logging.warning('Wrote ' + json_output) | 140 logging.warning('Wrote ' + json_output) |
155 with open(json_output + '.cold', 'w') as f: | 141 with open(json_output + '.cold', 'w') as f: |
156 _WriteJson(f, cold_data) | 142 _WriteJson(f, cold_data) |
157 logging.warning('Wrote ' + json_output + '.cold') | 143 logging.warning('Wrote ' + json_output + '.cold') |
158 else: | 144 else: |
159 with open(json_output, 'w') as f: | 145 with open(json_output, 'w') as f: |
160 _WriteJson(f, cold_data) | 146 _WriteJson(f, cold_data) |
161 logging.warning('Wrote ' + json_output) | 147 logging.warning('Wrote ' + json_output) |
162 | 148 |
163 | 149 |
164 # TODO(mattcary): it would be nice to refactor so the --noads flag gets dealt | 150 # TODO(mattcary): it would be nice to refactor so the --noads flag gets dealt |
165 # with here. | 151 # with here. |
166 def _ProcessRequests(filename): | 152 def _ProcessRequests(filename): |
167 requests = log_parser.FilterRequests(log_parser.ParseJsonFile(filename)) | 153 with open(filename) as f: |
168 return loading_model.ResourceGraph(requests) | 154 return loading_model.ResourceGraph( |
blundell 2016/01/21 14:11:38: these two lines are: return _ProcessJson(json.l
mattcary 2016/01/21 16:11:35: All simplified by change to allow the ResourceGrap
155 loading_trace.LoadingTrace.FromJsonDict(json.load(f))) | |
169 | 156 |
170 | 157 |
171 def _ProcessJson(json_data): | 158 def _ProcessJson(json_data): |
172 assert json_data | 159 assert json_data |
173 return loading_model.ResourceGraph(log_parser.FilterRequests( | 160 return loading_model.ResourceGraph( |
174 [log_parser.RequestData.FromDict(r) for r in json_data])) | 161 loading_trace.LoadingTrace.FromJsonDict(json_data)) |
175 | 162 |
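[Editor's aside: a hypothetical sketch of the simplification blundell's truncated comment above appears to point at; completing the cut-off suggestion as json.load(f) is an inference, not part of this change. With ResourceGraph now built from a LoadingTrace JSON dict, _ProcessRequests could simply delegate to _ProcessJson:]

def _ProcessRequests(filename):
  with open(filename) as f:
    return _ProcessJson(json.load(f))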
176 | 163 |
177 def InvalidCommand(cmd): | 164 def InvalidCommand(cmd): |
178 sys.exit('Invalid command "%s"\nChoices are: %s' % | 165 sys.exit('Invalid command "%s"\nChoices are: %s' % |
179 (cmd, ' '.join(COMMAND_MAP.keys()))) | 166 (cmd, ' '.join(COMMAND_MAP.keys()))) |
180 | 167 |
181 | 168 |
182 def DoCost(arg_str): | 169 def DoCost(arg_str): |
183 parser = argparse.ArgumentParser(usage='cost [--parameter ...] REQUEST_JSON') | 170 parser = argparse.ArgumentParser(usage='cost [--parameter ...] REQUEST_JSON') |
184 parser.add_argument('request_json') | 171 parser.add_argument('request_json') |
(...skipping 63 matching lines...)
248 usage='prefetch_setup [--upload] REQUEST_JSON TARGET_HTML') | 235 usage='prefetch_setup [--upload] REQUEST_JSON TARGET_HTML') |
249 parser.add_argument('request_json') | 236 parser.add_argument('request_json') |
250 parser.add_argument('target_html') | 237 parser.add_argument('target_html') |
251 parser.add_argument('--upload', action='store_true') | 238 parser.add_argument('--upload', action='store_true') |
252 args = parser.parse_args(arg_str) | 239 args = parser.parse_args(arg_str) |
253 graph = _ProcessRequests(args.request_json) | 240 graph = _ProcessRequests(args.request_json) |
254 with open(args.target_html, 'w') as html: | 241 with open(args.target_html, 'w') as html: |
255 html.write(_GetPrefetchHtml( | 242 html.write(_GetPrefetchHtml( |
256 graph, name=os.path.basename(args.request_json))) | 243 graph, name=os.path.basename(args.request_json))) |
257 if args.upload: | 244 if args.upload: |
258 device = _SetupAndGetDevice() | 245 device = device_setup.GetFirstDevice() |
259 destination = os.path.join('/sdcard/Download', | 246 destination = os.path.join('/sdcard/Download', |
260 os.path.basename(args.target_html)) | 247 os.path.basename(args.target_html)) |
261 device.adb.Push(args.target_html, destination) | 248 device.adb.Push(args.target_html, destination) |
262 | 249 |
263 logging.warning( | 250 logging.warning( |
264 'Pushed %s to device at %s' % (args.target_html, destination)) | 251 'Pushed %s to device at %s' % (args.target_html, destination)) |
265 | 252 |
266 | 253 |
267 def DoLogRequests(arg_str): | 254 def DoLogRequests(arg_str): |
268 parser = argparse.ArgumentParser( | 255 parser = argparse.ArgumentParser( |
(...skipping 26 matching lines...)
295 args = parser.parse_args(arg_str) | 282 args = parser.parse_args(arg_str) |
296 if not os.path.exists(args.dir): | 283 if not os.path.exists(args.dir): |
297 os.makedirs(args.dir) | 284 os.makedirs(args.dir) |
298 _FullFetch(url=args.site, | 285 _FullFetch(url=args.site, |
299 json_output=os.path.join(args.dir, args.site + '.json'), | 286 json_output=os.path.join(args.dir, args.site + '.json'), |
300 prefetch=True, | 287 prefetch=True, |
301 prefetch_delay_seconds=args.prefetch_delay_seconds, | 288 prefetch_delay_seconds=args.prefetch_delay_seconds, |
302 local=False) | 289 local=False) |
303 | 290 |
304 | 291 |
305 def DoTracing(arg_str): | |
306 parser = argparse.ArgumentParser( | |
307 usage='tracing URL JSON_OUTPUT') | |
308 parser.add_argument('url') | |
309 parser.add_argument('json_output') | |
310 args = parser.parse_args(arg_str) | |
311 device = _SetupAndGetDevice() | |
312 request_logger = log_requests.AndroidRequestsLogger(device) | |
313 tracing = request_logger.LogTracing(args.url) | |
314 with open(args.json_output, 'w') as f: | |
315 _WriteJson(f, tracing) | |
316 logging.warning('Wrote ' + args.json_output) | |
317 | |
318 | |
319 def DoLongPole(arg_str): | 292 def DoLongPole(arg_str): |
320 parser = argparse.ArgumentParser(usage='longpole [--noads] REQUEST_JSON') | 293 parser = argparse.ArgumentParser(usage='longpole [--noads] REQUEST_JSON') |
321 parser.add_argument('request_json') | 294 parser.add_argument('request_json') |
322 parser.add_argument('--noads', action='store_true') | 295 parser.add_argument('--noads', action='store_true') |
323 args = parser.parse_args(arg_str) | 296 args = parser.parse_args(arg_str) |
324 graph = _ProcessRequests(args.request_json) | 297 graph = _ProcessRequests(args.request_json) |
325 if args.noads: | 298 if args.noads: |
326 graph.Set(node_filter=graph.FilterAds) | 299 graph.Set(node_filter=graph.FilterAds) |
327 path_list = [] | 300 path_list = [] |
328 cost = graph.Cost(path_list=path_list) | 301 cost = graph.Cost(path_list=path_list) |
(...skipping 10 matching lines...)
339 graph.Set(node_filter=graph.FilterAds) | 312 graph.Set(node_filter=graph.FilterAds) |
340 print sum((n.NodeCost() for n in graph.Nodes())) | 313 print sum((n.NodeCost() for n in graph.Nodes())) |
341 | 314 |
342 | 315 |
343 COMMAND_MAP = { | 316 COMMAND_MAP = { |
344 'cost': DoCost, | 317 'cost': DoCost, |
345 'png': DoPng, | 318 'png': DoPng, |
346 'compare': DoCompare, | 319 'compare': DoCompare, |
347 'prefetch_setup': DoPrefetchSetup, | 320 'prefetch_setup': DoPrefetchSetup, |
348 'log_requests': DoLogRequests, | 321 'log_requests': DoLogRequests, |
349 'tracing': DoTracing, | |
350 'longpole': DoLongPole, | 322 'longpole': DoLongPole, |
351 'nodecost': DoNodeCost, | 323 'nodecost': DoNodeCost, |
352 'fetch': DoFetch, | 324 'fetch': DoFetch, |
353 } | 325 } |
354 | 326 |
355 def main(): | 327 def main(): |
356 logging.basicConfig(level=logging.WARNING) | 328 logging.basicConfig(level=logging.WARNING) |
357 parser = argparse.ArgumentParser(usage=' '.join(COMMAND_MAP.keys())) | 329 parser = argparse.ArgumentParser(usage=' '.join(COMMAND_MAP.keys())) |
358 parser.add_argument('command') | 330 parser.add_argument('command') |
359 parser.add_argument('rest', nargs=argparse.REMAINDER) | 331 parser.add_argument('rest', nargs=argparse.REMAINDER) |
360 args = parser.parse_args() | 332 args = parser.parse_args() |
361 devil_chromium.Initialize() | 333 devil_chromium.Initialize() |
362 COMMAND_MAP.get(args.command, | 334 COMMAND_MAP.get(args.command, |
363 lambda _: InvalidCommand(args.command))(args.rest) | 335 lambda _: InvalidCommand(args.command))(args.rest) |
364 | 336 |
365 | 337 |
366 if __name__ == '__main__': | 338 if __name__ == '__main__': |
367 main() | 339 main() |