OLD | NEW |
---|---|
(Empty) | |
1 #!/usr/bin/python | |
2 # Copyright (c) 2011 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 | |
6 """Prints the size of each given file and optionally computes the size of | |
7 libchrome.so without the dependencies added for building with android NDK. | |
8 Also breaks down the contents of the APK to determine the installed size | |
9 and assign size contributions to different classes of file. | |
10 """ | |
11 | |
12 import collections | |
13 import json | |
14 import operator | |
15 import optparse | |
16 import os | |
17 import re | |
18 import sys | |
19 import tempfile | |
20 import zipfile | |
21 import zlib | |
22 | |
23 from devil.utils import cmd_helper | |
24 from pylib import constants | |
25 | |
26 sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'tools', 'grit')) | |
27 from grit.format import data_pack # pylint: disable=import-error | |
28 sys.path.append(os.path.join( | |
29 constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')) | |
30 import perf_tests_results_helper # pylint: disable=import-error | |
31 | |
32 | |
33 # Static initializers expected in official builds. Note that this list is built | |
34 # using 'nm' on libchrome.so which results from a GCC official build (i.e. | |
35 # Clang is not supported currently). | |
36 | |
37 STATIC_INITIALIZER_SYMBOL_PREFIX = '_GLOBAL__I_' | |
38 | |
39 EXPECTED_STATIC_INITIALIZERS = frozenset([ | |
40 'allocators.cpp', | |
41 'common.pb.cc', | |
42 'defaults.cc', | |
43 'generated_message_util.cc', | |
44 'locale_impl.cpp', | |
45 'timeutils.cc', | |
46 'watchdog.cc', | |
47 # http://b/6354040 | |
48 'SkFontHost_android.cpp', | |
49 # http://b/6354040 | |
50 'isolate.cc', | |
51 'assembler_arm.cc', | |
52 'isolate.cc', | |
53 ]) | |
54 | |
55 _BASE_CHART = { | |
56 'format_version': '0.1', | |
57 'benchmark_name': 'resource_sizes', | |
58 'benchmark_description': 'APK resource size information.', | |
59 'trace_rerun_options': [], | |
60 'charts': {} | |
61 } | |
62 | |
63 _RC_HEADER_RE = re.compile(r'^#define (?P<name>\w+) (?P<id>\d+)$') | |
64 | |
65 | |
def GetStaticInitializers(so_path):
  """Returns a list of static initializers found in the non-stripped library
  located at the provided path. Note that this function assumes that the
  library was compiled with GCC.

  Args:
    so_path: Path to the unstripped shared library.

  Returns:
    List of source file names, one per static-initializer symbol found.
  """
  output = cmd_helper.GetCmdOutput(['nm', so_path])
  static_initializers = []
  # BUG FIX: GetCmdOutput returns stdout as a single string; iterating it
  # directly yields one *character* at a time, so the multi-character
  # STATIC_INITIALIZER_SYMBOL_PREFIX could never match and this function
  # always returned []. Split into lines first.
  for line in output.splitlines():
    # The symbol name is the last whitespace-separated field of an nm line.
    symbol_name = line.split(' ').pop().rstrip()
    if STATIC_INITIALIZER_SYMBOL_PREFIX in symbol_name:
      static_initializers.append(
          symbol_name.replace(STATIC_INITIALIZER_SYMBOL_PREFIX, ''))
  return static_initializers
79 | |
80 | |
def ReportPerfResult(chart_data, graph_title, trace_title, value, units,
                     improvement_direction='down', important=True):
  """Outputs test results in correct format.

  If chart_data is a non-empty dictionary, the result is recorded into it in
  chartjson format; for None (or any other value) the legacy perf-log format
  is printed instead.
  """
  use_chartjson = isinstance(chart_data, dict) and bool(chart_data)
  if not use_chartjson:
    perf_tests_results_helper.PrintPerfResult(
        graph_title, trace_title, [value], units)
    return
  graph = chart_data['charts'].setdefault(graph_title, {})
  graph[trace_title] = {
      'type': 'scalar',
      'value': value,
      'units': units,
      'improvement_direction': improvement_direction,
      'important': important
  }
101 | |
102 | |
def PrintResourceSizes(files, chartjson=None):
  """Prints the sizes of each given file.

  Args:
    files: List of files to print sizes for.
    chartjson: Optional chartjson dict to record results into.
  """
  for file_path in files:
    trace_title = os.path.basename(file_path) + ' size'
    ReportPerfResult(chartjson, 'ResourceSizes', trace_title,
                     os.path.getsize(file_path), 'bytes')
112 | |
113 | |
def PrintApkAnalysis(apk_filename, chartjson=None):
  """Analyse APK to determine size contributions of different file classes."""
  # Named tuple describing one class of files within the APK:
  #   name: human readable name for this file group.
  #   regex: regular expression matched against each member's filename.
  #   extracted: predicate taking a file name and returning whether the file
  #       is extracted from the apk at install/runtime.
  FileGroup = collections.namedtuple('FileGroup',
                                     ['name', 'regex', 'extracted'])

  # Groups are checked in sequence, so more specific regexes should be
  # earlier in the list.
  YES = lambda _: True
  NO = lambda _: False
  FILE_GROUPS = (
      FileGroup('Native code', r'\.so$', lambda f: 'crazy' not in f),
      FileGroup('Java code', r'\.dex$', YES),
      FileGroup('Native resources (no l10n)', r'\.pak$', NO),
      # For locale paks, assume only english paks are extracted.
      FileGroup('Native resources (l10n)', r'\.lpak$', lambda f: 'en_' in f),
      FileGroup('ICU (i18n library) data', r'assets/icudtl\.dat$', NO),
      FileGroup('V8 Snapshots', r'\.bin$', NO),
      FileGroup('PNG drawables', r'\.png$', NO),
      FileGroup('Non-compiled Android resources', r'^res/', NO),
      FileGroup('Compiled Android resources', r'\.arsc$', NO),
      FileGroup('Package metadata', r'^(META-INF/|AndroidManifest\.xml$)', NO),
      FileGroup('Unknown files', r'.', NO),
  )

  # Only the member metadata is needed, so the archive can be closed as soon
  # as infolist() has been read.
  with zipfile.ZipFile(apk_filename, 'r') as apk:
    apk_contents = apk.infolist()

  total_apk_size = os.path.getsize(apk_filename)
  apk_basename = os.path.basename(apk_filename)

  # Bucket every zip member into the first group whose regex matches. The
  # final catch-all group (r'.') matches anything, so the KeyError below is
  # a defensive guard only.
  found_files = dict((group, []) for group in FILE_GROUPS)
  for member in apk_contents:
    for group in FILE_GROUPS:
      if re.search(group.regex, member.filename):
        found_files[group].append(member)
        break
    else:
      raise KeyError('No group found for file "%s"' % member.filename)

  total_install_size = total_apk_size
  for group in FILE_GROUPS:
    members = found_files[group]
    apk_size = sum(m.compress_size for m in members)
    # Extracted files additionally cost their uncompressed size on device.
    extracted_bytes = sum(m.file_size for m in members
                          if group.extracted(m.filename))
    install_size = apk_size + extracted_bytes
    total_install_size += extracted_bytes

    ReportPerfResult(chartjson, apk_basename + '_Breakdown',
                     group.name + ' size', apk_size, 'bytes')
    ReportPerfResult(chartjson, apk_basename + '_InstallBreakdown',
                     group.name + ' size', install_size, 'bytes')

  transfer_size = _CalculateCompressedSize(apk_filename)
  ReportPerfResult(chartjson, apk_basename + '_InstallSize',
                   'Estimated installed size', total_install_size, 'bytes')
  ReportPerfResult(chartjson, apk_basename + '_InstallSize', 'APK size',
                   total_apk_size, 'bytes')
  ReportPerfResult(chartjson, apk_basename + '_TransferSize',
                   'Transfer size (deflate)', transfer_size, 'bytes')
186 | |
187 | |
def IsPakFileName(file_name):
  """Returns whether the given file name ends with .pak or .lpak."""
  return file_name.endswith(('.pak', '.lpak'))
191 | |
192 | |
193 def PrintPakAnalysis(apk_filename, min_pak_resource_size, build_type): | |
194 """Print sizes of all resources in all pak files in |apk_filename|.""" | |
195 print | |
196 print 'Analyzing pak files in %s...' % apk_filename | |
197 | |
198 # A structure for holding details about a pak file. | |
199 Pak = collections.namedtuple( | |
200 'Pak', ['filename', 'compress_size', 'file_size', 'resources']) | |
201 | |
202 # Build a list of Pak objets for each pak file. | |
203 paks = [] | |
204 apk = zipfile.ZipFile(apk_filename, 'r') | |
205 try: | |
206 for i in apk.infolist(): | |
jbudorick
2015/12/17 17:08:14
nit:
for i in (x for x in apk.infolist() if IsP
rnephew (Reviews Here)
2015/12/17 17:28:37
Done.
| |
207 if not IsPakFileName(i.filename): | |
208 continue | |
209 with tempfile.NamedTemporaryFile() as f: | |
210 f.write(apk.read(i.filename)) | |
211 f.flush() | |
212 paks.append(Pak(i.filename, i.compress_size, i.file_size, | |
213 data_pack.DataPack.ReadDataPack(f.name).resources)) | |
214 finally: | |
215 apk.close() | |
216 | |
217 # Output the overall pak file summary. | |
218 total_files = len(paks) | |
219 total_compress_size = sum(pak.compress_size for pak in paks) | |
220 total_file_size = sum(pak.file_size for pak in paks) | |
221 print 'Total pak files: %d' % total_files | |
222 print 'Total compressed size: %s' % _FormatBytes(total_compress_size) | |
223 print 'Total uncompressed size: %s' % _FormatBytes(total_file_size) | |
224 print | |
225 | |
226 # Output the table of details about all pak files. | |
227 print '%25s%11s%21s%21s' % ( | |
228 'FILENAME', 'RESOURCES', 'COMPRESSED SIZE', 'UNCOMPRESSED SIZE') | |
229 for pak in sorted(paks, key=operator.attrgetter('file_size'), reverse=True): | |
230 print '%25s %10s %12s %6.2f%% %12s %6.2f%%' % ( | |
231 pak.filename, | |
232 len(pak.resources), | |
233 _FormatBytes(pak.compress_size), | |
234 100.0 * pak.compress_size / total_compress_size, | |
235 _FormatBytes(pak.file_size), | |
236 100.0 * pak.file_size / total_file_size) | |
237 | |
238 print | |
239 print 'Analyzing pak resources in %s...' % apk_filename | |
240 | |
241 # Calculate aggregate stats about resources across pak files. | |
242 resource_count_map = collections.defaultdict(int) | |
243 resource_size_map = collections.defaultdict(int) | |
244 resource_overhead_bytes = 6 | |
245 for pak in paks: | |
246 for r in pak.resources: | |
247 resource_count_map[r] += 1 | |
248 resource_size_map[r] += len(pak.resources[r]) + resource_overhead_bytes | |
249 | |
250 # Output the overall resource summary. | |
251 total_resource_size = sum(resource_size_map.values()) | |
252 total_resource_count = len(resource_count_map) | |
253 assert total_resource_size <= total_file_size | |
254 print 'Total pak resources: %s' % total_resource_count | |
255 print 'Total uncompressed resource size: %s' % _FormatBytes( | |
256 total_resource_size) | |
257 print | |
258 | |
259 resource_id_name_map = _GetResourceIdNameMap(build_type) | |
260 | |
261 # Output the table of details about all resources across pak files. | |
262 print | |
263 print '%56s %5s %17s' % ('RESOURCE', 'COUNT', 'UNCOMPRESSED SIZE') | |
264 for i in sorted(resource_size_map, key=resource_size_map.get, | |
265 reverse=True): | |
266 if resource_size_map[i] >= min_pak_resource_size: | |
267 print '%56s %5s %9s %6.2f%%' % ( | |
268 i in resource_id_name_map and resource_id_name_map[i] or i, | |
jbudorick
2015/12/17 17:08:14
nit: this can just be
resource_id_name_map.get(
rnephew (Reviews Here)
2015/12/17 17:28:37
Done.
| |
269 resource_count_map[i], | |
270 _FormatBytes(resource_size_map[i]), | |
271 100.0 * resource_size_map[i] / total_resource_size) | |
272 | |
273 | |
274 def _GetResourceIdNameMap(build_type): | |
275 """Returns a map of {resource_id: resource_name}.""" | |
276 out_dir = os.path.join(constants.DIR_SOURCE_ROOT, 'out', build_type) | |
277 assert os.path.isdir(out_dir), 'Failed to locate out dir at %s' % out_dir | |
278 print 'Looking at resources in: %s' % out_dir | |
279 | |
280 grit_headers = [] | |
281 for root, _, files in os.walk(out_dir): | |
282 if root.endswith('grit'): | |
283 grit_headers += [os.path.join(root, f) for f in files if f.endswith('.h')] | |
284 assert grit_headers, 'Failed to find grit headers in %s' % out_dir | |
285 | |
286 id_name_map = {} | |
287 for header in grit_headers: | |
288 with open(header, 'r') as f: | |
289 for line in f.readlines(): | |
290 m = _RC_HEADER_RE.match(line.strip()) | |
291 if m: | |
292 i = int(m.group('id')) | |
293 name = m.group('name') | |
294 if i in id_name_map and name != id_name_map[i]: | |
295 print 'WARNING: Resource ID conflict %s (%s vs %s)' % ( | |
296 i, id_name_map[i], name) | |
297 id_name_map[i] = name | |
298 return id_name_map | |
299 | |
300 | |
301 def PrintStaticInitializersCount(so_with_symbols_path, chartjson=None): | |
302 """Emits the performance result for static initializers found in the provided | |
303 shared library. Additionally, files for which static initializers were | |
304 found are printed on the standard output. | |
305 | |
306 Args: | |
307 so_with_symbols_path: Path to the unstripped libchrome.so file. | |
308 """ | |
309 print 'Files with static initializers:' | |
310 static_initializers = GetStaticInitializers(so_with_symbols_path) | |
311 print '\n'.join(static_initializers) | |
312 | |
313 ReportPerfResult(chartjson, 'StaticInitializersCount', 'count', | |
314 len(static_initializers), 'count') | |
315 | |
316 | |
317 def _FormatBytes(byts): | |
318 """Pretty-print a number of bytes.""" | |
319 if byts > 2**20.0: | |
320 byts /= 2**20.0 | |
321 return '%.2fm' % byts | |
322 if byts > 2**10.0: | |
323 byts /= 2**10.0 | |
324 return '%.2fk' % byts | |
325 return str(byts) | |
326 | |
327 | |
328 def _CalculateCompressedSize(file_path): | |
329 CHUNK_SIZE = 256 * 1024 | |
330 compressor = zlib.compressobj() | |
331 total_size = 0 | |
332 with open(file_path, 'rb') as f: | |
333 for chunk in iter(lambda: f.read(CHUNK_SIZE), ''): | |
334 total_size += len(compressor.compress(chunk)) | |
335 total_size += len(compressor.flush()) | |
336 return total_size | |
337 | |
338 | |
def main(argv):
  usage = """Usage: %prog [options] file1 file2 ...

Pass any number of files to graph their sizes. Any files with the extension
'.apk' will be broken down into their components on a separate graph."""
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--so-path', help='Path to libchrome.so.')
  parser.add_option('--so-with-symbols-path',
                    help='Path to libchrome.so with symbols.')
  parser.add_option('--min-pak-resource-size', type='int',
                    default=20*1024,
                    help='Minimum byte size of displayed pak resources.')
  parser.add_option('--build_type', dest='build_type', default='Debug',
                    help='Sets the build type, default is Debug.')
  parser.add_option('--chartjson', action='store_true',
                    help='Sets output mode to chartjson.')
  parser.add_option('--output-dir', default='.',
                    help='Directory to save chartjson to.')
  parser.add_option('-d', '--device',
                    help='Dummy option for perf runner.')
  options, args = parser.parse_args(argv)

  # args[0] is the script name itself; the rest are files to measure.
  files = args[1:]

  # For backward compatibility with buildbot scripts, treat --so-path as just
  # another file to print the size of. We don't need it for anything special
  # any more.
  if options.so_path:
    files.append(options.so_path)

  if not files:
    parser.error('Must specify a file')

  chartjson = _BASE_CHART.copy() if options.chartjson else None

  if options.so_with_symbols_path:
    PrintStaticInitializersCount(
        options.so_with_symbols_path, chartjson=chartjson)

  PrintResourceSizes(files, chartjson=chartjson)

  for f in files:
    if f.endswith('.apk'):
      PrintApkAnalysis(f, chartjson=chartjson)
      PrintPakAnalysis(f, options.min_pak_resource_size, options.build_type)

  if chartjson:
    results_path = os.path.join(options.output_dir, 'results-chart.json')
    with open(results_path, 'w') as json_file:
      json.dump(chartjson, json_file)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
OLD | NEW |