Chromium Code Reviews

Unified Diff: tools/binary_size/run_binary_size_analysis.py

Issue 231803002: New binary size tool visualization options. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Rename experimental_template and template (created 6 years, 8 months ago)
 #!/usr/bin/python
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Generate a spatial analysis against an arbitrary library.
 
 To use, build the 'binary_size_tool' target. Then run this tool, passing
 in the location of the library to be analyzed along with any other options
 you desire.
 """
 
 import collections
 import fileinput
 import json
 import optparse
 import os
 import pprint
 import re
 import shutil
 import subprocess
 import sys
 import tempfile
 
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def FormatBytes(bytes):
   """Pretty-print a number of bytes."""
   if bytes > 1e6:
     bytes = bytes / 1.0e6
     return '%.1fm' % bytes
   if bytes > 1e3:
     bytes = bytes / 1.0e3
     return '%.1fk' % bytes
   return str(bytes)
 
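As a quick worked example of the thresholds above (a sanity-check sketch, not part of the change itself):

    # FormatBytes() reports values above 1e6 in megabytes, values above 1e3
    # in kilobytes, and everything else unchanged.
    assert FormatBytes(2500000) == '2.5m'
    assert FormatBytes(4096) == '4.1k'
    assert FormatBytes(512) == '512'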
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def SymbolTypeToHuman(type):
   """Convert a symbol type as printed by nm into a human-readable name."""
   return {'b': 'bss',
           'd': 'data',
           'r': 'read-only data',
           't': 'code',
           'w': 'weak symbol',
           'v': 'weak symbol'}[type]
 
 
(...skipping 16 matching lines...)
   addr_re = re.compile(r'^[0-9a-f]{8} (.) ([^\t]+)(?:\t.*)?$')
   # Match lines that don't have an address at all -- typically external symbols.
   noaddr_re = re.compile(r'^ {8} (.) (.*)$')
 
   for line in input:
     line = line.rstrip()
     match = sym_re.match(line)
     if match:
       size, type, sym = match.groups()[0:3]
       size = int(size, 16)
-      type = type.lower()
-      if type == 'v':
-        type = 'w' # just call them all weak
-      if type == 'b':
+      if type.lower() == 'b':
         continue # skip all BSS for now
       path = match.group(4)
       yield sym, type, size, path
       continue
     match = addr_re.match(line)
     if match:
       type, sym = match.groups()[0:2]
       # No size == we don't care.
       continue
     match = noaddr_re.match(line)
     if match:
       type, sym = match.groups()
       if type in ('U', 'w'):
         # external or weak symbol
         continue
 
     print >>sys.stderr, 'unparsed:', repr(line)
 
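To make the regexes above concrete, here is roughly how a few nm-style lines would be handled. The exact sym_re pattern is defined in the lines elided above, so the first example only assumes the usual shape of a line carrying a size and an optional tab-separated path:

    # '0002b6e0 00000134 t Foo::Bar()\t/src/foo/bar.cc'
    #   -> assumed to match sym_re; would yield ('Foo::Bar()', 't', 308,
    #      '/src/foo/bar.cc'), since its type is not 'b'.
    # '0002b6e0 t _GLOBAL__I_foo.cc'
    #   -> matches addr_re; it carries no size field, so the line is skipped.
    # '         U malloc'
    #   -> matches noaddr_re with type 'U' (external symbol), also skipped.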
bulach 2014/04/16 16:32:16 nit: another \n also, _MkChild
Andrew Hayden (chromium.org) 2014/04/16 17:53:21 Done.
+def _mk_child(node, name):
+  child = None
+  for test in node['children']:
+    if test['n'] == name:
+      child = test
+      break
+  if child == None:
bulach 2014/04/16 16:32:16 nit: if not child:
Andrew Hayden (chromium.org) 2014/04/16 17:53:21 Done.
+    child = {'n': name, 'children': []}
+    node['children'].append(child)
+  return child
 
bulach 2014/04/16 16:32:16 nit: another \n
Andrew Hayden (chromium.org) 2014/04/16 17:53:21 Done throughout
+def MakeCompactTree(symbols):
+  result = {'n': '/', 'children': [], 'k': 'p', 'maxDepth': 0}
+  for symbol_name, symbol_type, symbol_size, file_path in symbols:
+
+    if 'vtable for ' in symbol_name:
+      symbol_type = '@' # hack to categorize these separately
+    # Take path like '/foo/bar/baz', convert to ['foo', 'bar', 'baz']
+    if file_path:
+      file_path = os.path.normpath(file_path)
+    else:
+      file_path = '(No Path)'
+
+    if file_path.startswith('/'):
+      file_path = file_path[1:]
+    path_parts = file_path.split('/')
+
+    # Find pre-existing node in tree, or update if it already exists
+    node = result
+    depth = 0
+    while len(path_parts) > 0:
+      path_part = path_parts.pop(0)
+      if len(path_part) == 0:
+        continue
+      depth += 1
+      node = _mk_child(node, path_part);
+      node['k'] = 'p' # p for path
+
+    # 'node' is now the file node. Find the symbol-type bucket.
+    node['lastPathElement'] = True
+    node = _mk_child(node, symbol_type)
+    node['t'] = symbol_type
+    node['k'] = 'b' # b for bucket
+    depth += 1
+
+    # 'node' is now the symbol-type bucket. Make the child entry.
+    node = _mk_child(node, symbol_name)
+    if 'children' in node: # Only possible if we're adding duplicate entries!!!
+      del node['children']
+    node['value'] = symbol_size
+    node['t'] = symbol_type
+    node['k'] = 's' # s for symbol
+    depth += 1
+    result['maxDepth'] = max(result['maxDepth'], depth);
+
+  return result
+
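As a sketch of the data format the new MakeCompactTree() emits, tracing the code above by hand for a single made-up symbol:

    # MakeCompactTree([('Foo::Bar()', 't', 48, '/src/foo/bar.cc')]) builds roughly:
    #
    # {'n': '/', 'k': 'p', 'maxDepth': 5, 'children': [
    #   {'n': 'src', 'k': 'p', 'children': [
    #     {'n': 'foo', 'k': 'p', 'children': [
    #       {'n': 'bar.cc', 'k': 'p', 'lastPathElement': True, 'children': [
    #         {'n': 't', 't': 't', 'k': 'b', 'children': [
    #           {'n': 'Foo::Bar()', 'value': 48, 't': 't', 'k': 's'}]}]}]}]}]}
    #
    # 'n' is the node name, 'k' the node kind ('p' path, 'b' bucket, 's' symbol),
    # 't' the symbol type, and 'value' the size in bytes; symbol leaves have
    # their empty 'children' list deleted.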
+
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def TreeifySymbols(symbols):
   """Convert symbols into a path-based tree, calculating size information
   along the way.
 
   The result is a dictionary that contains two kinds of nodes:
   1. Leaf nodes, representing source code locations (e.g., c++ files)
      These nodes have the following dictionary entries:
        sizes: a dictionary whose keys are categories (such as code, data,
               vtable, etceteras) and whose values are the size, in bytes, of
               those categories;
(...skipping 75 matching lines...)
         subkey = '__FUNCTION__'
       elif sym.startswith('CSWTCH.'):
         subkey = 'CSWTCH'
       elif '::' in sym:
         subkey = sym[0:sym.find('::') + 2]
       tree['sizes'][subkey] = tree['sizes'].get(subkey, 0) + size
       tree['size'] += size
   return dirs
 
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def JsonifyTree(tree, name):
   """Convert TreeifySymbols output to a JSON treemap.
 
   The format is very similar, with the notable exceptions being
   lists of children instead of maps and some different attribute names."""
   children = []
   css_class_map = {
       '[vtable]': 'vtable',
       '[rodata]': 'read-only_data',
       '[data]': 'data',
(...skipping 16 matching lines...)
   # Sort children by size, largest to smallest.
   children.sort(key=lambda child: -child['data']['$area'])
 
   # For leaf nodes, the 'size' attribute is the size of the leaf;
   # Non-leaf nodes don't really have a size, but their 'size' attribute is
   # the sum of the sizes of all their children.
   return {'name': name + ' (' + FormatBytes(tree['size']) + ')',
           'data': { '$area': tree['size'] },
           'children': children }
 
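For comparison with the compact tree above, a node in the legacy treemap JSON produced here has this shape (hand-written illustration based on the return statement above, not captured output):

    # {'name': '/ (4.5m)',          # name decorated with FormatBytes(size)
    #  'data': {'$area': 4530000},  # area value consumed by the treemap renderer
    #  'children': [...]}           # child nodes, sorted largest-first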
+def DumpCompactTree(symbols, outfile):
+  out = open(outfile, 'w')
+  try:
+    out.write('var tree_data = ' + json.dumps(MakeCompactTree(symbols)))
+  finally:
+    out.flush()
+    out.close()
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpTreemap(symbols, outfile):
   dirs = TreeifySymbols(symbols)
   out = open(outfile, 'w')
   try:
     out.write('var kTree = ' + json.dumps(JsonifyTree(dirs, '/')))
   finally:
     out.flush()
     out.close()
 
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpLargestSymbols(symbols, outfile, n):
   # a list of (sym, type, size, path); sort by size.
   symbols = sorted(symbols, key=lambda x: -x[2])
   dumped = 0
   out = open(outfile, 'w')
   try:
     out.write('var largestSymbols = [\n')
     for sym, type, size, path in symbols:
       if type in ('b', 'w'):
         continue # skip bss and weak symbols
(...skipping 23 matching lines...)
     else:
       key = '[no path]'
     if key not in sources:
       sources[key] = {'path': path, 'symbol_count': 0, 'size': 0}
     record = sources[key]
     record['size'] += size
     record['symbol_count'] += 1
   return sources
 
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpLargestSources(symbols, outfile, n):
   map = MakeSourceMap(symbols)
   sources = sorted(map.values(), key=lambda x: -x['size'])
   dumped = 0
   out = open(outfile, 'w')
   try:
     out.write('var largestSources = [\n')
     for record in sources:
       entry = {'size': FormatBytes(record['size']),
                'symbol_count': str(record['symbol_count']),
                'location': record['path']}
       out.write(json.dumps(entry))
       out.write(',\n')
       dumped += 1
       if dumped >= n:
         return
   finally:
     out.write('];\n')
     out.flush()
     out.close()
 
 
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpLargestVTables(symbols, outfile, n):
   vtables = []
   for symbol, type, size, path in symbols:
     if 'vtable for ' in symbol:
       vtables.append({'symbol': symbol, 'path': path, 'size': size})
   vtables = sorted(vtables, key=lambda x: -x['size'])
   dumped = 0
   out = open(outfile, 'w')
   try:
     out.write('var largestVTables = [\n')
     for record in vtables:
       entry = {'size': FormatBytes(record['size']),
                'symbol': record['symbol'],
                'location': record['path']}
       out.write(json.dumps(entry))
       out.write(',\n')
       dumped += 1
       if dumped >= n:
         return
   finally:
     out.write('];\n')
     out.flush()
     out.close()
 
-
+# TODO(andrewhayden): Switch to Primiano's python-based version.
 def RunParallelAddress2Line(outfile, library, arch, jobs, verbose):
   """Run a parallel addr2line processing engine to dump and resolve symbols."""
   out_dir = os.getenv('CHROMIUM_OUT_DIR', 'out')
   build_type = os.getenv('BUILDTYPE', 'Release')
   classpath = os.path.join(out_dir, build_type, 'lib.java',
                            'binary_size_java.jar')
   cmd = ['java',
          '-classpath', classpath,
          'org.chromium.tools.binary_size.ParallelAddress2Line',
          '--disambiguate',
          '--outfile', outfile,
          '--library', library,
          '--threads', jobs]
   if verbose is True:
     cmd.append('--verbose')
   prefix = os.path.join('third_party', 'android_tools', 'ndk', 'toolchains')
   if arch == 'android-arm':
-    prefix = os.path.join(prefix, 'arm-linux-androideabi-4.7', 'prebuilt',
+    prefix = os.path.join(prefix, 'arm-linux-androideabi-4.8', 'prebuilt',
                           'linux-x86_64', 'bin', 'arm-linux-androideabi-')
     cmd.extend(['--nm', prefix + 'nm', '--addr2line', prefix + 'addr2line'])
   elif arch == 'android-mips':
-    prefix = os.path.join(prefix, 'mipsel-linux-android-4.7', 'prebuilt',
+    prefix = os.path.join(prefix, 'mipsel-linux-android-4.8', 'prebuilt',
                           'linux-x86_64', 'bin', 'mipsel-linux-android-')
     cmd.extend(['--nm', prefix + 'nm', '--addr2line', prefix + 'addr2line'])
   elif arch == 'android-x86':
-    prefix = os.path.join(prefix, 'x86-4.7', 'prebuilt',
+    prefix = os.path.join(prefix, 'x86-4.8', 'prebuilt',
                           'linux-x86_64', 'bin', 'i686-linux-android-')
     cmd.extend(['--nm', prefix + 'nm', '--addr2line', prefix + 'addr2line'])
   # else, use whatever is in PATH (don't pass --nm or --addr2line)
 
   if verbose:
     print cmd
 
   return_code = subprocess.call(cmd)
   if return_code:
     raise RuntimeError('Failed to run ParallelAddress2Line: returned ' +
(...skipping 70 matching lines...)
                     'This argument is only valid when using --library.')
   parser.add_option('-v', dest='verbose', action='store_true',
                     help='be verbose, printing lots of status information.')
   parser.add_option('--nm-out', metavar='PATH',
                     help='keep the nm output file, and store it at the '
                     'specified path. This is useful if you want to see the '
                     'fully processed nm output after the symbols have been '
                     'mapped to source locations. By default, a tempfile is '
                     'used and is deleted when the program terminates.'
                     'This argument is only valid when using --library.')
+  parser.add_option('--legacy', action='store_true',
+                    help='emit legacy binary size report instead of modern')
   opts, args = parser.parse_args()
 
   if ((not opts.library) and (not opts.nm_in)) or (opts.library and opts.nm_in):
     parser.error('exactly one of --library or --nm-in is required')
   if (opts.nm_in):
     if opts.jobs:
       print >> sys.stderr, ('WARNING: --jobs has no effect '
                             'when used with --nm-in')
     if opts.arch:
       print >> sys.stderr, ('WARNING: --arch has no effect '
                             'when used with --nm-in')
   if not opts.destdir:
     parser.error('--destdir is required argument')
   if not opts.jobs:
     opts.jobs = '1'
   if not opts.arch:
     opts.arch = 'host-native'
 
   symbols = GetNmSymbols(opts.nm_in, opts.nm_out, opts.library, opts.arch,
                          opts.jobs, opts.verbose is True)
   if not os.path.exists(opts.destdir):
     os.makedirs(opts.destdir, 0755)
 
-  DumpTreemap(symbols, os.path.join(opts.destdir, 'treemap-dump.js'))
-  DumpLargestSymbols(symbols,
-                     os.path.join(opts.destdir, 'largest-symbols.js'), 100)
-  DumpLargestSources(symbols,
-                     os.path.join(opts.destdir, 'largest-sources.js'), 100)
-  DumpLargestVTables(symbols,
-                     os.path.join(opts.destdir, 'largest-vtables.js'), 100)
 
-  # TODO(andrewhayden): Switch to D3 for greater flexibility
-  treemap_out = os.path.join(opts.destdir, 'webtreemap')
-  if not os.path.exists(treemap_out):
-    os.makedirs(treemap_out, 0755)
-  treemap_src = os.path.join('third_party', 'webtreemap', 'src')
-  shutil.copy(os.path.join(treemap_src, 'COPYING'), treemap_out)
-  shutil.copy(os.path.join(treemap_src, 'webtreemap.js'), treemap_out)
-  shutil.copy(os.path.join(treemap_src, 'webtreemap.css'), treemap_out)
-  shutil.copy(os.path.join('tools', 'binary_size', 'template', 'index.html'),
-              opts.destdir)
+  if opts.legacy: # legacy report
+    DumpTreemap(symbols, os.path.join(opts.destdir, 'treemap-dump.js'))
+    DumpLargestSymbols(symbols,
+                       os.path.join(opts.destdir, 'largest-symbols.js'), 100)
+    DumpLargestSources(symbols,
+                       os.path.join(opts.destdir, 'largest-sources.js'), 100)
+    DumpLargestVTables(symbols,
+                       os.path.join(opts.destdir, 'largest-vtables.js'), 100)
+    treemap_out = os.path.join(opts.destdir, 'webtreemap')
+    if not os.path.exists(treemap_out):
+      os.makedirs(treemap_out, 0755)
+    treemap_src = os.path.join('third_party', 'webtreemap', 'src')
+    shutil.copy(os.path.join(treemap_src, 'COPYING'), treemap_out)
+    shutil.copy(os.path.join(treemap_src, 'webtreemap.js'), treemap_out)
+    shutil.copy(os.path.join(treemap_src, 'webtreemap.css'), treemap_out)
+    shutil.copy(os.path.join('tools', 'binary_size', 'legacy_template',
+                             'index.html'), opts.destdir)
+  else: # modern report
+    DumpCompactTree(symbols, os.path.join(opts.destdir, 'data.js'))
+    d3_out = os.path.join(opts.destdir, 'd3')
+    if not os.path.exists(d3_out):
+      os.makedirs(d3_out, 0755)
+    d3_src = os.path.join('third_party', 'd3', 'src')
+    template_src = os.path.join('tools', 'binary_size',
+                                'template')
+    shutil.copy(os.path.join(d3_src, 'LICENSE'), d3_out)
+    shutil.copy(os.path.join(d3_src, 'd3.js'), d3_out)
+    shutil.copy(os.path.join(template_src, 'index.html'), opts.destdir)
+    shutil.copy(os.path.join(template_src, 'D3SymbolTreeMap.js'), opts.destdir)
+
   if opts.verbose:
     print 'Report saved to ' + opts.destdir + '/index.html'
 
 
 if __name__ == '__main__':
   sys.exit(main())
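For reference, the copy and dump calls in main() above leave the destination directory looking roughly like this; the file names are taken from the code, and nothing beyond their names is implied here.

    Modern report (default):
        index.html, D3SymbolTreeMap.js, data.js, d3/d3.js, d3/LICENSE

    Legacy report (--legacy):
        index.html, treemap-dump.js, largest-symbols.js, largest-sources.js,
        largest-vtables.js, webtreemap/webtreemap.js, webtreemap/webtreemap.css,
        webtreemap/COPYING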