Chromium Code Reviews

Unified Diff: tools/binary_size/run_binary_size_analysis.py

Issue 231803002: New binary size tool visualization options. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: So indent, very space, much format. Wow. Created 6 years, 8 months ago
 #!/usr/bin/python
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 """Generate a spatial analysis against an arbitrary library.

 To use, build the 'binary_size_tool' target. Then run this tool, passing
 in the location of the library to be analyzed along with any other options
 you desire.
 """

 import collections
 import fileinput
 import json
 import optparse
 import os
 import pprint
 import re
 import shutil
 import subprocess
 import sys
 import tempfile


+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def FormatBytes(bytes):
   """Pretty-print a number of bytes."""
   if bytes > 1e6:
     bytes = bytes / 1.0e6
     return '%.1fm' % bytes
   if bytes > 1e3:
     bytes = bytes / 1.0e3
     return '%.1fk' % bytes
   return str(bytes)


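A few sample values through FormatBytes, for quick reference (the inputs are arbitrary):

  assert FormatBytes(2500000) == '2.5m'
  assert FormatBytes(4096) == '4.1k'
  assert FormatBytes(512) == '512'
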
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def SymbolTypeToHuman(type):
   """Convert a symbol type as printed by nm into a human-readable name."""
   return {'b': 'bss',
           'd': 'data',
           'r': 'read-only data',
           't': 'code',
           'w': 'weak symbol',
           'v': 'weak symbol'}[type]


(...skipping 16 matching lines...)
   addr_re = re.compile(r'^[0-9a-f]{8} (.) ([^\t]+)(?:\t.*)?$')
   # Match lines that don't have an address at all -- typically external symbols.
   noaddr_re = re.compile(r'^ {8} (.) (.*)$')

   for line in input:
     line = line.rstrip()
     match = sym_re.match(line)
     if match:
       size, type, sym = match.groups()[0:3]
       size = int(size, 16)
-      type = type.lower()
-      if type == 'v':
-        type = 'w' # just call them all weak
-      if type == 'b':
+      if type.lower() == 'b':
         continue # skip all BSS for now
       path = match.group(4)
       yield sym, type, size, path
       continue
     match = addr_re.match(line)
     if match:
       type, sym = match.groups()[0:2]
       # No size == we don't care.
       continue
     match = noaddr_re.match(line)
     if match:
       type, sym = match.groups()
       if type in ('U', 'w'):
         # external or weak symbol
         continue

     print >>sys.stderr, 'unparsed:', repr(line)


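The sized-symbol pattern sym_re lives in the elided lines above; the two patterns shown handle nm lines without a size field. A minimal standalone check of those two (the sample lines are invented):

  import re

  addr_re = re.compile(r'^[0-9a-f]{8} (.) ([^\t]+)(?:\t.*)?$')
  noaddr_re = re.compile(r'^ {8} (.) (.*)$')

  # Address but no size: eight hex digits, a type character, then the name.
  assert addr_re.match('0001f2a0 T _start').groups()[0:2] == ('T', '_start')

  # No address at all (e.g. an undefined external): blanks where the hex digits would be.
  assert noaddr_re.match(' ' * 9 + 'U malloc').groups() == ('U', 'malloc')
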
+def _MkChild(node, name):
+  child = None
+  for test in node['children']:
+    if test['n'] == name:
+      child = test
+      break
+  if not child:
+    child = {'n': name, 'children': []}
+    node['children'].append(child)
+  return child
+
+
+def MakeCompactTree(symbols):
+  result = {'n': '/', 'children': [], 'k': 'p', 'maxDepth': 0}
+  for symbol_name, symbol_type, symbol_size, file_path in symbols:
+
+    if 'vtable for ' in symbol_name:
+      symbol_type = '@' # hack to categorize these separately
+    # Take path like '/foo/bar/baz', convert to ['foo', 'bar', 'baz']
+    if file_path:
+      file_path = os.path.normpath(file_path)
+    else:
+      file_path = '(No Path)'
+
+    if file_path.startswith('/'):
+      file_path = file_path[1:]
+    path_parts = file_path.split('/')
+
+    # Find pre-existing node in tree, or update if it already exists
+    node = result
+    depth = 0
+    while len(path_parts) > 0:
+      path_part = path_parts.pop(0)
+      if len(path_part) == 0:
+        continue
+      depth += 1
+      node = _MkChild(node, path_part);
+      node['k'] = 'p' # p for path
+
+    # 'node' is now the file node. Find the symbol-type bucket.
+    node['lastPathElement'] = True
+    node = _MkChild(node, symbol_type)
+    node['t'] = symbol_type
+    node['k'] = 'b' # b for bucket
+    depth += 1
+
+    # 'node' is now the symbol-type bucket. Make the child entry.
+    node = _MkChild(node, symbol_name)
+    if 'children' in node: # Only possible if we're adding duplicate entries!!!
+      del node['children']
+    node['value'] = symbol_size
+    node['t'] = symbol_type
+    node['k'] = 's' # s for symbol
+    depth += 1
+    result['maxDepth'] = max(result['maxDepth'], depth);
+
+  return result
+
+
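To make the compact-tree schema concrete, a minimal sketch with one invented symbol tuple of the form (name, nm type, size in bytes, source path); the keys are the abbreviations used in the code (n = name, k = node kind p/b/s, t = symbol type, value = size):

  symbols = [('Foo::Bar()', 't', 128, '/src/foo/bar.cc')]
  tree = MakeCompactTree(symbols)
  # tree is now, roughly:
  # {'n': '/', 'k': 'p', 'maxDepth': 5, 'children': [
  #   {'n': 'src', 'k': 'p', 'children': [
  #     {'n': 'foo', 'k': 'p', 'children': [
  #       {'n': 'bar.cc', 'k': 'p', 'lastPathElement': True, 'children': [
  #         {'n': 't', 'k': 'b', 't': 't', 'children': [
  #           {'n': 'Foo::Bar()', 'k': 's', 't': 't', 'value': 128}]}]}]}]}]
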
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def TreeifySymbols(symbols):
   """Convert symbols into a path-based tree, calculating size information
   along the way.

   The result is a dictionary that contains two kinds of nodes:
   1. Leaf nodes, representing source code locations (e.g., c++ files)
      These nodes have the following dictionary entries:
        sizes: a dictionary whose keys are categories (such as code, data,
            vtable, etceteras) and whose values are the size, in bytes, of
            those categories;
(...skipping 75 matching lines...)
           subkey = '__FUNCTION__'
         elif sym.startswith('CSWTCH.'):
           subkey = 'CSWTCH'
         elif '::' in sym:
           subkey = sym[0:sym.find('::') + 2]
         tree['sizes'][subkey] = tree['sizes'].get(subkey, 0) + size
         tree['size'] += size
   return dirs


+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def JsonifyTree(tree, name):
   """Convert TreeifySymbols output to a JSON treemap.

   The format is very similar, with the notable exceptions being
   lists of children instead of maps and some different attribute names."""
   children = []
   css_class_map = {
       '[vtable]': 'vtable',
       '[rodata]': 'read-only_data',
       '[data]': 'data',
(...skipping 16 matching lines...)
   # Sort children by size, largest to smallest.
   children.sort(key=lambda child: -child['data']['$area'])

   # For leaf nodes, the 'size' attribute is the size of the leaf;
   # Non-leaf nodes don't really have a size, but their 'size' attribute is
   # the sum of the sizes of all their children.
   return {'name': name + ' (' + FormatBytes(tree['size']) + ')',
           'data': { '$area': tree['size'] },
           'children': children }

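The shape of a single treemap node, as produced by the return statement above, sketched for an invented total of 1234567 bytes:

  node = {'name': '/ (' + FormatBytes(1234567) + ')',  # -> '/ (1.2m)'
          'data': {'$area': 1234567},
          'children': []}  # children, if present, are sorted largest-first
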
+def DumpCompactTree(symbols, outfile):
+  out = open(outfile, 'w')
+  try:
+    out.write('var tree_data = ' + json.dumps(MakeCompactTree(symbols)))
+  finally:
+    out.flush()
+    out.close()

+
+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpTreemap(symbols, outfile):
   dirs = TreeifySymbols(symbols)
   out = open(outfile, 'w')
   try:
     out.write('var kTree = ' + json.dumps(JsonifyTree(dirs, '/')))
   finally:
     out.flush()
     out.close()


+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpLargestSymbols(symbols, outfile, n):
   # a list of (sym, type, size, path); sort by size.
   symbols = sorted(symbols, key=lambda x: -x[2])
   dumped = 0
   out = open(outfile, 'w')
   try:
     out.write('var largestSymbols = [\n')
     for sym, type, size, path in symbols:
       if type in ('b', 'w'):
         continue # skip bss and weak symbols
(...skipping 23 matching lines...)
     else:
       key = '[no path]'
     if key not in sources:
       sources[key] = {'path': path, 'symbol_count': 0, 'size': 0}
     record = sources[key]
     record['size'] += size
     record['symbol_count'] += 1
   return sources


+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpLargestSources(symbols, outfile, n):
   map = MakeSourceMap(symbols)
   sources = sorted(map.values(), key=lambda x: -x['size'])
   dumped = 0
   out = open(outfile, 'w')
   try:
     out.write('var largestSources = [\n')
     for record in sources:
       entry = {'size': FormatBytes(record['size']),
                'symbol_count': str(record['symbol_count']),
                'location': record['path']}
       out.write(json.dumps(entry))
       out.write(',\n')
       dumped += 1
       if dumped >= n:
         return
   finally:
     out.write('];\n')
     out.flush()
     out.close()


+# TODO(andrewhayden): Only used for legacy reports. Delete.
 def DumpLargestVTables(symbols, outfile, n):
   vtables = []
   for symbol, type, size, path in symbols:
     if 'vtable for ' in symbol:
       vtables.append({'symbol': symbol, 'path': path, 'size': size})
   vtables = sorted(vtables, key=lambda x: -x['size'])
   dumped = 0
   out = open(outfile, 'w')
   try:
     out.write('var largestVTables = [\n')
     for record in vtables:
       entry = {'size': FormatBytes(record['size']),
                'symbol': record['symbol'],
                'location': record['path']}
       out.write(json.dumps(entry))
       out.write(',\n')
       dumped += 1
       if dumped >= n:
         return
   finally:
     out.write('];\n')
     out.flush()
     out.close()


+# TODO(andrewhayden): Switch to Primiano's python-based version.
 def RunParallelAddress2Line(outfile, library, arch, jobs, verbose):
   """Run a parallel addr2line processing engine to dump and resolve symbols."""
   out_dir = os.getenv('CHROMIUM_OUT_DIR', 'out')
   build_type = os.getenv('BUILDTYPE', 'Release')
   classpath = os.path.join(out_dir, build_type, 'lib.java',
                            'binary_size_java.jar')
   cmd = ['java',
          '-classpath', classpath,
          'org.chromium.tools.binary_size.ParallelAddress2Line',
          '--disambiguate',
          '--outfile', outfile,
          '--library', library,
          '--threads', jobs]
   if verbose is True:
     cmd.append('--verbose')
   prefix = os.path.join('third_party', 'android_tools', 'ndk', 'toolchains')
   if arch == 'android-arm':
-    prefix = os.path.join(prefix, 'arm-linux-androideabi-4.7', 'prebuilt',
+    prefix = os.path.join(prefix, 'arm-linux-androideabi-4.8', 'prebuilt',
                           'linux-x86_64', 'bin', 'arm-linux-androideabi-')
     cmd.extend(['--nm', prefix + 'nm', '--addr2line', prefix + 'addr2line'])
   elif arch == 'android-mips':
-    prefix = os.path.join(prefix, 'mipsel-linux-android-4.7', 'prebuilt',
+    prefix = os.path.join(prefix, 'mipsel-linux-android-4.8', 'prebuilt',
                           'linux-x86_64', 'bin', 'mipsel-linux-android-')
     cmd.extend(['--nm', prefix + 'nm', '--addr2line', prefix + 'addr2line'])
   elif arch == 'android-x86':
-    prefix = os.path.join(prefix, 'x86-4.7', 'prebuilt',
+    prefix = os.path.join(prefix, 'x86-4.8', 'prebuilt',
                           'linux-x86_64', 'bin', 'i686-linux-android-')
     cmd.extend(['--nm', prefix + 'nm', '--addr2line', prefix + 'addr2line'])
   # else, use whatever is in PATH (don't pass --nm or --addr2line)

   if verbose:
     print cmd

   return_code = subprocess.call(cmd)
   if return_code:
     raise RuntimeError('Failed to run ParallelAddress2Line: returned ' +
(...skipping 70 matching lines...)
                          'This argument is only valid when using --library.')
   parser.add_option('-v', dest='verbose', action='store_true',
                     help='be verbose, printing lots of status information.')
   parser.add_option('--nm-out', metavar='PATH',
                     help='keep the nm output file, and store it at the '
                          'specified path. This is useful if you want to see the '
                          'fully processed nm output after the symbols have been '
                          'mapped to source locations. By default, a tempfile is '
                          'used and is deleted when the program terminates.'
                          'This argument is only valid when using --library.')
+  parser.add_option('--legacy', action='store_true',
+                    help='emit legacy binary size report instead of modern')
   opts, args = parser.parse_args()

   if ((not opts.library) and (not opts.nm_in)) or (opts.library and opts.nm_in):
     parser.error('exactly one of --library or --nm-in is required')
   if (opts.nm_in):
     if opts.jobs:
       print >> sys.stderr, ('WARNING: --jobs has no effect '
                             'when used with --nm-in')
     if opts.arch:
       print >> sys.stderr, ('WARNING: --arch has no effect '
                             'when used with --nm-in')
   if not opts.destdir:
     parser.error('--destdir is required argument')
   if not opts.jobs:
     opts.jobs = '1'
   if not opts.arch:
     opts.arch = 'host-native'

   symbols = GetNmSymbols(opts.nm_in, opts.nm_out, opts.library, opts.arch,
                          opts.jobs, opts.verbose is True)
   if not os.path.exists(opts.destdir):
     os.makedirs(opts.destdir, 0755)

-  DumpTreemap(symbols, os.path.join(opts.destdir, 'treemap-dump.js'))
-  DumpLargestSymbols(symbols,
-                     os.path.join(opts.destdir, 'largest-symbols.js'), 100)
-  DumpLargestSources(symbols,
-                     os.path.join(opts.destdir, 'largest-sources.js'), 100)
-  DumpLargestVTables(symbols,
-                     os.path.join(opts.destdir, 'largest-vtables.js'), 100)

-  # TODO(andrewhayden): Switch to D3 for greater flexibility
-  treemap_out = os.path.join(opts.destdir, 'webtreemap')
-  if not os.path.exists(treemap_out):
-    os.makedirs(treemap_out, 0755)
-  treemap_src = os.path.join('third_party', 'webtreemap', 'src')
-  shutil.copy(os.path.join(treemap_src, 'COPYING'), treemap_out)
-  shutil.copy(os.path.join(treemap_src, 'webtreemap.js'), treemap_out)
-  shutil.copy(os.path.join(treemap_src, 'webtreemap.css'), treemap_out)
-  shutil.copy(os.path.join('tools', 'binary_size', 'template', 'index.html'),
-              opts.destdir)
+  if opts.legacy: # legacy report
+    DumpTreemap(symbols, os.path.join(opts.destdir, 'treemap-dump.js'))
+    DumpLargestSymbols(symbols,
+                       os.path.join(opts.destdir, 'largest-symbols.js'), 100)
+    DumpLargestSources(symbols,
+                       os.path.join(opts.destdir, 'largest-sources.js'), 100)
+    DumpLargestVTables(symbols,
+                       os.path.join(opts.destdir, 'largest-vtables.js'), 100)
+    treemap_out = os.path.join(opts.destdir, 'webtreemap')
+    if not os.path.exists(treemap_out):
+      os.makedirs(treemap_out, 0755)
+    treemap_src = os.path.join('third_party', 'webtreemap', 'src')
+    shutil.copy(os.path.join(treemap_src, 'COPYING'), treemap_out)
+    shutil.copy(os.path.join(treemap_src, 'webtreemap.js'), treemap_out)
+    shutil.copy(os.path.join(treemap_src, 'webtreemap.css'), treemap_out)
+    shutil.copy(os.path.join('tools', 'binary_size', 'legacy_template',
+                             'index.html'), opts.destdir)
+  else: # modern report
+    DumpCompactTree(symbols, os.path.join(opts.destdir, 'data.js'))
+    d3_out = os.path.join(opts.destdir, 'd3')
+    if not os.path.exists(d3_out):
+      os.makedirs(d3_out, 0755)
+    d3_src = os.path.join('third_party', 'd3', 'src')
+    template_src = os.path.join('tools', 'binary_size',
+                                'template')
+    shutil.copy(os.path.join(d3_src, 'LICENSE'), d3_out)
+    shutil.copy(os.path.join(d3_src, 'd3.js'), d3_out)
+    shutil.copy(os.path.join(template_src, 'index.html'), opts.destdir)
+    shutil.copy(os.path.join(template_src, 'D3SymbolTreeMap.js'), opts.destdir)
+
   if opts.verbose:
     print 'Report saved to ' + opts.destdir + '/index.html'


 if __name__ == '__main__':
   sys.exit(main())
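For context, a minimal sketch of driving the tool in its two modes after this patch; the library path and destination directories are invented:

  import subprocess

  # Default: the new D3-based report (data.js plus template/index.html).
  subprocess.check_call([
      'python', 'tools/binary_size/run_binary_size_analysis.py',
      '--library', 'out/Release/lib/libchrome.so',  # hypothetical .so path
      '--destdir', '/tmp/binary_size_report',
      '--arch', 'android-arm', '--jobs', '8'])

  # With --legacy: the old webtreemap-based report.
  subprocess.check_call([
      'python', 'tools/binary_size/run_binary_size_analysis.py',
      '--library', 'out/Release/lib/libchrome.so',
      '--destdir', '/tmp/binary_size_report_legacy',
      '--arch', 'android-arm', '--jobs', '8', '--legacy'])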