Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3)

Side by Side Diff: tools/binary_size/create_html_breakdown.py

Issue 2724253002: V1 of //tools/binary_size rewrite (Closed)
Patch Set: README tweaks, more cases for function parsing Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Creates an html report that allows you to view binary size by component."""
7
8 import argparse
9 import json
10 import logging
11 import os
12 import shutil
13 import sys
14
15 import analyze
16 import helpers
17
18
19 # Node dictionary keys. These are output in json read by the webapp so
20 # keep them short to save file size.
21 # Note: If these change, the webapp must also change.
22 _NODE_TYPE_KEY = 'k'
23 _NODE_TYPE_BUCKET = 'b'
24 _NODE_TYPE_PATH = 'p'
25 _NODE_TYPE_SYMBOL = 's'
26 _NODE_NAME_KEY = 'n'
27 _NODE_CHILDREN_KEY = 'children'
28 _NODE_SYMBOL_TYPE_KEY = 't'
29 _NODE_SYMBOL_TYPE_VTABLE = '@'
30 _NODE_SYMBOL_TYPE_GENERATED = '*'
31 _NODE_SYMBOL_SIZE_KEY = 'value'
32 _NODE_MAX_DEPTH_KEY = 'maxDepth'
33 _NODE_LAST_PATH_ELEMENT_KEY = 'lastPathElement'
34
35 # The display name of the bucket where we put symbols without path.
36 _NAME_NO_PATH_BUCKET = '(No Path)'
37
38 # Try to keep data buckets smaller than this to avoid killing the
39 # graphing lib.
40 _BIG_BUCKET_LIMIT = 3000
41
42
def _GetOrMakeChildNode(node, node_type, name):
  """Returns the child of |node| named |name|, creating it if needed.

  A newly created child is tagged with |node_type|; non-symbol children also
  get an empty children dict. An existing child is asserted to already have
  the requested type before being returned.
  """
  children = node[_NODE_CHILDREN_KEY]
  existing = children.get(name)
  if existing is not None:
    assert existing[_NODE_TYPE_KEY] == node_type
    return existing
  created = {
      _NODE_TYPE_KEY: node_type,
      _NODE_NAME_KEY: name,
  }
  # Symbol nodes are leaves; everything else can hold further children.
  if node_type != _NODE_TYPE_SYMBOL:
    created[_NODE_CHILDREN_KEY] = {}
  children[name] = created
  return created
56
57
def _SplitLargeBucket(bucket):
  """Split the given node into sub-buckets when it's too big.

  Counts the symbols across all symbol-type children of |bucket|; when the
  total exceeds _BIG_BUCKET_LIMIT, the symbols are redistributed into
  "(No Path) subgroup N" path nodes of at most _BIG_BUCKET_LIMIT entries
  each, to avoid killing the graphing lib.
  """
  old_children = bucket[_NODE_CHILDREN_KEY]
  count = 0
  # .items() (not Python-2-only .iteritems()) so this also runs on Python 3.
  for symbol_type, symbol_bucket in old_children.items():
    count += len(symbol_bucket[_NODE_CHILDREN_KEY])
  if count > _BIG_BUCKET_LIMIT:
    new_children = {}
    bucket[_NODE_CHILDREN_KEY] = new_children
    current_bucket = None
    index = 0
    for symbol_type, symbol_bucket in old_children.items():
      for symbol_name, value in symbol_bucket[_NODE_CHILDREN_KEY].items():
        # Start a fresh subgroup every _BIG_BUCKET_LIMIT symbols. Use floor
        # division (//) so group_no stays an int under Python 3 as well.
        if index % _BIG_BUCKET_LIMIT == 0:
          group_no = (index // _BIG_BUCKET_LIMIT) + 1
          node_name = '%s subgroup %d' % (_NAME_NO_PATH_BUCKET, group_no)
          current_bucket = _GetOrMakeChildNode(
              bucket, _NODE_TYPE_PATH, node_name)
        index += 1
        symbol_size = value[_NODE_SYMBOL_SIZE_KEY]
        _AddSymbolIntoFileNode(current_bucket, symbol_type, symbol_name,
                               symbol_size, True)
80
81
def _MakeChildrenDictsIntoLists(node):
  """Recursively converts all children from dicts -> lists.

  The tree is later serialized with json.dump; a concrete list is required
  because Python 3's dict .values() view is not JSON-serializable.
  """
  children = node.get(_NODE_CHILDREN_KEY)
  if children:
    # list(...) rather than bare .values(): identical on Python 2, and
    # keeps the node JSON-serializable on Python 3.
    children = list(children.values())
    node[_NODE_CHILDREN_KEY] = children
    for child in children:
      _MakeChildrenDictsIntoLists(child)
    if len(children) > _BIG_BUCKET_LIMIT:
      logging.warning('Bucket found with %d entries. Might be unusable.',
                      len(children))
93
94
def _AddSymbolIntoFileNode(node, symbol_type, symbol_name, symbol_size,
                           include_symbols):
  """Puts symbol into the file path node |node|.

  Marks |node| as a last path element, finds/creates the symbol-type bucket
  beneath it, then accumulates |symbol_size| into the leaf entry for the
  symbol's display name.
  """
  node[_NODE_LAST_PATH_ELEMENT_KEY] = True
  bucket = _GetOrMakeChildNode(node, _NODE_TYPE_BUCKET, symbol_type)
  bucket[_NODE_SYMBOL_TYPE_KEY] = symbol_type

  # Choose the display name for the leaf entry inside the bucket. Generated
  # symbols (leading '*') are always shown by name; real symbol names are
  # elided unless per-symbol granularity was requested.
  if not symbol_name:
    leaf_name = '[Anonymous]'
  else:
    show_name = include_symbols or symbol_name.startswith('*')
    leaf_name = symbol_name if show_name else '[Elided]'
  leaf = _GetOrMakeChildNode(bucket, _NODE_TYPE_SYMBOL, leaf_name)
  leaf[_NODE_SYMBOL_SIZE_KEY] = leaf.get(_NODE_SYMBOL_SIZE_KEY, 0) + symbol_size
  leaf[_NODE_SYMBOL_TYPE_KEY] = symbol_type
112
113
def _MakeCompactTree(root_group, include_symbols):
  """Builds the compact json-serializable tree from |root_group|.

  Args:
    root_group: Iterable of symbols, each with .path, .section, .name, .size.
    include_symbols: Whether to keep per-symbol names (vs eliding them).

  Returns:
    The root node dict of the tree, with children converted to lists and
    the maximum tree depth recorded under _NODE_MAX_DEPTH_KEY.
  """
  result = {
      _NODE_NAME_KEY: '/',
      _NODE_CHILDREN_KEY: {},
      # Use the named constant rather than a hard-coded 'p' for consistency
      # with the rest of this file (same value).
      _NODE_TYPE_KEY: _NODE_TYPE_PATH,
      _NODE_MAX_DEPTH_KEY: 0,
  }
  for symbol in root_group:
    file_path = symbol.path or _NAME_NO_PATH_BUCKET
    node = result
    depth = 0
    for path_part in file_path.split(os.path.sep):
      if not path_part:
        continue
      depth += 1
      node = _GetOrMakeChildNode(node, _NODE_TYPE_PATH, path_part)

    symbol_type = symbol.section
    if symbol.name:
      if symbol.name.endswith('[vtable]'):
        symbol_type = _NODE_SYMBOL_TYPE_VTABLE
      elif symbol.name.endswith(']'):
        symbol_type = _NODE_SYMBOL_TYPE_GENERATED
    _AddSymbolIntoFileNode(node, symbol_type, symbol.name, symbol.size,
                           include_symbols)
    depth += 2  # Account for the symbol-type bucket and the leaf entry.
    result[_NODE_MAX_DEPTH_KEY] = max(result[_NODE_MAX_DEPTH_KEY], depth)

  # The (no path) bucket can be extremely large if we failed to get
  # path information. Split it into subgroups if needed.
  no_path_bucket = result[_NODE_CHILDREN_KEY].get(_NAME_NO_PATH_BUCKET)
  if no_path_bucket:
    _SplitLargeBucket(no_path_bucket)

  _MakeChildrenDictsIntoLists(result)

  return result
151
152
def _CopyTemplateFiles(dest_dir):
  """Copies the report's static assets (d3 + template) into |dest_dir|."""
  d3_out = os.path.join(dest_dir, 'd3')
  if not os.path.exists(d3_out):
    # 0o755 rather than the Python-2-only literal 0755 (SyntaxError on
    # Python 3); both spellings are valid on Python 2.6+.
    os.makedirs(d3_out, 0o755)
  d3_src = os.path.join(helpers.SRC_ROOT, 'third_party', 'd3', 'src')
  template_src = os.path.join(os.path.dirname(__file__), 'template')
  shutil.copy(os.path.join(d3_src, 'LICENSE'), d3_out)
  shutil.copy(os.path.join(d3_src, 'd3.js'), d3_out)
  shutil.copy(os.path.join(template_src, 'index.html'), dest_dir)
  shutil.copy(os.path.join(template_src, 'D3SymbolTreeMap.js'), dest_dir)
163
164
def main():
  """Parses args, analyzes the binary, and writes the HTML size report."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--report-dir', metavar='PATH', required=True,
                      help='Write output to the specified directory. An HTML '
                           'report is generated here.')
  parser.add_argument('--include-bss', action='store_true',
                      help='Include symbols from .bss (which consume no real '
                           'space)')
  parser.add_argument('--include-symbols', action='store_true',
                      help='Use per-symbol granularity rather than per-file.')
  analyze.AddOptions(parser)
  helpers.AddCommonOptions(parser)
  args = parser.parse_args()
  helpers.HandleCommonOptions(args)

  result = analyze.AnalyzeWithArgs(args)
  root_group = result.symbol_group
  if not args.include_bss:
    root_group = root_group.WhereInSection('b').Inverted()

  # Copy report boilerplate into output directory. This also proves that the
  # output directory is safe for writing, so there should be no problems writing
  # the nm.out file later.
  _CopyTemplateFiles(args.report_dir)

  logging.info('Creating JSON objects')
  tree_root = _MakeCompactTree(root_group, args.include_symbols)

  logging.info('Serializing')
  with open(os.path.join(args.report_dir, 'data.js'), 'w') as out_file:
    out_file.write('var tree_data=')
    # Use separators without whitespace to get a smaller file.
    json.dump(tree_root, out_file, ensure_ascii=False, check_circular=False,
              separators=(',', ':'))

  logging.info('Done. Peak RAM usage was %d MB.', helpers.GetPeakRamUsage())
  # A single-argument print() call behaves identically on Python 2 and 3;
  # the original used the Python-2-only print statement.
  print('Report saved to ' + args.report_dir + '/index.html')

if __name__ == '__main__':
  sys.exit(main())
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698