| OLD | NEW |
| 1 # Copyright 2017 The Chromium Authors. All rights reserved. | 1 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Deals with loading & saving .size files.""" | 5 """Deals with loading & saving .size files.""" |
| 6 | 6 |
| 7 import cStringIO | 7 import cStringIO |
| 8 import calendar | 8 import calendar |
| 9 import collections | 9 import collections |
| 10 import datetime | 10 import datetime |
| (...skipping 27 matching lines...) Expand all Loading... |
| 38 } | 38 } |
| 39 metadata_str = json.dumps(headers, indent=2, sort_keys=True) | 39 metadata_str = json.dumps(headers, indent=2, sort_keys=True) |
| 40 file_obj.write('%d\n' % len(metadata_str)) | 40 file_obj.write('%d\n' % len(metadata_str)) |
| 41 file_obj.write(metadata_str) | 41 file_obj.write(metadata_str) |
| 42 file_obj.write('\n') | 42 file_obj.write('\n') |
| 43 _LogSize(file_obj, 'header') # For libchrome: 570 bytes. | 43 _LogSize(file_obj, 'header') # For libchrome: 570 bytes. |
| 44 | 44 |
| 45 # Store a single copy of all paths and have them referenced by index. | 45 # Store a single copy of all paths and have them referenced by index. |
| 46 # Using an OrderedDict makes the indices more repetitive (better compression). | 46 # Using an OrderedDict makes the indices more repetitive (better compression). |
| 47 path_tuples = collections.OrderedDict.fromkeys( | 47 path_tuples = collections.OrderedDict.fromkeys( |
| 48 (s.object_path, s.source_path) for s in size_info.raw_symbols) | 48 (s.object_path, s.source_path) for s in size_info.symbols) |
| 49 for i, key in enumerate(path_tuples): | 49 for i, key in enumerate(path_tuples): |
| 50 path_tuples[key] = i | 50 path_tuples[key] = i |
| 51 file_obj.write('%d\n' % len(path_tuples)) | 51 file_obj.write('%d\n' % len(path_tuples)) |
| 52 file_obj.writelines('%s\t%s\n' % pair for pair in path_tuples) | 52 file_obj.writelines('%s\t%s\n' % pair for pair in path_tuples) |
| 53 _LogSize(file_obj, 'paths') # For libchrome, adds 200kb. | 53 _LogSize(file_obj, 'paths') # For libchrome, adds 200kb. |
| 54 | 54 |
| 55 # Symbol counts by section. | 55 # Symbol counts by section. |
| 56 by_section = models.SymbolGroup(size_info.raw_symbols) | 56 by_section = models.SymbolGroup(size_info.symbols) |
| 57 by_section = by_section.GroupBySectionName().SortedByName() | 57 by_section = by_section.GroupBySectionName().SortedByName() |
| 58 file_obj.write('%s\n' % '\t'.join(g.name for g in by_section)) | 58 file_obj.write('%s\n' % '\t'.join(g.name for g in by_section)) |
| 59 file_obj.write('%s\n' % '\t'.join(str(len(g)) for g in by_section)) | 59 file_obj.write('%s\n' % '\t'.join(str(len(g)) for g in by_section)) |
| 60 | 60 |
| 61 def write_numeric(func, delta=False): | 61 def write_numeric(func, delta=False): |
| 62 for group in by_section: | 62 for group in by_section: |
| 63 prev_value = 0 | 63 prev_value = 0 |
| 64 last_sym = group[-1] | 64 last_sym = group[-1] |
| 65 for symbol in group: | 65 for symbol in group: |
| 66 value = func(symbol) | 66 value = func(symbol) |
| (...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 174 elif alias_counter > 0: | 174 elif alias_counter > 0: |
| 175 new_sym.aliases = raw_symbols[symbol_idx - 1].aliases | 175 new_sym.aliases = raw_symbols[symbol_idx - 1].aliases |
| 176 new_sym.aliases.append(new_sym) | 176 new_sym.aliases.append(new_sym) |
| 177 alias_counter -= 1 | 177 alias_counter -= 1 |
| 178 else: | 178 else: |
| 179 new_sym.aliases = None | 179 new_sym.aliases = None |
| 180 | 180 |
| 181 raw_symbols[symbol_idx] = new_sym | 181 raw_symbols[symbol_idx] = new_sym |
| 182 symbol_idx += 1 | 182 symbol_idx += 1 |
| 183 | 183 |
| 184 return models.SizeInfo(section_sizes, raw_symbols, metadata=metadata) | 184 return models.SizeInfo(section_sizes, models.SymbolGroup(raw_symbols), |
| 185 metadata=metadata) |
| 185 | 186 |
| 186 | 187 |
| 187 def SaveSizeInfo(size_info, path): | 188 def SaveSizeInfo(size_info, path): |
| 188 """Saves |size_info| to |path|.""" | 189 """Saves |size_info| to |path|.""" |
| 189 if os.environ.get('SUPERSIZE_MEASURE_GZIP') == '1': | 190 if os.environ.get('SUPERSIZE_MEASURE_GZIP') == '1': |
| 190 with gzip.open(path, 'wb') as f: | 191 with gzip.open(path, 'wb') as f: |
| 191 _SaveSizeInfoToFile(size_info, f) | 192 _SaveSizeInfoToFile(size_info, f) |
| 192 else: | 193 else: |
| 193 # It is seconds faster to do gzip in a separate step. 6s -> 3.5s. | 194 # It is seconds faster to do gzip in a separate step. 6s -> 3.5s. |
| 194 stringio = cStringIO.StringIO() | 195 stringio = cStringIO.StringIO() |
| 195 _SaveSizeInfoToFile(size_info, stringio) | 196 _SaveSizeInfoToFile(size_info, stringio) |
| 196 | 197 |
| 197 logging.debug('Serialization complete. Gzipping...') | 198 logging.debug('Serialization complete. Gzipping...') |
| 198 stringio.seek(0) | 199 stringio.seek(0) |
| 199 with gzip.open(path, 'wb') as f: | 200 with gzip.open(path, 'wb') as f: |
| 200 shutil.copyfileobj(stringio, f) | 201 shutil.copyfileobj(stringio, f) |
| 201 | 202 |
| 202 | 203 |
| 203 def LoadSizeInfo(path): | 204 def LoadSizeInfo(path): |
| 204 """Returns a SizeInfo loaded from |path|.""" | 205 """Returns a SizeInfo loaded from |path|.""" |
| 205 with gzip.open(path) as f: | 206 with gzip.open(path) as f: |
| 206 return _LoadSizeInfoFromFile(f) | 207 return _LoadSizeInfoFromFile(f) |
| OLD | NEW |