| OLD | NEW |
| 1 # Copyright 2017 The Chromium Authors. All rights reserved. | 1 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Deals with loading & saving .size files.""" | 5 """Deals with loading & saving .size files.""" |
| 6 | 6 |
| 7 import cStringIO | 7 import cStringIO |
| 8 import calendar | 8 import calendar |
| 9 import collections | 9 import collections |
| 10 import datetime | 10 import datetime |
| (...skipping 26 matching lines...) | |
| 37 } | 37 } |
| 38 metadata_str = json.dumps(headers, file_obj, indent=2, sort_keys=True) | 38 metadata_str = json.dumps(headers, file_obj, indent=2, sort_keys=True) |
| 39 file_obj.write('%d\n' % len(metadata_str)) | 39 file_obj.write('%d\n' % len(metadata_str)) |
| 40 file_obj.write(metadata_str) | 40 file_obj.write(metadata_str) |
| 41 file_obj.write('\n') | 41 file_obj.write('\n') |
| 42 _LogSize(file_obj, 'header') # For libchrome: 570 bytes. | 42 _LogSize(file_obj, 'header') # For libchrome: 570 bytes. |
| 43 | 43 |
| 44 # Store a single copy of all paths and have them referenced by index. | 44 # Store a single copy of all paths and have them referenced by index. |
| 45 # Using an OrderedDict makes the indices more repetitive (better compression). | 45 # Using an OrderedDict makes the indices more repetitive (better compression). |
| 46 path_tuples = collections.OrderedDict.fromkeys( | 46 path_tuples = collections.OrderedDict.fromkeys( |
| 47 (s.object_path, s.source_path) for s in size_info.symbols) | 47 (s.object_path, s.source_path) for s in size_info.raw_symbols) |
| 48 for i, key in enumerate(path_tuples): | 48 for i, key in enumerate(path_tuples): |
| 49 path_tuples[key] = i | 49 path_tuples[key] = i |
| 50 file_obj.write('%d\n' % len(path_tuples)) | 50 file_obj.write('%d\n' % len(path_tuples)) |
| 51 file_obj.writelines('%s\t%s\n' % pair for pair in path_tuples) | 51 file_obj.writelines('%s\t%s\n' % pair for pair in path_tuples) |
| 52 _LogSize(file_obj, 'paths') # For libchrome, adds 200kb. | 52 _LogSize(file_obj, 'paths') # For libchrome, adds 200kb. |
| 53 | 53 |
| 54 # Symbol counts by section. | 54 # Symbol counts by section. |
| 55 by_section = size_info.symbols.GroupBySectionName().SortedByName() | 55 by_section = models.SymbolGroup(size_info.raw_symbols) |
| | 56 by_section = by_section.GroupBySectionName().SortedByName() |
| 56 file_obj.write('%s\n' % '\t'.join(g.name for g in by_section)) | 57 file_obj.write('%s\n' % '\t'.join(g.name for g in by_section)) |
| 57 file_obj.write('%s\n' % '\t'.join(str(len(g)) for g in by_section)) | 58 file_obj.write('%s\n' % '\t'.join(str(len(g)) for g in by_section)) |
| 58 | 59 |
| 59 def write_numeric(func, delta=False): | 60 def write_numeric(func, delta=False): |
| 60 for group in by_section: | 61 for group in by_section: |
| 61 prev_value = 0 | 62 prev_value = 0 |
| 62 last_sym = group[-1] | 63 last_sym = group[-1] |
| 63 for symbol in group: | 64 for symbol in group: |
| 64 value = func(symbol) | 65 value = func(symbol) |
| 65 if delta: | 66 if delta: |
| (...skipping 54 matching lines...) | |
| 120 for i, f in enumerate(fields): | 121 for i, f in enumerate(fields): |
| 121 value = value * delta_multiplier + int(f) | 122 value = value * delta_multiplier + int(f) |
| 122 fields[i] = value | 123 fields[i] = value |
| 123 ret.append(fields) | 124 ret.append(fields) |
| 124 return ret | 125 return ret |
| 125 | 126 |
| 126 addresses = read_numeric(delta=True) | 127 addresses = read_numeric(delta=True) |
| 127 sizes = read_numeric(delta=False) | 128 sizes = read_numeric(delta=False) |
| 128 path_indices = read_numeric(delta=True) | 129 path_indices = read_numeric(delta=True) |
| 129 | 130 |
| 130 symbol_list = [None] * sum(section_counts) | 131 raw_symbols = [None] * sum(section_counts) |
| 131 symbol_idx = 0 | 132 symbol_idx = 0 |
| 132 for section_index, cur_section_name in enumerate(section_names): | 133 for section_index, cur_section_name in enumerate(section_names): |
| 133 for i in xrange(section_counts[section_index]): | 134 for i in xrange(section_counts[section_index]): |
| 134 line = next(lines)[:-1] | 135 line = next(lines)[:-1] |
| 135 is_anonymous = line.endswith('\t1') | 136 is_anonymous = line.endswith('\t1') |
| 136 name = line[:-2] if is_anonymous else line | 137 name = line[:-2] if is_anonymous else line |
| 137 | 138 |
| 138 new_sym = models.Symbol.__new__(models.Symbol) | 139 new_sym = models.Symbol.__new__(models.Symbol) |
| 139 new_sym.section_name = cur_section_name | 140 new_sym.section_name = cur_section_name |
| 140 new_sym.address = addresses[section_index][i] | 141 new_sym.address = addresses[section_index][i] |
| 141 new_sym.size = sizes[section_index][i] | 142 new_sym.size = sizes[section_index][i] |
| 142 new_sym.name = name | 143 new_sym.name = name |
| 143 paths = path_tuples[path_indices[section_index][i]] | 144 paths = path_tuples[path_indices[section_index][i]] |
| 144 new_sym.object_path = paths[0] | 145 new_sym.object_path = paths[0] |
| 145 new_sym.source_path = paths[1] | 146 new_sym.source_path = paths[1] |
| 146 new_sym.is_anonymous = is_anonymous | 147 new_sym.is_anonymous = is_anonymous |
| 147 new_sym.padding = 0 # Derived | 148 new_sym.padding = 0 # Derived |
| 148 new_sym.full_name = None # Derived | 149 new_sym.full_name = None # Derived |
| 149 symbol_list[symbol_idx] = new_sym | 150 raw_symbols[symbol_idx] = new_sym |
| 150 symbol_idx += 1 | 151 symbol_idx += 1 |
| 151 | 152 |
| 152 symbols = models.SymbolGroup(symbol_list) | 153 return models.SizeInfo(section_sizes, raw_symbols, metadata=metadata) |
| 153 return models.SizeInfo(section_sizes, symbols, metadata=metadata) | |
| 154 | 154 |
| 155 | 155 |
| 156 def SaveSizeInfo(size_info, path): | 156 def SaveSizeInfo(size_info, path): |
| 157 """Saves |size_info| to |path}.""" | 157 """Saves |size_info| to |path}.""" |
| 158 if os.environ.get('MEASURE_GZIP') == '1': | 158 if os.environ.get('MEASURE_GZIP') == '1': |
| 159 with gzip.open(path, 'wb') as f: | 159 with gzip.open(path, 'wb') as f: |
| 160 _SaveSizeInfoToFile(size_info, f) | 160 _SaveSizeInfoToFile(size_info, f) |
| 161 else: | 161 else: |
| 162 # It is seconds faster to do gzip in a separate step. 6s -> 3.5s. | 162 # It is seconds faster to do gzip in a separate step. 6s -> 3.5s. |
| 163 stringio = cStringIO.StringIO() | 163 stringio = cStringIO.StringIO() |
| 164 _SaveSizeInfoToFile(size_info, stringio) | 164 _SaveSizeInfoToFile(size_info, stringio) |
| 165 | 165 |
| 166 logging.debug('Serialization complete. Gzipping...') | 166 logging.debug('Serialization complete. Gzipping...') |
| 167 stringio.seek(0) | 167 stringio.seek(0) |
| 168 with gzip.open(path, 'wb') as f: | 168 with gzip.open(path, 'wb') as f: |
| 169 shutil.copyfileobj(stringio, f) | 169 shutil.copyfileobj(stringio, f) |
| 170 | 170 |
| 171 | 171 |
| 172 def LoadSizeInfo(path): | 172 def LoadSizeInfo(path): |
| 173 """Returns a SizeInfo loaded from |path|.""" | 173 """Returns a SizeInfo loaded from |path|.""" |
| 174 with gzip.open(path) as f: | 174 with gzip.open(path) as f: |
| 175 return _LoadSizeInfoFromFile(f) | 175 return _LoadSizeInfoFromFile(f) |
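
The path-interning step in the new code (the `OrderedDict.fromkeys` block) stores each `(object_path, source_path)` pair once and refers to it by index. A minimal sketch of the idea, assuming a hypothetical `Sym` stand-in for `models.Symbol` with only the two path fields:

```python
import collections

# Hypothetical stand-in for models.Symbol; only the two path fields matter here.
Sym = collections.namedtuple('Sym', ['object_path', 'source_path'])

def intern_paths(symbols):
    # OrderedDict.fromkeys keeps first-seen order and drops duplicates, so
    # indices assigned to neighbouring symbols tend to repeat, which
    # compresses better under gzip.
    path_tuples = collections.OrderedDict.fromkeys(
        (s.object_path, s.source_path) for s in symbols)
    for i, key in enumerate(path_tuples):
        path_tuples[key] = i
    return path_tuples

symbols = [Sym('a.o', 'a.cc'), Sym('a.o', 'a.cc'), Sym('b.o', 'b.cc')]
table = intern_paths(symbols)
assert [table[(s.object_path, s.source_path)] for s in symbols] == [0, 0, 1]
```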
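The `write_numeric`/`read_numeric` helpers delta-encode columns such as symbol addresses so the stored numbers stay small and repetitive, and the visible `read_numeric` loop restores them with a running sum. A rough round-trip sketch of that transform (not the exact on-disk format, which is only partly visible in this diff):

```python
def delta_encode(values):
    # Store each value as its difference from the previous one.
    prev = 0
    out = []
    for v in values:
        out.append(v - prev)
        prev = v
    return out

def delta_decode(deltas):
    # Reverse the transform: a running sum restores the original values.
    value = 0
    out = []
    for d in deltas:
        value += d
        out.append(value)
    return out

addresses = [0x1000, 0x1004, 0x1010, 0x1018]
encoded = delta_encode(addresses)   # [4096, 4, 12, 8] -- small, repetitive numbers
assert delta_decode(encoded) == addresses
```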
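`SaveSizeInfo` serializes into an in-memory buffer and then gzips the whole buffer in one pass, which the in-code comment reports as roughly 6s -> 3.5s for libchrome. A version-neutral sketch of the same pattern, where the `serialize` callback and the output path are illustrative stand-ins for `_SaveSizeInfoToFile` and a real `.size` path:

```python
import gzip
import io
import shutil

def save_compressed(serialize, path):
    # Serialize to memory first, then compress the whole buffer at once;
    # writing many small pieces directly through a gzip file object is slower.
    buf = io.BytesIO()
    serialize(buf)
    buf.seek(0)
    with gzip.open(path, 'wb') as f:
        shutil.copyfileobj(buf, f)

save_compressed(lambda f: f.write(b'example payload\n'), '/tmp/example.size')
```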