OLD | NEW |
1 # Copyright 2017 The Chromium Authors. All rights reserved. | 1 # Copyright 2017 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """Functions that rely on parsing output of "nm" tool.""" | 5 """Functions that rely on parsing output of "nm" tool.""" |
6 | 6 |
| 7 import atexit |
7 import collections | 8 import collections |
| 9 import errno |
8 import logging | 10 import logging |
9 import os | 11 import os |
10 import subprocess | 12 import subprocess |
11 import sys | 13 import sys |
12 | 14 |
13 import concurrent | 15 import concurrent |
14 | 16 |
| 17 _active_subprocesses = None |
| 18 |
15 | 19 |
16 def CollectAliasesByAddress(elf_path, tool_prefix): | 20 def CollectAliasesByAddress(elf_path, tool_prefix): |
17 """Runs nm on |elf_path| and returns a dict of address->[names]""" | 21 """Runs nm on |elf_path| and returns a dict of address->[names]""" |
18 names_by_address = collections.defaultdict(list) | 22 names_by_address = collections.defaultdict(list) |
19 | 23 |
20 # About 60 MB of output, but piping takes ~30s, and loading it into RAM | 24 # About 60 MB of output, but piping takes ~30s, and loading it into RAM |
21 # directly takes 3s. | 25 # directly takes 3s. |
22 args = [tool_prefix + 'nm', '--no-sort', '--defined-only', '--demangle', | 26 args = [tool_prefix + 'nm', '--no-sort', '--defined-only', '--demangle', |
23 elf_path] | 27 elf_path] |
24 output = subprocess.check_output(args) | 28 output = subprocess.check_output(args) |
(...skipping 136 matching lines...)
161 # It would speed up marshalling of the values by removing all entries | 165 # It would speed up marshalling of the values by removing all entries |
162 # that have only 1 path. However, these entries are needed to give | 166 # that have only 1 path. However, these entries are needed to give |
163 # path information to symbol aliases. | 167 # path information to symbol aliases. |
164 self._result = paths_by_name | 168 self._result = paths_by_name |
165 | 169 |
166 def Get(self): | 170 def Get(self): |
167 assert self._result is not None | 171 assert self._result is not None |
168 return self._result | 172 return self._result |
169 | 173 |
170 | 174 |
| 175 def _TerminateSubprocesses(): |
| 176 for proc in _active_subprocesses: |
| 177 proc.kill() |
| 178 |
| 179 |
171 class _BulkObjectFileAnalyzerMaster(object): | 180 class _BulkObjectFileAnalyzerMaster(object): |
172 """Runs BulkObjectFileAnalyzer in a subprocess.""" | 181 """Runs BulkObjectFileAnalyzer in a subprocess.""" |
173 | 182 |
174 def __init__(self, tool_prefix, output_directory): | 183 def __init__(self, tool_prefix, output_directory): |
175 self._process = None | 184 self._process = None |
176 self._tool_prefix = tool_prefix | 185 self._tool_prefix = tool_prefix |
177 self._output_directory = output_directory | 186 self._output_directory = output_directory |
178 | 187 |
179 def _Spawn(self): | 188 def _Spawn(self): |
| 189 global _active_subprocesses |
180 log_level = str(logging.getLogger().getEffectiveLevel()) | 190 log_level = str(logging.getLogger().getEffectiveLevel()) |
181 args = [sys.executable, __file__, log_level, self._tool_prefix, | 191 args = [sys.executable, __file__, log_level, self._tool_prefix, |
182 self._output_directory] | 192 self._output_directory] |
183 self._process = subprocess.Popen( | 193 self._process = subprocess.Popen( |
184 args, stdin=subprocess.PIPE, stdout=subprocess.PIPE) | 194 args, stdin=subprocess.PIPE, stdout=subprocess.PIPE) |
| 195 if _active_subprocesses is None: |
| 196 _active_subprocesses = [] |
| 197 atexit.register(_TerminateSubprocesses) |
| 198 _active_subprocesses.append(self._process) |
185 | 199 |
186 def AnalyzePaths(self, paths): | 200 def AnalyzePaths(self, paths): |
187 if self._process is None: | 201 if self._process is None: |
188 self._Spawn() | 202 self._Spawn() |
189 | 203 |
190 logging.debug('Sending batch of %d paths to subprocess', len(paths)) | 204 logging.debug('Sending batch of %d paths to subprocess', len(paths)) |
191 payload = '\x01'.join(paths) | 205 payload = '\x01'.join(paths) |
192 self._process.stdin.write('{:08x}'.format(len(payload))) | 206 self._process.stdin.write('{:08x}'.format(len(payload))) |
193 self._process.stdin.write(payload) | 207 self._process.stdin.write(payload) |
194 | 208 |
195 def Close(self): | 209 def Close(self): |
196 assert not self._process.stdin.closed | 210 assert not self._process.stdin.closed |
197 self._process.stdin.close() | 211 self._process.stdin.close() |
| 212 _active_subprocesses.remove(self._process) |
198 | 213 |
199 def Get(self): | 214 def Get(self): |
200 assert self._process.stdin.closed | 215 assert self._process.stdin.closed |
201 logging.debug('Decoding nm results from forked process') | 216 logging.debug('Decoding nm results from forked process') |
202 | 217 |
203 encoded_keys_len = int(self._process.stdout.read(8), 16) | 218 encoded_keys_len = int(self._process.stdout.read(8), 16) |
204 encoded_keys = self._process.stdout.read(encoded_keys_len) | 219 encoded_keys = self._process.stdout.read(encoded_keys_len) |
205 encoded_values = self._process.stdout.read() | 220 encoded_values = self._process.stdout.read() |
206 return concurrent.DecodeDictOfLists(encoded_keys, encoded_values) | 221 return concurrent.DecodeDictOfLists(encoded_keys, encoded_values) |
207 | 222 |
208 | 223 |
209 BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerMaster | 224 BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerMaster |
210 if concurrent.DISABLE_ASYNC: | 225 if concurrent.DISABLE_ASYNC: |
211 BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerWorker | 226 BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerWorker |
212 | 227 |
213 | 228 |
214 def _SubMain(log_level, tool_prefix, output_directory): | 229 def _SubMain(log_level, tool_prefix, output_directory): |
215 logging.basicConfig(level=int(log_level), | 230 logging.basicConfig( |
216 format='%(levelname).1s %(relativeCreated)6d %(message)s') | 231 level=int(log_level), |
| 232 format='nm: %(levelname).1s %(relativeCreated)6d %(message)s') |
217 bulk_analyzer = _BulkObjectFileAnalyzerWorker(tool_prefix, output_directory) | 233 bulk_analyzer = _BulkObjectFileAnalyzerWorker(tool_prefix, output_directory) |
218 while True: | 234 while True: |
219 payload_len = int(sys.stdin.read(8) or '0', 16) | 235 payload_len = int(sys.stdin.read(8) or '0', 16) |
220 if not payload_len: | 236 if not payload_len: |
221 logging.debug('nm bulk subprocess received eof.') | 237 logging.debug('nm bulk subprocess received eof.') |
222 break | 238 break |
223 paths = sys.stdin.read(payload_len).split('\x01') | 239 paths = sys.stdin.read(payload_len).split('\x01') |
224 bulk_analyzer.AnalyzePaths(paths) | 240 bulk_analyzer.AnalyzePaths(paths) |
225 | 241 |
226 bulk_analyzer.Close() | 242 bulk_analyzer.Close() |
227 paths_by_name = bulk_analyzer.Get() | 243 paths_by_name = bulk_analyzer.Get() |
228 encoded_keys, encoded_values = concurrent.EncodeDictOfLists(paths_by_name) | 244 encoded_keys, encoded_values = concurrent.EncodeDictOfLists(paths_by_name) |
229 sys.stdout.write('%08x' % len(encoded_keys)) | 245 try: |
230 sys.stdout.write(encoded_keys) | 246 sys.stdout.write('%08x' % len(encoded_keys)) |
231 sys.stdout.write(encoded_values) | 247 sys.stdout.write(encoded_keys) |
| 248 sys.stdout.write(encoded_values) |
| 249 except IOError as e: |
| 250 # Parent process exited; don't dump a stack trace. |
| 251 if e.errno == errno.EPIPE: |
| 252 sys.exit(1) |
| 253 |
232 logging.debug('nm bulk subprocess finished.') | 254 logging.debug('nm bulk subprocess finished.') |
233 | 255 |
234 | 256 |
235 if __name__ == '__main__': | 257 if __name__ == '__main__': |
236 _SubMain(*sys.argv[1:]) | 258 _SubMain(*sys.argv[1:]) |
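The master side and the forked nm worker talk over ordinary pipes using a small length-prefixed framing: each message is an 8-hex-digit length header followed by the payload, and individual paths inside the payload are joined with '\x01'. The sketch below is a minimal, hypothetical illustration of that framing (write_paths/read_paths are made-up helper names, and an in-memory text stream stands in for the pipe; the real code writes to the subprocess's stdin/stdout):

import io

def write_paths(stream, paths):
  # Join paths with a separator that cannot occur in a path, then prefix the
  # payload with its length encoded as 8 hex digits.
  payload = '\x01'.join(paths)
  stream.write('{:08x}'.format(len(payload)))
  stream.write(payload)

def read_paths(stream):
  # A missing or zero-length header means the writer closed its end (EOF).
  header = stream.read(8)
  if not header or int(header, 16) == 0:
    return None
  return stream.read(int(header, 16)).split('\x01')

buf = io.StringIO()
write_paths(buf, ['obj/foo.o', 'obj/bar.o'])
buf.seek(0)
assert read_paths(buf) == ['obj/foo.o', 'obj/bar.o']

A fixed-size length header keeps the reader's job to two exact read() calls per message, which is why the worker can treat a short read of the header as end-of-input.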
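The other pattern this patch introduces is atexit-based cleanup of the helper process: every spawned subprocess is tracked in a module-level list, and a hook registered on the first spawn kills whatever is still running if the parent exits without a clean Close(). A rough, self-contained sketch of that idea, with illustrative names rather than the ones in the patch:

import atexit
import subprocess

_procs = None

def _kill_remaining():
  for proc in _procs:
    proc.kill()

def spawn(args):
  # Track the child so it cannot outlive the parent. The atexit hook is
  # registered only once, when the first child is created.
  global _procs
  proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  if _procs is None:
    _procs = []
    atexit.register(_kill_remaining)
  _procs.append(proc)
  return proc

def close(proc):
  # A normal shutdown closes stdin (EOF for the child) and removes the
  # process from the kill list so the atexit hook leaves it alone.
  proc.stdin.close()
  _procs.remove(proc)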