OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 # Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 # memcheck_analyze.py | 6 # memcheck_analyze.py |
7 | 7 |
8 ''' Given a valgrind XML file, parses errors and uniques them.''' | 8 ''' Given a valgrind XML file, parses errors and uniques them.''' |
9 | 9 |
10 import logging | 10 import logging |
(...skipping 230 matching lines...)
241 # <frame> | 241 # <frame> |
242 # <ip>0x83751BC</ip> | 242 # <ip>0x83751BC</ip> |
243 # <obj>/data/dkegel/chrome-build/src/out/Release/base_unittests</obj> | 243 # <obj>/data/dkegel/chrome-build/src/out/Release/base_unittests</obj> |
244 # <fn>_ZN7testing8internal12TestInfoImpl7RunTestEPNS_8TestInfoE</fn> | 244 # <fn>_ZN7testing8internal12TestInfoImpl7RunTestEPNS_8TestInfoE</fn> |
245 # <dir>/data/dkegel/chrome-build/src/testing/gtest/src</dir> | 245 # <dir>/data/dkegel/chrome-build/src/testing/gtest/src</dir> |
246 # <file>gtest-internal-inl.h</file> | 246 # <file>gtest-internal-inl.h</file> |
247 # <line>655</line> | 247 # <line>655</line> |
248 # </frame> | 248 # </frame> |
249 # although the dir, file, and line elements are missing if there is | 249 # although the dir, file, and line elements are missing if there is |
250 # no debug info. | 250 # no debug info. |
| 251 # |
| 252 # With our patch for https://bugs.kde.org/show_bug.cgi?id=205000 in, |
| 253 # the file also includes records of the form |
| 254 # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj> |
| 255 # giving the filename and load address of each binary that was mapped |
| 256 # into the process. |
251 | 257 |
252 self._kind = getTextOf(error_node, "kind") | 258 self._kind = getTextOf(error_node, "kind") |
253 self._backtraces = [] | 259 self._backtraces = [] |
254 self._suppression = None | 260 self._suppression = None |
255 self._commandline = commandline | 261 self._commandline = commandline |
256 | 262 |
257 # Iterate through the nodes, parsing <what|auxwhat><stack> pairs. | 263 # Iterate through the nodes, parsing <what|auxwhat><stack> pairs. |
258 description = None | 264 description = None |
259 for node in error_node.childNodes: | 265 for node in error_node.childNodes: |
260 if node.localName == "what" or node.localName == "auxwhat": | 266 if node.localName == "what" or node.localName == "auxwhat": |
(...skipping 104 matching lines...)
365 | 371 |
366 def __init__(self, source_dir, files, show_all_leaks=False, use_gdb=False): | 372 def __init__(self, source_dir, files, show_all_leaks=False, use_gdb=False): |
367 '''Reads in a set of files. | 373 '''Reads in a set of files. |
368 | 374 |
369 Args: | 375 Args: |
370 source_dir: Path to top of source tree for this build | 376 source_dir: Path to top of source tree for this build |
371 files: A list of filenames. | 377 files: A list of filenames. |
372 show_all_leaks: whether to show even less important leaks | 378 show_all_leaks: whether to show even less important leaks |
373 ''' | 379 ''' |
374 | 380 |
375 # Beyond the detailed errors parsed by ValgrindError above, | |
376 # the xml file contain records describing suppressions that were used: | |
377 # <suppcounts> | |
378 # <pair> | |
379 # <count>28</count> | |
380 # <name>pango_font_leak_todo</name> | |
381 # </pair> | |
382 # <pair> | |
383 # <count>378</count> | |
384 # <name>bug_13243</name> | |
385 # </pair> | |
386 # </suppcounts> | |
387 # Collect these and print them at the end. | |
388 # | |
389 # With our patch for https://bugs.kde.org/show_bug.cgi?id=205000 in, | |
390 # the file also includes records of the form | |
391 # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj> | |
392 # giving the filename and load address of each binary that was mapped | |
393 # into the process. | |
394 | |
395 global TheAddressTable | 381 global TheAddressTable |
396 if use_gdb: | 382 if use_gdb: |
397 TheAddressTable = _AddressTable() | 383 TheAddressTable = _AddressTable() |
398 self._errors = set() | 384 self._errors = set() |
399 self._suppcounts = {} | |
400 badfiles = set() | 385 badfiles = set() |
401 start = time.time() | 386 start = time.time() |
402 self._parse_failed = False | 387 self._parse_failed = False |
403 for file in files: | 388 for file in files: |
404 # Wait up to three minutes for valgrind to finish writing all files, | 389 # Wait up to three minutes for valgrind to finish writing all files, |
405 # but after that, just skip incomplete files and warn. | 390 # but after that, just skip incomplete files and warn. |
406 f = open(file, "r+") | 391 f = open(file, "r+") |
407 found = False | 392 found = False |
408 firstrun = True | 393 firstrun = True |
409 origsize = os.path.getsize(file) | 394 origsize = os.path.getsize(file) |
(...skipping 47 matching lines...)
457 break | 442 break |
458 | 443 |
459 raw_errors = parsed_file.getElementsByTagName("error") | 444 raw_errors = parsed_file.getElementsByTagName("error") |
460 for raw_error in raw_errors: | 445 for raw_error in raw_errors: |
461 # Ignore "possible" leaks for now by default. | 446 # Ignore "possible" leaks for now by default. |
462 if (show_all_leaks or | 447 if (show_all_leaks or |
463 getTextOf(raw_error, "kind") != "Leak_PossiblyLost"): | 448 getTextOf(raw_error, "kind") != "Leak_PossiblyLost"): |
464 error = ValgrindError(source_dir, raw_error, commandline) | 449 error = ValgrindError(source_dir, raw_error, commandline) |
465 self._errors.add(error) | 450 self._errors.add(error) |
466 | 451 |
467 suppcountlist = parsed_file.getElementsByTagName("suppcounts")[0] | |
468 for node in suppcountlist.getElementsByTagName("pair"): | |
469 count = getTextOf(node, "count"); | |
470 name = getTextOf(node, "name"); | |
471 if name in self._suppcounts: | |
472 self._suppcounts[name] += int(count) | |
473 else: | |
474 self._suppcounts[name] = int(count) | |
475 | |
476 if len(badfiles) > 0: | 452 if len(badfiles) > 0: |
477 logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles)) | 453 logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles)) |
478 for file in badfiles: | 454 for file in badfiles: |
479 logging.warn("Last 20 lines of %s :" % file) | 455 logging.warn("Last 20 lines of %s :" % file) |
480 os.system("tail -n 20 '%s' 1>&2" % file) | 456 os.system("tail -n 20 '%s' 1>&2" % file) |
481 | 457 |
482 def Report(self): | 458 def Report(self): |
483 if self._parse_failed: | 459 if self._parse_failed: |
484 logging.error("FAIL! Couldn't parse Valgrind output file") | 460 logging.error("FAIL! Couldn't parse Valgrind output file") |
485 return -2 | 461 return -2 |
486 | 462 |
487 print "-----------------------------------------------------" | |
488 print "Suppressions used:" | |
489 print " count name" | |
490 for item in sorted(self._suppcounts.items(), key=lambda (k,v): (v,k)): | |
491 print "%7s %s" % (item[1], item[0]) | |
492 print "-----------------------------------------------------" | |
493 | |
494 if self._errors: | 463 if self._errors: |
495 logging.error("FAIL! There were %s errors: " % len(self._errors)) | 464 logging.error("FAIL! There were %s errors: " % len(self._errors)) |
496 | 465 |
497 global TheAddressTable | 466 global TheAddressTable |
498 if TheAddressTable != None: | 467 if TheAddressTable != None: |
499 TheAddressTable.ResolveAll() | 468 TheAddressTable.ResolveAll() |
500 | 469 |
501 for error in self._errors: | 470 for error in self._errors: |
502 logging.error(error) | 471 logging.error(error) |
503 | 472 |
(...skipping 15 matching lines...)
519 parser.error("no filename specified") | 488 parser.error("no filename specified") |
520 filenames = args | 489 filenames = args |
521 | 490 |
522 analyzer = MemcheckAnalyze(options.source_dir, filenames, use_gdb=True) | 491 analyzer = MemcheckAnalyze(options.source_dir, filenames, use_gdb=True) |
523 retcode = analyzer.Report() | 492 retcode = analyzer.Report() |
524 | 493 |
525 sys.exit(retcode) | 494 sys.exit(retcode) |
526 | 495 |
527 if __name__ == "__main__": | 496 if __name__ == "__main__": |
528 _main() | 497 _main() |
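
For context, here is a minimal stand-alone sketch (not part of this change) of how the <frame> and <load_obj> records described in the comments near the top of this diff could be read back out of a memcheck XML file with xml.dom.minidom. The getTextOf helper below is a simplified stand-in for the one defined earlier in memcheck_analyze.py, and dump_records is a hypothetical name used only for this illustration.

    import sys
    from xml.dom.minidom import parse

    def getTextOf(top_node, name):
      ''' Returns the text of the first <name> child of top_node, or "". '''
      text = ""
      for node in top_node.getElementsByTagName(name):
        for child in node.childNodes:
          if child.nodeType == child.TEXT_NODE:
            text += child.data
        return text
      return text

    def dump_records(filename):
      ''' Prints the mapped binaries and stack frames found in one XML file. '''
      doc = parse(filename)
      # <load_obj> records only appear with the bug 205000 patch applied.
      for load_obj in doc.getElementsByTagName("load_obj"):
        print "%s mapped at %s" % (getTextOf(load_obj, "obj"),
                                   getTextOf(load_obj, "ip"))
      # One <frame> per stack entry; file/line are missing without debug info.
      for frame in doc.getElementsByTagName("frame"):
        print "%s %s:%s" % (getTextOf(frame, "fn") or getTextOf(frame, "ip"),
                            getTextOf(frame, "file") or getTextOf(frame, "obj"),
                            getTextOf(frame, "line") or "?")

    if __name__ == "__main__":
      dump_records(sys.argv[1])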