Chromium Code Reviews

Unified Diff: tools/valgrind/memcheck_analyze.py

Issue 8702004: Refactor "Suppressions used" printing code in memcheck and tsan analyzer scripts (Closed)
Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: Created 9 years ago
Next file in this patch: tools/valgrind/tsan/suppressions.txt
--- tools/valgrind/memcheck_analyze.py
+++ tools/valgrind/memcheck_analyze.py
@@ -1,21 +1,22 @@
 #!/usr/bin/python
 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 # memcheck_analyze.py

 ''' Given a valgrind XML file, parses errors and uniques them.'''

 import gdb_helper

+from collections import defaultdict
 import hashlib
 import logging
 import optparse
 import os
 import re
 import subprocess
 import sys
 import time
 from xml.dom.minidom import parse
 from xml.parsers.expat import ExpatError
@@ -428,21 +429,21 @@
     # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj>
     # giving the filename and load address of each binary that was mapped
     # into the process.

     global TheAddressTable
     if self._use_gdb:
       TheAddressTable = gdb_helper.AddressTable()
     else:
       TheAddressTable = None
     cur_report_errors = set()
-    suppcounts = {}
+    suppcounts = defaultdict(int)
     badfiles = set()

     if self._analyze_start_time == None:
       self._analyze_start_time = time.time()
     start_time = self._analyze_start_time

     parse_failed = False
     for file in files:
       # Wait up to three minutes for valgrind to finish writing all files,
       # but after that, just skip incomplete files and warn.
@@ -541,88 +542,76 @@
               # ... and we haven't seen it in other tests as well
               self._errors.add(error)
               cur_report_errors.add(error)

       suppcountlist = parsed_file.getElementsByTagName("suppcounts")
       if len(suppcountlist) > 0:
         suppcountlist = suppcountlist[0]
         for node in suppcountlist.getElementsByTagName("pair"):
           count = getTextOf(node, "count");
           name = getTextOf(node, "name");
-          if name in suppcounts:
-            suppcounts[name] += int(count)
-          else:
-            suppcounts[name] = int(count)
+          suppcounts[name] += int(count)

     if len(badfiles) > 0:
       logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles))
       for file in badfiles:
         logging.warn("Last 20 lines of %s :" % file)
         os.system("tail -n 20 '%s' 1>&2" % file)

     if parse_failed:
       logging.error("FAIL! Couldn't parse Valgrind output file")
       return -2

-    is_sane = False
-    print "-----------------------------------------------------"
-    print "Suppressions used:"
-    print " count name"
-
-    remaining_sanity_supp = MemcheckAnalyzer.SANITY_TEST_SUPPRESSIONS
-    for (name, count) in sorted(suppcounts.items(),
-                                key=lambda (k,v): (v,k)):
-      print "%7d %s" % (count, name)
-      if name in remaining_sanity_supp and remaining_sanity_supp[name] == count:
-        del remaining_sanity_supp[name]
-    if len(remaining_sanity_supp) == 0:
-      is_sane = True
-    print "-----------------------------------------------------"
-    sys.stdout.flush()
+    common.PrintUsedSuppressionsList(suppcounts)

     retcode = 0
     if cur_report_errors:
       logging.error("FAIL! There were %s errors: " % len(cur_report_errors))

       if TheAddressTable != None:
         TheAddressTable.ResolveAll()

       for error in cur_report_errors:
         logging.error(error)

       retcode = -1

     # Report tool's insanity even if there were errors.
-    if check_sanity and not is_sane:
-      logging.error("FAIL! Sanity check failed!")
-      logging.info("The following test errors were not handled: ")
-      for (name, count) in sorted(remaining_sanity_supp.items(),
-                                  key=lambda (k,v): (v,k)):
-        logging.info("%7d %s" % (count, name))
-      retcode = -3
+    if check_sanity:
+      remaining_sanity_supp = MemcheckAnalyzer.SANITY_TEST_SUPPRESSIONS
+      for (name, count) in suppcounts.iteritems():
+        if (name in remaining_sanity_supp and
+            remaining_sanity_supp[name] == count):
+          del remaining_sanity_supp[name]
+      if remaining_sanity_supp:
+        logging.error("FAIL! Sanity check failed!")
+        logging.info("The following test errors were not handled: ")
+        for (name, count) in remaining_sanity_supp.iteritems():
+          logging.info(" * %dx %s" % (count, name))
+        retcode = -3

     if retcode != 0:
       return retcode

     logging.info("PASS! No errors found!")
     return 0

 def _main():
   '''For testing only. The MemcheckAnalyzer class should be imported instead.'''
   retcode = 0
   parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
   parser.add_option("", "--source_dir",
                     help="path to top of source tree for this build"
                     "(used to normalize source paths in baseline)")

   (options, args) = parser.parse_args()
   if len(args) == 0:
     parser.error("no filename specified")
   filenames = args

   analyzer = MemcheckAnalyzer(options.source_dir, use_gdb=True)
   retcode = analyzer.Report(filenames, None)

   sys.exit(retcode)

 if __name__ == "__main__":
   _main()
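
The switch from a plain dict to collections.defaultdict(int) is what lets the per-suppression accumulation in the diff above drop its if/else: a defaultdict(int) returns 0 for keys it has not seen yet. A minimal before/after sketch of the two equivalent patterns (the suppression names here are made up for illustration):

from collections import defaultdict

# Old pattern: every update needs a membership check.
plain = {}
for name, count in [("supp_a", 2), ("supp_b", 1), ("supp_a", 3)]:
  if name in plain:
    plain[name] += count
  else:
    plain[name] = count

# New pattern: missing keys default to int() == 0, so the
# accumulation collapses to a single statement.
counted = defaultdict(int)
for name, count in [("supp_a", 2), ("supp_b", 1), ("supp_a", 3)]:
  counted[name] += count

assert dict(counted) == plain  # both give {"supp_a": 5, "supp_b": 1}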
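The removed printing block is replaced by a single call to common.PrintUsedSuppressionsList(suppcounts), so the memcheck and tsan analyzers can share one implementation. The import of common is presumably among the lines skipped above, and the helper itself is not part of this file. A rough sketch of what such a helper might look like, modeled on the output format of the code deleted here (this is an illustration, not the actual tools/valgrind/common.py implementation):

import sys


def PrintUsedSuppressionsList(suppcounts):
  """Prints a "count name" line for every suppression that fired.

  Hypothetical sketch based on the block removed from memcheck_analyze.py;
  the real shared helper may sort or format differently.
  """
  print("-----------------------------------------------------")
  print("Suppressions used:")
  print("  count name")
  # Sort by count, then by name, as the removed memcheck code did.
  for name, count in sorted(suppcounts.items(),
                            key=lambda pair: (pair[1], pair[0])):
    print("%7d %s" % (count, name))
  print("-----------------------------------------------------")
  sys.stdout.flush()

With the printing factored out, the analyzer only calls the helper at report time, and the sanity-check bookkeeping runs separately over the same suppcounts mapping, as the new code in the last hunk shows.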