
Diff: tools/valgrind/tsan_analyze.py

Issue 8702004: Refactor "Suppressions used" printing code in memcheck and tsan analyzer scripts (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: Created 9 years ago
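
This patch set replaces the hand-rolled "Suppressions used" printing in tsan_analyze.py (and, per the issue title, in the memcheck analyzer as well) with a call to a shared helper, common.PrintUsedSuppressionsList(). The helper lives in tools/valgrind/common.py and is not part of this file's diff; the sketch below is a reconstruction based on the printing code removed further down, so its body is an assumption rather than the actual implementation:

# Hypothetical sketch of common.PrintUsedSuppressionsList(); the real helper
# in tools/valgrind/common.py is not shown in this diff.
import sys

def PrintUsedSuppressionsList(used_suppressions):
  ''' Prints the used-suppressions summary table, sorted by use count.

  Args:
    used_suppressions: dict mapping suppression name -> use count.
  '''
  print "-----------------------------------------------------"
  print "Suppressions used:"
  print "  count name"
  for (name, count) in sorted(used_suppressions.items(),
                              key=lambda (k, v): (v, k)):
    print "%7s %s" % (count, name)
  print "-----------------------------------------------------"
  sys.stdout.flush()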
--- tools/valgrind/tsan_analyze.py (old)
+++ tools/valgrind/tsan_analyze.py (new)
 #!/usr/bin/python
 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # tsan_analyze.py
 
 ''' Given a ThreadSanitizer output file, parses errors and uniques them.'''
 
 import gdb_helper
 
-import common
+from collections import defaultdict
 import hashlib
 import logging
 import optparse
 import os
 import re
 import subprocess
 import sys
 import time
 
+import common
+
 # Global symbol table (ugh)
 TheAddressTable = None
 
 class _StackTraceLine(object):
   def __init__(self, line, address, binary):
     self.raw_line_ = line
     self.address = address
     self.binary = binary
   def __str__(self):
     global TheAddressTable
     file, line = TheAddressTable.GetFileLine(self.binary, self.address)
     if (file is None) or (line is None):
       return self.raw_line_
     else:
       return self.raw_line_.replace(self.binary, '%s:%s' % (file, line))
 
 class TsanAnalyzer(object):
   ''' Given a set of ThreadSanitizer output files, parse all the errors out of
   them, unique them and output the results.'''
 
   LOAD_LIB_RE = re.compile('--[0-9]+-- ([^(:]*) \((0x[0-9a-f]+)\)')
   TSAN_LINE_RE = re.compile('==[0-9]+==\s*[#0-9]+\s*'
                             '([0-9A-Fa-fx]+):'
                             '(?:[^ ]* )*'
                             '([^ :\n]+)'
                             '')
   THREAD_CREATION_STR = ("INFO: T.* "
       "(has been created by T.* at this point|is program's main thread)")
 
-  SANITY_TEST_SUPPRESSION = "ThreadSanitizer sanity test"
+  SANITY_TEST_SUPPRESSION = ("ThreadSanitizer sanity test "
+                             "(ToolsSanityTest.DataRace)")
   TSAN_RACE_DESCRIPTION = "Possible data race"
   TSAN_WARNING_DESCRIPTION = ("Unlocking a non-locked lock"
                               "|accessing an invalid lock"
                               "|which did not acquire this lock")
   RACE_VERIFIER_LINE = "Confirmed a race|unexpected race"
   TSAN_ASSERTION = "Assertion failed: "
 
   def __init__(self, source_dir, use_gdb=False):
     '''Reads in a set of files.
 
(...skipping 117 matching lines...)
       tmp = []
       if re.search(TsanAnalyzer.TSAN_ASSERTION, self.line_):
         tmp.extend(self.ReadTillTheEnd())
         ret.append(tmp)
         break
 
       match = re.search("used_suppression:\s+([0-9]+)\s(.*)", self.line_)
       if match:
         count, supp_name = match.groups()
         count = int(count)
-        if supp_name in self.used_suppressions:
-          self.used_suppressions[supp_name] += count
-        else:
-          self.used_suppressions[supp_name] = count
+        self.used_suppressions[supp_name] += count
     self.cur_fd_.close()
     return ret
 
   def GetReports(self, files):
     '''Extracts reports from a set of files.
 
     Reads a set of files and returns a list of all discovered
     ThreadSanitizer race reports. As a side effect, populates
     self.used_suppressions with appropriate info.
     '''
 
     global TheAddressTable
     if self._use_gdb:
       TheAddressTable = gdb_helper.AddressTable()
     else:
       TheAddressTable = None
     reports = []
-    self.used_suppressions = {}
+    self.used_suppressions = defaultdict(int)
     for file in files:
       reports.extend(self.ParseReportFile(file))
     if self._use_gdb:
       TheAddressTable.ResolveAll()
     # Make each line of each report a string.
     reports = map(lambda(x): map(str, x), reports)
     return [''.join(report_lines) for report_lines in reports]
 
   def Report(self, files, testcase, check_sanity=False):
     '''Reads in a set of files and prints ThreadSanitizer report.
 
     Args:
       files: A list of filenames.
       check_sanity: if true, search for SANITY_TEST_SUPPRESSIONS
     '''
 
     # We set up _cur_testcase class-wide variable to avoid passing it through
     # about 5 functions.
     self._cur_testcase = testcase
     reports = self.GetReports(files)
     self._cur_testcase = None  # just in case, shouldn't be used anymore
 
-    is_sane = False
-    print "-----------------------------------------------------"
-    print "Suppressions used:"
-    print "  count name"
-    for item in sorted(self.used_suppressions.items(), key=lambda (k,v): (v,k)):
-      print "%7s %s" % (item[1], item[0])
-      if item[0].startswith(TsanAnalyzer.SANITY_TEST_SUPPRESSION):
-        is_sane = True
-    print "-----------------------------------------------------"
-    sys.stdout.flush()
+    common.PrintUsedSuppressionsList(self.used_suppressions)
+
 
     retcode = 0
     if reports:
       logging.error("FAIL! Found %i report(s)" % len(reports))
       for report in reports:
         logging.error('\n' + report)
       retcode = -1
 
     # Report tool's insanity even if there were errors.
-    if check_sanity and not is_sane:
+    if (check_sanity and
+        TsanAnalyzer.SANITY_TEST_SUPPRESSION not in self.used_suppressions):
       logging.error("FAIL! Sanity check failed!")
       retcode = -3
 
     if retcode != 0:
       return retcode
+
     logging.info("PASS: No reports found")
     return 0
 
 if __name__ == '__main__':
   '''For testing only. The TsanAnalyzer class should be imported instead.'''
   retcode = 0
   parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
   parser.add_option("", "--source_dir",
                     help="path to top of source tree for this build"
                          "(used to normalize source paths in baseline)")
 
   (options, args) = parser.parse_args()
   if not args:
     parser.error("no filename specified")
   filenames = args
 
   analyzer = TsanAnalyzer(options.source_dir, use_gdb=True)
   retcode = analyzer.Report(filenames, None)
 
   sys.exit(retcode)
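
Two behavioral notes on this patch set: used_suppressions becomes a collections.defaultdict(int), so accumulating counts no longer needs the explicit membership check, and the sanity suppression is now detected by exact-name membership in used_suppressions (against the full name, including the "(ToolsSanityTest.DataRace)" suffix) instead of the old startswith() scan done while printing. A minimal, self-contained sketch of that accumulation and check, using made-up sample report lines:

import re
from collections import defaultdict

SANITY_TEST_SUPPRESSION = ("ThreadSanitizer sanity test "
                           "(ToolsSanityTest.DataRace)")

# Made-up sample 'used_suppression:' lines, for illustration only.
sample_lines = [
    "used_suppression:      2 bug_12345",
    "used_suppression:      1 " + SANITY_TEST_SUPPRESSION,
]

used_suppressions = defaultdict(int)
for line in sample_lines:
  match = re.search("used_suppression:\s+([0-9]+)\s(.*)", line)
  if match:
    count, supp_name = match.groups()
    # Missing keys default to 0, so the old if/else branch is unnecessary.
    used_suppressions[supp_name] += int(count)

# Sanity check: exact-name membership replaces the old startswith() test.
is_sane = SANITY_TEST_SUPPRESSION in used_suppressions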