OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 # Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 # tsan_analyze.py | 6 # tsan_analyze.py |
7 | 7 |
8 ''' Given a ThreadSanitizer output file, parses errors and uniques them.''' | 8 ''' Given a ThreadSanitizer output file, parses errors and uniques them.''' |
9 | 9 |
10 import gdb_helper | 10 import gdb_helper |
(...skipping 15 matching lines...) |
26 self.address = address | 26 self.address = address |
27 self.binary = binary | 27 self.binary = binary |
28 def __str__(self): | 28 def __str__(self): |
29 global TheAddressTable | 29 global TheAddressTable |
30 file, line = TheAddressTable.GetFileLine(self.binary, self.address) | 30 file, line = TheAddressTable.GetFileLine(self.binary, self.address) |
31 if (file is None) or (line is None): | 31 if (file is None) or (line is None): |
32 return self.raw_line_ | 32 return self.raw_line_ |
33 else: | 33 else: |
34 return self.raw_line_.replace(self.binary, '%s:%s' % (file, line)) | 34 return self.raw_line_.replace(self.binary, '%s:%s' % (file, line)) |
35 | 35 |
36 class TsanAnalyze: | 36 class TsanAnalyzer: |
37 ''' Given a set of ThreadSanitizer output files, parse all the errors out of | 37 ''' Given a set of ThreadSanitizer output files, parse all the errors out of |
38 them, unique them and output the results.''' | 38 them, unique them and output the results.''' |
39 | 39 |
40 LOAD_LIB_RE = re.compile('--[0-9]+-- ([^(:]*) \((0x[0-9a-f]+)\)') | 40 LOAD_LIB_RE = re.compile('--[0-9]+-- ([^(:]*) \((0x[0-9a-f]+)\)') |
41 TSAN_LINE_RE = re.compile('==[0-9]+==\s*[#0-9]+\s*' | 41 TSAN_LINE_RE = re.compile('==[0-9]+==\s*[#0-9]+\s*' |
42 '([0-9A-Fa-fx]+):' | 42 '([0-9A-Fa-fx]+):' |
43 '(?:[^ ]* )*' | 43 '(?:[^ ]* )*' |
44 '([^ :\n]+)' | 44 '([^ :\n]+)' |
45 '') | 45 '') |
46 | 46 |
47 THREAD_CREATION_STR = ("INFO: T.* " | 47 THREAD_CREATION_STR = ("INFO: T.* " |
48 "(has been created by T.* at this point|is program's main thread)") | 48 "(has been created by T.* at this point|is program's main thread)") |
49 | 49 |
50 SANITY_TEST_SUPPRESSION = "ThreadSanitizer sanity test" | 50 SANITY_TEST_SUPPRESSION = "ThreadSanitizer sanity test" |
51 def __init__(self, source_dir, files, use_gdb=False): | 51 def __init__(self, source_dir, use_gdb=False): |
52 '''Reads in a set of files. | 52 '''Sets up an analyzer for the given build. |
53 | 53 |
54 Args: | 54 Args: |
55 source_dir: Path to top of source tree for this build | 55 source_dir: Path to top of source tree for this build |
56 files: A list of filenames. | |
57 ''' | 56 ''' |
58 | 57 |
59 self.use_gdb = use_gdb | 58 self._use_gdb = use_gdb |
60 if use_gdb: | |
61 global TheAddressTable | |
62 TheAddressTable = gdb_helper.AddressTable() | |
63 self.races = [] | |
64 self.used_suppressions = {} | |
65 for file in files: | |
66 self.ParseReportFile(file) | |
67 if self.use_gdb: | |
68 TheAddressTable.ResolveAll() | |
69 | 59 |
70 def ReadLine(self): | 60 def ReadLine(self): |
71 self.line_ = self.cur_fd_.readline() | 61 self.line_ = self.cur_fd_.readline() |
72 self.stack_trace_line_ = None | 62 self.stack_trace_line_ = None |
73 if not self.use_gdb: | 63 if not self._use_gdb: |
74 return | 64 return |
75 global TheAddressTable | 65 global TheAddressTable |
76 match = TsanAnalyze.LOAD_LIB_RE.match(self.line_) | 66 match = TsanAnalyzer.LOAD_LIB_RE.match(self.line_) |
77 if match: | 67 if match: |
78 binary, ip = match.groups() | 68 binary, ip = match.groups() |
79 TheAddressTable.AddBinaryAt(binary, ip) | 69 TheAddressTable.AddBinaryAt(binary, ip) |
80 return | 70 return |
81 match = TsanAnalyze.TSAN_LINE_RE.match(self.line_) | 71 match = TsanAnalyzer.TSAN_LINE_RE.match(self.line_) |
82 if match: | 72 if match: |
83 address, binary_name = match.groups() | 73 address, binary_name = match.groups() |
84 stack_trace_line = _StackTraceLine(self.line_, address, binary_name) | 74 stack_trace_line = _StackTraceLine(self.line_, address, binary_name) |
85 TheAddressTable.Add(stack_trace_line.binary, stack_trace_line.address) | 75 TheAddressTable.Add(stack_trace_line.binary, stack_trace_line.address) |
86 self.stack_trace_line_ = stack_trace_line | 76 self.stack_trace_line_ = stack_trace_line |
87 | 77 |
88 def ReadSection(self): | 78 def ReadSection(self): |
89 result = [self.line_] | 79 result = [self.line_] |
90 if re.search("{{{", self.line_): | 80 if re.search("{{{", self.line_): |
91 while not re.search('}}}', self.line_): | 81 while not re.search('}}}', self.line_): |
92 self.ReadLine() | 82 self.ReadLine() |
93 if self.stack_trace_line_ is None: | 83 if self.stack_trace_line_ is None: |
94 result.append(self.line_) | 84 result.append(self.line_) |
95 else: | 85 else: |
96 result.append(self.stack_trace_line_) | 86 result.append(self.stack_trace_line_) |
97 return result | 87 return result |
98 | 88 |
99 def ParseReportFile(self, filename): | 89 def ParseReportFile(self, filename): |
100 self.cur_fd_ = open(filename, 'r') | 90 self.cur_fd_ = open(filename, 'r') |
101 | 91 |
102 while True: | 92 while True: |
103 # Read race reports. | 93 # Read race reports. |
104 self.ReadLine() | 94 self.ReadLine() |
105 if (self.line_ == ''): | 95 if (self.line_ == ''): |
106 break | 96 break |
107 | 97 |
108 tmp = [] | 98 tmp = [] |
109 while re.search(TsanAnalyze.THREAD_CREATION_STR, self.line_): | 99 while re.search(TsanAnalyzer.THREAD_CREATION_STR, self.line_): |
110 tmp.extend(self.ReadSection()) | 100 tmp.extend(self.ReadSection()) |
111 self.ReadLine() | 101 self.ReadLine() |
112 if re.search("Possible data race", self.line_): | 102 if re.search("Possible data race", self.line_): |
113 tmp.extend(self.ReadSection()) | 103 tmp.extend(self.ReadSection()) |
114 self.races.append(tmp) | 104 self.races.append(tmp) |
115 | 105 |
116 match = re.search(" used_suppression:\s+([0-9]+)\s(.*)", self.line_) | 106 match = re.search(" used_suppression:\s+([0-9]+)\s(.*)", self.line_) |
117 if match: | 107 if match: |
118 count, supp_name = match.groups() | 108 count, supp_name = match.groups() |
119 count = int(count) | 109 count = int(count) |
120 if supp_name in self.used_suppressions: | 110 if supp_name in self.used_suppressions: |
121 self.used_suppressions[supp_name] += count | 111 self.used_suppressions[supp_name] += count |
122 else: | 112 else: |
123 self.used_suppressions[supp_name] = count | 113 self.used_suppressions[supp_name] = count |
124 self.cur_fd_.close() | 114 self.cur_fd_.close() |
125 | 115 |
126 def Report(self, check_sanity=False): | 116 def Report(self, files, check_sanity=False): |
| 117 '''Parses the given report files, prints the results and returns an exit code. |
| 118 files: A list of filenames. |
| 119 ''' |
| 120 global TheAddressTable |
| 121 if self._use_gdb: |
| 122 TheAddressTable = gdb_helper.AddressTable() |
| 123 else: |
| 124 TheAddressTable = None |
| 125 self.races = [] |
| 126 self.used_suppressions = {} |
| 127 for file in files: |
| 128 self.ParseReportFile(file) |
| 129 if self._use_gdb: |
| 130 TheAddressTable.ResolveAll() |
| 131 |
127 is_sane = False | 132 is_sane = False |
128 print "-----------------------------------------------------" | 133 print "-----------------------------------------------------" |
129 print "Suppressions used:" | 134 print "Suppressions used:" |
130 print " count name" | 135 print " count name" |
131 for item in sorted(self.used_suppressions.items(), key=lambda (k,v): (v,k)): | 136 for item in sorted(self.used_suppressions.items(), key=lambda (k,v): (v,k)): |
132 print "%7s %s" % (item[1], item[0]) | 137 print "%7s %s" % (item[1], item[0]) |
133 if item[0].startswith(TsanAnalyze.SANITY_TEST_SUPPRESSION): | 138 if item[0].startswith(TsanAnalyzer.SANITY_TEST_SUPPRESSION): |
134 is_sane = True | 139 is_sane = True |
135 print "-----------------------------------------------------" | 140 print "-----------------------------------------------------" |
136 sys.stdout.flush() | 141 sys.stdout.flush() |
137 | 142 |
138 retcode = 0 | 143 retcode = 0 |
139 if len(self.races) > 0: | 144 if len(self.races) > 0: |
140 logging.error("FAIL! Found %i race reports" % len(self.races)) | 145 logging.error("FAIL! Found %i race reports" % len(self.races)) |
141 for report_list in self.races: | 146 for report_list in self.races: |
142 report = '' | 147 report = '' |
143 for line in report_list: | 148 for line in report_list: |
144 report += str(line) | 149 report += str(line) |
145 logging.error('\n' + report) | 150 logging.error('\n' + report) |
146 retcode = -1 | 151 retcode = -1 |
147 | 152 |
148 # Report tool's insanity even if there were errors. | 153 # Report tool's insanity even if there were errors. |
149 if check_sanity and not is_sane: | 154 if check_sanity and not is_sane: |
150 logging.error("FAIL! Sanity check failed!") | 155 logging.error("FAIL! Sanity check failed!") |
151 retcode = -3 | 156 retcode = -3 |
152 | 157 |
153 if retcode != 0: | 158 if retcode != 0: |
154 return retcode | 159 return retcode |
155 logging.info("PASS: No race reports found") | 160 logging.info("PASS: No race reports found") |
156 return 0 | 161 return 0 |
157 | 162 |
158 if __name__ == '__main__': | 163 if __name__ == '__main__': |
159 '''For testing only. The TsanAnalyze class should be imported instead.''' | 164 '''For testing only. The TsanAnalyzer class should be imported instead.''' |
160 retcode = 0 | 165 retcode = 0 |
161 parser = optparse.OptionParser("usage: %prog [options] <files to analyze>") | 166 parser = optparse.OptionParser("usage: %prog [options] <files to analyze>") |
162 parser.add_option("", "--source_dir", | 167 parser.add_option("", "--source_dir", |
163 help="path to top of source tree for this build" | 168 help="path to top of source tree for this build" |
164 "(used to normalize source paths in baseline)") | 169 "(used to normalize source paths in baseline)") |
165 | 170 |
166 (options, args) = parser.parse_args() | 171 (options, args) = parser.parse_args() |
167 if len(args) == 0: | 172 if len(args) == 0: |
168 parser.error("no filename specified") | 173 parser.error("no filename specified") |
169 filenames = args | 174 filenames = args |
170 | 175 |
171 analyzer = TsanAnalyze(options.source_dir, filenames, use_gdb=True) | 176 analyzer = TsanAnalyzer(options.source_dir, use_gdb=True) |
172 retcode = analyzer.Report() | 177 retcode = analyzer.Report(filenames) |
173 | 178 |
174 sys.exit(retcode) | 179 sys.exit(retcode) |
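
For reference, a minimal caller sketch for the refactored class, assuming the script is importable as tsan_analyze and using placeholder paths and log file names (none of which are part of this CL): the list of files now goes to Report() instead of the constructor, so a single TsanAnalyzer can be reused across several batches of logs.

import sys

from tsan_analyze import TsanAnalyzer  # assumed module name

# Construct once per build; the source dir and log names below are
# illustrative placeholders, not values used by the CL itself.
analyzer = TsanAnalyzer('/path/to/src', use_gdb=False)
retcode = analyzer.Report(['tsan_shard_0.log', 'tsan_shard_1.log'],
                          check_sanity=False)
sys.exit(retcode)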