Chromium Code Reviews
Side by Side Diff: tools/code_coverage/croc.py

Issue 113980: Major refactoring of Croc.... (Closed) Base URL: svn://chrome-svn/chrome/trunk/src/
Patch Set: Created 11 years, 6 months ago
1 #!/usr/bin/python2.4 1 #!/usr/bin/python2.4
2 # 2 #
3 # Copyright 2009, Google Inc. 3 # Copyright 2009, Google Inc.
4 # All rights reserved. 4 # All rights reserved.
5 # 5 #
6 # Redistribution and use in source and binary forms, with or without 6 # Redistribution and use in source and binary forms, with or without
7 # modification, are permitted provided that the following conditions are 7 # modification, are permitted provided that the following conditions are
8 # met: 8 # met:
9 # 9 #
10 # * Redistributions of source code must retain the above copyright 10 # * Redistributions of source code must retain the above copyright
(...skipping 13 matching lines...)
24 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 31
32 """Crocodile - compute coverage numbers for Chrome coverage dashboard.""" 32 """Crocodile - compute coverage numbers for Chrome coverage dashboard."""
33 33
34 import optparse
34 import os 35 import os
35 import re 36 import re
36 import sys 37 import sys
37 from optparse import OptionParser 38 import croc_html
39 import croc_scan
38 40
39 41
40 class CoverageError(Exception): 42 class CrocError(Exception):
41 """Coverage error.""" 43 """Coverage error."""
42 44
43 class CoverageStatError(CoverageError): 45
46 class CrocStatError(CrocError):
44 """Error evaluating coverage stat.""" 47 """Error evaluating coverage stat."""
45 48
46 #------------------------------------------------------------------------------ 49 #------------------------------------------------------------------------------
47 50
48 51
49 class CoverageStats(dict): 52 class CoverageStats(dict):
50 """Coverage statistics.""" 53 """Coverage statistics."""
51 54
52 def Add(self, coverage_stats): 55 def Add(self, coverage_stats):
53 """Adds a contribution from another coverage stats dict. 56 """Adds a contribution from another coverage stats dict.
54 57
55 Args: 58 Args:
56 coverage_stats: Statistics to add to this one. 59 coverage_stats: Statistics to add to this one.
57 """ 60 """
58 for k, v in coverage_stats.iteritems(): 61 for k, v in coverage_stats.iteritems():
59 if k in self: 62 if k in self:
60 self[k] = self[k] + v 63 self[k] += v
61 else: 64 else:
62 self[k] = v 65 self[k] = v
63 66
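
A quick sketch of how CoverageStats.Add() merges counters (assumes the module is importable as croc; the numbers are made up):

    from croc import CoverageStats

    total = CoverageStats(lines_executable=10, lines_covered=4)
    total.Add({'lines_executable': 5, 'lines_covered': 5, 'files_executable': 1})
    # total now holds lines_executable=15, lines_covered=9, files_executable=1
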
64 #------------------------------------------------------------------------------ 67 #------------------------------------------------------------------------------
65 68
66 69
67 class CoveredFile(object): 70 class CoveredFile(object):
68 """Information about a single covered file.""" 71 """Information about a single covered file."""
69 72
70 def __init__(self, filename, group, language): 73 def __init__(self, filename, **kwargs):
71 """Constructor. 74 """Constructor.
72 75
73 Args: 76 Args:
74 filename: Full path to file, '/'-delimited. 77 filename: Full path to file, '/'-delimited.
75 group: Group file belongs to. 78 kwargs: Keyword args are attributes for file.
76 language: Language for file.
77 """ 79 """
78 self.filename = filename 80 self.filename = filename
79 self.group = group 81 self.attrs = dict(kwargs)
80 self.language = language 82
83 # Move these to attrs?
84 self.local_path = None # Local path to file
85 self.in_lcov = False # Is file instrumented?
81 86
82 # No coverage data for file yet 87 # No coverage data for file yet
83 self.lines = {} # line_no -> None=executable, 0=instrumented, 1=covered 88 self.lines = {} # line_no -> None=executable, 0=instrumented, 1=covered
84 self.stats = CoverageStats() 89 self.stats = CoverageStats()
85 90
86 def UpdateCoverage(self): 91 def UpdateCoverage(self):
87 """Updates the coverage summary based on covered lines.""" 92 """Updates the coverage summary based on covered lines."""
88 exe = instr = cov = 0 93 exe = instr = cov = 0
89 for l in self.lines.itervalues(): 94 for l in self.lines.itervalues():
90 exe += 1 95 exe += 1
91 if l is not None: 96 if l is not None:
92 instr += 1 97 instr += 1
93 if l == 1: 98 if l == 1:
94 cov += 1 99 cov += 1
95 100
96 # Add stats that always exist 101 # Add stats that always exist
97 self.stats = CoverageStats(lines_executable=exe, 102 self.stats = CoverageStats(lines_executable=exe,
98 lines_instrumented=instr, 103 lines_instrumented=instr,
99 lines_covered=cov, 104 lines_covered=cov,
100 files_executable=1) 105 files_executable=1)
101 106
102 # Add conditional stats 107 # Add conditional stats
103 if cov: 108 if cov:
104 self.stats['files_covered'] = 1 109 self.stats['files_covered'] = 1
105 if instr: 110 if instr or self.in_lcov:
106 self.stats['files_instrumented'] = 1 111 self.stats['files_instrumented'] = 1
107 112
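
An illustrative use of the new keyword-attribute constructor and UpdateCoverage() (the filename and attributes here are hypothetical):

    f = CoveredFile('_/src/foo.cc', group='source', language='C++')
    f.lines[10] = None   # executable, but not instrumented
    f.lines[11] = 0      # instrumented, but never executed
    f.lines[12] = 1      # covered
    f.UpdateCoverage()
    # f.stats: lines_executable=3, lines_instrumented=2, lines_covered=1,
    #          files_executable=1, files_instrumented=1, files_covered=1
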
108
109 #------------------------------------------------------------------------------ 113 #------------------------------------------------------------------------------
110 114
111 115
112 class CoveredDir(object): 116 class CoveredDir(object):
113 """Information about a directory containing covered files.""" 117 """Information about a directory containing covered files."""
114 118
115 def __init__(self, dirpath): 119 def __init__(self, dirpath):
116 """Constructor. 120 """Constructor.
117 121
118 Args: 122 Args:
119 dirpath: Full path of directory, '/'-delimited. 123 dirpath: Full path of directory, '/'-delimited.
120 """ 124 """
121 self.dirpath = dirpath 125 self.dirpath = dirpath
122 126
123 # List of covered files directly in this dir, indexed by filename (not 127 # List of covered files directly in this dir, indexed by filename (not
124 # full path) 128 # full path)
125 self.files = {} 129 self.files = {}
126 130
127 # List of subdirs, indexed by filename (not full path) 131 # List of subdirs, indexed by filename (not full path)
128 self.subdirs = {} 132 self.subdirs = {}
129 133
130 # Dict of CoverageStats objects summarizing all children, indexed by group 134 # Dict of CoverageStats objects summarizing all children, indexed by group
131 self.stats_by_group = {'all':CoverageStats()} 135 self.stats_by_group = {'all': CoverageStats()}
132 # TODO: by language 136 # TODO: by language
133 137
134 def GetTree(self, indent=''): 138 def GetTree(self, indent=''):
135 """Recursively gets stats for the directory and its children. 139 """Recursively gets stats for the directory and its children.
136 140
137 Args: 141 Args:
138 indent: indent prefix string. 142 indent: indent prefix string.
139 143
140 Returns: 144 Returns:
141 The tree as a string. 145 The tree as a string.
142 """ 146 """
143 dest = [] 147 dest = []
144 148
145 # Compile all groupstats 149 # Compile all groupstats
146 groupstats = [] 150 groupstats = []
147 for group in sorted(self.stats_by_group): 151 for group in sorted(self.stats_by_group):
148 s = self.stats_by_group[group] 152 s = self.stats_by_group[group]
149 if not s.get('lines_executable'): 153 if not s.get('lines_executable'):
150 continue # Skip groups with no executable lines 154 continue # Skip groups with no executable lines
151 groupstats.append('%s:%d/%d/%d' % ( 155 groupstats.append('%s:%d/%d/%d' % (
152 group, s.get('lines_covered', 0), 156 group, s.get('lines_covered', 0),
153 s.get('lines_instrumented', 0), 157 s.get('lines_instrumented', 0),
154 s.get('lines_executable', 0))) 158 s.get('lines_executable', 0)))
155 159
156 outline = '%s%-30s %s' % (indent, 160 outline = '%s%-30s %s' % (indent,
157 self.dirpath + '/', ' '.join(groupstats)) 161 os.path.split(self.dirpath)[1] + '/',
162 ' '.join(groupstats))
158 dest.append(outline.rstrip()) 163 dest.append(outline.rstrip())
159 164
160 for d in sorted(self.subdirs): 165 for d in sorted(self.subdirs):
161 dest.append(self.subdirs[d].GetTree(indent=indent + ' ')) 166 dest.append(self.subdirs[d].GetTree(indent=indent + ' '))
162 167
163 return '\n'.join(dest) 168 return '\n'.join(dest)
164 169
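
For reference, GetTree() emits one line per directory in the form 'dirname/ group:covered/instrumented/executable', indented two spaces per level. A made-up example of the output:

    /                              all:42/50/120 source:30/35/90 test:12/15/30
      src/                         all:30/35/90 source:30/35/90
      test/                        all:12/15/30 test:12/15/30
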
165 #------------------------------------------------------------------------------ 170 #------------------------------------------------------------------------------
166 171
167 172
168 class Coverage(object): 173 class Coverage(object):
169 """Code coverage for a group of files.""" 174 """Code coverage for a group of files."""
170 175
171 def __init__(self): 176 def __init__(self):
172 """Constructor.""" 177 """Constructor."""
173 self.files = {} # Map filename --> CoverageFile 178 self.files = {} # Map filename --> CoverageFile
174 self.root_dirs = [] # (root, altname) 179 self.root_dirs = [] # (root, altname)
175 self.rules = [] # (regexp, include, group, language) 180 self.rules = [] # (regexp, dict of RHS attrs)
176 self.tree = CoveredDir('') 181 self.tree = CoveredDir('')
177 self.print_stats = [] # Dicts of args to PrintStat() 182 self.print_stats = [] # Dicts of args to PrintStat()
178 183
179 self.add_files_walk = os.walk # Walk function for AddFiles() 184 # Functions which need to be replaced for unit testing
180 185 self.add_files_walk = os.walk # Walk function for AddFiles()
181 # Must specify subdir rule, or AddFiles() won't find any files because it 186 self.scan_file = croc_scan.ScanFile # Source scanner for AddFiles()
182 # will prune out all the subdirs. Since subdirs never match any code,
183 # they won't be reported in other stats, so this is ok.
184 self.AddRule('.*/$', language='subdir')
185
186 187
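
Since add_files_walk and scan_file are plain attributes, a unit test can swap in stubs before calling AddFiles(); the fakes below are purely illustrative:

    def FakeWalk(src_dir):
      yield (src_dir, [], ['foo.cc'])

    def FakeScan(filename, language):
      return [1, 2, 3]      # pretend every file has three executable lines

    cov = Coverage()
    cov.add_files_walk = FakeWalk
    cov.scan_file = FakeScan
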
187 def CleanupFilename(self, filename): 188 def CleanupFilename(self, filename):
188 """Cleans up a filename. 189 """Cleans up a filename.
189 190
190 Args: 191 Args:
191 filename: Input filename. 192 filename: Input filename.
192 193
193 Returns: 194 Returns:
194 The cleaned up filename. 195 The cleaned up filename.
195 196
196 Changes all path separators to '/'. 197 Changes all path separators to '/'.
197 Makes relative paths (those starting with '../' or './') absolute. 198 Makes relative paths (those starting with '../' or './') absolute.
198 Replaces all instances of root dirs with alternate names. 199 Replaces all instances of root dirs with alternate names.
199 """ 200 """
200 # Change path separators 201 # Change path separators
201 filename = filename.replace('\\', '/') 202 filename = filename.replace('\\', '/')
202 203
203 # If path is relative, make it absolute 204 # If path is relative, make it absolute
204 # TODO: Perhaps we should default to relative instead, and only understand 205 # TODO: Perhaps we should default to relative instead, and only understand
205 # absolute to be files starting with '\', '/', or '[A-Za-z]:'? 206 # absolute to be files starting with '\', '/', or '[A-Za-z]:'?
206 if filename.split('/')[0] in ('.', '..'): 207 if filename.split('/')[0] in ('.', '..'):
207 filename = os.path.abspath(filename).replace('\\', '/') 208 filename = os.path.abspath(filename).replace('\\', '/')
208 209
209 # Replace alternate roots 210 # Replace alternate roots
210 for root, alt_name in self.root_dirs: 211 for root, alt_name in self.root_dirs:
211 filename = re.sub('^' + re.escape(root) + '(?=(/|$))', 212 filename = re.sub('^' + re.escape(root) + '(?=(/|$))',
212 alt_name, filename) 213 alt_name, filename)
213 return filename 214 return filename
214 215
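
An example of the path normalization and root replacement (paths are made up):

    cov = Coverage()
    cov.AddRoot('/home/build/chrome/src', alt_name='src')
    cov.CleanupFilename('/home/build/chrome/src\\base\\file.cc')
    # -> 'src/base/file.cc'
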
215 def ClassifyFile(self, filename): 216 def ClassifyFile(self, filename):
216 """Applies rules to a filename, to see if we care about it. 217 """Applies rules to a filename, to see if we care about it.
217 218
218 Args: 219 Args:
219 filename: Input filename. 220 filename: Input filename.
220 221
221 Returns: 222 Returns:
222 (None, None) if the file is not included or has no group or has no 223 A dict of attributes for the file, accumulated from the right hand sides
223 language. Otherwise, a 2-tuple containing: 224 of rules which fired.
224 The group for the file (for example, 'source' or 'test').
225 The language of the file.
226 """ 225 """
227 include = False 226 attrs = {}
228 group = None
229 language = None
230 227
231 # Process all rules 228 # Process all rules
232 for regexp, rule_include, rule_group, rule_language in self.rules: 229 for regexp, rhs_dict in self.rules:
233 if regexp.match(filename): 230 if regexp.match(filename):
234 # include/exclude source 231 attrs.update(rhs_dict)
235 if rule_include is not None:
236 include = rule_include
237 if rule_group is not None:
238 group = rule_group
239 if rule_language is not None:
240 language = rule_language
241 232
242 # TODO: Should have a debug mode which prints files which aren't excluded 233 return attrs
243 # and why (explicitly excluded, no type, no language, etc.)
244
245 # TODO: Files can belong to multiple groups? 234 # TODO: Files can belong to multiple groups?
246 # (test/source) 235 # (test/source)
247 # (mac/pc/win) 236 # (mac/pc/win)
248 # (media_test/all_tests) 237 # (media_test/all_tests)
249 # (small/med/large) 238 # (small/med/large)
250 # How to handle that? 239 # How to handle that?
251 240
252 # Return classification if the file is included and has a group and 241 def AddRoot(self, root_path, alt_name='_'):
253 # language
254 if include and group and language:
255 return group, language
256 else:
257 return None, None
258
259 def AddRoot(self, root_path, alt_name='#'):
260 """Adds a root directory. 242 """Adds a root directory.
261 243
262 Args: 244 Args:
263 root_path: Root directory to add. 245 root_path: Root directory to add.
264 alt_name: If specified, name of root dir 246 alt_name: If specified, name of root dir. Otherwise, defaults to '_'.
247
248 Raises:
249 ValueError: alt_name was blank.
265 """ 250 """
251 # Alt name must not be blank. If it were, there wouldn't be a way to
252 # reverse-resolve from a root-replaced path back to the local path, since
253 # '' would always match the beginning of the candidate filename, resulting
254 # in an infinite loop.
255 if not alt_name:
256 raise ValueError('AddRoot alt_name must not be blank.')
257
266 # Clean up root path based on existing rules 258 # Clean up root path based on existing rules
267 self.root_dirs.append([self.CleanupFilename(root_path), alt_name]) 259 self.root_dirs.append([self.CleanupFilename(root_path), alt_name])
268 260
269 def AddRule(self, path_regexp, include=None, group=None, language=None): 261 def AddRule(self, path_regexp, **kwargs):
270 """Adds a rule. 262 """Adds a rule.
271 263
272 Args: 264 Args:
273 path_regexp: Regular expression to match for filenames. These are 265 path_regexp: Regular expression to match for filenames. These are
274 matched after root directory replacement. 266 matched after root directory replacement.
267 kwargs: Keyword arguments are attributes to set if the rule applies.
268
269 Keyword arguments currently supported:
275 include: If True, includes matches; if False, excludes matches. Ignored 270 include: If True, includes matches; if False, excludes matches. Ignored
276 if None. 271 if None.
277 group: If not None, sets group to apply to matches. 272 group: If not None, sets group to apply to matches.
278 language: If not None, sets file language to apply to matches. 273 language: If not None, sets file language to apply to matches.
279 """ 274 """
275
280 # Compile regexp ahead of time 276 # Compile regexp ahead of time
281 self.rules.append([re.compile(path_regexp), include, group, language]) 277 self.rules.append([re.compile(path_regexp), dict(kwargs)])
282 278
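
A sketch of how rules accumulate: ClassifyFile() applies every matching rule in order, so later rules override attributes set by earlier ones (the rules below are hypothetical):

    cov = Coverage()
    cov.AddRule('^src/', include=1, group='source', language='C++')
    cov.AddRule(r'.*_(test|unittest)\.', group='test')
    cov.AddRule('.*/third_party/', include=0)
    cov.ClassifyFile('src/base/foo_unittest.cc')
    # -> {'include': 1, 'group': 'test', 'language': 'C++'}
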
283 def GetCoveredFile(self, filename, add=False): 279 def GetCoveredFile(self, filename, add=False):
284 """Gets the CoveredFile object for the filename. 280 """Gets the CoveredFile object for the filename.
285 281
286 Args: 282 Args:
287 filename: Name of file to find. 283 filename: Name of file to find.
288 add: If True, will add the file if it's not present. This applies the 284 add: If True, will add the file if it's not present. This applies the
289 transformations from AddRoot() and AddRule(), and only adds the file 285 transformations from AddRoot() and AddRule(), and only adds the file
290 if a rule includes it, and it has a group and language. 286 if a rule includes it, and it has a group and language.
291 287
292 Returns: 288 Returns:
293 The matching CoveredFile object, or None if not present. 289 The matching CoveredFile object, or None if not present.
294 """ 290 """
295 # Clean filename 291 # Clean filename
296 filename = self.CleanupFilename(filename) 292 filename = self.CleanupFilename(filename)
297 293
298 # Check for existing match 294 # Check for existing match
299 if filename in self.files: 295 if filename in self.files:
300 return self.files[filename] 296 return self.files[filename]
301 297
302 # File isn't one we know about. If we can't add it, give up. 298 # File isn't one we know about. If we can't add it, give up.
303 if not add: 299 if not add:
304 return None 300 return None
305 301
306 # Check rules to see if file can be added 302 # Check rules to see if file can be added. Files must be included and
307 group, language = self.ClassifyFile(filename) 303 # have a group and language.
308 if not group: 304 attrs = self.ClassifyFile(filename)
305 if not (attrs.get('include')
306 and attrs.get('group')
307 and attrs.get('language')):
309 return None 308 return None
310 309
311 # Add the file 310 # Add the file
312 f = CoveredFile(filename, group, language) 311 f = CoveredFile(filename, **attrs)
313 self.files[filename] = f 312 self.files[filename] = f
314 313
315 # Return the newly covered file 314 # Return the newly covered file
316 return f 315 return f
317 316
317 def RemoveCoveredFile(self, cov_file):
318 """Removes the file from the covered file list.
319
320 Args:
321 cov_file: A file object returned by GetCoveredFile().
322 """
323 self.files.pop(cov_file.filename)
324
318 def ParseLcovData(self, lcov_data): 325 def ParseLcovData(self, lcov_data):
319 """Adds coverage from LCOV-formatted data. 326 """Adds coverage from LCOV-formatted data.
320 327
321 Args: 328 Args:
322 lcov_data: An iterable returning lines of data in LCOV format. For 329 lcov_data: An iterable returning lines of data in LCOV format. For
323 example, a file or list of strings. 330 example, a file or list of strings.
324 """ 331 """
325 cov_file = None 332 cov_file = None
326 cov_lines = None 333 cov_lines = None
327 for line in lcov_data: 334 for line in lcov_data:
328 line = line.strip() 335 line = line.strip()
329 if line.startswith('SF:'): 336 if line.startswith('SF:'):
330 # Start of data for a new file; payload is filename 337 # Start of data for a new file; payload is filename
331 cov_file = self.GetCoveredFile(line[3:], add=True) 338 cov_file = self.GetCoveredFile(line[3:], add=True)
332 if cov_file: 339 if cov_file:
333 cov_lines = cov_file.lines 340 cov_lines = cov_file.lines
341 cov_file.in_lcov = True # File was instrumented
334 elif not cov_file: 342 elif not cov_file:
335 # Inside data for a file we don't care about - so skip it 343 # Inside data for a file we don't care about - so skip it
336 pass 344 pass
337 elif line.startswith('DA:'): 345 elif line.startswith('DA:'):
338 # Data point - that is, an executable line in current file 346 # Data point - that is, an executable line in current file
339 line_no, is_covered = map(int, line[3:].split(',')) 347 line_no, is_covered = map(int, line[3:].split(','))
340 if is_covered: 348 if is_covered:
341 # Line is covered 349 # Line is covered
342 cov_lines[line_no] = 1 350 cov_lines[line_no] = 1
343 elif cov_lines.get(line_no) != 1: 351 elif cov_lines.get(line_no) != 1:
(...skipping 21 matching lines...)
365 373
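
A minimal, made-up LCOV fragment, given a Coverage instance cov whose rules include the file; ParseLcovData() accepts any iterable of lines, so a list of strings works as well as an open file:

    cov.ParseLcovData([
        'SF:/home/build/chrome/src/base/foo.cc',
        'DA:10,1',             # line 10 executed
        'DA:11,0',             # line 11 instrumented but never executed
        'end_of_record',
    ])
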
366 def GetStat(self, stat, group='all', default=None): 374 def GetStat(self, stat, group='all', default=None):
367 """Gets a statistic from the coverage object. 375 """Gets a statistic from the coverage object.
368 376
369 Args: 377 Args:
370 stat: Statistic to get. May also be an evaluatable python expression, 378 stat: Statistic to get. May also be an evaluatable python expression,
371 using the stats. For example, 'stat1 - stat2'. 379 using the stats. For example, 'stat1 - stat2'.
372 group: File group to match; if 'all', matches all groups. 380 group: File group to match; if 'all', matches all groups.
373 default: Value to return if there was an error evaluating the stat. For 381 default: Value to return if there was an error evaluating the stat. For
374 example, if the stat does not exist. If None, raises 382 example, if the stat does not exist. If None, raises
375 CoverageStatError. 383 CrocStatError.
376 384
377 Returns: 385 Returns:
378 The evaluated stat, or None if error. 386 The evaluated stat, or None if error.
379 387
380 Raises: 388 Raises:
381 CoverageStatError: Error evaluating stat. 389 CrocStatError: Error evaluating stat.
382 """ 390 """
383 # TODO: specify a subdir to get the stat from, then walk the tree to 391 # TODO: specify a subdir to get the stat from, then walk the tree to
384 # print the stats from just that subdir 392 # print the stats from just that subdir
385 393
386 # Make sure the group exists 394 # Make sure the group exists
387 if group not in self.tree.stats_by_group: 395 if group not in self.tree.stats_by_group:
388 if default is None: 396 if default is None:
389 raise CoverageStatError('Group %r not found.' % group) 397 raise CrocStatError('Group %r not found.' % group)
390 else: 398 else:
391 return default 399 return default
392 400
393 stats = self.tree.stats_by_group[group] 401 stats = self.tree.stats_by_group[group]
394 try: 402 try:
395 return eval(stat, {'__builtins__':{'S':self.GetStat}}, stats) 403 return eval(stat, {'__builtins__': {'S': self.GetStat}}, stats)
396 except Exception, e: 404 except Exception, e:
397 if default is None: 405 if default is None:
398 raise CoverageStatError('Error evaluating stat %r: %s' % (stat, e)) 406 raise CrocStatError('Error evaluating stat %r: %s' % (stat, e))
399 else: 407 else:
400 return default 408 return default
401 409
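
Illustrative GetStat() calls on a Coverage instance cov after UpdateTreeStats(); the stat names exist in CoverageStats, and passing a non-None default suppresses CrocStatError:

    covered = cov.GetStat('lines_covered', group='source', default=0)
    percent = cov.GetStat('100.0 * lines_covered / lines_instrumented',
                          default=0)
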
402 def PrintStat(self, stat, format=None, outfile=sys.stdout, **kwargs): 410 def PrintStat(self, stat, format=None, outfile=sys.stdout, **kwargs):
403 """Prints a statistic from the coverage object. 411 """Prints a statistic from the coverage object.
404 412
405 Args: 413 Args:
406 stat: Statistic to get. May also be an evaluatable python expression, 414 stat: Statistic to get. May also be an evaluatable python expression,
407 using the stats. For example, 'stat1 - stat2'. 415 using the stats. For example, 'stat1 - stat2'.
408 format: Format string to use when printing stat. If None, prints the 416 format: Format string to use when printing stat. If None, prints the
(...skipping 10 matching lines...)
419 def AddFiles(self, src_dir): 427 def AddFiles(self, src_dir):
420 """Adds files to coverage information. 428 """Adds files to coverage information.
421 429
422 LCOV files only contain files which are compiled and instrumented as part 430 LCOV files only contain files which are compiled and instrumented as part
423 of running coverage. This function finds missing files and adds them. 431 of running coverage. This function finds missing files and adds them.
424 432
425 Args: 433 Args:
426 src_dir: Directory on disk at which to start search. May be a relative 434 src_dir: Directory on disk at which to start search. May be a relative
427 path on disk starting with '.' or '..', or an absolute path, or a 435 path on disk starting with '.' or '..', or an absolute path, or a
428 path relative to an alt_name for one of the roots 436 path relative to an alt_name for one of the roots
429 (for example, '#/src'). If the alt_name matches more than one root, 437 (for example, '_/src'). If the alt_name matches more than one root,
430 all matches will be attempted. 438 all matches will be attempted.
431 439
432 Note that dirs not underneath one of the root dirs and covered by an 440 Note that dirs not underneath one of the root dirs and covered by an
433 inclusion rule will be ignored. 441 inclusion rule will be ignored.
434 """ 442 """
435 # Check for root dir alt_names in the path and replace with the actual 443 # Check for root dir alt_names in the path and replace with the actual
436 # root dirs, then recurse. 444 # root dirs, then recurse.
437 found_root = False 445 found_root = False
438 for root, alt_name in self.root_dirs: 446 for root, alt_name in self.root_dirs:
439 replaced_root = re.sub('^' + re.escape(alt_name) + '(?=(/|$))', root, 447 replaced_root = re.sub('^' + re.escape(alt_name) + '(?=(/|$))', root,
440 src_dir) 448 src_dir)
441 if replaced_root != src_dir: 449 if replaced_root != src_dir:
442 found_root = True 450 found_root = True
443 self.AddFiles(replaced_root) 451 self.AddFiles(replaced_root)
444 if found_root: 452 if found_root:
445 return # Replaced an alt_name with a root_dir, so already recursed. 453 return # Replaced an alt_name with a root_dir, so already recursed.
446 454
447 for (dirpath, dirnames, filenames) in self.add_files_walk(src_dir): 455 for (dirpath, dirnames, filenames) in self.add_files_walk(src_dir):
448 # Make a copy of the dirnames list so we can modify the original to 456 # Make a copy of the dirnames list so we can modify the original to
449 # prune subdirs we don't need to walk. 457 # prune subdirs we don't need to walk.
450 for d in list(dirnames): 458 for d in list(dirnames):
451 # Add trailing '/' to directory names so dir-based regexps can match 459 # Add trailing '/' to directory names so dir-based regexps can match
452 # '/' instead of needing to specify '(/|$)'. 460 # '/' instead of needing to specify '(/|$)'.
453 dpath = self.CleanupFilename(dirpath + '/' + d) + '/' 461 dpath = self.CleanupFilename(dirpath + '/' + d) + '/'
454 group, language = self.ClassifyFile(dpath) 462 attrs = self.ClassifyFile(dpath)
455 if not group: 463 if not attrs.get('include'):
456 # Directory has been excluded, so don't traverse it 464 # Directory has been excluded, so don't traverse it
457 # TODO: Document the slight weirdness caused by this: If you 465 # TODO: Document the slight weirdness caused by this: If you
458 # AddFiles('./A'), and the rules include 'A/B/C/D' but not 'A/B', 466 # AddFiles('./A'), and the rules include 'A/B/C/D' but not 'A/B',
459 # then it won't recurse into './A/B' so won't find './A/B/C/D'. 467 # then it won't recurse into './A/B' so won't find './A/B/C/D'.
460 # Workarounds are to AddFiles('./A/B/C/D') or AddFiles('./A/B/C'). 468 # Workarounds are to AddFiles('./A/B/C/D') or AddFiles('./A/B/C').
461 # The latter works because it explicitly walks the contents of the 469 # The latter works because it explicitly walks the contents of the
462 # path passed to AddFiles(), so it finds './A/B/C/D'. 470 # path passed to AddFiles(), so it finds './A/B/C/D'.
463 dirnames.remove(d) 471 dirnames.remove(d)
464 472
465 for f in filenames: 473 for f in filenames:
466 covf = self.GetCoveredFile(dirpath + '/' + f, add=True) 474 local_path = dirpath + '/' + f
467 # TODO: scan files for executable lines. Add these to the file as 475
468 # 'executable', but not 'instrumented' or 'covered'. 476 covf = self.GetCoveredFile(local_path, add=True)
469 # TODO: if a file has no executable lines, don't add it. 477 if not covf:
470 if covf: 478 continue
479
480 # Save where we found the file, for generating line-by-line HTML output
481 covf.local_path = local_path
482
483 if covf.in_lcov:
484 # File already instrumented and doesn't need to be scanned
485 continue
486
487 if not covf.attrs.get('add_if_missing', 1):
488 # Not allowed to add the file
489 self.RemoveCoveredFile(covf)
490 continue
491
492 # Scan file to find potentially-executable lines
493 lines = self.scan_file(covf.local_path, covf.attrs.get('language'))
494 if lines:
495 for l in lines:
496 covf.lines[l] = None
471 covf.UpdateCoverage() 497 covf.UpdateCoverage()
498 else:
499 # File has no executable lines, so don't count it
500 self.RemoveCoveredFile(covf)
472 501
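
For example, with the default alt_name an added root can be referenced as '_' when adding files (the paths below are hypothetical):

    cov.AddRoot('/home/build/chrome/src')   # alt_name defaults to '_'
    cov.AddFiles('_/base')                  # walks /home/build/chrome/src/base
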
473 def AddConfig(self, config_data, lcov_queue=None, addfiles_queue=None): 502 def AddConfig(self, config_data, lcov_queue=None, addfiles_queue=None):
474 """Adds JSON-ish config data. 503 """Adds JSON-ish config data.
475 504
476 Args: 505 Args:
477 config_data: Config data string. 506 config_data: Config data string.
478 lcov_queue: If not None, object to append lcov_files to instead of 507 lcov_queue: If not None, object to append lcov_files to instead of
479 parsing them immediately. 508 parsing them immediately.
480 addfiles_queue: If not None, object to append add_files to instead of 509 addfiles_queue: If not None, object to append add_files to instead of
481 processing them immediately. 510 processing them immediately.
482 """ 511 """
483 # TODO: All manner of error checking 512 # TODO: All manner of error checking
484 cfg = eval(config_data, {'__builtins__':{}}, {}) 513 cfg = eval(config_data, {'__builtins__': {}}, {})
485 514
486 for rootdict in cfg.get('roots', []): 515 for rootdict in cfg.get('roots', []):
487 self.AddRoot(rootdict['root'], alt_name=rootdict.get('altname', '#')) 516 self.AddRoot(rootdict['root'], alt_name=rootdict.get('altname', '_'))
488 517
489 for ruledict in cfg.get('rules', []): 518 for ruledict in cfg.get('rules', []):
490 self.AddRule(ruledict['regexp'], 519 regexp = ruledict.pop('regexp')
491 include=ruledict.get('include'), 520 self.AddRule(regexp, **ruledict)
492 group=ruledict.get('group'),
493 language=ruledict.get('language'))
494 521
495 for add_lcov in cfg.get('lcov_files', []): 522 for add_lcov in cfg.get('lcov_files', []):
496 if lcov_queue is not None: 523 if lcov_queue is not None:
497 lcov_queue.append(add_lcov) 524 lcov_queue.append(add_lcov)
498 else: 525 else:
499 self.ParseLcovFile(add_lcov) 526 self.ParseLcovFile(add_lcov)
500 527
501 for add_path in cfg.get('add_files', []): 528 for add_path in cfg.get('add_files', []):
502 if addfiles_queue is not None: 529 if addfiles_queue is not None:
503 addfiles_queue.append(add_path) 530 addfiles_queue.append(add_path)
504 else: 531 else:
505 self.AddFiles(add_path) 532 self.AddFiles(add_path)
506 533
507 self.print_stats += cfg.get('print_stats', []) 534 self.print_stats += cfg.get('print_stats', [])
508 535
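
A made-up configuration in the JSON-ish format AddConfig() expects; the keys mirror the cfg.get() calls above, and the file names are placeholders:

    cov.AddConfig("""{
      'roots': [{'root': '/home/build/chrome/src', 'altname': 'src'}],
      'rules': [
        {'regexp': '^src/', 'include': 1, 'group': 'source', 'language': 'C++'},
        {'regexp': '.*/third_party/', 'include': 0},
      ],
      'lcov_files': ['app_coverage.lcov'],
      'add_files': ['src/base'],
      'print_stats': [{'stat': 'lines_covered'}],
    }""")
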
509 def ParseConfig(self, filename, **kwargs): 536 def ParseConfig(self, filename, **kwargs):
510 """Parses a configuration file. 537 """Parses a configuration file.
511 538
512 Args: 539 Args:
513 filename: Config filename. 540 filename: Config filename.
541 kwargs: Additional parameters to pass to AddConfig().
514 """ 542 """
515 # TODO: All manner of error checking 543 # TODO: All manner of error checking
516 f = None 544 f = None
517 try: 545 try:
518 f = open(filename, 'rt') 546 f = open(filename, 'rt')
519 # Need to strip CR's from CRLF-terminated lines or posix systems can't 547 # Need to strip CR's from CRLF-terminated lines or posix systems can't
520 # eval the data. 548 # eval the data.
521 config_data = f.read().replace('\r\n', '\n') 549 config_data = f.read().replace('\r\n', '\n')
522 # TODO: some sort of include syntax. Needs to be done at string-time 550 # TODO: some sort of include syntax.
523 # rather than at eval()-time, so that it's possible to include parts of 551 #
524 # dicts. Path from a file to its include should be relative to the dir 552 # Needs to be done at string-time rather than at eval()-time, so that
525 # containing the file. 553 # it's possible to include parts of dicts. Path from a file to its
554 # include should be relative to the dir containing the file.
555 #
556 # Or perhaps it could be done after eval. In that case, there'd be an
557 # 'include' section with a list of files to include. Those would be
558 # eval()'d and recursively pre- or post-merged with the including file.
559 #
560 # Or maybe just don't worry about it, since multiple configs can be
561 # specified on the command line.
526 self.AddConfig(config_data, **kwargs) 562 self.AddConfig(config_data, **kwargs)
527 finally: 563 finally:
528 if f: 564 if f:
529 f.close() 565 f.close()
530 566
531 def UpdateTreeStats(self): 567 def UpdateTreeStats(self):
532 """Recalculates the tree stats from the currently covered files. 568 """Recalculates the tree stats from the currently covered files.
533 569
534 Also calculates coverage summary for files.""" 570 Also calculates coverage summary for files.
571 """
535 self.tree = CoveredDir('') 572 self.tree = CoveredDir('')
536 for cov_file in self.files.itervalues(): 573 for cov_file in self.files.itervalues():
537 # Add the file to the tree 574 # Add the file to the tree
538 # TODO: Don't really need to create the tree unless we're creating HTML
539 fdirs = cov_file.filename.split('/') 575 fdirs = cov_file.filename.split('/')
540 parent = self.tree 576 parent = self.tree
541 ancestors = [parent] 577 ancestors = [parent]
542 for d in fdirs[:-1]: 578 for d in fdirs[:-1]:
543 if d not in parent.subdirs: 579 if d not in parent.subdirs:
544 parent.subdirs[d] = CoveredDir(d) 580 if parent.dirpath:
581 parent.subdirs[d] = CoveredDir(parent.dirpath + '/' + d)
582 else:
583 parent.subdirs[d] = CoveredDir(d)
545 parent = parent.subdirs[d] 584 parent = parent.subdirs[d]
546 ancestors.append(parent) 585 ancestors.append(parent)
547 # Final subdir actually contains the file 586 # Final subdir actually contains the file
548 parent.files[fdirs[-1]] = cov_file 587 parent.files[fdirs[-1]] = cov_file
549 588
550 # Now add file's contribution to coverage by dir 589 # Now add file's contribution to coverage by dir
551 for a in ancestors: 590 for a in ancestors:
552 # Add to 'all' group 591 # Add to 'all' group
553 a.stats_by_group['all'].Add(cov_file.stats) 592 a.stats_by_group['all'].Add(cov_file.stats)
554 593
555 # Add to group file belongs to 594 # Add to group file belongs to
556 if cov_file.group not in a.stats_by_group: 595 group = cov_file.attrs.get('group')
557 a.stats_by_group[cov_file.group] = CoverageStats() 596 if group not in a.stats_by_group:
558 cbyg = a.stats_by_group[cov_file.group] 597 a.stats_by_group[group] = CoverageStats()
598 cbyg = a.stats_by_group[group]
559 cbyg.Add(cov_file.stats) 599 cbyg.Add(cov_file.stats)
560 600
561 def PrintTree(self): 601 def PrintTree(self):
562 """Prints the tree stats.""" 602 """Prints the tree stats."""
563 # Print the tree 603 # Print the tree
564 print 'Lines of code coverage by directory:' 604 print 'Lines of code coverage by directory:'
565 print self.tree.GetTree() 605 print self.tree.GetTree()
566 606
567 #------------------------------------------------------------------------------ 607 #------------------------------------------------------------------------------
568 608
569 609
570 def Main(argv): 610 def Main(argv):
571 """Main routine. 611 """Main routine.
572 612
573 Args: 613 Args:
574 argv: list of arguments 614 argv: list of arguments
575 615
576 Returns: 616 Returns:
577 exit code, 0 for normal exit. 617 exit code, 0 for normal exit.
578 """ 618 """
579 # Parse args 619 # Parse args
580 parser = OptionParser() 620 parser = optparse.OptionParser()
581 parser.add_option( 621 parser.add_option(
582 '-i', '--input', dest='inputs', type='string', action='append', 622 '-i', '--input', dest='inputs', type='string', action='append',
583 metavar='FILE', 623 metavar='FILE',
584 help='read LCOV input from FILE') 624 help='read LCOV input from FILE')
585 parser.add_option( 625 parser.add_option(
586 '-r', '--root', dest='roots', type='string', action='append', 626 '-r', '--root', dest='roots', type='string', action='append',
587 metavar='ROOT[=ALTNAME]', 627 metavar='ROOT[=ALTNAME]',
588 help='add ROOT directory, optionally map in coverage results as ALTNAME') 628 help='add ROOT directory, optionally map in coverage results as ALTNAME')
589 parser.add_option( 629 parser.add_option(
590 '-c', '--config', dest='configs', type='string', action='append', 630 '-c', '--config', dest='configs', type='string', action='append',
591 metavar='FILE', 631 metavar='FILE',
592 help='read settings from configuration FILE') 632 help='read settings from configuration FILE')
593 parser.add_option( 633 parser.add_option(
594 '-a', '--addfiles', dest='addfiles', type='string', action='append', 634 '-a', '--addfiles', dest='addfiles', type='string', action='append',
595 metavar='PATH', 635 metavar='PATH',
596 help='add files from PATH to coverage data') 636 help='add files from PATH to coverage data')
597 parser.add_option( 637 parser.add_option(
598 '-t', '--tree', dest='tree', action='store_true', 638 '-t', '--tree', dest='tree', action='store_true',
599 help='print tree of code coverage by group') 639 help='print tree of code coverage by group')
600 parser.add_option( 640 parser.add_option(
601 '-u', '--uninstrumented', dest='uninstrumented', action='store_true', 641 '-u', '--uninstrumented', dest='uninstrumented', action='store_true',
602 help='list uninstrumented files') 642 help='list uninstrumented files')
643 parser.add_option(
644 '-m', '--html', dest='html_out', type='string', metavar='PATH',
645 help='write HTML output to PATH')
603 646
604 parser.set_defaults( 647 parser.set_defaults(
605 inputs=[], 648 inputs=[],
606 roots=[], 649 roots=[],
607 configs=[], 650 configs=[],
608 addfiles=[], 651 addfiles=[],
609 tree=False, 652 tree=False,
653 html_out=None,
610 ) 654 )
611 655
612 (options, args) = parser.parse_args() 656 options = parser.parse_args(args=argv)[0]
613 657
614 cov = Coverage() 658 cov = Coverage()
615 659
616 # Set root directories for coverage 660 # Set root directories for coverage
617 for root_opt in options.roots: 661 for root_opt in options.roots:
618 if '=' in root_opt: 662 if '=' in root_opt:
619 cov.AddRoot(*root_opt.split('=')) 663 cov.AddRoot(*root_opt.split('='))
620 else: 664 else:
621 cov.AddRoot(root_opt) 665 cov.AddRoot(root_opt)
622 666
(...skipping 17 matching lines...)
640 return 1 684 return 1
641 685
642 # Update tree stats 686 # Update tree stats
643 cov.UpdateTreeStats() 687 cov.UpdateTreeStats()
644 688
645 # Print uninstrumented filenames 689 # Print uninstrumented filenames
646 if options.uninstrumented: 690 if options.uninstrumented:
647 print 'Uninstrumented files:' 691 print 'Uninstrumented files:'
648 for f in sorted(cov.files): 692 for f in sorted(cov.files):
649 covf = cov.files[f] 693 covf = cov.files[f]
650 if not covf.stats.get('lines_instrumented'): 694 if not covf.in_lcov:
651 print ' %-6s %-6s %s' % (covf.group, covf.language, f) 695 print ' %-6s %-6s %s' % (covf.attrs.get('group'),
652 696 covf.attrs.get('language'), f)
653 697
654 # Print tree stats 698 # Print tree stats
655 if options.tree: 699 if options.tree:
656 cov.PrintTree() 700 cov.PrintTree()
657 701
658 # Print stats 702 # Print stats
659 for ps_args in cov.print_stats: 703 for ps_args in cov.print_stats:
660 cov.PrintStat(**ps_args) 704 cov.PrintStat(**ps_args)
661 705
706 # Generate HTML
707 if options.html_out:
708 html = croc_html.CrocHtml(cov, options.html_out)
709 html.Write()
710
662 # Normal exit 711 # Normal exit
663 return 0 712 return 0
664 713
665 714
666 #------------------------------------------------------------------------------ 715 #------------------------------------------------------------------------------
667 716
668 if __name__ == '__main__': 717 if __name__ == '__main__':
669 sys.exit(Main(sys.argv)) 718 sys.exit(Main(sys.argv))
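
A typical command line using the options defined in Main() (config, lcov, and output names are placeholders):

    python croc.py --config chrome.croc --input app_coverage.lcov \
        --root /home/build/chrome/src=src --tree --uninstrumented --html report
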