OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2016 the V8 project authors. All rights reserved. | 2 # Copyright 2016 the V8 project authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Script to transform and merge sancov files into human readable json-format. | 6 """Script to transform and merge sancov files into human readable json-format. |
7 | 7 |
8 The script supports two actions: | 8 The script supports three actions: |
9 all: Writes a json file with all instrumented lines of all executables. | 9 all: Writes a json file with all instrumented lines of all executables. |
10 merge: Merges sancov files with coverage output into an existing json file. | 10 merge: Merges sancov files with coverage output into an existing json file. |
11 split: Split json file into separate files per covered source file. | |
11 | 12 |
12 The json data is structured as follows: | 13 The json data is structured as follows: |
13 { | 14 { |
14 "version": 1, | 15 "version": 1, |
15 "tests": ["executable1", "executable2", ...], | 16 "tests": ["executable1", "executable2", ...], |
16 "files": { | 17 "files": { |
17 "file1": [[<instr line 1>, <bit_mask>], [<instr line 2>, <bit_mask>], ...], | 18 "file1": [[<instr line 1>, <bit_mask>], [<instr line 2>, <bit_mask>], ...], |
18 "file2": [...], | 19 "file2": [...], |
19 ... | 20 ... |
20 } | 21 } |
21 } | 22 } |
22 | 23 |
23 The executables are sorted and determine the test bit mask. Their index+1 is | 24 The executables are sorted and determine the test bit mask. Their index+1 is |
24 the bit, e.g. executable1 = 1, executable3 = 4, etc. Hence, a line covered by | 25 the bit, e.g. executable1 = 1, executable3 = 4, etc. Hence, a line covered by |
25 executable1 and executable3 will have bit_mask == 5 == 0b101. The number of | 26 executable1 and executable3 will have bit_mask == 5 == 0b101. The number of |
26 tests is restricted to 52 in version 1, to allow javascript JSON parsing of | 27 tests is restricted to 52 in version 1, to allow javascript JSON parsing of |
27 the bitsets encoded as numbers. JS max safe int is (1 << 53) - 1. | 28 the bitsets encoded as numbers. JS max safe int is (1 << 53) - 1. |
28 | 29 |
29 The line-number-bit_mask pairs are sorted by line number and don't contain | 30 The line-number-bit_mask pairs are sorted by line number and don't contain |
30 duplicates. | 31 duplicates. |
31 | 32 |
33 Split json data preserves the same format, but only contains one file per | |
34 json file. | |
35 | |
32 The sancov tool is expected to be in the llvm compiler-rt third-party | 36 The sancov tool is expected to be in the llvm compiler-rt third-party |
33 directory. It's not checked out by default and must be added as a custom deps: | 37 directory. It's not checked out by default and must be added as a custom deps: |
34 'v8/third_party/llvm/projects/compiler-rt': | 38 'v8/third_party/llvm/projects/compiler-rt': |
35 'https://chromium.googlesource.com/external/llvm.org/compiler-rt.git' | 39 'https://chromium.googlesource.com/external/llvm.org/compiler-rt.git' |
36 """ | 40 """ |
37 | 41 |
38 import argparse | 42 import argparse |
39 import json | 43 import json |
40 import logging | 44 import logging |
41 import os | 45 import os |
(...skipping 164 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
206 return { | 210 return { |
207 'version': 1, | 211 'version': 1, |
208 'tests': sorted(map(os.path.basename, exe_list)), | 212 'tests': sorted(map(os.path.basename, exe_list)), |
209 'files': {f: map(lambda l: [l, 0], sorted(result[f])) for f in result}, | 213 'files': {f: map(lambda l: [l, 0], sorted(result[f])) for f in result}, |
210 } | 214 } |
211 | 215 |
212 | 216 |
def write_instrumented(options):
  """Implements the 'all' action of this tool.

  Collects the instrumented lines of every executable in parallel, merges
  them into the json structure described in the module docstring and writes
  the result to options.json_output.
  """
  exe_list = list(executables())
  logging.info('Reading instrumented lines from %d executables.',
               len(exe_list))

  # Fan the executables out to a worker pool. Closing the pool right away is
  # fine: imap_unordered has already been handed all of its input.
  worker_pool = Pool(CPUS)
  try:
    mapped_results = worker_pool.imap_unordered(
        get_instrumented_lines, exe_list)
  finally:
    worker_pool.close()

  # Merge the (lazily consumed) multiprocessing results into output data.
  data = merge_instrumented_line_results(exe_list, mapped_results)

  logging.info('Read data from %d executables, which covers %d files.',
               len(data['tests']), len(data['files']))
  logging.info('Writing results to %s', options.json_output)

  # Persist the merged json output.
  with open(options.json_output, 'w') as f:
    json.dump(data, f, sort_keys=True)
235 | 239 |
236 def get_covered_lines(args): | 240 def get_covered_lines(args): |
237 """Return the covered lines of an executable. | 241 """Return the covered lines of an executable. |
238 | 242 |
239 Called through multiprocessing pool. The args are expected to unpack to: | 243 Called through multiprocessing pool. The args are expected to unpack to: |
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
335 os.path.isdir(options.coverage_dir)) | 339 os.path.isdir(options.coverage_dir)) |
336 | 340 |
337 # Inputs for multiprocessing. List of tuples of: | 341 # Inputs for multiprocessing. List of tuples of: |
338 # Coverage dir, executable name, sancov file name. | 342 # Coverage dir, executable name, sancov file name. |
339 inputs = [] | 343 inputs = [] |
340 for f in os.listdir(options.coverage_dir): | 344 for f in os.listdir(options.coverage_dir): |
341 match = SANCOV_FILE_RE.match(f) | 345 match = SANCOV_FILE_RE.match(f) |
342 if match: | 346 if match: |
343 inputs.append((options.coverage_dir, match.group(1), f)) | 347 inputs.append((options.coverage_dir, match.group(1), f)) |
344 | 348 |
345 logging.info('Merging %d sancov files into %s' % | 349 logging.info('Merging %d sancov files into %s', |
346 (len(inputs), options.json_input)) | 350 len(inputs), options.json_input) |
347 | 351 |
348 # Post-process covered lines in parallel. | 352 # Post-process covered lines in parallel. |
349 pool = Pool(CPUS) | 353 pool = Pool(CPUS) |
350 try: | 354 try: |
351 results = pool.imap_unordered(get_covered_lines, inputs) | 355 results = pool.imap_unordered(get_covered_lines, inputs) |
352 finally: | 356 finally: |
353 pool.close() | 357 pool.close() |
354 | 358 |
355 # Load existing json data file for merging the results. | 359 # Load existing json data file for merging the results. |
356 with open(options.json_input, 'r') as f: | 360 with open(options.json_input, 'r') as f: |
357 data = json.load(f) | 361 data = json.load(f) |
358 | 362 |
359 # Merge multiprocessing results. Mutates data. | 363 # Merge multiprocessing results. Mutates data. |
360 merge_covered_line_results(data, results) | 364 merge_covered_line_results(data, results) |
361 | 365 |
362 logging.info('Merged data from %d executables, which covers %d files.' % | 366 logging.info('Merged data from %d executables, which covers %d files.', |
363 (len(data['tests']), len(data['files']))) | 367 len(data['tests']), len(data['files'])) |
364 logging.info('Writing results to %s' % options.json_output) | 368 logging.info('Writing results to %s', options.json_output) |
365 | 369 |
366 # Write merged results to file. | 370 # Write merged results to file. |
367 with open(options.json_output, 'w') as f: | 371 with open(options.json_output, 'w') as f: |
368 json.dump(data, f, sort_keys=True) | 372 json.dump(data, f, sort_keys=True) |
369 | 373 |
370 | 374 |
def split(options):
  """Implements the 'split' action of this tool.

  Reads the json coverage data from options.json_input and writes one json
  file per covered source file into options.output_dir. Each output file
  keeps the top-level format described in the module docstring (version,
  tests, files), but its 'files' dict contains only that one source file.
  """
  # Load existing json data file for splitting.
  with open(options.json_input, 'r') as f:
    data = json.load(f)

  logging.info('Splitting off %d coverage files from %s',
               len(data['files']), options.json_input)

  for file_name, coverage in data['files'].items():
    # Preserve relative directories that are part of the file name.
    # NOTE(review): assumes file names are normalized relative paths; an
    # absolute file_name would make os.path.join discard output_dir — TODO
    # confirm against the producer of the json data.
    file_path = os.path.join(options.output_dir, file_name + '.json')
    directory = os.path.dirname(file_path)
    try:
      os.makedirs(directory)
    except OSError:
      # Only ignore already-existing directories. Previously every OSError
      # (e.g. permission denied) was silently swallowed; re-raise those.
      if not os.path.isdir(directory):
        raise

    with open(file_path, 'w') as f:
      # Flat-copy the old dict; the shared top-level keys (version, tests)
      # are carried over unchanged.
      new_data = dict(data)

      # Narrow 'files' down to the single current source file.
      new_data['files'] = {file_name: coverage}

      # Write json data.
      json.dump(new_data, f, sort_keys=True)
403 | |
def main():
  """Parses arguments and dispatches to the requested coverage action.

  Returns:
    0 on success, 1 when a required option for the chosen action is missing.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--coverage-dir',
                      help='Path to the sancov output files.')
  parser.add_argument('--json-input',
                      help='Path to an existing json file with coverage data.')
  parser.add_argument('--json-output',
                      help='Path to a file to write json output to.')
  parser.add_argument('--output-dir',
                      help='Directory where to put split output files to.')
  parser.add_argument('action', choices=['all', 'merge', 'split'],
                      help='Action to perform.')

  options = parser.parse_args()
  action = options.action.lower()

  # Per-action required options, checked in one place instead of three
  # copy-pasted blocks. Argparse can't express this conditional dependency.
  required_flags = {
      'all': ['json-output'],
      'merge': ['coverage-dir', 'json-input', 'json-output'],
      'split': ['json-input', 'output-dir'],
  }
  for flag in required_flags[action]:
    if not getattr(options, flag.replace('-', '_')):
      print('--%s is required' % flag)
      return 1

  if action == 'all':
    write_instrumented(options)
  elif action == 'merge':
    merge(options)
  elif action == 'split':
    split(options)
  return 0
394 | 443 |
395 | 444 |
396 if __name__ == '__main__': | 445 if __name__ == '__main__': |
397 sys.exit(main()) | 446 sys.exit(main()) |
OLD | NEW |