| 1 #!/usr/bin/env python |
| 2 # Copyright 2016 the V8 project authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. |
| 5 |
| 6 """ |
| 7 Script for merging sancov files in parallel. |
| 8 |
| 9 The sancov files are expected |
| 10 to be located in one directory with the file-name pattern: |
| 11 <executable name>.test.<id>.sancov |
| 12 |
| 13 For each executable, this script writes a new file: |
| 14 <executable name>.result.sancov |
| 15 |
| 16 The sancov tool is expected to be in the llvm compiler-rt third-party |
| 17 directory. It's not checked out by default and must be added as a custom deps: |
| 18 'v8/third_party/llvm/projects/compiler-rt': |
| 19 'https://chromium.googlesource.com/external/llvm.org/compiler-rt.git' |
| 20 """ |
| 21 |
| 22 import argparse |
| 23 import math |
| 24 from multiprocessing import Pool, cpu_count |
| 25 import os |
| 26 import re |
| 27 import subprocess |
| 28 import sys |
| 29 |
# V8 checkout directory (three levels above this script's own location).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))))

# The sancov tool location inside the llvm compiler-rt custom deps checkout
# (see the module docstring for how to add it).
SANCOV_TOOL = os.path.join(
    BASE_DIR, 'third_party', 'llvm', 'projects', 'compiler-rt',
    'lib', 'sanitizer_common', 'scripts', 'sancov.py')

# Number of cpus; used to size the worker pool and the merge buckets.
CPUS = cpu_count()

# Regexp to find sancov file as output by the v8 test runner. Also grabs the
# executable name in group 1.
SANCOV_FILE_RE = re.compile(r'^(.*)\.test\.\d+\.sancov$')
| 45 |
| 46 |
def merge(args):
  """Merge several sancov files into one.

  Called through multiprocessing pool. The args are expected to unpack to:
    keep: Option if source and intermediate sancov files should be kept.
    coverage_dir: Folder where to find the sancov files.
    executable: Name of the executable whose sancov files should be merged.
    index: A number to be put into the intermediate result file name.
           If None, this is a final result.
    bucket: The list of sancov files to be merged.
  Returns: A tuple with the executable name and the result file name.
  Raises: Exception if the sancov tool exits with a non-zero status.
  """
  keep, coverage_dir, executable, index, bucket = args
  process = subprocess.Popen(
      [SANCOV_TOOL, 'merge'] + bucket,
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE,
      cwd=coverage_dir,
  )
  output, errout = process.communicate()
  # Fail loudly instead of silently writing a bogus (e.g. empty) result file
  # when the sancov tool failed.
  if process.returncode != 0:
    raise Exception(
        'sancov merge failed for %s (exit code %d): %s' % (
            executable, process.returncode, errout))
  if index is not None:
    # This is an intermediate result, add the bucket index to the file name.
    result_file_name = '%s.result.%d.sancov' % (executable, index)
  else:
    # This is the final result without bucket index.
    result_file_name = '%s.result.sancov' % executable
  with open(os.path.join(coverage_dir, result_file_name), "wb") as f:
    f.write(output)
  if not keep:
    # Merging succeeded; the source files are no longer needed.
    for f in bucket:
      os.remove(os.path.join(coverage_dir, f))
  return executable, result_file_name
| 79 |
| 80 |
def generate_inputs(keep, coverage_dir, file_map, cpus):
  """Generate inputs for multiprocessed merging.

  Splits the sancov files into several buckets, so that each bucket can be
  merged in a separate process.

  Args:
    keep: Option if source and intermediate sancov files should be kept.
    coverage_dir: Folder where to find the sancov files.
    file_map: Dict mapping executable names to their lists of sancov files.
    cpus: Number of worker processes available for merging.
  Returns: List of args as expected by merge above.
  """
  inputs = []
  # Note: items()/range() instead of the Python-2-only iteritems()/xrange()
  # keeps this working on both Python 2 and 3.
  for executable, files in file_map.items():
    # What's the bucket size for distributing files for merging? E.g. with
    # 2 cpus and 9 files we want bucket size 5. A minimum of 2 ensures each
    # bucket actually merges something.
    n = max(2, int(math.ceil(len(files) / float(cpus))))

    # Chop files into buckets.
    buckets = [files[i:i+n] for i in range(0, len(files), n)]

    # Inputs for multiprocessing. List of tuples containing:
    # Keep-files option, base path, executable name, index of bucket,
    # list of files.
    inputs.extend([(keep, coverage_dir, executable, i, b)
                   for i, b in enumerate(buckets)])
  return inputs
| 104 |
| 105 |
def merge_parallel(inputs):
  """Fan the merge jobs in |inputs| out over a pool of worker processes.

  Returns: List of (executable, result file name) tuples, one per input.
  """
  worker_pool = Pool(CPUS)
  try:
    return worker_pool.map(merge, inputs)
  finally:
    # Always shut the pool down, even if a worker raised.
    worker_pool.close()
| 113 |
| 114 |
def main():
  """Entry point: merge all sancov files found in --coverage-dir.

  Returns: 0 on success (errors surface as exceptions/parser errors).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--coverage-dir', required=True,
                      help='Path to the sancov output files.')
  parser.add_argument('--keep', default=False, action='store_true',
                      help='Keep sancov output files after merging.')
  options = parser.parse_args()

  # Check if folder with coverage output exists. Use an explicit error
  # instead of assert, which is stripped when running with -O.
  if not os.path.isdir(options.coverage_dir):
    parser.error(
        'Coverage directory %s not found.' % options.coverage_dir)

  # Map executable names to their respective sancov files.
  file_map = {}
  for f in os.listdir(options.coverage_dir):
    match = SANCOV_FILE_RE.match(f)
    if match:
      file_map.setdefault(match.group(1), []).append(f)

  inputs = generate_inputs(
      options.keep, options.coverage_dir, file_map, CPUS)

  # First pass: merge each executable's files bucket-wise in parallel.
  results = merge_parallel(inputs)

  # Map executable names to intermediate bucket result files.
  file_map = {}
  for executable, f in results:
    file_map.setdefault(executable, []).append(f)

  # Merge the bucket results for each executable.
  # The final result has index None, so no index will appear in the
  # file name. (items() rather than Python-2-only iteritems().)
  inputs = [(options.keep, options.coverage_dir, executable, None, files)
            for executable, files in file_map.items()]
  merge_parallel(inputs)
  return 0
| 151 |
| 152 |
# Propagate main()'s return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main())