| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Find header files missing in GN. | 6 """Find header files missing in GN. |
| 7 | 7 |
| 8 This script gets all the header files from ninja_deps, which reflects the true | 8 This script gets all the header files from ninja_deps, which reflects the true |
| 9 dependencies generated by the compiler, and reports any that don't exist in GN. | 9 dependencies generated by the compiler, and reports any that don't exist in GN. |
| 10 """ | 10 """ |
| 11 | 11 |
| 12 import argparse | 12 import argparse |
| 13 import json | 13 import json |
| 14 import os | 14 import os |
| 15 import re | 15 import re |
| 16 import shutil | 16 import shutil |
| 17 import subprocess | 17 import subprocess |
| 18 import sys | 18 import sys |
| 19 import tempfile | 19 import tempfile |
| 20 from multiprocessing import Process, Queue | 20 from multiprocessing import Process, Queue |
| 21 | 21 |
| 22 SRC_DIR = os.path.abspath( | 22 SRC_DIR = os.path.abspath( |
| 23 os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir)) | 23 os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir)) |
| 24 DEPOT_TOOLS_DIR = os.path.join(SRC_DIR, 'third_party', 'depot_tools') | 24 DEPOT_TOOLS_DIR = os.path.join(SRC_DIR, 'third_party', 'depot_tools') |
| 25 | 25 |
| 26 | 26 |
| 27 def GetHeadersFromNinja(out_dir, q): | 27 def GetHeadersFromNinja(out_dir, skip_obj, q): |
| 28 """Return all the header files from ninja_deps""" | 28 """Return all the header files from ninja_deps""" |
| 29 | 29 |
| 30 def NinjaSource(): | 30 def NinjaSource(): |
| 31 cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-t', 'deps'] | 31 cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-t', 'deps'] |
| 32 # A negative bufsize means to use the system default, which usually | 32 # A negative bufsize means to use the system default, which usually |
| 33 # means fully buffered. | 33 # means fully buffered. |
| 34 popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1) | 34 popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1) |
| 35 for line in iter(popen.stdout.readline, ''): | 35 for line in iter(popen.stdout.readline, ''): |
| 36 yield line.rstrip() | 36 yield line.rstrip() |
| 37 | 37 |
| 38 popen.stdout.close() | 38 popen.stdout.close() |
| 39 return_code = popen.wait() | 39 return_code = popen.wait() |
| 40 if return_code: | 40 if return_code: |
| 41 raise subprocess.CalledProcessError(return_code, cmd) | 41 raise subprocess.CalledProcessError(return_code, cmd) |
| 42 | 42 |
| 43 ans, err = set(), None | 43 ans, err = set(), None |
| 44 try: | 44 try: |
| 45 ans = ParseNinjaDepsOutput(NinjaSource(), out_dir) | 45 ans = ParseNinjaDepsOutput(NinjaSource(), out_dir, skip_obj) |
| 46 except Exception as e: | 46 except Exception as e: |
| 47 err = str(e) | 47 err = str(e) |
| 48 q.put((ans, err)) | 48 q.put((ans, err)) |
| 49 | 49 |
| 50 | 50 |
| 51 def ParseNinjaDepsOutput(ninja_out, out_dir): | 51 def ParseNinjaDepsOutput(ninja_out, out_dir, skip_obj): |
| 52 """Parse ninja output and get the header files""" | 52 """Parse ninja output and get the header files""" |
| 53 all_headers = set() | 53 all_headers = {} |
| 54 | 54 |
| 55 # Ninja always uses "/", even on Windows. | 55 # Ninja always uses "/", even on Windows. |
| 56 prefix = '../../' | 56 prefix = '../../' |
| 57 | 57 |
| 58 is_valid = False | 58 is_valid = False |
| | 59 obj_file = '' |
| 59 for line in ninja_out: | 60 for line in ninja_out: |
| 60 if line.startswith(' '): | 61 if line.startswith(' '): |
| 61 if not is_valid: | 62 if not is_valid: |
| 62 continue | 63 continue |
| 63 if line.endswith('.h') or line.endswith('.hh'): | 64 if line.endswith('.h') or line.endswith('.hh'): |
| 64 f = line.strip() | 65 f = line.strip() |
| 65 if f.startswith(prefix): | 66 if f.startswith(prefix): |
| 66 f = f[6:] # Remove the '../../' prefix | 67 f = f[6:] # Remove the '../../' prefix |
| 67 # build/ only contains build-specific files like build_config.h | 68 # build/ only contains build-specific files like build_config.h |
| 68 # and buildflag.h, and system header files, so they should be | 69 # and buildflag.h, and system header files, so they should be |
| 69 # skipped. | 70 # skipped. |
| 70 if f.startswith(out_dir) or f.startswith('out'): | 71 if f.startswith(out_dir) or f.startswith('out'): |
| 71 continue | 72 continue |
| 72 if not f.startswith('build'): | 73 if not f.startswith('build'): |
| 73 all_headers.add(f) | 74 all_headers.setdefault(f, []) |
| | 75 if not skip_obj: |
| | 76 all_headers[f].append(obj_file) |
| 74 else: | 77 else: |
| 75 is_valid = line.endswith('(VALID)') | 78 is_valid = line.endswith('(VALID)') |
| | 79 obj_file = line.split(':')[0] |
| 76 | 80 |
| 77 return all_headers | 81 return all_headers |
| 78 | 82 |
| 79 | 83 |
| 80 def GetHeadersFromGN(out_dir, q): | 84 def GetHeadersFromGN(out_dir, q): |
| 81 """Return all the header files from GN""" | 85 """Return all the header files from GN""" |
| 82 | 86 |
| 83 tmp = None | 87 tmp = None |
| 84 ans, err = set(), None | 88 ans, err = set(), None |
| 85 try: | 89 try: |
| 86 # Argument |dir| is needed to make sure it's on the same drive on Windows. | 90 # Argument |dir| is needed to make sure it's on the same drive on Windows. |
| 87 # dir='' means dir='.', but doesn't introduce an unneeded prefix. | 91 # dir='' means dir='.', but doesn't introduce an unneeded prefix. |
| 88 tmp = tempfile.mkdtemp(dir='') | 92 tmp = tempfile.mkdtemp(dir='') |
| 89 shutil.copy2(os.path.join(out_dir, 'args.gn'), | 93 shutil.copy2(os.path.join(out_dir, 'args.gn'), |
| 90 os.path.join(tmp, 'args.gn')) | 94 os.path.join(tmp, 'args.gn')) |
| 91 # Do "gn gen" in a temp dir to prevent dirtying |out_dir|. | 95 # Do "gn gen" in a temp dir to prevent dirtying |out_dir|. |
| 92 gn_exe = 'gn.bat' if sys.platform == 'win32' else 'gn' | 96 gn_exe = 'gn.bat' if sys.platform == 'win32' else 'gn' |
| 93 subprocess.check_call([ | 97 subprocess.check_call([ |
| 94 os.path.join(DEPOT_TOOLS_DIR, gn_exe), 'gen', tmp, '--ide=json', '-q']) | 98 os.path.join(DEPOT_TOOLS_DIR, gn_exe), 'gen', tmp, '--ide=json', '-q']) |
| 95 gn_json = json.load(open(os.path.join(tmp, 'project.json'))) | 99 gn_json = json.load(open(os.path.join(tmp, 'project.json'))) |
| 96 ans = ParseGNProjectJSON(gn_json, out_dir, tmp) | 100 ans = ParseGNProjectJSON(gn_json, out_dir, tmp) |
| 97 except Exception as e: | 101 except Exception as e: |
| 98 err = str(e) | 102 err = str(e) |
| 99 finally: | 103 finally: |
| 100 if tmp: | 104 if tmp: |
| 101 shutil.rmtree(tmp) | 105 shutil.rmtree(tmp) |
| 102 q.put((ans, err)) | 106 q.put((ans, err)) |
| 103 | 107 |
| 104 | 108 |
| (...skipping 78 matching lines...) |
| 183 parser = argparse.ArgumentParser(description=''' | 187 parser = argparse.ArgumentParser(description=''' |
| 184 NOTE: Use ninja to build all targets in OUT_DIR before running | 188 NOTE: Use ninja to build all targets in OUT_DIR before running |
| 185 this script.''') | 189 this script.''') |
| 186 parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', | 190 parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', |
| 187 help='output directory of the build') | 191 help='output directory of the build') |
| 188 parser.add_argument('--json', | 192 parser.add_argument('--json', |
| 189 help='JSON output filename for missing headers') | 193 help='JSON output filename for missing headers') |
| 190 parser.add_argument('--whitelist', help='file containing whitelist') | 194 parser.add_argument('--whitelist', help='file containing whitelist') |
| 191 parser.add_argument('--skip-dirty-check', action='store_true', | 195 parser.add_argument('--skip-dirty-check', action='store_true', |
| 192 help='skip checking whether the build is dirty') | 196 help='skip checking whether the build is dirty') |
| | 197 parser.add_argument('--verbose', action='store_true', |
| | 198 help='print more diagnostic info') |
| 193 | 199 |
| 194 args, _extras = parser.parse_known_args() | 200 args, _extras = parser.parse_known_args() |
| 195 | 201 |
| 196 if not os.path.isdir(args.out_dir): | 202 if not os.path.isdir(args.out_dir): |
| 197 parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) | 203 parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) |
| 198 | 204 |
| 199 if not args.skip_dirty_check and not IsBuildClean(args.out_dir): | 205 if not args.skip_dirty_check and not IsBuildClean(args.out_dir): |
| 200 dirty_msg = 'OUT_DIR looks dirty. You need to build all there.' | 206 dirty_msg = 'OUT_DIR looks dirty. You need to build all there.' |
| 201 if args.json: | 207 if args.json: |
| 202 # Assume running on the bots. Silently skip this step. | 208 # Assume running on the bots. Silently skip this step. |
| 203 # This is possible because the "analyze" step can be wrong due to | 209 # This is possible because the "analyze" step can be wrong due to |
| 204 # underspecified header files. See crbug.com/725877 | 210 # underspecified header files. See crbug.com/725877 |
| 205 print dirty_msg | 211 print dirty_msg |
| 206 DumpJson([]) | 212 DumpJson([]) |
| 207 return 0 | 213 return 0 |
| 208 else: | 214 else: |
| 209 # Assume running interactively. | 215 # Assume running interactively. |
| 210 parser.error(dirty_msg) | 216 parser.error(dirty_msg) |
| 211 | 217 |
| 212 d_q = Queue() | 218 d_q = Queue() |
| 213 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) | 219 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, True, d_q,)) |
| 214 d_p.start() | 220 d_p.start() |
| 215 | 221 |
| 216 gn_q = Queue() | 222 gn_q = Queue() |
| 217 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) | 223 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) |
| 218 gn_p.start() | 224 gn_p.start() |
| 219 | 225 |
| 220 deps_q = Queue() | 226 deps_q = Queue() |
| 221 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) | 227 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) |
| 222 deps_p.start() | 228 deps_p.start() |
| 223 | 229 |
| 224 d, d_err = d_q.get() | 230 d, d_err = d_q.get() |
| 225 gn, gn_err = gn_q.get() | 231 gn, gn_err = gn_q.get() |
| 226 missing = d - gn | 232 missing = set(d.keys()) - gn |
| 227 nonexisting = GetNonExistingFiles(gn) | 233 nonexisting = GetNonExistingFiles(gn) |
| 228 | 234 |
| 229 deps, deps_err = deps_q.get() | 235 deps, deps_err = deps_q.get() |
| 230 missing = FilterOutDepsedRepo(missing, deps) | 236 missing = FilterOutDepsedRepo(missing, deps) |
| 231 nonexisting = FilterOutDepsedRepo(nonexisting, deps) | 237 nonexisting = FilterOutDepsedRepo(nonexisting, deps) |
| 232 | 238 |
| 233 d_p.join() | 239 d_p.join() |
| 234 gn_p.join() | 240 gn_p.join() |
| 235 deps_p.join() | 241 deps_p.join() |
| 236 | 242 |
| (...skipping 28 matching lines...) |
| 265 if len(missing) > 0: | 271 if len(missing) > 0: |
| 266 print '\nThe following files should be included in gn files:' | 272 print '\nThe following files should be included in gn files:' |
| 267 for i in missing: | 273 for i in missing: |
| 268 print i | 274 print i |
| 269 | 275 |
| 270 if len(nonexisting) > 0: | 276 if len(nonexisting) > 0: |
| 271 print '\nThe following non-existing files should be removed from gn files:' | 277 print '\nThe following non-existing files should be removed from gn files:' |
| 272 for i in nonexisting: | 278 for i in nonexisting: |
| 273 print i | 279 print i |
| 274 | 280 |
| | 281 if args.verbose: |
| | 282 # Only get detailed obj dependency here since it is slower. |
| | 283 GetHeadersFromNinja(args.out_dir, False, d_q) |
| | 284 d, d_err = d_q.get() |
| | 285 print '\nDetailed dependency info:' |
| | 286 for f in missing: |
| | 287 print f |
| | 288 for cc in d[f]: |
| | 289 print ' ', cc |
| | 290 |
| | 291 print '\nMissing headers sorted by number of affected object files:' |
| | 292 count = {k: len(v) for (k, v) in d.iteritems()} |
| | 293 for f in sorted(count, key=count.get, reverse=True): |
| | 294 if f in missing: |
| | 295 print count[f], f |
| | 296 |
| 275 return 1 | 297 return 1 |
| 276 | 298 |
| 277 | 299 |
| 278 if __name__ == '__main__': | 300 if __name__ == '__main__': |
| 279 sys.exit(main()) | 301 sys.exit(main()) |
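
For reviewers who want to sanity-check the parsing contract in `ParseNinjaDepsOutput`, here is a minimal sketch. It assumes the patched file is importable as `check_gn_headers`, and the sample `ninja -t deps` lines are illustrative rather than captured from a real build; the contract itself (indented dependency paths under a `target: ... (VALID)` record, `../../`-prefixed paths, `build/` and output-directory paths skipped) comes from the code above.

```python
# A minimal sketch, assuming this patch is importable as check_gn_headers.
# The sample ninja deps output below is illustrative only.
from check_gn_headers import ParseNinjaDepsOutput

sample_ninja_out = [
    'obj/foo/bar.o: #deps 2, deps mtime 123456 (VALID)',
    '    ../../foo/bar.cc',   # not a header, ignored
    '    ../../foo/bar.h',    # recorded as foo/bar.h
    'obj/baz/stale.o: #deps 1, deps mtime 123456 (STALE)',
    '    ../../baz/other.h',  # record is not marked (VALID), ignored
]

# skip_obj=True is the fast pass used by main(): header -> empty list.
print ParseNinjaDepsOutput(sample_ninja_out, 'out/Release', True)
# {'foo/bar.h': []}

# skip_obj=False also records which object files depend on each header.
print ParseNinjaDepsOutput(sample_ninja_out, 'out/Release', False)
# {'foo/bar.h': ['obj/foo/bar.o']}
```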
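
Since the patch changes the signature of `GetHeadersFromNinja`, any call site that spawns it in a worker process needs the extra `skip_obj` argument. Below is a sketch of the two-pass pattern `main()` now uses; the module name `check_gn_headers`, the `out/Release` directory, and the interactive invocation `python check_gn_headers.py --out-dir=out/Release --verbose` are assumptions for illustration and require a fully built output directory.

```python
# Sketch of the updated call pattern; check_gn_headers is an assumed
# module name and out/Release must already be fully built with ninja.
from multiprocessing import Process, Queue
from check_gn_headers import GetHeadersFromNinja

q = Queue()
# Fast pass (skip_obj=True): only header names are collected, values are [].
p = Process(target=GetHeadersFromNinja, args=('out/Release', True, q))
p.start()
headers, err = q.get()  # get() before join(), as in main()
p.join()

# Slow pass (skip_obj=False), run in-process like the new --verbose branch:
# each header maps to the object files whose deps pulled it in.
GetHeadersFromNinja('out/Release', False, q)
headers, err = q.get()
for h in sorted(headers, key=lambda k: len(headers[k]), reverse=True)[:10]:
  print len(headers[h]), h
```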