| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Find header files missing in GN. | 6 """Find header files missing in GN. |
| 7 | 7 |
| 8 This script gets all the header files from ninja_deps, which come from the true | 8 This script gets all the header files from ninja_deps, which come from the true |
| 9 dependencies generated by the compiler, and reports any that don't exist in GN. | 9 dependencies generated by the compiler, and reports any that don't exist in GN. |
| 10 """ | 10 """ |
| 11 | 11 |
| 12 import argparse | 12 import argparse |
| 13 import json | 13 import json |
| 14 import os | 14 import os |
| 15 import re | 15 import re |
| 16 import shutil | |
| 17 import subprocess | 16 import subprocess |
| 18 import sys | 17 import sys |
| 19 import tempfile | |
| 20 from multiprocessing import Process, Queue | 18 from multiprocessing import Process, Queue |
| 21 | 19 |
| 22 | 20 |
| 23 def GetHeadersFromNinja(out_dir, q): | 21 def GetHeadersFromNinja(out_dir, q): |
| 24 """Return all the header files from ninja_deps""" | 22 """Return all the header files from ninja_deps""" |
| 25 | 23 |
| 26 def NinjaSource(): | 24 def NinjaSource(): |
| 27 cmd = ['ninja', '-C', out_dir, '-t', 'deps'] | 25 cmd = ['ninja', '-C', out_dir, '-t', 'deps'] |
| 28 # A negative bufsize means to use the system default, which usually | 26 # A negative bufsize means to use the system default, which usually |
| 29 # means fully buffered. | 27 # means fully buffered. |
| (...skipping 31 matching lines...) |
| 61 if not f.startswith('build'): | 59 if not f.startswith('build'): |
| 62 all_headers.add(f) | 60 all_headers.add(f) |
| 63 else: | 61 else: |
| 64 is_valid = line.endswith('(VALID)') | 62 is_valid = line.endswith('(VALID)') |
| 65 | 63 |
| 66 return all_headers | 64 return all_headers |
| 67 | 65 |
| 68 | 66 |
| 69 def GetHeadersFromGN(out_dir, q): | 67 def GetHeadersFromGN(out_dir, q): |
| 70 """Return all the header files from GN""" | 68 """Return all the header files from GN""" |
| 71 | 69 subprocess.check_call(['gn', 'gen', out_dir, '--ide=json', '-q']) |
| 72 tmp = None | 70 gn_json = json.load(open(os.path.join(out_dir, 'project.json'))) |
| 73 try: | 71 q.put(ParseGNProjectJSON(gn_json)) |
| 74 tmp = tempfile.mkdtemp() | |
| 75 shutil.copy2(os.path.join(out_dir, 'args.gn'), | |
| 76 os.path.join(tmp, 'args.gn')) | |
| 77 # Do "gn gen" in a temp dir to prevent dirtying |out_dir|. | |
| 78 subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q']) | |
| 79 gn_json = json.load(open(os.path.join(tmp, 'project.json'))) | |
| 80 finally: | |
| 81 if tmp: | |
| 82 shutil.rmtree(tmp) | |
| 83 q.put(ParseGNProjectJSON(gn_json, out_dir, tmp)) | |
| 84 | 72 |
| 85 | 73 |
| 86 def ParseGNProjectJSON(gn, out_dir, tmp_out): | 74 def ParseGNProjectJSON(gn): |
| 87 """Parse GN output and get the header files""" | 75 """Parse GN output and get the header files""" |
| 88 all_headers = set() | 76 all_headers = set() |
| 89 | 77 |
| 90 for _target, properties in gn['targets'].iteritems(): | 78 for _target, properties in gn['targets'].iteritems(): |
| 91 sources = properties.get('sources', []) | 79 sources = properties.get('sources', []) |
| 92 public = properties.get('public', []) | 80 public = properties.get('public', []) |
| 93 # Exclude '"public": "*"'. | 81 # Exclude '"public": "*"'. |
| 94 if type(public) is list: | 82 if type(public) is list: |
| 95 sources += public | 83 sources += public |
| 96 for f in sources: | 84 for f in sources: |
| 97 if f.endswith('.h') or f.endswith('.hh'): | 85 if f.endswith('.h') or f.endswith('.hh'): |
| 98 if f.startswith('//'): | 86 if f.startswith('//'): |
| 99 f = f[2:] # Strip the '//' prefix. | 87 f = f[2:] # Strip the '//' prefix. |
| 100 if f.startswith(tmp_out): | |
| 101 f = out_dir + f[len(tmp_out):] | |
| 102 all_headers.add(f) | 88 all_headers.add(f) |
| 103 | 89 |
| 104 return all_headers | 90 return all_headers |
| 105 | 91 |
| 106 | 92 |
| 107 def GetDepsPrefixes(q): | 93 def GetDepsPrefixes(q): |
| 108 """Return all the folders controlled by DEPS file""" | 94 """Return all the folders controlled by DEPS file""" |
| 109 gclient_out = subprocess.check_output( | 95 gclient_out = subprocess.check_output( |
| 110 ['gclient', 'recurse', '--no-progress', '-j1', | 96 ['gclient', 'recurse', '--no-progress', '-j1', |
| 111 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]']) | 97 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]']) |
| (...skipping 20 matching lines...) |
| 132 | 118 |
| 133 def GetNonExistingFiles(lst): | 119 def GetNonExistingFiles(lst): |
| 134 out = set() | 120 out = set() |
| 135 for f in lst: | 121 for f in lst: |
| 136 if not os.path.isfile(f): | 122 if not os.path.isfile(f): |
| 137 out.add(f) | 123 out.add(f) |
| 138 return out | 124 return out |
| 139 | 125 |
| 140 | 126 |
| 141 def main(): | 127 def main(): |
| 142 parser = argparse.ArgumentParser(description=''' | 128 parser = argparse.ArgumentParser() |
| 143 NOTE: Use ninja to build all targets in OUT_DIR before running | 129 parser.add_argument('--out-dir', default='out/Release') |
| 144 this script.''') | 130 parser.add_argument('--json') |
| 145 parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', | 131 parser.add_argument('--whitelist') |
| 146 help='output directory of the build') | 132 parser.add_argument('args', nargs=argparse.REMAINDER) |
| 147 parser.add_argument('--json', | |
| 148 help='JSON output filename for missing headers') | |
| 149 parser.add_argument('--whitelist', help='file containing whitelist') | |
| 150 | 133 |
| 151 args, _extras = parser.parse_known_args() | 134 args, _extras = parser.parse_known_args() |
| 152 | 135 |
| 153 if not os.path.isdir(args.out_dir): | |
| 154 parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) | |
| 155 | |
| 156 d_q = Queue() | 136 d_q = Queue() |
| 157 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) | 137 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) |
| 158 d_p.start() | 138 d_p.start() |
| 159 | 139 |
| 160 gn_q = Queue() | 140 gn_q = Queue() |
| 161 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) | 141 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) |
| 162 gn_p.start() | 142 gn_p.start() |
| 163 | 143 |
| 164 deps_q = Queue() | 144 deps_q = Queue() |
| 165 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) | 145 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) |
| 166 deps_p.start() | 146 deps_p.start() |
| 167 | 147 |
| 168 d = d_q.get() | 148 d = d_q.get() |
| 149 assert len(GetNonExistingFiles(d)) == 0, \ |
| 150 'Found non-existing files in ninja deps' |
| 169 gn = gn_q.get() | 151 gn = gn_q.get() |
| 170 missing = d - gn | 152 missing = d - gn |
| 171 nonexisting = GetNonExistingFiles(gn) | 153 nonexisting = GetNonExistingFiles(gn) |
| 172 | 154 |
| 173 deps = deps_q.get() | 155 deps = deps_q.get() |
| 174 missing = FilterOutDepsedRepo(missing, deps) | 156 missing = FilterOutDepsedRepo(missing, deps) |
| 175 nonexisting = FilterOutDepsedRepo(nonexisting, deps) | 157 nonexisting = FilterOutDepsedRepo(nonexisting, deps) |
| 176 | 158 |
| 177 d_p.join() | 159 d_p.join() |
| 178 gn_p.join() | 160 gn_p.join() |
| 179 deps_p.join() | 161 deps_p.join() |
| 180 | 162 |
| 181 if len(GetNonExistingFiles(d)) > 0: | |
| 182 parser.error('''Found non-existing files in ninja deps. You should | |
| 183 build all in OUT_DIR.''') | |
| 184 if len(d) == 0: | |
| 185 parser.error('OUT_DIR looks empty. You should build all there.') | |
| 186 if any((('/gen/' in i) for i in nonexisting)): | |
| 187 parser.error('OUT_DIR looks wrong. You should build all there.') | |
| 188 | |
| 189 if args.whitelist: | 163 if args.whitelist: |
| 190 whitelist = ParseWhiteList(open(args.whitelist).read()) | 164 whitelist = ParseWhiteList(open(args.whitelist).read()) |
| 191 missing -= whitelist | 165 missing -= whitelist |
| 192 | 166 |
| 193 missing = sorted(missing) | 167 missing = sorted(missing) |
| 194 nonexisting = sorted(nonexisting) | 168 nonexisting = sorted(nonexisting) |
| 195 | 169 |
| 196 if args.json: | 170 if args.json: |
| 197 with open(args.json, 'w') as f: | 171 with open(args.json, 'w') as f: |
| 198 json.dump(missing, f) | 172 json.dump(missing, f) |
| 199 | 173 |
| 200 if len(missing) == 0 and len(nonexisting) == 0: | 174 if len(missing) == 0 and len(nonexisting) == 0: |
| 201 return 0 | 175 return 0 |
| 202 | 176 |
| 203 if len(missing) > 0: | 177 if len(missing) > 0: |
| 204 print '\nThe following files should be included in gn files:' | 178 print '\nThe following files should be included in gn files:' |
| 205 for i in missing: | 179 for i in missing: |
| 206 print i | 180 print i |
| 207 | 181 |
| 208 if len(nonexisting) > 0: | 182 if len(nonexisting) > 0: |
| 209 print '\nThe following non-existing files should be removed from gn files:' | 183 print '\nThe following non-existing files should be removed from gn files:' |
| 210 for i in nonexisting: | 184 for i in nonexisting: |
| 211 print i | 185 print i |
| 212 | 186 |
| 213 return 1 | 187 return 1 |
| 214 | 188 |
| 215 | 189 |
| 216 if __name__ == '__main__': | 190 if __name__ == '__main__': |
| 217 sys.exit(main()) | 191 sys.exit(main()) |
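For reference, a minimal self-contained sketch of the flow the new `GetHeadersFromGN`/`ParseGNProjectJSON` pair implements: run `gn gen` with `--ide=json` directly in the build directory, load the resulting `project.json`, and collect every `.h`/`.hh` file listed under each target's `sources` and (list-valued) `public`. The `out/Release` path below is only an illustrative assumption; the actual script takes the directory from `--out-dir`.

```python
import json
import os
import subprocess


def gn_headers(out_dir):
    # Regenerate project.json in place; assumes out_dir is already a
    # configured GN build directory (i.e. it contains args.gn).
    subprocess.check_call(['gn', 'gen', out_dir, '--ide=json', '-q'])
    with open(os.path.join(out_dir, 'project.json')) as f:
        project = json.load(f)

    headers = set()
    for _target, props in project['targets'].items():
        files = props.get('sources', [])
        public = props.get('public', [])
        # 'public' may be the string "*"; only merge it when it is a list.
        if isinstance(public, list):
            files = files + public
        for name in files:
            if name.endswith('.h') or name.endswith('.hh'):
                # Strip the '//' source-root prefix GN uses.
                headers.add(name[2:] if name.startswith('//') else name)
    return headers


if __name__ == '__main__':
    # Hypothetical out directory for illustration only.
    print(sorted(gn_headers('out/Release'))[:10])
```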