OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Find header files missing in GN. | 6 """Find header files missing in GN. |
7 | 7 |
8 This script gets all the header files from ninja_deps, which records the true | 8 This script gets all the header files from ninja_deps, which records the true |
9 dependencies generated by the compiler, and reports any that don't exist in GN. | 9 dependencies generated by the compiler, and reports any that don't exist in GN. |
10 """ | 10 """ |
11 | 11 |
12 import argparse | 12 import argparse |
13 import json | 13 import json |
14 import os | 14 import os |
15 import re | 15 import re |
| 16 import shutil |
16 import subprocess | 17 import subprocess |
17 import sys | 18 import sys |
| 19 import tempfile |
18 from multiprocessing import Process, Queue | 20 from multiprocessing import Process, Queue |
19 | 21 |
20 | 22 |
21 def GetHeadersFromNinja(out_dir, q): | 23 def GetHeadersFromNinja(out_dir, q): |
22 """Return all the header files from ninja_deps""" | 24 """Return all the header files from ninja_deps""" |
23 | 25 |
24 def NinjaSource(): | 26 def NinjaSource(): |
25 cmd = ['ninja', '-C', out_dir, '-t', 'deps'] | 27 cmd = ['ninja', '-C', out_dir, '-t', 'deps'] |
26 # A negative bufsize means to use the system default, which usually | 28 # A negative bufsize means to use the system default, which usually |
27 # means fully buffered. | 29 # means fully buffered. |
(...skipping 31 matching lines...) |
59 if not f.startswith('build'): | 61 if not f.startswith('build'): |
60 all_headers.add(f) | 62 all_headers.add(f) |
61 else: | 63 else: |
62 is_valid = line.endswith('(VALID)') | 64 is_valid = line.endswith('(VALID)') |
63 | 65 |
64 return all_headers | 66 return all_headers |
65 | 67 |
66 | 68 |
67 def GetHeadersFromGN(out_dir, q): | 69 def GetHeadersFromGN(out_dir, q): |
68 """Return all the header files from GN""" | 70 """Return all the header files from GN""" |
69 subprocess.check_call(['gn', 'gen', out_dir, '--ide=json', '-q']) | 71 |
70 gn_json = json.load(open(os.path.join(out_dir, 'project.json'))) | 72 tmp = None |
71 q.put(ParseGNProjectJSON(gn_json)) | 73 try: |
| 74 tmp = tempfile.mkdtemp() |
| 75 shutil.copy2(os.path.join(out_dir, 'args.gn'), |
| 76 os.path.join(tmp, 'args.gn')) |
| 77 # Do "gn gen" in a temp dir to prevent dirtying |out_dir|. |
| 78 subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q']) |
| 79 gn_json = json.load(open(os.path.join(tmp, 'project.json'))) |
| 80 finally: |
| 81 if tmp: |
| 82 shutil.rmtree(tmp) |
| 83 q.put(ParseGNProjectJSON(gn_json, out_dir, tmp)) |
72 | 84 |
73 | 85 |
74 def ParseGNProjectJSON(gn): | 86 def ParseGNProjectJSON(gn, out_dir, tmp_out): |
75 """Parse GN output and get the header files""" | 87 """Parse GN output and get the header files""" |
76 all_headers = set() | 88 all_headers = set() |
77 | 89 |
78 for _target, properties in gn['targets'].iteritems(): | 90 for _target, properties in gn['targets'].iteritems(): |
79 sources = properties.get('sources', []) | 91 sources = properties.get('sources', []) |
80 public = properties.get('public', []) | 92 public = properties.get('public', []) |
81 # Exclude '"public": "*"'. | 93 # Exclude '"public": "*"'. |
82 if type(public) is list: | 94 if type(public) is list: |
83 sources += public | 95 sources += public |
84 for f in sources: | 96 for f in sources: |
85 if f.endswith('.h') or f.endswith('.hh'): | 97 if f.endswith('.h') or f.endswith('.hh'): |
86 if f.startswith('//'): | 98 if f.startswith('//'): |
87 f = f[2:] # Strip the '//' prefix. | 99 f = f[2:] # Strip the '//' prefix. |
| 100 if f.startswith(tmp_out): |
| 101 f = out_dir + f[len(tmp_out):] |
88 all_headers.add(f) | 102 all_headers.add(f) |
89 | 103 |
90 return all_headers | 104 return all_headers |
91 | 105 |
92 | 106 |
93 def GetDepsPrefixes(q): | 107 def GetDepsPrefixes(q): |
94 """Return all the folders controlled by DEPS file""" | 108 """Return all the folders controlled by DEPS file""" |
95 gclient_out = subprocess.check_output( | 109 gclient_out = subprocess.check_output( |
96 ['gclient', 'recurse', '--no-progress', '-j1', | 110 ['gclient', 'recurse', '--no-progress', '-j1', |
97 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]']) | 111 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]']) |
(...skipping 20 matching lines...) |
118 | 132 |
119 def GetNonExistingFiles(lst): | 133 def GetNonExistingFiles(lst): |
120 out = set() | 134 out = set() |
121 for f in lst: | 135 for f in lst: |
122 if not os.path.isfile(f): | 136 if not os.path.isfile(f): |
123 out.add(f) | 137 out.add(f) |
124 return out | 138 return out |
125 | 139 |
126 | 140 |
127 def main(): | 141 def main(): |
128 parser = argparse.ArgumentParser() | 142 parser = argparse.ArgumentParser(description=''' |
129 parser.add_argument('--out-dir', default='out/Release') | 143 NOTE: Use ninja to build all targets in OUT_DIR before running |
130 parser.add_argument('--json') | 144 this script.''') |
131 parser.add_argument('--whitelist') | 145 parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', |
132 parser.add_argument('args', nargs=argparse.REMAINDER) | 146 help='output directory of the build') |
| 147 parser.add_argument('--json', |
| 148 help='JSON output filename for missing headers') |
| 149 parser.add_argument('--whitelist', help='file containing whitelist') |
133 | 150 |
134 args, _extras = parser.parse_known_args() | 151 args, _extras = parser.parse_known_args() |
135 | 152 |
| 153 if not os.path.isdir(args.out_dir): |
| 154 parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) |
| 155 |
136 d_q = Queue() | 156 d_q = Queue() |
137 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) | 157 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) |
138 d_p.start() | 158 d_p.start() |
139 | 159 |
140 gn_q = Queue() | 160 gn_q = Queue() |
141 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) | 161 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) |
142 gn_p.start() | 162 gn_p.start() |
143 | 163 |
144 deps_q = Queue() | 164 deps_q = Queue() |
145 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) | 165 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) |
146 deps_p.start() | 166 deps_p.start() |
147 | 167 |
148 d = d_q.get() | 168 d = d_q.get() |
149 assert len(GetNonExistingFiles(d)) == 0, \ | |
150 'Found non-existing files in ninja deps' | |
151 gn = gn_q.get() | 169 gn = gn_q.get() |
152 missing = d - gn | 170 missing = d - gn |
153 nonexisting = GetNonExistingFiles(gn) | 171 nonexisting = GetNonExistingFiles(gn) |
154 | 172 |
155 deps = deps_q.get() | 173 deps = deps_q.get() |
156 missing = FilterOutDepsedRepo(missing, deps) | 174 missing = FilterOutDepsedRepo(missing, deps) |
157 nonexisting = FilterOutDepsedRepo(nonexisting, deps) | 175 nonexisting = FilterOutDepsedRepo(nonexisting, deps) |
158 | 176 |
159 d_p.join() | 177 d_p.join() |
160 gn_p.join() | 178 gn_p.join() |
161 deps_p.join() | 179 deps_p.join() |
162 | 180 |
| 181 if len(GetNonExistingFiles(d)) > 0: |
| 182 parser.error('''Found non-existing files in ninja deps. You should |
| 183 build all in OUT_DIR.''') |
| 184 if len(d) == 0: |
| 185 parser.error('OUT_DIR looks empty. You should build all there.') |
| 186 if any((('/gen/' in i) for i in nonexisting)): |
| 187 parser.error('OUT_DIR looks wrong. You should build all there.') |
| 188 |
163 if args.whitelist: | 189 if args.whitelist: |
164 whitelist = ParseWhiteList(open(args.whitelist).read()) | 190 whitelist = ParseWhiteList(open(args.whitelist).read()) |
165 missing -= whitelist | 191 missing -= whitelist |
166 | 192 |
167 missing = sorted(missing) | 193 missing = sorted(missing) |
168 nonexisting = sorted(nonexisting) | 194 nonexisting = sorted(nonexisting) |
169 | 195 |
170 if args.json: | 196 if args.json: |
171 with open(args.json, 'w') as f: | 197 with open(args.json, 'w') as f: |
172 json.dump(missing, f) | 198 json.dump(missing, f) |
173 | 199 |
174 if len(missing) == 0 and len(nonexisting) == 0: | 200 if len(missing) == 0 and len(nonexisting) == 0: |
175 return 0 | 201 return 0 |
176 | 202 |
177 if len(missing) > 0: | 203 if len(missing) > 0: |
178 print '\nThe following files should be included in gn files:' | 204 print '\nThe following files should be included in gn files:' |
179 for i in missing: | 205 for i in missing: |
180 print i | 206 print i |
181 | 207 |
182 if len(nonexisting) > 0: | 208 if len(nonexisting) > 0: |
183 print '\nThe following non-existing files should be removed from gn files:' | 209 print '\nThe following non-existing files should be removed from gn files:' |
184 for i in nonexisting: | 210 for i in nonexisting: |
185 print i | 211 print i |
186 | 212 |
187 return 1 | 213 return 1 |
188 | 214 |
189 | 215 |
190 if __name__ == '__main__': | 216 if __name__ == '__main__': |
191 sys.exit(main()) | 217 sys.exit(main()) |
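
For readers skimming the new GetHeadersFromGN above: the point of the patch is to run "gn gen --ide=json" in a scratch directory seeded with the original args.gn, so the user's out directory is not dirtied, and then to map generated-file paths under the scratch directory back onto |out_dir|. Below is a minimal standalone sketch of that pattern; the helper name GenProjectJSON is hypothetical, and the error handling is simplified compared with the patch, which inlines this logic and does the remapping in ParseGNProjectJSON.

import json
import os
import shutil
import subprocess
import tempfile


def GenProjectJSON(out_dir):
  """Run 'gn gen --ide=json' in a throwaway dir seeded from |out_dir|.

  Returns (project_json, tmp_path). The temp dir is deleted before returning;
  tmp_path is only kept so callers can rewrite paths under the scratch dir
  back onto |out_dir|, the way ParseGNProjectJSON does above.
  """
  tmp = tempfile.mkdtemp()
  try:
    # Reuse the original build arguments so GN evaluates the same targets.
    shutil.copy2(os.path.join(out_dir, 'args.gn'), os.path.join(tmp, 'args.gn'))
    subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q'])
    with open(os.path.join(tmp, 'project.json')) as f:
      return json.load(f), tmp
  finally:
    shutil.rmtree(tmp)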
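
To make the set arithmetic in main() concrete, here is a small worked example with made-up paths. FilterOutDepsedRepo is elided in the hunk above, so the prefix filter below is only an assumption about what it does (drop paths that live under the DEPS-managed prefixes reported by GetDepsPrefixes); the rest mirrors the visible code.

import os

# Hypothetical inputs, for illustration only; real runs get these sets from
# the three worker processes above.
d = {'base/foo.h', 'cc/bar.h', 'third_party/skia/baz.h'}  # headers ninja saw
gn = {'base/foo.h', 'base/gone.h'}                         # headers GN lists
deps = {'third_party/skia/'}                               # DEPS-managed prefixes

missing = d - gn    # used by the compiler but not listed in any GN target
nonexisting = set(f for f in gn if not os.path.isfile(f))
# In a real checkout, nonexisting keeps only GN-listed files that are gone
# from disk (here, 'base/gone.h').

# Assumed behaviour of the elided FilterOutDepsedRepo: drop anything under a
# DEPS-managed prefix, since those files belong to a separate repository.
missing = set(f for f in missing if not any(f.startswith(p) for p in deps))
# missing is now {'cc/bar.h'}, which the script would print as a header that
# should be added to a GN file.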