OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Find header files missing in GN. | 6 """Find header files missing in GN. |
7 | 7 |
8 This script gets all the header files from ninja_deps, which is from the true | 8 This script gets all the header files from ninja_deps, which is from the true |
9 dependency generated by the compiler, and reports if they don't exist in GN. | 9 dependency generated by the compiler, and reports if they don't exist in GN. |
10 """ | 10 """ |
(...skipping 20 matching lines...) Expand all Loading... | |
31 for line in iter(popen.stdout.readline, ''): | 31 for line in iter(popen.stdout.readline, ''): |
32 yield line.rstrip() | 32 yield line.rstrip() |
33 | 33 |
34 popen.stdout.close() | 34 popen.stdout.close() |
35 return_code = popen.wait() | 35 return_code = popen.wait() |
36 if return_code: | 36 if return_code: |
37 raise subprocess.CalledProcessError(return_code, cmd) | 37 raise subprocess.CalledProcessError(return_code, cmd) |
38 | 38 |
39 ans, err = set(), None | 39 ans, err = set(), None |
40 try: | 40 try: |
41 ans = ParseNinjaDepsOutput(NinjaSource()) | 41 ans = ParseNinjaDepsOutput(NinjaSource(), out_dir) |
42 except Exception as e: | 42 except Exception as e: |
43 err = str(e) | 43 err = str(e) |
44 q.put((ans, err)) | 44 q.put((ans, err)) |
45 | 45 |
46 | 46 |
47 def ParseNinjaDepsOutput(ninja_out): | 47 def ParseNinjaDepsOutput(ninja_out, out_dir): |
48 """Parse ninja output and get the header files""" | 48 """Parse ninja output and get the header files""" |
49 all_headers = set() | 49 all_headers = set() |
50 | 50 |
51 prefix = '..' + os.sep + '..' + os.sep | 51 prefix = '..' + os.sep + '..' + os.sep |
52 | 52 |
53 is_valid = False | 53 is_valid = False |
54 for line in ninja_out: | 54 for line in ninja_out: |
55 if line.startswith(' '): | 55 if line.startswith(' '): |
56 if not is_valid: | 56 if not is_valid: |
57 continue | 57 continue |
58 if line.endswith('.h') or line.endswith('.hh'): | 58 if line.endswith('.h') or line.endswith('.hh'): |
59 f = line.strip() | 59 f = line.strip() |
60 if f.startswith(prefix): | 60 if f.startswith(prefix): |
61 f = f[6:] # Remove the '../../' prefix | 61 f = f[6:] # Remove the '../../' prefix |
62 # build/ only contains build-specific files like build_config.h | 62 # build/ only contains build-specific files like build_config.h |
63 # and buildflag.h, and system header files, so they should be | 63 # and buildflag.h, and system header files, so they should be |
64 # skipped. | 64 # skipped. |
65 if f.startswith(out_dir) or f.startswith('out'): | |
66 continue | |
65 if not f.startswith('build'): | 67 if not f.startswith('build'): |
66 all_headers.add(f) | 68 all_headers.add(f) |
67 else: | 69 else: |
68 is_valid = line.endswith('(VALID)') | 70 is_valid = line.endswith('(VALID)') |
69 | 71 |
70 return all_headers | 72 return all_headers |
71 | 73 |
72 | 74 |
73 def GetHeadersFromGN(out_dir, q): | 75 def GetHeadersFromGN(out_dir, q): |
74 """Return all the header files from GN""" | 76 """Return all the header files from GN""" |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
121 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]']) | 123 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]']) |
122 for i in gclient_out.split('\n'): | 124 for i in gclient_out.split('\n'): |
123 if i.startswith('src/'): | 125 if i.startswith('src/'): |
124 i = i[4:] | 126 i = i[4:] |
125 prefixes.add(i) | 127 prefixes.add(i) |
126 except Exception as e: | 128 except Exception as e: |
127 err = str(e) | 129 err = str(e) |
128 q.put((prefixes, err)) | 130 q.put((prefixes, err)) |
129 | 131 |
130 | 132 |
133 def IsBuildClean(out_dir): | |
134 cmd = ['ninja', '-C', out_dir, '-n'] | |
135 out = subprocess.check_output(cmd) | |
136 return 'no work to do.' in out | |
137 | |
138 | |
131 def ParseWhiteList(whitelist): | 139 def ParseWhiteList(whitelist): |
132 out = set() | 140 out = set() |
133 for line in whitelist.split('\n'): | 141 for line in whitelist.split('\n'): |
134 line = re.sub(r'#.*', '', line).strip() | 142 line = re.sub(r'#.*', '', line).strip() |
135 if line: | 143 if line: |
136 out.add(line) | 144 out.add(line) |
137 return out | 145 return out |
138 | 146 |
139 | 147 |
140 def FilterOutDepsedRepo(files, deps): | 148 def FilterOutDepsedRepo(files, deps): |
141 return {f for f in files if not any(f.startswith(d) for d in deps)} | 149 return {f for f in files if not any(f.startswith(d) for d in deps)} |
142 | 150 |
143 | 151 |
144 def GetNonExistingFiles(lst): | 152 def GetNonExistingFiles(lst): |
145 out = set() | 153 out = set() |
146 for f in lst: | 154 for f in lst: |
147 if not os.path.isfile(f): | 155 if not os.path.isfile(f): |
148 out.add(f) | 156 out.add(f) |
149 return out | 157 return out |
150 | 158 |
151 | 159 |
152 def main(): | 160 def main(): |
161 | |
162 def DumpJson(data): | |
163 if args.json: | |
164 with open(args.json, 'w') as f: | |
165 json.dump(data, f) | |
166 | |
167 def PrintError(msg): | |
168 DumpJson([]) | |
169 parser.error(msg) | |
170 | |
153 parser = argparse.ArgumentParser(description=''' | 171 parser = argparse.ArgumentParser(description=''' |
154 NOTE: Use ninja to build all targets in OUT_DIR before running | 172 NOTE: Use ninja to build all targets in OUT_DIR before running |
155 this script.''') | 173 this script.''') |
156 parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', | 174 parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', |
157 help='output directory of the build') | 175 help='output directory of the build') |
158 parser.add_argument('--json', | 176 parser.add_argument('--json', |
159 help='JSON output filename for missing headers') | 177 help='JSON output filename for missing headers') |
160 parser.add_argument('--whitelist', help='file containing whitelist') | 178 parser.add_argument('--whitelist', help='file containing whitelist') |
179 parser.add_argument('--skip-dirty-check', action='store_true', | |
180 help='skip checking whether the build is dirty') | |
161 | 181 |
162 args, _extras = parser.parse_known_args() | 182 args, _extras = parser.parse_known_args() |
163 | 183 |
164 if not os.path.isdir(args.out_dir): | 184 if not os.path.isdir(args.out_dir): |
165 parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) | 185 parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) |
166 | 186 |
187 if not args.skip_dirty_check and not IsBuildClean(args.out_dir): | |
188 dirty_msg = 'OUT_DIR looks dirty. You need to build all there.' | |
189 if args.json: | |
190 # Assume running on the bots. Silently skip this step. | |
191 # This is possible because "analyze" step can be wrong due to | |
192 # underspecified header files. See crbug.com/725877 | |
193 print dirty_msg | |
194 DumpJson([]) | |
195 return 0 | |
196 else: | |
197 # Assume running interactively. | |
198 parser.error(dirty_msg) | |
199 | |
167 d_q = Queue() | 200 d_q = Queue() |
168 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) | 201 d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,)) |
169 d_p.start() | 202 d_p.start() |
170 | 203 |
171 gn_q = Queue() | 204 gn_q = Queue() |
172 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) | 205 gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) |
173 gn_p.start() | 206 gn_p.start() |
174 | 207 |
175 deps_q = Queue() | 208 deps_q = Queue() |
176 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) | 209 deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) |
177 deps_p.start() | 210 deps_p.start() |
178 | 211 |
179 d, d_err = d_q.get() | 212 d, d_err = d_q.get() |
180 gn, gn_err = gn_q.get() | 213 gn, gn_err = gn_q.get() |
181 missing = d - gn | 214 missing = d - gn |
182 nonexisting = GetNonExistingFiles(gn) | 215 nonexisting = GetNonExistingFiles(gn) |
183 | 216 |
184 deps, deps_err = deps_q.get() | 217 deps, deps_err = deps_q.get() |
185 missing = FilterOutDepsedRepo(missing, deps) | 218 missing = FilterOutDepsedRepo(missing, deps) |
186 nonexisting = FilterOutDepsedRepo(nonexisting, deps) | 219 nonexisting = FilterOutDepsedRepo(nonexisting, deps) |
187 | 220 |
188 d_p.join() | 221 d_p.join() |
189 gn_p.join() | 222 gn_p.join() |
190 deps_p.join() | 223 deps_p.join() |
191 | 224 |
192 if d_err: | 225 if d_err: |
193 parser.error(d_err) | 226 PrintError(d_err) |
Dirk Pranke
2017/06/03 01:16:37
I'd pass the parser into PrintError; relying on th
wychen
2017/06/03 07:37:37
It requires more than just |parser|. It also needs
| |
194 if gn_err: | 227 if gn_err: |
195 parser.error(gn_err) | 228 PrintError(gn_err) |
196 if deps_err: | 229 if deps_err: |
197 parser.error(deps_err) | 230 PrintError(deps_err) |
198 if len(GetNonExistingFiles(d)) > 0: | 231 if len(GetNonExistingFiles(d)) > 0: |
199 parser.error('''Found non-existing files in ninja deps. You should | 232 print 'Non-existing files in ninja deps:', GetNonExistingFiles(d) |
200 build all in OUT_DIR.''') | 233 PrintError('Found non-existing files in ninja deps. You should ' + |
234 'build all in OUT_DIR.') | |
201 if len(d) == 0: | 235 if len(d) == 0: |
202 parser.error('OUT_DIR looks empty. You should build all there.') | 236 PrintError('OUT_DIR looks empty. You should build all there.') |
203 if any((('/gen/' in i) for i in nonexisting)): | 237 if any((('/gen/' in i) for i in nonexisting)): |
204 parser.error('OUT_DIR looks wrong. You should build all there.') | 238 PrintError('OUT_DIR looks wrong. You should build all there.') |
205 | 239 |
206 if args.whitelist: | 240 if args.whitelist: |
207 whitelist = ParseWhiteList(open(args.whitelist).read()) | 241 whitelist = ParseWhiteList(open(args.whitelist).read()) |
208 missing -= whitelist | 242 missing -= whitelist |
243 nonexisting -= whitelist | |
209 | 244 |
210 missing = sorted(missing) | 245 missing = sorted(missing) |
211 nonexisting = sorted(nonexisting) | 246 nonexisting = sorted(nonexisting) |
212 | 247 |
213 if args.json: | 248 DumpJson(sorted(missing + nonexisting)) |
214 with open(args.json, 'w') as f: | |
215 json.dump(missing, f) | |
216 | 249 |
217 if len(missing) == 0 and len(nonexisting) == 0: | 250 if len(missing) == 0 and len(nonexisting) == 0: |
218 return 0 | 251 return 0 |
219 | 252 |
220 if len(missing) > 0: | 253 if len(missing) > 0: |
221 print '\nThe following files should be included in gn files:' | 254 print '\nThe following files should be included in gn files:' |
222 for i in missing: | 255 for i in missing: |
223 print i | 256 print i |
224 | 257 |
225 if len(nonexisting) > 0: | 258 if len(nonexisting) > 0: |
226 print '\nThe following non-existing files should be removed from gn files:' | 259 print '\nThe following non-existing files should be removed from gn files:' |
227 for i in nonexisting: | 260 for i in nonexisting: |
228 print i | 261 print i |
229 | 262 |
230 return 1 | 263 return 1 |
231 | 264 |
232 | 265 |
233 if __name__ == '__main__': | 266 if __name__ == '__main__': |
234 sys.exit(main()) | 267 sys.exit(main()) |
OLD | NEW |