OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 | |
6 '''The 'grit build' tool along with integration for this tool with the | |
7 SCons build system. | |
8 ''' | |
9 | |
10 import codecs | |
11 import filecmp | |
12 import getopt | |
13 import os | |
14 import shutil | |
15 import sys | |
16 | |
17 from grit import grd_reader | |
18 from grit import util | |
19 from grit.tool import interface | |
20 from grit import shortcuts | |
21 | |
22 | |
23 # It would be cleaner to have each module register itself, but that would | |
24 # require importing all of them on every run of GRIT. | |
25 '''Map from <output> node types to modules under grit.format.''' | |
26 _format_modules = { | |
27 'android': 'android_xml', | |
28 'c_format': 'c_format', | |
29 'chrome_messages_json': 'chrome_messages_json', | |
30 'data_package': 'data_pack', | |
31 'js_map_format': 'js_map_format', | |
32 'rc_all': 'rc', | |
33 'rc_translateable': 'rc', | |
34 'rc_nontranslateable': 'rc', | |
35 'rc_header': 'rc_header', | |
36 'resource_map_header': 'resource_map', | |
37 'resource_map_source': 'resource_map', | |
38 'resource_file_map_source': 'resource_map', | |
39 } | |
40 _format_modules.update( | |
41 (type, 'policy_templates.template_formatter') for type in | |
42 [ 'adm', 'admx', 'adml', 'reg', 'doc', 'json', | |
43 'plist', 'plist_strings', 'ios_plist', 'android_policy' ]) | |
44 | |
45 | |
def GetFormatter(type):
  '''Returns the formatter callable for the given &lt;output&gt; node type.

  Looks up the module under grit.format that handles |type|, imports it, and
  returns either its module-level Format callable or, for modules that expose
  a factory instead, the result of its GetFormatter(type).
  '''
  modulename = 'grit.format.' + _format_modules[type]
  __import__(modulename)
  module = sys.modules[modulename]
  # Prefer the module-level Format attribute; fall back to the per-type
  # factory for modules that dispatch on the type themselves.
  missing = object()
  formatter = getattr(module, 'Format', missing)
  if formatter is missing:
    return module.GetFormatter(type)
  return formatter
54 | |
55 | |
class RcBuilder(interface.Tool):
  '''A tool that builds RC files and resource header files for compilation.

Usage:  grit build [-o OUTPUTDIR] [-D NAME[=VAL]]*

All output options for this tool are specified in the input file (see
'grit help' for details on how to specify the input file - it is a global
option).

Options:

  -a FILE           Assert that the given file is an output. There can be
                    multiple "-a" flags listed for multiple outputs. If a "-a"
                    or "--assert-file-list" argument is present, then the list
                    of asserted files must match the output files or the tool
                    will fail. The use-case is for the build system to maintain
                    separate lists of output files and to catch errors if the
                    build system's list and the grit list are out-of-sync.

  --assert-file-list  Provide a file listing multiple asserted output files.
                    There is one file name per line. This acts like specifying
                    each file with "-a" on the command line, but without the
                    possibility of running into OS line-length limits for very
                    long lists.

  -o OUTPUTDIR      Specify what directory output paths are relative to.
                    Defaults to the current directory.

  -D NAME[=VAL]     Specify a C-preprocessor-like define NAME with optional
                    value VAL (defaults to 1) which will be used to control
                    conditional inclusion of resources.

  -E NAME=VALUE     Set environment variable NAME to VALUE (within grit).

  -f FIRSTIDSFILE   Path to a python file that specifies the first id of
                    value to use for resources.  A non-empty value here will
                    override the value specified in the <grit> node's
                    first_ids_file.

  -w WHITELISTFILE  Path to a file containing the string names of the
                    resources to include.  Anything not listed is dropped.

  -t PLATFORM       Specifies the platform the build is targeting; defaults
                    to the value of sys.platform. The value provided via this
                    flag should match what sys.platform would report for your
                    target platform; see grit.node.base.EvaluateCondition.

  -h HEADERFORMAT   Custom format string to use for generating rc header files.
                    The string should have two placeholders: {textual_id}
                    and {numeric_id}. E.g. "#define {textual_id} {numeric_id}"
                    Otherwise it will use the default "#define SYMBOL 1234"

  --output-all-resource-defines
  --no-output-all-resource-defines  If specified, overrides the value of the
                    output_all_resource_defines attribute of the root <grit>
                    element of the input .grd file.

  --write-only-new flag
                    If flag is non-0, write output files to a temporary file
                    first, and copy it to the real output only if the new file
                    is different from the old file.  This allows some build
                    systems to realize that dependent build steps might be
                    unnecessary, at the cost of comparing the output data at
                    grit time.

  --depend-on-stamp
                    If specified along with --depfile and --depdir, the depfile
                    generated will depend on a stampfile instead of the first
                    output in the input .grd file.

Conditional inclusion of resources only affects the output of files which
control which resources get linked into a binary, e.g. it affects .rc files
meant for compilation but it does not affect resource header files (that define
IDs).  This helps ensure that values of IDs stay the same, that all messages
are exported to translation interchange files (e.g. XMB files), etc.
'''

  def ShortDescription(self):
    # One-line summary shown by the 'grit help' tool listing.
    return 'A tool that builds RC files for compilation.'
135 | |
  def Run(self, opts, args):
    '''Entry point for 'grit build'.

    Parses tool-specific command-line options from |args|, loads and gathers
    the input .grd named by the global options |opts|, writes all outputs via
    Process(), then optionally validates the asserted output-file list and
    emits a depfile.

    Args:
      opts: global grit options (provides .input and .extra_verbose).
      args: list of tool-specific command-line arguments.

    Returns:
      0 on success, 2 on usage errors or an asserted-file-list mismatch.
    '''
    self.output_directory = '.'
    first_ids_file = None
    whitelist_filenames = []
    assert_output_files = []
    target_platform = None
    depfile = None
    depdir = None
    rc_header_format = None
    output_all_resource_defines = None
    write_only_new = False
    depend_on_stamp = False
    (own_opts, args) = getopt.getopt(args, 'a:o:D:E:f:w:t:h:',
        ('depdir=','depfile=','assert-file-list=',
         'output-all-resource-defines',
         'no-output-all-resource-defines',
         'depend-on-stamp',
         'write-only-new='))
    for (key, val) in own_opts:
      if key == '-a':
        assert_output_files.append(val)
      elif key == '--assert-file-list':
        # Same as repeated -a, but read the asserted names from a file to
        # avoid OS command-line length limits.
        with open(val) as f:
          assert_output_files += f.read().splitlines()
      elif key == '-o':
        self.output_directory = val
      elif key == '-D':
        name, val = util.ParseDefine(val)
        self.defines[name] = val
      elif key == '-E':
        (env_name, env_value) = val.split('=', 1)
        os.environ[env_name] = env_value
      elif key == '-f':
        # TODO(joi@chromium.org): Remove this override once change
        # lands in WebKit.grd to specify the first_ids_file in the
        # .grd itself.
        first_ids_file = val
      elif key == '-w':
        whitelist_filenames.append(val)
      elif key == '--output-all-resource-defines':
        output_all_resource_defines = True
      elif key == '--no-output-all-resource-defines':
        output_all_resource_defines = False
      elif key == '-t':
        target_platform = val
      elif key == '-h':
        rc_header_format = val
      elif key == '--depdir':
        depdir = val
      elif key == '--depfile':
        depfile = val
      elif key == '--write-only-new':
        # Any value other than the literal string '0' enables the behavior.
        write_only_new = val != '0'
      elif key == '--depend-on-stamp':
        depend_on_stamp = True

    if len(args):
      print 'This tool takes no tool-specific arguments.'
      return 2
    self.SetOptions(opts)
    if self.scons_targets:
      self.VerboseOut('Using SCons targets to identify files to output.\n')
    else:
      self.VerboseOut('Output directory: %s (absolute path: %s)\n' %
                      (self.output_directory,
                       os.path.abspath(self.output_directory)))

    # Union the names from all whitelist files into a single set.
    if whitelist_filenames:
      self.whitelist_names = set()
      for whitelist_filename in whitelist_filenames:
        self.VerboseOut('Using whitelist: %s\n' % whitelist_filename);
        whitelist_contents = util.ReadFile(whitelist_filename, util.RAW_TEXT)
        self.whitelist_names.update(whitelist_contents.strip().split('\n'))

    self.write_only_new = write_only_new

    self.res = grd_reader.Parse(opts.input,
                                debug=opts.extra_verbose,
                                first_ids_file=first_ids_file,
                                defines=self.defines,
                                target_platform=target_platform)

    # If the output_all_resource_defines option is specified, override the value
    # found in the grd file.
    if output_all_resource_defines is not None:
      self.res.SetShouldOutputAllResourceDefines(output_all_resource_defines)

    # Set an output context so that conditionals can use defines during the
    # gathering stage; we use a dummy language here since we are not outputting
    # a specific language.
    self.res.SetOutputLanguage('en')
    if rc_header_format:
      self.res.AssignRcHeaderFormat(rc_header_format)
    self.res.RunGatherers()
    self.Process()

    if assert_output_files:
      if not self.CheckAssertedOutputFiles(assert_output_files):
        return 2

    if depfile and depdir:
      self.GenerateDepfile(depfile, depdir, first_ids_file, depend_on_stamp)

    return 0
240 | |
241 def __init__(self, defines=None): | |
242 # Default file-creation function is codecs.open(). Only done to allow | |
243 # overriding by unit test. | |
244 self.fo_create = codecs.open | |
245 | |
246 # key/value pairs of C-preprocessor like defines that are used for | |
247 # conditional output of resources | |
248 self.defines = defines or {} | |
249 | |
250 # self.res is a fully-populated resource tree if Run() | |
251 # has been called, otherwise None. | |
252 self.res = None | |
253 | |
254 # Set to a list of filenames for the output nodes that are relative | |
255 # to the current working directory. They are in the same order as the | |
256 # output nodes in the file. | |
257 self.scons_targets = None | |
258 | |
259 # The set of names that are whitelisted to actually be included in the | |
260 # output. | |
261 self.whitelist_names = None | |
262 | |
263 # Whether to compare outputs to their old contents before writing. | |
264 self.write_only_new = False | |
265 | |
266 @staticmethod | |
267 def AddWhitelistTags(start_node, whitelist_names): | |
268 # Walk the tree of nodes added attributes for the nodes that shouldn't | |
269 # be written into the target files (skip markers). | |
270 from grit.node import include | |
271 from grit.node import message | |
272 from grit.node import structure | |
273 for node in start_node: | |
274 # Same trick data_pack.py uses to see what nodes actually result in | |
275 # real items. | |
276 if (isinstance(node, include.IncludeNode) or | |
277 isinstance(node, message.MessageNode) or | |
278 isinstance(node, structure.StructureNode)): | |
279 text_ids = node.GetTextualIds() | |
280 # Mark the item to be skipped if it wasn't in the whitelist. | |
281 if text_ids and text_ids[0] not in whitelist_names: | |
282 node.SetWhitelistMarkedAsSkip(True) | |
283 | |
284 @staticmethod | |
285 def ProcessNode(node, output_node, outfile): | |
286 '''Processes a node in-order, calling its formatter before and after | |
287 recursing to its children. | |
288 | |
289 Args: | |
290 node: grit.node.base.Node subclass | |
291 output_node: grit.node.io.OutputNode | |
292 outfile: open filehandle | |
293 ''' | |
294 base_dir = util.dirname(output_node.GetOutputFilename()) | |
295 | |
296 formatter = GetFormatter(output_node.GetType()) | |
297 formatted = formatter(node, output_node.GetLanguage(), output_dir=base_dir) | |
298 outfile.writelines(formatted) | |
299 | |
300 | |
301 def Process(self): | |
302 # Update filenames with those provided by SCons if we're being invoked | |
303 # from SCons. The list of SCons targets also includes all <structure> | |
304 # node outputs, but it starts with our output files, in the order they | |
305 # occur in the .grd | |
306 if self.scons_targets: | |
307 assert len(self.scons_targets) >= len(self.res.GetOutputFiles()) | |
308 outfiles = self.res.GetOutputFiles() | |
309 for ix in range(len(outfiles)): | |
310 outfiles[ix].output_filename = os.path.abspath( | |
311 self.scons_targets[ix]) | |
312 else: | |
313 for output in self.res.GetOutputFiles(): | |
314 output.output_filename = os.path.abspath(os.path.join( | |
315 self.output_directory, output.GetFilename())) | |
316 | |
317 # If there are whitelisted names, tag the tree once up front, this way | |
318 # while looping through the actual output, it is just an attribute check. | |
319 if self.whitelist_names: | |
320 self.AddWhitelistTags(self.res, self.whitelist_names) | |
321 | |
322 for output in self.res.GetOutputFiles(): | |
323 self.VerboseOut('Creating %s...' % output.GetFilename()) | |
324 | |
325 # Microsoft's RC compiler can only deal with single-byte or double-byte | |
326 # files (no UTF-8), so we make all RC files UTF-16 to support all | |
327 # character sets. | |
328 if output.GetType() in ('rc_header', 'resource_map_header', | |
329 'resource_map_source', 'resource_file_map_source'): | |
330 encoding = 'cp1252' | |
331 elif output.GetType() in ('android', 'c_format', 'js_map_format', 'plist', | |
332 'plist_strings', 'doc', 'json', 'android_policy'
): | |
333 encoding = 'utf_8' | |
334 elif output.GetType() in ('chrome_messages_json'): | |
335 # Chrome Web Store currently expects BOM for UTF-8 files :-( | |
336 encoding = 'utf-8-sig' | |
337 else: | |
338 # TODO(gfeher) modify here to set utf-8 encoding for admx/adml | |
339 encoding = 'utf_16' | |
340 | |
341 # Set the context, for conditional inclusion of resources | |
342 self.res.SetOutputLanguage(output.GetLanguage()) | |
343 self.res.SetOutputContext(output.GetContext()) | |
344 self.res.SetFallbackToDefaultLayout(output.GetFallbackToDefaultLayout()) | |
345 self.res.SetDefines(self.defines) | |
346 | |
347 # Make the output directory if it doesn't exist. | |
348 self.MakeDirectoriesTo(output.GetOutputFilename()) | |
349 | |
350 # Write the results to a temporary file and only overwrite the original | |
351 # if the file changed. This avoids unnecessary rebuilds. | |
352 outfile = self.fo_create(output.GetOutputFilename() + '.tmp', 'wb') | |
353 | |
354 if output.GetType() != 'data_package': | |
355 outfile = util.WrapOutputStream(outfile, encoding) | |
356 | |
357 # Iterate in-order through entire resource tree, calling formatters on | |
358 # the entry into a node and on exit out of it. | |
359 with outfile: | |
360 self.ProcessNode(self.res, output, outfile) | |
361 | |
362 # Now copy from the temp file back to the real output, but on Windows, | |
363 # only if the real output doesn't exist or the contents of the file | |
364 # changed. This prevents identical headers from being written and .cc | |
365 # files from recompiling (which is painful on Windows). | |
366 if not os.path.exists(output.GetOutputFilename()): | |
367 os.rename(output.GetOutputFilename() + '.tmp', | |
368 output.GetOutputFilename()) | |
369 else: | |
370 # CHROMIUM SPECIFIC CHANGE. | |
371 # This clashes with gyp + vstudio, which expect the output timestamp | |
372 # to change on a rebuild, even if nothing has changed, so only do | |
373 # it when opted in. | |
374 if not self.write_only_new: | |
375 write_file = True | |
376 else: | |
377 files_match = filecmp.cmp(output.GetOutputFilename(), | |
378 output.GetOutputFilename() + '.tmp') | |
379 write_file = not files_match | |
380 if write_file: | |
381 shutil.copy2(output.GetOutputFilename() + '.tmp', | |
382 output.GetOutputFilename()) | |
383 os.remove(output.GetOutputFilename() + '.tmp') | |
384 | |
385 self.VerboseOut(' done.\n') | |
386 | |
387 # Print warnings if there are any duplicate shortcuts. | |
388 warnings = shortcuts.GenerateDuplicateShortcutsWarnings( | |
389 self.res.UberClique(), self.res.GetTcProject()) | |
390 if warnings: | |
391 print '\n'.join(warnings) | |
392 | |
393 # Print out any fallback warnings, and missing translation errors, and | |
394 # exit with an error code if there are missing translations in a non-pseudo | |
395 # and non-official build. | |
396 warnings = (self.res.UberClique().MissingTranslationsReport(). | |
397 encode('ascii', 'replace')) | |
398 if warnings: | |
399 self.VerboseOut(warnings) | |
400 if self.res.UberClique().HasMissingTranslations(): | |
401 print self.res.UberClique().missing_translations_ | |
402 sys.exit(-1) | |
403 | |
404 | |
405 def CheckAssertedOutputFiles(self, assert_output_files): | |
406 '''Checks that the asserted output files are specified in the given list. | |
407 | |
408 Returns true if the asserted files are present. If they are not, returns | |
409 False and prints the failure. | |
410 ''' | |
411 # Compare the absolute path names, sorted. | |
412 asserted = sorted([os.path.abspath(i) for i in assert_output_files]) | |
413 actual = sorted([ | |
414 os.path.abspath(os.path.join(self.output_directory, i.GetFilename())) | |
415 for i in self.res.GetOutputFiles()]) | |
416 | |
417 if asserted != actual: | |
418 missing = list(set(actual) - set(asserted)) | |
419 extra = list(set(asserted) - set(actual)) | |
420 error = '''Asserted file list does not match. | |
421 | |
422 Expected output files: | |
423 %s | |
424 Actual output files: | |
425 %s | |
426 Missing output files: | |
427 %s | |
428 Extra output files: | |
429 %s | |
430 ''' | |
431 print error % ('\n'.join(asserted), '\n'.join(actual), '\n'.join(missing), | |
432 '\n'.join(extra)) | |
433 return False | |
434 return True | |
435 | |
436 | |
437 def GenerateDepfile(self, depfile, depdir, first_ids_file, depend_on_stamp): | |
438 '''Generate a depfile that contains the imlicit dependencies of the input | |
439 grd. The depfile will be in the same format as a makefile, and will contain | |
440 references to files relative to |depdir|. It will be put in |depfile|. | |
441 | |
442 For example, supposing we have three files in a directory src/ | |
443 | |
444 src/ | |
445 blah.grd <- depends on input{1,2}.xtb | |
446 input1.xtb | |
447 input2.xtb | |
448 | |
449 and we run | |
450 | |
451 grit -i blah.grd -o ../out/gen --depdir ../out --depfile ../out/gen/blah.r
d.d | |
452 | |
453 from the directory src/ we will generate a depfile ../out/gen/blah.grd.d | |
454 that has the contents | |
455 | |
456 gen/blah.h: ../src/input1.xtb ../src/input2.xtb | |
457 | |
458 Where "gen/blah.h" is the first output (Ninja expects the .d file to list | |
459 the first output in cases where there is more than one). If the flag | |
460 --depend-on-stamp is specified, "gen/blah.rd.d.stamp" will be used that is | |
461 'touched' whenever a new depfile is generated. | |
462 | |
463 Note that all paths in the depfile are relative to ../out, the depdir. | |
464 ''' | |
465 depfile = os.path.abspath(depfile) | |
466 depdir = os.path.abspath(depdir) | |
467 infiles = self.res.GetInputFiles() | |
468 | |
469 # We want to trigger a rebuild if the first ids change. | |
470 if first_ids_file is not None: | |
471 infiles.append(first_ids_file) | |
472 | |
473 if (depend_on_stamp): | |
474 output_file = depfile + ".stamp" | |
475 # Touch the stamp file before generating the depfile. | |
476 with open(output_file, 'a'): | |
477 os.utime(output_file, None) | |
478 else: | |
479 # Get the first output file relative to the depdir. | |
480 outputs = self.res.GetOutputFiles() | |
481 output_file = os.path.join(self.output_directory, | |
482 outputs[0].GetFilename()) | |
483 | |
484 output_file = os.path.relpath(output_file, depdir) | |
485 # The path prefix to prepend to dependencies in the depfile. | |
486 prefix = os.path.relpath(os.getcwd(), depdir) | |
487 deps_text = ' '.join([os.path.join(prefix, i) for i in infiles]) | |
488 | |
489 depfile_contents = output_file + ': ' + deps_text | |
490 self.MakeDirectoriesTo(depfile) | |
491 outfile = self.fo_create(depfile, 'w', encoding='utf-8') | |
492 outfile.writelines(depfile_contents) | |
493 | |
494 @staticmethod | |
495 def MakeDirectoriesTo(file): | |
496 '''Creates directories necessary to contain |file|.''' | |
497 dir = os.path.split(file)[0] | |
498 if not os.path.exists(dir): | |
499 os.makedirs(dir) | |
OLD | NEW |