OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 import argparse | 6 import argparse |
7 import itertools | 7 import itertools |
8 import os | 8 import os |
9 import platform | 9 import platform |
10 import re | 10 import re |
11 import sys | 11 import sys |
12 import tempfile | 12 import tempfile |
13 import json | |
michaelpg
2017/07/19 18:51:59
nit: alphabetical
dpapad
2017/07/19 20:49:38
Done.
| |
14 import shutil | |
13 | 15 |
14 | 16 |
15 _HERE_PATH = os.path.dirname(__file__) | 17 _HERE_PATH = os.path.dirname(__file__) |
16 _SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..')) | 18 _SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..')) |
17 _CWD = os.getcwd() # NOTE(dbeam): this is typically out/<gn_name>/. | 19 _CWD = os.getcwd() # NOTE(dbeam): this is typically out/<gn_name>/. |
18 | 20 |
19 sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node')) | 21 sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node')) |
20 import node | 22 import node |
21 import node_modules | 23 import node_modules |
22 | 24 |
(...skipping 13 matching lines...) Expand all Loading... | |
36 _JS_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'js') | 38 _JS_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'js') |
37 | 39 |
38 | 40 |
39 _POLYMER_PATH = os.path.join( | 41 _POLYMER_PATH = os.path.join( |
40 _SRC_PATH, 'third_party', 'polymer', 'v1_0', 'components-chromium') | 42 _SRC_PATH, 'third_party', 'polymer', 'v1_0', 'components-chromium') |
41 | 43 |
42 | 44 |
43 _VULCANIZE_BASE_ARGS = [ | 45 _VULCANIZE_BASE_ARGS = [ |
44 # These files are already combined and minified. | 46 # These files are already combined and minified. |
45 '--exclude', 'chrome://resources/html/polymer.html', | 47 '--exclude', 'chrome://resources/html/polymer.html', |
46 '--exclude', 'web-animations-next-lite.min.js', | 48 '--exclude', 'chrome://resources/polymer/v1_0/polymer/polymer.html', |
49 '--exclude', 'chrome://resources/polymer/v1_0/polymer/polymer-micro.html', | |
50 '--exclude', 'chrome://resources/polymer/v1_0/polymer/polymer-mini.html', | |
51 '--exclude', 'chrome://resources/polymer/v1_0/web-animations-js/web-animations-next-lite.min.js', | |
47 | 52 |
48 # These files are dynamically created by C++. | 53 '--exclude', 'chrome://resources/css/roboto.css', |
49 '--exclude', 'load_time_data.js', | 54 '--exclude', 'chrome://resources/css/text_defaults.css', |
50 '--exclude', 'strings.js', | 55 '--exclude', 'chrome://resources/css/text_defaults_md.css', |
51 '--exclude', 'text_defaults.css', | 56 '--exclude', 'chrome://resources/js/load_time_data.js', |
52 '--exclude', 'text_defaults_md.css', | |
53 | 57 |
54 '--inline-css', | 58 '--inline-css', |
55 '--inline-scripts', | 59 '--inline-scripts', |
60 '--rewrite-urls-in-templates', | |
56 '--strip-comments', | 61 '--strip-comments', |
57 ] | 62 ] |
58 | 63 |
59 | 64 |
60 _URL_MAPPINGS = [ | 65 _URL_MAPPINGS = [ |
61 ('chrome://resources/cr_elements/', _CR_ELEMENTS_PATH), | 66 ('chrome://resources/cr_elements/', _CR_ELEMENTS_PATH), |
62 ('chrome://resources/css/', _CSS_RESOURCES_PATH), | 67 ('chrome://resources/css/', _CSS_RESOURCES_PATH), |
63 ('chrome://resources/html/', _HTML_RESOURCES_PATH), | 68 ('chrome://resources/html/', _HTML_RESOURCES_PATH), |
64 ('chrome://resources/js/', _JS_RESOURCES_PATH), | 69 ('chrome://resources/js/', _JS_RESOURCES_PATH), |
65 ('chrome://resources/polymer/v1_0/', _POLYMER_PATH) | 70 ('chrome://resources/polymer/v1_0/', _POLYMER_PATH) |
66 ] | 71 ] |
67 | 72 |
68 | 73 |
69 _VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable(map( | 74 _VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable(map( |
70 lambda m: ['--redirect', '"%s|%s"' % (m[0], m[1])], _URL_MAPPINGS))) | 75 lambda m: ['--redirect', '"%s|%s"' % (m[0], m[1])], _URL_MAPPINGS))) |
71 | 76 |
72 | 77 |
73 def _undo_mapping(mappings, url): | 78 def _undo_mapping(mappings, url): |
74 for (redirect_url, file_path) in mappings: | 79 for (redirect_url, file_path) in mappings: |
75 if url.startswith(redirect_url): | 80 if url.startswith(redirect_url): |
76 return url.replace(redirect_url, file_path + os.sep) | 81 return url.replace(redirect_url, file_path + os.sep) |
77 # TODO(dbeam): can we make this stricter? | 82 # TODO(dbeam): can we make this stricter? |
78 return url | 83 return url |
79 | 84 |
80 def _request_list_path(out_path, html_out_file): | 85 def _request_list_path(out_path, host): |
81 return os.path.join(out_path, html_out_file + '_requestlist.txt') | 86 return os.path.join(out_path, host + '_requestlist.txt') |
82 | 87 |
83 # Get a list of all files that were bundled with Vulcanize and update the | 88 # Get a list of all files that were bundled with polymer-bundler and update the |
84 # depfile accordingly such that Ninja knows when to trigger re-vulcanization. | 89 # depfile accordingly such that Ninja knows when to re-trigger. |
85 def _update_dep_file(in_folder, args): | 90 def _update_dep_file(in_folder, args, manifest): |
86 in_path = os.path.join(_CWD, in_folder) | 91 in_path = os.path.join(_CWD, in_folder) |
87 out_path = os.path.join(_CWD, args.out_folder) | |
88 | 92 |
89 # Prior call to vulcanize already generated the deps list, grab it from there. | 93 # Gather the dependencies of all bundled root HTML files. |
90 request_list_path = _request_list_path(out_path, args.html_out_file) | 94 request_list = [] |
91 request_list = open(request_list_path, 'r').read().splitlines() | 95 for html_file in manifest: |
96 request_list += manifest[html_file] | |
92 | 97 |
93 if platform.system() == 'Windows': | 98 # Add a slash in front of every dependency that is not a chrome:// URL, so |
94 # TODO(dbeam): UGH. For some reason Vulcanize is interpreting the target | 99 # that we can map it to the correct source file path below. |
95 # file path as a URL and using the drive letter (e.g. D:\) as a protocol. | 100 request_list = map( |
96 # This is a little insane, but we're fixing here by normalizing case (which | 101 lambda dep: '/' + dep if not dep.startswith('chrome://') else dep, |
97 # really shouldn't matter, these are all file paths and generally are all | 102 request_list) |
98 # lower case) and writing from / to \ (file path) and then back again. This | |
99 # is compounded by NodeJS having a bug in url.resolve() that handles | |
100 # chrome:// protocol URLs poorly as well as us using startswith() to strip | |
101 # file paths (which isn't crazy awesome either). Don't remove unless you | |
102 # really really know what you're doing. | |
103 norm = lambda u: u.lower().replace('/', '\\') | |
104 request_list = [norm(u).replace(norm(in_path), '').replace('\\', '/') | |
105 for u in request_list] | |
106 | 103 |
107 # Undo the URL mappings applied by vulcanize to get file paths relative to | 104 # Undo the URL mappings applied by vulcanize to get file paths relative to |
108 # current working directory. | 105 # current working directory. |
109 url_mappings = _URL_MAPPINGS + [ | 106 url_mappings = _URL_MAPPINGS + [ |
110 ('/', os.path.relpath(in_path, _CWD)), | 107 ('/', os.path.relpath(in_path, _CWD)), |
111 ('chrome://%s/' % args.host, os.path.relpath(in_path, _CWD)), | 108 ('chrome://%s/' % args.host, os.path.relpath(in_path, _CWD)), |
112 ] | 109 ] |
113 | 110 |
114 deps = [_undo_mapping(url_mappings, u) for u in request_list] | 111 deps = [_undo_mapping(url_mappings, u) for u in request_list] |
115 deps = map(os.path.normpath, deps) | 112 deps = map(os.path.normpath, deps) |
116 | 113 |
117 # If the input was a folder holding an unpacked .pak file, the generated | 114 # If the input was a folder holding an unpacked .pak file, the generated |
118 # depfile should not list files already in the .pak file. | 115 # depfile should not list files already in the .pak file. |
119 if args.input.endswith('.unpak'): | 116 if args.input.endswith('.unpak'): |
120 filter_url = args.input | 117 filter_url = args.input |
121 deps = [d for d in deps if not d.startswith(filter_url)] | 118 deps = [d for d in deps if not d.startswith(filter_url)] |
122 | 119 |
123 with open(os.path.join(_CWD, args.depfile), 'w') as f: | 120 with open(os.path.join(_CWD, args.depfile), 'w') as f: |
124 deps_file_header = os.path.join(args.out_folder, args.html_out_file) | 121 deps_file_header = os.path.join(args.out_folder, args.html_out_files[0]) |
125 f.write(deps_file_header + ': ' + ' '.join(deps)) | 122 f.write(deps_file_header + ': ' + ' '.join(deps)) |
126 | 123 |
127 | 124 |
128 def _vulcanize(in_folder, args): | 125 def _vulcanize(in_folder, args): |
129 in_path = os.path.normpath(os.path.join(_CWD, in_folder)) | 126 in_path = os.path.normpath(os.path.join(_CWD, in_folder)) |
130 out_path = os.path.join(_CWD, args.out_folder) | 127 out_path = os.path.join(_CWD, args.out_folder) |
131 | 128 manifest_out_path = _request_list_path(out_path, args.host) |
132 html_out_path = os.path.join(out_path, args.html_out_file) | |
133 js_out_path = os.path.join(out_path, args.js_out_file) | |
134 | 129 |
135 exclude_args = [] | 130 exclude_args = [] |
136 for f in args.exclude or []: | 131 for f in args.exclude or []: |
137 exclude_args.append('--exclude') | 132 exclude_args.append('--exclude') |
138 exclude_args.append(f) | 133 exclude_args.append(f) |
139 | 134 |
140 output = node.RunNode( | 135 in_html_args = [] |
141 [node_modules.PathToVulcanize()] + | 136 for f in args.html_in_files: |
137 in_html_args.append('--in-html') | |
138 in_html_args.append(f) | |
139 | |
140 tmp_out_dir = os.path.join(out_path, 'bundled') | |
141 node.RunNode( | |
142 [node_modules.PathToBundler()] + | |
142 _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args + | 143 _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args + |
143 ['--out-request-list', _request_list_path(out_path, args.html_out_file), | 144 [# This file is dynamically created by C++. Need to specify an exclusion |
calamity
2017/07/19 06:15:33
nit: exclusion
dpapad
2017/07/19 20:49:38
Done.
| |
144 '--redirect', '"/|%s"' % in_path, | 145 # URL for both the relative URL and chrome:// URL syntax. |
146 '--exclude', 'strings.js', | |
147 '--exclude', 'chrome://%s/strings.js' % args.host, | |
148 | |
149 '--manifest-out', manifest_out_path, | |
150 '--root', in_path, | |
145 '--redirect', '"chrome://%s/|%s"' % (args.host, in_path), | 151 '--redirect', '"chrome://%s/|%s"' % (args.host, in_path), |
146 # TODO(dpapad): Figure out why vulcanize treats the input path | 152 '--out-dir', os.path.relpath(tmp_out_dir, _CWD), |
147 # differently on Windows VS Linux/Mac. | 153 '--shell', args.html_in_files[0], |
148 os.path.join( | 154 ] + in_html_args) |
149 in_path if platform.system() == 'Windows' else os.sep, | |
150 args.html_in_file)]) | |
151 | 155 |
152 # Grit includes are not supported, use HTML imports instead. | 156 for index, html_file in enumerate(args.html_in_files): |
153 output = output.replace('<include src="', '<include src-disabled="') | 157 with open( |
158 os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file), 'r') as f: | |
159 output = f.read() | |
154 | 160 |
155 if args.insert_in_head: | 161 # Grit includes are not supported, use HTML imports instead. |
156 assert '<head>' in output | 162 output = output.replace('<include src="', '<include src-disabled="') |
157 # NOTE(dbeam): Vulcanize eats <base> tags after processing. This undoes | |
158 # that by adding a <base> tag to the (post-processed) generated output. | |
159 output = output.replace('<head>', '<head>' + args.insert_in_head) | |
160 | 163 |
161 crisper_input = tempfile.NamedTemporaryFile(mode='wt+', delete=False) | 164 if args.insert_in_head: |
162 crisper_input.write(output) | 165 assert '<head>' in output |
163 crisper_input.close() | 166 # NOTE(dbeam): polymer-bundler eats <base> tags after processing. This |
167 # undoes that by adding a <base> tag to the (post-processed) generated | |
168 # output. | |
169 output = output.replace('<head>', '<head>' + args.insert_in_head) | |
164 | 170 |
165 crisper_output = tempfile.NamedTemporaryFile(mode='wt+', delete=False) | 171 # Open file again with 'w' such that the previous contents are overwritten. |
166 crisper_output.close() | 172 with open( |
173 os.path.join(os.path.relpath(tmp_out_dir, _CWD), html_file), 'w') as f: | |
174 f.write(output) | |
175 f.close() | |
167 | 176 |
168 try: | 177 try: |
169 node.RunNode([node_modules.PathToCrisper(), | 178 for index, html_in_file in enumerate(args.html_in_files): |
170 '--source', crisper_input.name, | 179 html_out_file = args.html_out_files[index] |
171 '--script-in-head', 'false', | 180 js_out_file = args.js_out_files[index] |
172 '--only-split', | |
173 '--html', html_out_path, | |
174 '--js', crisper_output.name]) | |
175 | 181 |
176 # Crisper by default inserts a <script> tag with the name of the --js file, | 182 # Run crisper to separate the JS from the HTML file. |
177 # but since we are using a temporary file, need to manually insert a | 183 node.RunNode([node_modules.PathToCrisper(), |
178 # <script> tag with the correct final filename (in combination with | 184 '--source', os.path.join(tmp_out_dir, html_in_file), |
179 # --only-split flag). There is no way currently to manually specify the | 185 '--script-in-head', 'false', |
180 # <script> tag's path, see https://github.com/PolymerLabs/crisper/issues/46. | 186 '--html', os.path.join(tmp_out_dir, html_out_file), |
181 with open(html_out_path, 'r+') as f: | 187 '--js', os.path.join(tmp_out_dir, js_out_file)]) |
calamity
2017/07/19 06:15:33
If we move the polymer_css_build step into this sc
dpapad
2017/07/19 20:49:38
Acknowledged. As said in previous comment, prefer
| |
182 data = f.read() | |
183 new_data = data.replace( | |
184 '</body></html>', | |
185 '<script src="' + args.js_out_file + '"></script></body></html>') | |
186 assert new_data != data, 'Expected to find </body></html> token.' | |
187 f.seek(0) | |
188 f.write(new_data) | |
189 f.truncate() | |
190 | 188 |
191 node.RunNode([node_modules.PathToUglify(), crisper_output.name, | 189 # Move the HTML file to its final destination. |
192 '--comments', '"/Copyright|license|LICENSE|\<\/?if/"', | 190 shutil.copy(os.path.join(tmp_out_dir, html_out_file), out_path) |
193 '--output', js_out_path]) | 191 |
192 # Pass the JS file through Uglify and write the output to its final | |
193 # destination. | |
194 node.RunNode([node_modules.PathToUglify(), | |
195 os.path.join(tmp_out_dir, js_out_file), | |
196 '--comments', '"/Copyright|license|LICENSE|\<\/?if/"', | |
197 '--output', os.path.join(out_path, js_out_file)]) | |
194 finally: | 198 finally: |
195 if os.path.exists(crisper_input.name): | 199 shutil.rmtree(tmp_out_dir) |
calamity
2017/07/19 06:15:33
If any of the commands here fail, will the build s
michaelpg
2017/07/19 18:51:59
Is it worth leaving the output files around (inste
dpapad
2017/07/19 20:49:38
The build still fails if any command fails.
If yo
michaelpg
2017/07/21 00:57:11
d'oh, yeah I saw "tmp" and thought it was a unique
| |
196 os.remove(crisper_input.name) | 200 return manifest_out_path |
197 if os.path.exists(crisper_output.name): | |
198 os.remove(crisper_output.name) | |
199 | 201 |
200 | 202 |
201 def main(argv): | 203 def main(argv): |
202 parser = argparse.ArgumentParser() | 204 parser = argparse.ArgumentParser() |
203 parser.add_argument('--depfile', required=True) | 205 parser.add_argument('--depfile', required=True) |
204 parser.add_argument('--exclude', nargs='*') | 206 parser.add_argument('--exclude', nargs='*') |
205 parser.add_argument('--host', required=True) | 207 parser.add_argument('--host', required=True) |
206 parser.add_argument('--html_in_file', required=True) | 208 parser.add_argument('--html_in_files', nargs='*', required=True) |
207 parser.add_argument('--html_out_file', required=True) | 209 parser.add_argument('--html_out_files', nargs='*', required=True) |
208 parser.add_argument('--input', required=True) | 210 parser.add_argument('--input', required=True) |
209 parser.add_argument('--insert_in_head') | 211 parser.add_argument('--insert_in_head') |
210 parser.add_argument('--js_out_file', required=True) | 212 parser.add_argument('--js_out_files', nargs='*', required=True) |
211 parser.add_argument('--out_folder', required=True) | 213 parser.add_argument('--out_folder', required=True) |
212 args = parser.parse_args(argv) | 214 args = parser.parse_args(argv) |
213 | 215 |
214 # NOTE(dbeam): on Windows, GN can send dirs/like/this. When joined, you might | 216 # NOTE(dbeam): on Windows, GN can send dirs/like/this. When joined, you might |
215 # get dirs/like/this\file.txt. This looks odd to windows. Normalize to right | 217 # get dirs/like/this\file.txt. This looks odd to windows. Normalize to right |
216 # the slashes. | 218 # the slashes. |
217 args.depfile = os.path.normpath(args.depfile) | 219 args.depfile = os.path.normpath(args.depfile) |
218 args.input = os.path.normpath(args.input) | 220 args.input = os.path.normpath(args.input) |
219 args.out_folder = os.path.normpath(args.out_folder) | 221 args.out_folder = os.path.normpath(args.out_folder) |
220 | 222 |
221 _vulcanize(args.input, args) | 223 manifest_out_path = _vulcanize(args.input, args) |
222 _update_dep_file(args.input, args) | 224 |
225 # Prior call to _vulcanize() generated an output manifest file, containing | |
226 # information about all files that were bundled. Grab it from there. | |
227 manifest = json.loads(open(manifest_out_path, 'r').read()) | |
228 | |
229 # polymer-bundler reports any missing files in the output manifest, instead of | |
230 # directly failing. Ensure that no such files were encountered. | |
231 if '_missing' in manifest: | |
232 raise Exception( | |
233 'polymer-bundler could not find files for the following URLs:\n' + | |
234 '\n'.join(manifest['_missing'])) | |
235 | |
236 _update_dep_file(args.input, args, manifest) | |
223 | 237 |
224 | 238 |
225 if __name__ == '__main__': | 239 if __name__ == '__main__': |
226 main(sys.argv[1:]) | 240 main(sys.argv[1:]) |
OLD | NEW |