OLD | NEW |
---|---|
(Empty) | |
1 #!/usr/bin/env python | |
2 # Copyright 2016 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 | |
6 import argparse | |
7 import itertools | |
8 import os | |
9 import re | |
10 import subprocess | |
11 import sys | |
12 import tempfile | |
13 | |
14 _HERE_PATH = os.path.join(os.path.dirname(__file__)) | |
15 _SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..')) | |
16 | |
17 sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node')) | |
18 import node | |
19 import node_modules | |
20 | |
21 _NODE_BINARY = node.GetBinaryPath() | |
22 | |
23 | |
24 _RESOURCES_PATH = os.path.join(_SRC_PATH, 'ui', 'webui', 'resources') | |
25 | |
Dan Beam
2017/01/20 23:49:30
i think we could do one of two things:
a) add one
dpapad
2017/01/21 02:32:15
Added one more line.
| |
26 _CR_ELEMENTS_PATH = os.path.join(_RESOURCES_PATH, 'cr_elements') | |
27 | |
28 | |
29 _CSS_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'css') | |
30 | |
31 | |
32 _HTML_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'html') | |
33 | |
34 | |
35 _JS_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'js') | |
36 | |
37 | |
38 _POLYMER_PATH = os.path.join( | |
39 _SRC_PATH, 'third_party', 'polymer', 'v1_0', 'components-chromium') | |
40 | |
41 | |
42 _VULCANIZE_BASE_ARGS = [ | |
43 '--exclude', 'crisper.js', | |
44 | |
45 # These files are already combined and minified. | |
46 '--exclude', 'chrome://resources/html/polymer.html', | |
47 '--exclude', 'web-animations-next-lite.min.js', | |
48 | |
49 # These files are dynamically created by C++. | |
50 '--exclude', 'load_time_data.js', | |
51 '--exclude', 'strings.js', | |
52 '--exclude', 'text_defaults.css', | |
53 '--exclude', 'text_defaults_md.css', | |
54 | |
55 '--inline-css', | |
56 '--inline-scripts', | |
57 '--strip-comments', | |
58 ] | |
59 | |
60 | |
61 _URL_MAPPINGS = [ | |
62 ('chrome://resources/cr_elements/', _CR_ELEMENTS_PATH), | |
63 ('chrome://resources/css/', _CSS_RESOURCES_PATH), | |
64 ('chrome://resources/html/', _HTML_RESOURCES_PATH), | |
65 ('chrome://resources/js/', _JS_RESOURCES_PATH), | |
66 ('chrome://resources/polymer/v1_0/', _POLYMER_PATH) | |
67 ] | |
68 | |
69 | |
70 _VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable(map( | |
71 lambda m: ['--redirect', '%s|%s' % (m[0], m[1])], _URL_MAPPINGS))) | |
72 | |
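The chain.from_iterable/map expression above just flattens each (URL, path) pair in _URL_MAPPINGS into a repeated --redirect flag. For readability, an equivalent spelled as a list comprehension (illustration only, not part of the patch):

    _VULCANIZE_REDIRECT_ARGS = [
        arg
        for (url, path) in _URL_MAPPINGS
        for arg in ('--redirect', '%s|%s' % (url, path))
    ]

The first mapping, for instance, contributes '--redirect' followed by 'chrome://resources/cr_elements/|' + _CR_ELEMENTS_PATH.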
73 | |
74 def _run_cmd(cmd_parts, stdout=None): | |
75 cmd = "'" + "' '".join(cmd_parts) + "'" | |
76 process = subprocess.Popen( | |
77 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) | |
78 stdout, stderr = process.communicate() | |
79 | |
80 if stderr: | |
81 print >> sys.stderr, '%s failed: %s' % (cmd, stderr) | |
82 raise Exception('%s failed' % cmd) | |
83 | |
84 return stdout | |
85 | |
86 | |
87 def _undo_mapping(mappings, url): | |
88 for tup in mappings: | |
Dan Beam
2017/01/20 23:49:30
I don't think |tup| is an explanatory variable name
dpapad
2017/01/21 02:32:15
Done, sort of. No need to use enumerate().
| |
89 res = re.match(tup[0], url) | |
Dan Beam
2017/01/20 23:49:30
don't use re, use url.startswith()
dpapad
2017/01/21 02:32:15
Done.
| |
90 if res: | |
91 return url.replace(tup[0], tup[1] + '/') | |
92 return url | |
93 | |
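Taking the two comments above together (a more descriptive loop variable than |tup|, and url.startswith() instead of re.match()), the revised helper in the follow-up patchset presumably reads roughly like the sketch below; the unpacked names are illustrative, not quoted from the landed code:

    def _undo_mapping(mappings, url):
      for (redirect_url, file_path) in mappings:
        if url.startswith(redirect_url):
          return url.replace(redirect_url, file_path + '/')
      return url

startswith() is also the more correct check here: with re.match(), the '.' characters in prefixes like 'chrome://resources/css/' would be treated as regex metacharacters rather than literal dots.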
94 | |
95 # Get a list of all files that were bundled with Vulcanize and update the | |
96 # depfile accordingly such that Ninja knows when to trigger re-vulcanization. | |
97 def _update_dep_file(in_folder, out_folder, host, depfile, html_out_file): | |
98 in_path = os.path.join(os.getcwd(), in_folder) | |
99 out_path = os.path.join(os.getcwd(), out_folder) | |
100 | |
101 # Prior call to vulcanize already generated the deps list, grab it from there. | |
102 request_list = open(os.path.join( | |
103 out_path, 'request_list.txt'), 'r').read().splitlines() | |
104 | |
105 # Undo the URL mappings applied by vulcanize to get file paths relative to | |
106 # current working directory. | |
107 url_mappings = _URL_MAPPINGS + [ | |
108 ('/', os.path.relpath(in_path, os.getcwd())), | |
109 ('chrome://%s/' % host, os.path.relpath(in_path, os.getcwd())), | |
110 ] | |
111 | |
112 dependencies = map( | |
113 lambda url: _undo_mapping(url_mappings, url), request_list) | |
114 | |
115 f = open(os.path.join(os.getcwd(), depfile), 'w') | |
116 f.write(os.path.join( | |
117 out_folder, html_out_file + ': ') + ' '.join(dependencies)) | |
118 f.close() | |
Dan Beam
2017/01/20 23:49:30
nit: use with, which will auto-close the file for you
dpapad
2017/01/21 02:32:15
Done, used with. Did not create a temp out_path variable.
| |
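As suggested, a with block closes the file even if write() throws. The end of _update_dep_file would then read roughly as follows (a sketch of the revised patchset, not quoted from it):

    with open(os.path.join(os.getcwd(), depfile), 'w') as f:
      f.write(os.path.join(
          out_folder, html_out_file + ': ') + ' '.join(dependencies))

Either way, the depfile ends up holding a single Ninja-style dependency line of the form 'out_folder/html_out_file: dep1 dep2 ...', which is what lets Ninja decide when re-vulcanization is needed.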
119 | |
120 | |
121 def _vulcanize(in_folder, out_folder, host, html_in_file, | |
122 html_out_file, js_out_file): | |
123 in_path = os.path.join(os.getcwd(), in_folder) | |
124 out_path = os.path.join(os.getcwd(), out_folder) | |
125 | |
126 html_out_path = os.path.join(out_path, html_out_file) | |
127 js_out_path = os.path.join(out_path, js_out_file) | |
128 | |
129 output = _run_cmd( | |
130 [_NODE_BINARY, node_modules.PathToVulcanize()] + | |
131 _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + | |
132 ['--out-request-list', os.path.join(out_path, 'request_list.txt'), | |
Dan Beam
2017/01/20 23:49:30
can we make a tempfile instead of using request_list.txt?
dpapad
2017/01/21 02:32:15
Per offline discussion, I am keeping this file und
| |
133 '--redirect', '/|%s' % in_path, | |
134 '--redirect', 'chrome://%s/|%s' % (host, in_path), | |
135 os.path.join('/', html_in_file)]) | |
136 | |
137 with tempfile.NamedTemporaryFile(mode='wt+', delete=False) as tmp: | |
138 # Grit includes are not supported, use HTML imports instead. | |
139 tmp.write(output.replace( | |
140 '<include src="', '<include src-disabled="')) | |
141 | |
142 try: | |
143 _run_cmd([_NODE_BINARY, node_modules.PathToCrisper(), | |
144 '--source', tmp.name, | |
145 '--script-in-head', 'false', | |
146 '--html', html_out_path, | |
147 '--js', js_out_path]) | |
148 | |
149 # TODO(tsergeant): Remove when JS resources are minified by default: | |
150 # crbug.com/619091. | |
151 _run_cmd([_NODE_BINARY, node_modules.PathToUglifyJs(), js_out_path, | |
152 '--comments', '/Copyright|license|LICENSE|\<\/?if/', | |
153 '--output', js_out_path]) | |
154 finally: | |
155 os.remove(tmp.name) | |
156 | |
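For context on the request_list.txt exchange above: _update_dep_file() reads the list back from the output folder afterwards, so it needs a predictable path, which is presumably why the file is kept rather than swapped for a throwaway temp file. The suggested tempfile variant would have looked something like the following, purely as an illustration:

    request_list_file = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
    # Pass ['--out-request-list', request_list_file.name] to vulcanize, read the
    # request list back from that same path, then delete the temporary file.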
157 | |
158 def _css_build(out_folder, files): | |
159 out_path = os.path.join(os.getcwd(), out_folder) | |
160 paths = map(lambda f: os.path.join(out_path, f), files) | |
161 | |
162 _run_cmd([_NODE_BINARY, node_modules.PathToPolymerCssBuild()] + paths) | |
163 | |
164 | |
165 def main(): | |
166 parser = argparse.ArgumentParser() | |
167 parser.add_argument('--host') | |
168 parser.add_argument('--html_in_file') | |
169 parser.add_argument('--html_out_file') | |
170 parser.add_argument('--input_type') | |
171 parser.add_argument('--input') | |
172 parser.add_argument('--js_out_file') | |
173 parser.add_argument('--out_folder') | |
174 parser.add_argument('--depfile') | |
Dan Beam
2017/01/20 23:49:30
nit: move to top? (alpha)
dpapad
2017/01/21 02:32:15
Done.
| |
175 args = parser.parse_args() | |
176 | |
177 vulcanize_input_folder = args.input | |
178 | |
179 # If a .pak file was specified, unpack that file first and pass the output to | |
180 # vulcanize. | |
181 if args.input_type == 'PAK_FILE': | |
182 import unpack_pak | |
183 input_folder = os.path.join(os.getcwd(), args.input) | |
184 output_folder = os.path.join(args.out_folder, 'flattened') | |
185 unpack_pak.unpack(args.input, output_folder) | |
186 vulcanize_input_folder = output_folder | |
187 | |
188 _vulcanize(vulcanize_input_folder, args.out_folder, args.host, | |
189 args.html_in_file, args.html_out_file, args.js_out_file) | |
190 _css_build(args.out_folder, files=[args.html_out_file]) | |
191 | |
192 _update_dep_file(vulcanize_input_folder, args.out_folder, args.host, | |
193 args.depfile, args.html_out_file) | |
194 | |
Dan Beam
2017/01/20 23:49:30
\n\n
dpapad
2017/01/21 02:32:15
Done.
| |
195 if __name__ == '__main__': | |
196 main() | |