Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 import argparse | 6 import argparse |
| 7 import itertools | 7 import itertools |
| 8 import os | 8 import os |
| 9 import platform | 9 import platform |
| 10 import re | 10 import re |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 35 | 35 |
| 36 | 36 |
| 37 _JS_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'js') | 37 _JS_RESOURCES_PATH = os.path.join(_RESOURCES_PATH, 'js') |
| 38 | 38 |
| 39 | 39 |
| 40 _POLYMER_PATH = os.path.join( | 40 _POLYMER_PATH = os.path.join( |
| 41 _SRC_PATH, 'third_party', 'polymer', 'v1_0', 'components-chromium') | 41 _SRC_PATH, 'third_party', 'polymer', 'v1_0', 'components-chromium') |
| 42 | 42 |
| 43 | 43 |
| 44 _VULCANIZE_BASE_ARGS = [ | 44 _VULCANIZE_BASE_ARGS = [ |
| 45 '--exclude', 'crisper.js', | 45 '--exclude', 'crisper.js', |
|
dpapad
2017/02/08 23:04:18
Is this necessary? We run crisper after vulcanize, so this exclude should not be needed.
calamity
2017/02/09 03:07:35
I've removed this and nothing bad seems to have happened.
| |
| 46 | 46 |
| 47 # These files are already combined and minified. | 47 # These files are already combined and minified. |
| 48 '--exclude', 'chrome://resources/html/polymer.html', | 48 '--exclude', 'chrome://resources/html/polymer.html', |
| 49 '--exclude', 'web-animations-next-lite.min.js', | 49 '--exclude', 'web-animations-next-lite.min.js', |
| 50 | 50 |
| 51 # These files are dynamically created by C++. | 51 # These files are dynamically created by C++. |
| 52 '--exclude', 'load_time_data.js', | 52 '--exclude', 'load_time_data.js', |
| 53 '--exclude', 'strings.js', | 53 '--exclude', 'strings.js', |
| 54 '--exclude', 'text_defaults.css', | 54 '--exclude', 'text_defaults.css', |
| 55 '--exclude', 'text_defaults_md.css', | 55 '--exclude', 'text_defaults_md.css', |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 66 ('chrome://resources/html/', _HTML_RESOURCES_PATH), | 66 ('chrome://resources/html/', _HTML_RESOURCES_PATH), |
| 67 ('chrome://resources/js/', _JS_RESOURCES_PATH), | 67 ('chrome://resources/js/', _JS_RESOURCES_PATH), |
| 68 ('chrome://resources/polymer/v1_0/', _POLYMER_PATH) | 68 ('chrome://resources/polymer/v1_0/', _POLYMER_PATH) |
| 69 ] | 69 ] |
| 70 | 70 |
| 71 | 71 |
| 72 _VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable(map( | 72 _VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable(map( |
| 73 lambda m: ['--redirect', '"%s|%s"' % (m[0], m[1])], _URL_MAPPINGS))) | 73 lambda m: ['--redirect', '"%s|%s"' % (m[0], m[1])], _URL_MAPPINGS))) |
| 74 | 74 |
| 75 | 75 |
| 76 _REQUEST_LIST_FILE = 'request_list.txt' | |
| 77 | |
| 78 | |
| 79 _PAK_UNPACK_FOLDER = 'flattened' | 76 _PAK_UNPACK_FOLDER = 'flattened' |
| 80 | 77 |
| 81 | 78 |
| 82 def _run_node(cmd_parts, stdout=None): | 79 def _run_node(cmd_parts, stdout=None): |
| 83 cmd = " ".join([node.GetBinaryPath()] + cmd_parts) | 80 cmd = " ".join([node.GetBinaryPath()] + cmd_parts) |
| 84 process = subprocess.Popen( | 81 process = subprocess.Popen( |
| 85 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) | 82 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) |
| 86 stdout, stderr = process.communicate() | 83 stdout, stderr = process.communicate() |
| 87 | 84 |
| 88 if stderr: | 85 if stderr: |
| 89 print >> sys.stderr, '%s failed: %s' % (cmd, stderr) | 86 print >> sys.stderr, '%s failed: %s' % (cmd, stderr) |
| 90 raise | 87 raise |
| 91 | 88 |
| 92 return stdout | 89 return stdout |
| 93 | 90 |
| 94 | 91 |
| 95 def _undo_mapping(mappings, url): | 92 def _undo_mapping(mappings, url): |
| 96 for (redirect_url, file_path) in mappings: | 93 for (redirect_url, file_path) in mappings: |
| 97 if url.startswith(redirect_url): | 94 if url.startswith(redirect_url): |
| 98 return url.replace(redirect_url, file_path + os.sep) | 95 return url.replace(redirect_url, file_path + os.sep) |
| 99 return url | 96 return url |
| 100 | 97 |
| 98 def _request_list_path(out_path, html_out_file): | |
| 99 return os.path.join(out_path, html_out_file + '.requestlist') | |
|
dpapad
2017/02/08 23:04:19
I am not super fond of using random file extensions.
calamity
2017/02/09 03:07:35
I've changed it to <html_out_file>_requestlist.txt
| |
| 101 | 100 |
| 102 # Get a list of all files that were bundled with Vulcanize and update the | 101 # Get a list of all files that were bundled with Vulcanize and update the |
| 103 # depfile accordingly such that Ninja knows when to trigger re-vulcanization. | 102 # depfile accordingly such that Ninja knows when to trigger re-vulcanization. |
| 104 def _update_dep_file(in_folder, args): | 103 def _update_dep_file(in_folder, args): |
| 105 in_path = os.path.join(_CWD, in_folder) | 104 in_path = os.path.join(_CWD, in_folder) |
| 106 out_path = os.path.join(_CWD, args.out_folder) | 105 out_path = os.path.join(_CWD, args.out_folder) |
| 107 | 106 |
| 108 # Prior call to vulcanize already generated the deps list, grab it from there. | 107 # Prior call to vulcanize already generated the deps list, grab it from there. |
| 109 request_list = open(os.path.join( | 108 request_list = open(_request_list_path(out_path, args.html_out_file), |
| 110 out_path, _REQUEST_LIST_FILE), 'r').read().splitlines() | 109 'r').read().splitlines() |
|
Dan Beam
2017/02/08 05:41:55
rebase
calamity
2017/02/09 03:07:35
Done.
| |
| 111 | 110 |
| 112 # Undo the URL mappings applied by vulcanize to get file paths relative to | 111 # Undo the URL mappings applied by vulcanize to get file paths relative to |
| 113 # current working directory. | 112 # current working directory. |
| 114 url_mappings = _URL_MAPPINGS + [ | 113 url_mappings = _URL_MAPPINGS + [ |
| 115 ('/', os.path.relpath(in_path, _CWD)), | 114 ('/', os.path.relpath(in_path, _CWD)), |
| 116 ('chrome://%s/' % args.host, os.path.relpath(in_path, _CWD)), | 115 ('chrome://%s/' % args.host, os.path.relpath(in_path, _CWD)), |
| 117 ] | 116 ] |
| 118 | 117 |
| 119 dependencies = map( | 118 dependencies = map( |
| 120 lambda url: _undo_mapping(url_mappings, url), request_list) | 119 lambda url: _undo_mapping(url_mappings, url), request_list) |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 133 filtered_dependencies)) | 132 filtered_dependencies)) |
| 134 | 133 |
| 135 | 134 |
| 136 def _vulcanize(in_folder, args): | 135 def _vulcanize(in_folder, args): |
| 137 in_path = os.path.normpath(os.path.join(_CWD, in_folder)) | 136 in_path = os.path.normpath(os.path.join(_CWD, in_folder)) |
| 138 out_path = os.path.join(_CWD, args.out_folder) | 137 out_path = os.path.join(_CWD, args.out_folder) |
| 139 | 138 |
| 140 html_out_path = os.path.join(out_path, args.html_out_file) | 139 html_out_path = os.path.join(out_path, args.html_out_file) |
| 141 js_out_path = os.path.join(out_path, args.js_out_file) | 140 js_out_path = os.path.join(out_path, args.js_out_file) |
| 142 | 141 |
| 142 exclude_args = [] | |
| 143 for f in args.exclude or []: | |
| 144 exclude_args.append('--exclude') | |
| 145 exclude_args.append(f) | |
| 146 | |
| 143 output = _run_node( | 147 output = _run_node( |
| 144 [node_modules.PathToVulcanize()] + | 148 [node_modules.PathToVulcanize()] + |
| 145 _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + | 149 _VULCANIZE_BASE_ARGS + _VULCANIZE_REDIRECT_ARGS + exclude_args + |
| 146 ['--out-request-list', os.path.join(out_path, _REQUEST_LIST_FILE), | 150 ['--out-request-list', _request_list_path(out_path, args.html_out_file), |
| 147 '--redirect', '"/|%s"' % in_path, | 151 '--redirect', '"/|%s"' % in_path, |
| 148 '--redirect', '"chrome://%s/|%s"' % (args.host, in_path), | 152 '--redirect', '"chrome://%s/|%s"' % (args.host, in_path), |
| 149 # TODO(dpapad): Figure out why vulcanize treats the input path | 153 # TODO(dpapad): Figure out why vulcanize treats the input path |
| 150 # differently on Windows VS Linux/Mac. | 154 # differently on Windows VS Linux/Mac. |
| 151 os.path.join( | 155 os.path.join( |
| 152 in_path if platform.system() == 'Windows' else os.sep, | 156 in_path if platform.system() == 'Windows' else os.sep, |
| 153 args.html_in_file)]) | 157 args.html_in_file)]) |
| 154 | 158 |
| 155 # Grit includes are not supported, use HTML imports instead. | 159 # Grit includes are not supported, use HTML imports instead. |
| 156 output = output.replace('<include src="', '<include src-disabled="') | 160 output = output.replace('<include src="', '<include src-disabled="') |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 181 def _css_build(out_folder, files): | 185 def _css_build(out_folder, files): |
| 182 out_path = os.path.join(_CWD, out_folder) | 186 out_path = os.path.join(_CWD, out_folder) |
| 183 paths = map(lambda f: os.path.join(out_path, f), files) | 187 paths = map(lambda f: os.path.join(out_path, f), files) |
| 184 | 188 |
| 185 _run_node([node_modules.PathToPolymerCssBuild()] + paths) | 189 _run_node([node_modules.PathToPolymerCssBuild()] + paths) |
| 186 | 190 |
| 187 | 191 |
| 188 def main(): | 192 def main(): |
| 189 parser = argparse.ArgumentParser() | 193 parser = argparse.ArgumentParser() |
| 190 parser.add_argument('--depfile') | 194 parser.add_argument('--depfile') |
| 195 parser.add_argument('--exclude', action='append') | |
|
Dan Beam
2017/02/08 05:41:55
rebase
calamity
2017/02/09 03:07:35
Done.
| |
| 191 parser.add_argument('--host') | 196 parser.add_argument('--host') |
| 192 parser.add_argument('--html_in_file') | 197 parser.add_argument('--html_in_file') |
| 193 parser.add_argument('--html_out_file') | 198 parser.add_argument('--html_out_file') |
| 194 parser.add_argument('--input') | 199 parser.add_argument('--input') |
| 195 parser.add_argument('--input_type') | 200 parser.add_argument('--input_type') |
| 196 parser.add_argument('--insert_in_head') | 201 parser.add_argument('--insert_in_head') |
| 197 parser.add_argument('--js_out_file') | 202 parser.add_argument('--js_out_file') |
| 198 parser.add_argument('--out_folder') | 203 parser.add_argument('--out_folder') |
| 199 args = parser.parse_args() | 204 args = parser.parse_args() |
| 200 args.input = os.path.normpath(args.input) | 205 args.input = os.path.normpath(args.input) |
| 201 | 206 |
| 202 vulcanize_input_folder = args.input | 207 vulcanize_input_folder = args.input |
| 203 | 208 |
| 204 # If a .pak file was specified, unpack that file first and pass the output to | 209 # If a .pak file was specified, unpack that file first and pass the output to |
| 205 # vulcanize. | 210 # vulcanize. |
| 206 if (args.input_type == 'PAK_FILE'): | 211 if (args.input_type == 'PAK_FILE'): |
| 207 import unpack_pak | 212 import unpack_pak |
| 208 input_folder = os.path.join(_CWD, args.input) | |
| 209 output_folder = os.path.join(args.out_folder, _PAK_UNPACK_FOLDER) | 213 output_folder = os.path.join(args.out_folder, _PAK_UNPACK_FOLDER) |
| 210 unpack_pak.unpack(args.input, output_folder) | 214 unpack_pak.unpack(args.input, output_folder) |
| 211 vulcanize_input_folder = output_folder | 215 vulcanize_input_folder = output_folder |
| 212 | 216 |
| 213 _vulcanize(vulcanize_input_folder, args) | 217 _vulcanize(vulcanize_input_folder, args) |
| 214 _css_build(args.out_folder, files=[args.html_out_file]) | 218 _css_build(args.out_folder, files=[args.html_out_file]) |
| 215 | 219 |
| 216 _update_dep_file(vulcanize_input_folder, args) | 220 _update_dep_file(vulcanize_input_folder, args) |
| 217 | 221 |
| 218 | 222 |
| 219 if __name__ == '__main__': | 223 if __name__ == '__main__': |
| 220 main() | 224 main() |
| OLD | NEW |