Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 import argparse | 6 import argparse |
| 7 import itertools | 7 import itertools |
| 8 import os | 8 import os |
| 9 import platform | 9 import platform |
| 10 import re | 10 import re |
| (...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 89 print >> sys.stderr, '%s failed: %s' % (cmd, stderr) | 89 print >> sys.stderr, '%s failed: %s' % (cmd, stderr) |
| 90 raise | 90 raise |
| 91 | 91 |
| 92 return stdout | 92 return stdout |
| 93 | 93 |
| 94 | 94 |
| 95 def _undo_mapping(mappings, url): | 95 def _undo_mapping(mappings, url): |
| 96 for (redirect_url, file_path) in mappings: | 96 for (redirect_url, file_path) in mappings: |
| 97 if url.startswith(redirect_url): | 97 if url.startswith(redirect_url): |
| 98 return url.replace(redirect_url, file_path + os.sep) | 98 return url.replace(redirect_url, file_path + os.sep) |
| 99 return url | 99 print >> sys.stderr, 'failed to find: ' + url |
| 100 raise | |
| 100 | 101 |
| 101 | 102 |
| 102 # Get a list of all files that were bundled with Vulcanize and update the | 103 # Get a list of all files that were bundled with Vulcanize and update the |
| 103 # depfile accordingly such that Ninja knows when to trigger re-vulcanization. | 104 # depfile accordingly such that Ninja knows when to trigger re-vulcanization. |
| 104 def _update_dep_file(in_folder, args): | 105 def _update_dep_file(in_folder, args): |
| 105 in_path = os.path.join(_CWD, in_folder) | 106 in_path = os.path.join(_CWD, in_folder) |
| 106 out_path = os.path.join(_CWD, args.out_folder) | 107 out_path = os.path.join(_CWD, args.out_folder) |
| 107 | 108 |
| 108 # Prior call to vulcanize already generated the deps list, grab it from there. | 109 # Prior call to vulcanize already generated the deps list, grab it from there. |
| 109 request_list = open(os.path.join( | 110 request_list_path = os.path.join(out_path, _REQUEST_LIST_FILE) |
| 110 out_path, _REQUEST_LIST_FILE), 'r').read().splitlines() | 111 request_list = open(request_list_path, 'r').read().splitlines() |
| 112 | |
| 113 if platform.system() == 'Windows': | |
| 114 # TODO(dbeam): UGH. For some reason Vulcanize is interpreting the target | |
| 115 # file path as a URL and using the drive letter (e.g. D:\) as a protocol. | |
| 116 # This is a little insane, but we're fixing here by normalizing case (which | |
| 117 # really shouldn't matter, these are all file paths and generally are all | |
| 118 # lower case) and writing from / to \ (file path) and then back again. This | |
| 119 # is compounded by NodeJS having a bug in url.resolve() that handles | |
| 120 # chrome:// protocol URLs poorly as well as us using startswith() to strip | |
| 121 # file paths (which isn't crazy awesome either). Don't remove unless you | |
| 122 # really really know what you're doing. | |
| 123 norm = lambda u: u.lower().replace('/', '\\') | |
| 124 request_list = [norm(u).replace(norm(in_path), '').replace('\\', '/') | |
| 125 for u in request_list] | |
| 111 | 126 |
| 112 # Undo the URL mappings applied by vulcanize to get file paths relative to | 127 # Undo the URL mappings applied by vulcanize to get file paths relative to |
| 113 # current working directory. | 128 # current working directory. |
| 114 url_mappings = _URL_MAPPINGS + [ | 129 url_mappings = _URL_MAPPINGS + [ |
| 115 ('/', os.path.relpath(in_path, _CWD)), | 130 ('/', os.path.relpath(in_path, _CWD)), |
| 116 ('chrome://%s/' % args.host, os.path.relpath(in_path, _CWD)), | 131 ('chrome://%s/' % args.host, os.path.relpath(in_path, _CWD)), |
| 117 ] | 132 ] |
| 118 | 133 |
| 119 dependencies = map( | 134 deps = [_undo_mapping(url_mappings, u) for u in request_list] |
| 120 lambda url: _undo_mapping(url_mappings, url), request_list) | 135 deps = map(os.path.normpath, deps) |
| 121 | 136 |
| 122 # If the input was a .pak file, the generated depfile should not list files | 137 # If the input was a .pak file, the generated depfile should not list files |
| 123 # already in the .pak file. | 138 # already in the .pak file. |
| 124 filtered_dependencies = dependencies | |
| 125 if (args.input_type == 'PAK_FILE'): | 139 if (args.input_type == 'PAK_FILE'): |
| 126 filter_url = os.path.join(args.out_folder, _PAK_UNPACK_FOLDER) | 140 filter_url = os.path.join(args.out_folder, _PAK_UNPACK_FOLDER) |
| 127 filtered_dependencies = filter( | 141 deps = [d for d in deps if d.startswith(filter_url)] |
|
dpapad
2017/02/02 01:39:37
I think this is missing a "not"
deps = [d for d in deps if not d.startswith(filter_url)]
dpapad
2017/02/02 01:43:03
Verified locally (on Linux). This was causing the generated depfile to list only the unpacked .pak files instead of excluding them.
Dan Beam
2017/02/03 22:42:05
Done.
| |
| 128 lambda url: not url.startswith(filter_url), dependencies) | |
| 129 | 142 |
| 130 with open(os.path.join(_CWD, args.depfile), 'w') as f: | 143 with open(os.path.join(_CWD, args.depfile), 'w') as f: |
| 131 f.write(os.path.join( | 144 deps_file_header = os.path.join(args.out_folder, args.html_out_file) |
| 132 args.out_folder, args.html_out_file) + ': ' + ' '.join( | 145 f.write(deps_file_header + ': ' + ' '.join(deps)) |
| 133 filtered_dependencies)) | |
| 134 | 146 |
| 135 | 147 |
| 136 def _vulcanize(in_folder, args): | 148 def _vulcanize(in_folder, args): |
| 137 in_path = os.path.normpath(os.path.join(_CWD, in_folder)) | 149 in_path = os.path.normpath(os.path.join(_CWD, in_folder)) |
| 138 out_path = os.path.join(_CWD, args.out_folder) | 150 out_path = os.path.join(_CWD, args.out_folder) |
| 139 | 151 |
| 140 html_out_path = os.path.join(out_path, args.html_out_file) | 152 html_out_path = os.path.join(out_path, args.html_out_file) |
| 141 js_out_path = os.path.join(out_path, args.js_out_file) | 153 js_out_path = os.path.join(out_path, args.js_out_file) |
| 142 | 154 |
| 143 output = _run_node( | 155 output = _run_node( |
| (...skipping 30 matching lines...) Expand all Loading... | |
| 174 # crbug.com/619091. | 186 # crbug.com/619091. |
| 175 _run_node([node_modules.PathToUglifyJs(), js_out_path, | 187 _run_node([node_modules.PathToUglifyJs(), js_out_path, |
| 176 '--comments', '"/Copyright|license|LICENSE|\<\/?if/"', | 188 '--comments', '"/Copyright|license|LICENSE|\<\/?if/"', |
| 177 '--output', js_out_path]) | 189 '--output', js_out_path]) |
| 178 finally: | 190 finally: |
| 179 os.remove(tmp.name) | 191 os.remove(tmp.name) |
| 180 | 192 |
| 181 | 193 |
| 182 def _css_build(out_folder, files): | 194 def _css_build(out_folder, files): |
| 183 out_path = os.path.join(_CWD, out_folder) | 195 out_path = os.path.join(_CWD, out_folder) |
| 184 paths = map(lambda f: os.path.join(out_path, f), files) | 196 paths = [os.path.join(out_path, f) for f in files] |
| 185 | 197 |
| 186 _run_node([node_modules.PathToPolymerCssBuild()] + paths) | 198 _run_node([node_modules.PathToPolymerCssBuild()] + paths) |
| 187 | 199 |
| 188 | 200 |
| 189 def main(): | 201 def main(): |
| 190 parser = argparse.ArgumentParser() | 202 parser = argparse.ArgumentParser() |
| 191 parser.add_argument('--depfile') | 203 parser.add_argument('--depfile') |
| 192 parser.add_argument('--host') | 204 parser.add_argument('--host') |
| 193 parser.add_argument('--html_in_file') | 205 parser.add_argument('--html_in_file') |
| 194 parser.add_argument('--html_out_file') | 206 parser.add_argument('--html_out_file') |
| 195 parser.add_argument('--input') | 207 parser.add_argument('--input') |
| 196 parser.add_argument('--input_type') | 208 parser.add_argument('--input_type') |
| 197 parser.add_argument('--insert_in_head') | 209 parser.add_argument('--insert_in_head') |
| 198 parser.add_argument('--js_out_file') | 210 parser.add_argument('--js_out_file') |
| 199 parser.add_argument('--out_folder') | 211 parser.add_argument('--out_folder') |
| 200 args = parser.parse_args() | 212 args = parser.parse_args() |
| 213 | |
| 214 args.depfile = os.path.normpath(args.depfile) | |
| 201 args.input = os.path.normpath(args.input) | 215 args.input = os.path.normpath(args.input) |
| 216 args.out_folder = os.path.normpath(args.out_folder) | |
| 202 | 217 |
| 203 vulcanize_input_folder = args.input | 218 vulcanize_input_folder = args.input |
| 204 | 219 |
| 205 # If a .pak file was specified, unpack that file first and pass the output to | 220 # If a .pak file was specified, unpack that file first and pass the output to |
| 206 # vulcanize. | 221 # vulcanize. |
| 207 if (args.input_type == 'PAK_FILE'): | 222 if (args.input_type == 'PAK_FILE'): |
| 208 import unpack_pak | 223 import unpack_pak |
| 209 input_folder = os.path.join(_CWD, args.input) | 224 input_folder = os.path.join(_CWD, args.input) |
| 210 output_folder = os.path.join(args.out_folder, _PAK_UNPACK_FOLDER) | 225 output_folder = os.path.join(args.out_folder, _PAK_UNPACK_FOLDER) |
| 211 unpack_pak.unpack(args.input, output_folder) | 226 unpack_pak.unpack(args.input, output_folder) |
| 212 vulcanize_input_folder = output_folder | 227 vulcanize_input_folder = output_folder |
| 213 | 228 |
| 214 _vulcanize(vulcanize_input_folder, args) | 229 _vulcanize(vulcanize_input_folder, args) |
| 215 _css_build(args.out_folder, files=[args.html_out_file]) | 230 _css_build(args.out_folder, files=[args.html_out_file]) |
| 216 | 231 |
| 217 _update_dep_file(vulcanize_input_folder, args) | 232 _update_dep_file(vulcanize_input_folder, args) |
| 218 | 233 |
| 219 | 234 |
| 220 if __name__ == '__main__': | 235 if __name__ == '__main__': |
| 221 main() | 236 main() |
| OLD | NEW |