Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 from bs4 import BeautifulSoup | 6 from bs4 import BeautifulSoup |
| 7 from datetime import date | 7 from datetime import date |
| 8 import os.path as path | 8 import os.path as path |
| 9 import sys | 9 import sys |
| 10 | 10 |
| (...skipping 14 matching lines...) Expand all Loading... | |
| 25 'targets': [ | 25 'targets': [ |
| 26 %s | 26 %s |
| 27 ], | 27 ], |
| 28 } | 28 } |
| 29 """.strip() | 29 """.strip() |
| 30 | 30 |
| 31 | 31 |
def main(created_by, html_files):
  """Print a compiled_resources2.gyp covering the given Polymer HTML files.

  For every .html file that has a sibling -extracted.js file, emits one GYP
  closure-compilation target whose dependencies are derived from the file's
  <link rel="import"> tags. The finished file is written to stdout.

  Args:
    created_by: Name of the generating script, recorded in the output header.
    html_files: Iterable of .html file paths to process.
  """
  targets = ""

  def _html_to_extracted(html_file):
    # foo.html -> foo-extracted (base name of the extracted-JS sibling).
    assert html_file.endswith(".html")
    return html_file[:-len(".html")] + "-extracted"

  def _target_name(target_file):
    # GYP target name for an HTML file: path/to/foo.html -> foo-extracted.
    return _html_to_extracted(path.basename(target_file))

  def _has_extracted_js(html_file):
    # True if foo.html has a sibling foo-extracted.js worth compiling.
    return path.isfile(_html_to_extracted(html_file) + ".js")

  # Only files that actually have extracted JS get a target of their own.
  html_files = filter(_has_extracted_js, html_files)

  for html_file in sorted(html_files, key=_target_name):
    html_base = path.basename(html_file)
    if html_base in _POLYMERS:
      continue

    # Fix: close the HTML file deterministically instead of leaking the
    # handle until garbage collection.
    with open(html_file) as f:
      parsed = BeautifulSoup(f, "html.parser")
    imports = set(i.get("href") for i in parsed.find_all("link", rel="import"))

    html_dir = path.dirname(html_file)
    dependencies = []

    for html_import in sorted(imports):
      # NOTE(review): encode("ascii") assumes import hrefs are ASCII-only and
      # that this script runs under Python 2 (under Python 3 this would yield
      # bytes and break the string comparisons below) -- confirm.
      import_dir, import_base = path.split(html_import.encode("ascii"))
      if import_base in _POLYMERS:
        continue

      if import_base == _WEB_ANIMATIONS_BASE:
        dependencies.append(_WEB_ANIMATIONS_TARGET)
        continue

      # Only exclude these after appending web animations externs.
      if not _has_extracted_js(path.join(html_dir, html_import)):
        continue

      target = _target_name(import_base)

      # Imports from another directory depend on that directory's gyp file.
      if import_dir:
        target = "compiled_resources2.gyp:" + target

      dependencies.append(path.join(import_dir, target))

    path_to_compile_js = path.relpath(_COMPILE_JS, html_dir)

    targets += "\n    {"
    targets += "\n      'target_name': '%s-extracted'," % html_base[:-5]
    if dependencies:
      targets += "\n      'dependencies': ["
      targets += "\n        '%s'," % "',\n        '".join(dependencies)
      targets += "\n      ],"
    targets += "\n      'includes': ['%s']," % path_to_compile_js
    targets += "\n    },"

  targets = targets.strip()

  if targets:
    current_year = date.today().year
    # Parentheses around the single argument keep this line valid under both
    # Python 2 (parenthesized expression) and Python 3 (function call).
    print(_COMPILED_RESOURCES_TEMPLATE % (current_year, created_by, targets))
| 85 | 95 |
| 86 if __name__ == "__main__": | 96 if __name__ == "__main__": |
| 87 main(path.basename(sys.argv[0]), sys.argv[1:]) | 97 main(path.basename(sys.argv[0]), sys.argv[1:]) |
| OLD | NEW |