OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # Copyright 2015 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 | |
6 """Tool to manage external mojom interfaces.""" | |
7 | |
8 import argparse | |
9 import errno | |
10 import logging | |
11 import os | |
12 import sys | |
13 import urllib2 | |
14 | |
15 # Local library | |
16 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), | |
17 "pylib")) | |
18 # Bindings library | |
19 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), | |
20 "..", "bindings", "pylib")) | |
21 # Requests library | |
22 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), | |
23 "..", "..", "..", "..", "third_party", | |
24 "requests", "src")) | |
25 | |
26 import requests | |
27 | |
28 from fetcher.repository import Repository | |
29 from fetcher.dependency import Dependency | |
30 from mojom.parse.parser import Parse, ParseError | |
31 | |
32 | |
class UrlRewriterException(Exception):
  """Raised when a set of URL rewrite rules is internally inconsistent."""
36 | |
class UrlRewriter(object):
  """Rewrites URL prefixes according to the provided mappings.

  Note that mappings are not followed transitively. If mappings contains
  {"a": "b", "b": "c"}, then UrlRewriter.rewrite("a") will return "b", not "c".
  """

  def __init__(self, mappings):
    self._mappings = mappings
    # Reject rule sets where a source prefix and a destination prefix
    # overlap; such rules would make the rewrite order-dependent.
    for target in self._mappings.values():
      for source in self._mappings:
        if source in target or target in source:
          raise UrlRewriterException(
              "%s and %s share a common subpath" % (source, target))

  def rewrite(self, path):
    """Return |path| with the first matching source prefix replaced."""
    for origin, destination in self._mappings.items():
      if not path.startswith(origin):
        continue
      return destination + path[len(origin):]
    return path
57 | |
58 | |
class MojomFetcher(object):
  """Downloads external .mojom dependencies into a local repository.

  Network, filesystem and file-opening calls are routed through thin
  wrapper methods (_requests_get, _os_makedirs, _open) so they can be
  stubbed out in tests.
  """

  def __init__(self, repository, url_rewriter):
    self._repository = repository
    self._url_rewriter = url_rewriter

  def _requests_get(self, url):
    # verify=True enforces TLS certificate validation.
    return requests.get(url, verify=True)

  def _os_makedirs(self, dirs):
    """Create |dirs| (and parents), tolerating an existing directory."""
    try:
      os.makedirs(dirs)
    except OSError as e:
      # The directory may already exist, we don't care.
      if e.errno != errno.EEXIST:
        raise

  def _open(self, f, mode="r"):
    return open(f, mode)

  def _download_dependencies(self, dependencies):
    """Takes the list of mojom dependencies and download the external ones.
    Returns the number of successfully downloaded dependencies."""

    downloaded = 0
    for dep in dependencies:
      if self._maybe_download_dep(dep):
        downloaded += 1
    return downloaded

  def _maybe_download_dep(self, dep):
    """Try each candidate URL for |dep| and save the first valid mojom.

    Returns True if a mojom file was downloaded and written to the
    external directory, False otherwise.
    """
    if not dep.maybe_is_a_url():
      return False

    for candidate in dep.generate_candidate_urls():
      url = self._url_rewriter.rewrite(candidate)
      response = self._requests_get("https://" + url)
      if not response.ok:
        # If we get an error, it just means that this candidate URL is not
        # correct. We must try the other ones before giving up.
        logging.debug("Error while downloading %s (%s)", candidate, url)
        continue
      # This is an external dependency.
      data = response.content
      try:
        # Validate the payload before touching the filesystem so an invalid
        # mojom does not leave behind an empty/truncated file or a stray
        # directory.
        Parse(data, candidate)
      except ParseError:
        logging.warning("File at %s is not a mojom", url)
        break
      directory = os.path.dirname(candidate)
      full_directory = os.path.join(self._repository.get_external_directory(),
                                    directory)
      # _os_makedirs already tolerates a pre-existing directory; no extra
      # try/except is needed here.
      self._os_makedirs(full_directory)
      with self._open(os.path.join(self._repository.get_external_directory(),
                                   candidate), "w") as f:
        f.write(data)
      return True
    return False

  def discover(self):
    """Discover missing .mojom dependencies and download them.

    Repeats until a pass downloads nothing new, then returns 0.
    """
    while True:
      missing_deps = self._repository.get_missing_dependencies()
      downloaded = self._download_dependencies(missing_deps)
      if downloaded == 0:
        return 0

  def get(self, dep):
    """Download |dep| and, on success, its transitive dependencies.

    Returns 0 on success, -1 if |dep| could not be downloaded.
    """
    dependency = Dependency(self._repository, ".", dep)
    downloaded = self._download_dependencies([dependency])
    if downloaded != 0:
      return self.discover()
    else:
      return -1

  def update(self):
    """Re-download every known external dependency.

    Returns 0 on success, -1 if nothing could be downloaded.
    """
    dependencies = [Dependency(self._repository, ".", f)
                    for f in self._repository.get_external_urls()]
    # TODO(etiennej): We may want to suggest to the user to delete
    # un-downloadable dependencies.
    downloaded = self._download_dependencies(dependencies)
    if downloaded != 0:
      return self.discover()
    else:
      return -1
148 | |
def _main(args):
  """Dispatch parsed command-line |args| to a MojomFetcher action.

  Returns 0 on success, -1 on failure.
  """
  if args.prefix_rewrite:
    # Each entry is an "origin:destination" pair; split on the first colon
    # only, so destinations may themselves contain colons.
    rewrite_rules = dict(x.split(':', 1) for x in args.prefix_rewrite)
  else:
    rewrite_rules = {}
  rewriter = UrlRewriter(rewrite_rules)
  repository_path = os.path.abspath(args.repository_path)
  repository = Repository(repository_path, args.external_dir)
  fetcher = MojomFetcher(repository, rewriter)
  if args.action == 'discover':
    return fetcher.discover()
  elif args.action == 'get':
    # 'url' is declared with nargs=1, so args.url is a one-element list;
    # unwrap it so the fetcher receives the URL string itself (matching
    # what update() passes to Dependency).
    return fetcher.get(args.url[0])
  elif args.action == 'update':
    return fetcher.update()
  else:
    # args.action is the subparser name (a string); logging args.action[0]
    # would print only its first character.
    logging.error("No matching action %s", args.action)
    return -1
167 | |
def main():
  """Entry point: build the argument parser and run the chosen action."""
  logging.basicConfig(level=logging.ERROR)

  arg_parser = argparse.ArgumentParser(
      description='Download mojom dependencies.')
  arg_parser.add_argument('--repository-path', type=str, action='store',
                          default='.',
                          help='The path to the client repository.')
  arg_parser.add_argument('--external-dir', type=str, action='store',
                          default='external',
                          help='Directory for external interfaces')
  arg_parser.add_argument('--prefix-rewrite', type=str, action='append',
                          help='If present, "origin:destination" pairs. '
                          '"origin" prefixes will be rewritten as '
                          '"destination". May be used several times. '
                          'Rewrites are not transitive.')

  # One sub-command per fetcher action.
  actions = arg_parser.add_subparsers(dest='action', help='action')
  get_parser = actions.add_parser(
      'get', help='Get the specified URL and all its transitive dependencies')
  get_parser.add_argument('url', type=str, nargs=1,
                          help='URL to download for get action')
  actions.add_parser(
      'discover',
      help='Recursively discover and download new external dependencies')
  actions.add_parser('update', help='Update all external dependencies')

  return _main(arg_parser.parse_args())
193 | |
194 | |
195 if __name__ == '__main__': | |
196 sys.exit(main()) | |
OLD | NEW |