| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """The frontend for the Mojo bindings system.""" | 6 """The frontend for the Mojo bindings system.""" |
| 7 | 7 |
| 8 | 8 |
| 9 import argparse | 9 import argparse |
| 10 import imp | 10 import imp |
| (...skipping 31 matching lines...) |
| 42 from mojom.parse.parser import Parse | 42 from mojom.parse.parser import Parse |
| 43 from mojom.parse.translate import Translate | 43 from mojom.parse.translate import Translate |
| 44 | 44 |
| 45 | 45 |
| 46 _BUILTIN_GENERATORS = { | 46 _BUILTIN_GENERATORS = { |
| 47 "c++": "mojom_cpp_generator.py", | 47 "c++": "mojom_cpp_generator.py", |
| 48 "javascript": "mojom_js_generator.py", | 48 "javascript": "mojom_js_generator.py", |
| 49 "java": "mojom_java_generator.py", | 49 "java": "mojom_java_generator.py", |
| 50 } | 50 } |
| 51 | 51 |
| 52 |
| 52 def LoadGenerators(generators_string): | 53 def LoadGenerators(generators_string): |
| 53 if not generators_string: | 54 if not generators_string: |
| 54 return [] # No generators. | 55 return [] # No generators. |
| 55 | 56 |
| 56 script_dir = os.path.dirname(os.path.abspath(__file__)) | 57 script_dir = os.path.dirname(os.path.abspath(__file__)) |
| 57 generators = {} | 58 generators = {} |
| 58 for generator_name in [s.strip() for s in generators_string.split(",")]: | 59 for generator_name in [s.strip() for s in generators_string.split(",")]: |
| 59 language = generator_name.lower() | 60 language = generator_name.lower() |
| 60 if language in _BUILTIN_GENERATORS: | 61 if language in _BUILTIN_GENERATORS: |
| 61 generator_name = os.path.join(script_dir, "generators", | 62 generator_name = os.path.join(script_dir, "generators", |
| 62 _BUILTIN_GENERATORS[language]) | 63 _BUILTIN_GENERATORS[language]) |
| 63 else: | 64 else: |
| 64 print "Unknown generator name %s" % generator_name | 65 print "Unknown generator name %s" % generator_name |
| 65 sys.exit(1) | 66 sys.exit(1) |
| 66 generator_module = imp.load_source(os.path.basename(generator_name)[:-3], | 67 generator_module = imp.load_source(os.path.basename(generator_name)[:-3], |
| 67 generator_name) | 68 generator_name) |
| 68 generators[language] = generator_module | 69 generators[language] = generator_module |
| 69 return generators | 70 return generators |
| 70 | 71 |
| 71 | 72 |
| 72 def MakeImportStackMessage(imported_filename_stack): | 73 def MakeImportStackMessage(imported_filename_stack): |
| 73 """Make a (human-readable) message listing a chain of imports. (Returned | 74 """Make a (human-readable) message listing a chain of imports. (Returned |
| 74 string begins with a newline (if nonempty) and does not end with one.)""" | 75 string begins with a newline (if nonempty) and does not end with one.)""" |
| 75 return ''.join( | 76 return ''.join( |
| 76 reversed(["\n %s was imported by %s" % (a, b) for (a, b) in \ | 77 reversed(["\n %s was imported by %s" % (a, b) for (a, b) in \ |
| 77 zip(imported_filename_stack[1:], imported_filename_stack)])) | 78 zip(imported_filename_stack[1:], imported_filename_stack)])) |
| 78 | 79 |
| 79 | 80 |
| 80 def FindImportFile(dir_name, file_name, search_dirs): | 81 class RelativePath(object): |
| 81 for search_dir in [dir_name] + search_dirs: | 82 """Represents a path relative to the source tree.""" |
| 82 path = os.path.join(search_dir, file_name) | 83 def __init__(self, path, source_root): |
| 84 self.path = path |
| 85 self.source_root = source_root |
| 86 |
| 87 def relative_path(self): |
| 88 return os.path.relpath(os.path.abspath(self.path), |
| 89 os.path.abspath(self.source_root)) |
| 90 |
| 91 |
| 92 def FindImportFile(rel_dir, file_name, search_rel_dirs): |
| 93 """Finds |file_name| in either |rel_dir| or |search_rel_dirs|. Returns a |
| 94 RelativePath with the first file found, or an arbitrary non-existent file |
| 95 otherwise.""" |
| 96 for rel_search_dir in [rel_dir] + search_rel_dirs: |
| 97 path = os.path.join(rel_search_dir.path, file_name) |
| 83 if os.path.isfile(path): | 98 if os.path.isfile(path): |
| 84 return path | 99 return RelativePath(path, rel_search_dir.source_root) |
| 85 return os.path.join(dir_name, file_name) | 100 return RelativePath(os.path.join(rel_dir.path, file_name), |
| 101 rel_dir.source_root) |
| 102 |
| 86 | 103 |
| 87 class MojomProcessor(object): | 104 class MojomProcessor(object): |
| 88 def __init__(self, should_generate): | 105 def __init__(self, should_generate): |
| 89 self._should_generate = should_generate | 106 self._should_generate = should_generate |
| 90 self._processed_files = {} | 107 self._processed_files = {} |
| 91 self._parsed_files = {} | 108 self._parsed_files = {} |
| 92 self._typemap = {} | 109 self._typemap = {} |
| 93 | 110 |
| 94 def LoadTypemaps(self, typemaps): | 111 def LoadTypemaps(self, typemaps): |
| 95 # Support some very simple single-line comments in typemap JSON. | 112 # Support some very simple single-line comments in typemap JSON. |
| 96 comment_expr = r"^\s*//.*$" | 113 comment_expr = r"^\s*//.*$" |
| 97 def no_comments(line): | 114 def no_comments(line): |
| 98 return not re.match(comment_expr, line) | 115 return not re.match(comment_expr, line) |
| 99 for filename in typemaps: | 116 for filename in typemaps: |
| 100 with open(filename) as f: | 117 with open(filename) as f: |
| 101 typemaps = json.loads("".join(filter(no_comments, f.readlines()))) | 118 typemaps = json.loads("".join(filter(no_comments, f.readlines()))) |
| 102 for language, typemap in typemaps.iteritems(): | 119 for language, typemap in typemaps.iteritems(): |
| 103 language_map = self._typemap.get(language, {}) | 120 language_map = self._typemap.get(language, {}) |
| 104 language_map.update(typemap) | 121 language_map.update(typemap) |
| 105 self._typemap[language] = language_map | 122 self._typemap[language] = language_map |
| 106 | 123 |
| 107 def ProcessFile(self, args, remaining_args, generator_modules, filename): | 124 def ProcessFile(self, args, remaining_args, generator_modules, filename): |
| 108 self._ParseFileAndImports(filename, args.import_directories, []) | 125 self._ParseFileAndImports(RelativePath(filename, args.depth), |
| 126 args.import_directories, []) |
| 109 | 127 |
| 110 return self._GenerateModule(args, remaining_args, generator_modules, | 128 return self._GenerateModule(args, remaining_args, generator_modules, |
| 111 filename) | 129 RelativePath(filename, args.depth)) |
| 112 | 130 |
| 113 def _GenerateModule(self, args, remaining_args, generator_modules, filename): | 131 def _GenerateModule(self, args, remaining_args, generator_modules, |
| 132 rel_filename): |
| 114 # Return the already-generated module. | 133 # Return the already-generated module. |
| 115 if filename in self._processed_files: | 134 if rel_filename.path in self._processed_files: |
| 116 return self._processed_files[filename] | 135 return self._processed_files[rel_filename.path] |
| 117 tree = self._parsed_files[filename] | 136 tree = self._parsed_files[rel_filename.path] |
| 118 | 137 |
| 119 dirname, name = os.path.split(filename) | 138 dirname, name = os.path.split(rel_filename.path) |
| 120 mojom = Translate(tree, name) | 139 mojom = Translate(tree, name) |
| 121 if args.debug_print_intermediate: | 140 if args.debug_print_intermediate: |
| 122 pprint.PrettyPrinter().pprint(mojom) | 141 pprint.PrettyPrinter().pprint(mojom) |
| 123 | 142 |
| 124 # Process all our imports first and collect the module object for each. | 143 # Process all our imports first and collect the module object for each. |
| 125 # We use these to generate proper type info. | 144 # We use these to generate proper type info. |
| 126 for import_data in mojom['imports']: | 145 for import_data in mojom['imports']: |
| 127 import_filename = FindImportFile(dirname, | 146 rel_import_file = FindImportFile( |
| 128 import_data['filename'], | 147 RelativePath(dirname, rel_filename.source_root), |
| 129 args.import_directories) | 148 import_data['filename'], args.import_directories) |
| 130 import_data['module'] = self._GenerateModule( | 149 import_data['module'] = self._GenerateModule( |
| 131 args, remaining_args, generator_modules, import_filename) | 150 args, remaining_args, generator_modules, rel_import_file) |
| 132 | 151 |
| 133 module = OrderedModuleFromData(mojom) | 152 module = OrderedModuleFromData(mojom) |
| 134 | 153 |
| 135 # Set the path as relative to the source root. | 154 # Set the path as relative to the source root. |
| 136 module.path = os.path.relpath(os.path.abspath(filename), | 155 module.path = rel_filename.relative_path() |
| 137 os.path.abspath(args.depth)) | |
| 138 | 156 |
| 139 # Normalize to unix-style path here to keep the generators simpler. | 157 # Normalize to unix-style path here to keep the generators simpler. |
| 140 module.path = module.path.replace('\\', '/') | 158 module.path = module.path.replace('\\', '/') |
| 141 | 159 |
| 142 if self._should_generate(filename): | 160 if self._should_generate(rel_filename.path): |
| 143 for language, generator_module in generator_modules.iteritems(): | 161 for language, generator_module in generator_modules.iteritems(): |
| 144 generator = generator_module.Generator( | 162 generator = generator_module.Generator( |
| 145 module, args.output_dir, typemap=self._typemap.get(language, {}), | 163 module, args.output_dir, typemap=self._typemap.get(language, {}), |
| 146 variant=args.variant, bytecode_path=args.bytecode_path, | 164 variant=args.variant, bytecode_path=args.bytecode_path, |
| 147 for_blink=args.for_blink, | 165 for_blink=args.for_blink, |
| 148 use_new_wrapper_types=args.use_new_wrapper_types) | 166 use_new_wrapper_types=args.use_new_wrapper_types) |
| 149 filtered_args = [] | 167 filtered_args = [] |
| 150 if hasattr(generator_module, 'GENERATOR_PREFIX'): | 168 if hasattr(generator_module, 'GENERATOR_PREFIX'): |
| 151 prefix = '--' + generator_module.GENERATOR_PREFIX + '_' | 169 prefix = '--' + generator_module.GENERATOR_PREFIX + '_' |
| 152 filtered_args = [arg for arg in remaining_args | 170 filtered_args = [arg for arg in remaining_args |
| 153 if arg.startswith(prefix)] | 171 if arg.startswith(prefix)] |
| 154 generator.GenerateFiles(filtered_args) | 172 generator.GenerateFiles(filtered_args) |
| 155 | 173 |
| 156 # Save result. | 174 # Save result. |
| 157 self._processed_files[filename] = module | 175 self._processed_files[rel_filename.path] = module |
| 158 return module | 176 return module |
| 159 | 177 |
| 160 def _ParseFileAndImports(self, filename, import_directories, | 178 def _ParseFileAndImports(self, rel_filename, import_directories, |
| 161 imported_filename_stack): | 179 imported_filename_stack): |
| 162 # Ignore already-parsed files. | 180 # Ignore already-parsed files. |
| 163 if filename in self._parsed_files: | 181 if rel_filename.path in self._parsed_files: |
| 164 return | 182 return |
| 165 | 183 |
| 166 if filename in imported_filename_stack: | 184 if rel_filename.path in imported_filename_stack: |
| 167 print "%s: Error: Circular dependency" % filename + \ | 185 print "%s: Error: Circular dependency" % rel_filename.path + \ |
| 168 MakeImportStackMessage(imported_filename_stack + [filename]) | 186 MakeImportStackMessage(imported_filename_stack + [rel_filename.path]) |
| 169 sys.exit(1) | 187 sys.exit(1) |
| 170 | 188 |
| 171 try: | 189 try: |
| 172 with open(filename) as f: | 190 with open(rel_filename.path) as f: |
| 173 source = f.read() | 191 source = f.read() |
| 174 except IOError as e: | 192 except IOError as e: |
| 175 print "%s: Error: %s" % (e.filename, e.strerror) + \ | 193 print "%s: Error: %s" % (e.rel_filename.path, e.strerror) + \ |
| 176 MakeImportStackMessage(imported_filename_stack + [filename]) | 194 MakeImportStackMessage(imported_filename_stack + [rel_filename.path]) |
| 177 sys.exit(1) | 195 sys.exit(1) |
| 178 | 196 |
| 179 try: | 197 try: |
| 180 tree = Parse(source, filename) | 198 tree = Parse(source, rel_filename.path) |
| 181 except Error as e: | 199 except Error as e: |
| 182 full_stack = imported_filename_stack + [filename] | 200 full_stack = imported_filename_stack + [rel_filename.path] |
| 183 print str(e) + MakeImportStackMessage(full_stack) | 201 print str(e) + MakeImportStackMessage(full_stack) |
| 184 sys.exit(1) | 202 sys.exit(1) |
| 185 | 203 |
| 186 dirname = os.path.split(filename)[0] | 204 dirname = os.path.split(rel_filename.path)[0] |
| 187 for imp_entry in tree.import_list: | 205 for imp_entry in tree.import_list: |
| 188 import_filename = FindImportFile(dirname, | 206 import_file_entry = FindImportFile( |
| 207 RelativePath(dirname, rel_filename.source_root), |
| 189 imp_entry.import_filename, import_directories) | 208 imp_entry.import_filename, import_directories) |
| 190 self._ParseFileAndImports(import_filename, import_directories, | 209 self._ParseFileAndImports(import_file_entry, import_directories, |
| 191 imported_filename_stack + [filename]) | 210 imported_filename_stack + [rel_filename.path]) |
| 192 | 211 |
| 193 self._parsed_files[filename] = tree | 212 self._parsed_files[rel_filename.path] = tree |
| 194 | 213 |
| 195 | 214 |
| 196 def _Generate(args, remaining_args): | 215 def _Generate(args, remaining_args): |
| 197 if args.variant == "none": | 216 if args.variant == "none": |
| 198 args.variant = None | 217 args.variant = None |
| 199 | 218 |
| 219 for idx, import_dir in enumerate(args.import_directories): |
| 220 tokens = import_dir.split(":") |
| 221 if len(tokens) >= 2: |
| 222 args.import_directories[idx] = RelativePath(tokens[0], tokens[1]) |
| 223 else: |
| 224 args.import_directories[idx] = RelativePath(tokens[0], args.depth) |
| 200 generator_modules = LoadGenerators(args.generators_string) | 225 generator_modules = LoadGenerators(args.generators_string) |
| 201 | 226 |
| 202 fileutil.EnsureDirectoryExists(args.output_dir) | 227 fileutil.EnsureDirectoryExists(args.output_dir) |
| 203 | 228 |
| 204 processor = MojomProcessor(lambda filename: filename in args.filename) | 229 processor = MojomProcessor(lambda filename: filename in args.filename) |
| 205 processor.LoadTypemaps(set(args.typemaps)) | 230 processor.LoadTypemaps(set(args.typemaps)) |
| 206 for filename in args.filename: | 231 for filename in args.filename: |
| 207 processor.ProcessFile(args, remaining_args, generator_modules, filename) | 232 processor.ProcessFile(args, remaining_args, generator_modules, filename) |
| 208 | 233 |
| 209 return 0 | 234 return 0 |
| (...skipping 26 matching lines...) |
| 236 generate_parser.add_argument("--debug_print_intermediate", | 261 generate_parser.add_argument("--debug_print_intermediate", |
| 237 action="store_true", | 262 action="store_true", |
| 238 help="print the intermediate representation") | 263 help="print the intermediate representation") |
| 239 generate_parser.add_argument("-g", "--generators", | 264 generate_parser.add_argument("-g", "--generators", |
| 240 dest="generators_string", | 265 dest="generators_string", |
| 241 metavar="GENERATORS", | 266 metavar="GENERATORS", |
| 242 default="c++,javascript,java", | 267 default="c++,javascript,java", |
| 243 help="comma-separated list of generators") | 268 help="comma-separated list of generators") |
| 244 generate_parser.add_argument( | 269 generate_parser.add_argument( |
| 245 "-I", dest="import_directories", action="append", metavar="directory", | 270 "-I", dest="import_directories", action="append", metavar="directory", |
| 246 default=[], help="add a directory to be searched for import files") | 271 default=[], |
| 272 help="add a directory to be searched for import files. The depth from " |
| 273 "source root can be specified for each import by appending it after " |
| 274 "a colon") |
| 247 generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP", | 275 generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP", |
| 248 default=[], dest="typemaps", | 276 default=[], dest="typemaps", |
| 249 help="apply TYPEMAP to generated output") | 277 help="apply TYPEMAP to generated output") |
| 250 generate_parser.add_argument("--variant", dest="variant", default=None, | 278 generate_parser.add_argument("--variant", dest="variant", default=None, |
| 251 help="output a named variant of the bindings") | 279 help="output a named variant of the bindings") |
| 252 generate_parser.add_argument( | 280 generate_parser.add_argument( |
| 253 "--bytecode_path", type=str, required=True, help=( | 281 "--bytecode_path", type=str, required=True, help=( |
| 254 "the path from which to load template bytecode; to generate template " | 282 "the path from which to load template bytecode; to generate template " |
| 255 "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename( | 283 "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename( |
| 256 sys.argv[0]))) | 284 sys.argv[0]))) |
| (...skipping 12 matching lines...) |
| 269 "-o", "--output_dir", dest="output_dir", default=".", | 297 "-o", "--output_dir", dest="output_dir", default=".", |
| 270 help="output directory for precompiled templates") | 298 help="output directory for precompiled templates") |
| 271 precompile_parser.set_defaults(func=_Precompile) | 299 precompile_parser.set_defaults(func=_Precompile) |
| 272 | 300 |
| 273 args, remaining_args = parser.parse_known_args() | 301 args, remaining_args = parser.parse_known_args() |
| 274 return args.func(args, remaining_args) | 302 return args.func(args, remaining_args) |
| 275 | 303 |
| 276 | 304 |
| 277 if __name__ == "__main__": | 305 if __name__ == "__main__": |
| 278 sys.exit(main()) | 306 sys.exit(main()) |
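
The diff above introduces a `RelativePath` helper and an `-I directory[:source_root]` convention for import directories. The sketch below is a standalone usage example under those assumptions, not part of the CL; the sample paths and the `ParseImportDirectory` helper name are hypothetical.

```python
# Minimal sketch (not part of the CL) of how the RelativePath helper and the
# "-I dir[:source_root]" parsing behave.
import os


class RelativePath(object):
  """Mirrors the RelativePath class added in the diff above."""

  def __init__(self, path, source_root):
    self.path = path
    self.source_root = source_root

  def relative_path(self):
    # Both paths are made absolute so the result does not depend on the
    # current working directory.
    return os.path.relpath(os.path.abspath(self.path),
                           os.path.abspath(self.source_root))


def ParseImportDirectory(import_dir, default_root):
  """Hypothetical helper splitting an import directory the way _Generate()
  does: "dir:root" uses the explicit root, a plain "dir" falls back to the
  default (args.depth in the real script)."""
  tokens = import_dir.split(":")
  if len(tokens) >= 2:
    return RelativePath(tokens[0], tokens[1])
  return RelativePath(tokens[0], default_root)


if __name__ == "__main__":
  # Hypothetical layout: the source root is "src" and the mojom file lives
  # a few directories below it.
  mojom = RelativePath("src/mojo/public/interfaces/test.mojom", "src")
  print("%s" % mojom.relative_path())  # mojo/public/interfaces/test.mojom

  # An import directory with an explicit source root appended after a colon.
  imported = ParseImportDirectory("out/gen/mojo:out/gen", "src")
  print("%s %s" % (imported.path, imported.source_root))  # out/gen/mojo out/gen
```

As in the generator itself, the resulting `module.path` would still need the backslash-to-slash normalization shown in the diff to stay consistent across platforms.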