OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 # Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 2 # Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
3 # for details. All rights reserved. Use of this source code is governed by a | 3 # for details. All rights reserved. Use of this source code is governed by a |
4 # BSD-style license that can be found in the LICENSE file. | 4 # BSD-style license that can be found in the LICENSE file. |
5 | 5 |
6 import copy | 6 import copy |
7 import database | 7 import database |
8 import idlparser | 8 import idlparser |
9 import logging | 9 import logging |
10 import multiprocessing | 10 import multiprocessing |
11 import os | 11 import os |
12 import os.path | 12 import os.path |
13 import re | 13 import re |
14 import sys | |
15 import tempfile | |
16 import time | |
17 import traceback | |
18 | |
19 import idl_validator | |
20 | |
21 import compiler | |
22 import compute_interfaces_info_individual | |
23 from compute_interfaces_info_individual import compute_info_individual, info_ind
ividual | |
24 import compute_interfaces_info_overall | |
25 from compute_interfaces_info_overall import compute_interfaces_info_overall, int
erfaces_info | |
26 import idl_definitions | |
27 | 14 |
28 from idlnode import * | 15 from idlnode import * |
29 | 16 |
30 _logger = logging.getLogger('databasebuilder') | 17 _logger = logging.getLogger('databasebuilder') |
31 | 18 |
32 # Used in source annotations to specify the parent interface declaring | 19 # Used in source annotations to specify the parent interface declaring |
33 # a displaced declaration. The 'via' attribute specifies the parent interface | 20 # a displaced declaration. The 'via' attribute specifies the parent interface |
34 # which implements a displaced declaration. | 21 # which implements a displaced declaration. |
35 _VIA_ANNOTATION_ATTR_NAME = 'via' | 22 _VIA_ANNOTATION_ATTR_NAME = 'via' |
36 | 23 |
37 | 24 |
class DatabaseBuilderOptions(object):
  """Used in specifying options when importing new interfaces"""

  def __init__(self,
               idl_syntax=idlparser.WEBIDL_SYNTAX,
               idl_defines=None,
               source=None, source_attributes=None,
               rename_operation_arguments_on_merge=False,
               add_new_interfaces=True,
               obsolete_old_declarations=False):
    """Constructor.
    Args:
      idl_syntax -- the syntax of the IDL file that is imported.
      idl_defines -- list of definitions for the idl gcc pre-processor
      source -- the origin of the IDL file, used for annotating the
        database.
      source_attributes -- this map of attributes is used as
        annotation attributes.
      rename_operation_arguments_on_merge -- if True, will rename
        operation arguments when merging using the new name rather
        than the old.
      add_new_interfaces -- when False, if an interface is a new
        addition, it will be ignored.
      obsolete_old_declarations -- when True, if a declaration
        from a certain source is not re-declared, it will be removed.
    """
    self.source = source
    # BUG FIX: the defaults were the mutable literals [] and {}, which are
    # created once at def time and shared by every instance that relies on
    # the default.  Use None sentinels and build fresh containers instead.
    self.source_attributes = {} if source_attributes is None \
        else source_attributes
    self.idl_syntax = idl_syntax
    self.idl_defines = [] if idl_defines is None else idl_defines
    self.rename_operation_arguments_on_merge = \
        rename_operation_arguments_on_merge
    self.add_new_interfaces = add_new_interfaces
    self.obsolete_old_declarations = obsolete_old_declarations
75 | 60 |
def _load_idl_file(file_name, import_options):
  """Loads an IDL file into memory.

  Args:
    file_name -- path of the IDL file to parse.
    import_options -- DatabaseBuilderOptions supplying the IDL syntax.
  Returns:
    An IDLFile model built from the parsed AST.
  Raises:
    RuntimeError -- if the parser reports a SyntaxError; the message
      includes the offending file content for debugging.
  """
  idl_parser = idlparser.IDLParser(import_options.idl_syntax)

  # BUG FIX: the file handle leaked if read() raised; a context manager
  # closes it on every path.
  with open(file_name, 'r') as f:
    content = f.read()

  # Keep the try body minimal: only the parse can raise SyntaxError here.
  try:
    idl_ast = idl_parser.parse(content)
    return IDLFile(idl_ast, file_name)
  except SyntaxError as e:
    raise RuntimeError('Failed to load file %s: %s: Content: %s[end]'
                       % (file_name, e, content))
91 | 75 |
92 | 76 |
def format_exception(e):
  """Return a traceback-style string for the exception currently handled.

  Combines the current call stack (minus this helper's own frames) with
  the traceback and message of the in-flight exception, mimicking the
  interpreter's default "Traceback (most recent call last):" report.
  """
  exc_type, exc_value, exc_tb = sys.exc_info()
  # Drop the last two stack frames so the report ends at the caller.
  frames = traceback.format_stack()[:-2]
  frames += traceback.format_tb(exc_tb)
  frames += traceback.format_exception_only(exc_type, exc_value)
  report = "Traceback (most recent call last):\n" + "".join(frames)
  # Strip the trailing newline.
  return report[:-1]
105 | |
106 | |
# Compile IDL using Blink's IDL compiler.
def _new_compile_idl_file(build, file_name, import_options):
  """Compiles one IDL file with the Blink compiler.

  Returns the compiled IDL definition on success, or 1 on failure
  (after dumping the error and a stack trace to stdout).
  """
  try:
    full_path = os.path.realpath(file_name)
    return build.idl_compiler.compile_file(full_path)
  except Exception as err:
    print('ERROR: idl_compiler.py: ' + os.path.basename(file_name))
    print(err)
    print('')
    print('Stack Dump:')
    print(format_exception(err))
    return 1
121 | |
122 | |
# Create the Model (IDLFile) from the new AST of the compiled IDL file.
def _new_load_idl_file(build, file_name, import_options):
  """Builds an IDLFile model from a previously cached Blink AST.

  Returns the IDLFile on success, or 1 on failure (after dumping the
  error and a stack trace to stdout).
  """
  try:
    # The interface name matches the IDL filename one-for-one in WebKit,
    # and is the key under which the AST was cached.
    interface_name = os.path.splitext(os.path.basename(file_name))[0]
    return IDLFile(new_asts[interface_name], file_name)
  except Exception as err:
    print('ERROR: loading AST from cache: ' + os.path.basename(file_name))
    print(err)
    print('')
    print('Stack Dump:')
    print(format_exception(err))
    return 1
139 | |
140 | |
# New IDL parser builder.
class Build():
  """Wraps Blink's Dart IDL compiler for AST-only compilation."""

  def __init__(self, provider):
    # TODO(terry): Consider using the generator to do the work today we're
    #              driven by the databasebuilder.  Blink compiler requires
    #              an output directory even though we don't use (yet). Might
    #              use the code generator portion of the new IDL compiler
    #              then we'd have a real output directory. Today we use the
    #              compiler to only create an AST.
    self.output_directory = tempfile.mkdtemp()
    attrib_file = os.path.join('Source',
                               idl_validator.EXTENDED_ATTRIBUTES_FILENAME)
    # Create compiler.
    self.idl_compiler = compiler.IdlCompilerDart(self.output_directory,
                                                 attrib_file,
                                                 interfaces_info=interfaces_info,
                                                 only_if_changed=True)

  def format_exception(self, e):
    # Delegate to the module-level helper instead of duplicating its logic.
    return format_exception(e)

  def generate_from_idl(self, idl_file):
    """Compiles idl_file; returns an IDLFile model, or 1 on failure."""
    try:
      idl_file_fullpath = os.path.realpath(idl_file)
      # BUG FIX: the compile result was discarded and the return statement
      # referenced undefined names (idl_ast, file_name), which raised a
      # NameError on the success path.  Capture the AST and return it with
      # the input path, matching _load_idl_file's contract.
      idl_ast = self.idl_compiler.compile_file(idl_file_fullpath)
    except Exception as err:
      print('ERROR: idl_compiler.py: ' + os.path.basename(idl_file))
      print(err)
      print('')
      print('Stack Dump:')
      print(self.format_exception(err))
      return 1

    return IDLFile(idl_ast, idl_file)
185 | |
186 | |
class DatabaseBuilder(object):
  def __init__(self, database):
    """DatabaseBuilder imports and merges interfaces into the given
    Database."""
    self._database = database
    # Interfaces and 'implements' statements accumulated by
    # import_idl_files, consumed by merge_imported_interfaces.
    self._imported_interfaces = []
    self._impl_stmts = []
    self.conditionals_met = set()
199 def _resolve_type_defs(self, idl_file): | 86 def _resolve_type_defs(self, idl_file): |
200 type_def_map = {} | 87 type_def_map = {} |
201 # build map | 88 # build map |
202 for type_def in idl_file.typeDefs: | 89 for type_def in idl_file.typeDefs: |
203 if type_def.type.id != type_def.id: # sanity check | 90 if type_def.type.id != type_def.id: # sanity check |
204 type_def_map[type_def.id] = type_def.type.id | 91 type_def_map[type_def.id] = type_def.type.id |
205 # use the map | 92 # use the map |
206 for type_node in idl_file.all(IDLType): | 93 for type_node in idl_file.all(IDLType): |
207 while type_node.id in type_def_map: | 94 while type_node.id in type_def_map: |
208 type_node.id = type_def_map[type_node.id] | 95 type_node.id = type_def_map[type_node.id] |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
298 | 185 |
299 def _build_signatures_map(self, idl_node_list): | 186 def _build_signatures_map(self, idl_node_list): |
300 """Creates a hash table mapping signatures to idl_nodes for the | 187 """Creates a hash table mapping signatures to idl_nodes for the |
301 given list of nodes""" | 188 given list of nodes""" |
302 res = {} | 189 res = {} |
303 for idl_node in idl_node_list: | 190 for idl_node in idl_node_list: |
304 sig = self._sign(idl_node) | 191 sig = self._sign(idl_node) |
305 if sig is None: | 192 if sig is None: |
306 continue | 193 continue |
307 if sig in res: | 194 if sig in res: |
308 op = res[sig] | 195 raise RuntimeError('Warning: Multiple members have the same ' |
309 # Only report if the the operations that match are either both suppresse
d | 196 'signature: "%s"' % sig) |
310 # or both not suppressed. Optional args aren't part of type signature | |
311 # for this routine. Suppressing a non-optional type and supplementing | |
312 # with an optional type appear the same. | |
313 if idl_node.is_fc_suppressed == op.is_fc_suppressed: | |
314 raise RuntimeError('Warning: Multiple members have the same ' | |
315 ' signature: "%s"' % sig) | |
316 res[sig] = idl_node | 197 res[sig] = idl_node |
317 return res | 198 return res |
318 | 199 |
319 def _get_parent_interfaces(self, interface): | 200 def _get_parent_interfaces(self, interface): |
320 """Return a list of all the parent interfaces of a given interface""" | 201 """Return a list of all the parent interfaces of a given interface""" |
321 res = [] | 202 res = [] |
322 | 203 |
323 def recurse(current_interface): | 204 def recurse(current_interface): |
324 if current_interface in res: | 205 if current_interface in res: |
325 return | 206 return |
(...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
487 import_options.source_attributes) | 368 import_options.source_attributes) |
488 return | 369 return |
489 # not found, so add new one | 370 # not found, so add new one |
490 parent = IDLParentInterface(None) | 371 parent = IDLParentInterface(None) |
491 parent.type = IDLType(implemented_name) | 372 parent.type = IDLType(implemented_name) |
492 if source: | 373 if source: |
493 parent.annotations[source] = IDLAnnotation( | 374 parent.annotations[source] = IDLAnnotation( |
494 import_options.source_attributes) | 375 import_options.source_attributes) |
495 interface.parents.append(parent) | 376 interface.parents.append(parent) |
496 | 377 |
497 def merge_imported_interfaces(self, blink_parser): | 378 def merge_imported_interfaces(self): |
498 """Merges all imported interfaces and loads them into the DB.""" | 379 """Merges all imported interfaces and loads them into the DB.""" |
499 imported_interfaces = self._imported_interfaces | |
500 | 380 |
501 # Step 1: Pre process imported interfaces | 381 # Step 1: Pre process imported interfaces |
502 # for interface, import_options in imported_interfaces.iteritems(): | 382 for interface, import_options in self._imported_interfaces: |
503 for interface, import_options in imported_interfaces: | |
504 self._annotate(interface, import_options) | 383 self._annotate(interface, import_options) |
505 | 384 |
506 # Step 2: Add all new interfaces and merge overlapping ones | 385 # Step 2: Add all new interfaces and merge overlapping ones |
507 for interface, import_options in imported_interfaces: | 386 for interface, import_options in self._imported_interfaces: |
508 if not interface.is_supplemental: | 387 if not interface.is_supplemental: |
509 if self._database.HasInterface(interface.id): | 388 if self._database.HasInterface(interface.id): |
510 old_interface = self._database.GetInterface(interface.id) | 389 old_interface = self._database.GetInterface(interface.id) |
511 self._merge_interfaces(old_interface, interface, import_options) | 390 self._merge_interfaces(old_interface, interface, import_options) |
512 else: | 391 else: |
513 if import_options.add_new_interfaces: | 392 if import_options.add_new_interfaces: |
514 self._database.AddInterface(interface) | 393 self._database.AddInterface(interface) |
515 | 394 |
516 # Step 3: Merge in supplemental interfaces | 395 # Step 3: Merge in supplemental interfaces |
517 for interface, import_options in imported_interfaces: | 396 for interface, import_options in self._imported_interfaces: |
518 if interface.is_supplemental: | 397 if interface.is_supplemental: |
519 target = interface.id | 398 target_name = interface.ext_attrs['Supplemental'] |
| 399 if target_name: |
| 400 # [Supplemental=Window] - merge into Window. |
| 401 target = target_name |
| 402 else: |
| 403 # [Supplemental] - merge into existing inteface with same name. |
| 404 target = interface.id |
520 if self._database.HasInterface(target): | 405 if self._database.HasInterface(target): |
521 old_interface = self._database.GetInterface(target) | 406 old_interface = self._database.GetInterface(target) |
522 self._merge_interfaces(old_interface, interface, import_options) | 407 self._merge_interfaces(old_interface, interface, import_options) |
523 else: | 408 else: |
524 _logger.warning("Supplemental target '%s' not found", target) | 409 _logger.warning("Supplemental target '%s' not found", target) |
525 | 410 |
526 # Step 4: Resolve 'implements' statements | 411 # Step 4: Resolve 'implements' statements |
527 for impl_stmt, import_options in self._impl_stmts: | 412 for impl_stmt, import_options in self._impl_stmts: |
528 self._merge_impl_stmt(impl_stmt, import_options) | 413 self._merge_impl_stmt(impl_stmt, import_options) |
529 | 414 |
530 self._impl_stmts = [] | 415 self._impl_stmts = [] |
531 self._imported_interfaces = [] | 416 self._imported_interfaces = [] |
532 | 417 |
533 # Compile the IDL file with the Blink compiler and remember each AST for the | 418 def import_idl_files(self, file_paths, import_options, parallel): |
534 # IDL. | |
535 def _blink_compile_idl_files(self, file_paths, import_options, parallel, is_da
rt_idl): | |
536 if not(is_dart_idl): | |
537 start_time = time.time() | |
538 | |
539 # 2-stage computation: individual, then overall | |
540 for file_path in file_paths: | |
541 filename = os.path.splitext(os.path.basename(file_path))[0] | |
542 compute_info_individual(file_path, 'dart') | |
543 info_individuals = [info_individual()] | |
544 compute_interfaces_info_overall(info_individuals) | |
545 | |
546 end_time = time.time() | |
547 print 'Compute dependencies %s seconds' % round((end_time - start_time), | |
548 2) | |
549 | |
550 # use --parallel for async on a pool. Look at doing it like Blink | |
551 blink_compiler = _new_compile_idl_file | |
552 process_ast = self._process_ast | |
553 | |
554 if parallel: | 419 if parallel: |
555 # Parse the IDL files in parallel. | 420 # Parse the IDL files in parallel. |
556 pool = multiprocessing.Pool() | 421 pool = multiprocessing.Pool() |
557 try: | 422 try: |
558 for file_path in file_paths: | 423 for file_path in file_paths: |
559 pool.apply_async(blink_compiler, | 424 pool.apply_async(_load_idl_file, |
560 [ self.build, file_path, import_options], | 425 [ file_path, import_options], |
561 callback = lambda new_ast: process_ast(new_ast, True)
) | |
562 pool.close() | |
563 pool.join() | |
564 except: | |
565 pool.terminate() | |
566 raise | |
567 else: | |
568 # Parse the IDL files serially. | |
569 start_time = time.time() | |
570 | |
571 for file_path in file_paths: | |
572 file_path = os.path.normpath(file_path) | |
573 ast = blink_compiler(self.build, file_path, import_options) | |
574 process_ast(os.path.splitext(os.path.basename(file_path))[0], ast, True) | |
575 | |
576 end_time = time.time() | |
577 print 'Compiled %s IDL files in %s seconds' % (len(file_paths), | |
578 round((end_time - start_time
), 2)) | |
579 | |
580 def _process_ast(self, filename, ast, blink_parser = False): | |
581 if blink_parser: | |
582 new_asts[filename] = ast | |
583 else: | |
584 for name in ast.interfaces: | |
585 # Index by filename some files are partial on another interface (e.g., | |
586 # DocumentFontFaceSet.idl). | |
587 new_asts[filename] = ast.interfaces | |
588 | |
589 def import_idl_files(self, file_paths, import_options, parallel, blink_parser,
is_dart_idl): | |
590 if blink_parser: | |
591 self._blink_compile_idl_files(file_paths, import_options, parallel, is_dar
t_idl) | |
592 | |
593 # use --parallel for async on a pool. Look at doing it like Blink | |
594 idl_loader = _new_load_idl_file if blink_parser else _load_idl_file | |
595 | |
596 if parallel: | |
597 # Parse the IDL files in parallel. | |
598 pool = multiprocessing.Pool() | |
599 try: | |
600 for file_path in file_paths: | |
601 pool.apply_async(idl_loader, | |
602 [ self.build, file_path, import_options], | |
603 callback = lambda idl_file: | 426 callback = lambda idl_file: |
604 self._process_idl_file(idl_file, import_options)) | 427 self._process_idl_file(idl_file, import_options)) |
605 pool.close() | 428 pool.close() |
606 pool.join() | 429 pool.join() |
607 except: | 430 except: |
608 pool.terminate() | 431 pool.terminate() |
609 raise | 432 raise |
610 else: | 433 else: |
611 start_time = time.time() | |
612 | |
613 # Parse the IDL files in serial. | 434 # Parse the IDL files in serial. |
614 for file_path in file_paths: | 435 for file_path in file_paths: |
615 file_path = os.path.normpath(file_path) | 436 idl_file = _load_idl_file(file_path, import_options) |
616 idl_file = idl_loader(self.build, file_path, import_options) | 437 self._process_idl_file(idl_file, import_options) |
617 _logger.info('Processing %s' % os.path.splitext(os.path.basename(file_pa
th))[0]) | |
618 self._process_idl_file(idl_file, import_options, is_dart_idl) | |
619 | 438 |
620 end_time = time.time() | 439 def _process_idl_file(self, idl_file, |
621 | 440 import_options): |
622 print 'Total %s files %sprocessed in databasebuilder in %s seconds' % \ | 441 self._strip_ext_attributes(idl_file) |
623 (len(file_paths), '' if blink_parser else 'compiled/', \ | |
624 round((end_time - start_time), 2)) | |
625 | |
626 def _process_idl_file(self, idl_file, import_options, dart_idl = False): | |
627 # TODO(terry): strip_ext_attributes on an idl_file does nothing. | |
628 #self._strip_ext_attributes(idl_file) | |
629 self._resolve_type_defs(idl_file) | 442 self._resolve_type_defs(idl_file) |
630 self._rename_types(idl_file, import_options) | 443 self._rename_types(idl_file, import_options) |
631 | 444 |
632 def enabled(idl_node): | 445 def enabled(idl_node): |
633 return self._is_node_enabled(idl_node, import_options.idl_defines) | 446 return self._is_node_enabled(idl_node, import_options.idl_defines) |
634 | 447 |
635 for interface in idl_file.interfaces: | 448 for interface in idl_file.interfaces: |
636 if not self._is_node_enabled(interface, import_options.idl_defines): | 449 if not self._is_node_enabled(interface, import_options.idl_defines): |
637 _logger.info('skipping interface %s (source=%s)' | 450 _logger.info('skipping interface %s (source=%s)' |
638 % (interface.id, import_options.source)) | 451 % (interface.id, import_options.source)) |
639 continue | 452 continue |
640 | 453 |
641 _logger.info('importing interface %s (source=%s file=%s)' | 454 _logger.info('importing interface %s (source=%s file=%s)' |
642 % (interface.id, import_options.source, os.path.basename(idl_file.filena
me))) | 455 % (interface.id, import_options.source, idl_file)) |
643 | |
644 interface.attributes = filter(enabled, interface.attributes) | 456 interface.attributes = filter(enabled, interface.attributes) |
645 interface.operations = filter(enabled, interface.operations) | 457 interface.operations = filter(enabled, interface.operations) |
646 self._imported_interfaces.append((interface, import_options)) | 458 self._imported_interfaces.append((interface, import_options)) |
647 | 459 |
648 for implStmt in idl_file.implementsStatements: | 460 for implStmt in idl_file.implementsStatements: |
649 self._impl_stmts.append((implStmt, import_options)) | 461 self._impl_stmts.append((implStmt, import_options)) |
650 | 462 |
651 for enum in idl_file.enums: | 463 for enum in idl_file.enums: |
652 self._database.AddEnum(enum) | 464 self._database.AddEnum(enum) |
653 | 465 |
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
755 # TODO(antonm): Ideally we'd like to have pristine copy of WebKit IDLs and
fetch | 567 # TODO(antonm): Ideally we'd like to have pristine copy of WebKit IDLs and
fetch |
756 # this information directly from it. Unfortunately right now database is
massaged | 568 # this information directly from it. Unfortunately right now database is
massaged |
757 # a lot so it's difficult to maintain necessary information on Window itse
lf. | 569 # a lot so it's difficult to maintain necessary information on Window itse
lf. |
758 interface = self._database.GetInterface(type) | 570 interface = self._database.GetInterface(type) |
759 if 'V8EnabledPerContext' in attr.ext_attrs: | 571 if 'V8EnabledPerContext' in attr.ext_attrs: |
760 interface.ext_attrs['synthesizedV8EnabledPerContext'] = \ | 572 interface.ext_attrs['synthesizedV8EnabledPerContext'] = \ |
761 attr.ext_attrs['V8EnabledPerContext'] | 573 attr.ext_attrs['V8EnabledPerContext'] |
762 if 'V8EnabledAtRuntime' in attr.ext_attrs: | 574 if 'V8EnabledAtRuntime' in attr.ext_attrs: |
763 interface.ext_attrs['synthesizedV8EnabledAtRuntime'] = \ | 575 interface.ext_attrs['synthesizedV8EnabledAtRuntime'] = \ |
764 attr.ext_attrs['V8EnabledAtRuntime'] or attr.id | 576 attr.ext_attrs['V8EnabledAtRuntime'] or attr.id |
OLD | NEW |