OLD | NEW |
1 # Copyright (C) 2013 Google Inc. All rights reserved. | 1 # Copyright (C) 2013 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
64 sys.path.insert(1, third_party) | 64 sys.path.insert(1, third_party) |
65 from ply import yacc | 65 from ply import yacc |
66 | 66 |
67 # Base parser is in Chromium src/tools/idl_parser | 67 # Base parser is in Chromium src/tools/idl_parser |
68 tools_dir = os.path.join(module_path, os.pardir, os.pardir, os.pardir, os.pardir, os.pardir, 'tools') | 68 tools_dir = os.path.join(module_path, os.pardir, os.pardir, os.pardir, os.pardir, os.pardir, 'tools') |
69 sys.path.append(tools_dir) | 69 sys.path.append(tools_dir) |
70 from idl_parser.idl_parser import IDLParser, ListFromConcat | 70 from idl_parser.idl_parser import IDLParser, ListFromConcat |
71 from idl_parser.idl_parser import ParseFile as parse_file | 71 from idl_parser.idl_parser import ParseFile as parse_file |
72 | 72 |
73 from blink_idl_lexer import BlinkIDLLexer | 73 from blink_idl_lexer import BlinkIDLLexer |
| 74 import blink_idl_lexer |
74 | 75 |
75 | 76 |
76 # Explicitly set starting symbol to rule defined only in base parser. | 77 # Explicitly set starting symbol to rule defined only in base parser. |
77 # BEWARE that the starting symbol should NOT be defined in both the base parser | 78 # BEWARE that the starting symbol should NOT be defined in both the base parser |
78 # and the derived one, as otherwise which is used depends on which line number | 79 # and the derived one, as otherwise which is used depends on which line number |
79 # is lower, which is fragile. Instead, either use one in base parser or | 80 # is lower, which is fragile. Instead, either use one in base parser or |
80 # create a new symbol, so that this is unambiguous. | 81 # create a new symbol, so that this is unambiguous. |
81 # FIXME: unfortunately, this doesn't work in PLY 3.4, so need to duplicate the | 82 # FIXME: unfortunately, this doesn't work in PLY 3.4, so need to duplicate the |
82 # rule below. | 83 # rule below. |
83 STARTING_SYMBOL = 'Definitions' | 84 STARTING_SYMBOL = 'Definitions' |
(...skipping 241 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
325 # FIXME: Upstream | 326 # FIXME: Upstream |
326 def p_TypeSuffix(self, p): | 327 def p_TypeSuffix(self, p): |
327 """TypeSuffix : '[' ']' TypeSuffix | 328 """TypeSuffix : '[' ']' TypeSuffix |
328 | '?' TypeSuffixStartingWithArray | 329 | '?' TypeSuffixStartingWithArray |
329 |""" | 330 |""" |
330 if len(p) == 4: | 331 if len(p) == 4: |
331 p[0] = self.BuildProduction('Array', p, 1, p[3]) | 332 p[0] = self.BuildProduction('Array', p, 1, p[3]) |
332 elif len(p) == 3: | 333 elif len(p) == 3: |
333 p[0] = ListFromConcat(self.BuildTrue('NULLABLE'), p[2]) | 334 p[0] = ListFromConcat(self.BuildTrue('NULLABLE'), p[2]) |
334 | 335 |
335 # [b76.1] Add support for compound Extended Attribute values (A&B and A|B) | 336 # [b94] Add support for OR Extended Attribute values "A|B" |
336 def p_ExtendedAttributeIdentList(self, p): | 337 def p_ExtendedAttributeIdentList(self, p): |
337 """ExtendedAttributeIdentList : identifier '=' identifier '&' IdentAndList | 338 """ExtendedAttributeIdentList : identifier '=' '(' IdentifierList ')' |
338 | identifier '=' identifier '|' IdentOrList""" | 339 | identifier '=' identifier '|' IdentOrList""" |
339 value = self.BuildAttribute('VALUE', p[3] + p[4] + p[5]) | 340 if type(p[4]) is list: |
| 341 value = self.BuildAttribute('VALUE', ','.join(p[4])) |
| 342 else: |
| 343 value = self.BuildAttribute('VALUE', p[3] + p[4] + p[5]) |
340 p[0] = self.BuildNamed('ExtAttribute', p, 1, value) | 344 p[0] = self.BuildNamed('ExtAttribute', p, 1, value) |
341 | 345 |
342 # [b76.2] A&B&C | 346 # [b94.1] A|B|C |
343 def p_IdentAndList(self, p): | |
344 """IdentAndList : identifier '&' IdentAndList | |
345 | identifier""" | |
346 if len(p) > 3: | |
347 p[0] = p[1] + p[2] + p[3] | |
348 else: | |
349 p[0] = p[1] | |
350 | |
351 # [b76.3] A|B|C | |
352 def p_IdentOrList(self, p): | 347 def p_IdentOrList(self, p): |
353 """IdentOrList : identifier '|' IdentOrList | 348 """IdentOrList : identifier '|' IdentOrList |
354 | identifier""" | 349 | identifier""" |
355 if len(p) > 3: | 350 if len(p) > 3: |
356 p[0] = p[1] + p[2] + p[3] | 351 p[0] = p[1] + p[2] + p[3] |
357 else: | 352 else: |
358 p[0] = p[1] | 353 p[0] = p[1] |
359 | 354 |
360 # Blink extension: Add support for compound Extended Attribute values over string literals ("A"|"B") | 355 # Blink extension: Add support for compound Extended Attribute values over string literals ("A"|"B") |
361 def p_ExtendedAttributeStringLiteralList(self, p): | 356 def p_ExtendedAttributeStringLiteralList(self, p): |
(...skipping 11 matching lines...) Expand all Loading... |
373 return ls[1].value | 368 return ls[1].value |
374 | 369 |
375 if len(p) > 3: | 370 if len(p) > 3: |
376 p[0] = unwrap_string(p[1]) + p[2] + p[3] | 371 p[0] = unwrap_string(p[1]) + p[2] + p[3] |
377 else: | 372 else: |
378 p[0] = unwrap_string(p[1]) | 373 p[0] = unwrap_string(p[1]) |
379 | 374 |
380 def __init__(self, | 375 def __init__(self, |
381 # common parameters | 376 # common parameters |
382 debug=False, | 377 debug=False, |
| 378 # local parameters |
| 379 rewrite_tables=False, |
383 # idl_parser parameters | 380 # idl_parser parameters |
384 lexer=None, verbose=False, mute_error=False, | 381 lexer=None, verbose=False, mute_error=False, |
385 # yacc parameters | 382 # yacc parameters |
386 outputdir='', optimize=True, write_tables=False, | 383 outputdir='', optimize=True, write_tables=False, |
387 picklefile=None): | 384 picklefile=None): |
388 if debug: | 385 if debug: |
389 # Turn off optimization and caching, and write out tables, | 386 # Turn off optimization and caching, and write out tables, |
390 # to help debugging | 387 # to help debugging |
391 optimize = False | 388 optimize = False |
392 outputdir = None | 389 outputdir = None |
393 picklefile = None | 390 picklefile = None |
394 write_tables = True | 391 write_tables = True |
395 if outputdir: | 392 if outputdir: |
396 picklefile = picklefile or os.path.join(outputdir, 'parsetab.pickle') | 393 picklefile = picklefile or os.path.join(outputdir, 'parsetab.pickle') |
| 394 if rewrite_tables: |
| 395 try: |
| 396 os.unlink(picklefile) |
| 397 except OSError: |
| 398 pass |
397 | 399 |
398 lexer = lexer or BlinkIDLLexer(debug=debug, | 400 lexer = lexer or BlinkIDLLexer(debug=debug, |
399 outputdir=outputdir, | 401 outputdir=outputdir, |
400 optimize=optimize) | 402 optimize=optimize) |
401 self.lexer = lexer | 403 self.lexer = lexer |
402 self.tokens = lexer.KnownTokens() | 404 self.tokens = lexer.KnownTokens() |
403 # Using SLR (instead of LALR) generates the table faster, | 405 # Using SLR (instead of LALR) generates the table faster, |
404 # but produces the same output. This is ok b/c Web IDL (and Blink IDL) | 406 # but produces the same output. This is ok b/c Web IDL (and Blink IDL) |
405 # is an SLR grammar (as is often the case for simple LL(1) grammars). | 407 # is an SLR grammar (as is often the case for simple LL(1) grammars). |
406 # | 408 # |
(...skipping 19 matching lines...) Expand all Loading... |
426 self._parse_errors = 0 | 428 self._parse_errors = 0 |
427 self._parse_warnings = 0 | 429 self._parse_warnings = 0 |
428 self._last_error_msg = None | 430 self._last_error_msg = None |
429 self._last_error_lineno = 0 | 431 self._last_error_lineno = 0 |
430 self._last_error_pos = 0 | 432 self._last_error_pos = 0 |
431 | 433 |
432 | 434 |
433 ################################################################################ | 435 ################################################################################ |
434 | 436 |
435 def main(argv): | 437 def main(argv): |
436 # If file itself executed, cache parse table | 438 # If file itself executed, cache lex/parse tables |
437 try: | 439 try: |
438 outputdir = argv[1] | 440 outputdir = argv[1] |
439 except IndexError as err: | 441 except IndexError as err: |
440 print 'Usage: %s OUTPUT_DIR' % argv[0] | 442 print 'Usage: %s OUTPUT_DIR' % argv[0] |
441 return 1 | 443 return 1 |
442 parser = BlinkIDLParser(outputdir=outputdir) | 444 blink_idl_lexer.main(argv) |
| 445 # Important: rewrite_tables=True causes the cache file to be deleted if it |
| 446 # exists, thus making sure that PLY doesn't load it instead of regenerating |
| 447 # the parse table. |
| 448 parser = BlinkIDLParser(outputdir=outputdir, rewrite_tables=True) |
443 | 449 |
444 | 450 |
445 if __name__ == '__main__': | 451 if __name__ == '__main__': |
446 sys.exit(main(sys.argv)) | 452 sys.exit(main(sys.argv)) |
OLD | NEW |