| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """ Lexer for PPAPI IDL | 6 """ Lexer for PPAPI IDL |
| 7 | 7 |
| 8 The lexer uses the PLY library to build a tokenizer which understands both | 8 The lexer uses the PLY library to build a tokenizer which understands both |
| 9 WebIDL and Pepper tokens. | 9 WebIDL and Pepper tokens. |
| 10 | 10 |
| (...skipping 49 matching lines...) |
| 60 ] | 60 ] |
| 61 | 61 |
| 62 # 'keywords' is a map of string to token type. All tokens matching | 62 # 'keywords' is a map of string to token type. All tokens matching |
| 63 # KEYWORD_OR_SYMBOL are matched against keywords dictionary, to determine | 63 # KEYWORD_OR_SYMBOL are matched against keywords dictionary, to determine |
| 64 # if the token is actually a keyword. | 64 # if the token is actually a keyword. |
| 65 keywords = { | 65 keywords = { |
| 66 'any' : 'ANY', | 66 'any' : 'ANY', |
| 67 'attribute' : 'ATTRIBUTE', | 67 'attribute' : 'ATTRIBUTE', |
| 68 'boolean' : 'BOOLEAN', | 68 'boolean' : 'BOOLEAN', |
| 69 'byte' : 'BYTE', | 69 'byte' : 'BYTE', |
| 70 'ByteString' : 'BYTESTRING', |
| 70 'callback' : 'CALLBACK', | 71 'callback' : 'CALLBACK', |
| 71 'const' : 'CONST', | 72 'const' : 'CONST', |
| 72 'creator' : 'CREATOR', | 73 'creator' : 'CREATOR', |
| 73 'Date' : 'DATE', | 74 'Date' : 'DATE', |
| 74 'deleter' : 'DELETER', | 75 'deleter' : 'DELETER', |
| 75 'dictionary' : 'DICTIONARY', | 76 'dictionary' : 'DICTIONARY', |
| 76 'DOMString' : 'DOMSTRING', | 77 'DOMString' : 'DOMSTRING', |
| 77 'double' : 'DOUBLE', | 78 'double' : 'DOUBLE', |
| 78 'enum' : 'ENUM', | 79 'enum' : 'ENUM', |
| 79 'false' : 'FALSE', | 80 'false' : 'FALSE', |
| 80 'float' : 'FLOAT', | 81 'float' : 'FLOAT', |
| 81 'exception' : 'EXCEPTION', | 82 'exception' : 'EXCEPTION', |
| 82 'getter': 'GETTER', | 83 'getter': 'GETTER', |
| 83 'implements' : 'IMPLEMENTS', | 84 'implements' : 'IMPLEMENTS', |
| 84 'Infinity' : 'INFINITY', | 85 'Infinity' : 'INFINITY', |
| 85 'inherit' : 'INHERIT', | 86 'inherit' : 'INHERIT', |
| 86 'interface' : 'INTERFACE', | 87 'interface' : 'INTERFACE', |
| 87 'legacycaller' : 'LEGACYCALLER', | 88 'legacycaller' : 'LEGACYCALLER', |
| 88 'long' : 'LONG', | 89 'long' : 'LONG', |
| 89 'Nan' : 'NAN', | 90 'Nan' : 'NAN', |
| 90 'null' : 'NULL', | 91 'null' : 'NULL', |
| 91 'object' : 'OBJECT', | 92 'object' : 'OBJECT', |
| 92 'octet' : 'OCTET', | 93 'octet' : 'OCTET', |
| 93 'optional' : 'OPTIONAL', | 94 'optional' : 'OPTIONAL', |
| 94 'or' : 'OR', | 95 'or' : 'OR', |
| 95 'partial' : 'PARTIAL', | 96 'partial' : 'PARTIAL', |
| 96 'readonly' : 'READONLY', | 97 'readonly' : 'READONLY', |
| 98 'RegExp' : 'REGEXP', |
| 97 'sequence' : 'SEQUENCE', | 99 'sequence' : 'SEQUENCE', |
| 100 'serializer' : 'SERIALIZER', |
| 98 'setter': 'SETTER', | 101 'setter': 'SETTER', |
| 99 'short' : 'SHORT', | 102 'short' : 'SHORT', |
| 100 'static' : 'STATIC', | 103 'static' : 'STATIC', |
| 101 'stringifier' : 'STRINGIFIER', | 104 'stringifier' : 'STRINGIFIER', |
| 102 'typedef' : 'TYPEDEF', | 105 'typedef' : 'TYPEDEF', |
| 103 'true' : 'TRUE', | 106 'true' : 'TRUE', |
| 104 'unsigned' : 'UNSIGNED', | 107 'unsigned' : 'UNSIGNED', |
| 105 'unrestricted' : 'UNRESTRICTED', | 108 'unrestricted' : 'UNRESTRICTED', |
| 106 'void' : 'VOID' | 109 'void' : 'VOID' |
| 107 } | 110 } |
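The rows above add `ByteString`, `RegExp`, and `serializer` to the keyword map. For readers unfamiliar with the pattern, the sketch below shows how a PLY lexer typically uses such a dictionary to promote identifiers to keyword tokens. The rule name `t_KEYWORD_OR_SYMBOL`, the regex, and the tiny keyword subset are illustrative assumptions, not the reviewed file's exact implementation, which sits in the elided region.

```python
# Minimal, self-contained sketch of keyword promotion in PLY. The names and
# regex here are assumptions for illustration, not the reviewed file's code.
import ply.lex as lex

# Small illustrative subset of the keyword map above.
keywords = {
    'interface': 'INTERFACE',
    'ByteString': 'BYTESTRING',
    'serializer': 'SERIALIZER',
}

# Every token type a rule may emit must be declared to PLY.
tokens = ['IDENTIFIER'] + list(keywords.values())

def t_KEYWORD_OR_SYMBOL(t):
    r'[A-Za-z_][A-Za-z_0-9]*'
    # Promote the match to a keyword token if it is in the map,
    # otherwise leave it as a plain identifier.
    t.type = keywords.get(t.value, 'IDENTIFIER')
    return t

t_ignore = ' \t\n'

def t_error(t):
    # Stub handler: skip characters with no matching rule.
    t.lexer.skip(1)

if __name__ == '__main__':
    lexer = lex.lex()
    lexer.input('interface Foo { serializer; };')
    for tok in lexer:
        print(tok.type, tok.value)
```

Run on `interface Foo { serializer; };`, this yields INTERFACE, IDENTIFIER, and SERIALIZER tokens, with the punctuation skipped by the stub error handler.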
| (...skipping 168 matching lines...) |
| 276 self.tokens = [] | 279 self.tokens = [] |
| 277 self._AddTokens(IDLLexer.tokens) | 280 self._AddTokens(IDLLexer.tokens) |
| 278 self._AddKeywords(IDLLexer.keywords) | 281 self._AddKeywords(IDLLexer.keywords) |
| 279 self._lexobj = None | 282 self._lexobj = None |
| 280 self.last = None | 283 self.last = None |
| 281 self.lines = None | 284 self.lines = None |
| 282 | 285 |
| 283 # If run by itself, attempt to build the lexer | 286 # If run by itself, attempt to build the lexer |
| 284 if __name__ == '__main__': | 287 if __name__ == '__main__': |
| 285 lexer_object = IDLLexer() | 288 lexer_object = IDLLexer() |
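The constructor rows above merge the class-level `tokens` list and `keywords` map into the instance state that PLY reads, and the `__main__` stanza builds the lexer when the module is run directly. Below is a minimal class-based sketch of that wiring; `MiniIDLLexer`, `BuildLexer`, and the stub rules are assumptions for illustration only, since `_AddTokens`/`_AddKeywords` and the real build path are in the elided regions and may differ.

```python
# Sketch only: a tiny class-based PLY lexer that mirrors the constructor and
# __main__ wiring above. Names such as MiniIDLLexer and BuildLexer are
# illustrative assumptions, not the reviewed IDLLexer's API.
import ply.lex as lex

class MiniIDLLexer(object):
  # Class-level tables, analogous to IDLLexer.tokens and IDLLexer.keywords.
  base_tokens = ['IDENTIFIER']
  keywords = {'interface': 'INTERFACE', 'ByteString': 'BYTESTRING'}

  def __init__(self):
    # Roughly what _AddTokens/_AddKeywords appear to do: collect every token
    # type (plain tokens plus keyword types) into the list PLY will read.
    self.tokens = list(self.base_tokens) + list(self.keywords.values())
    self._lexobj = None

  def t_KEYWORD_OR_SYMBOL(self, t):
    r'[A-Za-z_][A-Za-z_0-9]*'
    # Same keyword-promotion lookup as in the earlier sketch.
    t.type = self.keywords.get(t.value, 'IDENTIFIER')
    return t

  t_ignore = ' \t\n'

  def t_error(self, t):
    t.lexer.skip(1)

  def BuildLexer(self):
    # lex.lex(object=...) discovers the t_* rules and `tokens` on this object.
    if self._lexobj is None:
      self._lexobj = lex.lex(object=self)
    return self._lexobj

# If run by itself, build the lexer and tokenize a small input.
if __name__ == '__main__':
  lexobj = MiniIDLLexer().BuildLexer()
  lexobj.input('interface ByteString')
  print([(t.type, t.value) for t in lexobj])
```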