Chromium Code Reviews | OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """ Lexer for PPAPI IDL | 6 """ Lexer for PPAPI IDL |
| 7 | 7 |
| 8 The lexer uses the PLY library to build a tokenizer which understands both | 8 The lexer uses the PLY library to build a tokenizer which understands both |
| 9 WebIDL and Pepper tokens. | 9 WebIDL and Pepper tokens. |
| 10 | 10 |
| 11 WebIDL, and WebIDL regular expressions can be found at: | 11 WebIDL, and WebIDL regular expressions can be found at: |
| 12 http://www.w3.org/TR/2012/CR-WebIDL-20120419/ | 12 http://heycam.github.io/webidl/ |
| 13 PLY can be found at: | 13 PLY can be found at: |
| 14 http://www.dabeaz.com/ply/ | 14 http://www.dabeaz.com/ply/ |
| 15 """ | 15 """ |
| 16 | 16 |
| 17 import os.path | 17 import os.path |
| 18 import sys | 18 import sys |
| 19 | 19 |
| 20 SRC_DIR = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) | 20 SRC_DIR = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) |
| 21 sys.path.insert(0, os.path.join(SRC_DIR, 'third_party')) | 21 sys.path.insert(0, os.path.join(SRC_DIR, 'third_party')) |
| 22 from ply import lex | 22 from ply import lex |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 84 'Nan' : 'NAN', | 84 'Nan' : 'NAN', |
| 85 'null' : 'NULL', | 85 'null' : 'NULL', |
| 86 'object' : 'OBJECT', | 86 'object' : 'OBJECT', |
| 87 'octet' : 'OCTET', | 87 'octet' : 'OCTET', |
| 88 'optional' : 'OPTIONAL', | 88 'optional' : 'OPTIONAL', |
| 89 'or' : 'OR', | 89 'or' : 'OR', |
| 90 'partial' : 'PARTIAL', | 90 'partial' : 'PARTIAL', |
| 91 'Promise' : 'PROMISE', | 91 'Promise' : 'PROMISE', |
| 92 'readonly' : 'READONLY', | 92 'readonly' : 'READONLY', |
| 93 'RegExp' : 'REGEXP', | 93 'RegExp' : 'REGEXP', |
| 94 'record' : 'RECORD', | |
| 94 'required' : 'REQUIRED', | 95 'required' : 'REQUIRED', |
| 95 'sequence' : 'SEQUENCE', | 96 'sequence' : 'SEQUENCE', |
| 96 'serializer' : 'SERIALIZER', | 97 'serializer' : 'SERIALIZER', |
| 97 'setlike' : 'SETLIKE', | 98 'setlike' : 'SETLIKE', |
| 98 'setter': 'SETTER', | 99 'setter': 'SETTER', |
| 99 'short' : 'SHORT', | 100 'short' : 'SHORT', |
| 100 'static' : 'STATIC', | 101 'static' : 'STATIC', |
| 101 'stringifier' : 'STRINGIFIER', | 102 'stringifier' : 'STRINGIFIER', |
| 102 'typedef' : 'TYPEDEF', | 103 'typedef' : 'TYPEDEF', |
| 103 'true' : 'TRUE', | 104 'true' : 'TRUE', |
| 104 'unsigned' : 'UNSIGNED', | 105 'unsigned' : 'UNSIGNED', |
| 105 'unrestricted' : 'UNRESTRICTED', | 106 'unrestricted' : 'UNRESTRICTED', |
| 107 'USVString' : 'USVSTRING', | |
|
haraken (2017/02/21 23:36:09): alphabetical order?
Yuki (2017/02/22 07:44:57): It's already in case-insensitive alphabetical order.
| |
| 106 'void' : 'VOID' | 108 'void' : 'VOID' |
| 107 } | 109 } |
| 108 | 110 |
| 109 # Token definitions | 111 # Token definitions |
| 110 # | 112 # |
| 111 # Lex assumes any value or function in the form of 't_<TYPE>' represents a | 113 # Lex assumes any value or function in the form of 't_<TYPE>' represents a |
| 112 # regular expression where a match will emit a token of type <TYPE>. In the | 114 # regular expression where a match will emit a token of type <TYPE>. In the |
| 113 # case of a function, the function is called when a match is made. These | 115 # case of a function, the function is called when a match is made. These |
| 114 # definitions come from WebIDL. | 116 # definitions come from WebIDL. |
| 115 # | 117 # |
| (...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 276 self.tokens = [] | 278 self.tokens = [] |
| 277 self._AddTokens(IDLLexer.tokens) | 279 self._AddTokens(IDLLexer.tokens) |
| 278 self._AddKeywords(IDLLexer.keywords) | 280 self._AddKeywords(IDLLexer.keywords) |
| 279 self._lexobj = None | 281 self._lexobj = None |
| 280 self.last = None | 282 self.last = None |
| 281 self.lines = None | 283 self.lines = None |
| 282 | 284 |
| 283 # If run by itself, attempt to build the lexer | 285 # If run by itself, attempt to build the lexer |
| 284 if __name__ == '__main__': | 286 if __name__ == '__main__': |
| 285 lexer_object = IDLLexer() | 287 lexer_object = IDLLexer() |
| OLD | NEW |