# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os.path
import sys
import unittest

import mojo_lexer

# Try to load the ply module; if that fails, assume it lives in the
# third_party directory.
try:
  # Disable lint check which fails to find the ply module.
  # pylint: disable=F0401
  from ply import lex
except ImportError:
  module_path, module_name = os.path.split(__file__)
  third_party = os.path.join(module_path, os.pardir, os.pardir, os.pardir,
                             os.pardir, os.pardir, 'third_party')
  sys.path.append(third_party)
  # pylint: disable=F0401
  from ply import lex


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return self.type == other.type and self.value == other.value and \
         self.lineno == other.lineno and self.lexpos == other.lexpos
setattr(lex.LexToken, '__eq__', _LexTokenEq)
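
# After this patch, two LexTokens compare equal exactly when their type,
# value, lineno, and lexpos all match. For example (illustrative only, using
# the helper defined below):
#   _MakeLexToken("NAME", "x") == _MakeLexToken("NAME", "x")  # -> True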


def _MakeLexToken(type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = type, value, lineno, lexpos
  return rv


def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
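# For example (illustrative only): _MakeLexTokenForKeyword("module") yields a
# token whose type is "MODULE" and whose value is "module".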


class MojoLexerTest(unittest.TestCase):
  """Tests mojo_lexer (in particular, Lexer)."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojo_lexer.Lexer("my_file.mojom"))

  def testValidSingleKeywords(self):
    """Tests valid, single keywords."""
    self.assertEqual(self._SingleTokenForInput("handle"),
                     _MakeLexTokenForKeyword("handle"))
    self.assertEqual(self._SingleTokenForInput("data_pipe_consumer"),
                     _MakeLexTokenForKeyword("data_pipe_consumer"))
    self.assertEqual(self._SingleTokenForInput("data_pipe_producer"),
                     _MakeLexTokenForKeyword("data_pipe_producer"))
    self.assertEqual(self._SingleTokenForInput("message_pipe"),
                     _MakeLexTokenForKeyword("message_pipe"))
    self.assertEqual(self._SingleTokenForInput("import"),
                     _MakeLexTokenForKeyword("import"))
    self.assertEqual(self._SingleTokenForInput("module"),
                     _MakeLexTokenForKeyword("module"))
    self.assertEqual(self._SingleTokenForInput("struct"),
                     _MakeLexTokenForKeyword("struct"))
    self.assertEqual(self._SingleTokenForInput("interface"),
                     _MakeLexTokenForKeyword("interface"))
    self.assertEqual(self._SingleTokenForInput("enum"),
                     _MakeLexTokenForKeyword("enum"))

  def testValidSingleTokens(self):
    """Tests valid, single (non-keyword) tokens."""
    self.assertEqual(self._SingleTokenForInput("asdf"),
                     _MakeLexToken("NAME", "asdf"))
    self.assertEqual(self._SingleTokenForInput("@123"),
                     _MakeLexToken("ORDINAL", "@123"))
    self.assertEqual(self._SingleTokenForInput("456"),
                     _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEqual(self._SingleTokenForInput("0765"),
                     _MakeLexToken("INT_CONST_OCT", "0765"))
    self.assertEqual(self._SingleTokenForInput("0x01aB2eF3"),
                     _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEqual(self._SingleTokenForInput("123.456"),
                     _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEqual(self._SingleTokenForInput("'x'"),
                     _MakeLexToken("CHAR_CONST", "'x'"))
    self.assertEqual(self._SingleTokenForInput("\"hello\""),
                     _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEqual(self._SingleTokenForInput("+"),
                     _MakeLexToken("PLUS", "+"))
    self.assertEqual(self._SingleTokenForInput("-"),
                     _MakeLexToken("MINUS", "-"))
    self.assertEqual(self._SingleTokenForInput("*"),
                     _MakeLexToken("TIMES", "*"))
    self.assertEqual(self._SingleTokenForInput("/"),
                     _MakeLexToken("DIVIDE", "/"))
    self.assertEqual(self._SingleTokenForInput("%"),
                     _MakeLexToken("MOD", "%"))
    self.assertEqual(self._SingleTokenForInput("|"),
                     _MakeLexToken("OR", "|"))
    self.assertEqual(self._SingleTokenForInput("~"),
                     _MakeLexToken("NOT", "~"))
    self.assertEqual(self._SingleTokenForInput("^"),
                     _MakeLexToken("XOR", "^"))
    self.assertEqual(self._SingleTokenForInput("<<"),
                     _MakeLexToken("LSHIFT", "<<"))
    self.assertEqual(self._SingleTokenForInput(">>"),
                     _MakeLexToken("RSHIFT", ">>"))
    self.assertEqual(self._SingleTokenForInput("="),
                     _MakeLexToken("EQUALS", "="))
    self.assertEqual(self._SingleTokenForInput("=>"),
                     _MakeLexToken("RESPONSE", "=>"))
    self.assertEqual(self._SingleTokenForInput("("),
                     _MakeLexToken("LPAREN", "("))
    self.assertEqual(self._SingleTokenForInput(")"),
                     _MakeLexToken("RPAREN", ")"))
    self.assertEqual(self._SingleTokenForInput("["),
                     _MakeLexToken("LBRACKET", "["))
    self.assertEqual(self._SingleTokenForInput("]"),
                     _MakeLexToken("RBRACKET", "]"))
    self.assertEqual(self._SingleTokenForInput("{"),
                     _MakeLexToken("LBRACE", "{"))
    self.assertEqual(self._SingleTokenForInput("}"),
                     _MakeLexToken("RBRACE", "}"))
    self.assertEqual(self._SingleTokenForInput("<"),
                     _MakeLexToken("LANGLE", "<"))
    self.assertEqual(self._SingleTokenForInput(">"),
                     _MakeLexToken("RANGLE", ">"))
    self.assertEqual(self._SingleTokenForInput(";"),
                     _MakeLexToken("SEMI", ";"))
    self.assertEqual(self._SingleTokenForInput(","),
                     _MakeLexToken("COMMA", ","))
    self.assertEqual(self._SingleTokenForInput("."),
                     _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:  # The lexer yields None once the input is exhausted.
        return rv
      rv.append(tok)
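
  # For example (illustrative only; token types as exercised in the tests
  # above): _TokensForInput("asdf;") would return a NAME token for "asdf"
  # followed by a SEMI token for ";".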

  def _SingleTokenForInput(self, input):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()
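
# To run these tests directly, invoke this file with the Python interpreter
# (the filename below is an assumption; it is not stated in this file):
#   python mojo_lexer_unittest.py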