OLD | NEW |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import imp | 5 import imp |
6 import os.path | 6 import os.path |
7 import sys | 7 import sys |
8 import unittest | 8 import unittest |
9 | 9 |
10 # Disable lint check for finding modules: | 10 # Disable lint check for finding modules: |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
56 | 56 |
57 | 57 |
58 class LexerTest(unittest.TestCase): | 58 class LexerTest(unittest.TestCase): |
59 """Tests |mojom.parse.lexer.Lexer|.""" | 59 """Tests |mojom.parse.lexer.Lexer|.""" |
60 | 60 |
61 def __init__(self, *args, **kwargs): | 61 def __init__(self, *args, **kwargs): |
62 unittest.TestCase.__init__(self, *args, **kwargs) | 62 unittest.TestCase.__init__(self, *args, **kwargs) |
63 # Clone all lexer instances from this one, since making a lexer is slow. | 63 # Clone all lexer instances from this one, since making a lexer is slow. |
64 self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom")) | 64 self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom")) |
65 | 65 |
| 66 def testValidIdentifiers(self): |
| 67 """Tests identifiers.""" |
| 68 self.assertEquals(self._SingleTokenForInput("abcd"), |
| 69 _MakeLexToken("NAME", "abcd")) |
| 70 self.assertEquals(self._SingleTokenForInput("AbC_d012_"), |
| 71 _MakeLexToken("NAME", "AbC_d012_")) |
| 72 self.assertEquals(self._SingleTokenForInput("_0123"), |
| 73 _MakeLexToken("NAME", "_0123")) |
| 74 |
| 75 def testInvalidIdentifiers(self): |
| 76 with self.assertRaisesRegexp( |
| 77 mojom.parse.lexer.LexError, |
| 78 r"^my_file\.mojom:1: Error: Illegal character '\$'$"): |
| 79 self._TokensForInput("$abc") |
| 80 with self.assertRaisesRegexp( |
| 81 mojom.parse.lexer.LexError, |
| 82 r"^my_file\.mojom:1: Error: Illegal character '\$'$"): |
| 83 self._TokensForInput("a$bc") |
| 84 |
66 def testValidSingleKeywords(self): | 85 def testValidSingleKeywords(self): |
67 """Tests valid, single keywords.""" | 86 """Tests valid, single keywords.""" |
68 self.assertEquals(self._SingleTokenForInput("handle"), | 87 self.assertEquals(self._SingleTokenForInput("handle"), |
69 _MakeLexTokenForKeyword("handle")) | 88 _MakeLexTokenForKeyword("handle")) |
70 self.assertEquals(self._SingleTokenForInput("data_pipe_consumer"), | 89 self.assertEquals(self._SingleTokenForInput("data_pipe_consumer"), |
71 _MakeLexTokenForKeyword("data_pipe_consumer")) | 90 _MakeLexTokenForKeyword("data_pipe_consumer")) |
72 self.assertEquals(self._SingleTokenForInput("data_pipe_producer"), | 91 self.assertEquals(self._SingleTokenForInput("data_pipe_producer"), |
73 _MakeLexTokenForKeyword("data_pipe_producer")) | 92 _MakeLexTokenForKeyword("data_pipe_producer")) |
74 self.assertEquals(self._SingleTokenForInput("message_pipe"), | 93 self.assertEquals(self._SingleTokenForInput("message_pipe"), |
75 _MakeLexTokenForKeyword("message_pipe")) | 94 _MakeLexTokenForKeyword("message_pipe")) |
76 self.assertEquals(self._SingleTokenForInput("import"), | 95 self.assertEquals(self._SingleTokenForInput("import"), |
77 _MakeLexTokenForKeyword("import")) | 96 _MakeLexTokenForKeyword("import")) |
78 self.assertEquals(self._SingleTokenForInput("module"), | 97 self.assertEquals(self._SingleTokenForInput("module"), |
79 _MakeLexTokenForKeyword("module")) | 98 _MakeLexTokenForKeyword("module")) |
80 self.assertEquals(self._SingleTokenForInput("struct"), | 99 self.assertEquals(self._SingleTokenForInput("struct"), |
81 _MakeLexTokenForKeyword("struct")) | 100 _MakeLexTokenForKeyword("struct")) |
82 self.assertEquals(self._SingleTokenForInput("interface"), | 101 self.assertEquals(self._SingleTokenForInput("interface"), |
83 _MakeLexTokenForKeyword("interface")) | 102 _MakeLexTokenForKeyword("interface")) |
84 self.assertEquals(self._SingleTokenForInput("enum"), | 103 self.assertEquals(self._SingleTokenForInput("enum"), |
85 _MakeLexTokenForKeyword("enum")) | 104 _MakeLexTokenForKeyword("enum")) |
86 | 105 |
87 def testValidSingleTokens(self): | 106 def testValidSingleTokens(self): |
88 """Tests valid, single (non-keyword) tokens.""" | 107 """Tests valid, single (non-keyword) tokens.""" |
89 self.assertEquals(self._SingleTokenForInput("asdf"), | 108 # NAME tested in |testValidIdentifiers|. |
90 _MakeLexToken("NAME", "asdf")) | |
91 self.assertEquals(self._SingleTokenForInput("@123"), | 109 self.assertEquals(self._SingleTokenForInput("@123"), |
92 _MakeLexToken("ORDINAL", "@123")) | 110 _MakeLexToken("ORDINAL", "@123")) |
93 self.assertEquals(self._SingleTokenForInput("456"), | 111 self.assertEquals(self._SingleTokenForInput("456"), |
94 _MakeLexToken("INT_CONST_DEC", "456")) | 112 _MakeLexToken("INT_CONST_DEC", "456")) |
95 self.assertEquals(self._SingleTokenForInput("0765"), | 113 self.assertEquals(self._SingleTokenForInput("0765"), |
96 _MakeLexToken("INT_CONST_OCT", "0765")) | 114 _MakeLexToken("INT_CONST_OCT", "0765")) |
97 self.assertEquals(self._SingleTokenForInput("0x01aB2eF3"), | 115 self.assertEquals(self._SingleTokenForInput("0x01aB2eF3"), |
98 _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3")) | 116 _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3")) |
99 self.assertEquals(self._SingleTokenForInput("123.456"), | 117 self.assertEquals(self._SingleTokenForInput("123.456"), |
100 _MakeLexToken("FLOAT_CONST", "123.456")) | 118 _MakeLexToken("FLOAT_CONST", "123.456")) |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
163 def _SingleTokenForInput(self, input_string): | 181 def _SingleTokenForInput(self, input_string): |
164 """Gets the single token for the given input string. (Raises an exception if | 182 """Gets the single token for the given input string. (Raises an exception if |
165 the input string does not result in exactly one token.)""" | 183 the input string does not result in exactly one token.)""" |
166 toks = self._TokensForInput(input_string) | 184 toks = self._TokensForInput(input_string) |
167 assert len(toks) == 1 | 185 assert len(toks) == 1 |
168 return toks[0] | 186 return toks[0] |
169 | 187 |
170 | 188 |
# Allow running this test file directly.
if __name__ == "__main__":
  unittest.main()
OLD | NEW |