OLD | NEW |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import imp | 5 import imp |
6 import os.path | 6 import os.path |
7 import sys | 7 import sys |
8 import unittest | 8 import unittest |
9 | 9 |
10 def _GetDirAbove(dirname): | 10 def _GetDirAbove(dirname): |
(...skipping 66 matching lines...)
77 self.assertEquals(self._SingleTokenForInput("const"), | 77 self.assertEquals(self._SingleTokenForInput("const"), |
78 _MakeLexTokenForKeyword("const")) | 78 _MakeLexTokenForKeyword("const")) |
79 self.assertEquals(self._SingleTokenForInput("true"), | 79 self.assertEquals(self._SingleTokenForInput("true"), |
80 _MakeLexTokenForKeyword("true")) | 80 _MakeLexTokenForKeyword("true")) |
81 self.assertEquals(self._SingleTokenForInput("false"), | 81 self.assertEquals(self._SingleTokenForInput("false"), |
82 _MakeLexTokenForKeyword("false")) | 82 _MakeLexTokenForKeyword("false")) |
83 self.assertEquals(self._SingleTokenForInput("default"), | 83 self.assertEquals(self._SingleTokenForInput("default"), |
84 _MakeLexTokenForKeyword("default")) | 84 _MakeLexTokenForKeyword("default")) |
85 self.assertEquals(self._SingleTokenForInput("array"), | 85 self.assertEquals(self._SingleTokenForInput("array"), |
86 _MakeLexTokenForKeyword("array")) | 86 _MakeLexTokenForKeyword("array")) |
| 87 self.assertEquals(self._SingleTokenForInput("map"), |
| 88 _MakeLexTokenForKeyword("map")) |
87 | 89 |
88 def testValidIdentifiers(self): | 90 def testValidIdentifiers(self): |
89 """Tests identifiers.""" | 91 """Tests identifiers.""" |
90 self.assertEquals(self._SingleTokenForInput("abcd"), | 92 self.assertEquals(self._SingleTokenForInput("abcd"), |
91 _MakeLexToken("NAME", "abcd")) | 93 _MakeLexToken("NAME", "abcd")) |
92 self.assertEquals(self._SingleTokenForInput("AbC_d012_"), | 94 self.assertEquals(self._SingleTokenForInput("AbC_d012_"), |
93 _MakeLexToken("NAME", "AbC_d012_")) | 95 _MakeLexToken("NAME", "AbC_d012_")) |
94 self.assertEquals(self._SingleTokenForInput("_0123"), | 96 self.assertEquals(self._SingleTokenForInput("_0123"), |
95 _MakeLexToken("NAME", "_0123")) | 97 _MakeLexToken("NAME", "_0123")) |
96 | 98 |
(...skipping 80 matching lines...)
177 def _SingleTokenForInput(self, input_string): | 179 def _SingleTokenForInput(self, input_string): |
178 """Gets the single token for the given input string. (Raises an exception if | 180 """Gets the single token for the given input string. (Raises an exception if |
179 the input string does not result in exactly one token.)""" | 181 the input string does not result in exactly one token.)""" |
180 toks = self._TokensForInput(input_string) | 182 toks = self._TokensForInput(input_string) |
181 assert len(toks) == 1 | 183 assert len(toks) == 1 |
182 return toks[0] | 184 return toks[0] |
183 | 185 |
184 | 186 |
185 if __name__ == "__main__": | 187 if __name__ == "__main__": |
186 unittest.main() | 188 unittest.main() |
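
Note on the change: the added assertion (new lines 87-88) extends the keyword test to cover "map" alongside the existing keywords. The helper definitions (_MakeLexToken, _MakeLexTokenForKeyword, _TokensForInput) fall inside the folded spans above, so as a reading aid only, here is a minimal, self-contained sketch of the pattern the test exercises: a ply-based lexer in which each keyword lexes to its own token type while other identifiers lex to NAME. This assumes ply is installed; the token names and keyword list below are illustrative stand-ins, not the actual mojom lexer definitions.

import ply.lex as lex

# Illustrative keyword set; the real lexer's list lives in the folded code.
_KEYWORDS = ("const", "true", "false", "default", "array", "map")

# ply requires a module-level |tokens| tuple naming every token type.
tokens = tuple(kw.upper() for kw in _KEYWORDS) + ("NAME",)

def t_NAME(t):
  r"[A-Za-z_][A-Za-z0-9_]*"
  # Identifiers that match a keyword are promoted to that keyword's token type.
  if t.value in _KEYWORDS:
    t.type = t.value.upper()
  return t

t_ignore = " \t\r\n"

def t_error(t):
  raise ValueError("Illegal character %r" % t.value[0])

if __name__ == "__main__":
  lexer = lex.lex()
  lexer.input("map abcd")
  # Expected (under these assumptions): [('MAP', 'map'), ('NAME', 'abcd')]
  print([(tok.type, tok.value) for tok in iter(lexer.token, None)])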