| OLD | NEW |
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import imp | 5 import imp |
| 6 import os.path | 6 import os.path |
| 7 import sys | 7 import sys |
| 8 import unittest | 8 import unittest |
| 9 | 9 |
def _GetDirAbove(dirname):
  """Walks upward from this file's absolute path, peeling off one path
  component at a time, until a component equal to |dirname| is found;
  returns the directory containing that component. |dirname| must occur
  somewhere "above" this file."""
  # The assert fires (tail becomes "") if we reach the filesystem root
  # without ever seeing |dirname|.
  remaining = os.path.abspath(__file__)
  tail = None
  while tail != dirname:
    remaining, tail = os.path.split(remaining)
    assert tail
  return remaining
| 19 | 19 |
| 20 try: | 20 try: |
| 21 imp.find_module("ply") | 21 imp.find_module("ply") |
| 22 except ImportError: | 22 except ImportError: |
| 23 sys.path.append(os.path.join(_GetDirAbove("mojo"), "third_party")) | 23 sys.path.append(os.path.join(_GetDirAbove("public"), "public/third_party")) |
| 24 from ply import lex | 24 from ply import lex |
| 25 | 25 |
| 26 try: | 26 try: |
| 27 imp.find_module("mojom") | 27 imp.find_module("mojom") |
| 28 except ImportError: | 28 except ImportError: |
| 29 sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib")) | 29 sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib")) |
| 30 import mojom.parse.lexer | 30 import mojom.parse.lexer |
| 31 | 31 |
| 32 | 32 |
| 33 # This (monkey-patching LexToken to make comparison value-based) is evil, but | 33 # This (monkey-patching LexToken to make comparison value-based) is evil, but |
| (...skipping 29 matching lines...) Expand all Loading... |
| 63 def testValidKeywords(self): | 63 def testValidKeywords(self): |
| 64 """Tests valid keywords.""" | 64 """Tests valid keywords.""" |
| 65 self.assertEquals(self._SingleTokenForInput("handle"), | 65 self.assertEquals(self._SingleTokenForInput("handle"), |
| 66 _MakeLexTokenForKeyword("handle")) | 66 _MakeLexTokenForKeyword("handle")) |
| 67 self.assertEquals(self._SingleTokenForInput("import"), | 67 self.assertEquals(self._SingleTokenForInput("import"), |
| 68 _MakeLexTokenForKeyword("import")) | 68 _MakeLexTokenForKeyword("import")) |
| 69 self.assertEquals(self._SingleTokenForInput("module"), | 69 self.assertEquals(self._SingleTokenForInput("module"), |
| 70 _MakeLexTokenForKeyword("module")) | 70 _MakeLexTokenForKeyword("module")) |
| 71 self.assertEquals(self._SingleTokenForInput("struct"), | 71 self.assertEquals(self._SingleTokenForInput("struct"), |
| 72 _MakeLexTokenForKeyword("struct")) | 72 _MakeLexTokenForKeyword("struct")) |
| 73 self.assertEquals(self._SingleTokenForInput("union"), |
| 74 _MakeLexTokenForKeyword("union")) |
| 73 self.assertEquals(self._SingleTokenForInput("interface"), | 75 self.assertEquals(self._SingleTokenForInput("interface"), |
| 74 _MakeLexTokenForKeyword("interface")) | 76 _MakeLexTokenForKeyword("interface")) |
| 75 self.assertEquals(self._SingleTokenForInput("enum"), | 77 self.assertEquals(self._SingleTokenForInput("enum"), |
| 76 _MakeLexTokenForKeyword("enum")) | 78 _MakeLexTokenForKeyword("enum")) |
| 77 self.assertEquals(self._SingleTokenForInput("const"), | 79 self.assertEquals(self._SingleTokenForInput("const"), |
| 78 _MakeLexTokenForKeyword("const")) | 80 _MakeLexTokenForKeyword("const")) |
| 79 self.assertEquals(self._SingleTokenForInput("true"), | 81 self.assertEquals(self._SingleTokenForInput("true"), |
| 80 _MakeLexTokenForKeyword("true")) | 82 _MakeLexTokenForKeyword("true")) |
| 81 self.assertEquals(self._SingleTokenForInput("false"), | 83 self.assertEquals(self._SingleTokenForInput("false"), |
| 82 _MakeLexTokenForKeyword("false")) | 84 _MakeLexTokenForKeyword("false")) |
| (...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 179 def _SingleTokenForInput(self, input_string): | 181 def _SingleTokenForInput(self, input_string): |
| 180 """Gets the single token for the given input string. (Raises an exception if | 182 """Gets the single token for the given input string. (Raises an exception if |
| 181 the input string does not result in exactly one token.)""" | 183 the input string does not result in exactly one token.)""" |
| 182 toks = self._TokensForInput(input_string) | 184 toks = self._TokensForInput(input_string) |
| 183 assert len(toks) == 1 | 185 assert len(toks) == 1 |
| 184 return toks[0] | 186 return toks[0] |
| 185 | 187 |
| 186 | 188 |
# Allow running this test file directly (rather than via a test runner).
if __name__ == "__main__":
  unittest.main()
| OLD | NEW |