Chromium Code Reviews

Side by Side Diff: mojo/public/tools/bindings/pylib/mojom_tests/parse/lexer_unittest.py

Issue 814543006: Move //mojo/{public, edk} underneath //third_party (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Rebase (created 5 years, 11 months ago)
(New file; the old side of the diff is empty.)
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import os.path
import sys
import unittest

def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail
    if tail == dirname:
      return path
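
# A note on the path gymnastics above (the layout is inferred from this
# file's own path, mojo/public/tools/bindings/pylib/mojom_tests/parse/...):
# _GetDirAbove("public") walks up to the mojo/ directory, so the first append
# below puts mojo/public/third_party (where ply is vendored) on sys.path, and
# _GetDirAbove("pylib") walks up to .../bindings, putting .../bindings/pylib
# (the mojom package's home) on sys.path.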

try:
  imp.find_module("ply")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("public"), "public/third_party"))
from ply import lex

try:
  imp.find_module("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
import mojom.parse.lexer


# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  return self.type == other.type and self.value == other.value and \
         self.lineno == other.lineno and self.lexpos == other.lexpos
setattr(lex.LexToken, '__eq__', _LexTokenEq)
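
# Hedged rationale sketch: ply's LexToken presumably inherits default
# identity-based equality, but the tests below build expected tokens by hand
# and compare them to lexer output with assertEquals(), so equality has to be
# field-by-field over (type, value, lineno, lexpos). For example, with the
# patch applied (using the helper defined below):
#
#   _MakeLexToken("NAME", "x") == _MakeLexToken("NAME", "x")  # True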


def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv
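
# Illustrative use (hypothetical token, using the defaults above):
#
#   tok = _MakeLexToken("INT_CONST_DEC", "123")
#   # tok.type == "INT_CONST_DEC", tok.value == "123",
#   # tok.lineno == 1 (1-based), tok.lexpos == 0 (0-based)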

def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
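
# E.g. _MakeLexTokenForKeyword("struct") yields a token with type "STRUCT"
# and value "struct"; the upper-case-type convention is the one the keyword
# tests below rely on.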

class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
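    # (Zygote-pattern note: lex.lex() compiles the token regexes once, up
    # front; clone(), as used in _TokensForInput below, is assumed to reuse
    # that compiled state, which is what makes per-test lexers cheap.)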

  def testValidKeywords(self):
    """Tests valid keywords."""
    self.assertEquals(self._SingleTokenForInput("handle"),
                      _MakeLexTokenForKeyword("handle"))
    self.assertEquals(self._SingleTokenForInput("import"),
                      _MakeLexTokenForKeyword("import"))
    self.assertEquals(self._SingleTokenForInput("module"),
                      _MakeLexTokenForKeyword("module"))
    self.assertEquals(self._SingleTokenForInput("struct"),
                      _MakeLexTokenForKeyword("struct"))
    self.assertEquals(self._SingleTokenForInput("union"),
                      _MakeLexTokenForKeyword("union"))
    self.assertEquals(self._SingleTokenForInput("interface"),
                      _MakeLexTokenForKeyword("interface"))
    self.assertEquals(self._SingleTokenForInput("enum"),
                      _MakeLexTokenForKeyword("enum"))
    self.assertEquals(self._SingleTokenForInput("const"),
                      _MakeLexTokenForKeyword("const"))
    self.assertEquals(self._SingleTokenForInput("true"),
                      _MakeLexTokenForKeyword("true"))
    self.assertEquals(self._SingleTokenForInput("false"),
                      _MakeLexTokenForKeyword("false"))
    self.assertEquals(self._SingleTokenForInput("default"),
                      _MakeLexTokenForKeyword("default"))
    self.assertEquals(self._SingleTokenForInput("array"),
                      _MakeLexTokenForKeyword("array"))
    self.assertEquals(self._SingleTokenForInput("map"),
                      _MakeLexTokenForKeyword("map"))

  def testValidIdentifiers(self):
    """Tests valid identifiers."""
    self.assertEquals(self._SingleTokenForInput("abcd"),
                      _MakeLexToken("NAME", "abcd"))
    self.assertEquals(self._SingleTokenForInput("AbC_d012_"),
                      _MakeLexToken("NAME", "AbC_d012_"))
    self.assertEquals(self._SingleTokenForInput("_0123"),
                      _MakeLexToken("NAME", "_0123"))

  def testInvalidIdentifiers(self):
    """Tests that invalid identifiers raise |LexError|."""
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("$abc")
    with self.assertRaisesRegexp(
        mojom.parse.lexer.LexError,
        r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
      self._TokensForInput("a$bc")

  def testDecimalIntegerConstants(self):
    """Tests decimal integer constants."""
    self.assertEquals(self._SingleTokenForInput("0"),
                      _MakeLexToken("INT_CONST_DEC", "0"))
    self.assertEquals(self._SingleTokenForInput("1"),
                      _MakeLexToken("INT_CONST_DEC", "1"))
    self.assertEquals(self._SingleTokenForInput("123"),
                      _MakeLexToken("INT_CONST_DEC", "123"))
    self.assertEquals(self._SingleTokenForInput("10"),
                      _MakeLexToken("INT_CONST_DEC", "10"))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    self.assertEquals(self._SingleTokenForInput("@123"),
                      _MakeLexToken("ORDINAL", "@123"))
    self.assertEquals(self._SingleTokenForInput("456"),
                      _MakeLexToken("INT_CONST_DEC", "456"))
    self.assertEquals(self._SingleTokenForInput("0x01aB2eF3"),
                      _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
    self.assertEquals(self._SingleTokenForInput("123.456"),
                      _MakeLexToken("FLOAT_CONST", "123.456"))
    self.assertEquals(self._SingleTokenForInput("\"hello\""),
                      _MakeLexToken("STRING_LITERAL", "\"hello\""))
    self.assertEquals(self._SingleTokenForInput("+"),
                      _MakeLexToken("PLUS", "+"))
    self.assertEquals(self._SingleTokenForInput("-"),
                      _MakeLexToken("MINUS", "-"))
    self.assertEquals(self._SingleTokenForInput("&"),
                      _MakeLexToken("AMP", "&"))
    self.assertEquals(self._SingleTokenForInput("?"),
                      _MakeLexToken("QSTN", "?"))
    self.assertEquals(self._SingleTokenForInput("="),
                      _MakeLexToken("EQUALS", "="))
    self.assertEquals(self._SingleTokenForInput("=>"),
                      _MakeLexToken("RESPONSE", "=>"))
    self.assertEquals(self._SingleTokenForInput("("),
                      _MakeLexToken("LPAREN", "("))
    self.assertEquals(self._SingleTokenForInput(")"),
                      _MakeLexToken("RPAREN", ")"))
    self.assertEquals(self._SingleTokenForInput("["),
                      _MakeLexToken("LBRACKET", "["))
    self.assertEquals(self._SingleTokenForInput("]"),
                      _MakeLexToken("RBRACKET", "]"))
    self.assertEquals(self._SingleTokenForInput("{"),
                      _MakeLexToken("LBRACE", "{"))
    self.assertEquals(self._SingleTokenForInput("}"),
                      _MakeLexToken("RBRACE", "}"))
    self.assertEquals(self._SingleTokenForInput("<"),
                      _MakeLexToken("LANGLE", "<"))
    self.assertEquals(self._SingleTokenForInput(">"),
                      _MakeLexToken("RANGLE", ">"))
    self.assertEquals(self._SingleTokenForInput(";"),
                      _MakeLexToken("SEMI", ";"))
    self.assertEquals(self._SingleTokenForInput(","),
                      _MakeLexToken("COMMA", ","))
    self.assertEquals(self._SingleTokenForInput("."),
                      _MakeLexToken("DOT", "."))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    rv = []
    while True:
      tok = lexer.token()
      if not tok:
        return rv
      rv.append(tok)
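
  # Usage sketch for the helper above (hypothetical input; whitespace
  # handling is assumed, and token names are those from testValidTokens):
  #
  #   self._TokensForInput("=>;")
  #   # -> [LexToken(RESPONSE, "=>", ...), LexToken(SEMI, ";", ...)]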

  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    toks = self._TokensForInput(input_string)
    assert len(toks) == 1
    return toks[0]


if __name__ == "__main__":
  unittest.main()