Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(316)

Side by Side Diff: mojom/mojom_parser/lexer/lexer_test.go

Issue 1387893002: New lexer for mojom written in go. (Closed) Base URL: https://github.com/domokit/mojo.git@master
Patch Set: Created 5 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « mojom/mojom_parser/lexer/lexer.go ('k') | mojom/mojom_parser/lexer/token_stream.go » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 package lexer 5 package lexer
6 6
7 import "testing" 7 import "testing"
8 8
// checkEq aborts the test with a descriptive message unless expected and
// actual compare equal.
func checkEq(t *testing.T, expected, actual interface{}) {
	if expected == actual {
		return
	}
	t.Fatalf("Failed check: Expected (%v), Actual (%v)", expected, actual)
}
15 } 15 // pumpTokens pumps all the tokens from a channel into a slice.
16 func pumpTokens(tokensChan chan Token) []Token {
17 » tokens := []Token{}
18 » for token := range tokensChan {
19 » » tokens = append(tokens, token)
20 » }
21 » return tokens
22 }
23
24 // TestAllSingleTokens tests for each token that a valid string is accepted as
25 // the correct token.
26 func TestAllSingleTokens(t *testing.T) {
27 » testData := []struct {
28 » » source string
29 » » token TokenKind
30 » }{
31 » » {"(", LParen},
32 » » {")", RParen},
33 » » {"[", LBracket},
34 » » {"]", RBracket},
35 » » {"{", LBrace},
36 » » {"}", RBrace},
37 » » {"<", LAngle},
38 » » {">", RAngle},
39 » » {";", Semi},
40 » » {",", Comma},
41 » » {".", Dot},
42 » » {"-", Minus},
43 » » {"+", Plus},
44 » » {"&", Amp},
45 » » {"?", Qstn},
46 » » {"=", Equals},
47 » » {"=>", Response},
48 » » {"somet_hi3ng", Name},
49 » » {"import", Import},
50 » » {"module", Module},
51 » » {"struct", Struct},
52 » » {"union", Union},
53 » » {"interface", Interface},
54 » » {"enum", Enum},
55 » » {"const", Const},
56 » » {"true", True},
57 » » {"false", False},
58 » » {"default", Default},
59 » » {"@10", Ordinal},
60 » » {"10", IntConstDec},
61 » » {"0", IntConstDec},
62 » » {"0xA10", IntConstHex},
63 » » {"0xa10", IntConstHex},
64 » » {"0XA10", IntConstHex},
65 » » {"0Xa10", IntConstHex},
66 » » {"10.5", FloatConst},
67 » » {"10e5", FloatConst},
68 » » {"0.5", FloatConst},
69 » » {"0e5", FloatConst},
70 » » {"10e+5", FloatConst},
71 » » {"10e-5", FloatConst},
72 » » {"\"hello world\"", StringLiteral},
73 » » {"\"hello \\\"real\\\" world\"", StringLiteral},
74 » }
75
76 » for i := range testData {
77 » » l := lexer{source: testData[i].source, tokens: make(chan Token)}
78 » » go l.run()
79 » » tokens := pumpTokens(l.tokens)
80
81 » » if len(tokens) != 1 {
82 » » » t.Fatalf("Source('%v'): Expected 1 token but got %v inst ead: %v",
83 » » » » testData[i].source, len(tokens), tokens)
84 » » }
85
86 » » checkEq(t, testData[i].source, tokens[0].Text)
87 » » checkEq(t, testData[i].token, tokens[0].Kind)
88 » }
89 }
90
91 // TestTokenPosition tests that the position in the source string, the line
92 // number and the position in the line of the lexed token are correctly found.
93 func TestTokenPosition(t *testing.T) {
94 » source := " \n ."
95 » l := lexer{source: source, tokens: make(chan Token)}
96 » go l.run()
97 » tokens := pumpTokens(l.tokens)
98 » token := tokens[0]
99
100 » checkEq(t, 5, token.CharPos)
101 » checkEq(t, 1, token.LineNo)
102 » checkEq(t, 2, token.LinePos)
103 }
104
105 // TestTokenPositionChineseString tests that CharPos is expressed as a number
106 // of runes and not a number of bytes.
107 func TestTokenPositionChineseString(t *testing.T) {
108 » source := "\"您好\" is"
109 » ts := Tokenize(source)
110 » checkEq(t, StringLiteral, ts.PeekNext().Kind)
111 » ts.ConsumeNext()
112 » checkEq(t, 5, ts.PeekNext().CharPos)
113 }
114
115 // TestSkipSkippable tests that all skippable characters are skipped.
116 func TestSkipSkippable(t *testing.T) {
117 » source := " \t \r \n ."
118 » l := lexer{source: source, tokens: make(chan Token)}
119 » go l.run()
120 » tokens := pumpTokens(l.tokens)
121
122 » checkEq(t, Dot, tokens[0].Kind)
123 }
124
125 // TestTokenize tests that a single token embedded in a larger string is
126 // correctly lexed.
127 func TestTokenize(t *testing.T) {
128 » ts := Tokenize(" \t . ")
129 » token := ts.PeekNext()
130 » checkEq(t, Dot, token.Kind)
131
132 » ts.ConsumeNext()
133 » token = ts.PeekNext()
134 » checkEq(t, EOF, token.Kind)
135 }
136
137 // TestTokenizeBadUTF8String tests that an invalid UTF8 string is handled.
138 func TestTokenizeBadUTF8String(t *testing.T) {
139 » ts := Tokenize("\xF0")
140 » checkEq(t, ErrorIllegalChar, ts.PeekNext().Kind)
141 }
142
143 // TestTokenizeEmptyString tests that empty strings are handled correctly.
144 func TestTokenizeEmptyString(t *testing.T) {
145 » ts := Tokenize("")
146 » checkEq(t, EOF, ts.PeekNext().Kind)
147 }
148
149 // TestTokenizeMoreThanOne tests that more than one token is correctly lexed.
150 func TestTokenizeMoreThanOne(t *testing.T) {
151 » ts := Tokenize("()")
152 » checkEq(t, LParen, ts.PeekNext().Kind)
153 » ts.ConsumeNext()
154 » checkEq(t, RParen, ts.PeekNext().Kind)
155 » ts.ConsumeNext()
156 » checkEq(t, EOF, ts.PeekNext().Kind)
157 }
158
159 // TestIllegalChar tests that an illegal character is correctly spotted.
160 func TestIllegalChar(t *testing.T) {
161 » ts := Tokenize(" \t $ ")
162 » checkEq(t, ErrorIllegalChar, ts.PeekNext().Kind)
163 }
164
165 // TestUnterminatedStringLiteralEos tests that the correct error is emitted if
166 // a quoted string is never closed.
167 func TestUnterminatedStringLiteralEos(t *testing.T) {
168 » ts := Tokenize("\"hello world")
169 » checkEq(t, ErrorUnterminatedStringLiteral, ts.PeekNext().Kind)
170 }
171
172 // TestUnterminatedStringLiteralEol tests that the correct error is emitted if
173 // a quoted string is closed on a subsequent line.
174 func TestUnterminatedStringLiteralEol(t *testing.T) {
175 » ts := Tokenize("\"hello\n world\"")
176 » checkEq(t, ErrorUnterminatedStringLiteral, ts.PeekNext().Kind)
177 }
178
179 // TestSingleLineComment tests that single line comments are correctly skipped.
180 func TestSingleLineComment(t *testing.T) {
181 » ts := Tokenize("( // some stuff\n)")
182 » checkEq(t, LParen, ts.PeekNext().Kind)
183 » ts.ConsumeNext()
184 » checkEq(t, RParen, ts.PeekNext().Kind)
185 }
186
187 // TestMultiLineComment tests that multi line comments are correctly skipped.
188 func TestMultiLineComment(t *testing.T) {
189 » ts := Tokenize("( /* hello world/ * *\n */)")
190 » checkEq(t, LParen, ts.PeekNext().Kind)
191 » ts.ConsumeNext()
192 » checkEq(t, RParen, ts.PeekNext().Kind)
193 }
194
195 // TestUnterminatedMultiLineComment tests that unterminated multiline comments
196 // emit the correct error.
197 func TestUnterminatedMultiLineComment(t *testing.T) {
198 » ts := Tokenize("( /* hello world/ * *\n )")
199 » checkEq(t, LParen, ts.PeekNext().Kind)
200 » ts.ConsumeNext()
201 » checkEq(t, ErrorUnterminatedComment, ts.PeekNext().Kind)
202 }
203
204 // TestUnterminatedMultiLineCommentAtStar tests that if the string ends at a *
205 // (which could be the beginning of the close of a multiline comment) the right
206 // error is emitted.
207 func TestUnterminatedMultiLineCommentAtStar(t *testing.T) {
208 » ts := Tokenize("( /* hello world/ *")
209 » checkEq(t, LParen, ts.PeekNext().Kind)
210 » ts.ConsumeNext()
211 » checkEq(t, ErrorUnterminatedComment, ts.PeekNext().Kind)
212 }
OLDNEW
« no previous file with comments | « mojom/mojom_parser/lexer/lexer.go ('k') | mojom/mojom_parser/lexer/token_stream.go » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698