Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 package lexer | |
| 6 | |
| 7 import "testing" | |
| 8 | |
// checkEq fails the test immediately if expected and actual are not equal.
// Values are compared with ==, so both arguments must be of comparable types.
func checkEq(t *testing.T, expected, actual interface{}) {
	t.Helper() // Report the caller's line on failure, not this helper's.
	if expected != actual {
		t.Fatalf("Failed check: Expected (%v), Actual (%v)", expected, actual)
	}
}
| 14 | |
|
rudominer
2015/10/12 18:06:06
Can you add a test where
- the input string is empty
azani
2015/10/13 00:23:46
Done.
| |
| 15 // pumpTokens pumps all the tokens from a channel into a slice. | |
| 16 func pumpTokens(tokensChan chan Token) []Token { | |
| 17 tokens := []Token{} | |
| 18 for token := range tokensChan { | |
| 19 tokens = append(tokens, token) | |
| 20 } | |
| 21 return tokens | |
| 22 } | |
| 23 | |
| 24 // TestAllSingleTokens tests for each token that a valid string is accepted as | |
| 25 // the correct token. | |
| 26 func TestAllSingleTokens(t *testing.T) { | |
| 27 testData := []struct { | |
| 28 source string | |
| 29 token TokenKind | |
| 30 }{ | |
| 31 {"(", LPAREN}, | |
| 32 {")", RPAREN}, | |
| 33 {"[", LBRACKET}, | |
| 34 {"]", RBRACKET}, | |
| 35 {"{", LBRACE}, | |
| 36 {"}", RBRACE}, | |
| 37 {"<", LANGLE}, | |
| 38 {">", RANGLE}, | |
| 39 {";", SEMI}, | |
| 40 {",", COMMA}, | |
| 41 {".", DOT}, | |
| 42 {"-", MINUS}, | |
| 43 {"+", PLUS}, | |
| 44 {"&", AMP}, | |
| 45 {"?", QSTN}, | |
| 46 {"=", EQUALS}, | |
| 47 {"=>", RESPONSE}, | |
| 48 {"somet_hi3ng", NAME}, | |
| 49 {"import", IMPORT}, | |
| 50 {"module", MODULE}, | |
| 51 {"struct", STRUCT}, | |
| 52 {"union", UNION}, | |
| 53 {"interface", INTERFACE}, | |
| 54 {"enum", ENUM}, | |
| 55 {"const", CONST}, | |
| 56 {"true", TRUE}, | |
| 57 {"false", FALSE}, | |
| 58 {"default", DEFAULT}, | |
| 59 {"@10", ORDINAL}, | |
| 60 {"10", INT_CONST_DEC}, | |
| 61 {"0", INT_CONST_DEC}, | |
| 62 {"0xA10", INT_CONST_HEX}, | |
| 63 {"0xa10", INT_CONST_HEX}, | |
| 64 {"0XA10", INT_CONST_HEX}, | |
| 65 {"0Xa10", INT_CONST_HEX}, | |
| 66 {"10.5", FLOAT_CONST}, | |
| 67 {"10e5", FLOAT_CONST}, | |
| 68 {"0.5", FLOAT_CONST}, | |
| 69 {"0e5", FLOAT_CONST}, | |
| 70 {"10e+5", FLOAT_CONST}, | |
| 71 {"10e-5", FLOAT_CONST}, | |
| 72 {"\"hello world\"", STRING_LITERAL}, | |
| 73 {"\"hello \\\"real\\\" world\"", STRING_LITERAL}, | |
| 74 } | |
| 75 | |
| 76 for i := range testData { | |
| 77 l := lexer{source: testData[i].source, tokens: make(chan Token)} | |
| 78 go l.run() | |
| 79 tokens := pumpTokens(l.tokens) | |
| 80 | |
| 81 if len(tokens) != 1 { | |
| 82 t.Fatalf("Source('%v'): Expected 1 token but got %v inst ead: %v", | |
| 83 testData[i].source, len(tokens), tokens) | |
| 84 } | |
| 85 | |
| 86 checkEq(t, testData[i].source, tokens[0].Text) | |
| 87 checkEq(t, testData[i].token, tokens[0].Kind) | |
| 88 } | |
| 89 } | |
| 90 | |
| 91 // TestTokenPosition tests that the position in the source string, the line | |
| 92 // number and the position in the line of the lexed token are correctly found. | |
| 93 func TestTokenPosition(t *testing.T) { | |
| 94 source := " \n ." | |
| 95 l := lexer{source: source, tokens: make(chan Token)} | |
| 96 go l.run() | |
| 97 tokens := pumpTokens(l.tokens) | |
| 98 token := tokens[0] | |
| 99 | |
| 100 checkEq(t, 5, token.CharPos) | |
| 101 checkEq(t, 1, token.LineNo) | |
| 102 checkEq(t, 2, token.LinePos) | |
| 103 } | |
| 104 | |
| 105 // TestSkipSkippable tests that all skippable characters are skipped. | |
| 106 func TestSkipSkippable(t *testing.T) { | |
| 107 source := " \t \r \n ." | |
| 108 l := lexer{source: source, tokens: make(chan Token)} | |
| 109 go l.run() | |
| 110 tokens := pumpTokens(l.tokens) | |
| 111 | |
| 112 checkEq(t, DOT, tokens[0].Kind) | |
| 113 } | |
| 114 | |
| 115 // TestTokenize tests that a single token embedded in a larger string is | |
| 116 // correctly lexed. | |
| 117 func TestTokenize(t *testing.T) { | |
| 118 ts := Tokenize(" \t . ") | |
| 119 token := ts.PeekNext() | |
| 120 checkEq(t, DOT, token.Kind) | |
| 121 | |
| 122 ts.ConsumeNext() | |
| 123 token = ts.PeekNext() | |
| 124 checkEq(t, EOF, token.Kind) | |
| 125 } | |
| 126 | |
| 127 // TestTokenizeMoreThanOne tests that more than one token is correctly lexed. | |
| 128 func TestTokenizeMoreThanOne(t *testing.T) { | |
| 129 ts := Tokenize("()") | |
| 130 checkEq(t, LPAREN, ts.PeekNext().Kind) | |
| 131 ts.ConsumeNext() | |
| 132 checkEq(t, RPAREN, ts.PeekNext().Kind) | |
| 133 ts.ConsumeNext() | |
| 134 checkEq(t, EOF, ts.PeekNext().Kind) | |
| 135 } | |
| 136 | |
| 137 // TestIllegalChar tests that an illegal character is correctly spotted. | |
| 138 func TestIllegalChar(t *testing.T) { | |
| 139 ts := Tokenize(" \t $ ") | |
| 140 checkEq(t, ERROR_ILLEGAL_CHAR, ts.PeekNext().Kind) | |
| 141 } | |
| 142 | |
| 143 // TestUnterminatedStringLiteralEos tests that the correct error is emitted if | |
| 144 // a quoted string is never closed. | |
| 145 func TestUnterminatedStringLiteralEos(t *testing.T) { | |
| 146 ts := Tokenize("\"hello world") | |
| 147 checkEq(t, ERROR_UNTERMINATED_STRING_LITERAL, ts.PeekNext().Kind) | |
| 148 } | |
| 149 | |
| 150 // TestUnterminatedStringLiteralEol tests that the correct error is emitted if | |
| 151 // a quoted string is closed on a subsequent line. | |
| 152 func TestUnterminatedStringLiteralEol(t *testing.T) { | |
| 153 ts := Tokenize("\"hello\n world\"") | |
| 154 checkEq(t, ERROR_UNTERMINATED_STRING_LITERAL, ts.PeekNext().Kind) | |
| 155 } | |
| 156 | |
| 157 // TestSingleLineComment tests that single line comments are correctly skipped. | |
| 158 func TestSingleLineComment(t *testing.T) { | |
| 159 ts := Tokenize("( // some stuff\n)") | |
| 160 checkEq(t, LPAREN, ts.PeekNext().Kind) | |
| 161 ts.ConsumeNext() | |
| 162 checkEq(t, RPAREN, ts.PeekNext().Kind) | |
| 163 } | |
| 164 | |
| 165 // TestMultiLineComment tests that multi line comments are correctly skipped. | |
| 166 func TestMultiLineComment(t *testing.T) { | |
| 167 ts := Tokenize("( /* hello world/ * *\n */)") | |
| 168 checkEq(t, LPAREN, ts.PeekNext().Kind) | |
| 169 ts.ConsumeNext() | |
| 170 checkEq(t, RPAREN, ts.PeekNext().Kind) | |
| 171 } | |
| 172 | |
| 173 // TestUnterminatedMultiLineComment tests that unterminated multiline comments | |
| 174 // emit the correct error. | |
| 175 func TestUnterminatedMultiLineComment(t *testing.T) { | |
| 176 ts := Tokenize("( /* hello world/ * *\n )") | |
| 177 checkEq(t, LPAREN, ts.PeekNext().Kind) | |
| 178 ts.ConsumeNext() | |
| 179 checkEq(t, ERROR_UNTERMINATED_COMMENT, ts.PeekNext().Kind) | |
| 180 } | |
| 181 | |
| 182 // TestUnterminatedMultiLineCommentAtStar tests that if the string ends at a * | |
| 183 // (which could be the beginning of the close of a multiline comment) the right | |
| 184 // error is emitted. | |
| 185 func TestUnterminatedMultiLineCommentAtStar(t *testing.T) { | |
| 186 ts := Tokenize("( /* hello world/ *") | |
| 187 checkEq(t, LPAREN, ts.PeekNext().Kind) | |
| 188 ts.ConsumeNext() | |
| 189 checkEq(t, ERROR_UNTERMINATED_COMMENT, ts.PeekNext().Kind) | |
| 190 } | |
| OLD | NEW |