// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

library tokenizer_test;

import 'package:polymer_expressions/tokenizer.dart';
import 'package:unittest/unittest.dart';

main() {

  group('tokenizer', () {

    test('should tokenize an empty expression', () {
      expectTokens('', []);
    });

    test('should tokenize an identifier', () {
      expectTokens('abc', [t(IDENTIFIER_TOKEN, 'abc')]);
    });

    test('should tokenize a double quoted String', () {
      expectTokens('"abc"', [t(STRING_TOKEN, 'abc')]);
    });

    test('should tokenize a single quoted String', () {
      expectTokens("'abc'", [t(STRING_TOKEN, 'abc')]);
    });

    test('should tokenize a String with escaping', () {
      expectTokens('"a\\b\\\\c\\\'\\""', [t(STRING_TOKEN, 'ab\\c\'"')]);
    });

    test('should tokenize a dot operator', () {
      expectTokens('a.b', [
        t(IDENTIFIER_TOKEN, 'a'),
        t(DOT_TOKEN, '.'),
        t(IDENTIFIER_TOKEN, 'b')]);
    });

    test('should tokenize a unary plus operator', () {
      expectTokens('+a', [
        t(OPERATOR_TOKEN, '+'),
        t(IDENTIFIER_TOKEN, 'a')]);
    });

    test('should tokenize a binary plus operator', () {
      expectTokens('a + b', [
        t(IDENTIFIER_TOKEN, 'a'),
        t(OPERATOR_TOKEN, '+'),
        t(IDENTIFIER_TOKEN, 'b')]);
    });

    test('should tokenize a logical and operator', () {
      expectTokens('a && b', [
        t(IDENTIFIER_TOKEN, 'a'),
        t(OPERATOR_TOKEN, '&&'),
        t(IDENTIFIER_TOKEN, 'b')]);
    });

    test('should tokenize a ternary operator', () {
      expectTokens('a ? b : c', [
        t(IDENTIFIER_TOKEN, 'a'),
        t(OPERATOR_TOKEN, '?'),
        t(IDENTIFIER_TOKEN, 'b'),
        t(COLON_TOKEN, ':'),
        t(IDENTIFIER_TOKEN, 'c')]);
    });

    test('should tokenize "in" expressions', () {
      expectTokens('item in items', [
        t(IDENTIFIER_TOKEN, 'item'),
        t(KEYWORD_TOKEN, 'in'),
        t(IDENTIFIER_TOKEN, 'items')]);
    });

    test('should tokenize an "as" expression', () {
      expectTokens('a as b', [
        t(IDENTIFIER_TOKEN, 'a'),
        t(KEYWORD_TOKEN, 'as'),
        t(IDENTIFIER_TOKEN, 'b')]);
    });

    test('should tokenize keywords', () {
      expectTokens('in', [t(KEYWORD_TOKEN, 'in')]);
      expectTokens('as', [t(KEYWORD_TOKEN, 'as')]);
      expectTokens('this', [t(KEYWORD_TOKEN, 'this')]);
    });

    test('should tokenize groups', () {
      expectTokens('a(b)[]{}', [
        t(IDENTIFIER_TOKEN, 'a'),
        t(GROUPER_TOKEN, '('),
        t(IDENTIFIER_TOKEN, 'b'),
        t(GROUPER_TOKEN, ')'),
        t(GROUPER_TOKEN, '['),
        t(GROUPER_TOKEN, ']'),
        t(GROUPER_TOKEN, '{'),
        t(GROUPER_TOKEN, '}')]);
    });

    test('should tokenize argument lists', () {
      expectTokens('(a, b)', [
        t(GROUPER_TOKEN, '('),
        t(IDENTIFIER_TOKEN, 'a'),
        t(COMMA_TOKEN, ','),
        t(IDENTIFIER_TOKEN, 'b'),
        t(GROUPER_TOKEN, ')')]);
    });

    test('should tokenize maps', () {
      expectTokens("{'a': b}", [
        t(GROUPER_TOKEN, '{'),
        t(STRING_TOKEN, 'a'),
        t(COLON_TOKEN, ':'),
        t(IDENTIFIER_TOKEN, 'b'),
        t(GROUPER_TOKEN, '}')]);
    });

    test('should tokenize lists', () {
      expectTokens("[1, 'a', b]", [
        t(GROUPER_TOKEN, '['),
        t(INTEGER_TOKEN, '1'),
        t(COMMA_TOKEN, ','),
        t(STRING_TOKEN, 'a'),
        t(COMMA_TOKEN, ','),
        t(IDENTIFIER_TOKEN, 'b'),
        t(GROUPER_TOKEN, ']')]);
    });

    test('should tokenize integers', () {
      expectTokens('123', [t(INTEGER_TOKEN, '123')]);
      expectTokens('+123', [t(OPERATOR_TOKEN, '+'), t(INTEGER_TOKEN, '123')]);
      expectTokens('-123', [t(OPERATOR_TOKEN, '-'), t(INTEGER_TOKEN, '123')]);
    });

    test('should tokenize decimals', () {
      expectTokens('1.23', [t(DECIMAL_TOKEN, '1.23')]);
      expectTokens('+1.23', [t(OPERATOR_TOKEN, '+'), t(DECIMAL_TOKEN, '1.23')]);
      expectTokens('-1.23', [t(OPERATOR_TOKEN, '-'), t(DECIMAL_TOKEN, '1.23')]);
    });

    test('should tokenize booleans as identifiers', () {
      expectTokens('true', [t(IDENTIFIER_TOKEN, 'true')]);
      expectTokens('false', [t(IDENTIFIER_TOKEN, 'false')]);
    });

  });
}

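/// Returns a [TokenMatcher] that matches a single [Token] by kind and value.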
TokenMatcher isToken(int kind, String value) => new TokenMatcher(kind, value);

class TokenMatcher extends Matcher {
  final int kind;
  final String value;

  TokenMatcher(this.kind, this.value);

  bool matches(Token t, Map m) => t.kind == kind && t.value == value;

  Description describe(Description d) => d.add('isToken($kind, $value) ');
}

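/// Tokenizes [s] and asserts that the resulting tokens match [expected],
/// comparing each token's kind and value in order.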
expectTokens(String s, List<Token> expected) {
  var tokens = new Tokenizer(s).tokenize();
  var matchers = expected.map((t) => isToken(t.kind, t.value)).toList();
  expect(tokens, matchList(matchers), reason: s);
}

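/// Shorthand for constructing an expected [Token].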
Token t(int kind, String value) => new Token(kind, value);

MatcherList matchList(List matchers) => new MatcherList(matchers);

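/// Matches a list element-wise against a list of matchers, recording the
/// first mismatch so it can be reported by [describeMismatch].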
class MatcherList extends Matcher {
  final List<Matcher> matchers;

  MatcherList(this.matchers);

  bool matches(List o, Map matchState) {
    if (o.length != matchers.length) return false;
    for (int i = 0; i < o.length; i++) {
      var state = new Map();
      if (!matchers[i].matches(o[i], state)) {
        matchState.addAll({
          'index': i,
          'value': o[i],
          'state': state,
        });
        return false;
      }
    }
    return true;
  }

  Description describe(Description d) {
    d.add('matches all: ');
    matchers.forEach((m) => m.describe(d));
    return d;
  }

  Description describeMismatch(item, Description mismatchDescription,
      Map matchState, bool verbose) {
    if (matchState.containsKey('index')) {
      var index = matchState['index'];
      var value = matchState['value'];
      var state = matchState['state'];
      var matcher = matchers[index];
      mismatchDescription.add("Mismatch at index $index: ");
      matcher.describeMismatch(value, mismatchDescription, state, verbose);
    } else if (item.length != matchers.length) {
      mismatchDescription.add('wrong lengths');
    } else {
      mismatchDescription.add('was ').addDescriptionOf(item);
    }
    return mismatchDescription;
  }
}