OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 library tokenizer_test; | 5 library tokenizer_test; |
6 | 6 |
7 import 'package:polymer_expressions/tokenizer.dart'; | 7 import 'package:polymer_expressions/tokenizer.dart'; |
8 import 'package:unittest/unittest.dart'; | 8 import 'package:unittest/unittest.dart'; |
9 | 9 |
10 main() { | 10 main() { |
(...skipping 91 matching lines...) |
102 | 102 |
103 test('should tokenize maps', () { | 103 test('should tokenize maps', () { |
104 expectTokens("{'a': b}", [ | 104 expectTokens("{'a': b}", [ |
105 t(GROUPER_TOKEN, '{'), | 105 t(GROUPER_TOKEN, '{'), |
106 t(STRING_TOKEN, 'a'), | 106 t(STRING_TOKEN, 'a'), |
107 t(COLON_TOKEN, ':'), | 107 t(COLON_TOKEN, ':'), |
108 t(IDENTIFIER_TOKEN, 'b'), | 108 t(IDENTIFIER_TOKEN, 'b'), |
109 t(GROUPER_TOKEN, '}')]); | 109 t(GROUPER_TOKEN, '}')]); |
110 }); | 110 }); |
111 | 111 |
| 112 test('should tokenize lists', () { |
| 113 expectTokens("[1, 'a', b]", [ |
| 114 t(GROUPER_TOKEN, '['), |
| 115 t(INTEGER_TOKEN, '1'), |
| 116 t(COMMA_TOKEN, ','), |
| 117 t(STRING_TOKEN, 'a'), |
| 118 t(COMMA_TOKEN, ','), |
| 119 t(IDENTIFIER_TOKEN, 'b'), |
| 120 t(GROUPER_TOKEN, ']')]); |
| 121 }); |
| 122 |
112 test('should tokenize integers', () { | 123 test('should tokenize integers', () { |
113 expectTokens('123', [t(INTEGER_TOKEN, '123')]); | 124 expectTokens('123', [t(INTEGER_TOKEN, '123')]); |
114 expectTokens('+123', [t(OPERATOR_TOKEN, '+'), t(INTEGER_TOKEN, '123')]); | 125 expectTokens('+123', [t(OPERATOR_TOKEN, '+'), t(INTEGER_TOKEN, '123')]); |
115 expectTokens('-123', [t(OPERATOR_TOKEN, '-'), t(INTEGER_TOKEN, '123')]); | 126 expectTokens('-123', [t(OPERATOR_TOKEN, '-'), t(INTEGER_TOKEN, '123')]); |
116 }); | 127 }); |
117 | 128 |
118 test('should tokenize decimals', () { | 129 test('should tokenize decimals', () { |
119 expectTokens('1.23', [t(DECIMAL_TOKEN, '1.23')]); | 130 expectTokens('1.23', [t(DECIMAL_TOKEN, '1.23')]); |
120 expectTokens('+1.23', [t(OPERATOR_TOKEN, '+'), t(DECIMAL_TOKEN, '1.23')]); | 131 expectTokens('+1.23', [t(OPERATOR_TOKEN, '+'), t(DECIMAL_TOKEN, '1.23')]); |
121 expectTokens('-1.23', [t(OPERATOR_TOKEN, '-'), t(DECIMAL_TOKEN, '1.23')]); | 132 expectTokens('-1.23', [t(OPERATOR_TOKEN, '-'), t(DECIMAL_TOKEN, '1.23')]); |
(...skipping 68 matching lines...) |
190 } else { | 201 } else { |
191 if (item.length != matchers.length) { | 202 if (item.length != matchers.length) { |
192 mismatchDescription.add('wrong lengths'); | 203 mismatchDescription.add('wrong lengths'); |
193 } else { | 204 } else { |
194 mismatchDescription.add('was ').addDescriptionOf(item); | 205 mismatchDescription.add('was ').addDescriptionOf(item); |
195 } | 206 } |
196 } | 207 } |
197 } | 208 } |
198 | 209 |
199 } | 210 } |
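
The hunks above call two helpers, t(...) and expectTokens(...), whose definitions sit inside the skipped regions of the file. As a reading aid only, here is a minimal sketch of what such helpers plausibly look like, assuming the package's Tokenizer exposes a tokenize() method and Token exposes kind and value fields (inferred from the visible usage, not confirmed by this diff):

    // Hypothetical helper: builds a Token from a kind constant and its text.
    t(int kind, String value) => new Token(kind, value);

    // Hypothetical helper: runs the tokenizer over the input string and
    // compares each produced token's kind and value against the expectation.
    expectTokens(String s, List<Token> expected) {
      var tokens = new Tokenizer(s).tokenize();
      expect(tokens.length, expected.length, reason: 'wrong lengths');
      for (var i = 0; i < tokens.length; i++) {
        expect(tokens[i].kind, expected[i].kind);
        expect(tokens[i].value, expected[i].value);
      }
    }

The actual file appears to use a custom Matcher instead of a per-field loop (the describeMismatch fragment visible at the end of the diff, with its 'wrong lengths' branch, belongs to that matcher), which gives richer failure messages than the element-by-element expects sketched here.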