| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library tokenizer_test; | 5 library tokenizer_test; |
| 6 | 6 |
| 7 import 'package:polymer_expressions/tokenizer.dart'; | 7 import 'package:polymer_expressions/tokenizer.dart'; |
| 8 import 'package:unittest/unittest.dart'; | 8 import 'package:unittest/unittest.dart'; |
| 9 | 9 |
| 10 main() { | 10 main() { |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 67 t(IDENTIFIER_TOKEN, 'c')]); | 67 t(IDENTIFIER_TOKEN, 'c')]); |
| 68 }); | 68 }); |
| 69 | 69 |
| 70 test('should tokenize an iterate expression with "in" keyword', () { | 70 test('should tokenize an iterate expression with "in" keyword', () { |
| 71 expectTokens('item in items', [ | 71 expectTokens('item in items', [ |
| 72 t(IDENTIFIER_TOKEN, 'item'), | 72 t(IDENTIFIER_TOKEN, 'item'), |
| 73 t(KEYWORD_TOKEN, 'in'), | 73 t(KEYWORD_TOKEN, 'in'), |
| 74 t(IDENTIFIER_TOKEN, 'items')]); | 74 t(IDENTIFIER_TOKEN, 'items')]); |
| 75 }); | 75 }); |
| 76 | 76 |
| 77 test('should tokenize an "as" expression', () {
| 78 expectTokens('a as b', [ |
| 79 t(IDENTIFIER_TOKEN, 'a'), |
| 80 t(KEYWORD_TOKEN, 'as'), |
| 81 t(IDENTIFIER_TOKEN, 'b')]); |
| 82 }); |
| 83 |
| 77 test('should tokenize keywords', () { | 84 test('should tokenize keywords', () { |
| 78 expectTokens('in', [t(KEYWORD_TOKEN, 'in')]); | 85 expectTokens('in', [t(KEYWORD_TOKEN, 'in')]); |
| 79 expectTokens('this', [t(KEYWORD_TOKEN, 'this')]); | 86 expectTokens('this', [t(KEYWORD_TOKEN, 'this')]); |
| 80 }); | 87 }); |
| 81 | 88 |
| 82 test('should tokenize groups', () { | 89 test('should tokenize groups', () { |
| 83 expectTokens('a(b)[]{}', [ | 90 expectTokens('a(b)[]{}', [ |
| 84 t(IDENTIFIER_TOKEN, 'a'), | 91 t(IDENTIFIER_TOKEN, 'a'), |
| 85 t(GROUPER_TOKEN, '('), | 92 t(GROUPER_TOKEN, '('), |
| 86 t(IDENTIFIER_TOKEN, 'b'), | 93 t(IDENTIFIER_TOKEN, 'b'), |
| (...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 201 } else { | 208 } else { |
| 202 if (item.length != matchers.length) { | 209 if (item.length != matchers.length) { |
| 203 mismatchDescription.add('wrong lengths'); | 210 mismatchDescription.add('wrong lengths'); |
| 204 } else { | 211 } else { |
| 205 mismatchDescription.add('was ').addDescriptionOf(item); | 212 mismatchDescription.add('was ').addDescriptionOf(item); |
| 206 } | 213 } |
| 207 } | 214 } |
| 208 } | 215 } |
| 209 | 216 |
| 210 } | 217 } |
| OLD | NEW |