| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library tokenizer_test; | 5 library tokenizer_test; |
| 6 | 6 |
| 7 import 'package:polymer_expressions/tokenizer.dart'; | 7 import 'package:polymer_expressions/tokenizer.dart'; |
| 8 import 'package:unittest/unittest.dart'; | 8 import 'package:unittest/unittest.dart'; |
| 9 | 9 |
| 10 main() { | 10 main() { |
| (...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 60 | 60 |
| 61 test('should tokenize a ternary operator', () { | 61 test('should tokenize a ternary operator', () { |
| 62 expectTokens('a ? b : c', [ | 62 expectTokens('a ? b : c', [ |
| 63 t(IDENTIFIER_TOKEN, 'a'), | 63 t(IDENTIFIER_TOKEN, 'a'), |
| 64 t(OPERATOR_TOKEN, '?'), | 64 t(OPERATOR_TOKEN, '?'), |
| 65 t(IDENTIFIER_TOKEN, 'b'), | 65 t(IDENTIFIER_TOKEN, 'b'), |
| 66 t(COLON_TOKEN, ':'), | 66 t(COLON_TOKEN, ':'), |
| 67 t(IDENTIFIER_TOKEN, 'c')]); | 67 t(IDENTIFIER_TOKEN, 'c')]); |
| 68 }); | 68 }); |
| 69 | 69 |
| 70 test('should tokenize an iterate expression with "in" keyword', () { | 70 test('should tokenize "in" expressions', () { |
| 71 expectTokens('item in items', [ | 71 expectTokens('item in items', [ |
| 72 t(IDENTIFIER_TOKEN, 'item'), | 72 t(IDENTIFIER_TOKEN, 'item'), |
| 73 t(KEYWORD_TOKEN, 'in'), | 73 t(KEYWORD_TOKEN, 'in'), |
| 74 t(IDENTIFIER_TOKEN, 'items')]); | 74 t(IDENTIFIER_TOKEN, 'items')]); |
| 75 }); | 75 }); |
| 76 | 76 |
| 77 test('should tokenize an "as" expression', () {
| 78 expectTokens('a as b', [ |
| 79 t(IDENTIFIER_TOKEN, 'a'), |
| 80 t(KEYWORD_TOKEN, 'as'), |
| 81 t(IDENTIFIER_TOKEN, 'b')]); |
| 82 }); |
| 83 |
| 77 test('should tokenize keywords', () { | 84 test('should tokenize keywords', () { |
| 78 expectTokens('in', [t(KEYWORD_TOKEN, 'in')]); | 85 expectTokens('in', [t(KEYWORD_TOKEN, 'in')]); |
| 86 expectTokens('as', [t(KEYWORD_TOKEN, 'as')]); |
| 79 expectTokens('this', [t(KEYWORD_TOKEN, 'this')]); | 87 expectTokens('this', [t(KEYWORD_TOKEN, 'this')]); |
| 80 }); | 88 }); |
| 81 | 89 |
| 82 test('should tokenize groups', () { | 90 test('should tokenize groups', () { |
| 83 expectTokens('a(b)[]{}', [ | 91 expectTokens('a(b)[]{}', [ |
| 84 t(IDENTIFIER_TOKEN, 'a'), | 92 t(IDENTIFIER_TOKEN, 'a'), |
| 85 t(GROUPER_TOKEN, '('), | 93 t(GROUPER_TOKEN, '('), |
| 86 t(IDENTIFIER_TOKEN, 'b'), | 94 t(IDENTIFIER_TOKEN, 'b'), |
| 87 t(GROUPER_TOKEN, ')'), | 95 t(GROUPER_TOKEN, ')'), |
| 88 t(GROUPER_TOKEN, '['), | 96 t(GROUPER_TOKEN, '['), |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 201 } else { | 209 } else { |
| 202 if (item.length != matchers.length) { | 210 if (item.length != matchers.length) { |
| 203 mismatchDescription.add('wrong lengths'); | 211 mismatchDescription.add('wrong lengths'); |
| 204 } else { | 212 } else { |
| 205 mismatchDescription.add('was ').addDescriptionOf(item); | 213 mismatchDescription.add('was ').addDescriptionOf(item); |
| 206 } | 214 } |
| 207 } | 215 } |
| 208 } | 216 } |
| 209 | 217 |
| 210 } | 218 } |
| OLD | NEW |