Index: pkg/polymer_expressions/test/tokenizer_test.dart
diff --git a/pkg/polymer_expressions/test/tokenizer_test.dart b/pkg/polymer_expressions/test/tokenizer_test.dart
index a93dce73ba615fdc61edaaad20d7c7e761bf5da8..05cfffec2f86fd4569c4e455f201fc3c438b6f9b 100644
--- a/pkg/polymer_expressions/test/tokenizer_test.dart
+++ b/pkg/polymer_expressions/test/tokenizer_test.dart
@@ -74,6 +74,13 @@ main() {
         t(IDENTIFIER_TOKEN, 'items')]);
   });
 
+  test('should tokenize an "as" expression', () {
+    expectTokens('a as b', [
+        t(IDENTIFIER_TOKEN, 'a'),
+        t(KEYWORD_TOKEN, 'as'),
+        t(IDENTIFIER_TOKEN, 'b')]);
+  });
+
   test('should tokenize keywords', () {
     expectTokens('in', [t(KEYWORD_TOKEN, 'in')]);
     expectTokens('this', [t(KEYWORD_TOKEN, 'this')]);
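For context, the new test only passes if the tokenizer classifies 'as' as a keyword token rather than an identifier token. The standalone Dart sketch below illustrates that expected classification; the keyword set and the classify helper are illustrative assumptions for this note, not the actual polymer_expressions tokenizer code.

// Illustrative sketch only (assumed names), not the polymer_expressions tokenizer.
// It shows the behavior the new test expects: 'as' is reported as a keyword,
// while 'a' and 'b' remain identifiers.
const assumedKeywords = {'as', 'in', 'this'}; // assumed keyword set

String classify(String word) =>
    assumedKeywords.contains(word) ? 'KEYWORD_TOKEN' : 'IDENTIFIER_TOKEN';

void main() {
  for (final word in 'a as b'.split(' ')) {
    print('$word -> ${classify(word)}');
  }
  // Prints:
  // a -> IDENTIFIER_TOKEN
  // as -> KEYWORD_TOKEN
  // b -> IDENTIFIER_TOKEN
}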