Index: third_party/pkg/angular/test/core/parser/lexer_spec.dart
diff --git a/third_party/pkg/angular/test/core/parser/lexer_spec.dart b/third_party/pkg/angular/test/core/parser/lexer_spec.dart
index 7844268e2933134f18661e5aeddf5bce4ed5e92f..8ccbc26696d7b3b5c595c691763ba1bfb59795c8 100644
--- a/third_party/pkg/angular/test/core/parser/lexer_spec.dart
+++ b/third_party/pkg/angular/test/core/parser/lexer_spec.dart
@@ -4,221 +4,230 @@ import '../../_specs.dart';
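+// Expectation helpers that assert both the position and the specific kind
+// of each token produced by the lexer.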
 class LexerExpect extends Expect {
   LexerExpect(actual) : super(actual);
-  toBeToken(int index, String text) {
+
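+  // Shared base check: [actual] must be a Token at the expected offset.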
+  toBeToken(int index) {
     expect(actual is Token).toEqual(true);
     expect(actual.index).toEqual(index);
-    expect(actual.text).toEqual(text);
+  }
+
+  toBeCharacterToken(int index, String character) {
+    toBeToken(index);
+    expect(character.length).toEqual(1);
+    expect(actual.isCharacter(character.codeUnitAt(0))).toEqual(true);
+  }
+
+  toBeIdentifierToken(int index, String text) {
+    toBeToken(index);
+    expect(actual.isIdentifier).toEqual(true);
+    expect(actual.toString()).toEqual(text);
+  }
+
+  toBeKeywordUndefinedToken(int index) {
+    toBeToken(index);
+    expect(actual.isKeywordUndefined).toEqual(true);
+  }
+
+  toBeOperatorToken(int index, String operator) {
+    toBeToken(index);
+    expect(actual.isOperator(operator)).toEqual(true);
+  }
+
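+  // [input] is the raw source text including quotes; [value] is the
+  // unescaped value the string evaluates to.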
+  toBeStringToken(int index, String input, String value) {
+    toBeToken(index);
+    expect(actual.isString).toEqual(true);
+    StringToken token = actual;
+    expect(token.input).toEqual(input);
+    expect(token.toString()).toEqual(value);
+  }
+
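+  // Compares via toNumber(); covers both int and double literals.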
+  toBeNumberToken(int index, num value) {
+    toBeToken(index);
+    expect(actual.isNumber).toEqual(true);
+    NumberToken token = actual;
+    expect(token.toNumber()).toEqual(value);
   }
 }
+
 expect(actual) => new LexerExpect(actual);
 main() {
   describe('lexer', () {
     Lexer lex;
-    beforeEach(inject((Lexer lexer) {
+    beforeEach((Lexer lexer) {
       lex = lexer;
-    }));
+    });
     // New test case
     it('should tokenize a simple identifier', () {
-      var tokens = lex("j");
+      List<Token> tokens = lex("j");
       expect(tokens.length).toEqual(1);
-      expect(tokens[0]).toBeToken(0, 'j');
+      expect(tokens[0]).toBeIdentifierToken(0, 'j');
     });
     // New test case
     it('should tokenize a dotted identifier', () {
-      var tokens = lex("j.k");
+      List<Token> tokens = lex("j.k");
       expect(tokens.length).toEqual(3);
-      expect(tokens[0]).toBeToken(0, 'j');
-      expect(tokens[1]).toBeToken(1, '.');
-      expect(tokens[2]).toBeToken(2, 'k');
+      expect(tokens[0]).toBeIdentifierToken(0, 'j');
+      expect(tokens[1]).toBeCharacterToken(1, '.');
+      expect(tokens[2]).toBeIdentifierToken(2, 'k');
     });
     it('should tokenize an operator', () {
-      var tokens = lex('j-k');
+      List<Token> tokens = lex('j-k');
       expect(tokens.length).toEqual(3);
-      expect(tokens[1]).toBeToken(1, '-');
+      expect(tokens[1]).toBeOperatorToken(1, '-');
     });
     it('should tokenize an indexed operator', () {
-      var tokens = lex('j[k]');
+      List<Token> tokens = lex('j[k]');
       expect(tokens.length).toEqual(4);
-      expect(tokens[1]).toBeToken(1, '[');
+      expect(tokens[1]).toBeCharacterToken(1, '[');
     });
     it('should tokenize numbers', () {
-      var tokens = lex('88');
+      List<Token> tokens = lex('88');
       expect(tokens.length).toEqual(1);
-      expect(tokens[0]).toBeToken(0, '88');
+      expect(tokens[0]).toBeNumberToken(0, 88);
     });
     it('should tokenize numbers within index ops', () {
-      expect(lex('a[22]')[2]).toBeToken(2, '22');
+      expect(lex('a[22]')[2]).toBeNumberToken(2, 22);
     });
     it('should tokenize simple quoted strings', () {
-      expect(lex('"a"')[0]).toBeToken(0, '"a"');
+      expect(lex('"a"')[0]).toBeStringToken(0, '"a"', 'a');
     });
     it('should tokenize quoted strings with escaped quotes', () {
-      expect(lex('"a\\""')[0]).toBeToken(0, '"a\\""');
+      expect(lex('"a\\""')[0]).toBeStringToken(0, '"a\\""', 'a"');
     });
     it('should tokenize a string', () {
-      var tokens = lex("j-a.bc[22]+1.3|f:'a\\\'c':\"d\\\"e\"");
-      var i = 0;
-      expect(tokens[i]).toBeToken(0, 'j');
-
-      i++;
-      expect(tokens[i]).toBeToken(1, '-');
-
-      i++;
-      expect(tokens[i]).toBeToken(2, 'a');
-
-      i++;
-      expect(tokens[i]).toBeToken(3, '.');
-
-      i++;
-      expect(tokens[i]).toBeToken(4, 'bc');
-
-      i++;
-      expect(tokens[i]).toBeToken(6, '[');
-
-      i++;
-      expect(tokens[i]).toBeToken(7, '22');
-
-      i++;
-      expect(tokens[i]).toBeToken(9, ']');
-
-      i++;
-      expect(tokens[i]).toBeToken(10, '+');
-
-      i++;
-      expect(tokens[i]).toBeToken(11, '1.3');
-
-      i++;
-      expect(tokens[i]).toBeToken(14, '|');
-
-      i++;
-      expect(tokens[i]).toBeToken(15, 'f');
-
-      i++;
-      expect(tokens[i]).toBeToken(16, ':');
-
-      i++;
-      expect(tokens[i]).toBeToken(17, '\'a\\\'c\'');
-
-      i++;
-      expect(tokens[i]).toBeToken(23, ':');
-
-      i++;
-      expect(tokens[i]).toBeToken(24, '"d\\"e"');
+      List<Token> tokens = lex("j-a.bc[22]+1.3|f:'a\\\'c':\"d\\\"e\"");
+      expect(tokens[0]).toBeIdentifierToken(0, 'j');
+      expect(tokens[1]).toBeOperatorToken(1, '-');
+      expect(tokens[2]).toBeIdentifierToken(2, 'a');
+      expect(tokens[3]).toBeCharacterToken(3, '.');
+      expect(tokens[4]).toBeIdentifierToken(4, 'bc');
+      expect(tokens[5]).toBeCharacterToken(6, '[');
+      expect(tokens[6]).toBeNumberToken(7, 22);
+      expect(tokens[7]).toBeCharacterToken(9, ']');
+      expect(tokens[8]).toBeOperatorToken(10, '+');
+      expect(tokens[9]).toBeNumberToken(11, 1.3);
+      expect(tokens[10]).toBeOperatorToken(14, '|');
+      expect(tokens[11]).toBeIdentifierToken(15, 'f');
+      expect(tokens[12]).toBeCharacterToken(16, ':');
+      expect(tokens[13]).toBeStringToken(17, "'a\\'c'", "a'c");
+      expect(tokens[14]).toBeCharacterToken(23, ':');
+      expect(tokens[15]).toBeStringToken(24, '"d\\"e"', 'd"e');
     });
     it('should tokenize undefined', () {
-      var tokens = lex("undefined");
-      var i = 0;
-      expect(tokens[i]).toBeToken(0, 'undefined');
-      expect(tokens[i].value).toEqual(null);
+      List<Token> tokens = lex("undefined");
+      expect(tokens[0]).toBeKeywordUndefinedToken(0);
     });
     it('should ignore whitespace', () {
-      var tokens = lex("a \t \n \r b");
-      expect(tokens[0].text).toEqual('a');
-      expect(tokens[1].text).toEqual('b');
+      List<Token> tokens = lex("a \t \n \r b");
+      expect(tokens[0]).toBeIdentifierToken(0, 'a');
+      expect(tokens[1]).toBeIdentifierToken(8, 'b');
     });
     it('should tokenize quoted string', () {
       var str = "['\\'', \"\\\"\"]";
-      var tokens = lex(str);
-
-      expect(tokens[1].index).toEqual(1);
-      expect(tokens[1].value).toEqual("'");
-
-      expect(tokens[3].index).toEqual(7);
-      expect(tokens[3].value).toEqual('"');
+      List<Token> tokens = lex(str);
+      expect(tokens[1]).toBeStringToken(1, "'\\''", "'");
+      expect(tokens[3]).toBeStringToken(7, '"\\""', '"');
     });
     it('should tokenize escaped quoted string', () {
       var str = '"\\"\\n\\f\\r\\t\\v\\u00A0"';
-      var tokens = lex(str);
-
-      expect(tokens[0].value).toEqual('"\n\f\r\t\v\u00A0');
+      List<Token> tokens = lex(str);
+      expect(tokens.length).toEqual(1);
+      expect(tokens[0].toString()).toEqual('"\n\f\r\t\v\u00A0');
     });
     it('should tokenize unicode', () {
-      var tokens = lex('"\\u00A0"');
+      List<Token> tokens = lex('"\\u00A0"');
       expect(tokens.length).toEqual(1);
-      expect(tokens[0].value).toEqual('\u00a0');
+      expect(tokens[0].toString()).toEqual('\u00a0');
     });
     it('should tokenize relation', () {
-      var tokens = lex("! == != < > <= >=");
-      expect(tokens[0].text).toEqual('!');
-      expect(tokens[1].text).toEqual('==');
-      expect(tokens[2].text).toEqual('!=');
-      expect(tokens[3].text).toEqual('<');
-      expect(tokens[4].text).toEqual('>');
-      expect(tokens[5].text).toEqual('<=');
-      expect(tokens[6].text).toEqual('>=');
+      List<Token> tokens = lex("! == != < > <= >=");
+      expect(tokens[0]).toBeOperatorToken(0, '!');
+      expect(tokens[1]).toBeOperatorToken(2, '==');
+      expect(tokens[2]).toBeOperatorToken(5, '!=');
+      expect(tokens[3]).toBeOperatorToken(8, '<');
+      expect(tokens[4]).toBeOperatorToken(10, '>');
+      expect(tokens[5]).toBeOperatorToken(12, '<=');
+      expect(tokens[6]).toBeOperatorToken(15, '>=');
     });
     it('should tokenize statements', () {
-      var tokens = lex("a;b;");
-      expect(tokens[0].text).toEqual('a');
-      expect(tokens[1].text).toEqual(';');
-      expect(tokens[2].text).toEqual('b');
-      expect(tokens[3].text).toEqual(';');
+      List<Token> tokens = lex("a;b;");
+      expect(tokens[0]).toBeIdentifierToken(0, 'a');
+      expect(tokens[1]).toBeCharacterToken(1, ';');
+      expect(tokens[2]).toBeIdentifierToken(2, 'b');
+      expect(tokens[3]).toBeCharacterToken(3, ';');
     });
     it('should tokenize function invocation', () {
-      var tokens = lex("a()");
-      expect(tokens[0]).toBeToken(0, 'a');
-      expect(tokens[1]).toBeToken(1, '(');
-      expect(tokens[2]).toBeToken(2, ')');
+      List<Token> tokens = lex("a()");
+      expect(tokens[0]).toBeIdentifierToken(0, 'a');
+      expect(tokens[1]).toBeCharacterToken(1, '(');
+      expect(tokens[2]).toBeCharacterToken(2, ')');
     });
     it('should tokenize simple method invocations', () {
-      var tokens = lex("a.method()");
-      expect(tokens[2]).toBeToken(2, 'method');
+      List<Token> tokens = lex("a.method()");
+      expect(tokens[2]).toBeIdentifierToken(2, 'method');
     });
     it('should tokenize method invocation', () {
-      var tokens = lex("a.b.c (d) - e.f()");
-      expect(tokens[0]).toBeToken(0, 'a');
-      expect(tokens[1]).toBeToken(1, '.');
-      expect(tokens[2]).toBeToken(2, 'b');
-      expect(tokens[3]).toBeToken(3, '.');
-      expect(tokens[4]).toBeToken(4, 'c');
-      expect(tokens[5]).toBeToken(6, '(');
-      expect(tokens[6]).toBeToken(7, 'd');
-      expect(tokens[7]).toBeToken(8, ')');
-      expect(tokens[8]).toBeToken(10, '-');
-      expect(tokens[9]).toBeToken(12, 'e');
-      expect(tokens[10]).toBeToken(13, '.');
-      expect(tokens[11]).toBeToken(14, 'f');
-      expect(tokens[12]).toBeToken(15, '(');
-      expect(tokens[13]).toBeToken(16, ')');
+      List<Token> tokens = lex("a.b.c (d) - e.f()");
+      expect(tokens[0]).toBeIdentifierToken(0, 'a');
+      expect(tokens[1]).toBeCharacterToken(1, '.');
+      expect(tokens[2]).toBeIdentifierToken(2, 'b');
+      expect(tokens[3]).toBeCharacterToken(3, '.');
+      expect(tokens[4]).toBeIdentifierToken(4, 'c');
+      expect(tokens[5]).toBeCharacterToken(6, '(');
+      expect(tokens[6]).toBeIdentifierToken(7, 'd');
+      expect(tokens[7]).toBeCharacterToken(8, ')');
+      expect(tokens[8]).toBeOperatorToken(10, '-');
+      expect(tokens[9]).toBeIdentifierToken(12, 'e');
+      expect(tokens[10]).toBeCharacterToken(13, '.');
+      expect(tokens[11]).toBeIdentifierToken(14, 'f');
+      expect(tokens[12]).toBeCharacterToken(15, '(');
+      expect(tokens[13]).toBeCharacterToken(16, ')');
     });
     it('should tokenize number', () {
-      var tokens = lex("0.5");
-      expect(tokens[0].value).toEqual(0.5);
+      List<Token> tokens = lex("0.5");
+      expect(tokens[0]).toBeNumberToken(0, 0.5);
     });
     // NOTE(deboer): NOT A LEXER TEST
     // it('should tokenize negative number', () {
-    //   var tokens = lex("-0.5");
-    //   expect(tokens[0].value).toEqual(-0.5);
+    //   List<Token> tokens = lex("-0.5");
+    //   expect(tokens[0]).toBeNumberToken(0, -0.5);
     // });
     it('should tokenize number with exponent', () {
-      var tokens = lex("0.5E-10");
+      List<Token> tokens = lex("0.5E-10");
       expect(tokens.length).toEqual(1);
-      expect(tokens[0].value).toEqual(0.5E-10);
+      expect(tokens[0]).toBeNumberToken(0, 0.5E-10);
       tokens = lex("0.5E+10");
-      expect(tokens[0].value).toEqual(0.5E+10);
+      expect(tokens[0]).toBeNumberToken(0, 0.5E+10);
     });
     it('should throw exception for invalid exponent', () {
@@ -232,8 +233,8 @@ main() {
     });
     it('should tokenize number starting with a dot', () {
-      var tokens = lex(".5");
-      expect(tokens[0].value).toEqual(0.5);
+      List<Token> tokens = lex(".5");
+      expect(tokens[0]).toBeNumberToken(0, 0.5);
     });
     it('should throw error on invalid unicode', () {