Chromium Code Reviews

Diff: pkg/analyzer/lib/src/generated/parser.dart

Issue 2803563003: enhance analyzer to parse uppercase and built-in/pseudo keywords (Closed)
Patch Set: rebase (created 3 years, 8 months ago)
 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 library analyzer.src.generated.parser;

 import 'dart:collection';
 import "dart:math" as math;

 import 'package:analyzer/dart/ast/ast.dart';
(...skipping 140 matching lines...)
       return true;
     }
     return needsSpace;
   }
 }

 /**
  * A parser used to parse tokens into an AST structure.
  */
 class Parser {
-  static String ASYNC = "async";
-
-  static String _AWAIT = "await";
-
-  static String _HIDE = "hide";
-
-  static String _OF = "of";
-
-  static String _ON = "on";
-
-  static String _NATIVE = "native";
-
-  static String _SHOW = "show";
-
-  static String SYNC = "sync";
-
-  static String _YIELD = "yield";
+  static String ASYNC = Keyword.ASYNC.syntax;
+
+  static String _AWAIT = Keyword.AWAIT.syntax;
+
+  static String _HIDE = Keyword.HIDE.syntax;
+
+  static String _SHOW = Keyword.SHOW.syntax;
+
+  static String SYNC = Keyword.SYNC.syntax;
+
+  static String _YIELD = Keyword.YIELD.syntax;

   /**
    * The source being parsed.
    */
   final Source _source;

   /**
    * The error listener that will be informed of any errors that are found
    * during the parse.
    */
(...skipping 994 matching lines...)
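A note on the change above: the parser's lexeme constants are now derived from the scanner's keyword table instead of duplicating the strings, so the parser and scanner cannot drift apart. The sketch below is illustrative only; it assumes a Keyword-like class with a `syntax` field (the field name comes from the diff, everything else here is hypothetical):

    // Illustrative stand-in for the scanner's Keyword table, not the real class.
    class Keyword {
      final String syntax; // the keyword's lexeme, e.g. "async"
      const Keyword._(this.syntax);

      static const Keyword ASYNC = Keyword._('async');
      static const Keyword SHOW = Keyword._('show');
    }

    void main() {
      // A Parser.ASYNC defined as Keyword.ASYNC.syntax always agrees with the
      // scanner's notion of the lexeme.
      print(Keyword.ASYNC.syntax); // async
      print(Keyword.SHOW.syntax);  // show
    }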
       }
     }
     if (withClause != null && extendsClause == null) {
       _reportErrorForToken(
           ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword);
     }
     //
     // Look for and skip over the extra-lingual 'native' specification.
     //
     NativeClause nativeClause = null;
-    if (_matchesString(_NATIVE) && _tokenMatches(_peek(), TokenType.STRING)) {
+    if (_matchesKeyword(Keyword.NATIVE) &&
+        _tokenMatches(_peek(), TokenType.STRING)) {
       nativeClause = _parseNativeClause();
     }
     //
     // Parse the body of the class.
     //
     Token leftBracket = null;
     List<ClassMember> members = null;
     Token rightBracket = null;
     if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
       leftBracket = getAndAdvance();
(...skipping 376 matching lines...)

   /**
    * Parse a single combinator. Return the combinator that was parsed, or `null`
    * if no combinator is found.
    *
    *     combinator ::=
    *         'show' identifier (',' identifier)*
    *       | 'hide' identifier (',' identifier)*
    */
   Combinator parseCombinator() {
-    if (_matchesString(_SHOW)) {
+    if (_matchesKeyword(Keyword.SHOW)) {
       return astFactory.showCombinator(getAndAdvance(), parseIdentifierList());
-    } else if (_matchesString(_HIDE)) {
+    } else if (_matchesKeyword(Keyword.HIDE)) {
       return astFactory.hideCombinator(getAndAdvance(), parseIdentifierList());
     }
     return null;
   }

   /**
    * Parse a list of combinators in a directive. Return the combinators that
    * were parsed, or `null` if there are no combinators.
    *
    *     combinator ::=
(...skipping 308 matching lines...)
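For readers unfamiliar with combinators: they appear on import and export directives, and after this change `show` and `hide` reach parseCombinator as keyword tokens rather than plain identifiers. A small example of the surface syntax being parsed (directives only, using SDK libraries so the snippet stands alone):

    // 'show' keeps only the listed names; 'hide' removes them.
    import 'dart:math' show max, min;
    export 'dart:async' hide Timer;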
             ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES);
       } else {
         if (directives.length > 0) {
           _reportErrorForCurrentToken(
               ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST);
         }
         libraryDirectiveFound = true;
       }
       return parseLibraryDirective(commentAndMetadata);
     } else if (keyword == Keyword.PART) {
-      if (_tokenMatchesString(_peek(), _OF)) {
+      if (_tokenMatchesKeyword(_peek(), Keyword.OF)) {
         partOfDirectiveFound = true;
         return _parsePartOfDirective(commentAndMetadata);
       } else {
         partDirectiveFound = true;
         return _parsePartDirective(commentAndMetadata);
       }
     } else {
       // Internal error: this method should not have been invoked if the
       // current token was something other than one of the above.
       throw new StateError(
(...skipping 1022 matching lines...)
    *
    *     forInitializerStatement ::=
    *         localVariableDeclaration ';'
    *       | expression? ';'
    */
   Statement parseForStatement() {
     bool wasInLoop = _inLoop;
     _inLoop = true;
     try {
       Token awaitKeyword = null;
-      if (_matchesString(_AWAIT)) {
+      if (_matchesKeyword(Keyword.AWAIT)) {
         awaitKeyword = getAndAdvance();
       }
       Token forKeyword = _expectKeyword(Keyword.FOR);
       Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
       VariableDeclarationList variableList = null;
       Expression initialization = null;
       if (!_matches(TokenType.SEMICOLON)) {
         CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
         if (_matchesIdentifier() &&
             (_tokenMatchesKeyword(_peek(), Keyword.IN) ||
(...skipping 142 matching lines...)
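The `await` recognized at the top of parseForStatement is the prefix of an asynchronous for-in loop, which is only reachable here when the enclosing body is async. A short example of the form in question:

    import 'dart:async';

    // 'await for' iterates a Stream; it is only legal inside an async body,
    // which is why the statement parser checks _inAsync before taking this path.
    Future<int> sumStream(Stream<int> values) async {
      var total = 0;
      await for (final value in values) {
        total += value;
      }
      return total;
    }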
     if (type == TokenType.SEMICOLON) {
       if (!mayBeEmpty) {
         _reportErrorForCurrentToken(emptyErrorCode);
       }
       return astFactory.emptyFunctionBody(getAndAdvance());
     }
     Token keyword = null;
     Token star = null;
     bool foundAsync = false;
     bool foundSync = false;
-    if (type == TokenType.IDENTIFIER) {
+    if (type == TokenType.KEYWORD) {
       String lexeme = _currentToken.lexeme;
       if (lexeme == ASYNC) {
         foundAsync = true;
         keyword = getAndAdvance();
         if (_matches(TokenType.STAR)) {
           star = getAndAdvance();
           _inGenerator = true;
         }
         type = _currentToken.type;
         _inAsync = true;
(...skipping 40 matching lines...)
           _reportErrorForToken(
               ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword);
         }
       }
       if (!_parseFunctionBodies) {
         _skipBlock();
         return astFactory
             .emptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON));
       }
       return astFactory.blockFunctionBody(keyword, star, parseBlock());
-    } else if (_matchesString(_NATIVE)) {
+    } else if (_matchesKeyword(Keyword.NATIVE)) {
       Token nativeToken = getAndAdvance();
       StringLiteral stringLiteral = null;
       if (_matches(TokenType.STRING)) {
         stringLiteral = _parseStringLiteralUnchecked();
       }
       return astFactory.nativeFunctionBody(
           nativeToken, stringLiteral, _expect(TokenType.SEMICOLON));
     } else {
       // Invalid function body
       _reportErrorForCurrentToken(emptyErrorCode);
(...skipping 133 matching lines...)
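The branch above classifies function bodies by their `async`, `async*`, and `sync*` modifiers; after this CL those modifiers arrive as TokenType.KEYWORD tokens rather than identifiers, hence the changed type check. The three body forms being distinguished:

    import 'dart:async';

    // foundAsync, no star: an async body returning a Future.
    Future<int> fetchValue() async => 42;

    // foundAsync with star: an async generator producing a Stream.
    Stream<int> countTo(int n) async* {
      for (var i = 1; i <= n; i++) {
        yield i;
      }
    }

    // foundSync with star: a sync generator producing an Iterable.
    Iterable<int> squares(int n) sync* {
      for (var i = 1; i <= n; i++) {
        yield i * i;
      }
    }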
    *     optionalPositionalParameterTypes ::=
    *         '[' normalParameterTypes ','? ']'
    *     namedParameterTypes ::=
    *         '{' typedIdentifier (',' typedIdentifier)* ','? '}'
    *     typedIdentifier ::=
    *         type identifier
    */
   GenericFunctionType parseGenericFunctionTypeAfterReturnType(
       TypeAnnotation returnType) {
     Token functionKeyword = null;
-    if (_matchesString('Function')) {
+    if (_matchesKeyword(Keyword.FUNCTION)) {
       functionKeyword = getAndAdvance();
     } else if (_matchesIdentifier()) {
       _reportErrorForCurrentToken(ParserErrorCode.NAMED_FUNCTION_TYPE);
     } else {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_FUNCTION_KEYWORD);
     }
     TypeParameterList typeParameters = null;
     if (_matches(TokenType.LT)) {
       typeParameters = parseTypeParameterList();
     }
(...skipping 188 matching lines...)
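parseGenericFunctionTypeAfterReturnType consumes the `Function` keyword of a generic function type, which this CL also routes through the keyword table (Keyword.FUNCTION). For reference, the syntax it accepts looks like this (names are illustrative):

    // A generic function type in a typedef and as a parameter type.
    typedef Transform<T> = T Function(T value);

    int applyTwice(int Function(int) f, int seed) => f(f(seed));

    void main() {
      print(applyTwice((x) => x + 1, 0)); // 2
    }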
     if (_matchesKeyword(Keyword.DEFERRED)) {
       deferredToken = getAndAdvance();
     }
     if (_matchesKeyword(Keyword.AS)) {
       asToken = getAndAdvance();
       prefix = parseSimpleIdentifier(isDeclaration: true);
     } else if (deferredToken != null) {
       _reportErrorForCurrentToken(
           ParserErrorCode.MISSING_PREFIX_IN_DEFERRED_IMPORT);
     } else if (!_matches(TokenType.SEMICOLON) &&
-        !_matchesString(_SHOW) &&
-        !_matchesString(_HIDE)) {
+        !_matchesKeyword(Keyword.SHOW) &&
+        !_matchesKeyword(Keyword.HIDE)) {
       Token nextToken = _peek();
       if (_tokenMatchesKeyword(nextToken, Keyword.AS) ||
-          _tokenMatchesString(nextToken, _SHOW) ||
-          _tokenMatchesString(nextToken, _HIDE)) {
+          _tokenMatchesKeyword(nextToken, Keyword.SHOW) ||
+          _tokenMatchesKeyword(nextToken, Keyword.HIDE)) {
         _reportErrorForCurrentToken(
             ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken]);
         _advance();
         if (_matchesKeyword(Keyword.AS)) {
           asToken = getAndAdvance();
           prefix = parseSimpleIdentifier(isDeclaration: true);
         }
       }
     }
     List<Combinator> combinators = parseCombinators();
(...skipping 416 matching lines...)
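The import parsing above accepts optional `deferred`, `as`, and combinator clauses, with `show` and `hide` now matched as keywords. The directive shapes involved, for reference (the package URI is hypothetical):

    // prefix plus combinators on a regular import
    import 'dart:math' as math show max, min;

    // a deferred import must declare a prefix, which the parser enforces with
    // ParserErrorCode.MISSING_PREFIX_IN_DEFERRED_IMPORT
    import 'package:greetings/hello.dart' deferred as hello;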
     TokenType type = _currentToken.type;
     if (type == TokenType.OPEN_CURLY_BRACKET) {
       if (_tokenMatches(_peek(), TokenType.STRING)) {
         Token afterString = skipStringLiteral(_currentToken.next);
         if (afterString != null && afterString.type == TokenType.COLON) {
           return astFactory.expressionStatement(
               parseExpression2(), _expect(TokenType.SEMICOLON));
         }
       }
       return parseBlock();
-    } else if (type == TokenType.KEYWORD && !_currentToken.keyword.isBuiltIn) {
+    } else if (type == TokenType.KEYWORD &&
+        !_currentToken.keyword.isBuiltInOrPseudo) {
       Keyword keyword = _currentToken.keyword;
       // TODO(jwren) compute some metrics to figure out a better order for this
       // if-then sequence to optimize performance
       if (keyword == Keyword.ASSERT) {
         return parseAssertStatement();
       } else if (keyword == Keyword.BREAK) {
         return parseBreakStatement();
       } else if (keyword == Keyword.CONTINUE) {
         return parseContinueStatement();
       } else if (keyword == Keyword.DO) {
(...skipping 95 matching lines...)
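The guard above takes the keyword fast path only for reserved words; built-in and pseudo keywords are excluded because they may still begin an ordinary expression or declaration. A small illustration, relying on the fact that pseudo keywords (and built-in identifiers such as 'abstract') can still name ordinary variables:

    // Reserved words such as 'class' or 'if' can never be identifiers, but
    // built-in identifiers and pseudo keywords still can, so a statement
    // starting with one of them is not necessarily a keyword-introduced
    // statement.
    void main() {
      var on = 1;    // 'on' appears in try/on clauses
      var show = 2;  // 'show' appears in combinators
      var async = 3; // 'async' marks asynchronous bodies
      print(on + show + async);
    }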
         return astFactory.expressionStatement(
             parseExpression2(), _expect(TokenType.SEMICOLON));
       } else {
         //
         // We have found an error of some kind. Try to recover.
         //
         _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
         return astFactory
             .emptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
       }
-    } else if (_inGenerator && _matchesString(_YIELD)) {
+    } else if (_inGenerator && _matchesKeyword(Keyword.YIELD)) {
       return parseYieldStatement();
-    } else if (_inAsync && _matchesString(_AWAIT)) {
+    } else if (_inAsync && _matchesKeyword(Keyword.AWAIT)) {
       if (_tokenMatchesKeyword(_peek(), Keyword.FOR)) {
         return parseForStatement();
       }
       return astFactory.expressionStatement(
           parseExpression2(), _expect(TokenType.SEMICOLON));
-    } else if (_matchesString(_AWAIT) &&
+    } else if (_matchesKeyword(Keyword.AWAIT) &&
         _tokenMatchesKeyword(_peek(), Keyword.FOR)) {
       Token awaitToken = _currentToken;
       Statement statement = parseForStatement();
       if (statement is! ForStatement) {
         _reportErrorForToken(
             CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken);
       }
       return statement;
     } else if (type == TokenType.SEMICOLON) {
       return parseEmptyStatement();
(...skipping 172 matching lines...)
    *
    * This method assumes that the current token matches `Keyword.PART`.
    *
    *     partDirective ::=
    *         metadata 'part' stringLiteral ';'
    *
    *     partOfDirective ::=
    *         metadata 'part' 'of' identifier ';'
    */
   Directive parsePartOrPartOfDirective(CommentAndMetadata commentAndMetadata) {
-    if (_tokenMatchesString(_peek(), _OF)) {
+    if (_tokenMatchesKeyword(_peek(), Keyword.OF)) {
       return _parsePartOfDirective(commentAndMetadata);
     }
     return _parsePartDirective(commentAndMetadata);
   }

   /**
    * Parse a postfix expression. Return the postfix expression that was parsed.
    *
    *     postfixExpression ::=
    *         assignableExpression postfixOperator
(...skipping 651 matching lines...)
    *         'catch' '(' identifier (',' identifier)? ')'
    *
    *     finallyPart ::=
    *         'finally' block
    */
   Statement parseTryStatement() {
     Token tryKeyword = getAndAdvance();
     Block body = _parseBlockChecked();
     List<CatchClause> catchClauses = <CatchClause>[];
     Block finallyClause = null;
-    while (_matchesString(_ON) || _matchesKeyword(Keyword.CATCH)) {
+    while (_matchesKeyword(Keyword.ON) || _matchesKeyword(Keyword.CATCH)) {
       Token onKeyword = null;
       TypeName exceptionType = null;
-      if (_matchesString(_ON)) {
+      if (_matchesKeyword(Keyword.ON)) {
         onKeyword = getAndAdvance();
         exceptionType = parseTypeAnnotation(false);
       }
       Token catchKeyword = null;
       Token leftParenthesis = null;
       SimpleIdentifier exceptionParameter = null;
       Token comma = null;
       SimpleIdentifier stackTraceParameter = null;
       Token rightParenthesis = null;
       if (_matchesKeyword(Keyword.CATCH)) {
(...skipping 239 matching lines...)
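For reference, the loop above accepts any sequence of `on` and `catch` clauses followed by an optional `finally`, with `on` now recognized as a keyword. A plain Dart example of the forms being parsed:

    void readConfig() {
      try {
        throw const FormatException('bad config');
      } on FormatException catch (e) {
        // 'on' names the caught type; 'catch' binds the exception object.
        print('format problem: $e');
      } catch (e, stackTrace) {
        // a bare 'catch' with the optional stack-trace parameter
        print('other problem: $e\n$stackTrace');
      } finally {
        print('done');
      }
    }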
         _reportErrorForCurrentToken(
             ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]);
         return astFactory.prefixExpression(
             operator, astFactory.superExpression(getAndAdvance()));
       }
       return astFactory.prefixExpression(
           operator, _parseAssignableExpressionNotStartingWithSuper(false));
     } else if (type == TokenType.PLUS) {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
       return createSyntheticIdentifier();
-    } else if (_inAsync && _matchesString(_AWAIT)) {
+    } else if (_inAsync && _matchesKeyword(Keyword.AWAIT)) {
       return parseAwaitExpression();
     }
     return parsePostfixExpression();
   }

   /**
    * Parse a variable declaration. Return the variable declaration that was
    * parsed.
    *
    *     variableDeclaration ::=
(...skipping 432 matching lines...)
     } else {
       buffer.write(Character.toChars(codePoint));
     }
   }

   /**
    * Return `true` if we are positioned at the keyword 'Function' in a generic
    * function type alias.
    */
   bool _atGenericFunctionTypeAfterReturnType(Token startToken) {
-    if (_tokenMatchesString(startToken, 'Function')) {
+    if (_tokenMatchesKeyword(startToken, Keyword.FUNCTION)) {
       Token next = startToken.next;
       if (next != null &&
           (_tokenMatches(next, TokenType.OPEN_PAREN) ||
               _tokenMatches(next, TokenType.LT))) {
         return true;
       }
     }
     return false;
   }

(...skipping 2278 matching lines...)
   /**
    * Return `true` if the given [token] matches the given [keyword].
    */
   bool _tokenMatchesKeyword(Token token, Keyword keyword) =>
       token.keyword == keyword;

   /**
    * Return `true` if the given [token] matches a pseudo keyword.
    */
   bool _tokenMatchesPseudoKeyword(Token token) =>
-      token.keyword?.isBuiltIn ?? false;
+      token.keyword?.isBuiltInOrPseudo ?? false;

   /**
    * Return `true` if the given [token] matches the given [identifier].
    */
   bool _tokenMatchesString(Token token, String identifier) =>
       token.type == TokenType.IDENTIFIER && token.lexeme == identifier;

   /**
    * Translate the characters at the given [index] in the given [lexeme],
    * appending the translated character to the given [buffer]. The index is
(...skipping 490 matching lines...)
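The three helpers above summarize the behavioral shift in this CL: _tokenMatchesString only matches identifier tokens, so once lexemes like 'show' and 'await' are scanned as keyword tokens, call sites must compare against Keyword values instead. A condensed, self-contained sketch of that contrast using simplified stand-ins (not the analyzer's real Token API):

    // Simplified stand-ins for the analyzer's TokenType/Token types.
    enum SimpleTokenType { identifier, keyword }

    class SimpleToken {
      final SimpleTokenType type;
      final String lexeme;
      final String? keyword; // non-null only for keyword tokens
      SimpleToken(this.type, this.lexeme, [this.keyword]);
    }

    // Old style: succeeds only for identifier tokens with the right lexeme.
    bool matchesString(SimpleToken token, String identifier) =>
        token.type == SimpleTokenType.identifier && token.lexeme == identifier;

    // New style: succeeds whenever the token carries the expected keyword.
    bool matchesKeyword(SimpleToken token, String keyword) =>
        token.keyword == keyword;

    void main() {
      // After this CL 'show' scans as a keyword token, so the string-based
      // check stops matching while the keyword-based check still does.
      final show = SimpleToken(SimpleTokenType.keyword, 'show', 'show');
      print(matchesString(show, 'show'));  // false
      print(matchesKeyword(show, 'show')); // true
    }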
     }
     if (modifiers.finalKeyword != null) {
       _reportErrorForToken(
           ParserErrorCode.FINAL_TYPEDEF, modifiers.finalKeyword);
     }
     if (modifiers.varKeyword != null) {
       _reportErrorForToken(ParserErrorCode.VAR_TYPEDEF, modifiers.varKeyword);
     }
   }
 }
