Chromium Code Reviews

Unified Diff: packages/analyzer/lib/src/generated/parser.dart

Issue 2990843002: Removed fixed dependencies (Closed)
Patch Set: Created 3 years, 5 months ago
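
Context for the diff below: the patched file defines the analyzer's hand-written Parser class, and this change replaces the old relative imports and reflective MethodTrampoline table with package: imports. A minimal sketch of how such a Parser is typically driven is shown here for orientation only; it is not part of the patch. The Parser(source, errorListener) constructor, parseCompilationUnit, and the scanner/reader imports are visible in the diff, while StringSource and the exact main() wiring are assumptions.

import 'package:analyzer/error/listener.dart' show AnalysisErrorListener;
import 'package:analyzer/src/dart/scanner/reader.dart' show CharSequenceReader;
import 'package:analyzer/src/dart/scanner/scanner.dart' show Scanner;
import 'package:analyzer/src/generated/parser.dart' show Parser;
import 'package:analyzer/src/string_source.dart' show StringSource; // assumed helper for in-memory sources

void main() {
  var contents = 'main() { print("hello"); }';
  var source = new StringSource(contents, '/test.dart'); // assumed constructor shape
  var errorListener = AnalysisErrorListener.NULL_LISTENER;
  // Tokenize the source, then hand the resulting token stream to the parser.
  var scanner = new Scanner(source, new CharSequenceReader(contents), errorListener);
  var token = scanner.tokenize();
  var parser = new Parser(source, errorListener);
  var unit = parser.parseCompilationUnit(token);
  print(unit.declarations.length);
}
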
Index: packages/analyzer/lib/src/generated/parser.dart
diff --git a/packages/analyzer/lib/src/generated/parser.dart b/packages/analyzer/lib/src/generated/parser.dart
index 76f9dc7e76e32af10ddc3113de95be5c22ab1a81..fb111fff621a425b6654905c75492e0f82702878 100644
--- a/packages/analyzer/lib/src/generated/parser.dart
+++ b/packages/analyzer/lib/src/generated/parser.dart
@@ -2,470 +2,29 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
-library engine.parser;
+library analyzer.src.generated.parser;
import 'dart:collection';
import "dart:math" as math;
-import 'ast.dart';
-import 'engine.dart' show AnalysisEngine, AnalysisOptionsImpl;
-import 'error.dart';
-import 'java_core.dart';
-import 'java_engine.dart';
-import 'scanner.dart';
-import 'source.dart';
-import 'utilities_collection.dart' show TokenMap;
-import 'utilities_dart.dart';
-
-Map<String, MethodTrampoline> methodTable_Parser = <String, MethodTrampoline>{
- 'parseCompilationUnit_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target.parseCompilationUnit(arg0)),
- 'parseDirectives_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target.parseDirectives(arg0)),
- 'parseExpression_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target.parseExpression(arg0)),
- 'parseStatement_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target.parseStatement(arg0)),
- 'parseStatements_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target.parseStatements(arg0)),
- 'parseAnnotation_0':
- new MethodTrampoline(0, (Parser target) => target.parseAnnotation()),
- 'parseArgument_0':
- new MethodTrampoline(0, (Parser target) => target.parseArgument()),
- 'parseArgumentList_0':
- new MethodTrampoline(0, (Parser target) => target.parseArgumentList()),
- 'parseBitwiseOrExpression_0': new MethodTrampoline(
- 0, (Parser target) => target.parseBitwiseOrExpression()),
- 'parseBlock_0':
- new MethodTrampoline(0, (Parser target) => target.parseBlock()),
- 'parseClassMember_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target.parseClassMember(arg0)),
- 'parseCompilationUnit_0': new MethodTrampoline(
- 0, (Parser target) => target.parseCompilationUnit2()),
- 'parseConditionalExpression_0': new MethodTrampoline(
- 0, (Parser target) => target.parseConditionalExpression()),
- 'parseConstructorName_0':
- new MethodTrampoline(0, (Parser target) => target.parseConstructorName()),
- 'parseExpression_0':
- new MethodTrampoline(0, (Parser target) => target.parseExpression2()),
- 'parseExpressionWithoutCascade_0': new MethodTrampoline(
- 0, (Parser target) => target.parseExpressionWithoutCascade()),
- 'parseExtendsClause_0':
- new MethodTrampoline(0, (Parser target) => target.parseExtendsClause()),
- 'parseFormalParameterList_0': new MethodTrampoline(
- 0, (Parser target) => target.parseFormalParameterList()),
- 'parseFunctionExpression_0': new MethodTrampoline(
- 0, (Parser target) => target.parseFunctionExpression()),
- 'parseImplementsClause_0': new MethodTrampoline(
- 0, (Parser target) => target.parseImplementsClause()),
- 'parseLabel_0':
- new MethodTrampoline(0, (Parser target) => target.parseLabel()),
- 'parseLibraryIdentifier_0': new MethodTrampoline(
- 0, (Parser target) => target.parseLibraryIdentifier()),
- 'parseLogicalOrExpression_0': new MethodTrampoline(
- 0, (Parser target) => target.parseLogicalOrExpression()),
- 'parseMapLiteralEntry_0':
- new MethodTrampoline(0, (Parser target) => target.parseMapLiteralEntry()),
- 'parseNormalFormalParameter_0': new MethodTrampoline(
- 0, (Parser target) => target.parseNormalFormalParameter()),
- 'parsePrefixedIdentifier_0': new MethodTrampoline(
- 0, (Parser target) => target.parsePrefixedIdentifier()),
- 'parseReturnType_0':
- new MethodTrampoline(0, (Parser target) => target.parseReturnType()),
- 'parseSimpleIdentifier_0': new MethodTrampoline(
- 0, (Parser target) => target.parseSimpleIdentifier()),
- 'parseStatement_0':
- new MethodTrampoline(0, (Parser target) => target.parseStatement2()),
- 'parseStringLiteral_0':
- new MethodTrampoline(0, (Parser target) => target.parseStringLiteral()),
- 'parseTypeArgumentList_0': new MethodTrampoline(
- 0, (Parser target) => target.parseTypeArgumentList()),
- 'parseTypeName_0':
- new MethodTrampoline(0, (Parser target) => target.parseTypeName()),
- 'parseTypeParameter_0':
- new MethodTrampoline(0, (Parser target) => target.parseTypeParameter()),
- 'parseTypeParameterList_0': new MethodTrampoline(
- 0, (Parser target) => target.parseTypeParameterList()),
- 'parseWithClause_0':
- new MethodTrampoline(0, (Parser target) => target.parseWithClause()),
- 'advance_0': new MethodTrampoline(0, (Parser target) => target._advance()),
- 'appendScalarValue_5': new MethodTrampoline(
- 5,
- (Parser target, arg0, arg1, arg2, arg3, arg4) =>
- target._appendScalarValue(arg0, arg1, arg2, arg3, arg4)),
- 'computeStringValue_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._computeStringValue(arg0, arg1, arg2)),
- 'convertToFunctionDeclaration_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._convertToFunctionDeclaration(arg0)),
- 'couldBeStartOfCompilationUnitMember_0': new MethodTrampoline(
- 0, (Parser target) => target._couldBeStartOfCompilationUnitMember()),
- 'createSyntheticIdentifier_0': new MethodTrampoline(
- 0, (Parser target) => target._createSyntheticIdentifier()),
- 'createSyntheticKeyword_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._createSyntheticKeyword(arg0)),
- 'createSyntheticStringLiteral_0': new MethodTrampoline(
- 0, (Parser target) => target._createSyntheticStringLiteral()),
- 'createSyntheticToken_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._createSyntheticToken(arg0)),
- 'ensureAssignable_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._ensureAssignable(arg0)),
- 'expect_1':
- new MethodTrampoline(1, (Parser target, arg0) => target._expect(arg0)),
- 'expectGt_0': new MethodTrampoline(0, (Parser target) => target._expectGt()),
- 'expectKeyword_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._expectKeyword(arg0)),
- 'expectSemicolon_0':
- new MethodTrampoline(0, (Parser target) => target._expectSemicolon()),
- 'findRange_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._findRange(arg0, arg1)),
- 'getCodeBlockRanges_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._getCodeBlockRanges(arg0)),
- 'getEndToken_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._getEndToken(arg0)),
- 'injectToken_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._injectToken(arg0)),
- 'isFunctionDeclaration_0': new MethodTrampoline(
- 0, (Parser target) => target._isFunctionDeclaration()),
- 'isFunctionExpression_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._isFunctionExpression(arg0)),
- 'isHexDigit_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._isHexDigit(arg0)),
- 'isInitializedVariableDeclaration_0': new MethodTrampoline(
- 0, (Parser target) => target._isInitializedVariableDeclaration()),
- 'isLinkText_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._isLinkText(arg0, arg1)),
- 'isOperator_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._isOperator(arg0)),
- 'isSwitchMember_0':
- new MethodTrampoline(0, (Parser target) => target._isSwitchMember()),
- 'isTypedIdentifier_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._isTypedIdentifier(arg0)),
- 'lockErrorListener_0':
- new MethodTrampoline(0, (Parser target) => target._lockErrorListener()),
- 'matches_1':
- new MethodTrampoline(1, (Parser target, arg0) => target._matches(arg0)),
- 'matchesGt_0':
- new MethodTrampoline(0, (Parser target) => target._matchesGt()),
- 'matchesIdentifier_0':
- new MethodTrampoline(0, (Parser target) => target._matchesIdentifier()),
- 'matchesKeyword_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._matchesKeyword(arg0)),
- 'matchesString_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._matchesString(arg0)),
- 'optional_1':
- new MethodTrampoline(1, (Parser target, arg0) => target._optional(arg0)),
- 'parseAdditiveExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseAdditiveExpression()),
- 'parseAssertStatement_0': new MethodTrampoline(
- 0, (Parser target) => target._parseAssertStatement()),
- 'parseAssignableExpression_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseAssignableExpression(arg0)),
- 'parseAssignableSelector_2': new MethodTrampoline(
- 2,
- (Parser target, arg0, arg1) =>
- target._parseAssignableSelector(arg0, arg1)),
- 'parseAwaitExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseAwaitExpression()),
- 'parseBitwiseAndExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseBitwiseAndExpression()),
- 'parseBitwiseXorExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseBitwiseXorExpression()),
- 'parseBreakStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseBreakStatement()),
- 'parseCascadeSection_0':
- new MethodTrampoline(0, (Parser target) => target._parseCascadeSection()),
- 'parseClassDeclaration_2': new MethodTrampoline(2,
- (Parser target, arg0, arg1) => target._parseClassDeclaration(arg0, arg1)),
- 'parseClassMembers_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._parseClassMembers(arg0, arg1)),
- 'parseClassTypeAlias_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._parseClassTypeAlias(arg0, arg1, arg2)),
- 'parseCombinator_0':
- new MethodTrampoline(0, (Parser target) => target.parseCombinator()),
- 'parseCombinators_0':
- new MethodTrampoline(0, (Parser target) => target._parseCombinators()),
- 'parseCommentAndMetadata_0': new MethodTrampoline(
- 0, (Parser target) => target._parseCommentAndMetadata()),
- 'parseCommentReference_2': new MethodTrampoline(2,
- (Parser target, arg0, arg1) => target._parseCommentReference(arg0, arg1)),
- 'parseCommentReferences_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseCommentReferences(arg0)),
- 'parseCompilationUnitMember_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseCompilationUnitMember(arg0)),
- 'parseConstExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseConstExpression()),
- 'parseConstructor_8': new MethodTrampoline(
- 8,
- (Parser target, arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) => target
- ._parseConstructor(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)),
- 'parseConstructorFieldInitializer_0': new MethodTrampoline(
- 0, (Parser target) => target._parseConstructorFieldInitializer()),
- 'parseContinueStatement_0': new MethodTrampoline(
- 0, (Parser target) => target._parseContinueStatement()),
- 'parseDirective_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseDirective(arg0)),
- 'parseDirectives_0':
- new MethodTrampoline(0, (Parser target) => target._parseDirectives()),
- 'parseDocumentationComment_0': new MethodTrampoline(
- 0, (Parser target) => target._parseDocumentationComment()),
- 'parseDoStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseDoStatement()),
- 'parseEmptyStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseEmptyStatement()),
- 'parseEnumConstantDeclaration_0': new MethodTrampoline(
- 0, (Parser target) => target._parseEnumConstantDeclaration()),
- 'parseEnumDeclaration_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseEnumDeclaration(arg0)),
- 'parseEqualityExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseEqualityExpression()),
- 'parseExportDirective_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseExportDirective(arg0)),
- 'parseExpressionList_0':
- new MethodTrampoline(0, (Parser target) => target._parseExpressionList()),
- 'parseFinalConstVarOrType_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseFinalConstVarOrType(arg0)),
- 'parseFormalParameter_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseFormalParameter(arg0)),
- 'parseForStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseForStatement()),
- 'parseFunctionBody_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._parseFunctionBody(arg0, arg1, arg2)),
- 'parseFunctionDeclaration_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._parseFunctionDeclaration(arg0, arg1, arg2)),
- 'parseFunctionDeclarationStatement_0': new MethodTrampoline(
- 0, (Parser target) => target._parseFunctionDeclarationStatement()),
- 'parseFunctionDeclarationStatementAfterReturnType_2': new MethodTrampoline(
- 2,
- (Parser target, arg0, arg1) =>
- target._parseFunctionDeclarationStatementAfterReturnType(arg0, arg1)),
- 'parseFunctionTypeAlias_2': new MethodTrampoline(
- 2,
- (Parser target, arg0, arg1) =>
- target._parseFunctionTypeAlias(arg0, arg1)),
- 'parseGetter_4': new MethodTrampoline(
- 4,
- (Parser target, arg0, arg1, arg2, arg3) =>
- target._parseGetter(arg0, arg1, arg2, arg3)),
- 'parseIdentifierList_0':
- new MethodTrampoline(0, (Parser target) => target._parseIdentifierList()),
- 'parseIfStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseIfStatement()),
- 'parseImportDirective_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseImportDirective(arg0)),
- 'parseInitializedIdentifierList_4': new MethodTrampoline(
- 4,
- (Parser target, arg0, arg1, arg2, arg3) =>
- target._parseInitializedIdentifierList(arg0, arg1, arg2, arg3)),
- 'parseInstanceCreationExpression_1': new MethodTrampoline(1,
- (Parser target, arg0) => target._parseInstanceCreationExpression(arg0)),
- 'parseLibraryDirective_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseLibraryDirective(arg0)),
- 'parseLibraryName_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._parseLibraryName(arg0, arg1)),
- 'parseListLiteral_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._parseListLiteral(arg0, arg1)),
- 'parseListOrMapLiteral_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseListOrMapLiteral(arg0)),
- 'parseLogicalAndExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseLogicalAndExpression()),
- 'parseMapLiteral_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._parseMapLiteral(arg0, arg1)),
- 'parseMethodDeclarationAfterParameters_7': new MethodTrampoline(
- 7,
- (Parser target, arg0, arg1, arg2, arg3, arg4, arg5, arg6) =>
- target._parseMethodDeclarationAfterParameters(
- arg0, arg1, arg2, arg3, arg4, arg5, arg6)),
- 'parseMethodDeclarationAfterReturnType_4': new MethodTrampoline(
- 4,
- (Parser target, arg0, arg1, arg2, arg3) => target
- ._parseMethodDeclarationAfterReturnType(arg0, arg1, arg2, arg3)),
- 'parseModifiers_0':
- new MethodTrampoline(0, (Parser target) => target._parseModifiers()),
- 'parseMultiplicativeExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseMultiplicativeExpression()),
- 'parseNativeClause_0':
- new MethodTrampoline(0, (Parser target) => target._parseNativeClause()),
- 'parseNewExpression_0':
- new MethodTrampoline(0, (Parser target) => target._parseNewExpression()),
- 'parseNonLabeledStatement_0': new MethodTrampoline(
- 0, (Parser target) => target._parseNonLabeledStatement()),
- 'parseOperator_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._parseOperator(arg0, arg1, arg2)),
- 'parseOptionalReturnType_0': new MethodTrampoline(
- 0, (Parser target) => target._parseOptionalReturnType()),
- 'parsePartDirective_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parsePartDirective(arg0)),
- 'parsePostfixExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parsePostfixExpression()),
- 'parsePrimaryExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parsePrimaryExpression()),
- 'parseRedirectingConstructorInvocation_0': new MethodTrampoline(
- 0, (Parser target) => target._parseRedirectingConstructorInvocation()),
- 'parseRelationalExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseRelationalExpression()),
- 'parseRethrowExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseRethrowExpression()),
- 'parseReturnStatement_0': new MethodTrampoline(
- 0, (Parser target) => target._parseReturnStatement()),
- 'parseSetter_4': new MethodTrampoline(
- 4,
- (Parser target, arg0, arg1, arg2, arg3) =>
- target._parseSetter(arg0, arg1, arg2, arg3)),
- 'parseShiftExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseShiftExpression()),
- 'parseStatementList_0':
- new MethodTrampoline(0, (Parser target) => target._parseStatementList()),
- 'parseStringInterpolation_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseStringInterpolation(arg0)),
- 'parseSuperConstructorInvocation_0': new MethodTrampoline(
- 0, (Parser target) => target._parseSuperConstructorInvocation()),
- 'parseSwitchStatement_0': new MethodTrampoline(
- 0, (Parser target) => target._parseSwitchStatement()),
- 'parseSymbolLiteral_0':
- new MethodTrampoline(0, (Parser target) => target._parseSymbolLiteral()),
- 'parseThrowExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseThrowExpression()),
- 'parseThrowExpressionWithoutCascade_0': new MethodTrampoline(
- 0, (Parser target) => target._parseThrowExpressionWithoutCascade()),
- 'parseTryStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseTryStatement()),
- 'parseTypeAlias_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._parseTypeAlias(arg0)),
- 'parseUnaryExpression_0': new MethodTrampoline(
- 0, (Parser target) => target._parseUnaryExpression()),
- 'parseVariableDeclaration_0': new MethodTrampoline(
- 0, (Parser target) => target._parseVariableDeclaration()),
- 'parseVariableDeclarationListAfterMetadata_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._parseVariableDeclarationListAfterMetadata(arg0)),
- 'parseVariableDeclarationListAfterType_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._parseVariableDeclarationListAfterType(arg0, arg1, arg2)),
- 'parseVariableDeclarationStatementAfterMetadata_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._parseVariableDeclarationStatementAfterMetadata(arg0)),
- 'parseVariableDeclarationStatementAfterType_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._parseVariableDeclarationStatementAfterType(arg0, arg1, arg2)),
- 'parseWhileStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseWhileStatement()),
- 'parseYieldStatement_0':
- new MethodTrampoline(0, (Parser target) => target._parseYieldStatement()),
- 'peek_0': new MethodTrampoline(0, (Parser target) => target._peek()),
- 'peekAt_1':
- new MethodTrampoline(1, (Parser target, arg0) => target._peekAt(arg0)),
- 'reportError_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._reportError(arg0)),
- 'reportErrorForCurrentToken_2': new MethodTrampoline(
- 2,
- (Parser target, arg0, arg1) =>
- target._reportErrorForCurrentToken(arg0, arg1)),
- 'reportErrorForNode_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._reportErrorForNode(arg0, arg1, arg2)),
- 'reportErrorForToken_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._reportErrorForToken(arg0, arg1, arg2)),
- 'skipBlock_0':
- new MethodTrampoline(0, (Parser target) => target._skipBlock()),
- 'skipFinalConstVarOrType_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipFinalConstVarOrType(arg0)),
- 'skipFormalParameterList_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipFormalParameterList(arg0)),
- 'skipPastMatchingToken_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipPastMatchingToken(arg0)),
- 'skipPrefixedIdentifier_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipPrefixedIdentifier(arg0)),
- 'skipReturnType_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipReturnType(arg0)),
- 'skipSimpleIdentifier_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipSimpleIdentifier(arg0)),
- 'skipStringInterpolation_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipStringInterpolation(arg0)),
- 'skipStringLiteral_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipStringLiteral(arg0)),
- 'skipTypeArgumentList_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipTypeArgumentList(arg0)),
- 'skipTypeName_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipTypeName(arg0)),
- 'skipTypeParameterList_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._skipTypeParameterList(arg0)),
- 'tokenMatches_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._tokenMatches(arg0, arg1)),
- 'tokenMatchesIdentifier_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._tokenMatchesIdentifier(arg0)),
- 'tokenMatchesKeyword_2': new MethodTrampoline(2,
- (Parser target, arg0, arg1) => target._tokenMatchesKeyword(arg0, arg1)),
- 'tokenMatchesString_2': new MethodTrampoline(
- 2, (Parser target, arg0, arg1) => target._tokenMatchesString(arg0, arg1)),
- 'translateCharacter_3': new MethodTrampoline(
- 3,
- (Parser target, arg0, arg1, arg2) =>
- target._translateCharacter(arg0, arg1, arg2)),
- 'unlockErrorListener_0':
- new MethodTrampoline(0, (Parser target) => target._unlockErrorListener()),
- 'validateFormalParameterList_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._validateFormalParameterList(arg0)),
- 'validateModifiersForClass_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._validateModifiersForClass(arg0)),
- 'validateModifiersForConstructor_1': new MethodTrampoline(1,
- (Parser target, arg0) => target._validateModifiersForConstructor(arg0)),
- 'validateModifiersForEnum_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._validateModifiersForEnum(arg0)),
- 'validateModifiersForField_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._validateModifiersForField(arg0)),
- 'validateModifiersForFunctionDeclarationStatement_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._validateModifiersForFunctionDeclarationStatement(arg0)),
- 'validateModifiersForGetterOrSetterOrMethod_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._validateModifiersForGetterOrSetterOrMethod(arg0)),
- 'validateModifiersForOperator_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._validateModifiersForOperator(arg0)),
- 'validateModifiersForTopLevelDeclaration_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._validateModifiersForTopLevelDeclaration(arg0)),
- 'validateModifiersForTopLevelFunction_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._validateModifiersForTopLevelFunction(arg0)),
- 'validateModifiersForTopLevelVariable_1': new MethodTrampoline(
- 1,
- (Parser target, arg0) =>
- target._validateModifiersForTopLevelVariable(arg0)),
- 'validateModifiersForTypedef_1': new MethodTrampoline(
- 1, (Parser target, arg0) => target._validateModifiersForTypedef(arg0)),
-};
-
-Object invokeParserMethodImpl(
- Parser parser, String methodName, List<Object> objects, Token tokenStream) {
- parser.currentToken = tokenStream;
- MethodTrampoline method =
- methodTable_Parser['${methodName}_${objects.length}'];
- if (method == null) {
- throw new IllegalArgumentException('There is no method named $methodName');
- }
- return method.invoke(parser, objects);
-}
+import 'package:analyzer/dart/ast/ast.dart';
+import 'package:analyzer/dart/ast/token.dart';
+import 'package:analyzer/error/error.dart';
+import 'package:analyzer/error/listener.dart';
+import 'package:analyzer/src/dart/ast/ast.dart';
+import 'package:analyzer/src/dart/ast/token.dart';
+import 'package:analyzer/src/dart/error/syntactic_errors.dart';
+import 'package:analyzer/src/dart/scanner/reader.dart';
+import 'package:analyzer/src/dart/scanner/scanner.dart';
+import 'package:analyzer/src/error/codes.dart';
+import 'package:analyzer/src/generated/engine.dart' show AnalysisEngine;
+import 'package:analyzer/src/generated/java_core.dart';
+import 'package:analyzer/src/generated/java_engine.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/generated/utilities_dart.dart';
+
+export 'package:analyzer/src/dart/ast/utilities.dart' show ResolutionCopier;
+export 'package:analyzer/src/dart/error/syntactic_errors.dart';
/**
* A simple data-holder for a method that needs to return multiple values.
@@ -477,7 +36,7 @@ class CommentAndMetadata {
final Comment comment;
/**
- * The metadata that was parsed.
+ * The metadata that was parsed, or `null` if none was given.
*/
final List<Annotation> metadata;
@@ -485,6 +44,11 @@ class CommentAndMetadata {
* Initialize a newly created holder with the given [comment] and [metadata].
*/
CommentAndMetadata(this.comment, this.metadata);
+
+ /**
+ * Return `true` if some metadata was parsed.
+ */
+ bool get hasMetadata => metadata != null && metadata.isNotEmpty;
}
/**
@@ -508,4293 +72,1431 @@ class FinalConstVarOrType {
}
/**
- * A dispatcher that will invoke the right parse method when re-parsing a
- * specified child of the visited node. All of the methods in this class assume
- * that the parser is positioned to parse the replacement for the node. All of
- * the methods will throw an [IncrementalParseException] if the node could not
- * be parsed for some reason.
+ * A simple data-holder for a method that needs to return multiple values.
*/
-class IncrementalParseDispatcher implements AstVisitor<AstNode> {
+class Modifiers {
/**
- * The parser used to parse the replacement for the node.
+ * The token representing the keyword 'abstract', or `null` if the keyword was
+ * not found.
*/
- final Parser _parser;
+ Token abstractKeyword;
/**
- * The node that is to be replaced.
+ * The token representing the keyword 'const', or `null` if the keyword was
+ * not found.
*/
- final AstNode _oldNode;
+ Token constKeyword;
/**
- * Initialize a newly created dispatcher to parse a single node that will
- * use the [_parser] to replace the [_oldNode].
+ * The token representing the keyword 'external', or `null` if the keyword was
+ * not found.
*/
- IncrementalParseDispatcher(this._parser, this._oldNode);
+ Token externalKeyword;
- @override
- AstNode visitAdjacentStrings(AdjacentStrings node) {
- if (node.strings.contains(_oldNode)) {
- return _parser.parseStringLiteral();
- }
- return _notAChild(node);
- }
+ /**
+ * The token representing the keyword 'factory', or `null` if the keyword was
+ * not found.
+ */
+ Token factoryKeyword;
- @override
- AstNode visitAnnotation(Annotation node) {
- if (identical(_oldNode, node.name)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.constructorName)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.arguments)) {
- return _parser.parseArgumentList();
- }
- return _notAChild(node);
- }
+ /**
+ * The token representing the keyword 'final', or `null` if the keyword was
+ * not found.
+ */
+ Token finalKeyword;
- @override
- AstNode visitArgumentList(ArgumentList node) {
- if (node.arguments.contains(_oldNode)) {
- return _parser.parseArgument();
- }
- return _notAChild(node);
- }
+ /**
+ * The token representing the keyword 'static', or `null` if the keyword was
+ * not found.
+ */
+ Token staticKeyword;
- @override
- AstNode visitAsExpression(AsExpression node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseBitwiseOrExpression();
- } else if (identical(_oldNode, node.type)) {
- return _parser.parseTypeName();
- }
- return _notAChild(node);
- }
+ /**
+ * The token representing the keyword 'var', or `null` if the keyword was not
+ * found.
+ */
+ Token varKeyword;
@override
- AstNode visitAssertStatement(AssertStatement node) {
- if (identical(_oldNode, node.condition)) {
- return _parser.parseExpression2();
- }
- return _notAChild(node);
+ String toString() {
+ StringBuffer buffer = new StringBuffer();
+ bool needsSpace = _appendKeyword(buffer, false, abstractKeyword);
+ needsSpace = _appendKeyword(buffer, needsSpace, constKeyword);
+ needsSpace = _appendKeyword(buffer, needsSpace, externalKeyword);
+ needsSpace = _appendKeyword(buffer, needsSpace, factoryKeyword);
+ needsSpace = _appendKeyword(buffer, needsSpace, finalKeyword);
+ needsSpace = _appendKeyword(buffer, needsSpace, staticKeyword);
+ _appendKeyword(buffer, needsSpace, varKeyword);
+ return buffer.toString();
}
- @override
- AstNode visitAssignmentExpression(AssignmentExpression node) {
- if (identical(_oldNode, node.leftHandSide)) {
- // TODO(brianwilkerson) If the assignment is part of a cascade section,
- // then we don't have a single parse method that will work.
- // Otherwise, we can parse a conditional expression, but need to ensure
- // that the resulting expression is assignable.
-// return parser.parseConditionalExpression();
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.rightHandSide)) {
- if (_isCascadeAllowedInAssignment(node)) {
- return _parser.parseExpression2();
+ /**
+ * If the given [keyword] is not `null`, append it to the given [builder],
+ * prefixing it with a space if [needsSpace] is `true`. Return `true` if
+ * subsequent keywords need to be prefixed with a space.
+ */
+ bool _appendKeyword(StringBuffer buffer, bool needsSpace, Token keyword) {
+ if (keyword != null) {
+ if (needsSpace) {
+ buffer.writeCharCode(0x20);
}
- return _parser.parseExpressionWithoutCascade();
+ buffer.write(keyword.lexeme);
+ return true;
}
- return _notAChild(node);
+ return needsSpace;
}
+}
- @override
- AstNode visitAwaitExpression(AwaitExpression node) {
- if (identical(_oldNode, node.expression)) {
- // TODO(brianwilkerson) Depending on precedence,
- // this might not be sufficient.
- return _parser.parseExpression2();
- }
- return _notAChild(node);
- }
+/**
+ * A parser used to parse tokens into an AST structure.
+ */
+class Parser {
+ static String ASYNC = "async";
- @override
- AstNode visitBinaryExpression(BinaryExpression node) {
- if (identical(_oldNode, node.leftOperand)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.rightOperand)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
- }
+ static String _AWAIT = "await";
- @override
- AstNode visitBlock(Block node) {
- if (node.statements.contains(_oldNode)) {
- return _parser.parseStatement2();
- }
- return _notAChild(node);
- }
+ static String _HIDE = "hide";
- @override
- AstNode visitBlockFunctionBody(BlockFunctionBody node) {
- if (identical(_oldNode, node.block)) {
- return _parser.parseBlock();
- }
- return _notAChild(node);
- }
+ static String _OF = "of";
- @override
- AstNode visitBooleanLiteral(BooleanLiteral node) => _notAChild(node);
+ static String _ON = "on";
- @override
- AstNode visitBreakStatement(BreakStatement node) {
- if (identical(_oldNode, node.label)) {
- return _parser.parseSimpleIdentifier();
- }
- return _notAChild(node);
- }
+ static String _NATIVE = "native";
- @override
- AstNode visitCascadeExpression(CascadeExpression node) {
- if (identical(_oldNode, node.target)) {
- return _parser.parseConditionalExpression();
- } else if (node.cascadeSections.contains(_oldNode)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
- }
+ static String _SHOW = "show";
- @override
- AstNode visitCatchClause(CatchClause node) {
- if (identical(_oldNode, node.exceptionType)) {
- return _parser.parseTypeName();
- } else if (identical(_oldNode, node.exceptionParameter)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.stackTraceParameter)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.body)) {
- return _parser.parseBlock();
- }
- return _notAChild(node);
- }
+ static String SYNC = "sync";
- @override
- AstNode visitClassDeclaration(ClassDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- // Changing the class name changes whether a member is interpreted as a
- // constructor or not, so we'll just have to re-parse the entire class.
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.typeParameters)) {
- return _parser.parseTypeParameterList();
- } else if (identical(_oldNode, node.extendsClause)) {
- return _parser.parseExtendsClause();
- } else if (identical(_oldNode, node.withClause)) {
- return _parser.parseWithClause();
- } else if (identical(_oldNode, node.implementsClause)) {
- return _parser.parseImplementsClause();
- } else if (node.members.contains(_oldNode)) {
- ClassMember member = _parser.parseClassMember(node.name.name);
- if (member == null) {
- throw new InsufficientContextException();
- }
- return member;
- }
- return _notAChild(node);
- }
+ static String _YIELD = "yield";
- @override
- AstNode visitClassTypeAlias(ClassTypeAlias node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.typeParameters)) {
- return _parser.parseTypeParameterList();
- } else if (identical(_oldNode, node.superclass)) {
- return _parser.parseTypeName();
- } else if (identical(_oldNode, node.withClause)) {
- return _parser.parseWithClause();
- } else if (identical(_oldNode, node.implementsClause)) {
- return _parser.parseImplementsClause();
- }
- return _notAChild(node);
- }
+ /**
+ * The source being parsed.
+ */
+ final Source _source;
- @override
- AstNode visitComment(Comment node) {
- throw new InsufficientContextException();
- }
+ /**
+ * The error listener that will be informed of any errors that are found
+ * during the parse.
+ */
+ final AnalysisErrorListener _errorListener;
- @override
- AstNode visitCommentReference(CommentReference node) {
- if (identical(_oldNode, node.identifier)) {
- return _parser.parsePrefixedIdentifier();
- }
- return _notAChild(node);
- }
+ /**
+ * An [_errorListener] lock, if more than `0`, then errors are not reported.
+ */
+ int _errorListenerLock = 0;
- @override
- AstNode visitCompilationUnit(CompilationUnit node) {
- throw new InsufficientContextException();
- }
+ /**
+ * A flag indicating whether the parser is to parse asserts in the initializer
+ * list of a constructor.
+ */
+ bool _enableAssertInitializer = false;
- @override
- AstNode visitConditionalExpression(ConditionalExpression node) {
- if (identical(_oldNode, node.condition)) {
- return _parser.parseIfNullExpression();
- } else if (identical(_oldNode, node.thenExpression)) {
- return _parser.parseExpressionWithoutCascade();
- } else if (identical(_oldNode, node.elseExpression)) {
- return _parser.parseExpressionWithoutCascade();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether the parser is to parse the non-nullable modifier
+ * in type names.
+ */
+ bool _enableNnbd = false;
- @override
- AstNode visitConstructorDeclaration(ConstructorDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.returnType)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.name)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.parameters)) {
- return _parser.parseFormalParameterList();
- } else if (identical(_oldNode, node.redirectedConstructor)) {
- throw new InsufficientContextException();
- } else if (node.initializers.contains(_oldNode)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.body)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether parser is to parse function bodies.
+ */
+ bool _parseFunctionBodies = true;
- @override
- AstNode visitConstructorFieldInitializer(ConstructorFieldInitializer node) {
- if (identical(_oldNode, node.fieldName)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.expression)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
- }
+ /**
+ * The next token to be parsed.
+ */
+ Token _currentToken;
- @override
- AstNode visitConstructorName(ConstructorName node) {
- if (identical(_oldNode, node.type)) {
- return _parser.parseTypeName();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether the parser is currently in a function body marked
+ * as being 'async'.
+ */
+ bool _inAsync = false;
- @override
- AstNode visitContinueStatement(ContinueStatement node) {
- if (identical(_oldNode, node.label)) {
- return _parser.parseSimpleIdentifier();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether the parser is currently in a function body marked
+ * (by a star) as being a generator.
+ */
+ bool _inGenerator = false;
- @override
- AstNode visitDeclaredIdentifier(DeclaredIdentifier node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.type)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.identifier)) {
- return _parser.parseSimpleIdentifier();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether the parser is currently in the body of a loop.
+ */
+ bool _inLoop = false;
- @override
- AstNode visitDefaultFormalParameter(DefaultFormalParameter node) {
- if (identical(_oldNode, node.parameter)) {
- return _parser.parseNormalFormalParameter();
- } else if (identical(_oldNode, node.defaultValue)) {
- return _parser.parseExpression2();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether the parser is currently in a switch statement.
+ */
+ bool _inSwitch = false;
- @override
- AstNode visitDoStatement(DoStatement node) {
- if (identical(_oldNode, node.body)) {
- return _parser.parseStatement2();
- } else if (identical(_oldNode, node.condition)) {
- return _parser.parseExpression2();
- }
- return _notAChild(node);
- }
+ /**
+ * A flag indicating whether the parser is currently in a constructor field
+ * initializer, with no intervening parentheses, braces, or brackets.
+ */
+ bool _inInitializer = false;
- @override
- AstNode visitDoubleLiteral(DoubleLiteral node) => _notAChild(node);
+ /**
+ * A flag indicating whether the parser is to parse generic method syntax.
+ */
+ bool parseGenericMethods = false;
- @override
- AstNode visitEmptyFunctionBody(EmptyFunctionBody node) => _notAChild(node);
+ /**
+ * A flag indicating whether to parse generic method comments, of the form
+ * `/*=T*/` and `/*<T>*/`.
+ */
+ bool parseGenericMethodComments = false;
- @override
- AstNode visitEmptyStatement(EmptyStatement node) => _notAChild(node);
+ /**
+ * Initialize a newly created parser to parse tokens in the given [_source]
+ * and to report any errors that are found to the given [_errorListener].
+ */
+ Parser(this._source, this._errorListener);
- @override
- AstNode visitEnumConstantDeclaration(EnumConstantDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- }
- return _notAChild(node);
- }
+ /**
+ * Return the current token.
+ */
+ Token get currentToken => _currentToken;
- @override
- AstNode visitEnumDeclaration(EnumDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- } else if (node.constants.contains(_oldNode)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
+ /**
+ * Set the token with which the parse is to begin to the given [token].
+ */
+ void set currentToken(Token token) {
+ this._currentToken = token;
}
- @override
- AstNode visitExportDirective(ExportDirective node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.uri)) {
- return _parser.parseStringLiteral();
- } else if (node.combinators.contains(_oldNode)) {
- throw new IncrementalParseException();
- //return parser.parseCombinator();
- }
- return _notAChild(node);
- }
+ /**
+ * Return `true` if the parser is to parse asserts in the initializer list of
+ * a constructor.
+ */
+ bool get enableAssertInitializer => _enableAssertInitializer;
- @override
- AstNode visitExpressionFunctionBody(ExpressionFunctionBody node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
- }
- return _notAChild(node);
+ /**
+ * Set whether the parser is to parse asserts in the initializer list of a
+ * constructor to match the given [enable] flag.
+ */
+ void set enableAssertInitializer(bool enable) {
+ _enableAssertInitializer = enable;
}
- @override
- AstNode visitExpressionStatement(ExpressionStatement node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
- }
- return _notAChild(node);
- }
+ /**
+ * Return `true` if the parser is to parse the non-nullable modifier in type
+ * names.
+ */
+ bool get enableNnbd => _enableNnbd;
- @override
- AstNode visitExtendsClause(ExtendsClause node) {
- if (identical(_oldNode, node.superclass)) {
- return _parser.parseTypeName();
- }
- return _notAChild(node);
+ /**
+ * Set whether the parser is to parse the non-nullable modifier in type names
+ * to match the given [enable] flag.
+ */
+ void set enableNnbd(bool enable) {
+ _enableNnbd = enable;
}
- @override
- AstNode visitFieldDeclaration(FieldDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.fields)) {
- throw new InsufficientContextException();
+ /**
+ * Return `true` if the current token is the first token of a return type that
+ * is followed by an identifier, possibly followed by a list of type
+ * parameters, followed by a left-parenthesis. This is used by
+ * [parseTypeAlias] to determine whether or not to parse a return type.
+ */
+ @deprecated
+ bool get hasReturnTypeInTypeAlias {
+ Token next = skipReturnType(_currentToken);
+ if (next == null) {
+ return false;
}
- return _notAChild(node);
+ return _tokenMatchesIdentifier(next);
}
- @override
- AstNode visitFieldFormalParameter(FieldFormalParameter node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.type)) {
- return _parser.parseTypeName();
- } else if (identical(_oldNode, node.identifier)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.parameters)) {
- return _parser.parseFormalParameterList();
- }
- return _notAChild(node);
- }
+ /**
+ * Set whether the parser is to parse the async support.
+ *
+ * Support for removing the 'async' library has been removed.
+ */
+ @deprecated
+ void set parseAsync(bool parseAsync) {}
- @override
- AstNode visitForEachStatement(ForEachStatement node) {
- if (identical(_oldNode, node.loopVariable)) {
- throw new InsufficientContextException();
- //return parser.parseDeclaredIdentifier();
- } else if (identical(_oldNode, node.identifier)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.body)) {
- return _parser.parseStatement2();
- }
- return _notAChild(node);
- }
+ @deprecated
+ bool get parseConditionalDirectives => true;
- @override
- AstNode visitFormalParameterList(FormalParameterList node) {
- // We don't know which kind of parameter to parse.
- throw new InsufficientContextException();
+ @deprecated
+ void set parseConditionalDirectives(bool value) {}
+
+ /**
+ * Set whether parser is to parse function bodies.
+ */
+ void set parseFunctionBodies(bool parseFunctionBodies) {
+ this._parseFunctionBodies = parseFunctionBodies;
}
- @override
- AstNode visitForStatement(ForStatement node) {
- if (identical(_oldNode, node.variables)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.initialization)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.condition)) {
- return _parser.parseExpression2();
- } else if (node.updaters.contains(_oldNode)) {
- return _parser.parseExpression2();
- } else if (identical(_oldNode, node.body)) {
- return _parser.parseStatement2();
+ /**
+ * Return the content of a string with the given literal representation. The
+ * [lexeme] is the literal representation of the string. The flag [isFirst] is
+ * `true` if this is the first token in a string literal. The flag [isLast] is
+ * `true` if this is the last token in a string literal.
+ */
+ String computeStringValue(String lexeme, bool isFirst, bool isLast) {
+ StringLexemeHelper helper = new StringLexemeHelper(lexeme, isFirst, isLast);
+ int start = helper.start;
+ int end = helper.end;
+ bool stringEndsAfterStart = end >= start;
+ assert(stringEndsAfterStart);
+ if (!stringEndsAfterStart) {
+ AnalysisEngine.instance.logger.logError(
+ "Internal error: computeStringValue($lexeme, $isFirst, $isLast)");
+ return "";
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitFunctionDeclaration(FunctionDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.returnType)) {
- return _parser.parseReturnType();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.functionExpression)) {
- throw new InsufficientContextException();
+ if (helper.isRaw) {
+ return lexeme.substring(start, end);
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitFunctionDeclarationStatement(FunctionDeclarationStatement node) {
- if (identical(_oldNode, node.functionDeclaration)) {
- throw new InsufficientContextException();
+ StringBuffer buffer = new StringBuffer();
+ int index = start;
+ while (index < end) {
+ index = _translateCharacter(buffer, lexeme, index);
}
- return _notAChild(node);
+ return buffer.toString();
}
- @override
- AstNode visitFunctionExpression(FunctionExpression node) {
- if (identical(_oldNode, node.parameters)) {
- return _parser.parseFormalParameterList();
- } else if (identical(_oldNode, node.body)) {
- throw new InsufficientContextException();
+ /**
+ * Return a synthetic identifier.
+ */
+ SimpleIdentifier createSyntheticIdentifier({bool isDeclaration: false}) {
+ Token syntheticToken;
+ if (_currentToken.type == TokenType.KEYWORD) {
+ // Consider current keyword token as an identifier.
+ // It is not always true, e.g. "^is T" where "^" is place the place for
+ // synthetic identifier. By creating SyntheticStringToken we can
+ // distinguish a real identifier from synthetic. In the code completion
+ // behavior will depend on a cursor position - before or on "is".
+ syntheticToken = _injectToken(new SyntheticStringToken(
+ TokenType.IDENTIFIER, _currentToken.lexeme, _currentToken.offset));
+ } else {
+ syntheticToken = _createSyntheticToken(TokenType.IDENTIFIER);
}
- return _notAChild(node);
+ return new SimpleIdentifier(syntheticToken, isDeclaration: isDeclaration);
}
- @override
- AstNode visitFunctionExpressionInvocation(FunctionExpressionInvocation node) {
- if (identical(_oldNode, node.function)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.argumentList)) {
- return _parser.parseArgumentList();
- }
- return _notAChild(node);
- }
+ /**
+ * Return a synthetic string literal.
+ */
+ SimpleStringLiteral createSyntheticStringLiteral() =>
+ new SimpleStringLiteral(_createSyntheticToken(TokenType.STRING), "");
- @override
- AstNode visitFunctionTypeAlias(FunctionTypeAlias node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.returnType)) {
- return _parser.parseReturnType();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.typeParameters)) {
- return _parser.parseTypeParameterList();
- } else if (identical(_oldNode, node.parameters)) {
- return _parser.parseFormalParameterList();
- }
- return _notAChild(node);
+ /**
+ * Advance to the next token in the token stream, making it the new current
+ * token and return the token that was current before this method was invoked.
+ */
+ Token getAndAdvance() {
+ Token token = _currentToken;
+ _currentToken = _currentToken.next;
+ return token;
}
- @override
- AstNode visitFunctionTypedFormalParameter(FunctionTypedFormalParameter node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.returnType)) {
- return _parser.parseReturnType();
- } else if (identical(_oldNode, node.identifier)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.parameters)) {
- return _parser.parseFormalParameterList();
+ /**
+ * Return `true` if the current token appears to be the beginning of a
+ * function declaration.
+ */
+ bool isFunctionDeclaration() {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.VOID) {
+ return true;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitHideCombinator(HideCombinator node) {
- if (node.hiddenNames.contains(_oldNode)) {
- return _parser.parseSimpleIdentifier();
+ Token afterReturnType = skipTypeName(_currentToken);
+ if (afterReturnType == null) {
+ // There was no return type, but it is optional, so go back to where we
+ // started.
+ afterReturnType = _currentToken;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitIfStatement(IfStatement node) {
- if (identical(_oldNode, node.condition)) {
- return _parser.parseExpression2();
- } else if (identical(_oldNode, node.thenStatement)) {
- return _parser.parseStatement2();
- } else if (identical(_oldNode, node.elseStatement)) {
- return _parser.parseStatement2();
+ Token afterIdentifier = skipSimpleIdentifier(afterReturnType);
+ if (afterIdentifier == null) {
+ // It's possible that we parsed the function name as if it were a type
+ // name, so see whether it makes sense if we assume that there is no type.
+ afterIdentifier = skipSimpleIdentifier(_currentToken);
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitImplementsClause(ImplementsClause node) {
- if (node.interfaces.contains(node)) {
- return _parser.parseTypeName();
+ if (afterIdentifier == null) {
+ return false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitImportDirective(ImportDirective node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.uri)) {
- return _parser.parseStringLiteral();
- } else if (identical(_oldNode, node.prefix)) {
- return _parser.parseSimpleIdentifier();
- } else if (node.combinators.contains(_oldNode)) {
- return _parser.parseCombinator();
+ if (isFunctionExpression(afterIdentifier)) {
+ return true;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitIndexExpression(IndexExpression node) {
- if (identical(_oldNode, node.target)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.index)) {
- return _parser.parseExpression2();
+ // It's possible that we have found a getter. While this isn't valid at this
+ // point we test for it in order to recover better.
+ if (keyword == Keyword.GET) {
+ Token afterName = skipSimpleIdentifier(_currentToken.next);
+ if (afterName == null) {
+ return false;
+ }
+ TokenType type = afterName.type;
+ return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET;
+ } else if (_tokenMatchesKeyword(afterReturnType, Keyword.GET)) {
+ Token afterName = skipSimpleIdentifier(afterReturnType.next);
+ if (afterName == null) {
+ return false;
+ }
+ TokenType type = afterName.type;
+ return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET;
}
- return _notAChild(node);
+ return false;
}
- @override
- AstNode visitInstanceCreationExpression(InstanceCreationExpression node) {
- if (identical(_oldNode, node.constructorName)) {
- return _parser.parseConstructorName();
- } else if (identical(_oldNode, node.argumentList)) {
- return _parser.parseArgumentList();
+ /**
+ * Return `true` if the given [token] appears to be the beginning of a
+ * function expression.
+ */
+ bool isFunctionExpression(Token token) {
+ // Function expressions aren't allowed in initializer lists.
+ if (_inInitializer) {
+ return false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitIntegerLiteral(IntegerLiteral node) => _notAChild(node);
-
- @override
- AstNode visitInterpolationExpression(InterpolationExpression node) {
- if (identical(_oldNode, node.expression)) {
- if (node.leftBracket == null) {
- throw new InsufficientContextException();
- //return parser.parseThisOrSimpleIdentifier();
- }
- return _parser.parseExpression2();
+ Token afterTypeParameters = _skipTypeParameterList(token);
+ if (afterTypeParameters == null) {
+ afterTypeParameters = token;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitInterpolationString(InterpolationString node) {
- throw new InsufficientContextException();
- }
-
- @override
- AstNode visitIsExpression(IsExpression node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseBitwiseOrExpression();
- } else if (identical(_oldNode, node.type)) {
- return _parser.parseTypeName();
+ Token afterParameters = _skipFormalParameterList(afterTypeParameters);
+ if (afterParameters == null) {
+ return false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitLabel(Label node) {
- if (identical(_oldNode, node.label)) {
- return _parser.parseSimpleIdentifier();
+ if (afterParameters.matchesAny(
+ const <TokenType>[TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION])) {
+ return true;
}
- return _notAChild(node);
+ String lexeme = afterParameters.lexeme;
+ return lexeme == ASYNC || lexeme == SYNC;
}
- @override
- AstNode visitLabeledStatement(LabeledStatement node) {
- if (node.labels.contains(_oldNode)) {
- return _parser.parseLabel();
- } else if (identical(_oldNode, node.statement)) {
- return _parser.parseStatement2();
+ /**
+ * Return `true` if the current token is the first token in an initialized
+ * variable declaration rather than an expression. This method assumes that we
+ * have already skipped past any metadata that might be associated with the
+ * declaration.
+ *
+ * initializedVariableDeclaration ::=
+ * declaredIdentifier ('=' expression)? (',' initializedIdentifier)*
+ *
+ * declaredIdentifier ::=
+ * metadata finalConstVarOrType identifier
+ *
+ * finalConstVarOrType ::=
+ * 'final' type?
+ * | 'const' type?
+ * | 'var'
+ * | type
+ *
+ * type ::=
+ * qualified typeArguments?
+ *
+ * initializedIdentifier ::=
+ * identifier ('=' expression)?
+ */
+ bool isInitializedVariableDeclaration() {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.FINAL || keyword == Keyword.VAR) {
+ // An expression cannot start with a keyword other than 'const',
+ // 'rethrow', or 'throw'.
+ return true;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitLibraryDirective(LibraryDirective node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseLibraryIdentifier();
+ if (keyword == Keyword.CONST) {
+ // Look to see whether we might be at the start of a list or map literal;
+ // otherwise this should be the start of a variable declaration.
+ return !_peek().matchesAny(const <TokenType>[
+ TokenType.LT,
+ TokenType.OPEN_CURLY_BRACKET,
+ TokenType.OPEN_SQUARE_BRACKET,
+ TokenType.INDEX
+ ]);
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitLibraryIdentifier(LibraryIdentifier node) {
- if (node.components.contains(_oldNode)) {
- return _parser.parseSimpleIdentifier();
+ bool allowAdditionalTokens = true;
+ // We know that we have an identifier, and need to see whether it might be
+ // a type name.
+ if (_currentToken.type != TokenType.IDENTIFIER) {
+ allowAdditionalTokens = false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitListLiteral(ListLiteral node) {
- if (identical(_oldNode, node.typeArguments)) {
- return _parser.parseTypeArgumentList();
- } else if (node.elements.contains(_oldNode)) {
- return _parser.parseExpression2();
+ Token token = skipTypeName(_currentToken);
+ if (token == null) {
+ // There was no type name, so this can't be a declaration.
+ return false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitMapLiteral(MapLiteral node) {
- if (identical(_oldNode, node.typeArguments)) {
- return _parser.parseTypeArgumentList();
- } else if (node.entries.contains(_oldNode)) {
- return _parser.parseMapLiteralEntry();
+ if (token.type != TokenType.IDENTIFIER) {
+ allowAdditionalTokens = false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitMapLiteralEntry(MapLiteralEntry node) {
- if (identical(_oldNode, node.key)) {
- return _parser.parseExpression2();
- } else if (identical(_oldNode, node.value)) {
- return _parser.parseExpression2();
+ token = skipSimpleIdentifier(token);
+ if (token == null) {
+ return false;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitMethodDeclaration(MethodDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.returnType)) {
- throw new InsufficientContextException();
- //return parser.parseTypeName();
- //return parser.parseReturnType();
- } else if (identical(_oldNode, node.name)) {
- if (node.operatorKeyword != null) {
- throw new InsufficientContextException();
- }
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.body)) {
- //return parser.parseFunctionBody();
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.parameters)) {
- // TODO(paulberry): if we want errors to be correct, we'll need to also
- // call _validateFormalParameterList, and sometimes
- // _validateModifiersForGetterOrSetterOrMethod.
- return _parser.parseFormalParameterList();
- }
- return _notAChild(node);
- }
-
- @override
- AstNode visitMethodInvocation(MethodInvocation node) {
- if (identical(_oldNode, node.target)) {
- throw new IncrementalParseException();
- } else if (identical(_oldNode, node.methodName)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.argumentList)) {
- return _parser.parseArgumentList();
+ TokenType type = token.type;
+ // Usual cases in valid code:
+ // String v = '';
+ // String v, v2;
+ // String v;
+ // for (String item in items) {}
+ if (type == TokenType.EQ ||
+ type == TokenType.COMMA ||
+ type == TokenType.SEMICOLON ||
+ token.keyword == Keyword.IN) {
+ return true;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitNamedExpression(NamedExpression node) {
- if (identical(_oldNode, node.name)) {
- return _parser.parseLabel();
- } else if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
+ // It is OK to parse as a variable declaration in these cases:
+ // String v }
+ // String v if (true) print('OK');
+ // String v { print(42); }
+ // ...but not in these cases:
+ // get getterName {
+ // String get getterName
+ if (allowAdditionalTokens) {
+ if (type == TokenType.CLOSE_CURLY_BRACKET ||
+ type == TokenType.KEYWORD ||
+ type == TokenType.IDENTIFIER ||
+ type == TokenType.OPEN_CURLY_BRACKET) {
+ return true;
+ }
}
- return _notAChild(node);
+ return false;
}
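For illustration, a small Dart sketch of the statement forms this look-ahead distinguishes (identifiers invented for the example):

    // Illustrative only: the first three statements are initialized variable
    // declarations; the 'const' line starts a list literal instead.
    void demoDeclarations(List<String> items) {
      String v = '';                // type identifier '=' expression
      String v2 = 'a', v3 = 'b';    // ',' introduces further initialized identifiers
      for (String item in items) {} // 'in' after the identifier also signals a declaration
      const [1, 2];                 // 'const' followed by '[' is a literal expression
      print('$v $v2 $v3');
    }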
- @override
- AstNode visitNativeClause(NativeClause node) {
- if (identical(_oldNode, node.name)) {
- return _parser.parseStringLiteral();
+ /**
+ * Return `true` if the current token appears to be the beginning of a switch
+ * member.
+ */
+ bool isSwitchMember() {
+ Token token = _currentToken;
+ while (_tokenMatches(token, TokenType.IDENTIFIER) &&
+ _tokenMatches(token.next, TokenType.COLON)) {
+ token = token.next.next;
}
- return _notAChild(node);
+ Keyword keyword = token.keyword;
+ return keyword == Keyword.CASE || keyword == Keyword.DEFAULT;
}
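For illustration, a Dart sketch of switch members, where zero or more labels may precede 'case' or 'default' (the label and values are invented for the example):

    // Illustrative only: 'start:' is a label; the member still begins with it.
    void demoSwitchMembers(int code) {
      switch (code) {
        start:
        case 1:
          print('one');
          break;
        default:
          print('other');
      }
    }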
- @override
- AstNode visitNativeFunctionBody(NativeFunctionBody node) {
- if (identical(_oldNode, node.stringLiteral)) {
- return _parser.parseStringLiteral();
+ /**
+ * Parse an additive expression. Return the additive expression that was
+ * parsed.
+ *
+ * additiveExpression ::=
+ * multiplicativeExpression (additiveOperator multiplicativeExpression)*
+ * | 'super' (additiveOperator multiplicativeExpression)+
+ */
+ Expression parseAdditiveExpression() {
+ Expression expression;
+ if (_currentToken.keyword == Keyword.SUPER &&
+ _currentToken.next.type.isAdditiveOperator) {
+ expression = new SuperExpression(getAndAdvance());
+ } else {
+ expression = parseMultiplicativeExpression();
+ }
+ while (_currentToken.type.isAdditiveOperator) {
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseMultiplicativeExpression());
}
- return _notAChild(node);
+ return expression;
}
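For illustration, a Dart sketch of both alternatives of the production; the 'super' form requires the superclass to define the operator (the class names are invented for the example):

    // Illustrative only: additive chains and the 'super'-prefixed alternative.
    class Base {
      int operator +(int other) => 1 + other;
    }

    class Derived extends Base {
      // 'super' (additiveOperator multiplicativeExpression)+
      int demo(int x) => super + x - 2;
    }

    void main() {
      print(new Derived().demo(5)); // (1 + 5) - 2 == 4
      print(1 + 2 * 3 - 4);         // multiplicative binds tighter: prints 3
    }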
- @override
- AstNode visitNullLiteral(NullLiteral node) => _notAChild(node);
-
- @override
- AstNode visitParenthesizedExpression(ParenthesizedExpression node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
+ /**
+ * Parse an annotation. Return the annotation that was parsed.
+ *
+ * This method assumes that the current token matches [TokenType.AT].
+ *
+ * annotation ::=
+ * '@' qualified ('.' identifier)? arguments?
+ */
+ Annotation parseAnnotation() {
+ Token atSign = getAndAdvance();
+ Identifier name = parsePrefixedIdentifier();
+ Token period = null;
+ SimpleIdentifier constructorName = null;
+ if (_matches(TokenType.PERIOD)) {
+ period = getAndAdvance();
+ constructorName = parseSimpleIdentifier();
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitPartDirective(PartDirective node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.uri)) {
- return _parser.parseStringLiteral();
+ ArgumentList arguments = null;
+ if (_matches(TokenType.OPEN_PAREN)) {
+ arguments = parseArgumentList();
}
- return _notAChild(node);
+ return new Annotation(atSign, name, period, constructorName, arguments);
}
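For illustration, a Dart sketch of the annotation forms covered by the production ('Todo' and 'reviewed' are invented for the example):

    // Illustrative only: '@' qualified ('.' identifier)? arguments?
    class Todo {
      final String note;
      const Todo(this.note);
      const Todo.urgent() : note = 'urgent';
    }

    const reviewed = 'reviewed';

    @reviewed          // qualified name, no arguments
    @Todo('add tests') // qualified name with an argument list
    @Todo.urgent()     // qualified name '.' constructor identifier with arguments
    void annotated() {}

    void main() => annotated();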
- @override
- AstNode visitPartOfDirective(PartOfDirective node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.libraryName)) {
- return _parser.parseLibraryIdentifier();
+ /**
+ * Parse an argument. Return the argument that was parsed.
+ *
+ * argument ::=
+ * namedArgument
+ * | expression
+ *
+ * namedArgument ::=
+ * label expression
+ */
+ Expression parseArgument() {
+ // TODO(brianwilkerson) Consider returning a wrapper indicating whether the
+ // expression is a named expression in order to remove the 'is' check in
+ // 'parseArgumentList'.
+ //
+ // Both namedArgument and expression can start with an identifier, but only
+ // namedArgument can have an identifier followed by a colon.
+ //
+ if (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) {
+ return new NamedExpression(parseLabel(), parseExpression2());
+ } else {
+ return parseExpression2();
}
- return _notAChild(node);
}
- @override
- AstNode visitPostfixExpression(PostfixExpression node) {
- if (identical(_oldNode, node.operand)) {
- throw new InsufficientContextException();
+ /**
+ * Parse a list of arguments. Return the argument list that was parsed.
+ *
+ * This method assumes that the current token matches [TokenType.OPEN_PAREN].
+ *
+ * arguments ::=
+ * '(' argumentList? ')'
+ *
+ * argumentList ::=
+ * namedArgument (',' namedArgument)*
+ * | expressionList (',' namedArgument)*
+ */
+ ArgumentList parseArgumentList() {
+ Token leftParenthesis = getAndAdvance();
+ if (_matches(TokenType.CLOSE_PAREN)) {
+ return new ArgumentList(leftParenthesis, null, getAndAdvance());
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitPrefixedIdentifier(PrefixedIdentifier node) {
- if (identical(_oldNode, node.prefix)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.identifier)) {
- return _parser.parseSimpleIdentifier();
+ //
+ // Even though unnamed arguments must all appear before any named arguments,
+ // we allow them to appear in any order so that we can recover faster.
+ //
+ bool wasInInitializer = _inInitializer;
+ _inInitializer = false;
+ try {
+ Expression argument = parseArgument();
+ List<Expression> arguments = <Expression>[argument];
+ bool foundNamedArgument = argument is NamedExpression;
+ bool generatedError = false;
+ while (_optional(TokenType.COMMA)) {
+ if (_matches(TokenType.CLOSE_PAREN)) {
+ break;
+ }
+ argument = parseArgument();
+ arguments.add(argument);
+ if (argument is NamedExpression) {
+ foundNamedArgument = true;
+ } else if (foundNamedArgument) {
+ if (!generatedError) {
+ if (!argument.isSynthetic) {
+ // Report the error, once, but allow the arguments to be in any
+ // order in the AST.
+ _reportErrorForCurrentToken(
+ ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT);
+ generatedError = true;
+ }
+ }
+ }
+ }
+ // Recovery: If the next token is not a right parenthesis, look at the
+ // left parenthesis to see whether there is a matching right parenthesis.
+ // If there is, then we're more likely missing a comma and should go back
+ // to parsing arguments.
+ Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
+ return new ArgumentList(leftParenthesis, arguments, rightParenthesis);
+ } finally {
+ _inInitializer = wasInInitializer;
}
- return _notAChild(node);
}
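For illustration, a Dart sketch of argument lists the production accepts; the commented-out call shows the recovery case that is reported as POSITIONAL_AFTER_NAMED_ARGUMENT ('describe' is invented for the example):

    // Illustrative only: expressionList followed by named arguments.
    String describe(String name, {int age, String city: 'unknown'}) =>
        '$name, $age, $city';

    void main() {
      print(describe('Ada'));                          // one positional argument
      print(describe('Ada', age: 36, city: 'London')); // positional then named arguments
      // describe(age: 36, 'Ada');  // positional after named: parsed for recovery,
      //                            // but reported as an error.
    }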
- @override
- AstNode visitPrefixExpression(PrefixExpression node) {
- if (identical(_oldNode, node.operand)) {
- throw new InsufficientContextException();
+ /**
+ * Parse an assert statement. Return the assert statement.
+ *
+ * This method assumes that the current token matches `Keyword.ASSERT`.
+ *
+ * assertStatement ::=
+ * 'assert' '(' expression [',' expression] ')' ';'
+ */
+ AssertStatement parseAssertStatement() {
+ Token keyword = getAndAdvance();
+ Token leftParen = _expect(TokenType.OPEN_PAREN);
+ Expression expression = parseExpression2();
+ Token comma;
+ Expression message;
+ if (_matches(TokenType.COMMA)) {
+ comma = getAndAdvance();
+ message = parseExpression2();
}
- return _notAChild(node);
+ Token rightParen = _expect(TokenType.CLOSE_PAREN);
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new AssertStatement(
+ keyword, leftParen, expression, comma, message, rightParen, semicolon);
}
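For illustration, a Dart sketch of both assert forms, where the optional second expression is the message; asserts only execute when assertions are enabled:

    // Illustrative only: 'assert' '(' expression (',' expression)? ')' ';'
    void setAge(int age) {
      assert(age >= 0);                           // condition only
      assert(age < 200, 'age looks implausible'); // condition plus message
    }

    void main() {
      setAge(30);
    }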
- @override
- AstNode visitPropertyAccess(PropertyAccess node) {
- if (identical(_oldNode, node.target)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.propertyName)) {
- return _parser.parseSimpleIdentifier();
+ /**
+ * Parse an assignable expression. The [primaryAllowed] is `true` if the
+ * expression is allowed to be a primary without any assignable selector.
+ * Return the assignable expression that was parsed.
+ *
+ * assignableExpression ::=
+ * primary (arguments* assignableSelector)+
+ * | 'super' unconditionalAssignableSelector
+ * | identifier
+ */
+ Expression parseAssignableExpression(bool primaryAllowed) {
+ if (_matchesKeyword(Keyword.SUPER)) {
+ return parseAssignableSelector(
+ new SuperExpression(getAndAdvance()), false,
+ allowConditional: false);
}
- return _notAChild(node);
+ return _parseAssignableExpressionNotStartingWithSuper(primaryAllowed);
}
- @override
- AstNode visitRedirectingConstructorInvocation(
- RedirectingConstructorInvocation node) {
- if (identical(_oldNode, node.constructorName)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.argumentList)) {
- return _parser.parseArgumentList();
+ /**
+ * Parse an assignable selector. The [prefix] is the expression preceding the
+ * selector. The [optional] is `true` if the selector is optional. Return the
+ * assignable selector that was parsed, or the original prefix if there was no
+ * assignable selector. If [allowConditional] is false, then the '?.'
+ * operator will still be parsed, but a parse error will be generated.
+ *
+ * unconditionalAssignableSelector ::=
+ * '[' expression ']'
+ * | '.' identifier
+ *
+ * assignableSelector ::=
+ * unconditionalAssignableSelector
+ * | '?.' identifier
+ */
+ Expression parseAssignableSelector(Expression prefix, bool optional,
+ {bool allowConditional: true}) {
+ TokenType type = _currentToken.type;
+ if (type == TokenType.OPEN_SQUARE_BRACKET) {
+ Token leftBracket = getAndAdvance();
+ bool wasInInitializer = _inInitializer;
+ _inInitializer = false;
+ try {
+ Expression index = parseExpression2();
+ Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET);
+ return new IndexExpression.forTarget(
+ prefix, leftBracket, index, rightBracket);
+ } finally {
+ _inInitializer = wasInInitializer;
+ }
+ } else {
+ bool isQuestionPeriod = type == TokenType.QUESTION_PERIOD;
+ if (type == TokenType.PERIOD || isQuestionPeriod) {
+ if (isQuestionPeriod && !allowConditional) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.INVALID_OPERATOR_FOR_SUPER,
+ [_currentToken.lexeme]);
+ }
+ Token operator = getAndAdvance();
+ return new PropertyAccess(prefix, operator, parseSimpleIdentifier());
+ } else {
+ if (!optional) {
+ // Report the missing selector.
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR);
+ }
+ return prefix;
+ }
}
- return _notAChild(node);
}
- @override
- AstNode visitRethrowExpression(RethrowExpression node) => _notAChild(node);
-
- @override
- AstNode visitReturnStatement(ReturnStatement node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
- }
- return _notAChild(node);
+ /**
+ * Parse an await expression. Return the await expression that was parsed.
+ *
+ * This method assumes that the current token matches `_AWAIT`.
+ *
+ * awaitExpression ::=
+ * 'await' unaryExpression
+ */
+ AwaitExpression parseAwaitExpression() {
+ Token awaitToken = getAndAdvance();
+ Expression expression = parseUnaryExpression();
+ return new AwaitExpression(awaitToken, expression);
}
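For illustration, a minimal Dart sketch of an await expression inside an async function body ('fetchValue' is invented for the example):

    import 'dart:async';

    // Illustrative only: 'await' unaryExpression
    Future<int> fetchValue() async => 42;

    Future main() async {
      int value = await fetchValue(); // 'await' applied to an invocation
      print(value);
    }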
- @override
- AstNode visitScriptTag(ScriptTag node) => _notAChild(node);
-
- @override
- AstNode visitShowCombinator(ShowCombinator node) {
- if (node.shownNames.contains(_oldNode)) {
- return _parser.parseSimpleIdentifier();
+ /**
+ * Parse a bitwise and expression. Return the bitwise and expression that was
+ * parsed.
+ *
+ * bitwiseAndExpression ::=
+ * shiftExpression ('&' shiftExpression)*
+ * | 'super' ('&' shiftExpression)+
+ */
+ Expression parseBitwiseAndExpression() {
+ Expression expression;
+ if (_currentToken.keyword == Keyword.SUPER &&
+ _currentToken.next.type == TokenType.AMPERSAND) {
+ expression = new SuperExpression(getAndAdvance());
+ } else {
+ expression = parseShiftExpression();
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitSimpleFormalParameter(SimpleFormalParameter node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.type)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.identifier)) {
- throw new InsufficientContextException();
+ while (_currentToken.type == TokenType.AMPERSAND) {
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseShiftExpression());
}
- return _notAChild(node);
+ return expression;
}
- @override
- AstNode visitSimpleIdentifier(SimpleIdentifier node) => _notAChild(node);
-
- @override
- AstNode visitSimpleStringLiteral(SimpleStringLiteral node) =>
- _notAChild(node);
-
- @override
- AstNode visitStringInterpolation(StringInterpolation node) {
- if (node.elements.contains(_oldNode)) {
- throw new InsufficientContextException();
+ /**
+ * Parse a bitwise or expression. Return the bitwise or expression that was
+ * parsed.
+ *
+ * bitwiseOrExpression ::=
+ * bitwiseXorExpression ('|' bitwiseXorExpression)*
+ * | 'super' ('|' bitwiseXorExpression)+
+ */
+ Expression parseBitwiseOrExpression() {
+ Expression expression;
+ if (_currentToken.keyword == Keyword.SUPER &&
+ _currentToken.next.type == TokenType.BAR) {
+ expression = new SuperExpression(getAndAdvance());
+ } else {
+ expression = parseBitwiseXorExpression();
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitSuperConstructorInvocation(SuperConstructorInvocation node) {
- if (identical(_oldNode, node.constructorName)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.argumentList)) {
- return _parser.parseArgumentList();
+ while (_currentToken.type == TokenType.BAR) {
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseBitwiseXorExpression());
}
- return _notAChild(node);
+ return expression;
}
- @override
- AstNode visitSuperExpression(SuperExpression node) => _notAChild(node);
-
- @override
- AstNode visitSwitchCase(SwitchCase node) {
- if (node.labels.contains(_oldNode)) {
- return _parser.parseLabel();
- } else if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
- } else if (node.statements.contains(_oldNode)) {
- return _parser.parseStatement2();
+ /**
+ * Parse a bitwise exclusive-or expression. Return the bitwise exclusive-or
+ * expression that was parsed.
+ *
+ * bitwiseXorExpression ::=
+ * bitwiseAndExpression ('^' bitwiseAndExpression)*
+ * | 'super' ('^' bitwiseAndExpression)+
+ */
+ Expression parseBitwiseXorExpression() {
+ Expression expression;
+ if (_currentToken.keyword == Keyword.SUPER &&
+ _currentToken.next.type == TokenType.CARET) {
+ expression = new SuperExpression(getAndAdvance());
+ } else {
+ expression = parseBitwiseAndExpression();
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitSwitchDefault(SwitchDefault node) {
- if (node.labels.contains(_oldNode)) {
- return _parser.parseLabel();
- } else if (node.statements.contains(_oldNode)) {
- return _parser.parseStatement2();
+ while (_currentToken.type == TokenType.CARET) {
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseBitwiseAndExpression());
}
- return _notAChild(node);
+ return expression;
}
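For illustration, a Dart sketch of the three bitwise productions above; '&' binds more tightly than '^', which binds more tightly than '|':

    // Illustrative only: bitwiseAnd, bitwiseXor, and bitwiseOr expressions.
    void main() {
      int flags = 0x0F;
      print(flags & 0x03);  // 3
      print(flags ^ 0xFF);  // 240
      print(flags | 0x30);  // 63
      print(1 | 2 ^ 3 & 4); // parsed as 1 | (2 ^ (3 & 4)), prints 3
    }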
- @override
- AstNode visitSwitchStatement(SwitchStatement node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
- } else if (node.members.contains(_oldNode)) {
- throw new InsufficientContextException();
+ /**
+ * Parse a block. Return the block that was parsed.
+ *
+ * This method assumes that the current token matches
+ * [TokenType.OPEN_CURLY_BRACKET].
+ *
+ * block ::=
+ * '{' statements '}'
+ */
+ Block parseBlock() {
+ bool isEndOfBlock() {
+ TokenType type = _currentToken.type;
+ return type == TokenType.EOF || type == TokenType.CLOSE_CURLY_BRACKET;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitSymbolLiteral(SymbolLiteral node) => _notAChild(node);
-
- @override
- AstNode visitThisExpression(ThisExpression node) => _notAChild(node);
- @override
- AstNode visitThrowExpression(ThrowExpression node) {
- if (identical(_oldNode, node.expression)) {
- if (_isCascadeAllowedInThrow(node)) {
- return _parser.parseExpression2();
+ Token leftBracket = getAndAdvance();
+ List<Statement> statements = <Statement>[];
+ Token statementStart = _currentToken;
+ while (!isEndOfBlock()) {
+ Statement statement = parseStatement2();
+ if (identical(_currentToken, statementStart)) {
+ // Ensure that we are making progress and report an error if we're not.
+ _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
+ [_currentToken.lexeme]);
+ _advance();
+ } else if (statement != null) {
+ statements.add(statement);
}
- return _parser.parseExpressionWithoutCascade();
- }
- return _notAChild(node);
- }
-
- @override
- AstNode visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.variables)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
- }
-
- @override
- AstNode visitTryStatement(TryStatement node) {
- if (identical(_oldNode, node.body)) {
- return _parser.parseBlock();
- } else if (node.catchClauses.contains(_oldNode)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.finallyBlock)) {
- throw new InsufficientContextException();
+ statementStart = _currentToken;
}
- return _notAChild(node);
+ // Recovery: If the next token is not a right curly bracket, look at the
+ // left curly bracket to see whether there is a matching right bracket. If
+ // there is, then we're more likely missing a semi-colon and should go back
+ // to parsing statements.
+ Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
+ return new Block(leftBracket, statements, rightBracket);
}
- @override
- AstNode visitTypeArgumentList(TypeArgumentList node) {
- if (node.arguments.contains(_oldNode)) {
- return _parser.parseTypeName();
- }
- return _notAChild(node);
- }
-
- @override
- AstNode visitTypeName(TypeName node) {
- if (identical(_oldNode, node.name)) {
- return _parser.parsePrefixedIdentifier();
- } else if (identical(_oldNode, node.typeArguments)) {
- return _parser.parseTypeArgumentList();
- }
- return _notAChild(node);
- }
-
- @override
- AstNode visitTypeParameter(TypeParameter node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- return _parser.parseSimpleIdentifier();
- } else if (identical(_oldNode, node.bound)) {
- return _parser.parseTypeName();
- }
- return _notAChild(node);
- }
-
- @override
- AstNode visitTypeParameterList(TypeParameterList node) {
- if (node.typeParameters.contains(node)) {
- return _parser.parseTypeParameter();
+ /**
+ * Parse a break statement. Return the break statement that was parsed.
+ *
+ * This method assumes that the current token matches `Keyword.BREAK`.
+ *
+ * breakStatement ::=
+ * 'break' identifier? ';'
+ */
+ Statement parseBreakStatement() {
+ Token breakKeyword = getAndAdvance();
+ SimpleIdentifier label = null;
+ if (_matchesIdentifier()) {
+ label = _parseSimpleIdentifierUnchecked();
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitVariableDeclaration(VariableDeclaration node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.name)) {
- throw new InsufficientContextException();
- } else if (identical(_oldNode, node.initializer)) {
- throw new InsufficientContextException();
+ if (!_inLoop && !_inSwitch && label == null) {
+ _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword);
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitVariableDeclarationList(VariableDeclarationList node) {
- if (identical(_oldNode, node.documentationComment)) {
- throw new InsufficientContextException();
- } else if (node.metadata.contains(_oldNode)) {
- return _parser.parseAnnotation();
- } else if (identical(_oldNode, node.type)) {
- // There is not enough context to know whether we should reparse the type
- // using parseReturnType() (which allows 'void') or parseTypeName()
- // (which doesn't). Note that even though the language disallows
- // variables of type 'void', the parser sometimes accepts them in the
- // course of error recovery (e.g. "class C { void v; }"
- throw new InsufficientContextException();
- } else if (node.variables.contains(_oldNode)) {
- throw new InsufficientContextException();
- }
- return _notAChild(node);
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new BreakStatement(breakKeyword, label, semicolon);
}
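For illustration, a Dart sketch of 'break' with and without a label; an unlabeled break outside a loop or switch is what triggers BREAK_OUTSIDE_OF_LOOP:

    // Illustrative only: breakStatement ::= 'break' identifier? ';'
    void main() {
      for (int i = 0; i < 10; i++) {
        if (i == 3) break;             // unlabeled break inside a loop
      }
      outer:
      for (int i = 0; i < 3; i++) {
        for (int j = 0; j < 3; j++) {
          if (i + j == 2) break outer; // labeled break targets the outer loop
        }
      }
    }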
- @override
- AstNode visitVariableDeclarationStatement(VariableDeclarationStatement node) {
- if (identical(_oldNode, node.variables)) {
- throw new InsufficientContextException();
+ /**
+ * Parse a cascade section. Return the expression representing the cascaded
+ * method invocation.
+ *
+ * This method assumes that the current token matches
+ * `TokenType.PERIOD_PERIOD`.
+ *
+ * cascadeSection ::=
+ * '..' (cascadeSelector typeArguments? arguments*)
+ * (assignableSelector typeArguments? arguments*)* cascadeAssignment?
+ *
+ * cascadeSelector ::=
+ * '[' expression ']'
+ * | identifier
+ *
+ * cascadeAssignment ::=
+ * assignmentOperator expressionWithoutCascade
+ */
+ Expression parseCascadeSection() {
+ Token period = getAndAdvance();
+ Expression expression = null;
+ SimpleIdentifier functionName = null;
+ if (_matchesIdentifier()) {
+ functionName = _parseSimpleIdentifierUnchecked();
+ } else if (_currentToken.type == TokenType.OPEN_SQUARE_BRACKET) {
+ Token leftBracket = getAndAdvance();
+ bool wasInInitializer = _inInitializer;
+ _inInitializer = false;
+ try {
+ Expression index = parseExpression2();
+ Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET);
+ expression = new IndexExpression.forCascade(
+ period, leftBracket, index, rightBracket);
+ period = null;
+ } finally {
+ _inInitializer = wasInInitializer;
+ }
+ } else {
+ _reportErrorForToken(ParserErrorCode.MISSING_IDENTIFIER, _currentToken,
+ [_currentToken.lexeme]);
+ functionName = createSyntheticIdentifier();
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitWhileStatement(WhileStatement node) {
- if (identical(_oldNode, node.condition)) {
- return _parser.parseExpression2();
- } else if (identical(_oldNode, node.body)) {
- return _parser.parseStatement2();
+ assert((expression == null && functionName != null) ||
+ (expression != null && functionName == null));
+ if (_isLikelyArgumentList()) {
+ do {
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
+ if (functionName != null) {
+ expression = new MethodInvocation(expression, period, functionName,
+ typeArguments, parseArgumentList());
+ period = null;
+ functionName = null;
+ } else if (expression == null) {
+ // It should not be possible to get here.
+ expression = new MethodInvocation(expression, period,
+ createSyntheticIdentifier(), typeArguments, parseArgumentList());
+ } else {
+ expression = new FunctionExpressionInvocation(
+ expression, typeArguments, parseArgumentList());
+ }
+ } while (_isLikelyArgumentList());
+ } else if (functionName != null) {
+ expression = new PropertyAccess(expression, period, functionName);
+ period = null;
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitWithClause(WithClause node) {
- if (node.mixinTypes.contains(node)) {
- return _parser.parseTypeName();
+ assert(expression != null);
+ bool progress = true;
+ while (progress) {
+ progress = false;
+ Expression selector = parseAssignableSelector(expression, true);
+ if (!identical(selector, expression)) {
+ expression = selector;
+ progress = true;
+ while (_isLikelyArgumentList()) {
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
+ Expression currentExpression = expression;
+ if (currentExpression is PropertyAccess) {
+ expression = new MethodInvocation(
+ currentExpression.target,
+ currentExpression.operator,
+ currentExpression.propertyName,
+ typeArguments,
+ parseArgumentList());
+ } else {
+ expression = new FunctionExpressionInvocation(
+ expression, typeArguments, parseArgumentList());
+ }
+ }
+ }
}
- return _notAChild(node);
- }
-
- @override
- AstNode visitYieldStatement(YieldStatement node) {
- if (identical(_oldNode, node.expression)) {
- return _parser.parseExpression2();
+ if (_currentToken.type.isAssignmentOperator) {
+ Token operator = getAndAdvance();
+ _ensureAssignable(expression);
+ expression = new AssignmentExpression(
+ expression, operator, parseExpressionWithoutCascade());
}
- return _notAChild(node);
- }
-
- /**
- * Return `true` if the given assignment [expression] can have a cascade
- * expression on the right-hand side.
- */
- bool _isCascadeAllowedInAssignment(AssignmentExpression expression) {
- // TODO(brianwilkerson) Implement this method.
- throw new InsufficientContextException();
- }
-
- /**
- * Return `true` if the given throw [expression] can have a cascade
- * expression.
- */
- bool _isCascadeAllowedInThrow(ThrowExpression expression) {
- // TODO(brianwilkerson) Implement this method.
- throw new InsufficientContextException();
- }
-
- /**
- * Throw an exception indicating that the visited [node] was not the parent of
- * the node to be replaced.
- */
- AstNode _notAChild(AstNode node) {
- throw new IncrementalParseException.con1(
- "Internal error: the visited node (a ${node.runtimeType}) was not the parent of the node to be replaced (a ${_oldNode.runtimeType})");
+ return expression;
}
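For illustration, a Dart sketch of cascade sections, including an index selector and a cascade assignment through a setter:

    // Illustrative only: '..' cascadeSelector arguments* and cascadeAssignment.
    void main() {
      StringBuffer buffer = new StringBuffer()
        ..write('hello')  // '..' identifier followed by an argument list
        ..write(', world');
      print(buffer);

      List<int> numbers = [1, 2, 3]
        ..[0] = 10        // '..' '[' expression ']' selector with assignment
        ..length = 2;     // cascadeAssignment via the 'length' setter
      print(numbers);     // [10, 2]
    }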
-}
-
-/**
- * An exception that occurred while attempting to parse a replacement for a
- * specified node in an existing AST structure.
- */
-class IncrementalParseException extends RuntimeException {
- /**
- * Initialize a newly created exception to have no message and to be its own
- * cause.
- */
- IncrementalParseException() : super();
-
- /**
- * Initialize a newly created exception to have the given [message] and to be
- * its own cause.
- */
- IncrementalParseException.con1(String message) : super(message: message);
-
- /**
- * Initialize a newly created exception to have no message and to have the
- * given [cause].
- */
- IncrementalParseException.con2(Exception cause) : super(cause: cause);
-}
-
-/**
- * An object used to re-parse a single AST structure within a larger AST
- * structure.
- */
-class IncrementalParser {
- /**
- * The source being parsed.
- */
- final Source _source;
-
- /**
- * A map from old tokens to new tokens used during the cloning process.
- */
- final TokenMap _tokenMap;
-
- /**
- * The error listener that will be informed of any errors that are found
- * during the parse.
- */
- final AnalysisErrorListener _errorListener;
-
- /**
- * The node in the AST structure that contains the revised content.
- */
- AstNode _updatedNode;
-
- /**
- * Initialize a newly created incremental parser to parse a portion of the
- * content of the given [_source]. The [_tokenMap] is a map from old tokens to
- * new tokens that is used during the cloning process. The [_errorListener]
- * will be informed of any errors that are found during the parse.
- */
- IncrementalParser(this._source, this._tokenMap, this._errorListener);
-
- /**
- * Return the node in the AST structure that contains the revised content.
- */
- AstNode get updatedNode => _updatedNode;
/**
- * Given a range of tokens that were re-scanned, re-parse the minimum number
- * of tokens to produce a consistent AST structure. The range is represented
- * by the first and last tokens in the range.
+ * Parse a class declaration. The [commentAndMetadata] is the metadata to be
+ * associated with the member. The [abstractKeyword] is the token for the
+ * keyword 'abstract', or `null` if the keyword was not given. Return the
+ * class declaration that was parsed.
*
- * More specifically, the [leftToken] is the token in the new token stream
- * immediately to the left of the range of tokens that were inserted and the
- * [rightToken] is the token in the new token stream immediately to the right
- * of the range of tokens that were inserted. The [originalStart] and
- * [originalEnd] are the offsets in the original source of the first and last
- * characters that were modified.
+ * This method assumes that the current token matches `Keyword.CLASS`.
*
- * The tokens are assumed to be contained in the same token stream.
+ * classDeclaration ::=
+ * metadata 'abstract'? 'class' name typeParameterList? (extendsClause withClause?)? implementsClause? '{' classMembers '}' |
+ * metadata 'abstract'? 'class' mixinApplicationClass
*/
- AstNode reparse(AstNode originalStructure, Token leftToken, Token rightToken,
- int originalStart, int originalEnd) {
- AstNode oldNode = null;
- AstNode newNode = null;
+ CompilationUnitMember parseClassDeclaration(
+ CommentAndMetadata commentAndMetadata, Token abstractKeyword) {
//
- // Find the first token that needs to be re-parsed.
+ // Parse the name and type parameters.
//
- Token firstToken = leftToken.next;
- if (identical(firstToken, rightToken)) {
- // If there are no new tokens, then we need to include at least one copied
- // node in the range.
- firstToken = leftToken;
+ Token keyword = getAndAdvance();
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
+ String className = name.name;
+ TypeParameterList typeParameters = null;
+ TokenType type = _currentToken.type;
+ if (type == TokenType.LT) {
+ typeParameters = parseTypeParameterList();
+ type = _currentToken.type;
}
//
- // Find the smallest AST node that encompasses the range of re-scanned
- // tokens.
+ // Check to see whether this might be a class type alias rather than a class
+ // declaration.
//
- if (originalEnd < originalStart) {
- oldNode = new NodeLocator(originalStart).searchWithin(originalStructure);
- } else {
- oldNode = new NodeLocator(originalStart, originalEnd)
- .searchWithin(originalStructure);
+ if (type == TokenType.EQ) {
+ return _parseClassTypeAliasAfterName(
+ commentAndMetadata, abstractKeyword, keyword, name, typeParameters);
}
//
- // Find the token at which parsing is to begin.
+ // Parse the clauses. The parser accepts clauses in any order, but will
+ // generate errors if they are not in the order required by the
+ // specification.
//
- int originalOffset = oldNode.offset;
- Token parseToken = _findTokenAt(firstToken, originalOffset);
- if (parseToken == null) {
- return null;
+ ExtendsClause extendsClause = null;
+ WithClause withClause = null;
+ ImplementsClause implementsClause = null;
+ bool foundClause = true;
+ while (foundClause) {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.EXTENDS) {
+ if (extendsClause == null) {
+ extendsClause = parseExtendsClause();
+ if (withClause != null) {
+ _reportErrorForToken(
+ ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword);
+ } else if (implementsClause != null) {
+ _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS,
+ implementsClause.implementsKeyword);
+ }
+ } else {
+ _reportErrorForToken(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES,
+ extendsClause.extendsKeyword);
+ parseExtendsClause();
+ }
+ } else if (keyword == Keyword.WITH) {
+ if (withClause == null) {
+ withClause = parseWithClause();
+ if (implementsClause != null) {
+ _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_WITH,
+ implementsClause.implementsKeyword);
+ }
+ } else {
+ _reportErrorForToken(
+ ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword);
+ parseWithClause();
+ // TODO(brianwilkerson) Should we merge the list of applied mixins
+ // into a single list?
+ }
+ } else if (keyword == Keyword.IMPLEMENTS) {
+ if (implementsClause == null) {
+ implementsClause = parseImplementsClause();
+ } else {
+ _reportErrorForToken(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES,
+ implementsClause.implementsKeyword);
+ parseImplementsClause();
+ // TODO(brianwilkerson) Should we merge the list of implemented
+ // classes into a single list?
+ }
+ } else {
+ foundClause = false;
+ }
+ }
+ if (withClause != null && extendsClause == null) {
+ _reportErrorForToken(
+ ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword);
}
//
- // Parse the appropriate AST structure starting at the appropriate place.
+ // Look for and skip over the extra-lingual 'native' specification.
//
- Parser parser = new Parser(_source, _errorListener);
- parser.currentToken = parseToken;
- while (newNode == null) {
- AstNode parent = oldNode.parent;
- if (parent == null) {
- parseToken = _findFirstToken(parseToken);
- parser.currentToken = parseToken;
- return parser.parseCompilationUnit2();
- }
- bool advanceToParent = false;
- try {
- IncrementalParseDispatcher dispatcher =
- new IncrementalParseDispatcher(parser, oldNode);
- IncrementalParseStateBuilder contextBuilder =
- new IncrementalParseStateBuilder(parser);
- contextBuilder.buildState(oldNode);
- newNode = parent.accept(dispatcher);
- //
- // Validate that the new node can replace the old node.
- //
- Token mappedToken = _tokenMap.get(oldNode.endToken.next);
- if (mappedToken == null ||
- newNode == null ||
- mappedToken.offset != newNode.endToken.next.offset ||
- newNode.offset != oldNode.offset) {
- advanceToParent = true;
- }
- } on InsufficientContextException {
- advanceToParent = true;
- } catch (exception) {
- return null;
- }
- if (advanceToParent) {
- newNode = null;
- oldNode = parent;
- originalOffset = oldNode.offset;
- parseToken = _findTokenAt(parseToken, originalOffset);
- parser.currentToken = parseToken;
- }
+ NativeClause nativeClause = null;
+ if (_matchesString(_NATIVE) && _tokenMatches(_peek(), TokenType.STRING)) {
+ nativeClause = _parseNativeClause();
}
- _updatedNode = newNode;
//
- // Replace the old node with the new node in a copy of the original AST
- // structure.
+ // Parse the body of the class.
//
- if (identical(oldNode, originalStructure)) {
- // We ended up re-parsing the whole structure, so there's no need for a
- // copy.
- ResolutionCopier.copyResolutionData(oldNode, newNode);
- return newNode;
+ Token leftBracket = null;
+ List<ClassMember> members = null;
+ Token rightBracket = null;
+ if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+ leftBracket = getAndAdvance();
+ members = _parseClassMembers(className, _getEndToken(leftBracket));
+ rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
+ } else {
+ // Recovery: Check for an unmatched closing curly bracket and parse
+ // members until it is reached.
+ leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET);
+ rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET);
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY);
}
- ResolutionCopier.copyResolutionData(oldNode, newNode);
- IncrementalAstCloner cloner =
- new IncrementalAstCloner(oldNode, newNode, _tokenMap);
- return originalStructure.accept(cloner) as AstNode;
- }
-
- /**
- * Return the first (non-EOF) token in the token stream containing the
- * [firstToken].
- */
- Token _findFirstToken(Token firstToken) {
- while (firstToken.type != TokenType.EOF) {
- firstToken = firstToken.previous;
- }
- return firstToken.next;
- }
-
- /**
- * Find the token at or before the [firstToken] with the given [offset], or
- * `null` if there is no such token.
- */
- Token _findTokenAt(Token firstToken, int offset) {
- while (firstToken.offset > offset && firstToken.type != TokenType.EOF) {
- firstToken = firstToken.previous;
- }
- return firstToken;
- }
-}
-
-/**
- * A visitor capable of inferring the correct parser state for incremental
- * parsing. This visitor visits each parent/child relationship in the chain of
- * ancestors of the node to be replaced (starting with the root of the parse
- * tree), updating the parser to the correct state for parsing the child of the
- * given parent. Once it has visited all of these relationships, the parser
- * will be in the correct state for reparsing the node to be replaced.
- */
-class IncrementalParseStateBuilder extends SimpleAstVisitor {
- // TODO(paulberry): add support for other pieces of parser state (_inAsync,
- // _inGenerator, _inLoop, and _inSwitch). Note that _inLoop and _inSwitch
- // only affect error message generation.
-
- /**
- * The parser whose state should be built.
- */
- final Parser _parser;
-
- /**
- * The child node in the parent/child relationship currently being visited.
- * (The corresponding parent is the node passed to the visit...() function.)
- */
- AstNode _childNode;
-
- /**
- * Create an IncrementalParseStateBuilder which will build the correct state
- * for [_parser].
- */
- IncrementalParseStateBuilder(this._parser);
-
- /**
- * Build the correct parser state for parsing a replacement for [node].
- */
- void buildState(AstNode node) {
- List<AstNode> ancestors = <AstNode>[];
- while (node != null) {
- ancestors.add(node);
- node = node.parent;
- }
- _parser._inInitializer = false;
- for (int i = ancestors.length - 2; i >= 0; i--) {
- _childNode = ancestors[i];
- ancestors[i + 1].accept(this);
- }
- }
-
- @override
- void visitArgumentList(ArgumentList node) {
- _parser._inInitializer = false;
- }
-
- @override
- void visitConstructorFieldInitializer(ConstructorFieldInitializer node) {
- if (identical(_childNode, node.expression)) {
- _parser._inInitializer = true;
- }
- }
-
- @override
- void visitIndexExpression(IndexExpression node) {
- if (identical(_childNode, node.index)) {
- _parser._inInitializer = false;
- }
- }
-
- @override
- void visitInterpolationExpression(InterpolationExpression node) {
- if (identical(_childNode, node.expression)) {
- _parser._inInitializer = false;
- }
- }
-
- @override
- void visitListLiteral(ListLiteral node) {
- if (node.elements.contains(_childNode)) {
- _parser._inInitializer = false;
- }
- }
-
- @override
- void visitMapLiteral(MapLiteral node) {
- if (node.entries.contains(_childNode)) {
- _parser._inInitializer = false;
- }
- }
-
- @override
- void visitParenthesizedExpression(ParenthesizedExpression node) {
- if (identical(_childNode, node.expression)) {
- _parser._inInitializer = false;
- }
- }
-}
-
-/**
- * An exception indicating that an AST node cannot be re-parsed because there is
- * not enough context to know how to re-parse the node. Clients can attempt to
- * re-parse the parent of the node.
- */
-class InsufficientContextException extends IncrementalParseException {
- /**
- * Initialize a newly created exception to have no message and to be its own
- * cause.
- */
- InsufficientContextException() : super();
-
- /**
- * Initialize a newly created exception to have the given [message] and to be
- * its own cause.
- */
- InsufficientContextException.con1(String message) : super.con1(message);
-
- /**
- * Initialize a newly created exception to have no message and to have the
- * given [cause].
- */
- InsufficientContextException.con2(Exception cause) : super.con2(cause);
-}
-
-/**
- * Wrapper around [Function] which should be called with "target" and
- * "arguments".
- */
-class MethodTrampoline {
- int parameterCount;
- Function trampoline;
- MethodTrampoline(this.parameterCount, this.trampoline);
- Object invoke(target, List arguments) {
- if (arguments.length != parameterCount) {
- throw new IllegalArgumentException(
- "${arguments.length} != $parameterCount");
- }
- switch (parameterCount) {
- case 0:
- return trampoline(target);
- case 1:
- return trampoline(target, arguments[0]);
- case 2:
- return trampoline(target, arguments[0], arguments[1]);
- case 3:
- return trampoline(target, arguments[0], arguments[1], arguments[2]);
- case 4:
- return trampoline(
- target, arguments[0], arguments[1], arguments[2], arguments[3]);
- default:
- throw new IllegalArgumentException("Not implemented for > 4 arguments");
- }
- }
-}
-
-/**
- * A simple data-holder for a method that needs to return multiple values.
- */
-class Modifiers {
- /**
- * The token representing the keyword 'abstract', or `null` if the keyword was
- * not found.
- */
- Token abstractKeyword;
-
- /**
- * The token representing the keyword 'const', or `null` if the keyword was
- * not found.
- */
- Token constKeyword;
-
- /**
- * The token representing the keyword 'external', or `null` if the keyword was
- * not found.
- */
- Token externalKeyword;
-
- /**
- * The token representing the keyword 'factory', or `null` if the keyword was
- * not found.
- */
- Token factoryKeyword;
-
- /**
- * The token representing the keyword 'final', or `null` if the keyword was
- * not found.
- */
- Token finalKeyword;
-
- /**
- * The token representing the keyword 'static', or `null` if the keyword was
- * not found.
- */
- Token staticKeyword;
-
- /**
- * The token representing the keyword 'var', or `null` if the keyword was not
- * found.
- */
- Token varKeyword;
-
- @override
- String toString() {
- StringBuffer buffer = new StringBuffer();
- bool needsSpace = _appendKeyword(buffer, false, abstractKeyword);
- needsSpace = _appendKeyword(buffer, needsSpace, constKeyword);
- needsSpace = _appendKeyword(buffer, needsSpace, externalKeyword);
- needsSpace = _appendKeyword(buffer, needsSpace, factoryKeyword);
- needsSpace = _appendKeyword(buffer, needsSpace, finalKeyword);
- needsSpace = _appendKeyword(buffer, needsSpace, staticKeyword);
- _appendKeyword(buffer, needsSpace, varKeyword);
- return buffer.toString();
- }
-
- /**
- * If the given [keyword] is not `null`, append it to the given [builder],
- * prefixing it with a space if [needsSpace] is `true`. Return `true` if
- * subsequent keywords need to be prefixed with a space.
- */
- bool _appendKeyword(StringBuffer buffer, bool needsSpace, Token keyword) {
- if (keyword != null) {
- if (needsSpace) {
- buffer.writeCharCode(0x20);
- }
- buffer.write(keyword.lexeme);
- return true;
- }
- return needsSpace;
- }
-}
-
-/**
- * A parser used to parse tokens into an AST structure.
- */
-class Parser {
- static String ASYNC = "async";
-
- static String _AWAIT = "await";
-
- static String _HIDE = "hide";
-
- static String _OF = "of";
-
- static String _ON = "on";
-
- static String _NATIVE = "native";
-
- static String _SHOW = "show";
-
- static String SYNC = "sync";
-
- static String _YIELD = "yield";
-
- /**
- * The source being parsed.
- */
- final Source _source;
-
- /**
- * The error listener that will be informed of any errors that are found
- * during the parse.
- */
- final AnalysisErrorListener _errorListener;
-
- /**
- * An [errorListener] lock, if more than `0`, then errors are not reported.
- */
- int _errorListenerLock = 0;
-
- /**
- * A flag indicating whether parser is to parse function bodies.
- */
- bool _parseFunctionBodies = true;
-
- /**
- * The next token to be parsed.
- */
- Token _currentToken;
-
- /**
- * A flag indicating whether the parser is currently in a function body marked
- * as being 'async'.
- */
- bool _inAsync = false;
-
- /**
- * A flag indicating whether the parser is currently in a function body marked
- * as being 'async'.
- */
- bool _inGenerator = false;
-
- /**
- * A flag indicating whether the parser is currently in the body of a loop.
- */
- bool _inLoop = false;
-
- /**
- * A flag indicating whether the parser is currently in a switch statement.
- */
- bool _inSwitch = false;
-
- /**
- * A flag indicating whether the parser is currently in a constructor field
- * initializer, with no intervening parens, braces, or brackets.
- */
- bool _inInitializer = false;
-
- /**
- * A flag indicating whether the parser is to parse generic method syntax.
- */
- bool parseGenericMethods = false;
-
- /**
- * Initialize a newly created parser to parse the content of the given
- * [_source] and to report any errors that are found to the given
- * [_errorListener].
- */
- Parser(this._source, this._errorListener);
-
- void set currentToken(Token currentToken) {
- this._currentToken = currentToken;
- }
-
- /**
- * Return `true` if the current token is the first token of a return type that
- * is followed by an identifier, possibly followed by a list of type
- * parameters, followed by a left-parenthesis. This is used by
- * [_parseTypeAlias] to determine whether or not to parse a return type.
- */
- bool get hasReturnTypeInTypeAlias {
- Token next = _skipReturnType(_currentToken);
- if (next == null) {
- return false;
- }
- return _tokenMatchesIdentifier(next);
- }
-
- /**
- * Set whether the parser is to parse the async support.
- */
- @deprecated
- void set parseAsync(bool parseAsync) {
- // Async support cannot be disabled
- }
-
- /**
- * Set whether the parser is to parse deferred libraries.
- */
- @deprecated
- void set parseDeferredLibraries(bool parseDeferredLibraries) {
- // Deferred libraries support cannot be disabled
- }
-
- /**
- * Set whether the parser is to parse enum declarations.
- */
- @deprecated
- void set parseEnum(bool parseEnum) {
- // Enum support cannot be disabled
- }
-
- /**
- * Set whether parser is to parse function bodies.
- */
- void set parseFunctionBodies(bool parseFunctionBodies) {
- this._parseFunctionBodies = parseFunctionBodies;
- }
-
- /**
- * Advance to the next token in the token stream, making it the new current
- * token and return the token that was current before this method was invoked.
- */
- Token getAndAdvance() {
- Token token = _currentToken;
- _advance();
- return token;
- }
-
- /**
- * Parse an annotation. Return the annotation that was parsed.
- *
- * annotation ::=
- * '@' qualified ('.' identifier)? arguments?
- *
- */
- Annotation parseAnnotation() {
- Token atSign = _expect(TokenType.AT);
- Identifier name = parsePrefixedIdentifier();
- Token period = null;
- SimpleIdentifier constructorName = null;
- if (_matches(TokenType.PERIOD)) {
- period = getAndAdvance();
- constructorName = parseSimpleIdentifier();
- }
- ArgumentList arguments = null;
- if (_matches(TokenType.OPEN_PAREN)) {
- arguments = parseArgumentList();
- }
- return new Annotation(atSign, name, period, constructorName, arguments);
- }
-
- /**
- * Parse an argument. Return the argument that was parsed.
- *
- * argument ::=
- * namedArgument
- * | expression
- *
- * namedArgument ::=
- * label expression
- */
- Expression parseArgument() {
- //
- // Both namedArgument and expression can start with an identifier, but only
- // namedArgument can have an identifier followed by a colon.
- //
- if (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) {
- return new NamedExpression(parseLabel(), parseExpression2());
- } else {
- return parseExpression2();
- }
- }
-
- /**
- * Parse a list of arguments. Return the argument list that was parsed.
- *
- * arguments ::=
- * '(' argumentList? ')'
- *
- * argumentList ::=
- * namedArgument (',' namedArgument)*
- * | expressionList (',' namedArgument)*
- */
- ArgumentList parseArgumentList() {
- Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
- List<Expression> arguments = new List<Expression>();
- if (_matches(TokenType.CLOSE_PAREN)) {
- return new ArgumentList(leftParenthesis, arguments, getAndAdvance());
- }
- //
- // Even though unnamed arguments must all appear before any named arguments,
- // we allow them to appear in any order so that we can recover faster.
- //
- bool wasInInitializer = _inInitializer;
- _inInitializer = false;
- try {
- Expression argument = parseArgument();
- arguments.add(argument);
- bool foundNamedArgument = argument is NamedExpression;
- bool generatedError = false;
- while (_optional(TokenType.COMMA)) {
- argument = parseArgument();
- arguments.add(argument);
- if (foundNamedArgument) {
- bool blankArgument =
- argument is SimpleIdentifier && argument.name.isEmpty;
- if (!generatedError &&
- !(argument is NamedExpression && !blankArgument)) {
- // Report the error, once, but allow the arguments to be in any
- // order in the AST.
- _reportErrorForCurrentToken(
- ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT);
- generatedError = true;
- }
- } else if (argument is NamedExpression) {
- foundNamedArgument = true;
- }
- }
- // TODO(brianwilkerson) Recovery: Look at the left parenthesis to see
- // whether there is a matching right parenthesis. If there is, then we're
- // more likely missing a comma and should go back to parsing arguments.
- Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
- return new ArgumentList(leftParenthesis, arguments, rightParenthesis);
- } finally {
- _inInitializer = wasInInitializer;
- }
- }
-
- /**
- * Parse a bitwise or expression. Return the bitwise or expression that was
- * parsed.
- *
- * bitwiseOrExpression ::=
- * bitwiseXorExpression ('|' bitwiseXorExpression)*
- * | 'super' ('|' bitwiseXorExpression)+
- */
- Expression parseBitwiseOrExpression() {
- Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
- _tokenMatches(_peek(), TokenType.BAR)) {
- expression = new SuperExpression(getAndAdvance());
- } else {
- expression = _parseBitwiseXorExpression();
- }
- while (_matches(TokenType.BAR)) {
- Token operator = getAndAdvance();
- expression = new BinaryExpression(
- expression, operator, _parseBitwiseXorExpression());
- }
- return expression;
- }
-
- /**
- * Parse a block. Return the block that was parsed.
- *
- * block ::=
- * '{' statements '}'
- */
- Block parseBlock() {
- Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
- List<Statement> statements = new List<Statement>();
- Token statementStart = _currentToken;
- while (
- !_matches(TokenType.EOF) && !_matches(TokenType.CLOSE_CURLY_BRACKET)) {
- Statement statement = parseStatement2();
- if (statement != null) {
- statements.add(statement);
- }
- if (identical(_currentToken, statementStart)) {
- // Ensure that we are making progress and report an error if we're not.
- _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
- [_currentToken.lexeme]);
- _advance();
- }
- statementStart = _currentToken;
- }
- Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
- return new Block(leftBracket, statements, rightBracket);
- }
-
- /**
- * Parse a class member. The [className] is the name of the class containing
- * the member being parsed. Return the class member that was parsed, or `null`
- * if what was found was not a valid class member.
- *
- * classMemberDefinition ::=
- * declaration ';'
- * | methodSignature functionBody
- */
- ClassMember parseClassMember(String className) {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- Modifiers modifiers = _parseModifiers();
- if (_matchesKeyword(Keyword.VOID)) {
- TypeName returnType = parseReturnType();
- if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, returnType);
- } else if (_matchesKeyword(Keyword.SET) &&
- _tokenMatchesIdentifier(_peek())) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseSetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, returnType);
- } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
- _validateModifiersForOperator(modifiers);
- return _parseOperator(
- commentAndMetadata, modifiers.externalKeyword, returnType);
- } else if (_matchesIdentifier() &&
- _peek().matchesAny([
- TokenType.OPEN_PAREN,
- TokenType.OPEN_CURLY_BRACKET,
- TokenType.FUNCTION,
- TokenType.LT
- ])) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
- modifiers.externalKeyword, modifiers.staticKeyword, returnType);
- } else {
- //
- // We have found an error of some kind. Try to recover.
- //
- if (_matchesIdentifier()) {
- if (_peek().matchesAny(
- [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
- //
- // We appear to have a variable declaration with a type of "void".
- //
- _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType);
- return _parseInitializedIdentifierList(
- commentAndMetadata,
- modifiers.staticKeyword,
- _validateModifiersForField(modifiers),
- returnType);
- }
- }
- if (_isOperator(_currentToken)) {
- //
- // We appear to have found an operator declaration without the
- // 'operator' keyword.
- //
- _validateModifiersForOperator(modifiers);
- return _parseOperator(
- commentAndMetadata, modifiers.externalKeyword, returnType);
- }
- _reportErrorForToken(
- ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
- return null;
- }
- } else if (_matchesKeyword(Keyword.GET) &&
- _tokenMatchesIdentifier(_peek())) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, null);
- } else if (_matchesKeyword(Keyword.SET) &&
- _tokenMatchesIdentifier(_peek())) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseSetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, null);
- } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
- _validateModifiersForOperator(modifiers);
- return _parseOperator(
- commentAndMetadata, modifiers.externalKeyword, null);
- } else if (!_matchesIdentifier()) {
- //
- // Recover from an error.
- //
- if (_matchesKeyword(Keyword.CLASS)) {
- _reportErrorForCurrentToken(ParserErrorCode.CLASS_IN_CLASS);
- // TODO(brianwilkerson) We don't currently have any way to capture the
- // class that was parsed.
- _parseClassDeclaration(commentAndMetadata, null);
- return null;
- } else if (_matchesKeyword(Keyword.ABSTRACT) &&
- _tokenMatchesKeyword(_peek(), Keyword.CLASS)) {
- _reportErrorForToken(ParserErrorCode.CLASS_IN_CLASS, _peek());
- // TODO(brianwilkerson) We don't currently have any way to capture the
- // class that was parsed.
- _parseClassDeclaration(commentAndMetadata, getAndAdvance());
- return null;
- } else if (_matchesKeyword(Keyword.ENUM)) {
- _reportErrorForToken(ParserErrorCode.ENUM_IN_CLASS, _peek());
- // TODO(brianwilkerson) We don't currently have any way to capture the
- // enum that was parsed.
- _parseEnumDeclaration(commentAndMetadata);
- return null;
- } else if (_isOperator(_currentToken)) {
- //
- // We appear to have found an operator declaration without the
- // 'operator' keyword.
- //
- _validateModifiersForOperator(modifiers);
- return _parseOperator(
- commentAndMetadata, modifiers.externalKeyword, null);
- }
- Token keyword = modifiers.varKeyword;
- if (keyword == null) {
- keyword = modifiers.finalKeyword;
- }
- if (keyword == null) {
- keyword = modifiers.constKeyword;
- }
- if (keyword != null) {
- //
- // We appear to have found an incomplete field declaration.
- //
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
- List<VariableDeclaration> variables = new List<VariableDeclaration>();
- variables.add(
- new VariableDeclaration(_createSyntheticIdentifier(), null, null));
- return new FieldDeclaration(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- null,
- new VariableDeclarationList(null, null, keyword, null, variables),
- _expectSemicolon());
- }
- _reportErrorForToken(
- ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
- if (commentAndMetadata.comment != null ||
- !commentAndMetadata.metadata.isEmpty) {
- //
- // We appear to have found an incomplete declaration at the end of the
- // class. At this point it consists of metadata, which we don't want to
- // lose, so we'll treat it as a method declaration with a missing name,
- // no parameters, and an empty body.
- //
- return new MethodDeclaration(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- null,
- null,
- null,
- null,
- null,
- _createSyntheticIdentifier(),
- null,
- new FormalParameterList(
- null, new List<FormalParameter>(), null, null, null),
- new EmptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON)));
- }
- return null;
- } else if (_tokenMatches(_peek(), TokenType.PERIOD) &&
- _tokenMatchesIdentifier(_peekAt(2)) &&
- _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) {
- return _parseConstructor(
- commentAndMetadata,
- modifiers.externalKeyword,
- _validateModifiersForConstructor(modifiers),
- modifiers.factoryKeyword,
- parseSimpleIdentifier(),
- getAndAdvance(),
- parseSimpleIdentifier(),
- parseFormalParameterList());
- } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- SimpleIdentifier methodName = parseSimpleIdentifier();
- FormalParameterList parameters = parseFormalParameterList();
- if (_matches(TokenType.COLON) ||
- modifiers.factoryKeyword != null ||
- methodName.name == className) {
- return _parseConstructor(
- commentAndMetadata,
- modifiers.externalKeyword,
- _validateModifiersForConstructor(modifiers),
- modifiers.factoryKeyword,
- methodName,
- null,
- null,
- parameters);
- }
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- _validateFormalParameterList(parameters);
- return _parseMethodDeclarationAfterParameters(
- commentAndMetadata,
- modifiers.externalKeyword,
- modifiers.staticKeyword,
- null,
- methodName,
- null,
- parameters);
- } else if (_peek()
- .matchesAny([TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
- if (modifiers.constKeyword == null &&
- modifiers.finalKeyword == null &&
- modifiers.varKeyword == null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
- }
- return _parseInitializedIdentifierList(commentAndMetadata,
- modifiers.staticKeyword, _validateModifiersForField(modifiers), null);
- } else if (_matchesKeyword(Keyword.TYPEDEF)) {
- _reportErrorForCurrentToken(ParserErrorCode.TYPEDEF_IN_CLASS);
- // TODO(brianwilkerson) We don't currently have any way to capture the
- // function type alias that was parsed.
- _parseFunctionTypeAlias(commentAndMetadata, getAndAdvance());
- return null;
- } else if (parseGenericMethods) {
- Token token = _skipTypeParameterList(_peek());
- if (token != null && _tokenMatches(token, TokenType.OPEN_PAREN)) {
- return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
- modifiers.externalKeyword, modifiers.staticKeyword, null);
- }
- }
- TypeName type = parseTypeName();
- if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, type);
- } else if (_matchesKeyword(Keyword.SET) &&
- _tokenMatchesIdentifier(_peek())) {
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- return _parseSetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, type);
- } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
- _validateModifiersForOperator(modifiers);
- return _parseOperator(
- commentAndMetadata, modifiers.externalKeyword, type);
- } else if (!_matchesIdentifier()) {
- if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
- //
- // We appear to have found an incomplete declaration at the end of the
- // class. At this point it consists of a type name, so we'll treat it as
- // a field declaration with a missing field name and semicolon.
- //
- return _parseInitializedIdentifierList(
- commentAndMetadata,
- modifiers.staticKeyword,
- _validateModifiersForField(modifiers),
- type);
- }
- if (_isOperator(_currentToken)) {
- //
- // We appear to have found an operator declaration without the
- // 'operator' keyword.
- //
- _validateModifiersForOperator(modifiers);
- return _parseOperator(
- commentAndMetadata, modifiers.externalKeyword, type);
- }
- //
- // We appear to have found an incomplete declaration before another
- // declaration. At this point it consists of a type name, so we'll treat
- // it as a field declaration with a missing field name and semicolon.
- //
- _reportErrorForToken(
- ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
- try {
- _lockErrorListener();
- return _parseInitializedIdentifierList(
- commentAndMetadata,
- modifiers.staticKeyword,
- _validateModifiersForField(modifiers),
- type);
- } finally {
- _unlockErrorListener();
- }
- } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- SimpleIdentifier methodName = parseSimpleIdentifier();
- FormalParameterList parameters = parseFormalParameterList();
- if (methodName.name == className) {
- _reportErrorForNode(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, type);
- return _parseConstructor(
- commentAndMetadata,
- modifiers.externalKeyword,
- _validateModifiersForConstructor(modifiers),
- modifiers.factoryKeyword,
- methodName,
- null,
- null,
- parameters);
- }
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- _validateFormalParameterList(parameters);
- return _parseMethodDeclarationAfterParameters(
- commentAndMetadata,
- modifiers.externalKeyword,
- modifiers.staticKeyword,
- type,
- methodName,
- null,
- parameters);
- } else if (parseGenericMethods && _tokenMatches(_peek(), TokenType.LT)) {
- return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
- modifiers.externalKeyword, modifiers.staticKeyword, type);
- } else if (_tokenMatches(_peek(), TokenType.OPEN_CURLY_BRACKET)) {
- // We have found "TypeName identifier {", and are guessing that this is a
- // getter without the keyword 'get'.
- _validateModifiersForGetterOrSetterOrMethod(modifiers);
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_GET);
- _currentToken = _injectToken(
- new Parser_SyntheticKeywordToken(Keyword.GET, _currentToken.offset));
- return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
- modifiers.staticKeyword, type);
- }
- return _parseInitializedIdentifierList(commentAndMetadata,
- modifiers.staticKeyword, _validateModifiersForField(modifiers), type);
- }
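// Illustrative note (not part of the original source): class-member shapes this
// method dispatches on, assuming the enclosing class is named `C`.
//
//     void m() {}          // method with a 'void' return type
//     get x => 0;          // getter without a return type
//     C.named(int a);      // constructor: identifier '.' identifier '('
//     C(this.x);           // constructor recognized because the name matches className
//     int x = 0, y = 1;    // field declaration (type followed by initialized identifiers)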
-
- /**
- * Parse a single combinator. Return the combinator that was parsed, or `null`
- * if no combinator is found.
- *
- * combinator ::=
- * 'show' identifier (',' identifier)*
- * | 'hide' identifier (',' identifier)*
- */
- Combinator parseCombinator() {
- if (_matchesString(_SHOW) || _matchesString(_HIDE)) {
- Token keyword = getAndAdvance();
- List<SimpleIdentifier> names = _parseIdentifierList();
- if (keyword.lexeme == _SHOW) {
- return new ShowCombinator(keyword, names);
- } else {
- return new HideCombinator(keyword, names);
- }
- }
- return null;
- }
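// Illustrative note (not part of the original source): combinators as they
// appear in import/export directives (the URIs and names below are made up).
//
//     import 'dart:math' show max, min;
//     export 'src/util.dart' hide internalHelper;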
-
- /**
- * Parse a compilation unit, starting with the given [token]. Return the
- * compilation unit that was parsed.
- */
- CompilationUnit parseCompilationUnit(Token token) {
- _currentToken = token;
- return parseCompilationUnit2();
- }
-
- /**
- * Parse a compilation unit. Return the compilation unit that was parsed.
- *
- * Specified:
- *
- * compilationUnit ::=
- * scriptTag? directive* topLevelDeclaration*
- *
- * Actual:
- *
- * compilationUnit ::=
- * scriptTag? topLevelElement*
- *
- * topLevelElement ::=
- * directive
- * | topLevelDeclaration
- */
- CompilationUnit parseCompilationUnit2() {
- Token firstToken = _currentToken;
- ScriptTag scriptTag = null;
- if (_matches(TokenType.SCRIPT_TAG)) {
- scriptTag = new ScriptTag(getAndAdvance());
- }
- //
- // Even though all directives must appear before declarations and must occur
- // in a given order, we allow directives and declarations to occur in any
- // order so that we can recover better.
- //
- bool libraryDirectiveFound = false;
- bool partOfDirectiveFound = false;
- bool partDirectiveFound = false;
- bool directiveFoundAfterDeclaration = false;
- List<Directive> directives = new List<Directive>();
- List<CompilationUnitMember> declarations =
- new List<CompilationUnitMember>();
- Token memberStart = _currentToken;
- while (!_matches(TokenType.EOF)) {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- if ((_matchesKeyword(Keyword.IMPORT) ||
- _matchesKeyword(Keyword.EXPORT) ||
- _matchesKeyword(Keyword.LIBRARY) ||
- _matchesKeyword(Keyword.PART)) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT) &&
- !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- Directive directive = _parseDirective(commentAndMetadata);
- if (declarations.length > 0 && !directiveFoundAfterDeclaration) {
- _reportErrorForToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION,
- directive.beginToken);
- directiveFoundAfterDeclaration = true;
- }
- if (directive is LibraryDirective) {
- if (libraryDirectiveFound) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES);
- } else {
- if (directives.length > 0) {
- _reportErrorForToken(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST,
- directive.libraryKeyword);
- }
- libraryDirectiveFound = true;
- }
- } else if (directive is PartDirective) {
- partDirectiveFound = true;
- } else if (partDirectiveFound) {
- if (directive is ExportDirective) {
- _reportErrorForToken(
- ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE,
- directive.keyword);
- } else if (directive is ImportDirective) {
- _reportErrorForToken(
- ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE,
- directive.keyword);
- }
- }
- if (directive is PartOfDirective) {
- if (partOfDirectiveFound) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES);
- } else {
- int directiveCount = directives.length;
- for (int i = 0; i < directiveCount; i++) {
- _reportErrorForToken(
- ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART,
- directives[i].keyword);
- }
- partOfDirectiveFound = true;
- }
- } else {
- if (partOfDirectiveFound) {
- _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART,
- directive.keyword);
- }
- }
- directives.add(directive);
- } else if (_matches(TokenType.SEMICOLON)) {
- _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
- [_currentToken.lexeme]);
- _advance();
- } else {
- CompilationUnitMember member =
- _parseCompilationUnitMember(commentAndMetadata);
- if (member != null) {
- declarations.add(member);
- }
- }
- if (identical(_currentToken, memberStart)) {
- _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
- [_currentToken.lexeme]);
- _advance();
- while (!_matches(TokenType.EOF) &&
- !_couldBeStartOfCompilationUnitMember()) {
- _advance();
- }
- }
- memberStart = _currentToken;
- }
- return new CompilationUnit(
- firstToken, scriptTag, directives, declarations, _currentToken);
- }
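// Rough usage sketch (annotation, not from the original source; the constructor
// and listener shapes below are assumptions about this version of the analyzer):
//
//     var listener = new RecordingErrorListener();
//     var reader = new CharSequenceReader("library demo; main() {}");
//     var token = new Scanner(null, reader, listener).tokenize();
//     var unit = new Parser(null, listener).parseCompilationUnit(token);
//
// Directive-ordering problems (e.g. a declaration before an import) are
// reported through the listener but still produce a usable CompilationUnit.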
-
- /**
- * Parse a conditional expression. Return the conditional expression that was
- * parsed.
- *
- * conditionalExpression ::=
- * ifNullExpression ('?' expressionWithoutCascade ':' expressionWithoutCascade)?
- */
- Expression parseConditionalExpression() {
- Expression condition = parseIfNullExpression();
- if (!_matches(TokenType.QUESTION)) {
- return condition;
- }
- Token question = getAndAdvance();
- Expression thenExpression = parseExpressionWithoutCascade();
- Token colon = _expect(TokenType.COLON);
- Expression elseExpression = parseExpressionWithoutCascade();
- return new ConditionalExpression(
- condition, question, thenExpression, colon, elseExpression);
- }
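// Illustrative note (not part of the original source): for input
//
//     a ?? b ? x : y
//
// the condition `a ?? b` is parsed via parseIfNullExpression(); on seeing '?'
// the two cascade-free branch expressions `x` and `y` are parsed in turn.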
-
- /**
- * Parse the name of a constructor. Return the constructor name that was
- * parsed.
- *
- * constructorName ::=
- * type ('.' identifier)?
- */
- ConstructorName parseConstructorName() {
- TypeName type = parseTypeName();
- Token period = null;
- SimpleIdentifier name = null;
- if (_matches(TokenType.PERIOD)) {
- period = getAndAdvance();
- name = parseSimpleIdentifier();
- }
- return new ConstructorName(type, period, name);
- }
-
- /**
- * Parse the script tag and directives in a compilation unit, starting with
- * the given [token], until the first non-directive is encountered. The
- * remainder of the compilation unit will not be parsed. Specifically, if
- * there are directives later in the file, they will not be parsed. Return the
- * compilation unit that was parsed.
- */
- CompilationUnit parseDirectives(Token token) {
- _currentToken = token;
- return _parseDirectives();
- }
-
- /**
- * Parse an expression, starting with the given [token]. Return the expression
- * that was parsed, or `null` if the tokens do not represent a recognizable
- * expression.
- */
- Expression parseExpression(Token token) {
- _currentToken = token;
- return parseExpression2();
- }
-
- /**
- * Parse an expression that might contain a cascade. Return the expression
- * that was parsed.
- *
- * expression ::=
- * assignableExpression assignmentOperator expression
- * | conditionalExpression cascadeSection*
- * | throwExpression
- */
- Expression parseExpression2() {
- if (_matchesKeyword(Keyword.THROW)) {
- return _parseThrowExpression();
- } else if (_matchesKeyword(Keyword.RETHROW)) {
- // TODO(brianwilkerson) Rethrow is a statement again.
- return _parseRethrowExpression();
- }
- //
- // assignableExpression is a subset of conditionalExpression, so we can
- // parse a conditional expression and then determine whether it is followed
- // by an assignmentOperator, checking for conformance to the restricted
- // grammar after making that determination.
- //
- Expression expression = parseConditionalExpression();
- TokenType tokenType = _currentToken.type;
- if (tokenType == TokenType.PERIOD_PERIOD) {
- List<Expression> cascadeSections = new List<Expression>();
- while (tokenType == TokenType.PERIOD_PERIOD) {
- Expression section = _parseCascadeSection();
- if (section != null) {
- cascadeSections.add(section);
- }
- tokenType = _currentToken.type;
- }
- return new CascadeExpression(expression, cascadeSections);
- } else if (tokenType.isAssignmentOperator) {
- Token operator = getAndAdvance();
- _ensureAssignable(expression);
- return new AssignmentExpression(expression, operator, parseExpression2());
- }
- return expression;
- }
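// Illustrative note (not part of the original source): after the leading
// conditional expression, the next token decides the shape of the result.
//
//     buffer..write('a')..write('b')   // '..' starts a CascadeExpression
//     x = y = 0                        // assignment recurses into parseExpression2()
//     throw new StateError('oops')     // 'throw' is handled before the conditional path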
-
- /**
- * Parse an expression that does not contain any cascades. Return the
- * expression that was parsed.
- *
- * expressionWithoutCascade ::=
- * assignableExpression assignmentOperator expressionWithoutCascade
- * | conditionalExpression
- * | throwExpressionWithoutCascade
- */
- Expression parseExpressionWithoutCascade() {
- if (_matchesKeyword(Keyword.THROW)) {
- return _parseThrowExpressionWithoutCascade();
- } else if (_matchesKeyword(Keyword.RETHROW)) {
- return _parseRethrowExpression();
- }
- //
- // assignableExpression is a subset of conditionalExpression, so we can
- // parse a conditional expression and then determine whether it is followed
- // by an assignmentOperator, checking for conformance to the restricted
- // grammar after making that determination.
- //
- Expression expression = parseConditionalExpression();
- if (_currentToken.type.isAssignmentOperator) {
- Token operator = getAndAdvance();
- _ensureAssignable(expression);
- expression = new AssignmentExpression(
- expression, operator, parseExpressionWithoutCascade());
- }
- return expression;
- }
-
- /**
- * Parse a class extends clause. Return the class extends clause that was
- * parsed.
- *
- * classExtendsClause ::=
- * 'extends' type
- */
- ExtendsClause parseExtendsClause() {
- Token keyword = _expectKeyword(Keyword.EXTENDS);
- TypeName superclass = parseTypeName();
- return new ExtendsClause(keyword, superclass);
- }
-
- /**
- * Parse a list of formal parameters. Return the formal parameters that were
- * parsed.
- *
- * formalParameterList ::=
- * '(' ')'
- * | '(' normalFormalParameters (',' optionalFormalParameters)? ')'
- * | '(' optionalFormalParameters ')'
- *
- * normalFormalParameters ::=
- * normalFormalParameter (',' normalFormalParameter)*
- *
- * optionalFormalParameters ::=
- * optionalPositionalFormalParameters
- * | namedFormalParameters
- *
- * optionalPositionalFormalParameters ::=
- * '[' defaultFormalParameter (',' defaultFormalParameter)* ']'
- *
- * namedFormalParameters ::=
- * '{' defaultNamedParameter (',' defaultNamedParameter)* '}'
- */
- FormalParameterList parseFormalParameterList() {
- Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
- if (_matches(TokenType.CLOSE_PAREN)) {
- return new FormalParameterList(
- leftParenthesis, null, null, null, getAndAdvance());
- }
- //
- // Even though it is invalid to have default parameters outside of brackets,
- // required parameters inside of brackets, or multiple groups of default and
- // named parameters, we allow all of these cases so that we can recover
- // better.
- //
- List<FormalParameter> parameters = new List<FormalParameter>();
- List<FormalParameter> normalParameters = new List<FormalParameter>();
- List<FormalParameter> positionalParameters = new List<FormalParameter>();
- List<FormalParameter> namedParameters = new List<FormalParameter>();
- List<FormalParameter> currentParameters = normalParameters;
- Token leftSquareBracket = null;
- Token rightSquareBracket = null;
- Token leftCurlyBracket = null;
- Token rightCurlyBracket = null;
- ParameterKind kind = ParameterKind.REQUIRED;
- bool firstParameter = true;
- bool reportedMultiplePositionalGroups = false;
- bool reportedMultipleNamedGroups = false;
- bool reportedMixedGroups = false;
- bool wasOptionalParameter = false;
- Token initialToken = null;
- do {
- if (firstParameter) {
- firstParameter = false;
- } else if (!_optional(TokenType.COMMA)) {
- // TODO(brianwilkerson) The token is wrong; we need to recover from this
- // case.
- if (_getEndToken(leftParenthesis) != null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]);
- } else {
- _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS,
- _currentToken.previous);
- break;
- }
- }
- initialToken = _currentToken;
- //
- // Handle the beginning of parameter groups.
- //
- if (_matches(TokenType.OPEN_SQUARE_BRACKET)) {
- wasOptionalParameter = true;
- if (leftSquareBracket != null && !reportedMultiplePositionalGroups) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS);
- reportedMultiplePositionalGroups = true;
- }
- if (leftCurlyBracket != null && !reportedMixedGroups) {
- _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
- reportedMixedGroups = true;
- }
- leftSquareBracket = getAndAdvance();
- currentParameters = positionalParameters;
- kind = ParameterKind.POSITIONAL;
- } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- wasOptionalParameter = true;
- if (leftCurlyBracket != null && !reportedMultipleNamedGroups) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS);
- reportedMultipleNamedGroups = true;
- }
- if (leftSquareBracket != null && !reportedMixedGroups) {
- _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
- reportedMixedGroups = true;
- }
- leftCurlyBracket = getAndAdvance();
- currentParameters = namedParameters;
- kind = ParameterKind.NAMED;
- }
- //
- // Parse and record the parameter.
- //
- FormalParameter parameter = _parseFormalParameter(kind);
- parameters.add(parameter);
- currentParameters.add(parameter);
- if (kind == ParameterKind.REQUIRED && wasOptionalParameter) {
- _reportErrorForNode(
- ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter);
- }
- //
- // Handle the end of parameter groups.
- //
- // TODO(brianwilkerson) Improve the detection and reporting of missing and
- // mismatched delimiters.
- if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) {
- rightSquareBracket = getAndAdvance();
- currentParameters = normalParameters;
- if (leftSquareBracket == null) {
- if (leftCurlyBracket != null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
- rightCurlyBracket = rightSquareBracket;
- rightSquareBracket = null;
- } else {
- _reportErrorForCurrentToken(
- ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
- ["["]);
- }
- }
- kind = ParameterKind.REQUIRED;
- } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
- rightCurlyBracket = getAndAdvance();
- currentParameters = normalParameters;
- if (leftCurlyBracket == null) {
- if (leftSquareBracket != null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
- rightSquareBracket = rightCurlyBracket;
- rightCurlyBracket = null;
- } else {
- _reportErrorForCurrentToken(
- ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
- ["{"]);
- }
- }
- kind = ParameterKind.REQUIRED;
- }
- } while (!_matches(TokenType.CLOSE_PAREN) &&
- !identical(initialToken, _currentToken));
- Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
- //
- // Check that the groups were closed correctly.
- //
- if (leftSquareBracket != null && rightSquareBracket == null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
- }
- if (leftCurlyBracket != null && rightCurlyBracket == null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
- }
- //
- // Build the parameter list.
- //
- if (leftSquareBracket == null) {
- leftSquareBracket = leftCurlyBracket;
- }
- if (rightSquareBracket == null) {
- rightSquareBracket = rightCurlyBracket;
- }
- return new FormalParameterList(leftParenthesis, parameters,
- leftSquareBracket, rightSquareBracket, rightParenthesis);
- }
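// Illustrative note (not part of the original source): parameter groups and
// the recovery cases handled above.
//
//     (int a, int b)           // required parameters only
//     (int a, [int b = 0])     // optional positional group -> ParameterKind.POSITIONAL
//     (int a, {int b: 0})      // named group (Dart 1.x ':' default) -> ParameterKind.NAMED
//     ([int a], {int b})       // both group kinds -> MIXED_PARAMETER_GROUPS is reported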
-
- /**
- * Parse a function expression. Return the function expression that was
- * parsed.
- *
- * functionExpression ::=
- * typeParameters? formalParameterList functionExpressionBody
- */
- FunctionExpression parseFunctionExpression() {
- TypeParameterList typeParameters = null;
- if (parseGenericMethods && _matches(TokenType.LT)) {
- typeParameters = parseTypeParameterList();
- }
- FormalParameterList parameters = parseFormalParameterList();
- _validateFormalParameterList(parameters);
- FunctionBody body =
- _parseFunctionBody(false, ParserErrorCode.MISSING_FUNCTION_BODY, true);
- return new FunctionExpression(typeParameters, parameters, body);
- }
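// Illustrative note (not part of the original source): function expressions
// this method accepts (the type parameter form only when generic methods are
// enabled).
//
//     (a, b) => a + b
//     <T>(T x) { return x; }
//     () async => 42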
-
- /**
- * Parse an if-null expression. Return the if-null expression that was
- * parsed.
- *
- * ifNullExpression ::= logicalOrExpression ('??' logicalOrExpression)*
- */
- Expression parseIfNullExpression() {
- Expression expression = parseLogicalOrExpression();
- while (_matches(TokenType.QUESTION_QUESTION)) {
- Token operator = getAndAdvance();
- expression = new BinaryExpression(
- expression, operator, parseLogicalOrExpression());
- }
- return expression;
- }
-
- /**
- * Parse an implements clause. Return the implements clause that was parsed.
- *
- * implementsClause ::=
- * 'implements' type (',' type)*
- */
- ImplementsClause parseImplementsClause() {
- Token keyword = _expectKeyword(Keyword.IMPLEMENTS);
- List<TypeName> interfaces = new List<TypeName>();
- interfaces.add(parseTypeName());
- while (_optional(TokenType.COMMA)) {
- interfaces.add(parseTypeName());
- }
- return new ImplementsClause(keyword, interfaces);
- }
-
- /**
- * Parse a label. Return the label that was parsed.
- *
- * label ::=
- * identifier ':'
- */
- Label parseLabel() {
- SimpleIdentifier label = parseSimpleIdentifier();
- Token colon = _expect(TokenType.COLON);
- return new Label(label, colon);
- }
-
- /**
- * Parse a library identifier. Return the library identifier that was parsed.
- *
- * libraryIdentifier ::=
- * identifier ('.' identifier)*
- */
- LibraryIdentifier parseLibraryIdentifier() {
- List<SimpleIdentifier> components = new List<SimpleIdentifier>();
- components.add(parseSimpleIdentifier());
- while (_matches(TokenType.PERIOD)) {
- _advance();
- components.add(parseSimpleIdentifier());
- }
- return new LibraryIdentifier(components);
- }
-
- /**
- * Parse a logical or expression. Return the logical or expression that was
- * parsed.
- *
- * logicalOrExpression ::=
- * logicalAndExpression ('||' logicalAndExpression)*
- */
- Expression parseLogicalOrExpression() {
- Expression expression = _parseLogicalAndExpression();
- while (_matches(TokenType.BAR_BAR)) {
- Token operator = getAndAdvance();
- expression = new BinaryExpression(
- expression, operator, _parseLogicalAndExpression());
- }
- return expression;
- }
-
- /**
- * Parse a map literal entry. Return the map literal entry that was parsed.
- *
- * mapLiteralEntry ::=
- * expression ':' expression
- */
- MapLiteralEntry parseMapLiteralEntry() {
- Expression key = parseExpression2();
- Token separator = _expect(TokenType.COLON);
- Expression value = parseExpression2();
- return new MapLiteralEntry(key, separator, value);
- }
-
- /**
- * Parse a normal formal parameter. Return the normal formal parameter that
- * was parsed.
- *
- * normalFormalParameter ::=
- * functionSignature
- * | fieldFormalParameter
- * | simpleFormalParameter
- *
- * functionSignature ::=
- * metadata returnType? identifier typeParameters? formalParameterList
- *
- * fieldFormalParameter ::=
- * metadata finalConstVarOrType? 'this' '.' identifier
- *
- * simpleFormalParameter ::=
- * declaredIdentifier
- * | metadata identifier
- */
- NormalFormalParameter parseNormalFormalParameter() {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- FinalConstVarOrType holder = _parseFinalConstVarOrType(true);
- Token thisKeyword = null;
- Token period = null;
- if (_matchesKeyword(Keyword.THIS)) {
- thisKeyword = getAndAdvance();
- period = _expect(TokenType.PERIOD);
- }
- SimpleIdentifier identifier = parseSimpleIdentifier();
- TypeParameterList typeParameters = null;
- if (parseGenericMethods && _matches(TokenType.LT)) {
- typeParameters = parseTypeParameterList();
- }
- if (_matches(TokenType.OPEN_PAREN)) {
- FormalParameterList parameters = parseFormalParameterList();
- if (thisKeyword == null) {
- if (holder.keyword != null) {
- _reportErrorForToken(
- ParserErrorCode.FUNCTION_TYPED_PARAMETER_VAR, holder.keyword);
- }
- return new FunctionTypedFormalParameter(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- holder.type,
- identifier,
- typeParameters,
- parameters);
- } else {
- return new FieldFormalParameter(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- holder.keyword,
- holder.type,
- thisKeyword,
- period,
- identifier,
- typeParameters,
- parameters);
- }
- } else if (typeParameters != null) {
- // TODO(brianwilkerson) Report an error. It looks like a function-typed
- // parameter with no parameter list.
- //_reportErrorForToken(ParserErrorCode.MISSING_PARAMETERS, typeParameters.endToken);
- }
- TypeName type = holder.type;
- if (type != null) {
- if (_tokenMatchesKeyword(type.name.beginToken, Keyword.VOID)) {
- _reportErrorForToken(
- ParserErrorCode.VOID_PARAMETER, type.name.beginToken);
- } else if (holder.keyword != null &&
- _tokenMatchesKeyword(holder.keyword, Keyword.VAR)) {
- _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, holder.keyword);
- }
- }
- if (thisKeyword != null) {
- // TODO(brianwilkerson) If there are type parameters but no parameters,
- // should we create a synthetic empty parameter list here so we can
- // capture the type parameters?
- return new FieldFormalParameter(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- holder.keyword,
- holder.type,
- thisKeyword,
- period,
- identifier,
- null,
- null);
- }
- return new SimpleFormalParameter(commentAndMetadata.comment,
- commentAndMetadata.metadata, holder.keyword, holder.type, identifier);
- }
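// Illustrative note (not part of the original source): the three normal
// parameter shapes distinguished above.
//
//     int count             // simpleFormalParameter
//     this.count            // fieldFormalParameter ('this' '.' identifier)
//     int compare(a, b)     // functionSignature (identifier followed by '(')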
-
- /**
- * Parse a prefixed identifier. Return the prefixed identifier that was
- * parsed.
- *
- * prefixedIdentifier ::=
- * identifier ('.' identifier)?
- */
- Identifier parsePrefixedIdentifier() {
- SimpleIdentifier qualifier = parseSimpleIdentifier();
- if (!_matches(TokenType.PERIOD)) {
- return qualifier;
- }
- Token period = getAndAdvance();
- SimpleIdentifier qualified = parseSimpleIdentifier();
- return new PrefixedIdentifier(qualifier, period, qualified);
- }
-
- /**
- * Parse a return type. Return the return type that was parsed.
- *
- * returnType ::=
- * 'void'
- * | type
- */
- TypeName parseReturnType() {
- if (_matchesKeyword(Keyword.VOID)) {
- return new TypeName(new SimpleIdentifier(getAndAdvance()), null);
- } else {
- return parseTypeName();
- }
- }
-
- /**
- * Parse a simple identifier. Return the simple identifier that was parsed.
- *
- * identifier ::=
- * IDENTIFIER
- */
- SimpleIdentifier parseSimpleIdentifier() {
- if (_matchesIdentifier()) {
- String lexeme = _currentToken.lexeme;
- if ((_inAsync || _inGenerator) &&
- (lexeme == 'async' || lexeme == 'await' || lexeme == 'yield')) {
- _reportErrorForCurrentToken(
- ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER);
- }
- return new SimpleIdentifier(getAndAdvance());
- }
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
- return _createSyntheticIdentifier();
- }
-
- /**
- * Parse a statement, starting with the given [token]. Return the statement
- * that was parsed, or `null` if the tokens do not represent a recognizable
- * statement.
- */
- Statement parseStatement(Token token) {
- _currentToken = token;
- return parseStatement2();
- }
-
- /**
- * Parse a statement. Return the statement that was parsed.
- *
- * statement ::=
- * label* nonLabeledStatement
- */
- Statement parseStatement2() {
- List<Label> labels = new List<Label>();
- while (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) {
- labels.add(parseLabel());
- }
- Statement statement = _parseNonLabeledStatement();
- if (labels.isEmpty) {
- return statement;
- }
- return new LabeledStatement(labels, statement);
- }
-
- /**
- * Parse a sequence of statements, starting with the given [token]. Return the
- * statements that were parsed, or `null` if the tokens do not represent a
- * recognizable sequence of statements.
- */
- List<Statement> parseStatements(Token token) {
- _currentToken = token;
- return _parseStatementList();
- }
-
- /**
- * Parse a string literal. Return the string literal that was parsed.
- *
- * stringLiteral ::=
- * MULTI_LINE_STRING+
- * | SINGLE_LINE_STRING+
- */
- StringLiteral parseStringLiteral() {
- List<StringLiteral> strings = new List<StringLiteral>();
- while (_matches(TokenType.STRING)) {
- Token string = getAndAdvance();
- if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
- _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) {
- strings.add(_parseStringInterpolation(string));
- } else {
- strings.add(new SimpleStringLiteral(
- string, _computeStringValue(string.lexeme, true, true)));
- }
- }
- if (strings.length < 1) {
- _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL);
- return _createSyntheticStringLiteral();
- } else if (strings.length == 1) {
- return strings[0];
- } else {
- return new AdjacentStrings(strings);
- }
- }
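// Illustrative note (not part of the original source):
//
//     'abc'            // a single SimpleStringLiteral
//     'abc' "def"      // adjacent strings -> AdjacentStrings
//     'value: $x'      // interpolation -> _parseStringInterpolation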
-
- /**
- * Parse a list of type arguments. Return the type argument list that was
- * parsed.
- *
- * typeArguments ::=
- * '<' typeList '>'
- *
- * typeList ::=
- * type (',' type)*
- */
- TypeArgumentList parseTypeArgumentList() {
- Token leftBracket = _expect(TokenType.LT);
- List<TypeName> arguments = new List<TypeName>();
- arguments.add(parseTypeName());
- while (_optional(TokenType.COMMA)) {
- arguments.add(parseTypeName());
- }
- Token rightBracket = _expectGt();
- return new TypeArgumentList(leftBracket, arguments, rightBracket);
- }
-
- /**
- * Parse a type name. Return the type name that was parsed.
- *
- * type ::=
- * qualified typeArguments?
- */
- TypeName parseTypeName() {
- Identifier typeName;
- if (_matchesKeyword(Keyword.VAR)) {
- _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME);
- typeName = new SimpleIdentifier(getAndAdvance());
- } else if (_matchesIdentifier()) {
- typeName = parsePrefixedIdentifier();
- } else {
- typeName = _createSyntheticIdentifier();
- _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME);
- }
- TypeArgumentList typeArguments = null;
- if (_matches(TokenType.LT)) {
- typeArguments = parseTypeArgumentList();
- }
- return new TypeName(typeName, typeArguments);
- }
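// Illustrative note (not part of the original source):
//
//     int                        // simple identifier
//     math.Point                 // prefixed identifier (assuming an import prefix `math`)
//     Map<String, List<int>>     // identifier with a type argument list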
-
- /**
- * Parse a type parameter. Return the type parameter that was parsed.
- *
- * typeParameter ::=
- * metadata name ('extends' bound)?
- */
- TypeParameter parseTypeParameter() {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- SimpleIdentifier name = parseSimpleIdentifier();
- if (_matchesKeyword(Keyword.EXTENDS)) {
- Token keyword = getAndAdvance();
- TypeName bound = parseTypeName();
- return new TypeParameter(commentAndMetadata.comment,
- commentAndMetadata.metadata, name, keyword, bound);
- }
- return new TypeParameter(commentAndMetadata.comment,
- commentAndMetadata.metadata, name, null, null);
- }
-
- /**
- * Parse a list of type parameters. Return the list of type parameters that
- * were parsed.
- *
- * typeParameterList ::=
- * '<' typeParameter (',' typeParameter)* '>'
- */
- TypeParameterList parseTypeParameterList() {
- Token leftBracket = _expect(TokenType.LT);
- List<TypeParameter> typeParameters = new List<TypeParameter>();
- typeParameters.add(parseTypeParameter());
- while (_optional(TokenType.COMMA)) {
- typeParameters.add(parseTypeParameter());
- }
- Token rightBracket = _expectGt();
- return new TypeParameterList(leftBracket, typeParameters, rightBracket);
- }
-
- /**
- * Parse a with clause. Return the with clause that was parsed.
- *
- * withClause ::=
- * 'with' typeName (',' typeName)*
- */
- WithClause parseWithClause() {
- Token with2 = _expectKeyword(Keyword.WITH);
- List<TypeName> types = new List<TypeName>();
- types.add(parseTypeName());
- while (_optional(TokenType.COMMA)) {
- types.add(parseTypeName());
- }
- return new WithClause(with2, types);
- }
-
- /**
- * Advance to the next token in the token stream.
- */
- void _advance() {
- _currentToken = _currentToken.next;
- }
-
- /**
- * Append the character equivalent of the given [scalarValue] to the given
- * [buffer]. If the scalar value is invalid, report an error (using the
- * [startIndex] and [endIndex]) and don't append anything to the buffer. The
- * [escapeSequence] is the escape sequence that was parsed to produce the
- * scalar value (used for error reporting).
- */
- void _appendScalarValue(StringBuffer buffer, String escapeSequence,
- int scalarValue, int startIndex, int endIndex) {
- if (scalarValue < 0 ||
- scalarValue > Character.MAX_CODE_POINT ||
- (scalarValue >= 0xD800 && scalarValue <= 0xDFFF)) {
- _reportErrorForCurrentToken(
- ParserErrorCode.INVALID_CODE_POINT, [escapeSequence]);
- return;
- }
- if (scalarValue < Character.MAX_VALUE) {
- buffer.writeCharCode(scalarValue);
- } else {
- buffer.write(Character.toChars(scalarValue));
- }
- }
-
- /**
- * Return the content of a string with the given literal representation. The
- * [lexeme] is the literal representation of the string. The flag [isFirst] is
- * `true` if this is the first token in a string literal. The flag [isLast] is
- * `true` if this is the last token in a string literal.
- */
- String _computeStringValue(String lexeme, bool isFirst, bool isLast) {
- StringLexemeHelper helper = new StringLexemeHelper(lexeme, isFirst, isLast);
- int start = helper.start;
- int end = helper.end;
- bool stringEndsAfterStart = end >= start;
- assert(stringEndsAfterStart);
- if (!stringEndsAfterStart) {
- AnalysisEngine.instance.logger.logError(
- "Internal error: computeStringValue($lexeme, $isFirst, $isLast)");
- return "";
- }
- if (helper.isRaw) {
- return lexeme.substring(start, end);
- }
- StringBuffer buffer = new StringBuffer();
- int index = start;
- while (index < end) {
- index = _translateCharacter(buffer, lexeme, index);
- }
- return buffer.toString();
- }
-
- /**
- * Convert the given [method] declaration into the nearest valid top-level
- * function declaration (that is, the function declaration that most closely
- * captures the components of the given method declaration).
- */
- FunctionDeclaration _convertToFunctionDeclaration(MethodDeclaration method) =>
- new FunctionDeclaration(
- method.documentationComment,
- method.metadata,
- method.externalKeyword,
- method.returnType,
- method.propertyKeyword,
- method.name,
- new FunctionExpression(
- method.typeParameters, method.parameters, method.body));
-
- /**
- * Return `true` if the current token could be the start of a compilation unit
- * member. This method is used for recovery purposes to decide when to stop
- * skipping tokens after finding an error while parsing a compilation unit
- * member.
- */
- bool _couldBeStartOfCompilationUnitMember() {
- if ((_matchesKeyword(Keyword.IMPORT) ||
- _matchesKeyword(Keyword.EXPORT) ||
- _matchesKeyword(Keyword.LIBRARY) ||
- _matchesKeyword(Keyword.PART)) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT)) {
- // This looks like the start of a directive
- return true;
- } else if (_matchesKeyword(Keyword.CLASS)) {
- // This looks like the start of a class definition
- return true;
- } else if (_matchesKeyword(Keyword.TYPEDEF) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT)) {
- // This looks like the start of a typedef
- return true;
- } else if (_matchesKeyword(Keyword.VOID) ||
- ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
- _tokenMatchesIdentifier(_peek())) ||
- (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek()))) {
- // This looks like the start of a function
- return true;
- } else if (_matchesIdentifier()) {
- if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- // This looks like the start of a function
- return true;
- }
- Token token = _skipReturnType(_currentToken);
- if (token == null) {
- return false;
- }
- if (_matchesKeyword(Keyword.GET) ||
- _matchesKeyword(Keyword.SET) ||
- (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) ||
- _matchesIdentifier()) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Return a synthetic identifier.
- */
- SimpleIdentifier _createSyntheticIdentifier() {
- Token syntheticToken;
- if (_currentToken.type == TokenType.KEYWORD) {
- // Consider current keyword token as an identifier.
- // This is not always correct, e.g. "^is T" where "^" marks the place for
- // the synthetic identifier. By creating a SyntheticStringToken we can
- // distinguish a real identifier from a synthetic one. In code completion,
- // the behavior will depend on the cursor position: before or on "is".
- syntheticToken = _injectToken(new SyntheticStringToken(
- TokenType.IDENTIFIER, _currentToken.lexeme, _currentToken.offset));
- } else {
- syntheticToken = _createSyntheticToken(TokenType.IDENTIFIER);
- }
- return new SimpleIdentifier(syntheticToken);
- }
-
- /**
- * Return a synthetic token representing the given [keyword].
- */
- Token _createSyntheticKeyword(Keyword keyword) => _injectToken(
- new Parser_SyntheticKeywordToken(keyword, _currentToken.offset));
-
- /**
- * Return a synthetic string literal.
- */
- SimpleStringLiteral _createSyntheticStringLiteral() =>
- new SimpleStringLiteral(_createSyntheticToken(TokenType.STRING), "");
-
- /**
- * Return a synthetic token with the given [type].
- */
- Token _createSyntheticToken(TokenType type) =>
- _injectToken(new StringToken(type, "", _currentToken.offset));
-
- /**
- * Create and return a new token with the given [type]. The token will replace
- * the first portion of the given [token], so it will have the same offset and
- * will have any comments that might have preceded the token.
- */
- Token _createToken(Token token, TokenType type, {bool isBegin: false}) {
- CommentToken comments = token.precedingComments;
- if (comments == null) {
- if (isBegin) {
- return new BeginToken(type, token.offset);
- }
- return new Token(type, token.offset);
- } else if (isBegin) {
- return new BeginTokenWithComment(type, token.offset, comments);
- }
- return new TokenWithComment(type, token.offset, comments);
- }
-
- /**
- * Check that the given [expression] is assignable and report an error if it
- * isn't.
- *
- * assignableExpression ::=
- * primary (arguments* assignableSelector)+
- * | 'super' unconditionalAssignableSelector
- * | identifier
- *
- * unconditionalAssignableSelector ::=
- * '[' expression ']'
- * | '.' identifier
- *
- * assignableSelector ::=
- * unconditionalAssignableSelector
- * | '?.' identifier
- */
- void _ensureAssignable(Expression expression) {
- if (expression != null && !expression.isAssignable) {
- _reportErrorForCurrentToken(
- ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE);
- }
- }
-
- /**
- * If the current token has the expected type, return it after advancing to
- * the next token. Otherwise report an error and return the current token
- * without advancing.
- *
- * Note that the method [_expectGt] should be used if the argument to this
- * method would be [TokenType.GT].
- *
- * The [type] is the type of token that is expected.
- */
- Token _expect(TokenType type) {
- if (_matches(type)) {
- return getAndAdvance();
- }
- // Remove uses of this method in favor of matches?
- // Pass in the error code to use to report the error?
- if (type == TokenType.SEMICOLON) {
- if (_tokenMatches(_currentToken.next, TokenType.SEMICOLON)) {
- _reportErrorForCurrentToken(
- ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]);
- _advance();
- return getAndAdvance();
- }
- _reportErrorForToken(ParserErrorCode.EXPECTED_TOKEN,
- _currentToken.previous, [type.lexeme]);
- } else {
- _reportErrorForCurrentToken(
- ParserErrorCode.EXPECTED_TOKEN, [type.lexeme]);
- }
- return _currentToken;
- }
-
- /**
- * If the current token has the type [TokenType.GT], return it after advancing
- * to the next token. Otherwise report an error and return the current token
- * without advancing.
- */
- Token _expectGt() {
- if (_matchesGt()) {
- return getAndAdvance();
- }
- _reportErrorForCurrentToken(
- ParserErrorCode.EXPECTED_TOKEN, [TokenType.GT.lexeme]);
- return _currentToken;
- }
-
- /**
- * If the current token is a keyword matching the given [keyword], return it
- * after advancing to the next token. Otherwise report an error and return the
- * current token without advancing.
- */
- Token _expectKeyword(Keyword keyword) {
- if (_matchesKeyword(keyword)) {
- return getAndAdvance();
- }
- // Remove uses of this method in favor of matches?
- // Pass in the error code to use to report the error?
- _reportErrorForCurrentToken(
- ParserErrorCode.EXPECTED_TOKEN, [keyword.syntax]);
- return _currentToken;
- }
-
- /**
- * If the current token is a semicolon, return it after advancing to the next
- * token. Otherwise report an error and create a synthetic semicolon.
- */
- Token _expectSemicolon() {
- // TODO(scheglov) consider pushing this behavior into [_expect]
- if (_matches(TokenType.SEMICOLON)) {
- return getAndAdvance();
- } else {
- _reportErrorForToken(
- ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, [";"]);
- return _createSyntheticToken(TokenType.SEMICOLON);
- }
- }
-
- /**
- * Search the given list of [ranges] for a range that contains the given
- * [index]. Return the range that was found, or `null` if none of the ranges
- * contain the index.
- */
- List<int> _findRange(List<List<int>> ranges, int index) {
- int rangeCount = ranges.length;
- for (int i = 0; i < rangeCount; i++) {
- List<int> range = ranges[i];
- if (range[0] <= index && index <= range[1]) {
- return range;
- } else if (index < range[0]) {
- return null;
- }
- }
- return null;
- }
-
- /**
- * Return a list of the ranges of characters in the given [comment] that
- * should be treated as code blocks.
- */
- List<List<int>> _getCodeBlockRanges(String comment) {
- List<List<int>> ranges = new List<List<int>>();
- int length = comment.length;
- if (length < 3) {
- return ranges;
- }
- int index = 0;
- int firstChar = comment.codeUnitAt(0);
- if (firstChar == 0x2F) { // '/'
- int secondChar = comment.codeUnitAt(1);
- int thirdChar = comment.codeUnitAt(2);
- if ((secondChar == 0x2A && thirdChar == 0x2A) || // "/**"
- (secondChar == 0x2F && thirdChar == 0x2F)) { // "///"
- index = 3;
- }
- }
- while (index < length) {
- int currentChar = comment.codeUnitAt(index);
- if (currentChar == 0xD || currentChar == 0xA) { // CR or LF
- index = index + 1;
- while (index < length &&
- Character.isWhitespace(comment.codeUnitAt(index))) {
- index = index + 1;
- }
- if (StringUtilities.startsWith6(
- comment, index, 0x2A, 0x20, 0x20, 0x20, 0x20, 0x20)) {
- int end = index + 6;
- while (end < length &&
- comment.codeUnitAt(end) != 0xD &&
- comment.codeUnitAt(end) != 0xA) {
- end = end + 1;
- }
- ranges.add(<int>[index, end]);
- index = end;
- }
- } else if (index + 1 < length &&
- currentChar == 0x5B &&
- comment.codeUnitAt(index + 1) == 0x3A) {
- int end = StringUtilities.indexOf2(comment, index + 2, 0x3A, 0x5D);
- if (end < 0) {
- end = length;
- }
- ranges.add(<int>[index, end]);
- index = end + 1;
- } else {
- index = index + 1;
- }
- }
- return ranges;
- }
-
- /**
- * Return the end token associated with the given [beginToken], or `null` if
- * either the given token is not a begin token or it does not have an end
- * token associated with it.
- */
- Token _getEndToken(Token beginToken) {
- if (beginToken is BeginToken) {
- return beginToken.endToken;
- }
- return null;
- }
-
- /**
- * Inject the given [token] into the token stream immediately before the
- * current token.
- */
- Token _injectToken(Token token) {
- Token previous = _currentToken.previous;
- token.setNext(_currentToken);
- previous.setNext(token);
- return token;
- }
-
- /**
- * Return `true` if the current token appears to be the beginning of a
- * function declaration.
- */
- bool _isFunctionDeclaration() {
- if (_matchesKeyword(Keyword.VOID)) {
- return true;
- }
- Token afterReturnType = _skipTypeName(_currentToken);
- if (afterReturnType == null) {
- // There was no return type, but it is optional, so go back to where we
- // started.
- afterReturnType = _currentToken;
- }
- Token afterIdentifier = _skipSimpleIdentifier(afterReturnType);
- if (afterIdentifier == null) {
- // It's possible that we parsed the function name as if it were a type
- // name, so see whether it makes sense if we assume that there is no type.
- afterIdentifier = _skipSimpleIdentifier(_currentToken);
- }
- if (afterIdentifier == null) {
- return false;
- }
- if (_isFunctionExpression(afterIdentifier)) {
- return true;
- }
- // It's possible that we have found a getter. While this isn't valid at this
- // point, we test for it in order to recover better.
- if (_matchesKeyword(Keyword.GET)) {
- Token afterName = _skipSimpleIdentifier(_currentToken.next);
- if (afterName == null) {
- return false;
- }
- return _tokenMatches(afterName, TokenType.FUNCTION) ||
- _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET);
- } else if (_tokenMatchesKeyword(afterReturnType, Keyword.GET)) {
- Token afterName = _skipSimpleIdentifier(afterReturnType.next);
- if (afterName == null) {
- return false;
- }
- return _tokenMatches(afterName, TokenType.FUNCTION) ||
- _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET);
- }
- return false;
- }
-
- /**
- * Return `true` if the given [token] appears to be the beginning of a
- * function expression.
- */
- bool _isFunctionExpression(Token token) {
- // Function expressions aren't allowed in initializer lists.
- if (_inInitializer) {
- return false;
- }
- Token afterTypeParameters = _skipTypeParameterList(token);
- if (afterTypeParameters == null) {
- afterTypeParameters = token;
- }
- Token afterParameters = _skipFormalParameterList(afterTypeParameters);
- if (afterParameters == null) {
- return false;
- }
- if (afterParameters
- .matchesAny([TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION])) {
- return true;
- }
- String lexeme = afterParameters.lexeme;
- return lexeme == ASYNC || lexeme == SYNC;
- }
-
- /**
- * Return `true` if the given [character] is a valid hexadecimal digit.
- */
- bool _isHexDigit(int character) => (0x30 <= character && character <= 0x39) ||
- (0x41 <= character && character <= 0x46) ||
- (0x61 <= character && character <= 0x66);
-
- /**
- * Return `true` if the current token is the first token in an initialized
- * variable declaration rather than an expression. This method assumes that we
- * have already skipped past any metadata that might be associated with the
- * declaration.
- *
- * initializedVariableDeclaration ::=
- * declaredIdentifier ('=' expression)? (',' initializedIdentifier)*
- *
- * declaredIdentifier ::=
- * metadata finalConstVarOrType identifier
- *
- * finalConstVarOrType ::=
- * 'final' type?
- * | 'const' type?
- * | 'var'
- * | type
- *
- * type ::=
- * qualified typeArguments?
- *
- * initializedIdentifier ::=
- * identifier ('=' expression)?
- */
- bool _isInitializedVariableDeclaration() {
- if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.VAR)) {
- // An expression cannot start with a keyword other than 'const',
- // 'rethrow', or 'throw'.
- return true;
- }
- if (_matchesKeyword(Keyword.CONST)) {
- // Look to see whether we might be at the start of a list or map literal;
- // otherwise this should be the start of a variable declaration.
- return !_peek().matchesAny([
- TokenType.LT,
- TokenType.OPEN_CURLY_BRACKET,
- TokenType.OPEN_SQUARE_BRACKET,
- TokenType.INDEX
- ]);
- }
- bool allowAdditionalTokens = true;
- // We know that we have an identifier, and need to see whether it might be
- // a type name.
- if (_currentToken.type != TokenType.IDENTIFIER) {
- allowAdditionalTokens = false;
- }
- Token token = _skipTypeName(_currentToken);
- if (token == null) {
- // There was no type name, so this can't be a declaration.
- return false;
- }
- if (token.type != TokenType.IDENTIFIER) {
- allowAdditionalTokens = false;
- }
- token = _skipSimpleIdentifier(token);
- if (token == null) {
- return false;
- }
- TokenType type = token.type;
- // Usual cases in valid code:
- // String v = '';
- // String v, v2;
- // String v;
- // for (String item in items) {}
- if (type == TokenType.EQ ||
- type == TokenType.COMMA ||
- type == TokenType.SEMICOLON ||
- _tokenMatchesKeyword(token, Keyword.IN)) {
- return true;
- }
- // It is OK to parse as a variable declaration in these cases:
- // String v }
- // String v if (true) print('OK');
- // String v { print(42); }
- // ...but not in these cases:
- // get getterName {
- // String get getterName
- if (allowAdditionalTokens) {
- if (type == TokenType.CLOSE_CURLY_BRACKET ||
- type == TokenType.KEYWORD ||
- type == TokenType.IDENTIFIER ||
- type == TokenType.OPEN_CURLY_BRACKET) {
- return true;
- }
- }
- return false;
- }
-
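// Annotation (not in the original source): _isLikelyParameterList has no doc
// comment in this version. It returns `true` if the current token appears to
// start a parameter list: either '(' directly, or, when generic-method parsing
// is enabled, a type argument list such as `<int, String>` immediately
// followed by '('.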
- bool _isLikelyParameterList() {
- if (_matches(TokenType.OPEN_PAREN)) {
- return true;
- }
- if (!parseGenericMethods) {
- return false;
- }
- Token token = _skipTypeArgumentList(_currentToken);
- return token != null && _tokenMatches(token, TokenType.OPEN_PAREN);
- }
-
- /**
- * Given that we have just found bracketed text within the given [comment],
- * look to see whether that text is (a) followed by a parenthesized link
- * address, (b) followed by a colon, or (c) followed by optional whitespace
- * and another square bracket. The [rightIndex] is the index of the right
- * bracket. Return `true` if the bracketed text is followed by a link address.
- *
- * This method uses the syntax described by the
- * <a href="http://daringfireball.net/projects/markdown/syntax">markdown</a>
- * project.
- */
- bool _isLinkText(String comment, int rightIndex) {
- int length = comment.length;
- int index = rightIndex + 1;
- if (index >= length) {
- return false;
- }
- int nextChar = comment.codeUnitAt(index);
- if (nextChar == 0x28 || nextChar == 0x3A) { // '(' or ':'
- return true;
- }
- while (Character.isWhitespace(nextChar)) {
- index = index + 1;
- if (index >= length) {
- return false;
- }
- nextChar = comment.codeUnitAt(index);
- }
- return nextChar == 0x5B; // '['
- }
-
- /**
- * Return `true` if the given [startToken] appears to be the beginning of an
- * operator declaration.
- */
- bool _isOperator(Token startToken) {
- // Accept any operator here, even if it is not user definable.
- if (!startToken.isOperator) {
- return false;
- }
- // A "=" token means that this is actually a field initializer.
- if (startToken.type == TokenType.EQ) {
- return false;
- }
- // Consume all operator tokens.
- Token token = startToken.next;
- while (token.isOperator) {
- token = token.next;
- }
- // A formal parameter list is expected now.
- return _tokenMatches(token, TokenType.OPEN_PAREN);
- }
-
- /**
- * Return `true` if the current token appears to be the beginning of a switch
- * member.
- */
- bool _isSwitchMember() {
- Token token = _currentToken;
- while (_tokenMatches(token, TokenType.IDENTIFIER) &&
- _tokenMatches(token.next, TokenType.COLON)) {
- token = token.next.next;
- }
- if (token.type == TokenType.KEYWORD) {
- Keyword keyword = (token as KeywordToken).keyword;
- return keyword == Keyword.CASE || keyword == Keyword.DEFAULT;
- }
- return false;
- }
-
- /**
- * Return `true` if the [startToken] appears to be the first token of a type
- * name that is followed by a variable or field formal parameter.
- */
- bool _isTypedIdentifier(Token startToken) {
- Token token = _skipReturnType(startToken);
- if (token == null) {
- return false;
- } else if (_tokenMatchesIdentifier(token)) {
- return true;
- } else if (_tokenMatchesKeyword(token, Keyword.THIS) &&
- _tokenMatches(token.next, TokenType.PERIOD) &&
- _tokenMatchesIdentifier(token.next.next)) {
- return true;
- } else if (_tokenMatchesKeyword(startToken, Keyword.VOID)) {
- // The keyword 'void' isn't a valid identifier, so it should be assumed to
- // be a type name.
- return true;
- } else if (startToken.next != token &&
- !_tokenMatches(token, TokenType.OPEN_PAREN)) {
- // The type is more than a simple identifier, so it should be assumed to
- // be a type name.
- return true;
- }
- return false;
- }
-
- /**
-   * Increment the error reporting lock level. If the level is greater than
-   * `0`, then [reportError] won't report any errors.
- */
- void _lockErrorListener() {
- _errorListenerLock++;
- }
-
- /**
- * Return `true` if the current token has the given [type]. Note that the
- * method [_matchesGt] should be used if the argument to this method would be
- * [TokenType.GT].
- */
- bool _matches(TokenType type) => _currentToken.type == type;
-
- /**
-   * Return `true` if the current token has a type of [TokenType.GT]. Note that
-   * this method, unlike other variants, will modify the token stream when
-   * necessary to match the desired type. In particular, if the current token is
-   * '>>', '>=', or '>>=', it will be split so that a single '>' token can be
-   * consumed, and `true` will be returned.
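-   *
-   * For example, when parsing a type such as
-   *
-   *     List<List<int>>
-   *
-   * the trailing '>>' token is split into two '>' tokens so that both type
-   * argument lists can be closed.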
- */
- bool _matchesGt() {
- TokenType currentType = _currentToken.type;
- if (currentType == TokenType.GT) {
- return true;
- } else if (currentType == TokenType.GT_GT) {
- Token first = _createToken(_currentToken, TokenType.GT);
- Token second = new Token(TokenType.GT, _currentToken.offset + 1);
- second.setNext(_currentToken.next);
- first.setNext(second);
- _currentToken.previous.setNext(first);
- _currentToken = first;
- return true;
- } else if (currentType == TokenType.GT_EQ) {
- Token first = _createToken(_currentToken, TokenType.GT);
- Token second = new Token(TokenType.EQ, _currentToken.offset + 1);
- second.setNext(_currentToken.next);
- first.setNext(second);
- _currentToken.previous.setNext(first);
- _currentToken = first;
- return true;
- } else if (currentType == TokenType.GT_GT_EQ) {
- int offset = _currentToken.offset;
- Token first = _createToken(_currentToken, TokenType.GT);
- Token second = new Token(TokenType.GT, offset + 1);
- Token third = new Token(TokenType.EQ, offset + 2);
- third.setNext(_currentToken.next);
- second.setNext(third);
- first.setNext(second);
- _currentToken.previous.setNext(first);
- _currentToken = first;
- return true;
- }
- return false;
- }
-
- /**
- * Return `true` if the current token is a valid identifier. Valid identifiers
- * include built-in identifiers (pseudo-keywords).
- */
- bool _matchesIdentifier() => _tokenMatchesIdentifier(_currentToken);
-
- /**
- * Return `true` if the current token matches the given [keyword].
- */
- bool _matchesKeyword(Keyword keyword) =>
- _tokenMatchesKeyword(_currentToken, keyword);
-
- /**
- * Return `true` if the current token matches the given [identifier].
- */
- bool _matchesString(String identifier) =>
- _currentToken.type == TokenType.IDENTIFIER &&
- _currentToken.lexeme == identifier;
-
- /**
- * If the current token has the given [type], then advance to the next token
- * and return `true`. Otherwise, return `false` without advancing. This method
- * should not be invoked with an argument value of [TokenType.GT].
- */
- bool _optional(TokenType type) {
- if (_matches(type)) {
- _advance();
- return true;
- }
- return false;
- }
-
- /**
- * Parse an additive expression. Return the additive expression that was
- * parsed.
- *
- * additiveExpression ::=
- * multiplicativeExpression (additiveOperator multiplicativeExpression)*
- * | 'super' (additiveOperator multiplicativeExpression)+
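-   *
-   * For example (operands illustrative), this parses expressions such as:
-   *
-   *     x + y - z
-   *     super + 1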
- */
- Expression _parseAdditiveExpression() {
- Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
- _currentToken.next.type.isAdditiveOperator) {
- expression = new SuperExpression(getAndAdvance());
- } else {
- expression = _parseMultiplicativeExpression();
- }
- while (_currentToken.type.isAdditiveOperator) {
- Token operator = getAndAdvance();
- expression = new BinaryExpression(
- expression, operator, _parseMultiplicativeExpression());
- }
- return expression;
- }
-
- /**
- * Parse an assert statement. Return the assert statement.
- *
- * assertStatement ::=
- * 'assert' '(' conditionalExpression ')' ';'
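-   *
-   * For example (names illustrative), this parses statements such as:
-   *
-   *     assert(index < length);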
- */
- AssertStatement _parseAssertStatement() {
- Token keyword = _expectKeyword(Keyword.ASSERT);
- Token leftParen = _expect(TokenType.OPEN_PAREN);
- Expression expression = parseExpression2();
- if (expression is AssignmentExpression) {
- _reportErrorForNode(
- ParserErrorCode.ASSERT_DOES_NOT_TAKE_ASSIGNMENT, expression);
- } else if (expression is CascadeExpression) {
- _reportErrorForNode(
- ParserErrorCode.ASSERT_DOES_NOT_TAKE_CASCADE, expression);
- } else if (expression is ThrowExpression) {
- _reportErrorForNode(
- ParserErrorCode.ASSERT_DOES_NOT_TAKE_THROW, expression);
- } else if (expression is RethrowExpression) {
- _reportErrorForNode(
- ParserErrorCode.ASSERT_DOES_NOT_TAKE_RETHROW, expression);
- }
- Token rightParen = _expect(TokenType.CLOSE_PAREN);
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new AssertStatement(
- keyword, leftParen, expression, rightParen, semicolon);
- }
-
- /**
- * Parse an assignable expression. The [primaryAllowed] is `true` if the
- * expression is allowed to be a primary without any assignable selector.
- * Return the assignable expression that was parsed.
- *
- * assignableExpression ::=
- * primary (arguments* assignableSelector)+
- * | 'super' unconditionalAssignableSelector
- * | identifier
- */
- Expression _parseAssignableExpression(bool primaryAllowed) {
- if (_matchesKeyword(Keyword.SUPER)) {
- return _parseAssignableSelector(
- new SuperExpression(getAndAdvance()), false,
- allowConditional: false);
- }
- //
- // A primary expression can start with an identifier. We resolve the
- // ambiguity by determining whether the primary consists of anything other
- // than an identifier and/or is followed by an assignableSelector.
- //
- Expression expression = _parsePrimaryExpression();
- bool isOptional = primaryAllowed || expression is SimpleIdentifier;
- while (true) {
- while (_isLikelyParameterList()) {
- TypeArgumentList typeArguments = null;
- if (_matches(TokenType.LT)) {
- typeArguments = parseTypeArgumentList();
- }
- ArgumentList argumentList = parseArgumentList();
- if (expression is SimpleIdentifier) {
- expression = new MethodInvocation(null, null,
- expression as SimpleIdentifier, typeArguments, argumentList);
- } else if (expression is PrefixedIdentifier) {
- PrefixedIdentifier identifier = expression as PrefixedIdentifier;
- expression = new MethodInvocation(
- identifier.prefix,
- identifier.period,
- identifier.identifier,
- typeArguments,
- argumentList);
- } else if (expression is PropertyAccess) {
- PropertyAccess access = expression as PropertyAccess;
- expression = new MethodInvocation(access.target, access.operator,
- access.propertyName, typeArguments, argumentList);
- } else {
- expression = new FunctionExpressionInvocation(
- expression, typeArguments, argumentList);
- }
- if (!primaryAllowed) {
- isOptional = false;
- }
- }
- Expression selectorExpression = _parseAssignableSelector(
- expression, isOptional || (expression is PrefixedIdentifier));
- if (identical(selectorExpression, expression)) {
- if (!isOptional && (expression is PrefixedIdentifier)) {
- PrefixedIdentifier identifier = expression as PrefixedIdentifier;
- expression = new PropertyAccess(
- identifier.prefix, identifier.period, identifier.identifier);
- }
- return expression;
- }
- expression = selectorExpression;
- isOptional = true;
- }
- }
-
- /**
- * Parse an assignable selector. The [prefix] is the expression preceding the
- * selector. The [optional] is `true` if the selector is optional. Return the
- * assignable selector that was parsed, or the original prefix if there was no
- * assignable selector. If [allowConditional] is false, then the '?.'
- * operator will still be parsed, but a parse error will be generated.
- *
- * unconditionalAssignableSelector ::=
- * '[' expression ']'
- * | '.' identifier
- *
- * assignableSelector ::=
- * unconditionalAssignableSelector
- * | '?.' identifier
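-   *
-   * For example (names illustrative), this parses selectors such as:
-   *
-   *     [index]
-   *     .name
-   *     ?.name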
- */
- Expression _parseAssignableSelector(Expression prefix, bool optional,
- {bool allowConditional: true}) {
- if (_matches(TokenType.OPEN_SQUARE_BRACKET)) {
- Token leftBracket = getAndAdvance();
- bool wasInInitializer = _inInitializer;
- _inInitializer = false;
- try {
- Expression index = parseExpression2();
- Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET);
- return new IndexExpression.forTarget(
- prefix, leftBracket, index, rightBracket);
- } finally {
- _inInitializer = wasInInitializer;
- }
- } else if (_matches(TokenType.PERIOD) ||
- _matches(TokenType.QUESTION_PERIOD)) {
- if (_matches(TokenType.QUESTION_PERIOD) && !allowConditional) {
- _reportErrorForCurrentToken(
- ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [_currentToken.lexeme]);
- }
- Token operator = getAndAdvance();
- return new PropertyAccess(prefix, operator, parseSimpleIdentifier());
- } else {
- if (!optional) {
- // Report the missing selector.
- _reportErrorForCurrentToken(
- ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR);
- }
- return prefix;
- }
- }
-
- /**
-   * Parse an await expression. Return the await expression that was parsed.
- *
- * awaitExpression ::=
- * 'await' unaryExpression
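-   *
-   * For example (name illustrative), this parses expressions such as:
-   *
-   *     await response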
- */
- AwaitExpression _parseAwaitExpression() {
- Token awaitToken = getAndAdvance();
- Expression expression = _parseUnaryExpression();
- return new AwaitExpression(awaitToken, expression);
- }
-
- /**
- * Parse a bitwise and expression. Return the bitwise and expression that was
- * parsed.
- *
- * bitwiseAndExpression ::=
- * shiftExpression ('&' shiftExpression)*
- * | 'super' ('&' shiftExpression)+
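-   *
-   * For example (operands illustrative), this parses expressions such as:
-   *
-   *     flags & mask
-   *     super & mask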
- */
- Expression _parseBitwiseAndExpression() {
- Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
- _tokenMatches(_peek(), TokenType.AMPERSAND)) {
- expression = new SuperExpression(getAndAdvance());
- } else {
- expression = _parseShiftExpression();
- }
- while (_matches(TokenType.AMPERSAND)) {
- Token operator = getAndAdvance();
- expression =
- new BinaryExpression(expression, operator, _parseShiftExpression());
- }
- return expression;
- }
-
- /**
- * Parse a bitwise exclusive-or expression. Return the bitwise exclusive-or
- * expression that was parsed.
- *
- * bitwiseXorExpression ::=
- * bitwiseAndExpression ('^' bitwiseAndExpression)*
- * | 'super' ('^' bitwiseAndExpression)+
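-   *
-   * For example (operands illustrative), this parses expressions such as:
-   *
-   *     bits ^ key
-   *     super ^ key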
- */
- Expression _parseBitwiseXorExpression() {
- Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
- _tokenMatches(_peek(), TokenType.CARET)) {
- expression = new SuperExpression(getAndAdvance());
- } else {
- expression = _parseBitwiseAndExpression();
- }
- while (_matches(TokenType.CARET)) {
- Token operator = getAndAdvance();
- expression = new BinaryExpression(
- expression, operator, _parseBitwiseAndExpression());
- }
- return expression;
- }
-
- /**
- * Parse a break statement. Return the break statement that was parsed.
- *
- * breakStatement ::=
- * 'break' identifier? ';'
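-   *
-   * For example (label illustrative), this parses statements such as:
-   *
-   *     break;
-   *     break outerLoop;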
- */
- Statement _parseBreakStatement() {
- Token breakKeyword = _expectKeyword(Keyword.BREAK);
- SimpleIdentifier label = null;
- if (_matchesIdentifier()) {
- label = parseSimpleIdentifier();
- }
- if (!_inLoop && !_inSwitch && label == null) {
- _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword);
- }
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new BreakStatement(breakKeyword, label, semicolon);
- }
-
- /**
- * Parse a cascade section. Return the expression representing the cascaded
- * method invocation.
- *
- * cascadeSection ::=
- * '..' (cascadeSelector typeArguments? arguments*)
- * (assignableSelector typeArguments? arguments*)* cascadeAssignment?
- *
- * cascadeSelector ::=
- * '[' expression ']'
- * | identifier
- *
- * cascadeAssignment ::=
- * assignmentOperator expressionWithoutCascade
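-   *
-   * For example (names illustrative), this parses cascade sections such as:
-   *
-   *     ..add(item)
-   *     ..[0] = item
-   *     ..length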
- */
- Expression _parseCascadeSection() {
- Token period = _expect(TokenType.PERIOD_PERIOD);
- Expression expression = null;
- SimpleIdentifier functionName = null;
- if (_matchesIdentifier()) {
- functionName = parseSimpleIdentifier();
- } else if (_currentToken.type == TokenType.OPEN_SQUARE_BRACKET) {
- Token leftBracket = getAndAdvance();
- bool wasInInitializer = _inInitializer;
- _inInitializer = false;
- try {
- Expression index = parseExpression2();
- Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET);
- expression = new IndexExpression.forCascade(
- period, leftBracket, index, rightBracket);
- period = null;
- } finally {
- _inInitializer = wasInInitializer;
- }
- } else {
- _reportErrorForToken(ParserErrorCode.MISSING_IDENTIFIER, _currentToken,
- [_currentToken.lexeme]);
- functionName = _createSyntheticIdentifier();
- }
- assert((expression == null && functionName != null) ||
- (expression != null && functionName == null));
- if (_isLikelyParameterList()) {
- while (_isLikelyParameterList()) {
- TypeArgumentList typeArguments = null;
- if (_matches(TokenType.LT)) {
- typeArguments = parseTypeArgumentList();
- }
- if (functionName != null) {
- expression = new MethodInvocation(expression, period, functionName,
- typeArguments, parseArgumentList());
- period = null;
- functionName = null;
- } else if (expression == null) {
- // It should not be possible to get here.
- expression = new MethodInvocation(expression, period,
- _createSyntheticIdentifier(), typeArguments, parseArgumentList());
- } else {
- expression = new FunctionExpressionInvocation(
- expression, typeArguments, parseArgumentList());
- }
- }
- } else if (functionName != null) {
- expression = new PropertyAccess(expression, period, functionName);
- period = null;
- }
- assert(expression != null);
- bool progress = true;
- while (progress) {
- progress = false;
- Expression selector = _parseAssignableSelector(expression, true);
- if (!identical(selector, expression)) {
- expression = selector;
- progress = true;
- while (_isLikelyParameterList()) {
- TypeArgumentList typeArguments = null;
- if (_matches(TokenType.LT)) {
- typeArguments = parseTypeArgumentList();
- }
- if (expression is PropertyAccess) {
- PropertyAccess propertyAccess = expression as PropertyAccess;
- expression = new MethodInvocation(
- propertyAccess.target,
- propertyAccess.operator,
- propertyAccess.propertyName,
- typeArguments,
- parseArgumentList());
- } else {
- expression = new FunctionExpressionInvocation(
- expression, typeArguments, parseArgumentList());
- }
- }
- }
- }
- if (_currentToken.type.isAssignmentOperator) {
- Token operator = getAndAdvance();
- _ensureAssignable(expression);
- expression = new AssignmentExpression(
- expression, operator, parseExpressionWithoutCascade());
- }
- return expression;
+ ClassDeclaration classDeclaration = new ClassDeclaration(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ abstractKeyword,
+ keyword,
+ name,
+ typeParameters,
+ extendsClause,
+ withClause,
+ implementsClause,
+ leftBracket,
+ members,
+ rightBracket);
+ classDeclaration.nativeClause = nativeClause;
+ return classDeclaration;
}
/**
- * Parse a class declaration. The [commentAndMetadata] is the metadata to be
- * associated with the member. The [abstractKeyword] is the token for the
- * keyword 'abstract', or `null` if the keyword was not given. Return the
- * class declaration that was parsed.
+ * Parse a class member. The [className] is the name of the class containing
+ * the member being parsed. Return the class member that was parsed, or `null`
+ * if what was found was not a valid class member.
*
- * classDeclaration ::=
- * metadata 'abstract'? 'class' name typeParameterList? (extendsClause withClause?)? implementsClause? '{' classMembers '}' |
- * metadata 'abstract'? 'class' mixinApplicationClass
+ * classMemberDefinition ::=
+ * declaration ';'
+ * | methodSignature functionBody
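+   *
+   * For example (member names illustrative), this parses members such as:
+   *
+   *     int count = 0;
+   *     void clear() { _items.clear(); }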
*/
- CompilationUnitMember _parseClassDeclaration(
- CommentAndMetadata commentAndMetadata, Token abstractKeyword) {
- Token keyword = _expectKeyword(Keyword.CLASS);
- if (_matchesIdentifier()) {
+ ClassMember parseClassMember(String className) {
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ Modifiers modifiers = parseModifiers();
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.VOID) {
+ TypeName returnType =
+ new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+ keyword = _currentToken.keyword;
Token next = _peek();
- if (_tokenMatches(next, TokenType.LT)) {
- next = _skipTypeParameterList(next);
- if (next != null && _tokenMatches(next, TokenType.EQ)) {
- return _parseClassTypeAlias(
- commentAndMetadata, abstractKeyword, keyword);
+ bool isFollowedByIdentifier = _tokenMatchesIdentifier(next);
+ if (keyword == Keyword.GET && isFollowedByIdentifier) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return parseGetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, returnType);
+ } else if (keyword == Keyword.SET && isFollowedByIdentifier) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return parseSetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, returnType);
+ } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
+ _validateModifiersForOperator(modifiers);
+ return _parseOperatorAfterKeyword(commentAndMetadata,
+ modifiers.externalKeyword, returnType, getAndAdvance());
+ } else if (_matchesIdentifier() &&
+ _peek().matchesAny(const <TokenType>[
+ TokenType.OPEN_PAREN,
+ TokenType.OPEN_CURLY_BRACKET,
+ TokenType.FUNCTION,
+ TokenType.LT
+ ])) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
+ modifiers.externalKeyword, modifiers.staticKeyword, returnType);
+ } else {
+ //
+ // We have found an error of some kind. Try to recover.
+ //
+ if (_matchesIdentifier()) {
+ if (_peek().matchesAny(const <TokenType>[
+ TokenType.EQ,
+ TokenType.COMMA,
+ TokenType.SEMICOLON
+ ])) {
+ //
+ // We appear to have a variable declaration with a type of "void".
+ //
+ _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType);
+ return parseInitializedIdentifierList(
+ commentAndMetadata,
+ modifiers.staticKeyword,
+ _validateModifiersForField(modifiers),
+ returnType);
+ }
}
- } else if (_tokenMatches(next, TokenType.EQ)) {
- return _parseClassTypeAlias(
- commentAndMetadata, abstractKeyword, keyword);
+ if (_isOperator(_currentToken)) {
+ //
+ // We appear to have found an operator declaration without the
+ // 'operator' keyword.
+ //
+ _validateModifiersForOperator(modifiers);
+ return parseOperator(
+ commentAndMetadata, modifiers.externalKeyword, returnType);
+ }
+ _reportErrorForToken(
+ ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
+ return null;
}
}
- SimpleIdentifier name = parseSimpleIdentifier();
- String className = name.name;
- TypeParameterList typeParameters = null;
- if (_matches(TokenType.LT)) {
- typeParameters = parseTypeParameterList();
+ Token next = _peek();
+ bool isFollowedByIdentifier = _tokenMatchesIdentifier(next);
+ if (keyword == Keyword.GET && isFollowedByIdentifier) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return parseGetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, null);
+ } else if (keyword == Keyword.SET && isFollowedByIdentifier) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return parseSetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, null);
+ } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
+ _validateModifiersForOperator(modifiers);
+ return _parseOperatorAfterKeyword(
+ commentAndMetadata, modifiers.externalKeyword, null, getAndAdvance());
+ } else if (!_matchesIdentifier()) {
+ //
+ // Recover from an error.
+ //
+ if (_matchesKeyword(Keyword.CLASS)) {
+ _reportErrorForCurrentToken(ParserErrorCode.CLASS_IN_CLASS);
+ // TODO(brianwilkerson) We don't currently have any way to capture the
+ // class that was parsed.
+ parseClassDeclaration(commentAndMetadata, null);
+ return null;
+ } else if (_matchesKeyword(Keyword.ABSTRACT) &&
+ _tokenMatchesKeyword(_peek(), Keyword.CLASS)) {
+ _reportErrorForToken(ParserErrorCode.CLASS_IN_CLASS, _peek());
+ // TODO(brianwilkerson) We don't currently have any way to capture the
+ // class that was parsed.
+ parseClassDeclaration(commentAndMetadata, getAndAdvance());
+ return null;
+ } else if (_matchesKeyword(Keyword.ENUM)) {
+ _reportErrorForToken(ParserErrorCode.ENUM_IN_CLASS, _peek());
+ // TODO(brianwilkerson) We don't currently have any way to capture the
+ // enum that was parsed.
+ parseEnumDeclaration(commentAndMetadata);
+ return null;
+ } else if (_isOperator(_currentToken)) {
+ //
+ // We appear to have found an operator declaration without the
+ // 'operator' keyword.
+ //
+ _validateModifiersForOperator(modifiers);
+ return parseOperator(
+ commentAndMetadata, modifiers.externalKeyword, null);
+ }
+ Token keyword = modifiers.varKeyword;
+ if (keyword == null) {
+ keyword = modifiers.finalKeyword;
+ }
+ if (keyword == null) {
+ keyword = modifiers.constKeyword;
+ }
+ if (keyword != null) {
+ //
+ // We appear to have found an incomplete field declaration.
+ //
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
+ VariableDeclaration variable =
+ new VariableDeclaration(createSyntheticIdentifier(), null, null);
+ List<VariableDeclaration> variables = <VariableDeclaration>[variable];
+ return new FieldDeclaration(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ null,
+ new VariableDeclarationList(null, null, keyword, null, variables),
+ _expect(TokenType.SEMICOLON));
+ }
+ _reportErrorForToken(
+ ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
+ if (commentAndMetadata.comment != null ||
+ commentAndMetadata.hasMetadata) {
+ //
+ // We appear to have found an incomplete declaration at the end of the
+        // class. At this point it consists only of metadata, which we don't
+        // want to lose, so we'll treat it as a method declaration with a
+        // missing name, missing parameters, and an empty body.
+ //
+ return new MethodDeclaration(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ null,
+ null,
+ null,
+ null,
+ null,
+ createSyntheticIdentifier(isDeclaration: true),
+ null,
+ new FormalParameterList(
+ null, <FormalParameter>[], null, null, null),
+ new EmptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON)));
+ }
+ return null;
+ } else if (_tokenMatches(next, TokenType.PERIOD) &&
+ _tokenMatchesIdentifier(_peekAt(2)) &&
+ _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) {
+ return _parseConstructor(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ _validateModifiersForConstructor(modifiers),
+ modifiers.factoryKeyword,
+ parseSimpleIdentifier(),
+ getAndAdvance(),
+ parseSimpleIdentifier(isDeclaration: true),
+ parseFormalParameterList());
+ } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) {
+ TypeName returnType = _parseOptionalTypeNameComment();
+ SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true);
+ TypeParameterList typeParameters = _parseGenericCommentTypeParameters();
+ FormalParameterList parameters = parseFormalParameterList();
+ if (_matches(TokenType.COLON) ||
+ modifiers.factoryKeyword != null ||
+ methodName.name == className) {
+ return _parseConstructor(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ _validateModifiersForConstructor(modifiers),
+ modifiers.factoryKeyword,
+ new SimpleIdentifier(methodName.token, isDeclaration: false),
+ null,
+ null,
+ parameters);
+ }
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ _validateFormalParameterList(parameters);
+ return _parseMethodDeclarationAfterParameters(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ modifiers.staticKeyword,
+ returnType,
+ methodName,
+ typeParameters,
+ parameters);
+ } else if (next.matchesAny(const <TokenType>[
+ TokenType.EQ,
+ TokenType.COMMA,
+ TokenType.SEMICOLON
+ ])) {
+ if (modifiers.constKeyword == null &&
+ modifiers.finalKeyword == null &&
+ modifiers.varKeyword == null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
+ }
+ return parseInitializedIdentifierList(commentAndMetadata,
+ modifiers.staticKeyword, _validateModifiersForField(modifiers), null);
+ } else if (keyword == Keyword.TYPEDEF) {
+ _reportErrorForCurrentToken(ParserErrorCode.TYPEDEF_IN_CLASS);
+ // TODO(brianwilkerson) We don't currently have any way to capture the
+ // function type alias that was parsed.
+ _parseFunctionTypeAlias(commentAndMetadata, getAndAdvance());
+ return null;
+ } else if (parseGenericMethods) {
+ Token token = _skipTypeParameterList(_peek());
+ if (token != null && _tokenMatches(token, TokenType.OPEN_PAREN)) {
+ return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
+ modifiers.externalKeyword, modifiers.staticKeyword, null);
+ }
}
- //
- // Parse the clauses. The parser accepts clauses in any order, but will
- // generate errors if they are not in the order required by the
- // specification.
- //
- ExtendsClause extendsClause = null;
- WithClause withClause = null;
- ImplementsClause implementsClause = null;
- bool foundClause = true;
- while (foundClause) {
- if (_matchesKeyword(Keyword.EXTENDS)) {
- if (extendsClause == null) {
- extendsClause = parseExtendsClause();
- if (withClause != null) {
- _reportErrorForToken(
- ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword);
- } else if (implementsClause != null) {
- _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS,
- implementsClause.implementsKeyword);
- }
- } else {
- _reportErrorForToken(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES,
- extendsClause.extendsKeyword);
- parseExtendsClause();
- }
- } else if (_matchesKeyword(Keyword.WITH)) {
- if (withClause == null) {
- withClause = parseWithClause();
- if (implementsClause != null) {
- _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_WITH,
- implementsClause.implementsKeyword);
- }
- } else {
- _reportErrorForToken(
- ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword);
- parseWithClause();
- // TODO(brianwilkerson) Should we merge the list of applied mixins
- // into a single list?
- }
- } else if (_matchesKeyword(Keyword.IMPLEMENTS)) {
- if (implementsClause == null) {
- implementsClause = parseImplementsClause();
- } else {
- _reportErrorForToken(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES,
- implementsClause.implementsKeyword);
- parseImplementsClause();
- // TODO(brianwilkerson) Should we merge the list of implemented
- // classes into a single list?
- }
- } else {
- foundClause = false;
+ TypeName type = _parseTypeNameAfterIdentifier();
+ keyword = _currentToken.keyword;
+ next = _peek();
+ isFollowedByIdentifier = _tokenMatchesIdentifier(next);
+ if (keyword == Keyword.GET && isFollowedByIdentifier) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return parseGetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, type);
+ } else if (keyword == Keyword.SET && isFollowedByIdentifier) {
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ return parseSetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, type);
+ } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
+ _validateModifiersForOperator(modifiers);
+ return _parseOperatorAfterKeyword(
+ commentAndMetadata, modifiers.externalKeyword, type, getAndAdvance());
+ } else if (!_matchesIdentifier()) {
+ if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
+ //
+ // We appear to have found an incomplete declaration at the end of the
+ // class. At this point it consists of a type name, so we'll treat it as
+ // a field declaration with a missing field name and semicolon.
+ //
+ return parseInitializedIdentifierList(
+ commentAndMetadata,
+ modifiers.staticKeyword,
+ _validateModifiersForField(modifiers),
+ type);
}
- }
- if (withClause != null && extendsClause == null) {
+ if (_isOperator(_currentToken)) {
+ //
+ // We appear to have found an operator declaration without the
+ // 'operator' keyword.
+ //
+ _validateModifiersForOperator(modifiers);
+ return parseOperator(
+ commentAndMetadata, modifiers.externalKeyword, type);
+ }
+ //
+ // We appear to have found an incomplete declaration before another
+ // declaration. At this point it consists of a type name, so we'll treat
+ // it as a field declaration with a missing field name and semicolon.
+ //
_reportErrorForToken(
- ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword);
- }
- //
- // Look for and skip over the extra-lingual 'native' specification.
- //
- NativeClause nativeClause = null;
- if (_matchesString(_NATIVE) && _tokenMatches(_peek(), TokenType.STRING)) {
- nativeClause = _parseNativeClause();
- }
- //
- // Parse the body of the class.
- //
- Token leftBracket = null;
- List<ClassMember> members = null;
- Token rightBracket = null;
- if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
- members = _parseClassMembers(className, _getEndToken(leftBracket));
- rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
- } else {
- leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET);
- rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET);
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY);
- }
- ClassDeclaration classDeclaration = new ClassDeclaration(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- abstractKeyword,
- keyword,
- name,
- typeParameters,
- extendsClause,
- withClause,
- implementsClause,
- leftBracket,
- members,
- rightBracket);
- classDeclaration.nativeClause = nativeClause;
- return classDeclaration;
- }
-
- /**
- * Parse a list of class members. The [className] is the name of the class
- * whose members are being parsed. The [closingBracket] is the closing bracket
- * for the class, or `null` if the closing bracket is missing. Return the list
- * of class members that were parsed.
- *
- * classMembers ::=
- * (metadata memberDefinition)*
- */
- List<ClassMember> _parseClassMembers(String className, Token closingBracket) {
- List<ClassMember> members = new List<ClassMember>();
- Token memberStart = _currentToken;
- while (!_matches(TokenType.EOF) &&
- !_matches(TokenType.CLOSE_CURLY_BRACKET) &&
- (closingBracket != null ||
- (!_matchesKeyword(Keyword.CLASS) &&
- !_matchesKeyword(Keyword.TYPEDEF)))) {
- if (_matches(TokenType.SEMICOLON)) {
- _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
- [_currentToken.lexeme]);
- _advance();
- } else {
- ClassMember member = parseClassMember(className);
- if (member != null) {
- members.add(member);
- }
+ ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
+ try {
+ _lockErrorListener();
+ return parseInitializedIdentifierList(
+ commentAndMetadata,
+ modifiers.staticKeyword,
+ _validateModifiersForField(modifiers),
+ type);
+ } finally {
+ _unlockErrorListener();
}
- if (identical(_currentToken, memberStart)) {
- _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
- [_currentToken.lexeme]);
- _advance();
+ } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) {
+ SimpleIdentifier methodName =
+ _parseSimpleIdentifierUnchecked(isDeclaration: true);
+ TypeParameterList typeParameters = _parseGenericCommentTypeParameters();
+ FormalParameterList parameters = parseFormalParameterList();
+ if (methodName.name == className) {
+ _reportErrorForNode(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, type);
+ return _parseConstructor(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ _validateModifiersForConstructor(modifiers),
+ modifiers.factoryKeyword,
+ new SimpleIdentifier(methodName.token, isDeclaration: true),
+ null,
+ null,
+ parameters);
}
- memberStart = _currentToken;
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ _validateFormalParameterList(parameters);
+ return _parseMethodDeclarationAfterParameters(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ modifiers.staticKeyword,
+ type,
+ methodName,
+ typeParameters,
+ parameters);
+ } else if (parseGenericMethods && _tokenMatches(next, TokenType.LT)) {
+ return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
+ modifiers.externalKeyword, modifiers.staticKeyword, type);
+ } else if (_tokenMatches(next, TokenType.OPEN_CURLY_BRACKET)) {
+ // We have found "TypeName identifier {", and are guessing that this is a
+ // getter without the keyword 'get'.
+ _validateModifiersForGetterOrSetterOrMethod(modifiers);
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_GET);
+ _currentToken = _injectToken(
+ new Parser_SyntheticKeywordToken(Keyword.GET, _currentToken.offset));
+ return parseGetter(commentAndMetadata, modifiers.externalKeyword,
+ modifiers.staticKeyword, type);
}
- return members;
+ return parseInitializedIdentifierList(commentAndMetadata,
+ modifiers.staticKeyword, _validateModifiersForField(modifiers), type);
}
/**
@@ -4803,77 +1505,59 @@ class Parser {
* the 'abstract' keyword. The [classKeyword] is the token representing the
* 'class' keyword. Return the class type alias that was parsed.
*
+ * This method assumes that the current token matches an identifier.
+ *
* classTypeAlias ::=
* identifier typeParameters? '=' 'abstract'? mixinApplication
*
* mixinApplication ::=
* type withClause implementsClause? ';'
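+   *
+   * For example (type names illustrative), given a declaration such as
+   *
+   *     class C = Base with Mixin implements Comparable<C>;
+   *
+   * this method parses everything that follows the 'class' keyword.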
*/
- ClassTypeAlias _parseClassTypeAlias(CommentAndMetadata commentAndMetadata,
+ ClassTypeAlias parseClassTypeAlias(CommentAndMetadata commentAndMetadata,
Token abstractKeyword, Token classKeyword) {
- SimpleIdentifier className = parseSimpleIdentifier();
+ SimpleIdentifier className =
+ _parseSimpleIdentifierUnchecked(isDeclaration: true);
TypeParameterList typeParameters = null;
if (_matches(TokenType.LT)) {
typeParameters = parseTypeParameterList();
}
- Token equals = _expect(TokenType.EQ);
- TypeName superclass = parseTypeName();
- WithClause withClause = null;
- if (_matchesKeyword(Keyword.WITH)) {
- withClause = parseWithClause();
- } else {
- _reportErrorForCurrentToken(
- ParserErrorCode.EXPECTED_TOKEN, [Keyword.WITH.syntax]);
- }
- ImplementsClause implementsClause = null;
- if (_matchesKeyword(Keyword.IMPLEMENTS)) {
- implementsClause = parseImplementsClause();
- }
- Token semicolon;
- if (_matches(TokenType.SEMICOLON)) {
- semicolon = getAndAdvance();
- } else {
- if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- _reportErrorForCurrentToken(
- ParserErrorCode.EXPECTED_TOKEN, [TokenType.SEMICOLON.lexeme]);
- Token leftBracket = getAndAdvance();
- _parseClassMembers(className.name, _getEndToken(leftBracket));
- _expect(TokenType.CLOSE_CURLY_BRACKET);
- } else {
- _reportErrorForToken(ParserErrorCode.EXPECTED_TOKEN,
- _currentToken.previous, [TokenType.SEMICOLON.lexeme]);
- }
- semicolon = _createSyntheticToken(TokenType.SEMICOLON);
+ return _parseClassTypeAliasAfterName(commentAndMetadata, abstractKeyword,
+ classKeyword, className, typeParameters);
+ }
+
+ /**
+ * Parse a single combinator. Return the combinator that was parsed, or `null`
+ * if no combinator is found.
+ *
+ * combinator ::=
+ * 'show' identifier (',' identifier)*
+ * | 'hide' identifier (',' identifier)*
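+   *
+   * For example (names illustrative), this parses combinators such as:
+   *
+   *     show max, min
+   *     hide Random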
+ */
+ Combinator parseCombinator() {
+ if (_matchesString(_SHOW)) {
+ return new ShowCombinator(getAndAdvance(), parseIdentifierList());
+ } else if (_matchesString(_HIDE)) {
+ return new HideCombinator(getAndAdvance(), parseIdentifierList());
}
- return new ClassTypeAlias(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- classKeyword,
- className,
- typeParameters,
- equals,
- abstractKeyword,
- superclass,
- withClause,
- implementsClause,
- semicolon);
+ return null;
}
/**
* Parse a list of combinators in a directive. Return the combinators that
- * were parsed.
+ * were parsed, or `null` if there are no combinators.
*
* combinator ::=
* 'show' identifier (',' identifier)*
* | 'hide' identifier (',' identifier)*
*/
- List<Combinator> _parseCombinators() {
- List<Combinator> combinators = new List<Combinator>();
+ List<Combinator> parseCombinators() {
+ List<Combinator> combinators = null;
while (true) {
Combinator combinator = parseCombinator();
if (combinator == null) {
break;
}
+ combinators ??= <Combinator>[];
combinators.add(combinator);
}
return combinators;
@@ -4889,17 +1573,21 @@ class Parser {
* metadata ::=
* annotation*
*/
- CommentAndMetadata _parseCommentAndMetadata() {
- Comment comment = _parseDocumentationComment();
- List<Annotation> metadata = new List<Annotation>();
+ CommentAndMetadata parseCommentAndMetadata() {
+    // TODO(brianwilkerson) Consider making the creation of documentation
+    // comments lazy.
+ List<DocumentationCommentToken> tokens = parseDocumentationCommentTokens();
+ List<Annotation> metadata = null;
while (_matches(TokenType.AT)) {
+ metadata ??= <Annotation>[];
metadata.add(parseAnnotation());
- Comment optionalComment = _parseDocumentationComment();
- if (optionalComment != null) {
- comment = optionalComment;
+ List<DocumentationCommentToken> optionalTokens =
+ parseDocumentationCommentTokens();
+ if (optionalTokens != null) {
+ tokens = optionalTokens;
}
}
- return new CommentAndMetadata(comment, metadata);
+ return new CommentAndMetadata(parseDocumentationComment(tokens), metadata);
}
/**
@@ -4912,15 +1600,10 @@ class Parser {
* commentReference ::=
* 'new'? prefixedIdentifier
*/
- CommentReference _parseCommentReference(
+ CommentReference parseCommentReference(
String referenceSource, int sourceOffset) {
// TODO(brianwilkerson) The errors are not getting the right offset/length
// and are being duplicated.
- if (referenceSource.length == 0) {
- Token syntheticToken =
- new SyntheticStringToken(TokenType.IDENTIFIER, "", sourceOffset);
- return new CommentReference(null, new SimpleIdentifier(syntheticToken));
- }
try {
BooleanErrorListener listener = new BooleanErrorListener();
Scanner scanner = new Scanner(
@@ -4930,21 +1613,63 @@ class Parser {
if (listener.errorReported) {
return null;
}
+ if (firstToken.type == TokenType.EOF) {
+ Token syntheticToken =
+ new SyntheticStringToken(TokenType.IDENTIFIER, "", sourceOffset);
+ syntheticToken.setNext(firstToken);
+ return new CommentReference(null, new SimpleIdentifier(syntheticToken));
+ }
Token newKeyword = null;
if (_tokenMatchesKeyword(firstToken, Keyword.NEW)) {
newKeyword = firstToken;
firstToken = firstToken.next;
}
- if (_tokenMatchesIdentifier(firstToken)) {
+ if (firstToken.isUserDefinableOperator) {
+ if (firstToken.next.type != TokenType.EOF) {
+ return null;
+ }
+ Identifier identifier = new SimpleIdentifier(firstToken);
+ return new CommentReference(null, identifier);
+ } else if (_tokenMatchesKeyword(firstToken, Keyword.OPERATOR)) {
+ Token secondToken = firstToken.next;
+ if (secondToken.isUserDefinableOperator) {
+ if (secondToken.next.type != TokenType.EOF) {
+ return null;
+ }
+ Identifier identifier = new SimpleIdentifier(secondToken);
+ return new CommentReference(null, identifier);
+ }
+ return null;
+ } else if (_tokenMatchesIdentifier(firstToken)) {
Token secondToken = firstToken.next;
Token thirdToken = secondToken.next;
Token nextToken;
Identifier identifier;
- if (_tokenMatches(secondToken, TokenType.PERIOD) &&
- _tokenMatchesIdentifier(thirdToken)) {
- identifier = new PrefixedIdentifier(new SimpleIdentifier(firstToken),
- secondToken, new SimpleIdentifier(thirdToken));
- nextToken = thirdToken.next;
+ if (_tokenMatches(secondToken, TokenType.PERIOD)) {
+ if (thirdToken.isUserDefinableOperator) {
+ identifier = new PrefixedIdentifier(
+ new SimpleIdentifier(firstToken),
+ secondToken,
+ new SimpleIdentifier(thirdToken));
+ nextToken = thirdToken.next;
+ } else if (_tokenMatchesKeyword(thirdToken, Keyword.OPERATOR)) {
+ Token fourthToken = thirdToken.next;
+ if (fourthToken.isUserDefinableOperator) {
+ identifier = new PrefixedIdentifier(
+ new SimpleIdentifier(firstToken),
+ secondToken,
+ new SimpleIdentifier(fourthToken));
+ nextToken = fourthToken.next;
+ } else {
+ return null;
+ }
+ } else if (_tokenMatchesIdentifier(thirdToken)) {
+ identifier = new PrefixedIdentifier(
+ new SimpleIdentifier(firstToken),
+ secondToken,
+ new SimpleIdentifier(thirdToken));
+ nextToken = thirdToken.next;
+ }
} else {
identifier = new SimpleIdentifier(firstToken);
nextToken = firstToken.next;
@@ -4953,16 +1678,19 @@ class Parser {
return null;
}
return new CommentReference(newKeyword, identifier);
- } else if (_tokenMatchesKeyword(firstToken, Keyword.THIS) ||
- _tokenMatchesKeyword(firstToken, Keyword.NULL) ||
- _tokenMatchesKeyword(firstToken, Keyword.TRUE) ||
- _tokenMatchesKeyword(firstToken, Keyword.FALSE)) {
- // TODO(brianwilkerson) If we want to support this we will need to
- // extend the definition of CommentReference to take an expression
- // rather than an identifier. For now we just ignore it to reduce the
- // number of errors produced, but that's probably not a valid long term
- // approach.
- return null;
+ } else {
+ Keyword keyword = firstToken.keyword;
+ if (keyword == Keyword.THIS ||
+ keyword == Keyword.NULL ||
+ keyword == Keyword.TRUE ||
+ keyword == Keyword.FALSE) {
+ // TODO(brianwilkerson) If we want to support this we will need to
+ // extend the definition of CommentReference to take an expression
+ // rather than an identifier. For now we just ignore it to reduce the
+ // number of errors produced, but that's probably not a valid long term
+ // approach.
+ return null;
+ }
}
} catch (exception) {
// Ignored because we assume that it wasn't a real comment reference.
@@ -4982,11 +1710,25 @@ class Parser {
* libraryReference ::=
* '(' stringLiteral ')'
*/
- List<CommentReference> _parseCommentReferences(
+ List<CommentReference> parseCommentReferences(
List<DocumentationCommentToken> tokens) {
- List<CommentReference> references = new List<CommentReference>();
+ List<CommentReference> references = <CommentReference>[];
+ bool isInGitHubCodeBlock = false;
for (DocumentationCommentToken token in tokens) {
String comment = token.lexeme;
+ // Skip GitHub code blocks.
+ // https://help.github.com/articles/creating-and-highlighting-code-blocks/
+ if (tokens.length != 1) {
+ if (comment.indexOf('```') != -1) {
+ isInGitHubCodeBlock = !isInGitHubCodeBlock;
+ }
+ if (isInGitHubCodeBlock) {
+ continue;
+ }
+ }
+      // Remove GitHub inline code.
+ comment = _removeGitHubInlineCode(comment);
+ // Find references.
int length = comment.length;
List<List<int>> codeBlockRanges = _getCodeBlockRanges(comment);
int leftIndex = comment.indexOf('[');
@@ -4994,7 +1736,7 @@ class Parser {
List<int> range = _findRange(codeBlockRanges, leftIndex);
if (range == null) {
int nameOffset = token.offset + leftIndex + 1;
- int rightIndex = JavaString.indexOf(comment, ']', leftIndex);
+ int rightIndex = comment.indexOf(']', leftIndex);
if (rightIndex >= 0) {
int firstChar = comment.codeUnitAt(leftIndex + 1);
if (firstChar != 0x27 && firstChar != 0x22) {
@@ -5002,7 +1744,7 @@ class Parser {
// TODO(brianwilkerson) Handle the case where there's a library
// URI in the link text.
} else {
- CommentReference reference = _parseCommentReference(
+ CommentReference reference = parseCommentReference(
comment.substring(leftIndex + 1, rightIndex), nameOffset);
if (reference != null) {
references.add(reference);
@@ -5010,33 +1752,199 @@ class Parser {
}
}
}
- } else {
- // terminating ']' is not typed yet
- int charAfterLeft = comment.codeUnitAt(leftIndex + 1);
- if (Character.isLetterOrDigit(charAfterLeft)) {
- int nameEnd = StringUtilities.indexOfFirstNotLetterDigit(
- comment, leftIndex + 1);
- String name = comment.substring(leftIndex + 1, nameEnd);
- Token nameToken =
- new StringToken(TokenType.IDENTIFIER, name, nameOffset);
- references.add(
- new CommentReference(null, new SimpleIdentifier(nameToken)));
+ } else {
+ // terminating ']' is not typed yet
+ int charAfterLeft = comment.codeUnitAt(leftIndex + 1);
+ Token nameToken;
+ if (Character.isLetterOrDigit(charAfterLeft)) {
+ int nameEnd = StringUtilities.indexOfFirstNotLetterDigit(
+ comment, leftIndex + 1);
+ String name = comment.substring(leftIndex + 1, nameEnd);
+ nameToken =
+ new StringToken(TokenType.IDENTIFIER, name, nameOffset);
+ } else {
+ nameToken = new SyntheticStringToken(
+ TokenType.IDENTIFIER, '', nameOffset);
+ }
+ nameToken.setNext(new SimpleToken(TokenType.EOF, nameToken.end));
+ references.add(
+ new CommentReference(null, new SimpleIdentifier(nameToken)));
+ token.references.add(nameToken);
+ // next character
+ rightIndex = leftIndex + 1;
+ }
+ leftIndex = comment.indexOf('[', rightIndex);
+ } else {
+ leftIndex = comment.indexOf('[', range[1]);
+ }
+ }
+ }
+ return references;
+ }
+
+ /**
+ * Parse a compilation unit, starting with the given [token]. Return the
+ * compilation unit that was parsed.
+ */
+ CompilationUnit parseCompilationUnit(Token token) {
+ _currentToken = token;
+ return parseCompilationUnit2();
+ }
+
+ /**
+ * Parse a compilation unit. Return the compilation unit that was parsed.
+ *
+ * Specified:
+ *
+ * compilationUnit ::=
+ * scriptTag? directive* topLevelDeclaration*
+ *
+ * Actual:
+ *
+ * compilationUnit ::=
+ * scriptTag? topLevelElement*
+ *
+ * topLevelElement ::=
+ * directive
+ * | topLevelDeclaration
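+   *
+   * For example (names illustrative), this parses units such as:
+   *
+   *     library app;
+   *     import 'dart:math';
+   *     int square(int x) => x * x;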
+ */
+ CompilationUnit parseCompilationUnit2() {
+ Token firstToken = _currentToken;
+ ScriptTag scriptTag = null;
+ if (_matches(TokenType.SCRIPT_TAG)) {
+ scriptTag = new ScriptTag(getAndAdvance());
+ }
+ //
+ // Even though all directives must appear before declarations and must occur
+ // in a given order, we allow directives and declarations to occur in any
+ // order so that we can recover better.
+ //
+ bool libraryDirectiveFound = false;
+ bool partOfDirectiveFound = false;
+ bool partDirectiveFound = false;
+ bool directiveFoundAfterDeclaration = false;
+ List<Directive> directives = <Directive>[];
+ List<CompilationUnitMember> declarations = <CompilationUnitMember>[];
+ Token memberStart = _currentToken;
+ TokenType type = _currentToken.type;
+ while (type != TokenType.EOF) {
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ Keyword keyword = _currentToken.keyword;
+ TokenType nextType = _currentToken.next.type;
+ if ((keyword == Keyword.IMPORT ||
+ keyword == Keyword.EXPORT ||
+ keyword == Keyword.LIBRARY ||
+ keyword == Keyword.PART) &&
+ nextType != TokenType.PERIOD &&
+ nextType != TokenType.LT &&
+ nextType != TokenType.OPEN_PAREN) {
+ Directive parseDirective() {
+ if (keyword == Keyword.IMPORT) {
+ if (partDirectiveFound) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE);
+ }
+ return parseImportDirective(commentAndMetadata);
+ } else if (keyword == Keyword.EXPORT) {
+ if (partDirectiveFound) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE);
+ }
+ return parseExportDirective(commentAndMetadata);
+ } else if (keyword == Keyword.LIBRARY) {
+ if (libraryDirectiveFound) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES);
+ } else {
+ if (directives.length > 0) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST);
+ }
+ libraryDirectiveFound = true;
+ }
+ return parseLibraryDirective(commentAndMetadata);
+ } else if (keyword == Keyword.PART) {
+ if (_tokenMatchesString(_peek(), _OF)) {
+ partOfDirectiveFound = true;
+ return _parsePartOfDirective(commentAndMetadata);
} else {
- Token nameToken = new SyntheticStringToken(
- TokenType.IDENTIFIER, "", nameOffset);
- references.add(
- new CommentReference(null, new SimpleIdentifier(nameToken)));
+ partDirectiveFound = true;
+ return _parsePartDirective(commentAndMetadata);
}
- // next character
- rightIndex = leftIndex + 1;
+ } else {
+ // Internal error: this method should not have been invoked if the
+ // current token was something other than one of the above.
+ throw new StateError(
+ "parseDirective invoked in an invalid state (currentToken = $_currentToken)");
+ }
+ }
+
+ Directive directive = parseDirective();
+ if (declarations.length > 0 && !directiveFoundAfterDeclaration) {
+ _reportErrorForToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION,
+ directive.beginToken);
+ directiveFoundAfterDeclaration = true;
+ }
+ directives.add(directive);
+ } else if (type == TokenType.SEMICOLON) {
+ // TODO(brianwilkerson) Consider moving this error detection into
+ // _parseCompilationUnitMember (in the places where EXPECTED_EXECUTABLE
+ // is being generated).
+ _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
+ [_currentToken.lexeme]);
+ _advance();
+ } else {
+ CompilationUnitMember member =
+ parseCompilationUnitMember(commentAndMetadata);
+ if (member != null) {
+ declarations.add(member);
+ }
+ }
+ if (identical(_currentToken, memberStart)) {
+ _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
+ [_currentToken.lexeme]);
+ _advance();
+ while (!_matches(TokenType.EOF) &&
+ !_couldBeStartOfCompilationUnitMember()) {
+ _advance();
+ }
+ }
+ memberStart = _currentToken;
+ type = _currentToken.type;
+ }
+ if (partOfDirectiveFound && directives.length > 1) {
+ // TODO(brianwilkerson) Improve error reporting when both a library and
+ // part-of directive are found.
+// if (libraryDirectiveFound) {
+// int directiveCount = directives.length;
+// for (int i = 0; i < directiveCount; i++) {
+// Directive directive = directives[i];
+// if (directive is PartOfDirective) {
+// _reportErrorForToken(
+// ParserErrorCode.PART_OF_IN_LIBRARY, directive.partKeyword);
+// }
+// }
+// } else {
+ bool firstPartOf = true;
+ int directiveCount = directives.length;
+ for (int i = 0; i < directiveCount; i++) {
+ Directive directive = directives[i];
+ if (directive is PartOfDirective) {
+ if (firstPartOf) {
+ firstPartOf = false;
+ } else {
+ _reportErrorForToken(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES,
+ directive.partKeyword);
}
- leftIndex = JavaString.indexOf(comment, '[', rightIndex);
} else {
- leftIndex = JavaString.indexOf(comment, '[', range[1] + 1);
+ _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART,
+ directives[i].keyword);
}
+// }
}
}
- return references;
+ return new CompilationUnit(
+ firstToken, scriptTag, directives, declarations, _currentToken);
}
/**
@@ -5056,49 +1964,62 @@ class Parser {
* | (final | const) type? staticFinalDeclarationList ';'
* | variableDeclaration ';'
*/
- CompilationUnitMember _parseCompilationUnitMember(
+ CompilationUnitMember parseCompilationUnitMember(
CommentAndMetadata commentAndMetadata) {
- Modifiers modifiers = _parseModifiers();
- if (_matchesKeyword(Keyword.CLASS)) {
- return _parseClassDeclaration(
+ Modifiers modifiers = parseModifiers();
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.CLASS) {
+ return parseClassDeclaration(
commentAndMetadata, _validateModifiersForClass(modifiers));
- } else if (_matchesKeyword(Keyword.TYPEDEF) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT) &&
- !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+ }
+ Token next = _peek();
+ TokenType nextType = next.type;
+ if (keyword == Keyword.TYPEDEF &&
+ nextType != TokenType.PERIOD &&
+ nextType != TokenType.LT &&
+ nextType != TokenType.OPEN_PAREN) {
_validateModifiersForTypedef(modifiers);
- return _parseTypeAlias(commentAndMetadata);
- } else if (_matchesKeyword(Keyword.ENUM)) {
+ return parseTypeAlias(commentAndMetadata);
+ } else if (keyword == Keyword.ENUM) {
_validateModifiersForEnum(modifiers);
- return _parseEnumDeclaration(commentAndMetadata);
- }
- if (_matchesKeyword(Keyword.VOID)) {
- TypeName returnType = parseReturnType();
- if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
- _tokenMatchesIdentifier(_peek())) {
+ return parseEnumDeclaration(commentAndMetadata);
+ } else if (keyword == Keyword.VOID) {
+ TypeName returnType =
+ new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+ keyword = _currentToken.keyword;
+ next = _peek();
+ if ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+ _tokenMatchesIdentifier(next)) {
_validateModifiersForTopLevelFunction(modifiers);
- return _parseFunctionDeclaration(
+ return parseFunctionDeclaration(
commentAndMetadata, modifiers.externalKeyword, returnType);
- } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+ } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
_reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
- return _convertToFunctionDeclaration(_parseOperator(
- commentAndMetadata, modifiers.externalKeyword, returnType));
+ return _convertToFunctionDeclaration(_parseOperatorAfterKeyword(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ returnType,
+ getAndAdvance()));
} else if (_matchesIdentifier() &&
- _peek().matchesAny([
+ next.matchesAny(const <TokenType>[
TokenType.OPEN_PAREN,
TokenType.OPEN_CURLY_BRACKET,
- TokenType.FUNCTION
+ TokenType.FUNCTION,
+ TokenType.LT
])) {
_validateModifiersForTopLevelFunction(modifiers);
- return _parseFunctionDeclaration(
+ return parseFunctionDeclaration(
commentAndMetadata, modifiers.externalKeyword, returnType);
} else {
//
// We have found an error of some kind. Try to recover.
//
if (_matchesIdentifier()) {
- if (_peek().matchesAny(
- [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
+ if (next.matchesAny(const <TokenType>[
+ TokenType.EQ,
+ TokenType.COMMA,
+ TokenType.SEMICOLON
+ ])) {
//
// We appear to have a variable declaration with a type of "void".
//
@@ -5106,7 +2027,7 @@ class Parser {
return new TopLevelVariableDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
- _parseVariableDeclarationListAfterType(null,
+ parseVariableDeclarationListAfterType(null,
_validateModifiersForTopLevelVariable(modifiers), null),
_expect(TokenType.SEMICOLON));
}
@@ -5115,15 +2036,18 @@ class Parser {
ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
return null;
}
- } else if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
- _tokenMatchesIdentifier(_peek())) {
+ } else if ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+ _tokenMatchesIdentifier(next)) {
_validateModifiersForTopLevelFunction(modifiers);
- return _parseFunctionDeclaration(
+ return parseFunctionDeclaration(
commentAndMetadata, modifiers.externalKeyword, null);
- } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+ } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
_reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
- return _convertToFunctionDeclaration(
- _parseOperator(commentAndMetadata, modifiers.externalKeyword, null));
+ return _convertToFunctionDeclaration(_parseOperatorAfterKeyword(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ null,
+ getAndAdvance()));
} else if (!_matchesIdentifier()) {
Token keyword = modifiers.varKeyword;
if (keyword == null) {
@@ -5137,23 +2061,30 @@ class Parser {
// We appear to have found an incomplete top-level variable declaration.
//
_reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
- List<VariableDeclaration> variables = new List<VariableDeclaration>();
- variables.add(
- new VariableDeclaration(_createSyntheticIdentifier(), null, null));
+ VariableDeclaration variable =
+ new VariableDeclaration(createSyntheticIdentifier(), null, null);
+ List<VariableDeclaration> variables = <VariableDeclaration>[variable];
return new TopLevelVariableDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
new VariableDeclarationList(null, null, keyword, null, variables),
- _expectSemicolon());
+ _expect(TokenType.SEMICOLON));
}
_reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
return null;
- } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- _validateModifiersForTopLevelFunction(modifiers);
- return _parseFunctionDeclaration(
+ } else if (_isPeekGenericTypeParametersAndOpenParen()) {
+ return parseFunctionDeclaration(
commentAndMetadata, modifiers.externalKeyword, null);
- } else if (_peek()
- .matchesAny([TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
+ } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) {
+ TypeName returnType = _parseOptionalTypeNameComment();
+ _validateModifiersForTopLevelFunction(modifiers);
+ return parseFunctionDeclaration(
+ commentAndMetadata, modifiers.externalKeyword, returnType);
+ } else if (next.matchesAny(const <TokenType>[
+ TokenType.EQ,
+ TokenType.COMMA,
+ TokenType.SEMICOLON
+ ])) {
if (modifiers.constKeyword == null &&
modifiers.finalKeyword == null &&
modifiers.varKeyword == null) {
@@ -5163,25 +2094,30 @@ class Parser {
return new TopLevelVariableDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
- _parseVariableDeclarationListAfterType(
+ parseVariableDeclarationListAfterType(
null, _validateModifiersForTopLevelVariable(modifiers), null),
_expect(TokenType.SEMICOLON));
}
TypeName returnType = parseReturnType();
- if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
- _tokenMatchesIdentifier(_peek())) {
+ keyword = _currentToken.keyword;
+ next = _peek();
+ if ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+ _tokenMatchesIdentifier(next)) {
_validateModifiersForTopLevelFunction(modifiers);
- return _parseFunctionDeclaration(
+ return parseFunctionDeclaration(
commentAndMetadata, modifiers.externalKeyword, returnType);
- } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+ } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
_reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
- return _convertToFunctionDeclaration(_parseOperator(
- commentAndMetadata, modifiers.externalKeyword, returnType));
+ return _convertToFunctionDeclaration(_parseOperatorAfterKeyword(
+ commentAndMetadata,
+ modifiers.externalKeyword,
+ returnType,
+ getAndAdvance()));
} else if (_matches(TokenType.AT)) {
return new TopLevelVariableDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
- _parseVariableDeclarationListAfterType(null,
+ parseVariableDeclarationListAfterType(null,
_validateModifiersForTopLevelVariable(modifiers), returnType),
_expect(TokenType.SEMICOLON));
} else if (!_matchesIdentifier()) {
@@ -5194,213 +2130,200 @@ class Parser {
} else {
semicolon = _createSyntheticToken(TokenType.SEMICOLON);
}
- List<VariableDeclaration> variables = new List<VariableDeclaration>();
- variables.add(
- new VariableDeclaration(_createSyntheticIdentifier(), null, null));
+ VariableDeclaration variable =
+ new VariableDeclaration(createSyntheticIdentifier(), null, null);
+ List<VariableDeclaration> variables = <VariableDeclaration>[variable];
return new TopLevelVariableDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
new VariableDeclarationList(null, null, null, returnType, variables),
semicolon);
- }
- if (_peek().matchesAny([
+ } else if (next.matchesAny(const <TokenType>[
TokenType.OPEN_PAREN,
TokenType.FUNCTION,
- TokenType.OPEN_CURLY_BRACKET
+ TokenType.OPEN_CURLY_BRACKET,
+ TokenType.LT
])) {
_validateModifiersForTopLevelFunction(modifiers);
- return _parseFunctionDeclaration(
+ return parseFunctionDeclaration(
commentAndMetadata, modifiers.externalKeyword, returnType);
}
return new TopLevelVariableDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
- _parseVariableDeclarationListAfterType(
+ parseVariableDeclarationListAfterType(
null, _validateModifiersForTopLevelVariable(modifiers), returnType),
_expect(TokenType.SEMICOLON));
}
/**
- * Parse a const expression. Return the const expression that was parsed.
+ * Parse a conditional expression. Return the conditional expression that was
+ * parsed.
*
- * constExpression ::=
- * instanceCreationExpression
- * | listLiteral
- * | mapLiteral
+ * conditionalExpression ::=
+ * ifNullExpression ('?' expressionWithoutCascade ':' expressionWithoutCascade)?
*/
- Expression _parseConstExpression() {
- Token keyword = _expectKeyword(Keyword.CONST);
- if (_matches(TokenType.OPEN_SQUARE_BRACKET) || _matches(TokenType.INDEX)) {
- return _parseListLiteral(keyword, null);
- } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- return _parseMapLiteral(keyword, null);
- } else if (_matches(TokenType.LT)) {
- return _parseListOrMapLiteral(keyword);
+ Expression parseConditionalExpression() {
+ Expression condition = parseIfNullExpression();
+ if (_currentToken.type != TokenType.QUESTION) {
+ return condition;
}
- return _parseInstanceCreationExpression(keyword);
+ Token question = getAndAdvance();
+ Expression thenExpression = parseExpressionWithoutCascade();
+ Token colon = _expect(TokenType.COLON);
+ Expression elseExpression = parseExpressionWithoutCascade();
+ return new ConditionalExpression(
+ condition, question, thenExpression, colon, elseExpression);
}
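
Reviewer note, illustrative only: a hypothetical snippet (not from this CL) matching the conditionalExpression production handled above, an ifNullExpression optionally followed by the '?' and ':' branches.

    // Hypothetical helper name.
    String describe(int n) {
      // ifNullExpression '?' expressionWithoutCascade ':' expressionWithoutCascade
      return n > 0 ? 'positive' : (n < 0 ? 'negative' : 'zero');
    }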
- ConstructorDeclaration _parseConstructor(
- CommentAndMetadata commentAndMetadata,
- Token externalKeyword,
- Token constKeyword,
- Token factoryKeyword,
- SimpleIdentifier returnType,
- Token period,
- SimpleIdentifier name,
- FormalParameterList parameters) {
- bool bodyAllowed = externalKeyword == null;
- Token separator = null;
- List<ConstructorInitializer> initializers = null;
- if (_matches(TokenType.COLON)) {
- separator = getAndAdvance();
- initializers = new List<ConstructorInitializer>();
- do {
- if (_matchesKeyword(Keyword.THIS)) {
- if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- bodyAllowed = false;
- initializers.add(_parseRedirectingConstructorInvocation());
- } else if (_tokenMatches(_peek(), TokenType.PERIOD) &&
- _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) {
- bodyAllowed = false;
- initializers.add(_parseRedirectingConstructorInvocation());
- } else {
- initializers.add(_parseConstructorFieldInitializer());
- }
- } else if (_matchesKeyword(Keyword.SUPER)) {
- initializers.add(_parseSuperConstructorInvocation());
- } else if (_matches(TokenType.OPEN_CURLY_BRACKET) ||
- _matches(TokenType.FUNCTION)) {
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER);
- } else {
- initializers.add(_parseConstructorFieldInitializer());
- }
- } while (_optional(TokenType.COMMA));
- if (factoryKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.FACTORY_WITH_INITIALIZERS, factoryKeyword);
- }
- }
- ConstructorName redirectedConstructor = null;
- FunctionBody body;
- if (_matches(TokenType.EQ)) {
- separator = getAndAdvance();
- redirectedConstructor = parseConstructorName();
- body = new EmptyFunctionBody(_expect(TokenType.SEMICOLON));
- if (factoryKeyword == null) {
+ /**
+ * Parse a configuration in either an import or export directive.
+ *
+ * This method assumes that the current token matches `Keyword.IF`.
+ *
+ * configuration ::=
+ * 'if' '(' test ')' uri
+ *
+ * test ::=
+ * dottedName ('==' stringLiteral)?
+ *
+ * dottedName ::=
+ * identifier ('.' identifier)*
+ */
+ Configuration parseConfiguration() {
+ Token ifKeyword = getAndAdvance();
+ Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
+ DottedName name = parseDottedName();
+ Token equalToken = null;
+ StringLiteral value = null;
+ if (_matches(TokenType.EQ_EQ)) {
+ equalToken = getAndAdvance();
+ value = parseStringLiteral();
+ if (value is StringInterpolation) {
_reportErrorForNode(
- ParserErrorCode.REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR,
- redirectedConstructor);
- }
- } else {
- body = _parseFunctionBody(
- true, ParserErrorCode.MISSING_FUNCTION_BODY, false);
- if (constKeyword != null &&
- factoryKeyword != null &&
- externalKeyword == null) {
- _reportErrorForToken(ParserErrorCode.CONST_FACTORY, factoryKeyword);
- } else if (body is EmptyFunctionBody) {
- if (factoryKeyword != null &&
- externalKeyword == null &&
- _parseFunctionBodies) {
- _reportErrorForToken(
- ParserErrorCode.FACTORY_WITHOUT_BODY, factoryKeyword);
- }
- } else {
- if (constKeyword != null) {
- _reportErrorForNode(
- ParserErrorCode.CONST_CONSTRUCTOR_WITH_BODY, body);
- } else if (!bodyAllowed) {
- _reportErrorForNode(
- ParserErrorCode.EXTERNAL_CONSTRUCTOR_WITH_BODY, body);
- }
+ ParserErrorCode.INVALID_LITERAL_IN_CONFIGURATION, value);
}
}
- return new ConstructorDeclaration(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- externalKeyword,
- constKeyword,
- factoryKeyword,
- returnType,
- period,
- name,
- parameters,
- separator,
- initializers,
- redirectedConstructor,
- body);
+ Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
+ StringLiteral libraryUri = _parseUri();
+ return new Configuration(ifKeyword, leftParenthesis, name, equalToken,
+ value, rightParenthesis, libraryUri);
}
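
For context, the configuration syntax read here is the conditional-directive clause; example only (the URIs are invented, only the 'if (...)' parts are configurations):

    import 'src/io_stub.dart'
        if (dart.library.io) 'src/io_impl.dart'
        if (dart.library.html) 'src/html_impl.dart';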
/**
- * Parse a field initializer within a constructor. Return the field
- * initializer that was parsed.
+ * Parse a const expression. Return the const expression that was parsed.
+ *
+ * This method assumes that the current token matches `Keyword.CONST`.
+ *
+ * constExpression ::=
+ * instanceCreationExpression
+ * | listLiteral
+ * | mapLiteral
+ */
+ Expression parseConstExpression() {
+ Token keyword = getAndAdvance();
+ TokenType type = _currentToken.type;
+ if (type == TokenType.LT || _injectGenericCommentTypeList()) {
+ return parseListOrMapLiteral(keyword);
+ } else if (type == TokenType.OPEN_SQUARE_BRACKET ||
+ type == TokenType.INDEX) {
+ return parseListLiteral(keyword, null);
+ } else if (type == TokenType.OPEN_CURLY_BRACKET) {
+ return parseMapLiteral(keyword, null);
+ }
+ return parseInstanceCreationExpression(keyword);
+ }
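+
+  Illustrative expressions (not part of this patch) covering the three
+  constExpression alternatives dispatched above:
+
+      const xs = const <int>[1, 2, 3];             // listLiteral
+      const counts = const <String, int>{'a': 1};  // mapLiteral
+      const pause = const Duration(seconds: 5);    // instanceCreationExpression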
+
+ /**
+ * Parse a field initializer within a constructor. The flag [hasThis] should
+ * be true if the current token is `this`. Return the field initializer that
+ * was parsed.
*
* fieldInitializer:
* ('this' '.')? identifier '=' conditionalExpression cascadeSection*
*/
- ConstructorFieldInitializer _parseConstructorFieldInitializer() {
- Token keyword = null;
+ ConstructorFieldInitializer parseConstructorFieldInitializer(bool hasThis) {
+ Token keywordToken = null;
Token period = null;
- if (_matchesKeyword(Keyword.THIS)) {
- keyword = getAndAdvance();
+ if (hasThis) {
+ keywordToken = getAndAdvance();
period = _expect(TokenType.PERIOD);
}
SimpleIdentifier fieldName = parseSimpleIdentifier();
Token equals = null;
- if (_matches(TokenType.EQ)) {
+ TokenType type = _currentToken.type;
+ if (type == TokenType.EQ) {
equals = getAndAdvance();
- } else if (!_matchesKeyword(Keyword.THIS) &&
- !_matchesKeyword(Keyword.SUPER) &&
- !_matches(TokenType.OPEN_CURLY_BRACKET) &&
- !_matches(TokenType.FUNCTION)) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER);
- equals = _createSyntheticToken(TokenType.EQ);
} else {
_reportErrorForCurrentToken(
ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER);
- return new ConstructorFieldInitializer(keyword, period, fieldName,
- _createSyntheticToken(TokenType.EQ), _createSyntheticIdentifier());
+ Keyword keyword = _currentToken.keyword;
+ if (keyword != Keyword.THIS &&
+ keyword != Keyword.SUPER &&
+ type != TokenType.OPEN_CURLY_BRACKET &&
+ type != TokenType.FUNCTION) {
+ equals = _createSyntheticToken(TokenType.EQ);
+ } else {
+ return new ConstructorFieldInitializer(keywordToken, period, fieldName,
+ _createSyntheticToken(TokenType.EQ), createSyntheticIdentifier());
+ }
}
bool wasInInitializer = _inInitializer;
_inInitializer = true;
try {
Expression expression = parseConditionalExpression();
- TokenType tokenType = _currentToken.type;
- if (tokenType == TokenType.PERIOD_PERIOD) {
- List<Expression> cascadeSections = new List<Expression>();
- while (tokenType == TokenType.PERIOD_PERIOD) {
- Expression section = _parseCascadeSection();
+ if (_matches(TokenType.PERIOD_PERIOD)) {
+ List<Expression> cascadeSections = <Expression>[];
+ do {
+ Expression section = parseCascadeSection();
if (section != null) {
cascadeSections.add(section);
}
- tokenType = _currentToken.type;
- }
+ } while (_matches(TokenType.PERIOD_PERIOD));
expression = new CascadeExpression(expression, cascadeSections);
}
return new ConstructorFieldInitializer(
- keyword, period, fieldName, equals, expression);
+ keywordToken, period, fieldName, equals, expression);
} finally {
_inInitializer = wasInInitializer;
}
}
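
A hypothetical class (not in the patch) whose initializer list exercises both fieldInitializer forms, with and without the leading 'this.':

    class Rectangle {
      final int width;
      final int height;
      Rectangle(int w, int h)
          : this.width = w, // hasThis == true path
            height = h;     // bare identifier path
    }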
+ /**
+ * Parse the name of a constructor. Return the constructor name that was
+ * parsed.
+ *
+ * constructorName:
+ * type ('.' identifier)?
+ */
+ ConstructorName parseConstructorName() {
+ TypeName type = parseTypeName(false);
+ Token period = null;
+ SimpleIdentifier name = null;
+ if (_matches(TokenType.PERIOD)) {
+ period = getAndAdvance();
+ name = parseSimpleIdentifier();
+ }
+ return new ConstructorName(type, period, name);
+ }
+
/**
* Parse a continue statement. Return the continue statement that was parsed.
*
+ * This method assumes that the current token matches `Keyword.CONTINUE`.
+ *
* continueStatement ::=
* 'continue' identifier? ';'
*/
- Statement _parseContinueStatement() {
- Token continueKeyword = _expectKeyword(Keyword.CONTINUE);
+ Statement parseContinueStatement() {
+ Token continueKeyword = getAndAdvance();
if (!_inLoop && !_inSwitch) {
_reportErrorForToken(
ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword);
}
SimpleIdentifier label = null;
if (_matchesIdentifier()) {
- label = parseSimpleIdentifier();
+ label = _parseSimpleIdentifierUnchecked();
}
if (_inSwitch && !_inLoop && label == null) {
_reportErrorForToken(
@@ -5420,23 +2343,35 @@ class Parser {
* | importDirective
* | partDirective
*/
- Directive _parseDirective(CommentAndMetadata commentAndMetadata) {
+ Directive parseDirective(CommentAndMetadata commentAndMetadata) {
if (_matchesKeyword(Keyword.IMPORT)) {
- return _parseImportDirective(commentAndMetadata);
+ return parseImportDirective(commentAndMetadata);
} else if (_matchesKeyword(Keyword.EXPORT)) {
- return _parseExportDirective(commentAndMetadata);
+ return parseExportDirective(commentAndMetadata);
} else if (_matchesKeyword(Keyword.LIBRARY)) {
- return _parseLibraryDirective(commentAndMetadata);
+ return parseLibraryDirective(commentAndMetadata);
} else if (_matchesKeyword(Keyword.PART)) {
- return _parsePartDirective(commentAndMetadata);
+ return parsePartOrPartOfDirective(commentAndMetadata);
} else {
// Internal error: this method should not have been invoked if the current
// token was something other than one of the above.
- throw new IllegalStateException(
+ throw new StateError(
"parseDirective invoked in an invalid state; currentToken = $_currentToken");
}
}
+ /**
+ * Parse the script tag and directives in a compilation unit, starting with
+ * the given [token], until the first non-directive is encountered. The
+ * remainder of the compilation unit will not be parsed. Specifically, if
+ * there are directives later in the file, they will not be parsed. Return the
+ * compilation unit that was parsed.
+ */
+ CompilationUnit parseDirectives(Token token) {
+ _currentToken = token;
+ return parseDirectives2();
+ }
+
/**
* Parse the script tag and directives in a compilation unit until the first
* non-directive is encountered. Return the compilation unit that was parsed.
@@ -5444,35 +2379,54 @@ class Parser {
* compilationUnit ::=
* scriptTag? directive*
*/
- CompilationUnit _parseDirectives() {
+ CompilationUnit parseDirectives2() {
Token firstToken = _currentToken;
ScriptTag scriptTag = null;
if (_matches(TokenType.SCRIPT_TAG)) {
scriptTag = new ScriptTag(getAndAdvance());
}
- List<Directive> directives = new List<Directive>();
+ List<Directive> directives = <Directive>[];
while (!_matches(TokenType.EOF)) {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- if ((_matchesKeyword(Keyword.IMPORT) ||
- _matchesKeyword(Keyword.EXPORT) ||
- _matchesKeyword(Keyword.LIBRARY) ||
- _matchesKeyword(Keyword.PART)) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT) &&
- !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- directives.add(_parseDirective(commentAndMetadata));
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ Keyword keyword = _currentToken.keyword;
+ TokenType type = _peek().type;
+ if ((keyword == Keyword.IMPORT ||
+ keyword == Keyword.EXPORT ||
+ keyword == Keyword.LIBRARY ||
+ keyword == Keyword.PART) &&
+ type != TokenType.PERIOD &&
+ type != TokenType.LT &&
+ type != TokenType.OPEN_PAREN) {
+ directives.add(parseDirective(commentAndMetadata));
} else if (_matches(TokenType.SEMICOLON)) {
_advance();
} else {
while (!_matches(TokenType.EOF)) {
_advance();
}
- return new CompilationUnit(firstToken, scriptTag, directives,
- new List<CompilationUnitMember>(), _currentToken);
+ return new CompilationUnit(
+ firstToken, scriptTag, directives, null, _currentToken);
}
}
- return new CompilationUnit(firstToken, scriptTag, directives,
- new List<CompilationUnitMember>(), _currentToken);
+ return new CompilationUnit(
+ firstToken, scriptTag, directives, null, _currentToken);
+ }
+
+ /**
+ * Parse a documentation comment based on the given list of documentation
+ * comment tokens. Return the documentation comment that was parsed, or `null`
+ * if there was no comment.
+ *
+ * documentationComment ::=
+ * multiLineComment?
+ * | singleLineComment*
+ */
+ Comment parseDocumentationComment(List<DocumentationCommentToken> tokens) {
+ if (tokens == null) {
+ return null;
+ }
+ List<CommentReference> references = parseCommentReferences(tokens);
+ return Comment.createDocumentationCommentWithReferences(tokens, references);
}
/**
@@ -5483,45 +2437,40 @@ class Parser {
* multiLineComment?
* | singleLineComment*
*/
- Comment _parseDocumentationComment() {
- List<DocumentationCommentToken> documentationTokens =
- <DocumentationCommentToken>[];
+ List<DocumentationCommentToken> parseDocumentationCommentTokens() {
+ List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[];
CommentToken commentToken = _currentToken.precedingComments;
while (commentToken != null) {
if (commentToken is DocumentationCommentToken) {
- if (documentationTokens.isNotEmpty) {
+ if (tokens.isNotEmpty) {
if (commentToken.type == TokenType.SINGLE_LINE_COMMENT) {
- if (documentationTokens[0].type != TokenType.SINGLE_LINE_COMMENT) {
- documentationTokens.clear();
+ if (tokens[0].type != TokenType.SINGLE_LINE_COMMENT) {
+ tokens.clear();
}
} else {
- documentationTokens.clear();
+ tokens.clear();
}
}
- documentationTokens.add(commentToken);
+ tokens.add(commentToken);
}
commentToken = commentToken.next;
}
- if (documentationTokens.isEmpty) {
- return null;
- }
- List<CommentReference> references =
- _parseCommentReferences(documentationTokens);
- return Comment.createDocumentationCommentWithReferences(
- documentationTokens, references);
+ return tokens.isEmpty ? null : tokens;
}
/**
* Parse a do statement. Return the do statement that was parsed.
*
+ * This method assumes that the current token matches `Keyword.DO`.
+ *
* doStatement ::=
* 'do' statement 'while' '(' expression ')' ';'
*/
- Statement _parseDoStatement() {
+ Statement parseDoStatement() {
bool wasInLoop = _inLoop;
_inLoop = true;
try {
- Token doKeyword = _expectKeyword(Keyword.DO);
+ Token doKeyword = getAndAdvance();
Statement body = parseStatement2();
Token whileKeyword = _expectKeyword(Keyword.WHILE);
Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
@@ -5535,46 +2484,49 @@ class Parser {
}
}
+ /**
+ * Parse a dotted name. Return the dotted name that was parsed.
+ *
+ * dottedName ::=
+ * identifier ('.' identifier)*
+ */
+ DottedName parseDottedName() {
+ List<SimpleIdentifier> components = <SimpleIdentifier>[
+ parseSimpleIdentifier()
+ ];
+ while (_optional(TokenType.PERIOD)) {
+ components.add(parseSimpleIdentifier());
+ }
+ return new DottedName(components);
+ }
+
/**
* Parse an empty statement. Return the empty statement that was parsed.
*
+ * This method assumes that the current token matches `TokenType.SEMICOLON`.
+ *
* emptyStatement ::=
* ';'
*/
- Statement _parseEmptyStatement() => new EmptyStatement(getAndAdvance());
-
- EnumConstantDeclaration _parseEnumConstantDeclaration() {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- SimpleIdentifier name;
- if (_matchesIdentifier()) {
- name = parseSimpleIdentifier();
- } else {
- name = _createSyntheticIdentifier();
- }
- if (commentAndMetadata.metadata.isNotEmpty) {
- _reportErrorForNode(ParserErrorCode.ANNOTATION_ON_ENUM_CONSTANT,
- commentAndMetadata.metadata[0]);
- }
- return new EnumConstantDeclaration(
- commentAndMetadata.comment, commentAndMetadata.metadata, name);
- }
+ Statement parseEmptyStatement() => new EmptyStatement(getAndAdvance());
/**
* Parse an enum declaration. The [commentAndMetadata] is the metadata to be
* associated with the member. Return the enum declaration that was parsed.
*
+ * This method assumes that the current token matches `Keyword.ENUM`.
+ *
* enumType ::=
* metadata 'enum' id '{' id (',' id)* (',')? '}'
*/
- EnumDeclaration _parseEnumDeclaration(CommentAndMetadata commentAndMetadata) {
- Token keyword = _expectKeyword(Keyword.ENUM);
- SimpleIdentifier name = parseSimpleIdentifier();
+ EnumDeclaration parseEnumDeclaration(CommentAndMetadata commentAndMetadata) {
+ Token keyword = getAndAdvance();
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
Token leftBracket = null;
- List<EnumConstantDeclaration> constants =
- new List<EnumConstantDeclaration>();
+ List<EnumConstantDeclaration> constants = <EnumConstantDeclaration>[];
Token rightBracket = null;
if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
+ leftBracket = getAndAdvance();
if (_matchesIdentifier() || _matches(TokenType.AT)) {
constants.add(_parseEnumConstantDeclaration());
} else if (_matches(TokenType.COMMA) &&
@@ -5615,23 +2567,22 @@ class Parser {
* relationalExpression (equalityOperator relationalExpression)?
* | 'super' equalityOperator relationalExpression
*/
- Expression _parseEqualityExpression() {
+ Expression parseEqualityExpression() {
Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
+ if (_currentToken.keyword == Keyword.SUPER &&
_currentToken.next.type.isEqualityOperator) {
expression = new SuperExpression(getAndAdvance());
} else {
- expression = _parseRelationalExpression();
+ expression = parseRelationalExpression();
}
bool leftEqualityExpression = false;
while (_currentToken.type.isEqualityOperator) {
- Token operator = getAndAdvance();
if (leftEqualityExpression) {
_reportErrorForNode(
ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression);
}
expression = new BinaryExpression(
- expression, operator, _parseRelationalExpression());
+ expression, getAndAdvance(), parseRelationalExpression());
leftEqualityExpression = true;
}
return expression;
@@ -5641,38 +2592,140 @@ class Parser {
* Parse an export directive. The [commentAndMetadata] is the metadata to be
* associated with the directive. Return the export directive that was parsed.
*
+ * This method assumes that the current token matches `Keyword.EXPORT`.
+ *
* exportDirective ::=
- * metadata 'export' stringLiteral combinator*';'
+ * metadata 'export' stringLiteral configuration* combinator*';'
*/
- ExportDirective _parseExportDirective(CommentAndMetadata commentAndMetadata) {
- Token exportKeyword = _expectKeyword(Keyword.EXPORT);
+ ExportDirective parseExportDirective(CommentAndMetadata commentAndMetadata) {
+ Token exportKeyword = getAndAdvance();
StringLiteral libraryUri = _parseUri();
- List<Combinator> combinators = _parseCombinators();
- Token semicolon = _expectSemicolon();
+ List<Configuration> configurations = _parseConfigurations();
+ List<Combinator> combinators = parseCombinators();
+ Token semicolon = _expect(TokenType.SEMICOLON);
return new ExportDirective(
commentAndMetadata.comment,
commentAndMetadata.metadata,
exportKeyword,
libraryUri,
+ configurations,
combinators,
semicolon);
}
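
Example only of the updated exportDirective grammar, with a configuration before the combinators ('CommonApi' and the URIs are invented names):

    export 'src/common.dart'
        if (dart.library.io) 'src/io_extras.dart'
        show CommonApi;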
+ /**
+ * Parse an expression, starting with the given [token]. Return the expression
+ * that was parsed, or `null` if the tokens do not represent a recognizable
+ * expression.
+ */
+ Expression parseExpression(Token token) {
+ _currentToken = token;
+ return parseExpression2();
+ }
+
+ /**
+ * Parse an expression that might contain a cascade. Return the expression
+ * that was parsed.
+ *
+ * expression ::=
+ * assignableExpression assignmentOperator expression
+ * | conditionalExpression cascadeSection*
+ * | throwExpression
+ */
+ Expression parseExpression2() {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.THROW) {
+ return parseThrowExpression();
+ } else if (keyword == Keyword.RETHROW) {
+ // TODO(brianwilkerson) Rethrow is a statement again.
+ return parseRethrowExpression();
+ }
+ //
+ // assignableExpression is a subset of conditionalExpression, so we can
+ // parse a conditional expression and then determine whether it is followed
+ // by an assignmentOperator, checking for conformance to the restricted
+ // grammar after making that determination.
+ //
+ Expression expression = parseConditionalExpression();
+ TokenType type = _currentToken.type;
+ if (type == TokenType.PERIOD_PERIOD) {
+ List<Expression> cascadeSections = <Expression>[];
+ do {
+ Expression section = parseCascadeSection();
+ if (section != null) {
+ cascadeSections.add(section);
+ }
+ } while (_currentToken.type == TokenType.PERIOD_PERIOD);
+ return new CascadeExpression(expression, cascadeSections);
+ } else if (type.isAssignmentOperator) {
+ Token operator = getAndAdvance();
+ _ensureAssignable(expression);
+ return new AssignmentExpression(expression, operator, parseExpression2());
+ }
+ return expression;
+ }
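+
+  Hypothetical statements (not part of this patch) showing the two branches
+  taken after the conditional expression is parsed: a run of cascade sections,
+  and an assignment whose left-hand side must be assignable:
+
+      void cascadeAndAssign() {
+        var buffer = new StringBuffer()
+          ..write('hello')
+          ..writeln(' world'); // cascadeSection* branch
+        int counter = 0;
+        counter += 1;          // assignmentOperator branch
+        print('$buffer$counter');
+      }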
+
/**
* Parse a list of expressions. Return the expression that was parsed.
*
* expressionList ::=
* expression (',' expression)*
*/
- List<Expression> _parseExpressionList() {
- List<Expression> expressions = new List<Expression>();
- expressions.add(parseExpression2());
+ List<Expression> parseExpressionList() {
+ List<Expression> expressions = <Expression>[parseExpression2()];
while (_optional(TokenType.COMMA)) {
expressions.add(parseExpression2());
}
return expressions;
}
+ /**
+ * Parse an expression that does not contain any cascades. Return the
+ * expression that was parsed.
+ *
+ * expressionWithoutCascade ::=
+ * assignableExpression assignmentOperator expressionWithoutCascade
+ * | conditionalExpression
+ * | throwExpressionWithoutCascade
+ */
+ Expression parseExpressionWithoutCascade() {
+ if (_matchesKeyword(Keyword.THROW)) {
+ return parseThrowExpressionWithoutCascade();
+ } else if (_matchesKeyword(Keyword.RETHROW)) {
+ return parseRethrowExpression();
+ }
+ //
+ // assignableExpression is a subset of conditionalExpression, so we can
+ // parse a conditional expression and then determine whether it is followed
+ // by an assignmentOperator, checking for conformance to the restricted
+ // grammar after making that determination.
+ //
+ Expression expression = parseConditionalExpression();
+ if (_currentToken.type.isAssignmentOperator) {
+ Token operator = getAndAdvance();
+ _ensureAssignable(expression);
+ expression = new AssignmentExpression(
+ expression, operator, parseExpressionWithoutCascade());
+ }
+ return expression;
+ }
+
+ /**
+ * Parse a class extends clause. Return the class extends clause that was
+ * parsed.
+ *
+ * This method assumes that the current token matches `Keyword.EXTENDS`.
+ *
+ * classExtendsClause ::=
+ * 'extends' type
+ */
+ ExtendsClause parseExtendsClause() {
+ Token keyword = getAndAdvance();
+ TypeName superclass = parseTypeName(false);
+ _mustNotBeNullable(superclass, ParserErrorCode.NULLABLE_TYPE_IN_EXTENDS);
+ return new ExtendsClause(keyword, superclass);
+ }
+
/**
* Parse the 'final', 'const', 'var' or type preceding a variable declaration.
* The [optional] is `true` if the keyword and type are optional. Return the
@@ -5684,25 +2737,37 @@ class Parser {
* | 'var'
* | type
*/
- FinalConstVarOrType _parseFinalConstVarOrType(bool optional) {
- Token keyword = null;
+ FinalConstVarOrType parseFinalConstVarOrType(bool optional) {
+ Token keywordToken = null;
TypeName type = null;
- if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.CONST)) {
- keyword = getAndAdvance();
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.FINAL || keyword == Keyword.CONST) {
+ keywordToken = getAndAdvance();
if (_isTypedIdentifier(_currentToken)) {
- type = parseTypeName();
- }
- } else if (_matchesKeyword(Keyword.VAR)) {
- keyword = getAndAdvance();
+ type = parseTypeName(false);
+ } else {
+ // Support `final/*=T*/ x;`
+ type = _parseOptionalTypeNameComment();
+ }
+ } else if (keyword == Keyword.VAR) {
+ keywordToken = getAndAdvance();
+ // Support `var/*=T*/ x;`
+ type = _parseOptionalTypeNameComment();
+ if (type != null) {
+ // Clear the keyword to prevent an error.
+ keywordToken = null;
+ }
+ } else if (_isTypedIdentifier(_currentToken)) {
+ type = parseReturnType();
+ } else if (!optional) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
} else {
- if (_isTypedIdentifier(_currentToken)) {
- type = parseReturnType();
- } else if (!optional) {
- _reportErrorForCurrentToken(
- ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
- }
+ // Support parameters such as `(/*=K*/ key, /*=V*/ value)`
+ // This is not supported if the type is required.
+ type = _parseOptionalTypeNameComment();
}
- return new FinalConstVarOrType(keyword, type);
+ return new FinalConstVarOrType(keywordToken, type);
}
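
Illustrative declarations (invented names, not from this CL) using the comment-encoded types now accepted here, i.e. `var/*=T*/ x;`, `final/*=T*/ x;`, and comment-typed parameters; to the language itself these remain ordinary comments:

    void tally(Map counts) {
      var/*=int*/ total = 0;
      final/*=List<String>*/ keys = counts.keys.toList();
      counts.forEach((/*=String*/ key, /*=int*/ value) {
        total += value;
      });
      print('${keys.length} keys, total $total');
    }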
/**
@@ -5717,39 +2782,75 @@ class Parser {
* defaultNamedParameter ::=
* normalFormalParameter (':' expression)?
*/
- FormalParameter _parseFormalParameter(ParameterKind kind) {
+ FormalParameter parseFormalParameter(ParameterKind kind) {
NormalFormalParameter parameter = parseNormalFormalParameter();
- if (_matches(TokenType.EQ)) {
- Token seperator = getAndAdvance();
+ TokenType type = _currentToken.type;
+ if (type == TokenType.EQ) {
+ Token separator = getAndAdvance();
Expression defaultValue = parseExpression2();
if (kind == ParameterKind.NAMED) {
_reportErrorForToken(
- ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, seperator);
+ ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, separator);
} else if (kind == ParameterKind.REQUIRED) {
_reportErrorForNode(
ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter);
}
return new DefaultFormalParameter(
- parameter, kind, seperator, defaultValue);
- } else if (_matches(TokenType.COLON)) {
- Token seperator = getAndAdvance();
+ parameter, kind, separator, defaultValue);
+ } else if (type == TokenType.COLON) {
+ Token separator = getAndAdvance();
Expression defaultValue = parseExpression2();
if (kind == ParameterKind.POSITIONAL) {
_reportErrorForToken(
ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER,
- seperator);
+ separator);
} else if (kind == ParameterKind.REQUIRED) {
_reportErrorForNode(
ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter);
}
return new DefaultFormalParameter(
- parameter, kind, seperator, defaultValue);
+ parameter, kind, separator, defaultValue);
} else if (kind != ParameterKind.REQUIRED) {
return new DefaultFormalParameter(parameter, kind, null, null);
}
return parameter;
}
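
Hypothetical signatures (not in the patch) showing the two default-value separators validated above: '=' for optional positional parameters, ':' for named parameters.

    String pad(String s, [int width = 8]) => s.padRight(width);
    String greet({String greeting: 'hello'}) => greeting;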
+ /**
+ * Parse a list of formal parameters. Return the formal parameters that were
+ * parsed.
+ *
+ * formalParameterList ::=
+ * '(' ')'
+ * | '(' normalFormalParameters (',' optionalFormalParameters)? ')'
+ * | '(' optionalFormalParameters ')'
+ *
+ * normalFormalParameters ::=
+ * normalFormalParameter (',' normalFormalParameter)*
+ *
+ * optionalFormalParameters ::=
+ * optionalPositionalFormalParameters
+ * | namedFormalParameters
+ *
+ * optionalPositionalFormalParameters ::=
+ * '[' defaultFormalParameter (',' defaultFormalParameter)* ']'
+ *
+ * namedFormalParameters ::=
+ * '{' defaultNamedParameter (',' defaultNamedParameter)* '}'
+ */
+ FormalParameterList parseFormalParameterList() {
+ if (_matches(TokenType.OPEN_PAREN)) {
+ return _parseFormalParameterListUnchecked();
+ }
+ // TODO(brianwilkerson) Improve the error message.
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]);
+ // Recovery: Check for an unmatched closing paren and parse parameters until
+ // it is reached.
+ return _parseFormalParameterListAfterParen(
+ _createSyntheticToken(TokenType.OPEN_PAREN));
+ }
+
/**
* Parse a for statement. Return the for statement that was parsed.
*
@@ -5765,7 +2866,7 @@ class Parser {
* localVariableDeclaration ';'
* | expression? ';'
*/
- Statement _parseForStatement() {
+ Statement parseForStatement() {
bool wasInLoop = _inLoop;
_inLoop = true;
try {
@@ -5778,23 +2879,24 @@ class Parser {
VariableDeclarationList variableList = null;
Expression initialization = null;
if (!_matches(TokenType.SEMICOLON)) {
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
if (_matchesIdentifier() &&
(_tokenMatchesKeyword(_peek(), Keyword.IN) ||
_tokenMatches(_peek(), TokenType.COLON))) {
- List<VariableDeclaration> variables = new List<VariableDeclaration>();
- SimpleIdentifier variableName = parseSimpleIdentifier();
- variables.add(new VariableDeclaration(variableName, null, null));
+ SimpleIdentifier variableName = _parseSimpleIdentifierUnchecked();
variableList = new VariableDeclarationList(commentAndMetadata.comment,
- commentAndMetadata.metadata, null, null, variables);
- } else if (_isInitializedVariableDeclaration()) {
+ commentAndMetadata.metadata, null, null, <VariableDeclaration>[
+ new VariableDeclaration(variableName, null, null)
+ ]);
+ } else if (isInitializedVariableDeclaration()) {
variableList =
- _parseVariableDeclarationListAfterMetadata(commentAndMetadata);
+ parseVariableDeclarationListAfterMetadata(commentAndMetadata);
} else {
initialization = parseExpression2();
}
- if (_matchesKeyword(Keyword.IN) || _matches(TokenType.COLON)) {
- if (_matches(TokenType.COLON)) {
+ TokenType type = _currentToken.type;
+ if (_matchesKeyword(Keyword.IN) || type == TokenType.COLON) {
+ if (type == TokenType.COLON) {
_reportErrorForCurrentToken(ParserErrorCode.COLON_IN_PLACE_OF_IN);
}
DeclaredIdentifier loopVariable = null;
@@ -5818,10 +2920,15 @@ class Parser {
Token keyword = variableList.keyword;
TypeName type = variableList.type;
if (keyword != null || type != null) {
- loopVariable = new DeclaredIdentifier(commentAndMetadata.comment,
- commentAndMetadata.metadata, keyword, type, variable.name);
+ loopVariable = new DeclaredIdentifier(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ keyword,
+ type,
+ new SimpleIdentifier(variable.name.token,
+ isDeclaration: true));
} else {
- if (!commentAndMetadata.metadata.isEmpty) {
+ if (commentAndMetadata.hasMetadata) {
// TODO(jwren) metadata isn't allowed before the identifier in
// "identifier in expression", add warning if commentAndMetadata
// has content
@@ -5859,15 +2966,15 @@ class Parser {
_reportErrorForToken(
ParserErrorCode.INVALID_AWAIT_IN_FOR, awaitKeyword);
}
- Token leftSeparator = _expectSemicolon();
+ Token leftSeparator = _expect(TokenType.SEMICOLON);
Expression condition = null;
if (!_matches(TokenType.SEMICOLON)) {
condition = parseExpression2();
}
- Token rightSeparator = _expectSemicolon();
+ Token rightSeparator = _expect(TokenType.SEMICOLON);
List<Expression> updaters = null;
if (!_matches(TokenType.CLOSE_PAREN)) {
- updaters = _parseExpressionList();
+ updaters = parseExpressionList();
}
Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
Statement body = parseStatement2();
@@ -5902,7 +3009,7 @@ class Parser {
* '=>' expression
* | block
*/
- FunctionBody _parseFunctionBody(
+ FunctionBody parseFunctionBody(
bool mayBeEmpty, ParserErrorCode emptyErrorCode, bool inExpression) {
bool wasInAsync = _inAsync;
bool wasInGenerator = _inGenerator;
@@ -5913,39 +3020,41 @@ class Parser {
_inLoop = false;
_inSwitch = false;
try {
- if (_matches(TokenType.SEMICOLON)) {
+ TokenType type = _currentToken.type;
+ if (type == TokenType.SEMICOLON) {
if (!mayBeEmpty) {
_reportErrorForCurrentToken(emptyErrorCode);
}
return new EmptyFunctionBody(getAndAdvance());
- } else if (_matchesString(_NATIVE)) {
- Token nativeToken = getAndAdvance();
- StringLiteral stringLiteral = null;
- if (_matches(TokenType.STRING)) {
- stringLiteral = parseStringLiteral();
- }
- return new NativeFunctionBody(
- nativeToken, stringLiteral, _expect(TokenType.SEMICOLON));
}
Token keyword = null;
Token star = null;
- if (_matchesString(ASYNC)) {
- keyword = getAndAdvance();
- if (_matches(TokenType.STAR)) {
- star = getAndAdvance();
- _inGenerator = true;
- }
- _inAsync = true;
- } else if (_matchesString(SYNC)) {
- keyword = getAndAdvance();
- if (_matches(TokenType.STAR)) {
- star = getAndAdvance();
- _inGenerator = true;
+ bool foundAsync = false;
+ bool foundSync = false;
+ if (type == TokenType.IDENTIFIER) {
+ String lexeme = _currentToken.lexeme;
+ if (lexeme == ASYNC) {
+ foundAsync = true;
+ keyword = getAndAdvance();
+ if (_matches(TokenType.STAR)) {
+ star = getAndAdvance();
+ _inGenerator = true;
+ }
+ type = _currentToken.type;
+ _inAsync = true;
+ } else if (lexeme == SYNC) {
+ foundSync = true;
+ keyword = getAndAdvance();
+ if (_matches(TokenType.STAR)) {
+ star = getAndAdvance();
+ _inGenerator = true;
+ }
+ type = _currentToken.type;
}
}
- if (_matches(TokenType.FUNCTION)) {
+ if (type == TokenType.FUNCTION) {
if (keyword != null) {
- if (!_tokenMatchesString(keyword, ASYNC)) {
+ if (!foundAsync) {
_reportErrorForToken(ParserErrorCode.INVALID_SYNC, keyword);
keyword = null;
} else if (star != null) {
@@ -5970,9 +3079,9 @@ class Parser {
}
return new ExpressionFunctionBody(
keyword, functionDefinition, expression, semicolon);
- } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+ } else if (type == TokenType.OPEN_CURLY_BRACKET) {
if (keyword != null) {
- if (_tokenMatchesString(keyword, SYNC) && star == null) {
+ if (foundSync && star == null) {
_reportErrorForToken(
ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword);
}
@@ -5983,6 +3092,14 @@ class Parser {
_createSyntheticToken(TokenType.SEMICOLON));
}
return new BlockFunctionBody(keyword, star, parseBlock());
+ } else if (_matchesString(_NATIVE)) {
+ Token nativeToken = getAndAdvance();
+ StringLiteral stringLiteral = null;
+ if (_matches(TokenType.STRING)) {
+ stringLiteral = _parseStringLiteralUnchecked();
+ }
+ return new NativeFunctionBody(
+ nativeToken, stringLiteral, _expect(TokenType.SEMICOLON));
} else {
// Invalid function body
_reportErrorForCurrentToken(emptyErrorCode);
@@ -6010,29 +3127,32 @@ class Parser {
* functionSignature functionBody
* | returnType? getOrSet identifier formalParameterList functionBody
*/
- FunctionDeclaration _parseFunctionDeclaration(
+ FunctionDeclaration parseFunctionDeclaration(
CommentAndMetadata commentAndMetadata,
Token externalKeyword,
TypeName returnType) {
- Token keyword = null;
+ Token keywordToken = null;
bool isGetter = false;
- if (_matchesKeyword(Keyword.GET) &&
- !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- keyword = getAndAdvance();
+ Keyword keyword = _currentToken.keyword;
+ SimpleIdentifier name = null;
+ if (keyword == Keyword.GET) {
+ keywordToken = getAndAdvance();
isGetter = true;
- } else if (_matchesKeyword(Keyword.SET) &&
- !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
- keyword = getAndAdvance();
+ } else if (keyword == Keyword.SET) {
+ keywordToken = getAndAdvance();
}
- SimpleIdentifier name = parseSimpleIdentifier();
- TypeParameterList typeParameters = null;
- if (parseGenericMethods && _matches(TokenType.LT)) {
- typeParameters = parseTypeParameterList();
+ if (keywordToken != null && _matches(TokenType.OPEN_PAREN)) {
+ name = new SimpleIdentifier(keywordToken, isDeclaration: true);
+ keywordToken = null;
+ isGetter = false;
+ } else {
+ name = parseSimpleIdentifier(isDeclaration: true);
}
+ TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
FormalParameterList parameters = null;
if (!isGetter) {
if (_matches(TokenType.OPEN_PAREN)) {
- parameters = parseFormalParameterList();
+ parameters = _parseFormalParameterListUnchecked();
_validateFormalParameterList(parameters);
} else {
_reportErrorForCurrentToken(
@@ -6046,11 +3166,11 @@ class Parser {
}
} else if (_matches(TokenType.OPEN_PAREN)) {
_reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS);
- parseFormalParameterList();
+ _parseFormalParameterListUnchecked();
}
FunctionBody body;
if (externalKeyword == null) {
- body = _parseFunctionBody(
+ body = parseFunctionBody(
false, ParserErrorCode.MISSING_FUNCTION_BODY, false);
} else {
body = new EmptyFunctionBody(_expect(TokenType.SEMICOLON));
@@ -6065,7 +3185,7 @@ class Parser {
commentAndMetadata.metadata,
externalKeyword,
returnType,
- keyword,
+ keywordToken,
name,
new FunctionExpression(typeParameters, parameters, body));
}
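
Illustrative top-level declarations (invented names, not part of this patch) of the kinds routed through parseFunctionDeclaration; note the new keywordToken handling also lets a plain function whose name happens to be `get` or `set` (keyword immediately followed by '(') parse as an ordinary function.

    int _value = 0;
    int get value => _value;          // 'get' identifier, no parameter list
    void set value(int newValue) {    // 'set' identifier formalParameterList
      _value = newValue;
    }
    int twice(int x) => x * 2;        // functionSignature functionBody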
@@ -6077,108 +3197,27 @@ class Parser {
* functionDeclarationStatement ::=
* functionSignature functionBody
*/
- Statement _parseFunctionDeclarationStatement() {
- Modifiers modifiers = _parseModifiers();
+ Statement parseFunctionDeclarationStatement() {
+ Modifiers modifiers = parseModifiers();
_validateModifiersForFunctionDeclarationStatement(modifiers);
return _parseFunctionDeclarationStatementAfterReturnType(
- _parseCommentAndMetadata(), _parseOptionalReturnType());
- }
-
- /**
- * Parse a function declaration statement. The [commentAndMetadata] is the
- * documentation comment and metadata to be associated with the declaration.
- * The [returnType] is the return type, or `null` if there is no return type.
- * Return the function declaration statement that was parsed.
- *
- * functionDeclarationStatement ::=
- * functionSignature functionBody
- */
- Statement _parseFunctionDeclarationStatementAfterReturnType(
- CommentAndMetadata commentAndMetadata, TypeName returnType) {
- FunctionDeclaration declaration =
- _parseFunctionDeclaration(commentAndMetadata, null, returnType);
- Token propertyKeyword = declaration.propertyKeyword;
- if (propertyKeyword != null) {
- if ((propertyKeyword as KeywordToken).keyword == Keyword.GET) {
- _reportErrorForToken(
- ParserErrorCode.GETTER_IN_FUNCTION, propertyKeyword);
- } else {
- _reportErrorForToken(
- ParserErrorCode.SETTER_IN_FUNCTION, propertyKeyword);
- }
- }
- return new FunctionDeclarationStatement(declaration);
+ parseCommentAndMetadata(), _parseOptionalReturnType());
}
/**
- * Parse a function type alias. The [commentAndMetadata] is the metadata to be
- * associated with the member. The [keyword] is the token representing the
- * 'typedef' keyword. Return the function type alias that was parsed.
- *
- * functionTypeAlias ::=
- * functionPrefix typeParameterList? formalParameterList ';'
- *
- * functionPrefix ::=
- * returnType? name
- */
- FunctionTypeAlias _parseFunctionTypeAlias(
- CommentAndMetadata commentAndMetadata, Token keyword) {
- TypeName returnType = null;
- if (hasReturnTypeInTypeAlias) {
- returnType = parseReturnType();
- }
- SimpleIdentifier name = parseSimpleIdentifier();
- TypeParameterList typeParameters = null;
- if (_matches(TokenType.LT)) {
- typeParameters = parseTypeParameterList();
- }
- if (_matches(TokenType.SEMICOLON) || _matches(TokenType.EOF)) {
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
- FormalParameterList parameters = new FormalParameterList(
- _createSyntheticToken(TokenType.OPEN_PAREN),
- null,
- null,
- null,
- _createSyntheticToken(TokenType.CLOSE_PAREN));
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new FunctionTypeAlias(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- keyword,
- returnType,
- name,
- typeParameters,
- parameters,
- semicolon);
- } else if (!_matches(TokenType.OPEN_PAREN)) {
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
- // TODO(brianwilkerson) Recover from this error. At the very least we
- // should skip to the start of the next valid compilation unit member,
- // allowing for the possibility of finding the typedef parameters before
- // that point.
- return new FunctionTypeAlias(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- keyword,
- returnType,
- name,
- typeParameters,
- new FormalParameterList(_createSyntheticToken(TokenType.OPEN_PAREN),
- null, null, null, _createSyntheticToken(TokenType.CLOSE_PAREN)),
- _createSyntheticToken(TokenType.SEMICOLON));
- }
- FormalParameterList parameters = parseFormalParameterList();
- _validateFormalParameterList(parameters);
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new FunctionTypeAlias(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- keyword,
- returnType,
- name,
- typeParameters,
- parameters,
- semicolon);
+ * Parse a function expression. Return the function expression that was
+ * parsed.
+ *
+ * functionExpression ::=
+ * typeParameters? formalParameterList functionExpressionBody
+ */
+ FunctionExpression parseFunctionExpression() {
+ TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
+ FormalParameterList parameters = parseFormalParameterList();
+ _validateFormalParameterList(parameters);
+ FunctionBody body =
+ parseFunctionBody(false, ParserErrorCode.MISSING_FUNCTION_BODY, true);
+ return new FunctionExpression(typeParameters, parameters, body);
}
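
Hypothetical function expressions (not from this CL) of the shape parsed here: an optional type-parameter list, a formal parameter list, then an expression or block body.

    void runExamples() {
      var doubler = (int x) => x * 2;   // '=>' expression body
      var shout = (String s) {          // block body
        print(s.toUpperCase());
      };
      shout('result: ${doubler(21)}');
    }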
/**
@@ -6189,23 +3228,25 @@ class Parser {
* been parsed, or `null` if there was no return type. Return the getter that
* was parsed.
*
+ * This method assumes that the current token matches `Keyword.GET`.
+ *
* getter ::=
* getterSignature functionBody?
*
* getterSignature ::=
* 'external'? 'static'? returnType? 'get' identifier
*/
- MethodDeclaration _parseGetter(CommentAndMetadata commentAndMetadata,
+ MethodDeclaration parseGetter(CommentAndMetadata commentAndMetadata,
Token externalKeyword, Token staticKeyword, TypeName returnType) {
- Token propertyKeyword = _expectKeyword(Keyword.GET);
- SimpleIdentifier name = parseSimpleIdentifier();
+ Token propertyKeyword = getAndAdvance();
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
if (_matches(TokenType.OPEN_PAREN) &&
_tokenMatches(_peek(), TokenType.CLOSE_PAREN)) {
_reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS);
_advance();
_advance();
}
- FunctionBody body = _parseFunctionBody(
+ FunctionBody body = parseFunctionBody(
externalKeyword != null || staticKeyword == null,
ParserErrorCode.STATIC_GETTER_WITHOUT_BODY,
false);
@@ -6233,24 +3274,41 @@ class Parser {
* identifierList ::=
* identifier (',' identifier)*
*/
- List<SimpleIdentifier> _parseIdentifierList() {
- List<SimpleIdentifier> identifiers = new List<SimpleIdentifier>();
- identifiers.add(parseSimpleIdentifier());
- while (_matches(TokenType.COMMA)) {
- _advance();
+ List<SimpleIdentifier> parseIdentifierList() {
+ List<SimpleIdentifier> identifiers = <SimpleIdentifier>[
+ parseSimpleIdentifier()
+ ];
+ while (_optional(TokenType.COMMA)) {
identifiers.add(parseSimpleIdentifier());
}
return identifiers;
}
+ /**
+ * Parse an if-null expression. Return the if-null expression that was
+ * parsed.
+ *
+ * ifNullExpression ::= logicalOrExpression ('??' logicalOrExpression)*
+ */
+ Expression parseIfNullExpression() {
+ Expression expression = parseLogicalOrExpression();
+ while (_currentToken.type == TokenType.QUESTION_QUESTION) {
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseLogicalOrExpression());
+ }
+ return expression;
+ }
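+
+  Illustrative only (hypothetical helper): a '??' chain folded left to right
+  by the loop above.
+
+      String pick(String primary, String fallback) =>
+          primary ?? fallback ?? 'unknown';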
+
/**
* Parse an if statement. Return the if statement that was parsed.
*
+ * This method assumes that the current token matches `Keyword.IF`.
+ *
* ifStatement ::=
* 'if' '(' expression ')' statement ('else' statement)?
*/
- Statement _parseIfStatement() {
- Token ifKeyword = _expectKeyword(Keyword.IF);
+ Statement parseIfStatement() {
+ Token ifKeyword = getAndAdvance();
Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
Expression condition = parseExpression2();
Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
@@ -6265,16 +3323,38 @@ class Parser {
rightParenthesis, thenStatement, elseKeyword, elseStatement);
}
+ /**
+ * Parse an implements clause. Return the implements clause that was parsed.
+ *
+ * This method assumes that the current token matches `Keyword.IMPLEMENTS`.
+ *
+ * implementsClause ::=
+ * 'implements' type (',' type)*
+ */
+ ImplementsClause parseImplementsClause() {
+ Token keyword = getAndAdvance();
+ List<TypeName> interfaces = <TypeName>[];
+ do {
+ TypeName typeName = parseTypeName(false);
+ _mustNotBeNullable(typeName, ParserErrorCode.NULLABLE_TYPE_IN_IMPLEMENTS);
+ interfaces.add(typeName);
+ } while (_optional(TokenType.COMMA));
+ return new ImplementsClause(keyword, interfaces);
+ }
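+
+  A hypothetical class (not in the patch) showing the comma-separated list of
+  one or more interface types this clause parses:
+
+      class Money implements Comparable<Money> {
+        final int cents;
+        const Money(this.cents);
+        int compareTo(Money other) => cents.compareTo(other.cents);
+      }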
+
/**
* Parse an import directive. The [commentAndMetadata] is the metadata to be
* associated with the directive. Return the import directive that was parsed.
*
+ * This method assumes that the current token matches `Keyword.IMPORT`.
+ *
* importDirective ::=
- * metadata 'import' stringLiteral (deferred)? ('as' identifier)? combinator*';'
+ * metadata 'import' stringLiteral configuration* (deferred)? ('as' identifier)? combinator*';'
*/
- ImportDirective _parseImportDirective(CommentAndMetadata commentAndMetadata) {
- Token importKeyword = _expectKeyword(Keyword.IMPORT);
+ ImportDirective parseImportDirective(CommentAndMetadata commentAndMetadata) {
+ Token importKeyword = getAndAdvance();
StringLiteral libraryUri = _parseUri();
+ List<Configuration> configurations = _parseConfigurations();
Token deferredToken = null;
Token asToken = null;
SimpleIdentifier prefix = null;
@@ -6283,7 +3363,7 @@ class Parser {
}
if (_matchesKeyword(Keyword.AS)) {
asToken = getAndAdvance();
- prefix = parseSimpleIdentifier();
+ prefix = parseSimpleIdentifier(isDeclaration: true);
} else if (deferredToken != null) {
_reportErrorForCurrentToken(
ParserErrorCode.MISSING_PREFIX_IN_DEFERRED_IMPORT);
@@ -6299,17 +3379,18 @@ class Parser {
_advance();
if (_matchesKeyword(Keyword.AS)) {
asToken = getAndAdvance();
- prefix = parseSimpleIdentifier();
+ prefix = parseSimpleIdentifier(isDeclaration: true);
}
}
}
- List<Combinator> combinators = _parseCombinators();
- Token semicolon = _expectSemicolon();
+ List<Combinator> combinators = parseCombinators();
+ Token semicolon = _expect(TokenType.SEMICOLON);
return new ImportDirective(
commentAndMetadata.comment,
commentAndMetadata.metadata,
importKeyword,
libraryUri,
+ configurations,
deferredToken,
asToken,
prefix,
@@ -6336,13 +3417,13 @@ class Parser {
* initializedIdentifier ::=
* identifier ('=' expression)?
*/
- FieldDeclaration _parseInitializedIdentifierList(
+ FieldDeclaration parseInitializedIdentifierList(
CommentAndMetadata commentAndMetadata,
Token staticKeyword,
Token keyword,
TypeName type) {
VariableDeclarationList fieldList =
- _parseVariableDeclarationListAfterType(null, keyword, type);
+ parseVariableDeclarationListAfterType(null, keyword, type);
return new FieldDeclaration(
commentAndMetadata.comment,
commentAndMetadata.metadata,
@@ -6359,24 +3440,42 @@ class Parser {
* instanceCreationExpression ::=
* ('new' | 'const') type ('.' identifier)? argumentList
*/
- InstanceCreationExpression _parseInstanceCreationExpression(Token keyword) {
+ InstanceCreationExpression parseInstanceCreationExpression(Token keyword) {
ConstructorName constructorName = parseConstructorName();
- ArgumentList argumentList = parseArgumentList();
+ ArgumentList argumentList = _parseArgumentListChecked();
return new InstanceCreationExpression(
keyword, constructorName, argumentList);
}
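
Illustrative creations (not part of this patch) showing what is consumed here after the 'new' or 'const' keyword: a constructor name (type plus optional '.' identifier) followed by an argument list.

    var now = new DateTime.now();
    var copy = new Map<String, int>.from(const {'answer': 42});
    const none = const Duration();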
+ /**
+ * Parse a label. Return the label that was parsed.
+ *
+ * This method assumes that the current token matches an identifier and that
+ * the following token matches `TokenType.COLON`.
+ *
+ * label ::=
+ * identifier ':'
+ */
+ Label parseLabel({bool isDeclaration: false}) {
+ SimpleIdentifier label =
+ _parseSimpleIdentifierUnchecked(isDeclaration: isDeclaration);
+ Token colon = getAndAdvance();
+ return new Label(label, colon);
+ }
+
/**
* Parse a library directive. The [commentAndMetadata] is the metadata to be
* associated with the directive. Return the library directive that was
* parsed.
*
+ * This method assumes that the current token matches `Keyword.LIBRARY`.
+ *
* libraryDirective ::=
* metadata 'library' identifier ';'
*/
- LibraryDirective _parseLibraryDirective(
+ LibraryDirective parseLibraryDirective(
CommentAndMetadata commentAndMetadata) {
- Token keyword = _expectKeyword(Keyword.LIBRARY);
+ Token keyword = getAndAdvance();
LibraryIdentifier libraryName = _parseLibraryName(
ParserErrorCode.MISSING_NAME_IN_LIBRARY_DIRECTIVE, keyword);
Token semicolon = _expect(TokenType.SEMICOLON);
@@ -6385,29 +3484,17 @@ class Parser {
}
/**
- * Parse a library name. The [missingNameError] is the error code to be used
- * if the library name is missing. The [missingNameToken] is the token
- * associated with the error produced if the library name is missing. Return
- * the library name that was parsed.
+ * Parse a library identifier. Return the library identifier that was parsed.
*
- * libraryName ::=
- * libraryIdentifier
+ * libraryIdentifier ::=
+ * identifier ('.' identifier)*
*/
- LibraryIdentifier _parseLibraryName(
- ParserErrorCode missingNameError, Token missingNameToken) {
- if (_matchesIdentifier()) {
- return parseLibraryIdentifier();
- } else if (_matches(TokenType.STRING)) {
- // TODO(brianwilkerson) Recovery: This should be extended to handle
- // arbitrary tokens until we can find a token that can start a compilation
- // unit member.
- StringLiteral string = parseStringLiteral();
- _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string);
- } else {
- _reportErrorForToken(missingNameError, missingNameToken);
+ LibraryIdentifier parseLibraryIdentifier() {
+ List<SimpleIdentifier> components = <SimpleIdentifier>[];
+ components.add(parseSimpleIdentifier());
+ while (_optional(TokenType.PERIOD)) {
+ components.add(parseSimpleIdentifier());
}
- List<SimpleIdentifier> components = new List<SimpleIdentifier>();
- components.add(_createSyntheticIdentifier());
return new LibraryIdentifier(components);
}
@@ -6417,13 +3504,15 @@ class Parser {
* is the type arguments appearing before the literal, or `null` if there are
* no type arguments. Return the list literal that was parsed.
*
+ * This method assumes that the current token matches either
+ * `TokenType.OPEN_SQUARE_BRACKET` or `TokenType.INDEX`.
+ *
* listLiteral ::=
* 'const'? typeArguments? '[' (expressionList ','?)? ']'
*/
- ListLiteral _parseListLiteral(
- Token modifier, TypeArgumentList typeArguments) {
- // may be empty list literal
+ ListLiteral parseListLiteral(Token modifier, TypeArgumentList typeArguments) {
if (_matches(TokenType.INDEX)) {
+ // Split the token into two separate tokens.
BeginToken leftBracket = _createToken(
_currentToken, TokenType.OPEN_SQUARE_BRACKET,
isBegin: true);
@@ -6437,8 +3526,7 @@ class Parser {
return new ListLiteral(
modifier, typeArguments, leftBracket, null, rightBracket);
}
- // open
- Token leftBracket = _expect(TokenType.OPEN_SQUARE_BRACKET);
+ Token leftBracket = getAndAdvance();
if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) {
return new ListLiteral(
modifier, typeArguments, leftBracket, null, getAndAdvance());
@@ -6446,8 +3534,7 @@ class Parser {
bool wasInInitializer = _inInitializer;
_inInitializer = false;
try {
- List<Expression> elements = new List<Expression>();
- elements.add(parseExpression2());
+ List<Expression> elements = <Expression>[parseExpression2()];
while (_optional(TokenType.COMMA)) {
if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) {
return new ListLiteral(
@@ -6472,16 +3559,13 @@ class Parser {
* listLiteral
* | mapLiteral
*/
- TypedLiteral _parseListOrMapLiteral(Token modifier) {
- TypeArgumentList typeArguments = null;
- if (_matches(TokenType.LT)) {
- typeArguments = parseTypeArgumentList();
- }
+ TypedLiteral parseListOrMapLiteral(Token modifier) {
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- return _parseMapLiteral(modifier, typeArguments);
+ return parseMapLiteral(modifier, typeArguments);
} else if (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
_matches(TokenType.INDEX)) {
- return _parseListLiteral(modifier, typeArguments);
+ return parseListLiteral(modifier, typeArguments);
}
_reportErrorForCurrentToken(ParserErrorCode.EXPECTED_LIST_OR_MAP_LITERAL);
return new ListLiteral(
@@ -6499,12 +3583,27 @@ class Parser {
* logicalAndExpression ::=
* equalityExpression ('&&' equalityExpression)*
*/
- Expression _parseLogicalAndExpression() {
- Expression expression = _parseEqualityExpression();
- while (_matches(TokenType.AMPERSAND_AMPERSAND)) {
- Token operator = getAndAdvance();
+ Expression parseLogicalAndExpression() {
+ Expression expression = parseEqualityExpression();
+ while (_currentToken.type == TokenType.AMPERSAND_AMPERSAND) {
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseEqualityExpression());
+ }
+ return expression;
+ }
+
+ /**
+ * Parse a logical or expression. Return the logical or expression that was
+ * parsed.
+ *
+ * logicalOrExpression ::=
+ * logicalAndExpression ('||' logicalAndExpression)*
+ */
+ Expression parseLogicalOrExpression() {
+ Expression expression = parseLogicalAndExpression();
+ while (_currentToken.type == TokenType.BAR_BAR) {
expression = new BinaryExpression(
- expression, operator, _parseEqualityExpression());
+ expression, getAndAdvance(), parseLogicalAndExpression());
}
return expression;
}
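For review context, a minimal Dart sketch (illustrative only, not part of this patch) of the two productions above; `&&` binds tighter than `||`, and both parse left-associatively:

bool demoLogical(bool a, bool b, bool c) {
  // Parsed as `a || (b && c)`: parseLogicalOrExpression reads each operand
  // via parseLogicalAndExpression, so `&&` groups before `||`.
  return a || b && c;
}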
@@ -6515,20 +3614,22 @@ class Parser {
* is the type arguments that were declared, or `null` if there are no type
* arguments. Return the map literal that was parsed.
*
+ * This method assumes that the current token matches
+ * `TokenType.OPEN_CURLY_BRACKET`.
+ *
* mapLiteral ::=
* 'const'? typeArguments? '{' (mapLiteralEntry (',' mapLiteralEntry)* ','?)? '}'
*/
- MapLiteral _parseMapLiteral(Token modifier, TypeArgumentList typeArguments) {
- Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
- List<MapLiteralEntry> entries = new List<MapLiteralEntry>();
+ MapLiteral parseMapLiteral(Token modifier, TypeArgumentList typeArguments) {
+ Token leftBracket = getAndAdvance();
if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
return new MapLiteral(
- modifier, typeArguments, leftBracket, entries, getAndAdvance());
+ modifier, typeArguments, leftBracket, null, getAndAdvance());
}
bool wasInInitializer = _inInitializer;
_inInitializer = false;
try {
- entries.add(parseMapLiteralEntry());
+ List<MapLiteralEntry> entries = <MapLiteralEntry>[parseMapLiteralEntry()];
while (_optional(TokenType.COMMA)) {
if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
return new MapLiteral(
@@ -6545,99 +3646,16 @@ class Parser {
}
/**
- * Parse a method declaration. The [commentAndMetadata] is the documentation
- * comment and metadata to be associated with the declaration. The
- * [externalKeyword] is the 'external' token. The [staticKeyword] is the
- * static keyword, or `null` if the getter is not static. The [returnType] is
- * the return type of the method. The [name] is the name of the method. The
- * [parameters] is the parameters to the method. Return the method declaration
- * that was parsed.
- *
- * functionDeclaration ::=
- * ('external' 'static'?)? functionSignature functionBody
- * | 'external'? functionSignature ';'
- */
- MethodDeclaration _parseMethodDeclarationAfterParameters(
- CommentAndMetadata commentAndMetadata,
- Token externalKeyword,
- Token staticKeyword,
- TypeName returnType,
- SimpleIdentifier name,
- TypeParameterList typeParameters,
- FormalParameterList parameters) {
- FunctionBody body = _parseFunctionBody(
- externalKeyword != null || staticKeyword == null,
- ParserErrorCode.MISSING_FUNCTION_BODY,
- false);
- if (externalKeyword != null) {
- if (body is! EmptyFunctionBody) {
- _reportErrorForNode(ParserErrorCode.EXTERNAL_METHOD_WITH_BODY, body);
- }
- } else if (staticKeyword != null) {
- if (body is EmptyFunctionBody && _parseFunctionBodies) {
- _reportErrorForNode(ParserErrorCode.ABSTRACT_STATIC_METHOD, body);
- }
- }
- return new MethodDeclaration(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- externalKeyword,
- staticKeyword,
- returnType,
- null,
- null,
- name,
- typeParameters,
- parameters,
- body);
- }
-
- /**
- * Parse a method declaration. The [commentAndMetadata] is the documentation
- * comment and metadata to be associated with the declaration. The
- * [externalKeyword] is the 'external' token. The [staticKeyword] is the
- * static keyword, or `null` if the getter is not static. The [returnType] is
- * the return type of the method. Return the method declaration that was
- * parsed.
+ * Parse a map literal entry. Return the map literal entry that was parsed.
*
- * functionDeclaration ::=
- * 'external'? 'static'? functionSignature functionBody
- * | 'external'? functionSignature ';'
+ * mapLiteralEntry ::=
+ * expression ':' expression
*/
- MethodDeclaration _parseMethodDeclarationAfterReturnType(
- CommentAndMetadata commentAndMetadata,
- Token externalKeyword,
- Token staticKeyword,
- TypeName returnType) {
- SimpleIdentifier methodName = parseSimpleIdentifier();
- TypeParameterList typeParameters = null;
- if (parseGenericMethods && _matches(TokenType.LT)) {
- typeParameters = parseTypeParameterList();
- }
- FormalParameterList parameters;
- if (!_matches(TokenType.OPEN_PAREN) &&
- (_matches(TokenType.OPEN_CURLY_BRACKET) ||
- _matches(TokenType.FUNCTION))) {
- _reportErrorForToken(
- ParserErrorCode.MISSING_METHOD_PARAMETERS, _currentToken.previous);
- parameters = new FormalParameterList(
- _createSyntheticToken(TokenType.OPEN_PAREN),
- null,
- null,
- null,
- _createSyntheticToken(TokenType.CLOSE_PAREN));
- } else {
- parameters = parseFormalParameterList();
- }
- _validateFormalParameterList(parameters);
- return _parseMethodDeclarationAfterParameters(
- commentAndMetadata,
- externalKeyword,
- staticKeyword,
- returnType,
- methodName,
- typeParameters,
- parameters);
+ MapLiteralEntry parseMapLiteralEntry() {
+ Expression key = parseExpression2();
+ Token separator = _expect(TokenType.COLON);
+ Expression value = parseExpression2();
+ return new MapLiteralEntry(key, separator, value);
}
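A small illustrative example (not from the patch) of the mapLiteralEntry production, `expression ':' expression`, inside a map literal:

final Map<String, int> ages = <String, int>{
  'alice': 34, // each `key: value` pair is one mapLiteralEntry
  'bob': 27,
};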
/**
@@ -6651,16 +3669,18 @@ class Parser {
* modifiers ::=
* ('abstract' | 'const' | 'external' | 'factory' | 'final' | 'static' | 'var')*
*/
- Modifiers _parseModifiers() {
+ Modifiers parseModifiers() {
Modifiers modifiers = new Modifiers();
bool progress = true;
while (progress) {
- if (_tokenMatches(_peek(), TokenType.PERIOD) ||
- _tokenMatches(_peek(), TokenType.LT) ||
- _tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+ TokenType nextType = _peek().type;
+ if (nextType == TokenType.PERIOD ||
+ nextType == TokenType.LT ||
+ nextType == TokenType.OPEN_PAREN) {
return modifiers;
}
- if (_matchesKeyword(Keyword.ABSTRACT)) {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.ABSTRACT) {
if (modifiers.abstractKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6668,7 +3688,7 @@ class Parser {
} else {
modifiers.abstractKeyword = getAndAdvance();
}
- } else if (_matchesKeyword(Keyword.CONST)) {
+ } else if (keyword == Keyword.CONST) {
if (modifiers.constKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6676,9 +3696,7 @@ class Parser {
} else {
modifiers.constKeyword = getAndAdvance();
}
- } else if (_matchesKeyword(Keyword.EXTERNAL) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT)) {
+ } else if (keyword == Keyword.EXTERNAL) {
if (modifiers.externalKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6686,9 +3704,7 @@ class Parser {
} else {
modifiers.externalKeyword = getAndAdvance();
}
- } else if (_matchesKeyword(Keyword.FACTORY) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT)) {
+ } else if (keyword == Keyword.FACTORY) {
if (modifiers.factoryKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6696,7 +3712,7 @@ class Parser {
} else {
modifiers.factoryKeyword = getAndAdvance();
}
- } else if (_matchesKeyword(Keyword.FINAL)) {
+ } else if (keyword == Keyword.FINAL) {
if (modifiers.finalKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6704,9 +3720,7 @@ class Parser {
} else {
modifiers.finalKeyword = getAndAdvance();
}
- } else if (_matchesKeyword(Keyword.STATIC) &&
- !_tokenMatches(_peek(), TokenType.PERIOD) &&
- !_tokenMatches(_peek(), TokenType.LT)) {
+ } else if (keyword == Keyword.STATIC) {
if (modifiers.staticKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6714,7 +3728,7 @@ class Parser {
} else {
modifiers.staticKeyword = getAndAdvance();
}
- } else if (_matchesKeyword(Keyword.VAR)) {
+ } else if (keyword == Keyword.VAR) {
if (modifiers.varKeyword != null) {
_reportErrorForCurrentToken(
ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6737,42 +3751,31 @@ class Parser {
* unaryExpression (multiplicativeOperator unaryExpression)*
* | 'super' (multiplicativeOperator unaryExpression)+
*/
- Expression _parseMultiplicativeExpression() {
+ Expression parseMultiplicativeExpression() {
Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
+ if (_currentToken.keyword == Keyword.SUPER &&
_currentToken.next.type.isMultiplicativeOperator) {
expression = new SuperExpression(getAndAdvance());
} else {
- expression = _parseUnaryExpression();
+ expression = parseUnaryExpression();
}
while (_currentToken.type.isMultiplicativeOperator) {
- Token operator = getAndAdvance();
- expression =
- new BinaryExpression(expression, operator, _parseUnaryExpression());
+ expression = new BinaryExpression(
+ expression, getAndAdvance(), parseUnaryExpression());
}
return expression;
}
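Illustrative sketch (not from the patch) of multiplicativeExpression, including the leading-`super` form recognized by the first branch:

class Base {
  int operator *(int other) => other;
}

class Derived extends Base {
  // `super * 2 * 3` parses as `(super * 2) * 3`; the leading `super` is the
  // `Keyword.SUPER` case checked above.
  int triple() => super * 2 * 3;
}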
- /**
- * Parse a class native clause. Return the native clause that was parsed.
- *
- * classNativeClause ::=
- * 'native' name
- */
- NativeClause _parseNativeClause() {
- Token keyword = getAndAdvance();
- StringLiteral name = parseStringLiteral();
- return new NativeClause(keyword, name);
- }
-
/**
* Parse a new expression. Return the new expression that was parsed.
*
+ * This method assumes that the current token matches `Keyword.NEW`.
+ *
* newExpression ::=
* instanceCreationExpression
*/
- InstanceCreationExpression _parseNewExpression() =>
- _parseInstanceCreationExpression(_expectKeyword(Keyword.NEW));
+ InstanceCreationExpression parseNewExpression() =>
+ parseInstanceCreationExpression(getAndAdvance());
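Illustrative sketch (not from the patch) of the newExpression production delegated to parseInstanceCreationExpression:

// `new` (already consumed by getAndAdvance above) is followed by a
// constructor name and an argument list.
final DateTime epoch = new DateTime.fromMillisecondsSinceEpoch(0);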
/**
* Parse a non-labeled statement. Return the non-labeled statement that was
@@ -6794,59 +3797,63 @@ class Parser {
* | expressionStatement
* | functionSignature functionBody
*/
- Statement _parseNonLabeledStatement() {
+ Statement parseNonLabeledStatement() {
// TODO(brianwilkerson) Pass the comment and metadata on where appropriate.
- CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
- if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ TokenType type = _currentToken.type;
+ if (type == TokenType.OPEN_CURLY_BRACKET) {
if (_tokenMatches(_peek(), TokenType.STRING)) {
- Token afterString = _skipStringLiteral(_currentToken.next);
+ Token afterString = skipStringLiteral(_currentToken.next);
if (afterString != null && afterString.type == TokenType.COLON) {
return new ExpressionStatement(
parseExpression2(), _expect(TokenType.SEMICOLON));
}
}
return parseBlock();
- } else if (_matches(TokenType.KEYWORD) &&
- !(_currentToken as KeywordToken).keyword.isPseudoKeyword) {
- Keyword keyword = (_currentToken as KeywordToken).keyword;
+ } else if (type == TokenType.KEYWORD &&
+ !_currentToken.keyword.isPseudoKeyword) {
+ Keyword keyword = _currentToken.keyword;
// TODO(jwren) compute some metrics to figure out a better order for this
// if-then sequence to optimize performance
if (keyword == Keyword.ASSERT) {
- return _parseAssertStatement();
+ return parseAssertStatement();
} else if (keyword == Keyword.BREAK) {
- return _parseBreakStatement();
+ return parseBreakStatement();
} else if (keyword == Keyword.CONTINUE) {
- return _parseContinueStatement();
+ return parseContinueStatement();
} else if (keyword == Keyword.DO) {
- return _parseDoStatement();
+ return parseDoStatement();
} else if (keyword == Keyword.FOR) {
- return _parseForStatement();
+ return parseForStatement();
} else if (keyword == Keyword.IF) {
- return _parseIfStatement();
+ return parseIfStatement();
} else if (keyword == Keyword.RETHROW) {
return new ExpressionStatement(
- _parseRethrowExpression(), _expect(TokenType.SEMICOLON));
+ parseRethrowExpression(), _expect(TokenType.SEMICOLON));
} else if (keyword == Keyword.RETURN) {
- return _parseReturnStatement();
+ return parseReturnStatement();
} else if (keyword == Keyword.SWITCH) {
- return _parseSwitchStatement();
+ return parseSwitchStatement();
} else if (keyword == Keyword.THROW) {
return new ExpressionStatement(
- _parseThrowExpression(), _expect(TokenType.SEMICOLON));
+ parseThrowExpression(), _expect(TokenType.SEMICOLON));
} else if (keyword == Keyword.TRY) {
- return _parseTryStatement();
+ return parseTryStatement();
} else if (keyword == Keyword.WHILE) {
- return _parseWhileStatement();
+ return parseWhileStatement();
} else if (keyword == Keyword.VAR || keyword == Keyword.FINAL) {
- return _parseVariableDeclarationStatementAfterMetadata(
+ return parseVariableDeclarationStatementAfterMetadata(
commentAndMetadata);
} else if (keyword == Keyword.VOID) {
- TypeName returnType = parseReturnType();
+ TypeName returnType =
+ new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+ Token next = _currentToken.next;
if (_matchesIdentifier() &&
- _peek().matchesAny([
+ next.matchesAny(const <TokenType>[
TokenType.OPEN_PAREN,
TokenType.OPEN_CURLY_BRACKET,
- TokenType.FUNCTION
+ TokenType.FUNCTION,
+ TokenType.LT
])) {
return _parseFunctionDeclarationStatementAfterReturnType(
commentAndMetadata, returnType);
@@ -6855,13 +3862,16 @@ class Parser {
// We have found an error of some kind. Try to recover.
//
if (_matchesIdentifier()) {
- if (_peek().matchesAny(
- [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
+ if (next.matchesAny(const <TokenType>[
+ TokenType.EQ,
+ TokenType.COMMA,
+ TokenType.SEMICOLON
+ ])) {
//
// We appear to have a variable declaration with a type of "void".
//
_reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType);
- return _parseVariableDeclarationStatementAfterMetadata(
+ return parseVariableDeclarationStatementAfterMetadata(
commentAndMetadata);
}
} else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
@@ -6877,7 +3887,8 @@ class Parser {
return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
}
} else if (keyword == Keyword.CONST) {
- if (_peek().matchesAny([
+ Token next = _currentToken.next;
+ if (next.matchesAny(const <TokenType>[
TokenType.LT,
TokenType.OPEN_CURLY_BRACKET,
TokenType.OPEN_SQUARE_BRACKET,
@@ -6885,8 +3896,8 @@ class Parser {
])) {
return new ExpressionStatement(
parseExpression2(), _expect(TokenType.SEMICOLON));
- } else if (_tokenMatches(_peek(), TokenType.IDENTIFIER)) {
- Token afterType = _skipTypeName(_peek());
+ } else if (_tokenMatches(next, TokenType.IDENTIFIER)) {
+ Token afterType = skipTypeName(next);
if (afterType != null) {
if (_tokenMatches(afterType, TokenType.OPEN_PAREN) ||
(_tokenMatches(afterType, TokenType.PERIOD) &&
@@ -6897,7 +3908,7 @@ class Parser {
}
}
}
- return _parseVariableDeclarationStatementAfterMetadata(
+ return parseVariableDeclarationStatementAfterMetadata(
commentAndMetadata);
} else if (keyword == Keyword.NEW ||
keyword == Keyword.TRUE ||
@@ -6915,35 +3926,134 @@ class Parser {
return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
}
} else if (_inGenerator && _matchesString(_YIELD)) {
- return _parseYieldStatement();
+ return parseYieldStatement();
} else if (_inAsync && _matchesString(_AWAIT)) {
if (_tokenMatchesKeyword(_peek(), Keyword.FOR)) {
- return _parseForStatement();
+ return parseForStatement();
+ }
+ return new ExpressionStatement(
+ parseExpression2(), _expect(TokenType.SEMICOLON));
+ } else if (_matchesString(_AWAIT) &&
+ _tokenMatchesKeyword(_peek(), Keyword.FOR)) {
+ Token awaitToken = _currentToken;
+ Statement statement = parseForStatement();
+ if (statement is! ForStatement) {
+ _reportErrorForToken(
+ CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken);
+ }
+ return statement;
+ } else if (type == TokenType.SEMICOLON) {
+ return parseEmptyStatement();
+ } else if (isInitializedVariableDeclaration()) {
+ return parseVariableDeclarationStatementAfterMetadata(commentAndMetadata);
+ } else if (isFunctionDeclaration()) {
+ return parseFunctionDeclarationStatement();
+ } else if (type == TokenType.CLOSE_CURLY_BRACKET) {
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
+ return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
+ } else {
+ return new ExpressionStatement(
+ parseExpression2(), _expect(TokenType.SEMICOLON));
+ }
+ }
+
+ /**
+ * Parse a normal formal parameter. Return the normal formal parameter that
+ * was parsed.
+ *
+ * normalFormalParameter ::=
+ * functionSignature
+ * | fieldFormalParameter
+ * | simpleFormalParameter
+ *
+ * functionSignature:
+ * metadata returnType? identifier typeParameters? formalParameterList
+ *
+ * fieldFormalParameter ::=
+ * metadata finalConstVarOrType? 'this' '.' identifier
+ *
+ * simpleFormalParameter ::=
+ * declaredIdentifier
+ * | metadata identifier
+ */
+ NormalFormalParameter parseNormalFormalParameter() {
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ FinalConstVarOrType holder = parseFinalConstVarOrType(true);
+ Token thisKeyword = null;
+ Token period = null;
+ if (_matchesKeyword(Keyword.THIS)) {
+ thisKeyword = getAndAdvance();
+ period = _expect(TokenType.PERIOD);
+ }
+ SimpleIdentifier identifier = parseSimpleIdentifier();
+ TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
+ if (_matches(TokenType.OPEN_PAREN)) {
+ FormalParameterList parameters = _parseFormalParameterListUnchecked();
+ if (thisKeyword == null) {
+ if (holder.keyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.FUNCTION_TYPED_PARAMETER_VAR, holder.keyword);
+ }
+ Token question = null;
+ if (enableNnbd && _matches(TokenType.QUESTION)) {
+ question = getAndAdvance();
+ }
+ return new FunctionTypedFormalParameter(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ holder.type,
+ new SimpleIdentifier(identifier.token, isDeclaration: true),
+ typeParameters,
+ parameters,
+ question: question);
+ } else {
+ return new FieldFormalParameter(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ holder.keyword,
+ holder.type,
+ thisKeyword,
+ period,
+ identifier,
+ typeParameters,
+ parameters);
}
- return new ExpressionStatement(
- parseExpression2(), _expect(TokenType.SEMICOLON));
- } else if (_matchesString(_AWAIT) &&
- _tokenMatchesKeyword(_peek(), Keyword.FOR)) {
- Token awaitToken = _currentToken;
- Statement statement = _parseForStatement();
- if (statement is! ForStatement) {
+ } else if (typeParameters != null) {
+ // TODO(brianwilkerson) Report an error. It looks like a function-typed
+ // parameter with no parameter list.
+ //_reportErrorForToken(ParserErrorCode.MISSING_PARAMETERS, typeParameters.endToken);
+ }
+ TypeName type = holder.type;
+ if (type != null) {
+ if (_tokenMatchesKeyword(type.name.beginToken, Keyword.VOID)) {
_reportErrorForToken(
- CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken);
+ ParserErrorCode.VOID_PARAMETER, type.name.beginToken);
+ } else if (holder.keyword != null &&
+ _tokenMatchesKeyword(holder.keyword, Keyword.VAR)) {
+ _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, holder.keyword);
}
- return statement;
- } else if (_matches(TokenType.SEMICOLON)) {
- return _parseEmptyStatement();
- } else if (_isInitializedVariableDeclaration()) {
- return _parseVariableDeclarationStatementAfterMetadata(
- commentAndMetadata);
- } else if (_isFunctionDeclaration()) {
- return _parseFunctionDeclarationStatement();
- } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
- return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
- } else {
- return new ExpressionStatement(parseExpression2(), _expectSemicolon());
}
+ if (thisKeyword != null) {
+ // TODO(brianwilkerson) If there are type parameters but no parameters,
+ // should we create a synthetic empty parameter list here so we can
+ // capture the type parameters?
+ return new FieldFormalParameter(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ holder.keyword,
+ holder.type,
+ thisKeyword,
+ period,
+ identifier,
+ null,
+ null);
+ }
+ return new SimpleFormalParameter(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ holder.keyword,
+ holder.type,
+ new SimpleIdentifier(identifier.token, isDeclaration: true));
}
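An illustrative sketch (not from the patch) showing each normalFormalParameter alternative listed in the comment above: a field formal parameter, a simple parameter, and a function-typed parameter:

class Point {
  final int x;
  final int y;
  Point(this.x, this.y); // `this.x` and `this.y` are fieldFormalParameters
}

// `int count` is a simpleFormalParameter; `bool filter(int value)` is a
// function-typed parameter (a functionSignature used as a parameter).
int countMatching(int count, bool filter(int value)) {
  int matches = 0;
  for (int i = 0; i < count; i++) {
    if (filter(i)) {
      matches++;
    }
  }
  return matches;
}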
/**
@@ -6959,7 +4069,7 @@ class Parser {
* operatorSignature ::=
* 'external'? returnType? 'operator' operator formalParameterList
*/
- MethodDeclaration _parseOperator(CommentAndMetadata commentAndMetadata,
+ MethodDeclaration parseOperator(CommentAndMetadata commentAndMetadata,
Token externalKeyword, TypeName returnType) {
Token operatorKeyword;
if (_matchesKeyword(Keyword.OPERATOR)) {
@@ -6969,64 +4079,8 @@ class Parser {
ParserErrorCode.MISSING_KEYWORD_OPERATOR, _currentToken);
operatorKeyword = _createSyntheticKeyword(Keyword.OPERATOR);
}
- if (!_currentToken.isUserDefinableOperator) {
- _reportErrorForCurrentToken(
- ParserErrorCode.NON_USER_DEFINABLE_OPERATOR, [_currentToken.lexeme]);
- }
- SimpleIdentifier name = new SimpleIdentifier(getAndAdvance());
- if (_matches(TokenType.EQ)) {
- Token previous = _currentToken.previous;
- if ((_tokenMatches(previous, TokenType.EQ_EQ) ||
- _tokenMatches(previous, TokenType.BANG_EQ)) &&
- _currentToken.offset == previous.offset + 2) {
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_OPERATOR,
- ["${previous.lexeme}${_currentToken.lexeme}"]);
- _advance();
- }
- }
- FormalParameterList parameters = parseFormalParameterList();
- _validateFormalParameterList(parameters);
- FunctionBody body =
- _parseFunctionBody(true, ParserErrorCode.MISSING_FUNCTION_BODY, false);
- if (externalKeyword != null && body is! EmptyFunctionBody) {
- _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_OPERATOR_WITH_BODY);
- }
- return new MethodDeclaration(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- externalKeyword,
- null,
- returnType,
- null,
- operatorKeyword,
- name,
- null,
- parameters,
- body);
- }
-
- /**
- * Parse a return type if one is given, otherwise return `null` without
- * advancing. Return the return type that was parsed.
- */
- TypeName _parseOptionalReturnType() {
- if (_matchesKeyword(Keyword.VOID)) {
- return parseReturnType();
- } else if (_matchesIdentifier() &&
- !_matchesKeyword(Keyword.GET) &&
- !_matchesKeyword(Keyword.SET) &&
- !_matchesKeyword(Keyword.OPERATOR) &&
- (_tokenMatchesIdentifier(_peek()) ||
- _tokenMatches(_peek(), TokenType.LT))) {
- return parseReturnType();
- } else if (_matchesIdentifier() &&
- _tokenMatches(_peek(), TokenType.PERIOD) &&
- _tokenMatchesIdentifier(_peekAt(2)) &&
- (_tokenMatchesIdentifier(_peekAt(3)) ||
- _tokenMatches(_peekAt(3), TokenType.LT))) {
- return parseReturnType();
- }
- return null;
+ return _parseOperatorAfterKeyword(
+ commentAndMetadata, externalKeyword, returnType, operatorKeyword);
}
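Illustrative sketch (not from the patch) of an operatorSignature, the production handled here and in _parseOperatorAfterKeyword:

class Vector {
  final num dx;
  final num dy;
  const Vector(this.dx, this.dy);

  // 'operator', a user-definable operator, then a formalParameterList.
  Vector operator +(Vector other) => new Vector(dx + other.dx, dy + other.dy);
}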
/**
@@ -7034,31 +4088,19 @@ class Parser {
* to be associated with the directive. Return the part or part-of directive
* that was parsed.
*
+ * This method assumes that the current token matches `Keyword.PART`.
+ *
* partDirective ::=
* metadata 'part' stringLiteral ';'
*
* partOfDirective ::=
* metadata 'part' 'of' identifier ';'
*/
- Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) {
- Token partKeyword = _expectKeyword(Keyword.PART);
- if (_matchesString(_OF)) {
- Token ofKeyword = getAndAdvance();
- LibraryIdentifier libraryName = _parseLibraryName(
- ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword);
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new PartOfDirective(
- commentAndMetadata.comment,
- commentAndMetadata.metadata,
- partKeyword,
- ofKeyword,
- libraryName,
- semicolon);
+ Directive parsePartOrPartOfDirective(CommentAndMetadata commentAndMetadata) {
+ if (_tokenMatchesString(_peek(), _OF)) {
+ return _parsePartOfDirective(commentAndMetadata);
}
- StringLiteral partUri = _parseUri();
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new PartDirective(commentAndMetadata.comment,
- commentAndMetadata.metadata, partKeyword, partUri, semicolon);
+ return _parsePartDirective(commentAndMetadata);
}
/**
@@ -7072,35 +4114,38 @@ class Parser {
* assignableSelector
* | argumentList
*/
- Expression _parsePostfixExpression() {
- Expression operand = _parseAssignableExpression(true);
- if (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
- _matches(TokenType.PERIOD) ||
- _matches(TokenType.QUESTION_PERIOD) ||
- _matches(TokenType.OPEN_PAREN) ||
- (parseGenericMethods && _matches(TokenType.LT))) {
+ Expression parsePostfixExpression() {
+ Expression operand = parseAssignableExpression(true);
+ TokenType type = _currentToken.type;
+ if (type == TokenType.OPEN_SQUARE_BRACKET ||
+ type == TokenType.PERIOD ||
+ type == TokenType.QUESTION_PERIOD ||
+ type == TokenType.OPEN_PAREN ||
+ (parseGenericMethods && type == TokenType.LT)) {
do {
- if (_isLikelyParameterList()) {
- TypeArgumentList typeArguments = null;
- if (_matches(TokenType.LT)) {
- typeArguments = parseTypeArgumentList();
- }
+ if (_isLikelyArgumentList()) {
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
ArgumentList argumentList = parseArgumentList();
- if (operand is PropertyAccess) {
- PropertyAccess access = operand as PropertyAccess;
- operand = new MethodInvocation(access.target, access.operator,
- access.propertyName, typeArguments, argumentList);
+ Expression currentOperand = operand;
+ if (currentOperand is PropertyAccess) {
+ operand = new MethodInvocation(
+ currentOperand.target,
+ currentOperand.operator,
+ currentOperand.propertyName,
+ typeArguments,
+ argumentList);
} else {
operand = new FunctionExpressionInvocation(
operand, typeArguments, argumentList);
}
} else {
- operand = _parseAssignableSelector(operand, true);
+ operand = parseAssignableSelector(operand, true);
}
- } while (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
- _matches(TokenType.PERIOD) ||
- _matches(TokenType.QUESTION_PERIOD) ||
- _matches(TokenType.OPEN_PAREN));
+ type = _currentToken.type;
+ } while (type == TokenType.OPEN_SQUARE_BRACKET ||
+ type == TokenType.PERIOD ||
+ type == TokenType.QUESTION_PERIOD ||
+ type == TokenType.OPEN_PAREN);
return operand;
}
if (!_currentToken.type.isIncrementOperator) {
@@ -7111,6 +4156,17 @@ class Parser {
return new PostfixExpression(operand, operator);
}
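Illustrative sketch (not from the patch) of the selectors consumed by the loop above and of the trailing postfix operator:

int demoPostfix(Map<String, List<int>> table, String key) {
  // `table[key]` is an index selector and `.length` a property-access
  // selector; `count++` is the PostfixExpression case at the end.
  int count = table[key].length;
  count++;
  return count;
}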
+ /**
+ * Parse a prefixed identifier. Return the prefixed identifier that was
+ * parsed.
+ *
+ * prefixedIdentifier ::=
+ * identifier ('.' identifier)?
+ */
+ Identifier parsePrefixedIdentifier() {
+ return _parsePrefixedIdentifierAfterIdentifier(parseSimpleIdentifier());
+ }
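Illustrative sketch (not from the patch) of a prefixedIdentifier, e.g. a library-prefixed name:

import 'dart:math' as math;

// `math.max` is a prefixedIdentifier: prefix `math`, a period, and `max`.
final num larger = math.max(3, 7);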
+
/**
* Parse a primary expression. Return the primary expression that was parsed.
*
@@ -7134,22 +4190,53 @@ class Parser {
* | mapLiteral
* | listLiteral
*/
- Expression _parsePrimaryExpression() {
- if (_matchesKeyword(Keyword.THIS)) {
+ Expression parsePrimaryExpression() {
+ if (_matchesIdentifier()) {
+ // TODO(brianwilkerson) The code below was an attempt to recover from an
+ // error case, but it needs to be applied as a recovery only after we
+ // know that parsing it as an identifier doesn't work. Leaving the code as
+ // a reminder of how to recover.
+// if (isFunctionExpression(_peek())) {
+// //
+// // Function expressions were allowed to have names at one point, but this is now illegal.
+// //
+// reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdvance());
+// return parseFunctionExpression();
+// }
+ return _parsePrefixedIdentifierUnchecked();
+ }
+ TokenType type = _currentToken.type;
+ if (type == TokenType.STRING) {
+ return parseStringLiteral();
+ } else if (type == TokenType.INT) {
+ Token token = getAndAdvance();
+ int value = null;
+ try {
+ value = int.parse(token.lexeme);
+ } on FormatException {
+ // The invalid format should have been reported by the scanner.
+ }
+ return new IntegerLiteral(token, value);
+ }
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.NULL) {
+ return new NullLiteral(getAndAdvance());
+ } else if (keyword == Keyword.NEW) {
+ return parseNewExpression();
+ } else if (keyword == Keyword.THIS) {
return new ThisExpression(getAndAdvance());
- } else if (_matchesKeyword(Keyword.SUPER)) {
+ } else if (keyword == Keyword.SUPER) {
// TODO(paulberry): verify with Gilad that "super" must be followed by
// unconditionalAssignableSelector in this case.
- return _parseAssignableSelector(
+ return parseAssignableSelector(
new SuperExpression(getAndAdvance()), false,
allowConditional: false);
- } else if (_matchesKeyword(Keyword.NULL)) {
- return new NullLiteral(getAndAdvance());
- } else if (_matchesKeyword(Keyword.FALSE)) {
+ } else if (keyword == Keyword.FALSE) {
return new BooleanLiteral(getAndAdvance(), false);
- } else if (_matchesKeyword(Keyword.TRUE)) {
+ } else if (keyword == Keyword.TRUE) {
return new BooleanLiteral(getAndAdvance(), true);
- } else if (_matches(TokenType.DOUBLE)) {
+ }
+ if (type == TokenType.DOUBLE) {
Token token = getAndAdvance();
double value = 0.0;
try {
@@ -7158,7 +4245,7 @@ class Parser {
// The invalid format should have been reported by the scanner.
}
return new DoubleLiteral(token, value);
- } else if (_matches(TokenType.HEXADECIMAL)) {
+ } else if (type == TokenType.HEXADECIMAL) {
Token token = getAndAdvance();
int value = null;
try {
@@ -7167,41 +4254,10 @@ class Parser {
// The invalid format should have been reported by the scanner.
}
return new IntegerLiteral(token, value);
- } else if (_matches(TokenType.INT)) {
- Token token = getAndAdvance();
- int value = null;
- try {
- value = int.parse(token.lexeme);
- } on FormatException {
- // The invalid format should have been reported by the scanner.
- }
- return new IntegerLiteral(token, value);
- } else if (_matches(TokenType.STRING)) {
- return parseStringLiteral();
- } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
- return _parseMapLiteral(null, null);
- } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
- _matches(TokenType.INDEX)) {
- return _parseListLiteral(null, null);
- } else if (_matchesIdentifier()) {
- // TODO(brianwilkerson) The code below was an attempt to recover from an
- // error case, but it needs to be applied as a recovery only after we
- // know that parsing it as an identifier doesn't work. Leaving the code as
- // a reminder of how to recover.
-// if (isFunctionExpression(peek())) {
-// //
-// // Function expressions were allowed to have names at one point, but this is now illegal.
-// //
-// reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdvance());
-// return parseFunctionExpression();
-// }
- return parsePrefixedIdentifier();
- } else if (_matchesKeyword(Keyword.NEW)) {
- return _parseNewExpression();
- } else if (_matchesKeyword(Keyword.CONST)) {
- return _parseConstExpression();
- } else if (_matches(TokenType.OPEN_PAREN)) {
- if (_isFunctionExpression(_currentToken)) {
+ } else if (keyword == Keyword.CONST) {
+ return parseConstExpression();
+ } else if (type == TokenType.OPEN_PAREN) {
+ if (isFunctionExpression(_currentToken)) {
return parseFunctionExpression();
}
Token leftParenthesis = getAndAdvance();
@@ -7215,15 +4271,20 @@ class Parser {
} finally {
_inInitializer = wasInInitializer;
}
- } else if (_matches(TokenType.LT)) {
- return _parseListOrMapLiteral(null);
- } else if (_matches(TokenType.QUESTION) &&
+ } else if (type == TokenType.LT || _injectGenericCommentTypeList()) {
+ return parseListOrMapLiteral(null);
+ } else if (type == TokenType.OPEN_CURLY_BRACKET) {
+ return parseMapLiteral(null, null);
+ } else if (type == TokenType.OPEN_SQUARE_BRACKET ||
+ type == TokenType.INDEX) {
+ return parseListLiteral(null, null);
+ } else if (type == TokenType.QUESTION &&
_tokenMatches(_peek(), TokenType.IDENTIFIER)) {
_reportErrorForCurrentToken(
ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]);
_advance();
- return _parsePrimaryExpression();
- } else if (_matchesKeyword(Keyword.VOID)) {
+ return parsePrimaryExpression();
+ } else if (keyword == Keyword.VOID) {
//
// Recover from having a return type of "void" where a return type is not
// expected.
@@ -7232,31 +4293,41 @@ class Parser {
_reportErrorForCurrentToken(
ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]);
_advance();
- return _parsePrimaryExpression();
- } else if (_matches(TokenType.HASH)) {
- return _parseSymbolLiteral();
+ return parsePrimaryExpression();
+ } else if (type == TokenType.HASH) {
+ return parseSymbolLiteral();
} else {
_reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
- return _createSyntheticIdentifier();
+ return createSyntheticIdentifier();
}
}
/**
- * Parse a redirecting constructor invocation. Return the redirecting
+ * Parse a redirecting constructor invocation. The flag [hasPeriod] should be
+ * `true` if the `this` is followed by a period. Return the redirecting
* constructor invocation that was parsed.
*
+ * This method assumes that the current token matches `Keyword.THIS`.
+ *
* redirectingConstructorInvocation ::=
* 'this' ('.' identifier)? arguments
*/
- RedirectingConstructorInvocation _parseRedirectingConstructorInvocation() {
- Token keyword = _expectKeyword(Keyword.THIS);
+ RedirectingConstructorInvocation parseRedirectingConstructorInvocation(
+ bool hasPeriod) {
+ Token keyword = getAndAdvance();
Token period = null;
SimpleIdentifier constructorName = null;
- if (_matches(TokenType.PERIOD)) {
+ if (hasPeriod) {
period = getAndAdvance();
- constructorName = parseSimpleIdentifier();
+ if (_matchesIdentifier()) {
+ constructorName = _parseSimpleIdentifierUnchecked(isDeclaration: false);
+ } else {
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
+ constructorName = createSyntheticIdentifier(isDeclaration: false);
+ _advance();
+ }
}
- ArgumentList argumentList = parseArgumentList();
+ ArgumentList argumentList = _parseArgumentListChecked();
return new RedirectingConstructorInvocation(
keyword, period, constructorName, argumentList);
}
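Illustrative sketch (not from the patch) of a redirectingConstructorInvocation in a constructor initializer:

class Point {
  final int x;
  final int y;
  Point(this.x, this.y);

  // `this(0, 0)` is the redirecting constructor invocation; with a named
  // target it would read `this.someName(...)` (the hasPeriod case above).
  Point.origin() : this(0, 0);
}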
@@ -7269,30 +4340,30 @@ class Parser {
* bitwiseOrExpression ('is' '!'? type | 'as' type | relationalOperator bitwiseOrExpression)?
* | 'super' relationalOperator bitwiseOrExpression
*/
- Expression _parseRelationalExpression() {
- if (_matchesKeyword(Keyword.SUPER) &&
+ Expression parseRelationalExpression() {
+ if (_currentToken.keyword == Keyword.SUPER &&
_currentToken.next.type.isRelationalOperator) {
Expression expression = new SuperExpression(getAndAdvance());
Token operator = getAndAdvance();
- expression = new BinaryExpression(
+ return new BinaryExpression(
expression, operator, parseBitwiseOrExpression());
- return expression;
}
Expression expression = parseBitwiseOrExpression();
- if (_matchesKeyword(Keyword.AS)) {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.AS) {
Token asOperator = getAndAdvance();
- expression = new AsExpression(expression, asOperator, parseTypeName());
- } else if (_matchesKeyword(Keyword.IS)) {
+ return new AsExpression(expression, asOperator, parseTypeName(true));
+ } else if (keyword == Keyword.IS) {
Token isOperator = getAndAdvance();
Token notOperator = null;
if (_matches(TokenType.BANG)) {
notOperator = getAndAdvance();
}
- expression = new IsExpression(
- expression, isOperator, notOperator, parseTypeName());
+ return new IsExpression(
+ expression, isOperator, notOperator, parseTypeName(true));
} else if (_currentToken.type.isRelationalOperator) {
Token operator = getAndAdvance();
- expression = new BinaryExpression(
+ return new BinaryExpression(
expression, operator, parseBitwiseOrExpression());
}
return expression;
@@ -7301,20 +4372,23 @@ class Parser {
/**
* Parse a rethrow expression. Return the rethrow expression that was parsed.
*
+ * This method assumes that the current token matches `Keyword.RETHROW`.
+ *
* rethrowExpression ::=
* 'rethrow'
*/
- Expression _parseRethrowExpression() =>
- new RethrowExpression(_expectKeyword(Keyword.RETHROW));
+ Expression parseRethrowExpression() => new RethrowExpression(getAndAdvance());
/**
* Parse a return statement. Return the return statement that was parsed.
*
+ * This method assumes that the current token matches `Keyword.RETURN`.
+ *
* returnStatement ::=
* 'return' expression? ';'
*/
- Statement _parseReturnStatement() {
- Token returnKeyword = _expectKeyword(Keyword.RETURN);
+ Statement parseReturnStatement() {
+ Token returnKeyword = getAndAdvance();
if (_matches(TokenType.SEMICOLON)) {
return new ReturnStatement(returnKeyword, null, getAndAdvance());
}
@@ -7323,6 +4397,21 @@ class Parser {
return new ReturnStatement(returnKeyword, expression, semicolon);
}
+ /**
+ * Parse a return type. Return the return type that was parsed.
+ *
+ * returnType ::=
+ * 'void'
+ * | type
+ */
+ TypeName parseReturnType() {
+ if (_currentToken.keyword == Keyword.VOID) {
+ return new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+ } else {
+ return parseTypeName(false);
+ }
+ }
+
/**
* Parse a setter. The [commentAndMetadata] is the documentation comment and
* metadata to be associated with the declaration. The [externalKeyword] is
@@ -7331,19 +4420,21 @@ class Parser {
* already been parsed, or `null` if there was no return type. Return the
* setter that was parsed.
*
+ * This method assumes that the current token matches `Keyword.SET`.
+ *
* setter ::=
* setterSignature functionBody?
*
* setterSignature ::=
* 'external'? 'static'? returnType? 'set' identifier formalParameterList
*/
- MethodDeclaration _parseSetter(CommentAndMetadata commentAndMetadata,
+ MethodDeclaration parseSetter(CommentAndMetadata commentAndMetadata,
Token externalKeyword, Token staticKeyword, TypeName returnType) {
- Token propertyKeyword = _expectKeyword(Keyword.SET);
- SimpleIdentifier name = parseSimpleIdentifier();
+ Token propertyKeyword = getAndAdvance();
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
FormalParameterList parameters = parseFormalParameterList();
_validateFormalParameterList(parameters);
- FunctionBody body = _parseFunctionBody(
+ FunctionBody body = parseFunctionBody(
externalKeyword != null || staticKeyword == null,
ParserErrorCode.STATIC_SETTER_WITHOUT_BODY,
false);
@@ -7371,108 +4462,111 @@ class Parser {
* additiveExpression (shiftOperator additiveExpression)*
* | 'super' (shiftOperator additiveExpression)+
*/
- Expression _parseShiftExpression() {
+ Expression parseShiftExpression() {
Expression expression;
- if (_matchesKeyword(Keyword.SUPER) &&
+ if (_currentToken.keyword == Keyword.SUPER &&
_currentToken.next.type.isShiftOperator) {
expression = new SuperExpression(getAndAdvance());
} else {
- expression = _parseAdditiveExpression();
+ expression = parseAdditiveExpression();
}
while (_currentToken.type.isShiftOperator) {
- Token operator = getAndAdvance();
expression = new BinaryExpression(
- expression, operator, _parseAdditiveExpression());
+ expression, getAndAdvance(), parseAdditiveExpression());
}
return expression;
}
/**
- * Parse a list of statements within a switch statement. Return the statements
- * that were parsed.
+ * Parse a simple identifier. Return the simple identifier that was parsed.
*
- * statements ::=
- * statement*
+ * identifier ::=
+ * IDENTIFIER
*/
- List<Statement> _parseStatementList() {
- List<Statement> statements = new List<Statement>();
- Token statementStart = _currentToken;
- while (!_matches(TokenType.EOF) &&
- !_matches(TokenType.CLOSE_CURLY_BRACKET) &&
- !_isSwitchMember()) {
- statements.add(parseStatement2());
- if (identical(_currentToken, statementStart)) {
- _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
- [_currentToken.lexeme]);
- _advance();
- }
- statementStart = _currentToken;
+ SimpleIdentifier parseSimpleIdentifier({bool isDeclaration: false}) {
+ if (_matchesIdentifier()) {
+ return _parseSimpleIdentifierUnchecked(isDeclaration: isDeclaration);
}
- return statements;
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
+ return createSyntheticIdentifier(isDeclaration: isDeclaration);
}
/**
- * Parse a string literal that contains interpolations. Return the string
- * literal that was parsed.
+ * Parse a statement, starting with the given [token]. Return the statement
+ * that was parsed, or `null` if the tokens do not represent a recognizable
+ * statement.
*/
- StringInterpolation _parseStringInterpolation(Token string) {
- List<InterpolationElement> elements = new List<InterpolationElement>();
- bool hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
- _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER);
- elements.add(new InterpolationString(
- string, _computeStringValue(string.lexeme, true, !hasMore)));
- while (hasMore) {
- if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION)) {
- Token openToken = getAndAdvance();
- bool wasInInitializer = _inInitializer;
- _inInitializer = false;
- try {
- Expression expression = parseExpression2();
- Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
- elements.add(
- new InterpolationExpression(openToken, expression, rightBracket));
- } finally {
- _inInitializer = wasInInitializer;
- }
- } else {
- Token openToken = getAndAdvance();
- Expression expression = null;
- if (_matchesKeyword(Keyword.THIS)) {
- expression = new ThisExpression(getAndAdvance());
- } else {
- expression = parseSimpleIdentifier();
- }
- elements.add(new InterpolationExpression(openToken, expression, null));
- }
- if (_matches(TokenType.STRING)) {
- string = getAndAdvance();
- hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
- _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER);
- elements.add(new InterpolationString(
- string, _computeStringValue(string.lexeme, false, !hasMore)));
+ Statement parseStatement(Token token) {
+ _currentToken = token;
+ return parseStatement2();
+ }
+
+ /**
+ * Parse a statement. Return the statement that was parsed.
+ *
+ * statement ::=
+ * label* nonLabeledStatement
+ */
+ Statement parseStatement2() {
+ List<Label> labels = null;
+ while (_matchesIdentifier() && _currentToken.next.type == TokenType.COLON) {
+ Label label = parseLabel(isDeclaration: true);
+ if (labels == null) {
+ labels = <Label>[label];
} else {
- hasMore = false;
+ labels.add(label);
}
}
- return new StringInterpolation(elements);
+ Statement statement = parseNonLabeledStatement();
+ if (labels == null) {
+ return statement;
+ }
+ return new LabeledStatement(labels, statement);
+ }
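Illustrative sketch (not from the patch) of the `label* nonLabeledStatement` production parsed by parseStatement2:

int findFirstNegative(List<List<int>> rows) {
  // `search:` is a Label; the labeled for statement is the nonLabeledStatement.
  search:
  for (List<int> row in rows) {
    for (int value in row) {
      if (value < 0) {
        return value;
      }
      if (value == 0) {
        continue search; // continue the labeled (outer) loop
      }
    }
  }
  return 0;
}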
+
+ /**
+ * Parse a sequence of statements, starting with the given [token]. Return the
+ * statements that were parsed, or `null` if the tokens do not represent a
+ * recognizable sequence of statements.
+ */
+ List<Statement> parseStatements(Token token) {
+ _currentToken = token;
+ return _parseStatementList();
+ }
+
+ /**
+ * Parse a string literal. Return the string literal that was parsed.
+ *
+ * stringLiteral ::=
+ * MULTI_LINE_STRING+
+ * | SINGLE_LINE_STRING+
+ */
+ StringLiteral parseStringLiteral() {
+ if (_matches(TokenType.STRING)) {
+ return _parseStringLiteralUnchecked();
+ }
+ _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL);
+ return createSyntheticStringLiteral();
}
/**
* Parse a super constructor invocation. Return the super constructor
* invocation that was parsed.
*
+ * This method assumes that the current token matches [Keyword.SUPER].
+ *
* superConstructorInvocation ::=
* 'super' ('.' identifier)? arguments
*/
- SuperConstructorInvocation _parseSuperConstructorInvocation() {
- Token keyword = _expectKeyword(Keyword.SUPER);
+ SuperConstructorInvocation parseSuperConstructorInvocation() {
+ Token keyword = getAndAdvance();
Token period = null;
SimpleIdentifier constructorName = null;
if (_matches(TokenType.PERIOD)) {
period = getAndAdvance();
constructorName = parseSimpleIdentifier();
}
- ArgumentList argumentList = parseArgumentList();
+ ArgumentList argumentList = _parseArgumentListChecked();
return new SuperConstructorInvocation(
keyword, period, constructorName, argumentList);
}
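Illustrative sketch (not from the patch) of a superConstructorInvocation in an initializer list:

class Shape {
  final String name;
  Shape.named(this.name);
}

class Circle extends Shape {
  final double radius;
  // `super.named('circle')` is the superConstructorInvocation.
  Circle(this.radius) : super.named('circle');
}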
@@ -7489,7 +4583,7 @@ class Parser {
* defaultCase ::=
* label* 'default' ':' statements
*/
- SwitchStatement _parseSwitchStatement() {
+ SwitchStatement parseSwitchStatement() {
bool wasInSwitch = _inSwitch;
_inSwitch = true;
try {
@@ -7500,13 +4594,14 @@ class Parser {
Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
Token defaultKeyword = null;
- List<SwitchMember> members = new List<SwitchMember>();
- while (!_matches(TokenType.EOF) &&
- !_matches(TokenType.CLOSE_CURLY_BRACKET)) {
- List<Label> labels = new List<Label>();
+ List<SwitchMember> members = <SwitchMember>[];
+ TokenType type = _currentToken.type;
+ while (type != TokenType.EOF && type != TokenType.CLOSE_CURLY_BRACKET) {
+ List<Label> labels = <Label>[];
while (
_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) {
- SimpleIdentifier identifier = parseSimpleIdentifier();
+ SimpleIdentifier identifier =
+ _parseSimpleIdentifierUnchecked(isDeclaration: true);
String label = identifier.token.lexeme;
if (definedLabels.contains(label)) {
_reportErrorForToken(
@@ -7516,10 +4611,11 @@ class Parser {
} else {
definedLabels.add(label);
}
- Token colon = _expect(TokenType.COLON);
+ Token colon = getAndAdvance();
labels.add(new Label(identifier, colon));
}
- if (_matchesKeyword(Keyword.CASE)) {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.CASE) {
Token caseKeyword = getAndAdvance();
Expression caseExpression = parseExpression2();
Token colon = _expect(TokenType.COLON);
@@ -7530,7 +4626,7 @@ class Parser {
ParserErrorCode.SWITCH_HAS_CASE_AFTER_DEFAULT_CASE,
caseKeyword);
}
- } else if (_matchesKeyword(Keyword.DEFAULT)) {
+ } else if (keyword == Keyword.DEFAULT) {
if (defaultKeyword != null) {
_reportErrorForToken(
ParserErrorCode.SWITCH_HAS_MULTIPLE_DEFAULT_CASES, _peek());
@@ -7543,13 +4639,21 @@ class Parser {
// We need to advance, otherwise we could end up in an infinite loop,
// but this could be a lot smarter about recovering from the error.
_reportErrorForCurrentToken(ParserErrorCode.EXPECTED_CASE_OR_DEFAULT);
- while (!_matches(TokenType.EOF) &&
- !_matches(TokenType.CLOSE_CURLY_BRACKET) &&
- !_matchesKeyword(Keyword.CASE) &&
- !_matchesKeyword(Keyword.DEFAULT)) {
+ bool atEndOrNextMember() {
+ TokenType type = _currentToken.type;
+ if (type == TokenType.EOF ||
+ type == TokenType.CLOSE_CURLY_BRACKET) {
+ return true;
+ }
+ Keyword keyword = _currentToken.keyword;
+ return keyword == Keyword.CASE || keyword == Keyword.DEFAULT;
+ }
+
+ while (!atEndOrNextMember()) {
_advance();
}
}
+ type = _currentToken.type;
}
Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
return new SwitchStatement(keyword, leftParenthesis, expression,
@@ -7562,16 +4666,17 @@ class Parser {
/**
* Parse a symbol literal. Return the symbol literal that was parsed.
*
+ * This method assumes that the current token matches [TokenType.HASH].
+ *
* symbolLiteral ::=
* '#' identifier ('.' identifier)*
*/
- SymbolLiteral _parseSymbolLiteral() {
+ SymbolLiteral parseSymbolLiteral() {
Token poundSign = getAndAdvance();
- List<Token> components = new List<Token>();
+ List<Token> components = <Token>[];
if (_matchesIdentifier()) {
components.add(getAndAdvance());
- while (_matches(TokenType.PERIOD)) {
- _advance();
+ while (_optional(TokenType.PERIOD)) {
if (_matchesIdentifier()) {
components.add(getAndAdvance());
} else {
@@ -7582,7 +4687,7 @@ class Parser {
}
} else if (_currentToken.isOperator) {
components.add(getAndAdvance());
- } else if (_tokenMatchesKeyword(_currentToken, Keyword.VOID)) {
+ } else if (_matchesKeyword(Keyword.VOID)) {
components.add(getAndAdvance());
} else {
_reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
@@ -7594,15 +4699,18 @@ class Parser {
/**
* Parse a throw expression. Return the throw expression that was parsed.
*
+ * This method assumes that the current token matches [Keyword.THROW].
+ *
* throwExpression ::=
* 'throw' expression
*/
- Expression _parseThrowExpression() {
- Token keyword = _expectKeyword(Keyword.THROW);
- if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) {
+ Expression parseThrowExpression() {
+ Token keyword = getAndAdvance();
+ TokenType type = _currentToken.type;
+ if (type == TokenType.SEMICOLON || type == TokenType.CLOSE_PAREN) {
_reportErrorForToken(
ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken);
- return new ThrowExpression(keyword, _createSyntheticIdentifier());
+ return new ThrowExpression(keyword, createSyntheticIdentifier());
}
Expression expression = parseExpression2();
return new ThrowExpression(keyword, expression);
@@ -7611,15 +4719,18 @@ class Parser {
/**
* Parse a throw expression. Return the throw expression that was parsed.
*
+ * This method assumes that the current token matches [Keyword.THROW].
+ *
* throwExpressionWithoutCascade ::=
* 'throw' expressionWithoutCascade
*/
- Expression _parseThrowExpressionWithoutCascade() {
- Token keyword = _expectKeyword(Keyword.THROW);
- if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) {
+ Expression parseThrowExpressionWithoutCascade() {
+ Token keyword = getAndAdvance();
+ TokenType type = _currentToken.type;
+ if (type == TokenType.SEMICOLON || type == TokenType.CLOSE_PAREN) {
_reportErrorForToken(
ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken);
- return new ThrowExpression(keyword, _createSyntheticIdentifier());
+ return new ThrowExpression(keyword, createSyntheticIdentifier());
}
Expression expression = parseExpressionWithoutCascade();
return new ThrowExpression(keyword, expression);
@@ -7628,6 +4739,8 @@ class Parser {
/**
* Parse a try statement. Return the try statement that was parsed.
*
+ * This method assumes that the current token matches [Keyword.TRY].
+ *
* tryStatement ::=
* 'try' block (onPart+ finallyPart? | finallyPart)
*
@@ -7641,17 +4754,17 @@ class Parser {
* finallyPart ::=
* 'finally' block
*/
- Statement _parseTryStatement() {
- Token tryKeyword = _expectKeyword(Keyword.TRY);
- Block body = parseBlock();
- List<CatchClause> catchClauses = new List<CatchClause>();
+ Statement parseTryStatement() {
+ Token tryKeyword = getAndAdvance();
+ Block body = _parseBlockChecked();
+ List<CatchClause> catchClauses = <CatchClause>[];
Block finallyClause = null;
while (_matchesString(_ON) || _matchesKeyword(Keyword.CATCH)) {
Token onKeyword = null;
TypeName exceptionType = null;
if (_matchesString(_ON)) {
onKeyword = getAndAdvance();
- exceptionType = parseTypeName();
+ exceptionType = parseTypeName(false);
}
Token catchKeyword = null;
Token leftParenthesis = null;
@@ -7662,14 +4775,14 @@ class Parser {
if (_matchesKeyword(Keyword.CATCH)) {
catchKeyword = getAndAdvance();
leftParenthesis = _expect(TokenType.OPEN_PAREN);
- exceptionParameter = parseSimpleIdentifier();
+ exceptionParameter = parseSimpleIdentifier(isDeclaration: true);
if (_matches(TokenType.COMMA)) {
comma = getAndAdvance();
- stackTraceParameter = parseSimpleIdentifier();
+ stackTraceParameter = parseSimpleIdentifier(isDeclaration: true);
}
rightParenthesis = _expect(TokenType.CLOSE_PAREN);
}
- Block catchBody = parseBlock();
+ Block catchBody = _parseBlockChecked();
catchClauses.add(new CatchClause(
onKeyword,
exceptionType,
@@ -7684,11 +4797,9 @@ class Parser {
Token finallyKeyword = null;
if (_matchesKeyword(Keyword.FINALLY)) {
finallyKeyword = getAndAdvance();
- finallyClause = parseBlock();
- } else {
- if (catchClauses.isEmpty) {
- _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY);
- }
+ finallyClause = _parseBlockChecked();
+ } else if (catchClauses.isEmpty) {
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY);
}
return new TryStatement(
tryKeyword, body, catchClauses, finallyKeyword, finallyClause);
@@ -7698,6 +4809,8 @@ class Parser {
* Parse a type alias. The [commentAndMetadata] is the metadata to be
* associated with the member. Return the type alias that was parsed.
*
+ * This method assumes that the current token matches [Keyword.TYPEDEF].
+ *
* typeAlias ::=
* 'typedef' typeAliasBody
*
@@ -7717,22 +4830,22 @@ class Parser {
* functionPrefix ::=
* returnType? name
*/
- TypeAlias _parseTypeAlias(CommentAndMetadata commentAndMetadata) {
- Token keyword = _expectKeyword(Keyword.TYPEDEF);
+ TypeAlias parseTypeAlias(CommentAndMetadata commentAndMetadata) {
+ Token keyword = getAndAdvance();
if (_matchesIdentifier()) {
Token next = _peek();
if (_tokenMatches(next, TokenType.LT)) {
next = _skipTypeParameterList(next);
if (next != null && _tokenMatches(next, TokenType.EQ)) {
TypeAlias typeAlias =
- _parseClassTypeAlias(commentAndMetadata, null, keyword);
+ parseClassTypeAlias(commentAndMetadata, null, keyword);
_reportErrorForToken(
ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword);
return typeAlias;
}
} else if (_tokenMatches(next, TokenType.EQ)) {
TypeAlias typeAlias =
- _parseClassTypeAlias(commentAndMetadata, null, keyword);
+ parseClassTypeAlias(commentAndMetadata, null, keyword);
_reportErrorForToken(
ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword);
return typeAlias;
@@ -7741,6 +4854,86 @@ class Parser {
return _parseFunctionTypeAlias(commentAndMetadata, keyword);
}
+ /**
+ * Parse a list of type arguments. Return the type argument list that was
+ * parsed.
+ *
+ * This method assumes that the current token matches `TokenType.LT`.
+ *
+ * typeArguments ::=
+ * '<' typeList '>'
+ *
+ * typeList ::=
+ * type (',' type)*
+ */
+ TypeArgumentList parseTypeArgumentList() {
+ Token leftBracket = getAndAdvance();
+ List<TypeName> arguments = <TypeName>[parseTypeName(false)];
+ while (_optional(TokenType.COMMA)) {
+ arguments.add(parseTypeName(false));
+ }
+ Token rightBracket = _expectGt();
+ return new TypeArgumentList(leftBracket, arguments, rightBracket);
+ }
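Illustrative sketch (not from the patch) of the typeArguments production, including a nested argument list:

// `<String, List<int>>` is one typeArgumentList; the `<int>` inside it is another.
final Map<String, List<int>> scoresByTeam = <String, List<int>>{};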
+
+ /**
+ * Parse a type name. Return the type name that was parsed.
+ *
+ * type ::=
+ * qualified typeArguments?
+ */
+ TypeName parseTypeName(bool inExpression) {
+ TypeName realType = _parseTypeName(inExpression);
+ // If this is followed by a generic method type comment, allow the comment
+ // type to replace the real type name.
+ // TODO(jmesserly): this feels like a big hammer. Can we restrict it to
+ // only work inside generic methods?
+ TypeName typeFromComment = _parseOptionalTypeNameComment();
+ return typeFromComment ?? realType;
+ }
+
+ /**
+ * Parse a type parameter. Return the type parameter that was parsed.
+ *
+ * typeParameter ::=
+ * metadata name ('extends' bound)?
+ */
+ TypeParameter parseTypeParameter() {
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
+ if (_matches(TokenType.QUESTION)) {
+ _reportErrorForCurrentToken(ParserErrorCode.NULLABLE_TYPE_PARAMETER);
+ _advance();
+ }
+ if (_matchesKeyword(Keyword.EXTENDS)) {
+ Token keyword = getAndAdvance();
+ TypeName bound = parseTypeName(false);
+ return new TypeParameter(commentAndMetadata.comment,
+ commentAndMetadata.metadata, name, keyword, bound);
+ }
+ return new TypeParameter(commentAndMetadata.comment,
+ commentAndMetadata.metadata, name, null, null);
+ }
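Illustrative sketch (not from the patch) of typeParameter, with and without an `extends` bound:

// `V extends num` carries a bound; `K` does not.
class Pair<K, V extends num> {
  final K key;
  final V value;
  const Pair(this.key, this.value);
}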
+
+ /**
+ * Parse a list of type parameters. Return the list of type parameters that
+ * were parsed.
+ *
+ * This method assumes that the current token matches `TokenType.LT`.
+ *
+ * typeParameterList ::=
+ * '<' typeParameter (',' typeParameter)* '>'
+ */
+ TypeParameterList parseTypeParameterList() {
+ Token leftBracket = getAndAdvance();
+ List<TypeParameter> typeParameters = <TypeParameter>[parseTypeParameter()];
+ while (_optional(TokenType.COMMA)) {
+ typeParameters.add(parseTypeParameter());
+ }
+ Token rightBracket = _expectGt();
+ return new TypeParameterList(leftBracket, typeParameters, rightBracket);
+ }
+
/**
* Parse a unary expression. Return the unary expression that was parsed.
*
@@ -7752,31 +4945,34 @@ class Parser {
* | '-' 'super'
* | incrementOperator assignableExpression
*/
- Expression _parseUnaryExpression() {
- if (_matches(TokenType.MINUS) ||
- _matches(TokenType.BANG) ||
- _matches(TokenType.TILDE)) {
+ Expression parseUnaryExpression() {
+ TokenType type = _currentToken.type;
+ if (type == TokenType.MINUS ||
+ type == TokenType.BANG ||
+ type == TokenType.TILDE) {
Token operator = getAndAdvance();
if (_matchesKeyword(Keyword.SUPER)) {
- if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) ||
- _tokenMatches(_peek(), TokenType.PERIOD)) {
+ TokenType nextType = _peek().type;
+ if (nextType == TokenType.OPEN_SQUARE_BRACKET ||
+ nextType == TokenType.PERIOD) {
// "prefixOperator unaryExpression"
// --> "prefixOperator postfixExpression"
// --> "prefixOperator primary selector*"
// --> "prefixOperator 'super' assignableSelector selector*"
- return new PrefixExpression(operator, _parseUnaryExpression());
+ return new PrefixExpression(operator, parseUnaryExpression());
}
return new PrefixExpression(
operator, new SuperExpression(getAndAdvance()));
}
- return new PrefixExpression(operator, _parseUnaryExpression());
+ return new PrefixExpression(operator, parseUnaryExpression());
} else if (_currentToken.type.isIncrementOperator) {
Token operator = getAndAdvance();
if (_matchesKeyword(Keyword.SUPER)) {
- if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) ||
- _tokenMatches(_peek(), TokenType.PERIOD)) {
+ TokenType nextType = _peek().type;
+ if (nextType == TokenType.OPEN_SQUARE_BRACKET ||
+ nextType == TokenType.PERIOD) {
// --> "prefixOperator 'super' assignableSelector selector*"
- return new PrefixExpression(operator, _parseUnaryExpression());
+ return new PrefixExpression(operator, parseUnaryExpression());
}
//
// Even though it is not valid to use an incrementing operator
@@ -7785,7 +4981,7 @@ class Parser {
// we cannot do the same for "++super" because "+super" is also not
// valid.
//
- if (operator.type == TokenType.MINUS_MINUS) {
+ if (type == TokenType.MINUS_MINUS) {
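// Recover by treating '--super' as '-(-super)': split the single '--' token into two consecutive '-' tokens.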
Token firstOperator = _createToken(operator, TokenType.MINUS);
Token secondOperator =
new Token(TokenType.MINUS, operator.offset + 1);
@@ -7796,73 +4992,22 @@ class Parser {
firstOperator,
new PrefixExpression(
secondOperator, new SuperExpression(getAndAdvance())));
- } else {
- // Invalid operator before 'super'
- _reportErrorForCurrentToken(
- ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]);
- return new PrefixExpression(
- operator, new SuperExpression(getAndAdvance()));
}
+ // Invalid operator before 'super'
+ _reportErrorForCurrentToken(
+ ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]);
+ return new PrefixExpression(
+ operator, new SuperExpression(getAndAdvance()));
}
- return new PrefixExpression(operator, _parseAssignableExpression(false));
- } else if (_matches(TokenType.PLUS)) {
+ return new PrefixExpression(
+ operator, _parseAssignableExpressionNotStartingWithSuper(false));
+ } else if (type == TokenType.PLUS) {
_reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
- return _createSyntheticIdentifier();
+ return createSyntheticIdentifier();
} else if (_inAsync && _matchesString(_AWAIT)) {
- return _parseAwaitExpression();
- }
- return _parsePostfixExpression();
- }
-
- /**
- * Parse a string literal representing a URI. Return the string literal that
- * was parsed.
- */
- StringLiteral _parseUri() {
- bool iskeywordAfterUri(Token token) => token.lexeme == Keyword.AS.syntax ||
- token.lexeme == _HIDE ||
- token.lexeme == _SHOW;
- if (!_matches(TokenType.STRING) &&
- !_matches(TokenType.SEMICOLON) &&
- !iskeywordAfterUri(_currentToken)) {
- // Attempt to recover in the case where the URI was not enclosed in
- // quotes.
- Token token = _currentToken;
- while ((_tokenMatchesIdentifier(token) && !iskeywordAfterUri(token)) ||
- _tokenMatches(token, TokenType.COLON) ||
- _tokenMatches(token, TokenType.SLASH) ||
- _tokenMatches(token, TokenType.PERIOD) ||
- _tokenMatches(token, TokenType.PERIOD_PERIOD) ||
- _tokenMatches(token, TokenType.PERIOD_PERIOD_PERIOD) ||
- _tokenMatches(token, TokenType.INT) ||
- _tokenMatches(token, TokenType.DOUBLE)) {
- token = token.next;
- }
- if (_tokenMatches(token, TokenType.SEMICOLON) ||
- iskeywordAfterUri(token)) {
- Token endToken = token.previous;
- token = _currentToken;
- int endOffset = token.end;
- StringBuffer buffer = new StringBuffer();
- buffer.write(token.lexeme);
- while (token != endToken) {
- token = token.next;
- if (token.offset != endOffset || token.precedingComments != null) {
- return parseStringLiteral();
- }
- buffer.write(token.lexeme);
- endOffset = token.end;
- }
- String value = buffer.toString();
- Token newToken =
- new StringToken(TokenType.STRING, "'$value'", _currentToken.offset);
- _reportErrorForToken(
- ParserErrorCode.NON_STRING_LITERAL_AS_URI, newToken);
- _currentToken = endToken.next;
- return new SimpleStringLiteral(newToken, value);
- }
+ return parseAwaitExpression();
}
- return parseStringLiteral();
+ return parsePostfixExpression();
}
/**
@@ -7872,7 +5017,7 @@ class Parser {
* variableDeclaration ::=
* identifier ('=' expression)?
*/
- VariableDeclaration _parseVariableDeclaration() {
+ VariableDeclaration parseVariableDeclaration() {
// TODO(paulberry): prior to the fix for bug 23204, we permitted
// annotations before variable declarations (e.g. "String @deprecated s;").
// Although such constructions are prohibited by the spec, we may want to
@@ -7882,7 +5027,7 @@ class Parser {
// of a construct like "class C { int @deprecated foo() {} }" (i.e. the
// user is in the middle of inserting "int bar;" prior to
// "@deprecated foo() {}").
- SimpleIdentifier name = parseSimpleIdentifier();
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
Token equals = null;
Expression initializer = null;
if (_matches(TokenType.EQ)) {
@@ -7900,10 +5045,10 @@ class Parser {
* variableDeclarationList ::=
* finalConstVarOrType variableDeclaration (',' variableDeclaration)*
*/
- VariableDeclarationList _parseVariableDeclarationListAfterMetadata(
+ VariableDeclarationList parseVariableDeclarationListAfterMetadata(
CommentAndMetadata commentAndMetadata) {
- FinalConstVarOrType holder = _parseFinalConstVarOrType(false);
- return _parseVariableDeclarationListAfterType(
+ FinalConstVarOrType holder = parseFinalConstVarOrType(false);
+ return parseVariableDeclarationListAfterType(
commentAndMetadata, holder.keyword, holder.type);
}
@@ -7918,25 +5063,21 @@ class Parser {
* variableDeclarationList ::=
* finalConstVarOrType variableDeclaration (',' variableDeclaration)*
*/
- VariableDeclarationList _parseVariableDeclarationListAfterType(
+ VariableDeclarationList parseVariableDeclarationListAfterType(
CommentAndMetadata commentAndMetadata, Token keyword, TypeName type) {
if (type != null &&
keyword != null &&
_tokenMatchesKeyword(keyword, Keyword.VAR)) {
_reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, keyword);
}
- List<VariableDeclaration> variables = new List<VariableDeclaration>();
- variables.add(_parseVariableDeclaration());
- while (_matches(TokenType.COMMA)) {
- _advance();
- variables.add(_parseVariableDeclaration());
+ List<VariableDeclaration> variables = <VariableDeclaration>[
+ parseVariableDeclaration()
+ ];
+ while (_optional(TokenType.COMMA)) {
+ variables.add(parseVariableDeclaration());
}
- return new VariableDeclarationList(
- commentAndMetadata != null ? commentAndMetadata.comment : null,
- commentAndMetadata != null ? commentAndMetadata.metadata : null,
- keyword,
- type,
- variables);
+ return new VariableDeclarationList(commentAndMetadata?.comment,
+ commentAndMetadata?.metadata, keyword, type, variables);
}
/**
@@ -7948,11 +5089,11 @@ class Parser {
* variableDeclarationStatement ::=
* variableDeclarationList ';'
*/
- VariableDeclarationStatement _parseVariableDeclarationStatementAfterMetadata(
+ VariableDeclarationStatement parseVariableDeclarationStatementAfterMetadata(
CommentAndMetadata commentAndMetadata) {
// Token startToken = currentToken;
VariableDeclarationList variableList =
- _parseVariableDeclarationListAfterMetadata(commentAndMetadata);
+ parseVariableDeclarationListAfterMetadata(commentAndMetadata);
// if (!matches(TokenType.SEMICOLON)) {
// if (matches(startToken, Keyword.VAR) && isTypedIdentifier(startToken.getNext())) {
// // TODO(brianwilkerson) This appears to be of the form "var type variable". We should do
@@ -7964,3099 +5105,3004 @@ class Parser {
}
/**
- * Parse a variable declaration statement. The [commentAndMetadata] is the
- * metadata to be associated with the variable declaration statement, or
- * `null` if there is no attempt at parsing the comment and metadata. The
- * [keyword] is the token representing the 'final', 'const' or 'var' keyword,
- * or `null` if there is no keyword. The [type] is the type of the variables
- * in the list. Return the variable declaration statement that was parsed.
+ * Parse a while statement. Return the while statement that was parsed.
*
- * variableDeclarationStatement ::=
- * variableDeclarationList ';'
+ * This method assumes that the current token matches [Keyword.WHILE].
+ *
+ * whileStatement ::=
+ * 'while' '(' expression ')' statement
*/
- VariableDeclarationStatement _parseVariableDeclarationStatementAfterType(
- CommentAndMetadata commentAndMetadata, Token keyword, TypeName type) {
- VariableDeclarationList variableList =
- _parseVariableDeclarationListAfterType(
- commentAndMetadata, keyword, type);
+ Statement parseWhileStatement() {
+ bool wasInLoop = _inLoop;
+ _inLoop = true;
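+ // Record that we are inside a loop so that statements such as 'break' and 'continue' can be validated; the previous state is restored in the 'finally' clause.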
+ try {
+ Token keyword = getAndAdvance();
+ Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
+ Expression condition = parseExpression2();
+ Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
+ Statement body = parseStatement2();
+ return new WhileStatement(
+ keyword, leftParenthesis, condition, rightParenthesis, body);
+ } finally {
+ _inLoop = wasInLoop;
+ }
+ }
+
+ /**
+ * Parse a with clause. Return the with clause that was parsed.
+ *
+ * This method assumes that the current token matches `Keyword.WITH`.
+ *
+ * withClause ::=
+ * 'with' typeName (',' typeName)*
+ */
+ WithClause parseWithClause() {
+ Token withKeyword = getAndAdvance();
+ List<TypeName> types = <TypeName>[];
+ do {
+ TypeName typeName = parseTypeName(false);
+ _mustNotBeNullable(typeName, ParserErrorCode.NULLABLE_TYPE_IN_WITH);
+ types.add(typeName);
+ } while (_optional(TokenType.COMMA));
+ return new WithClause(withKeyword, types);
+ }
+
+ /**
+ * Parse a yield statement. Return the yield statement that was parsed.
+ *
+ * This method assumes that the current token matches [Keyword.YIELD].
+ *
+ * yieldStatement ::=
+ * 'yield' '*'? expression ';'
+ */
+ YieldStatement parseYieldStatement() {
+ Token yieldToken = getAndAdvance();
+ Token star = null;
+ if (_matches(TokenType.STAR)) {
+ star = getAndAdvance();
+ }
+ Expression expression = parseExpression2();
Token semicolon = _expect(TokenType.SEMICOLON);
- return new VariableDeclarationStatement(variableList, semicolon);
+ return new YieldStatement(yieldToken, star, expression, semicolon);
+ }
+
+ /**
+ * Parse a prefixed identifier, starting at the [startToken], without actually
+ * creating a prefixed identifier or changing the current token. Return the
+ * token following the prefixed identifier that was parsed, or `null` if the
+ * given token is not the first token in a valid prefixed identifier.
+ *
+ * This method must be kept in sync with [parsePrefixedIdentifier].
+ *
+ * prefixedIdentifier ::=
+ * identifier ('.' identifier)?
+ */
+ Token skipPrefixedIdentifier(Token startToken) {
+ Token token = skipSimpleIdentifier(startToken);
+ if (token == null) {
+ return null;
+ } else if (!_tokenMatches(token, TokenType.PERIOD)) {
+ return token;
+ }
+ token = token.next;
+ Token nextToken = skipSimpleIdentifier(token);
+ if (nextToken != null) {
+ return nextToken;
+ } else if (_tokenMatches(token, TokenType.CLOSE_PAREN) ||
+ _tokenMatches(token, TokenType.COMMA)) {
+ // If the `id.` is followed by something that cannot produce a valid
+ // structure, then assume this is a prefixed identifier that is missing
+ // the trailing identifier.
+ return token;
+ }
+ return null;
+ }
+
+ /**
+ * Parse a return type, starting at the [startToken], without actually
+ * creating a return type or changing the current token. Return the token
+ * following the return type that was parsed, or `null` if the given token is
+ * not the first token in a valid return type.
+ *
+ * This method must be kept in sync with [parseReturnType].
+ *
+ * returnType ::=
+ * 'void'
+ * | type
+ */
+ Token skipReturnType(Token startToken) {
+ if (_tokenMatchesKeyword(startToken, Keyword.VOID)) {
+ return startToken.next;
+ } else {
+ return skipTypeName(startToken);
+ }
+ }
+
+ /**
+ * Parse a simple identifier, starting at the [startToken], without actually
+ * creating a simple identifier or changing the current token. Return the
+ * token following the simple identifier that was parsed, or `null` if the
+ * given token is not the first token in a valid simple identifier.
+ *
+ * This method must be kept in sync with [parseSimpleIdentifier].
+ *
+ * identifier ::=
+ * IDENTIFIER
+ */
+ Token skipSimpleIdentifier(Token startToken) {
+ if (_tokenMatches(startToken, TokenType.IDENTIFIER) ||
+ _tokenMatchesPseudoKeyword(startToken)) {
+ return startToken.next;
+ }
+ return null;
}
/**
- * Parse a while statement. Return the while statement that was parsed.
+ * Parse a string literal, starting at the [startToken], without actually
+ * creating a string literal or changing the current token. Return the token
+ * following the string literal that was parsed, or `null` if the given token
+ * is not the first token in a valid string literal.
*
- * whileStatement ::=
- * 'while' '(' expression ')' statement
+ * This method must be kept in sync with [parseStringLiteral].
+ *
+ * stringLiteral ::=
+ * MULTI_LINE_STRING+
+ * | SINGLE_LINE_STRING+
*/
- Statement _parseWhileStatement() {
- bool wasInLoop = _inLoop;
- _inLoop = true;
- try {
- Token keyword = _expectKeyword(Keyword.WHILE);
- Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
- Expression condition = parseExpression2();
- Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
- Statement body = parseStatement2();
- return new WhileStatement(
- keyword, leftParenthesis, condition, rightParenthesis, body);
- } finally {
- _inLoop = wasInLoop;
+ Token skipStringLiteral(Token startToken) {
+ Token token = startToken;
+ while (token != null && _tokenMatches(token, TokenType.STRING)) {
+ token = token.next;
+ TokenType type = token.type;
+ if (type == TokenType.STRING_INTERPOLATION_EXPRESSION ||
+ type == TokenType.STRING_INTERPOLATION_IDENTIFIER) {
+ token = _skipStringInterpolation(token);
+ }
+ }
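+ // If no string tokens were consumed, the given token does not begin a string literal.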
+ if (identical(token, startToken)) {
+ return null;
}
+ return token;
}
/**
- * Parse a yield statement. Return the yield statement that was parsed.
+ * Parse a list of type arguments, starting at the [startToken], without
+ * actually creating a type argument list or changing the current token.
+ * Return the token following the type argument list that was parsed, or
+ * `null` if the given token is not the first token in a valid type argument
+ * list.
*
- * yieldStatement ::=
- * 'yield' '*'? expression ';'
+ * This method must be kept in sync with [parseTypeArgumentList].
+ *
+ * typeArguments ::=
+ * '<' typeList '>'
+ *
+ * typeList ::=
+ * type (',' type)*
*/
- YieldStatement _parseYieldStatement() {
- Token yieldToken = getAndAdvance();
- Token star = null;
- if (_matches(TokenType.STAR)) {
- star = getAndAdvance();
+ Token skipTypeArgumentList(Token startToken) {
+ Token token = startToken;
+ if (!_tokenMatches(token, TokenType.LT) &&
+ !_injectGenericCommentTypeList()) {
+ return null;
}
- Expression expression = parseExpression2();
- Token semicolon = _expect(TokenType.SEMICOLON);
- return new YieldStatement(yieldToken, star, expression, semicolon);
+ token = skipTypeName(token.next);
+ if (token == null) {
+ // If the start token '<' is followed by '>', then assume this should be a
+ // type argument list that is missing a type.
+ token = startToken.next;
+ if (_tokenMatches(token, TokenType.GT)) {
+ return token.next;
+ }
+ return null;
+ }
+ while (_tokenMatches(token, TokenType.COMMA)) {
+ token = skipTypeName(token.next);
+ if (token == null) {
+ return null;
+ }
+ }
+ if (token.type == TokenType.GT) {
+ return token.next;
+ } else if (token.type == TokenType.GT_GT) {
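+ // A '>>' closes both this type argument list and an enclosing one; synthesize a '>' to stand for the second angle bracket and return it as the token following this list.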
+ Token second = new Token(TokenType.GT, token.offset + 1);
+ second.setNextWithoutSettingPrevious(token.next);
+ return second;
+ }
+ return null;
}
/**
- * Return the token that is immediately after the current token. This is
- * equivalent to [_peekAt](1).
+ * Parse a type name, starting at the [startToken], without actually creating
+ * a type name or changing the current token. Return the token following the
+ * type name that was parsed, or `null` if the given token is not the first
+ * token in a valid type name.
+ *
+ * This method must be kept in sync with [parseTypeName].
+ *
+ * type ::=
+ * qualified typeArguments?
*/
- Token _peek() => _currentToken.next;
+ Token skipTypeName(Token startToken) {
+ Token token = skipPrefixedIdentifier(startToken);
+ if (token == null) {
+ return null;
+ }
+ if (_tokenMatches(token, TokenType.LT)) {
+ token = skipTypeArgumentList(token);
+ }
+ return token;
+ }
/**
- * Return the token that is the given [distance] after the current token,
- * where the distance is the number of tokens to look ahead. A distance of `0`
- * is the current token, `1` is the next token, etc.
+ * Advance to the next token in the token stream.
*/
- Token _peekAt(int distance) {
- Token token = _currentToken;
- for (int i = 0; i < distance; i++) {
- token = token.next;
- }
- return token;
+ void _advance() {
+ _currentToken = _currentToken.next;
}
/**
- * Report the given [error].
+ * Append the character equivalent of the given [codePoint] to the given
+ * [buffer]. Use the [startIndex] and [endIndex] to report an error, and
+ * don't append anything to the buffer, if the code point is invalid. The
+ * substring of the [source] between those indices is the escape sequence that
+ * was parsed to produce the code point (used for error reporting).
*/
- void _reportError(AnalysisError error) {
- if (_errorListenerLock != 0) {
+ void _appendCodePoint(StringBuffer buffer, String source, int codePoint,
+ int startIndex, int endIndex) {
+ if (codePoint < 0 || codePoint > Character.MAX_CODE_POINT) {
+ String escapeSequence = source.substring(startIndex, endIndex + 1);
+ _reportErrorForCurrentToken(
+ ParserErrorCode.INVALID_CODE_POINT, [escapeSequence]);
return;
}
- _errorListener.onError(error);
+ if (codePoint < Character.MAX_VALUE) {
+ buffer.writeCharCode(codePoint);
+ } else {
+ buffer.write(Character.toChars(codePoint));
+ }
}
/**
- * Report an error with the given [errorCode] and [arguments] associated with
- * the current token.
+ * Clone all tokens starting from the given [token] up to the end of the token
+ * stream, and return the first token in the new token stream.
*/
- void _reportErrorForCurrentToken(ParserErrorCode errorCode,
- [List<Object> arguments]) {
- _reportErrorForToken(errorCode, _currentToken, arguments);
+ Token _cloneTokens(Token token) {
+ if (token == null) {
+ return null;
+ }
+ token = token is CommentToken ? token.parent : token;
+ Token head = new Token(TokenType.EOF, -1);
+ head.setNext(head);
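+ // The head is a self-linked sentinel that simplifies appending; the first real cloned token is returned via 'head.next'.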
+ Token current = head;
+ while (token.type != TokenType.EOF) {
+ Token clone = token.copy();
+ current.setNext(clone);
+ current = clone;
+ token = token.next;
+ }
+ Token tail = new Token(TokenType.EOF, 0);
+ tail.setNext(tail);
+ current.setNext(tail);
+ return head.next;
}
/**
- * Report an error with the given [errorCode] and [arguments] associated with
- * the given [node].
+ * Convert the given [method] declaration into the nearest valid top-level
+ * function declaration (that is, the function declaration that most closely
+ * captures the components of the given method declaration).
*/
- void _reportErrorForNode(ParserErrorCode errorCode, AstNode node,
- [List<Object> arguments]) {
- _reportError(new AnalysisError(
- _source, node.offset, node.length, errorCode, arguments));
- }
+ FunctionDeclaration _convertToFunctionDeclaration(MethodDeclaration method) =>
+ new FunctionDeclaration(
+ method.documentationComment,
+ method.metadata,
+ method.externalKeyword,
+ method.returnType,
+ method.propertyKeyword,
+ method.name,
+ new FunctionExpression(
+ method.typeParameters, method.parameters, method.body));
/**
- * Report an error with the given [errorCode] and [arguments] associated with
- * the given [token].
+ * Return `true` if the current token could be the start of a compilation unit
+ * member. This method is used for recovery purposes to decide when to stop
+ * skipping tokens after finding an error while parsing a compilation unit
+ * member.
*/
- void _reportErrorForToken(ErrorCode errorCode, Token token,
- [List<Object> arguments]) {
- if (token.type == TokenType.EOF) {
- token = token.previous;
+ bool _couldBeStartOfCompilationUnitMember() {
+ Keyword keyword = _currentToken.keyword;
+ Token next = _currentToken.next;
+ TokenType nextType = next.type;
+ if ((keyword == Keyword.IMPORT ||
+ keyword == Keyword.EXPORT ||
+ keyword == Keyword.LIBRARY ||
+ keyword == Keyword.PART) &&
+ nextType != TokenType.PERIOD &&
+ nextType != TokenType.LT) {
+ // This looks like the start of a directive
+ return true;
+ } else if (keyword == Keyword.CLASS) {
+ // This looks like the start of a class definition
+ return true;
+ } else if (keyword == Keyword.TYPEDEF &&
+ nextType != TokenType.PERIOD &&
+ nextType != TokenType.LT) {
+ // This looks like the start of a typedef
+ return true;
+ } else if (keyword == Keyword.VOID ||
+ ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+ _tokenMatchesIdentifier(next)) ||
+ (keyword == Keyword.OPERATOR && _isOperator(next))) {
+ // This looks like the start of a function
+ return true;
+ } else if (_matchesIdentifier()) {
+ if (nextType == TokenType.OPEN_PAREN) {
+ // This looks like the start of a function
+ return true;
+ }
+ Token token = skipReturnType(_currentToken);
+ if (token == null) {
+ return false;
+ }
+ // TODO(brianwilkerson) This looks wrong; should we be checking 'token'?
+ if (keyword == Keyword.GET ||
+ keyword == Keyword.SET ||
+ (keyword == Keyword.OPERATOR && _isOperator(next)) ||
+ _matchesIdentifier()) {
+ return true;
+ }
}
- _reportError(new AnalysisError(_source, token.offset,
- math.max(token.length, 1), errorCode, arguments));
+ return false;
}
/**
- * Skips a block with all containing blocks.
+ * Return a synthetic token representing the given [keyword].
*/
- void _skipBlock() {
- Token endToken = (_currentToken as BeginToken).endToken;
- if (endToken == null) {
- endToken = _currentToken.next;
- while (!identical(endToken, _currentToken)) {
- _currentToken = endToken;
- endToken = _currentToken.next;
- }
- _reportErrorForToken(
- ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, ["}"]);
- } else {
- _currentToken = endToken.next;
- }
- }
+ Token _createSyntheticKeyword(Keyword keyword) => _injectToken(
+ new Parser_SyntheticKeywordToken(keyword, _currentToken.offset));
/**
- * Parse the 'final', 'const', 'var' or type preceding a variable declaration,
- * starting at the given token, without actually creating a type or changing
- * the current token. Return the token following the type that was parsed, or
- * `null` if the given token is not the first token in a valid type. The
- * [startToken] is the token at which parsing is to begin. Return the token
- * following the type that was parsed.
- *
- * finalConstVarOrType ::=
- * | 'final' type?
- * | 'const' type?
- * | 'var'
- * | type
+ * Return a synthetic token with the given [type].
*/
- Token _skipFinalConstVarOrType(Token startToken) {
- if (_tokenMatchesKeyword(startToken, Keyword.FINAL) ||
- _tokenMatchesKeyword(startToken, Keyword.CONST)) {
- Token next = startToken.next;
- if (_tokenMatchesIdentifier(next)) {
- Token next2 = next.next;
- // "Type parameter" or "Type<" or "prefix.Type"
- if (_tokenMatchesIdentifier(next2) ||
- _tokenMatches(next2, TokenType.LT) ||
- _tokenMatches(next2, TokenType.PERIOD)) {
- return _skipTypeName(next);
- }
- // "parameter"
- return next;
- }
- } else if (_tokenMatchesKeyword(startToken, Keyword.VAR)) {
- return startToken.next;
- } else if (_tokenMatchesIdentifier(startToken)) {
- Token next = startToken.next;
- if (_tokenMatchesIdentifier(next) ||
- _tokenMatches(next, TokenType.LT) ||
- _tokenMatchesKeyword(next, Keyword.THIS) ||
- (_tokenMatches(next, TokenType.PERIOD) &&
- _tokenMatchesIdentifier(next.next) &&
- (_tokenMatchesIdentifier(next.next.next) ||
- _tokenMatches(next.next.next, TokenType.LT) ||
- _tokenMatchesKeyword(next.next.next, Keyword.THIS)))) {
- return _skipReturnType(startToken);
+ Token _createSyntheticToken(TokenType type) =>
+ _injectToken(new StringToken(type, "", _currentToken.offset));
+
+ /**
+ * Create and return a new token with the given [type]. The token will replace
+ * the first portion of the given [token], so it will have the same offset and
+ * will have any comments that might have preceded the token.
+ */
+ Token _createToken(Token token, TokenType type, {bool isBegin: false}) {
+ CommentToken comments = token.precedingComments;
+ if (comments == null) {
+ if (isBegin) {
+ return new BeginToken(type, token.offset);
}
+ return new Token(type, token.offset);
+ } else if (isBegin) {
+ return new BeginTokenWithComment(type, token.offset, comments);
}
- return null;
+ return new TokenWithComment(type, token.offset, comments);
}
/**
- * Parse a list of formal parameters, starting at the [startToken], without
- * actually creating a formal parameter list or changing the current token.
- * Return the token following the formal parameter list that was parsed, or
- * `null` if the given token is not the first token in a valid list of formal
- * parameter.
- *
- * Note that unlike other skip methods, this method uses a heuristic. In the
- * worst case, the parameters could be prefixed by metadata, which would
- * require us to be able to skip arbitrary expressions. Rather than duplicate
- * the logic of most of the parse methods we simply look for something that is
- * likely to be a list of parameters and then skip to returning the token
- * after the closing parenthesis.
- *
- * This method must be kept in sync with [parseFormalParameterList].
+ * Check that the given [expression] is assignable and report an error if it
+ * isn't.
*
- * formalParameterList ::=
- * '(' ')'
- * | '(' normalFormalParameters (',' optionalFormalParameters)? ')'
- * | '(' optionalFormalParameters ')'
+ * assignableExpression ::=
+ * primary (arguments* assignableSelector)+
+ * | 'super' unconditionalAssignableSelector
+ * | identifier
*
- * normalFormalParameters ::=
- * normalFormalParameter (',' normalFormalParameter)*
+ * unconditionalAssignableSelector ::=
+ * '[' expression ']'
+ * | '.' identifier
*
- * optionalFormalParameters ::=
- * optionalPositionalFormalParameters
- * | namedFormalParameters
+ * assignableSelector ::=
+ * unconditionalAssignableSelector
+ * | '?.' identifier
+ */
+ void _ensureAssignable(Expression expression) {
+ if (expression != null && !expression.isAssignable) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE);
+ }
+ }
+
+ /**
+ * If the current token has the expected type, return it after advancing to
+ * the next token. Otherwise report an error and return the current token
+ * without advancing.
*
- * optionalPositionalFormalParameters ::=
- * '[' defaultFormalParameter (',' defaultFormalParameter)* ']'
+ * Note that the method [_expectGt] should be used if the argument to this
+ * method would be [TokenType.GT].
*
- * namedFormalParameters ::=
- * '{' defaultNamedParameter (',' defaultNamedParameter)* '}'
+ * The [type] is the type of token that is expected.
*/
- Token _skipFormalParameterList(Token startToken) {
- if (!_tokenMatches(startToken, TokenType.OPEN_PAREN)) {
- return null;
- }
- Token next = startToken.next;
- if (_tokenMatches(next, TokenType.CLOSE_PAREN)) {
- return next.next;
- }
- //
- // Look to see whether the token after the open parenthesis is something
- // that should only occur at the beginning of a parameter list.
- //
- if (next.matchesAny([
- TokenType.AT,
- TokenType.OPEN_SQUARE_BRACKET,
- TokenType.OPEN_CURLY_BRACKET
- ]) ||
- _tokenMatchesKeyword(next, Keyword.VOID) ||
- (_tokenMatchesIdentifier(next) &&
- (next.next.matchesAny([TokenType.COMMA, TokenType.CLOSE_PAREN])))) {
- return _skipPastMatchingToken(startToken);
+ Token _expect(TokenType type) {
+ if (_matches(type)) {
+ return getAndAdvance();
}
- //
- // Look to see whether the first parameter is a function typed parameter
- // without a return type.
- //
- if (_tokenMatchesIdentifier(next) &&
- _tokenMatches(next.next, TokenType.OPEN_PAREN)) {
- Token afterParameters = _skipFormalParameterList(next.next);
- if (afterParameters != null &&
- (afterParameters
- .matchesAny([TokenType.COMMA, TokenType.CLOSE_PAREN]))) {
- return _skipPastMatchingToken(startToken);
+ // Remove uses of this method in favor of matches?
+ // Pass in the error code to use to report the error?
+ if (type == TokenType.SEMICOLON) {
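+ // If the token after the current one is the expected semicolon, report the current token as unexpected, skip it, and consume the semicolon.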
+ if (_tokenMatches(_currentToken.next, TokenType.SEMICOLON)) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]);
+ _advance();
+ return getAndAdvance();
}
+ _reportErrorForToken(ParserErrorCode.EXPECTED_TOKEN,
+ _currentToken.previous, [type.lexeme]);
+ return _createSyntheticToken(TokenType.SEMICOLON);
}
- //
- // Look to see whether the first parameter has a type or is a function typed
- // parameter with a return type.
- //
- Token afterType = _skipFinalConstVarOrType(next);
- if (afterType == null) {
- return null;
- }
- if (_skipSimpleIdentifier(afterType) == null) {
- return null;
- }
- return _skipPastMatchingToken(startToken);
+ _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TOKEN, [type.lexeme]);
+ return _createSyntheticToken(type);
}
/**
- * If the [startToken] is a begin token with an associated end token, then
- * return the token following the end token. Otherwise, return `null`.
+ * If the current token has the type [TokenType.GT], return it after advancing
+ * to the next token. Otherwise report an error and create a synthetic token.
*/
- Token _skipPastMatchingToken(Token startToken) {
- if (startToken is! BeginToken) {
- return null;
- }
- Token closeParen = (startToken as BeginToken).endToken;
- if (closeParen == null) {
- return null;
+ Token _expectGt() {
+ if (_matchesGt()) {
+ return getAndAdvance();
}
- return closeParen.next;
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [TokenType.GT.lexeme]);
+ return _createSyntheticToken(TokenType.GT);
}
/**
- * Parse a prefixed identifier, starting at the [startToken], without actually
- * creating a prefixed identifier or changing the current token. Return the
- * token following the prefixed identifier that was parsed, or `null` if the
- * given token is not the first token in a valid prefixed identifier.
- *
- * This method must be kept in sync with [parsePrefixedIdentifier].
- *
- * prefixedIdentifier ::=
- * identifier ('.' identifier)?
+ * If the current token is a keyword matching the given [keyword], return it
+ * after advancing to the next token. Otherwise report an error and return the
+ * current token without advancing.
*/
- Token _skipPrefixedIdentifier(Token startToken) {
- Token token = _skipSimpleIdentifier(startToken);
- if (token == null) {
- return null;
- } else if (!_tokenMatches(token, TokenType.PERIOD)) {
- return token;
- }
- token = token.next;
- Token nextToken = _skipSimpleIdentifier(token);
- if (nextToken != null) {
- return nextToken;
- } else if (_tokenMatches(token, TokenType.CLOSE_PAREN) ||
- _tokenMatches(token, TokenType.COMMA)) {
- // If the `id.` is followed by something that cannot produce a valid
- // structure then assume this is a prefixed identifier but missing the
- // trailing identifier
- return token;
+ Token _expectKeyword(Keyword keyword) {
+ if (_matchesKeyword(keyword)) {
+ return getAndAdvance();
}
- return null;
+ // Remove uses of this method in favor of matches?
+ // Pass in the error code to use to report the error?
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [keyword.syntax]);
+ return _currentToken;
}
/**
- * Parse a return type, starting at the [startToken], without actually
- * creating a return type or changing the current token. Return the token
- * following the return type that was parsed, or `null` if the given token is
- * not the first token in a valid return type.
- *
- * This method must be kept in sync with [parseReturnType].
- *
- * returnType ::=
- * 'void'
- * | type
+ * Search the given list of [ranges] for a range that contains the given
+ * [index]. Return the range that was found, or `null` if none of the ranges
+ * contain the index.
*/
- Token _skipReturnType(Token startToken) {
- if (_tokenMatchesKeyword(startToken, Keyword.VOID)) {
- return startToken.next;
- } else {
- return _skipTypeName(startToken);
+ List<int> _findRange(List<List<int>> ranges, int index) {
+ int rangeCount = ranges.length;
+ for (int i = 0; i < rangeCount; i++) {
+ List<int> range = ranges[i];
+ if (range[0] <= index && index <= range[1]) {
+ return range;
+ } else if (index < range[0]) {
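+ // The ranges are in increasing order of position, so the index cannot be in any later range.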
+ return null;
+ }
}
+ return null;
}
/**
- * Parse a simple identifier, starting at the [startToken], without actually
- * creating a simple identifier or changing the current token. Return the
- * token following the simple identifier that was parsed, or `null` if the
- * given token is not the first token in a valid simple identifier.
- *
- * This method must be kept in sync with [parseSimpleIdentifier].
- *
- * identifier ::=
- * IDENTIFIER
+ * Return a list of the ranges of characters in the given [comment] that
+ * should be treated as code blocks.
*/
- Token _skipSimpleIdentifier(Token startToken) {
- if (_tokenMatches(startToken, TokenType.IDENTIFIER) ||
- _tokenMatchesPseudoKeyword(startToken)) {
- return startToken.next;
+ List<List<int>> _getCodeBlockRanges(String comment) {
+ List<List<int>> ranges = <List<int>>[];
+ int length = comment.length;
+ if (length < 3) {
+ return ranges;
+ }
+ int index = 0;
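+ // Skip over the comment delimiter ('///' or '/**') at the start of the comment, if present.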
+ int firstChar = comment.codeUnitAt(0);
+ if (firstChar == 0x2F) {
+ int secondChar = comment.codeUnitAt(1);
+ int thirdChar = comment.codeUnitAt(2);
+ if ((secondChar == 0x2A && thirdChar == 0x2A) ||
+ (secondChar == 0x2F && thirdChar == 0x2F)) {
+ index = 3;
+ }
+ }
+ if (StringUtilities.startsWith4(comment, index, 0x20, 0x20, 0x20, 0x20)) {
+ int end = index + 4;
+ while (end < length &&
+ comment.codeUnitAt(end) != 0xD &&
+ comment.codeUnitAt(end) != 0xA) {
+ end = end + 1;
+ }
+ ranges.add(<int>[index, end]);
+ index = end;
+ }
+ while (index < length) {
+ int currentChar = comment.codeUnitAt(index);
+ if (currentChar == 0xD || currentChar == 0xA) {
+ index = index + 1;
+ while (index < length &&
+ Character.isWhitespace(comment.codeUnitAt(index))) {
+ index = index + 1;
+ }
+ if (StringUtilities.startsWith6(
+ comment, index, 0x2A, 0x20, 0x20, 0x20, 0x20, 0x20)) {
+ int end = index + 6;
+ while (end < length &&
+ comment.codeUnitAt(end) != 0xD &&
+ comment.codeUnitAt(end) != 0xA) {
+ end = end + 1;
+ }
+ ranges.add(<int>[index, end]);
+ index = end;
+ }
+ } else if (index + 1 < length &&
+ currentChar == 0x5B &&
+ comment.codeUnitAt(index + 1) == 0x3A) {
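+ // A '[:' begins a code region that extends to the matching ':]', or to the end of the comment if there is none.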
+ int end = StringUtilities.indexOf2(comment, index + 2, 0x3A, 0x5D);
+ if (end < 0) {
+ end = length;
+ }
+ ranges.add(<int>[index, end]);
+ index = end + 1;
+ } else {
+ index = index + 1;
+ }
}
- return null;
+ return ranges;
}
/**
- * Parse a string literal that contains interpolations, starting at the
- * [startToken], without actually creating a string literal or changing the
- * current token. Return the token following the string literal that was
- * parsed, or `null` if the given token is not the first token in a valid
- * string literal.
- *
- * This method must be kept in sync with [parseStringInterpolation].
+ * Return the end token associated with the given [beginToken], or `null` if
+ * either the given token is not a begin token or it does not have an end
+ * token associated with it.
*/
- Token _skipStringInterpolation(Token startToken) {
- Token token = startToken;
- TokenType type = token.type;
- while (type == TokenType.STRING_INTERPOLATION_EXPRESSION ||
- type == TokenType.STRING_INTERPOLATION_IDENTIFIER) {
- if (type == TokenType.STRING_INTERPOLATION_EXPRESSION) {
- token = token.next;
- type = token.type;
- //
- // Rather than verify that the following tokens represent a valid
- // expression, we simply skip tokens until we reach the end of the
- // interpolation, being careful to handle nested string literals.
- //
- int bracketNestingLevel = 1;
- while (bracketNestingLevel > 0) {
- if (type == TokenType.EOF) {
- return null;
- } else if (type == TokenType.OPEN_CURLY_BRACKET) {
- bracketNestingLevel++;
- } else if (type == TokenType.CLOSE_CURLY_BRACKET) {
- bracketNestingLevel--;
- } else if (type == TokenType.STRING) {
- token = _skipStringLiteral(token);
- if (token == null) {
- return null;
- }
- } else {
- token = token.next;
+ Token _getEndToken(Token beginToken) {
+ if (beginToken is BeginToken) {
+ return beginToken.endToken;
+ }
+ return null;
+ }
+
+ bool _injectGenericComment(TokenType type, int prefixLen) {
+ if (parseGenericMethodComments) {
+ CommentToken t = _currentToken.precedingComments;
+ for (; t != null; t = t.next) {
+ if (t.type == type) {
+ String comment = t.lexeme.substring(prefixLen, t.lexeme.length - 2);
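+ // Strip the leading 'prefixLen' characters (e.g. '/*=' or '/*') and the trailing '*/' so that only the type text remains.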
+ Token list = _scanGenericMethodComment(comment, t.offset + prefixLen);
+ if (list != null) {
+ // Remove the token from the comment stream.
+ t.remove();
+ // Insert the tokens into the stream.
+ _injectTokenList(list);
+ return true;
}
- type = token.type;
- }
- token = token.next;
- type = token.type;
- } else {
- token = token.next;
- if (token.type != TokenType.IDENTIFIER) {
- return null;
}
- token = token.next;
- }
- type = token.type;
- if (type == TokenType.STRING) {
- token = token.next;
- type = token.type;
}
}
- return token;
+ return false;
}
/**
- * Parse a string literal, starting at the [startToken], without actually
- * creating a string literal or changing the current token. Return the token
- * following the string literal that was parsed, or `null` if the given token
- * is not the first token in a valid string literal.
- *
- * This method must be kept in sync with [parseStringLiteral].
+ * Matches a generic comment type substitution and injects it into the token
+ * stream. Returns true if a match was injected, otherwise false.
*
- * stringLiteral ::=
- * MULTI_LINE_STRING+
- * | SINGLE_LINE_STRING+
+ * These comments are of the form `/*=T*/`, in other words, a [TypeName]
+ * inside a slash-star comment, preceded by an equals sign.
*/
- Token _skipStringLiteral(Token startToken) {
- Token token = startToken;
- while (token != null && _tokenMatches(token, TokenType.STRING)) {
- token = token.next;
- TokenType type = token.type;
- if (type == TokenType.STRING_INTERPOLATION_EXPRESSION ||
- type == TokenType.STRING_INTERPOLATION_IDENTIFIER) {
- token = _skipStringInterpolation(token);
- }
- }
- if (identical(token, startToken)) {
- return null;
- }
- return token;
+ bool _injectGenericCommentTypeAssign() {
+ return _injectGenericComment(TokenType.GENERIC_METHOD_TYPE_ASSIGN, 3);
}
/**
- * Parse a list of type arguments, starting at the [startToken], without
- * actually creating a type argument list or changing the current token.
- * Return the token following the type argument list that was parsed, or
- * `null` if the given token is not the first token in a valid type argument
- * list.
- *
- * This method must be kept in sync with [parseTypeArgumentList].
- *
- * typeArguments ::=
- * '<' typeList '>'
+ * Matches generic comment type parameters and injects them into the token
+ * stream. Returns true if a match was injected, otherwise false.
*
- * typeList ::=
- * type (',' type)*
+ * These comments are of the form `/*<K, V>*/`, in other words, a
+ * [TypeParameterList] or [TypeArgumentList] inside a slash-star comment.
*/
- Token _skipTypeArgumentList(Token startToken) {
- Token token = startToken;
- if (!_tokenMatches(token, TokenType.LT)) {
- return null;
- }
- token = _skipTypeName(token.next);
- if (token == null) {
- // If the start token '<' is followed by '>'
- // then assume this should be type argument list but is missing a type
- token = startToken.next;
- if (_tokenMatches(token, TokenType.GT)) {
- return token.next;
- }
- return null;
+ bool _injectGenericCommentTypeList() {
+ return _injectGenericComment(TokenType.GENERIC_METHOD_TYPE_LIST, 2);
+ }
+
+ /**
+ * Inject the given [token] into the token stream immediately before the
+ * current token.
+ */
+ Token _injectToken(Token token) {
+ Token previous = _currentToken.previous;
+ token.setNext(_currentToken);
+ previous.setNext(token);
+ return token;
+ }
+
+ void _injectTokenList(Token firstToken) {
+ // The scanner terminates the token stream with a cyclic EOF token; find the last real token before it.
+ Token lastToken = firstToken;
+ while (lastToken.next.type != TokenType.EOF) {
+ lastToken = lastToken.next;
}
- while (_tokenMatches(token, TokenType.COMMA)) {
- token = _skipTypeName(token.next);
- if (token == null) {
- return null;
- }
+ // Inject these new tokens into the stream.
+ Token previous = _currentToken.previous;
+ lastToken.setNext(_currentToken);
+ previous.setNext(firstToken);
+ _currentToken = firstToken;
+ }
+
+ /**
+ * Return `true` if the current token could be the question mark in a
+ * conditional expression. The current token is assumed to be a question mark.
+ */
+ bool _isConditionalOperator() {
+ void parseOperation(Parser parser) {
+ parser.parseExpressionWithoutCascade();
}
- if (token.type == TokenType.GT) {
- return token.next;
- } else if (token.type == TokenType.GT_GT) {
- Token second = new Token(TokenType.GT, token.offset + 1);
- second.setNextWithoutSettingPrevious(token.next);
- return second;
+
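+ // Speculatively parse the expression that would follow the '?'; it must be followed by a ':' and a second parsable expression.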
+ Token token = _skip(_currentToken.next, parseOperation);
+ if (token == null || !_tokenMatches(token, TokenType.COLON)) {
+ return false;
}
- return null;
+ token = _skip(token.next, parseOperation);
+ return token != null;
}
/**
- * Parse a type name, starting at the [startToken], without actually creating
- * a type name or changing the current token. Return the token following the
- * type name that was parsed, or `null` if the given token is not the first
- * token in a valid type name.
- *
- * This method must be kept in sync with [parseTypeName].
- *
- * type ::=
- * qualified typeArguments?
+ * Return `true` if the given [character] is a valid hexadecimal digit.
*/
- Token _skipTypeName(Token startToken) {
- Token token = _skipPrefixedIdentifier(startToken);
- if (token == null) {
- return null;
+ bool _isHexDigit(int character) =>
+ (0x30 <= character && character <= 0x39) ||
+ (0x41 <= character && character <= 0x46) ||
+ (0x61 <= character && character <= 0x66);
+
+ bool _isLikelyArgumentList() {
+ // Try to reduce the amount of lookahead required here before enabling
+ // generic methods.
+ if (_matches(TokenType.OPEN_PAREN)) {
+ return true;
}
- if (_tokenMatches(token, TokenType.LT)) {
- token = _skipTypeArgumentList(token);
+ if (!parseGenericMethods) {
+ return false;
}
- return token;
+ Token token = skipTypeArgumentList(_currentToken);
+ return token != null && _tokenMatches(token, TokenType.OPEN_PAREN);
}
/**
- * Parse a list of type parameters, starting at the [startToken], without
- * actually creating a type parameter list or changing the current token.
- * Return the token following the type parameter list that was parsed, or
- * `null` if the given token is not the first token in a valid type parameter
- * list.
- *
- * This method must be kept in sync with [parseTypeParameterList].
+ * Given that we have just found bracketed text within the given [comment],
+ * look to see whether that text is (a) followed by a parenthesized link
+ * address, (b) followed by a colon, or (c) followed by optional whitespace
+ * and another square bracket. The [rightIndex] is the index of the right
+ * bracket. Return `true` if the bracketed text is followed by a link address.
*
- * typeParameterList ::=
- * '<' typeParameter (',' typeParameter)* '>'
+ * This method uses the syntax described by the
+ * <a href="http://daringfireball.net/projects/markdown/syntax">markdown</a>
+ * project.
*/
- Token _skipTypeParameterList(Token startToken) {
- if (!_tokenMatches(startToken, TokenType.LT)) {
- return null;
+ bool _isLinkText(String comment, int rightIndex) {
+ int length = comment.length;
+ int index = rightIndex + 1;
+ if (index >= length) {
+ return false;
}
- //
- // We can't skip a type parameter because it can be preceeded by metadata,
- // so we just assume that everything before the matching end token is valid.
- //
- int depth = 1;
- Token next = startToken.next;
- while (depth > 0) {
- if (_tokenMatches(next, TokenType.EOF)) {
- return null;
- } else if (_tokenMatches(next, TokenType.LT)) {
- depth++;
- } else if (_tokenMatches(next, TokenType.GT)) {
- depth--;
- } else if (_tokenMatches(next, TokenType.GT_EQ)) {
- if (depth == 1) {
- Token fakeEquals = new Token(TokenType.EQ, next.offset + 2);
- fakeEquals.setNextWithoutSettingPrevious(next.next);
- return fakeEquals;
- }
- depth--;
- } else if (_tokenMatches(next, TokenType.GT_GT)) {
- depth -= 2;
- } else if (_tokenMatches(next, TokenType.GT_GT_EQ)) {
- if (depth < 2) {
- return null;
- } else if (depth == 2) {
- Token fakeEquals = new Token(TokenType.EQ, next.offset + 2);
- fakeEquals.setNextWithoutSettingPrevious(next.next);
- return fakeEquals;
- }
- depth -= 2;
+ int nextChar = comment.codeUnitAt(index);
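+ // A '(' or ':' immediately after the ']' indicates a link; otherwise skip any whitespace and check for a following '['.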
+ if (nextChar == 0x28 || nextChar == 0x3A) {
+ return true;
+ }
+ while (Character.isWhitespace(nextChar)) {
+ index = index + 1;
+ if (index >= length) {
+ return false;
}
- next = next.next;
+ nextChar = comment.codeUnitAt(index);
}
- return next;
+ return nextChar == 0x5B;
}
/**
- * Return `true` if the given [token] has the given [type].
+ * Return `true` if the given [startToken] appears to be the beginning of an
+ * operator declaration.
*/
- bool _tokenMatches(Token token, TokenType type) => token.type == type;
+ bool _isOperator(Token startToken) {
+ // Accept any operator here, even if it is not user definable.
+ if (!startToken.isOperator) {
+ return false;
+ }
+ // Token "=" means that it is actually a field initializer.
+ if (startToken.type == TokenType.EQ) {
+ return false;
+ }
+ // Consume all operator tokens.
+ Token token = startToken.next;
+ while (token.isOperator) {
+ token = token.next;
+ }
+ // A formal parameter list is expected now.
+ return _tokenMatches(token, TokenType.OPEN_PAREN);
+ }
- /**
- * Return `true` if the given [token] is a valid identifier. Valid identifiers
- * include built-in identifiers (pseudo-keywords).
- */
- bool _tokenMatchesIdentifier(Token token) =>
- _tokenMatches(token, TokenType.IDENTIFIER) ||
- _tokenMatchesPseudoKeyword(token);
+ bool _isPeekGenericTypeParametersAndOpenParen() {
+ if (!parseGenericMethods) {
+ return false;
+ }
+ Token token = _skipTypeParameterList(_peek());
+ return token != null && _tokenMatches(token, TokenType.OPEN_PAREN);
+ }
/**
- * Return `true` if the given [token] matches the given [keyword].
+ * Return `true` if the [startToken] appears to be the first token of a type
+ * name that is followed by a variable or field formal parameter.
*/
- bool _tokenMatchesKeyword(Token token, Keyword keyword) =>
- token.type == TokenType.KEYWORD &&
- (token as KeywordToken).keyword == keyword;
+ bool _isTypedIdentifier(Token startToken) {
+ Token token = skipReturnType(startToken);
+ if (token == null) {
+ return false;
+ } else if (_tokenMatchesIdentifier(token)) {
+ return true;
+ } else if (_tokenMatchesKeyword(token, Keyword.THIS) &&
+ _tokenMatches(token.next, TokenType.PERIOD) &&
+ _tokenMatchesIdentifier(token.next.next)) {
+ return true;
+ } else if (_tokenMatchesKeyword(startToken, Keyword.VOID)) {
+ // The keyword 'void' isn't a valid identifier, so it should be assumed to
+ // be a type name.
+ return true;
+ } else if (startToken.next != token &&
+ !_tokenMatches(token, TokenType.OPEN_PAREN)) {
+ // The type is more than a simple identifier, so it should be assumed to
+ // be a type name.
+ return true;
+ }
+ return false;
+ }
/**
- * Return `true` if the given [token] matches a pseudo keyword.
+ * Increments the error reporting lock level. If the level is greater than `0`,
+ * then [reportError] won't report any errors.
*/
- bool _tokenMatchesPseudoKeyword(Token token) =>
- _tokenMatches(token, TokenType.KEYWORD) &&
- (token as KeywordToken).keyword.isPseudoKeyword;
+ void _lockErrorListener() {
+ _errorListenerLock++;
+ }
/**
- * Return `true` if the given [token] matches the given [identifier].
+ * Return `true` if the current token has the given [type]. Note that the
+ * method [_matchesGt] should be used if the argument to this method would be
+ * [TokenType.GT].
*/
- bool _tokenMatchesString(Token token, String identifier) =>
- token.type == TokenType.IDENTIFIER && token.lexeme == identifier;
+ bool _matches(TokenType type) => _currentToken.type == type;
/**
- * Translate the characters at the given [index] in the given [lexeme],
- * appending the translated character to the given [buffer]. The index is
- * assumed to be valid.
+ * Return `true` if the current token has a type of [TokenType.GT]. Note that
+ * this method, unlike other variants, will modify the token stream if
+ * possible to match the desired type. In particular, if the current token is
+ * a '>>', '>=', or '>>=', the token stream will be re-written (splitting off a
+ * leading '>') and `true` will be returned.
*/
- int _translateCharacter(StringBuffer buffer, String lexeme, int index) {
- int currentChar = lexeme.codeUnitAt(index);
- if (currentChar != 0x5C) {
- buffer.writeCharCode(currentChar);
- return index + 1;
- }
- //
- // We have found an escape sequence, so we parse the string to determine
- // what kind of escape sequence and what character to add to the builder.
- //
- int length = lexeme.length;
- int currentIndex = index + 1;
- if (currentIndex >= length) {
- // Illegal escape sequence: no char after escape.
- // This cannot actually happen because it would require the escape
- // character to be the last character in the string, but if it were it
- // would escape the closing quote, leaving the string unclosed.
- // reportError(ParserErrorCode.MISSING_CHAR_IN_ESCAPE_SEQUENCE);
- return length;
- }
- currentChar = lexeme.codeUnitAt(currentIndex);
- if (currentChar == 0x6E) {
- buffer.writeCharCode(0xA);
- // newline
- } else if (currentChar == 0x72) {
- buffer.writeCharCode(0xD);
- // carriage return
- } else if (currentChar == 0x66) {
- buffer.writeCharCode(0xC);
- // form feed
- } else if (currentChar == 0x62) {
- buffer.writeCharCode(0x8);
- // backspace
- } else if (currentChar == 0x74) {
- buffer.writeCharCode(0x9);
- // tab
- } else if (currentChar == 0x76) {
- buffer.writeCharCode(0xB);
- // vertical tab
- } else if (currentChar == 0x78) {
- if (currentIndex + 2 >= length) {
- // Illegal escape sequence: not enough hex digits
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE);
- return length;
- }
- int firstDigit = lexeme.codeUnitAt(currentIndex + 1);
- int secondDigit = lexeme.codeUnitAt(currentIndex + 2);
- if (!_isHexDigit(firstDigit) || !_isHexDigit(secondDigit)) {
- // Illegal escape sequence: invalid hex digit
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE);
- } else {
- int charCode = (Character.digit(firstDigit, 16) << 4) +
- Character.digit(secondDigit, 16);
- buffer.writeCharCode(charCode);
- }
- return currentIndex + 3;
- } else if (currentChar == 0x75) {
- currentIndex++;
- if (currentIndex >= length) {
- // Illegal escape sequence: not enough hex digits
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- return length;
- }
- currentChar = lexeme.codeUnitAt(currentIndex);
- if (currentChar == 0x7B) {
- currentIndex++;
- if (currentIndex >= length) {
- // Illegal escape sequence: incomplete escape
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- return length;
- }
- currentChar = lexeme.codeUnitAt(currentIndex);
- int digitCount = 0;
- int value = 0;
- while (currentChar != 0x7D) {
- if (!_isHexDigit(currentChar)) {
- // Illegal escape sequence: invalid hex digit
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- currentIndex++;
- while (currentIndex < length &&
- lexeme.codeUnitAt(currentIndex) != 0x7D) {
- currentIndex++;
- }
- return currentIndex + 1;
- }
- digitCount++;
- value = (value << 4) + Character.digit(currentChar, 16);
- currentIndex++;
- if (currentIndex >= length) {
- // Illegal escape sequence: incomplete escape
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- return length;
- }
- currentChar = lexeme.codeUnitAt(currentIndex);
- }
- if (digitCount < 1 || digitCount > 6) {
- // Illegal escape sequence: not enough or too many hex digits
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- }
- _appendScalarValue(buffer, lexeme.substring(index, currentIndex + 1),
- value, index, currentIndex);
- return currentIndex + 1;
- } else {
- if (currentIndex + 3 >= length) {
- // Illegal escape sequence: not enough hex digits
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- return length;
- }
- int firstDigit = currentChar;
- int secondDigit = lexeme.codeUnitAt(currentIndex + 1);
- int thirdDigit = lexeme.codeUnitAt(currentIndex + 2);
- int fourthDigit = lexeme.codeUnitAt(currentIndex + 3);
- if (!_isHexDigit(firstDigit) ||
- !_isHexDigit(secondDigit) ||
- !_isHexDigit(thirdDigit) ||
- !_isHexDigit(fourthDigit)) {
- // Illegal escape sequence: invalid hex digits
- _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
- } else {
- _appendScalarValue(
- buffer,
- lexeme.substring(index, currentIndex + 1),
- (((((Character.digit(firstDigit, 16) << 4) +
- Character.digit(secondDigit, 16)) <<
- 4) +
- Character.digit(thirdDigit, 16)) <<
- 4) +
- Character.digit(fourthDigit, 16),
- index,
- currentIndex + 3);
- }
- return currentIndex + 4;
- }
- } else {
- buffer.writeCharCode(currentChar);
+ bool _matchesGt() {
+ TokenType currentType = _currentToken.type;
+ if (currentType == TokenType.GT) {
+ return true;
+ } else if (currentType == TokenType.GT_GT) {
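+ // Split the '>>' token into two '>' tokens, making the first one current so that a single '>' can be consumed.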
+ Token first = _createToken(_currentToken, TokenType.GT);
+ Token second = new Token(TokenType.GT, _currentToken.offset + 1);
+ second.setNext(_currentToken.next);
+ first.setNext(second);
+ _currentToken.previous.setNext(first);
+ _currentToken = first;
+ return true;
+ } else if (currentType == TokenType.GT_EQ) {
+ Token first = _createToken(_currentToken, TokenType.GT);
+ Token second = new Token(TokenType.EQ, _currentToken.offset + 1);
+ second.setNext(_currentToken.next);
+ first.setNext(second);
+ _currentToken.previous.setNext(first);
+ _currentToken = first;
+ return true;
+ } else if (currentType == TokenType.GT_GT_EQ) {
+ int offset = _currentToken.offset;
+ Token first = _createToken(_currentToken, TokenType.GT);
+ Token second = new Token(TokenType.GT, offset + 1);
+ Token third = new Token(TokenType.EQ, offset + 2);
+ third.setNext(_currentToken.next);
+ second.setNext(third);
+ first.setNext(second);
+ _currentToken.previous.setNext(first);
+ _currentToken = first;
+ return true;
}
- return currentIndex + 1;
+ return false;
}
/**
- * Decrements the error reporting lock level. If level is more than `0`, then
- * [reportError] wont report any error.
+ * Return `true` if the current token is a valid identifier. Valid identifiers
+ * include built-in identifiers (pseudo-keywords).
*/
- void _unlockErrorListener() {
- if (_errorListenerLock == 0) {
- throw new IllegalStateException(
- "Attempt to unlock not locked error listener.");
- }
- _errorListenerLock--;
- }
+ bool _matchesIdentifier() => _tokenMatchesIdentifier(_currentToken);
/**
- * Validate that the given [parameterList] does not contain any field
- * initializers.
+ * Return `true` if the current token matches the given [keyword].
*/
- void _validateFormalParameterList(FormalParameterList parameterList) {
- for (FormalParameter parameter in parameterList.parameters) {
- if (parameter is FieldFormalParameter) {
- _reportErrorForNode(
- ParserErrorCode.FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR,
- parameter.identifier);
- }
- }
- }
+ bool _matchesKeyword(Keyword keyword) =>
+ _tokenMatchesKeyword(_currentToken, keyword);
/**
- * Validate that the given set of [modifiers] is appropriate for a class and
- * return the 'abstract' keyword if there is one.
+ * Return `true` if the current token matches the given [identifier].
*/
- Token _validateModifiersForClass(Modifiers modifiers) {
- _validateModifiersForTopLevelDeclaration(modifiers);
- if (modifiers.constKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword);
- }
- if (modifiers.externalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_CLASS, modifiers.externalKeyword);
- }
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword);
- }
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.VAR_CLASS, modifiers.varKeyword);
+ bool _matchesString(String identifier) =>
+ _currentToken.type == TokenType.IDENTIFIER &&
+ _currentToken.lexeme == identifier;
+
+ /**
+ * Report an error with the given [errorCode] if the given [typeName] has been
+ * marked as nullable.
+ */
+ void _mustNotBeNullable(TypeName typeName, ParserErrorCode errorCode) {
+ if (typeName.question != null) {
+ _reportErrorForToken(errorCode, typeName.question);
}
- return modifiers.abstractKeyword;
}
/**
- * Validate that the given set of [modifiers] is appropriate for a constructor
- * and return the 'const' keyword if there is one.
+ * If the current token has the given [type], then advance to the next token
+ * and return `true`. Otherwise, return `false` without advancing. This method
+ * should not be invoked with an argument value of [TokenType.GT].
*/
- Token _validateModifiersForConstructor(Modifiers modifiers) {
- if (modifiers.abstractKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.ABSTRACT_CLASS_MEMBER, modifiers.abstractKeyword);
- }
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.FINAL_CONSTRUCTOR, modifiers.finalKeyword);
- }
- if (modifiers.staticKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.STATIC_CONSTRUCTOR, modifiers.staticKeyword);
- }
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, modifiers.varKeyword);
- }
- Token externalKeyword = modifiers.externalKeyword;
- Token constKeyword = modifiers.constKeyword;
- Token factoryKeyword = modifiers.factoryKeyword;
- if (externalKeyword != null &&
- constKeyword != null &&
- constKeyword.offset < externalKeyword.offset) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_AFTER_CONST, externalKeyword);
- }
- if (externalKeyword != null &&
- factoryKeyword != null &&
- factoryKeyword.offset < externalKeyword.offset) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_AFTER_FACTORY, externalKeyword);
+ bool _optional(TokenType type) {
+ if (_currentToken.type == type) {
+ _advance();
+ return true;
}
- return constKeyword;
+ return false;
}
/**
- * Validate that the given set of [modifiers] is appropriate for a class and
- * return the 'abstract' keyword if there is one.
+ * Parse an argument list when we need to check for an open paren and recover
+ * when there isn't one. Return the argument list that was parsed.
*/
- void _validateModifiersForEnum(Modifiers modifiers) {
- _validateModifiersForTopLevelDeclaration(modifiers);
- if (modifiers.abstractKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.ABSTRACT_ENUM, modifiers.abstractKeyword);
- }
- if (modifiers.constKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_ENUM, modifiers.constKeyword);
- }
- if (modifiers.externalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_ENUM, modifiers.externalKeyword);
- }
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(ParserErrorCode.FINAL_ENUM, modifiers.finalKeyword);
- }
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.VAR_ENUM, modifiers.varKeyword);
+ ArgumentList _parseArgumentListChecked() {
+ if (_matches(TokenType.OPEN_PAREN)) {
+ return parseArgumentList();
}
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]);
+ // Recovery: Look to see whether there is a close paren that isn't matched
+ // to an open paren and if so parse the list of arguments as normal.
+ return new ArgumentList(_createSyntheticToken(TokenType.OPEN_PAREN), null,
+ _createSyntheticToken(TokenType.CLOSE_PAREN));
}
/**
- * Validate that the given set of [modifiers] is appropriate for a field and
- * return the 'final', 'const' or 'var' keyword if there is one.
+ * Parse an assert within a constructor's initializer list. Return the assert.
+ *
+ * This method assumes that the current token matches `Keyword.ASSERT`.
+ *
+ * assertInitializer ::=
+ * 'assert' '(' expression [',' expression] ')'
*/
- Token _validateModifiersForField(Modifiers modifiers) {
- if (modifiers.abstractKeyword != null) {
- _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
- }
- if (modifiers.externalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword);
- }
- if (modifiers.factoryKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
+ void _parseAssertInitializer() {
+ // TODO(brianwilkerson) Capture the syntax in the AST using a new class,
+ // such as AssertInitializer
+ Token keyword = getAndAdvance();
+ Token leftParen = _expect(TokenType.OPEN_PAREN);
+ Expression expression = parseExpression2();
+ Token comma;
+ Expression message;
+ if (_matches(TokenType.COMMA)) {
+ comma = getAndAdvance();
+ message = parseExpression2();
}
- Token staticKeyword = modifiers.staticKeyword;
- Token constKeyword = modifiers.constKeyword;
- Token finalKeyword = modifiers.finalKeyword;
- Token varKeyword = modifiers.varKeyword;
- if (constKeyword != null) {
- if (finalKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword);
- }
- if (varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword);
- }
- if (staticKeyword != null && constKeyword.offset < staticKeyword.offset) {
- _reportErrorForToken(ParserErrorCode.STATIC_AFTER_CONST, staticKeyword);
- }
- } else if (finalKeyword != null) {
- if (varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword);
+ Token rightParen = _expect(TokenType.CLOSE_PAREN);
+// return new AssertInitializer(
+// keyword, leftParen, expression, comma, message, rightParen);
+ }
+
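
For reference, the construct this method consumes looks like the following (an illustrative class; only the initializer-list assert is relevant here, and it is parsed only when _enableAssertInitializer is on):

    class Point {
      final int x;
      // 'assert' '(' expression [',' expression] ')' in the initializer list;
      // the second expression is the optional message.
      Point(this.x) : assert(x >= 0, 'x must be non-negative');
    }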
+ /**
+ * Parse an assignable expression given that the current token is not 'super'.
+ * The [primaryAllowed] is `true` if the expression is allowed to be a primary
+ * without any assignable selector. Return the assignable expression that was
+ * parsed.
+ */
+ Expression _parseAssignableExpressionNotStartingWithSuper(
+ bool primaryAllowed) {
+ //
+ // A primary expression can start with an identifier. We resolve the
+ // ambiguity by determining whether the primary consists of anything other
+ // than an identifier and/or is followed by an assignableSelector.
+ //
+ Expression expression = parsePrimaryExpression();
+ bool isOptional = primaryAllowed || expression is SimpleIdentifier;
+ while (true) {
+ while (_isLikelyArgumentList()) {
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
+ ArgumentList argumentList = parseArgumentList();
+ Expression currentExpression = expression;
+ if (currentExpression is SimpleIdentifier) {
+ expression = new MethodInvocation(
+ null, null, currentExpression, typeArguments, argumentList);
+ } else if (currentExpression is PrefixedIdentifier) {
+ expression = new MethodInvocation(
+ currentExpression.prefix,
+ currentExpression.period,
+ currentExpression.identifier,
+ typeArguments,
+ argumentList);
+ } else if (currentExpression is PropertyAccess) {
+ expression = new MethodInvocation(
+ currentExpression.target,
+ currentExpression.operator,
+ currentExpression.propertyName,
+ typeArguments,
+ argumentList);
+ } else {
+ expression = new FunctionExpressionInvocation(
+ expression, typeArguments, argumentList);
+ }
+ if (!primaryAllowed) {
+ isOptional = false;
+ }
}
- if (staticKeyword != null && finalKeyword.offset < staticKeyword.offset) {
- _reportErrorForToken(ParserErrorCode.STATIC_AFTER_FINAL, staticKeyword);
+ Expression selectorExpression = parseAssignableSelector(
+ expression, isOptional || (expression is PrefixedIdentifier));
+ if (identical(selectorExpression, expression)) {
+ if (!isOptional && (expression is PrefixedIdentifier)) {
+ PrefixedIdentifier identifier = expression as PrefixedIdentifier;
+ expression = new PropertyAccess(
+ identifier.prefix, identifier.period, identifier.identifier);
+ }
+ return expression;
}
- } else if (varKeyword != null &&
- staticKeyword != null &&
- varKeyword.offset < staticKeyword.offset) {
- _reportErrorForToken(ParserErrorCode.STATIC_AFTER_VAR, staticKeyword);
+ expression = selectorExpression;
+ isOptional = true;
}
- return Token.lexicallyFirst([constKeyword, finalKeyword, varKeyword]);
}
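
As a rough illustration (hypothetical snippet), each statement below starts with a plain identifier, and this method decides how much of it belongs to the assignable expression or invocation chain:

    counter = 1;            // bare identifier as the assignment target
    config.timeout = 5;     // identifier followed by a '.name' selector
    buffer.write('x');      // identifier followed by an argument list
    handlers[0](event);     // index selector, then a function invocation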
/**
- * Validate that the given set of [modifiers] is appropriate for a local
- * function.
+ * Parse a block when we need to check for an open curly brace and recover
+ * when there isn't one. Return the block that was parsed.
+ *
+ * block ::=
+ * '{' statements '}'
*/
- void _validateModifiersForFunctionDeclarationStatement(Modifiers modifiers) {
- if (modifiers.abstractKeyword != null ||
- modifiers.constKeyword != null ||
- modifiers.externalKeyword != null ||
- modifiers.factoryKeyword != null ||
- modifiers.finalKeyword != null ||
- modifiers.staticKeyword != null ||
- modifiers.varKeyword != null) {
- _reportErrorForCurrentToken(
- ParserErrorCode.LOCAL_FUNCTION_DECLARATION_MODIFIER);
+ Block _parseBlockChecked() {
+ if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+ return parseBlock();
}
+ // TODO(brianwilkerson) Improve the error message.
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_CURLY_BRACKET.lexeme]);
+ // Recovery: Check for an unmatched closing curly bracket and parse
+ // statements until it is reached.
+ return new Block(_createSyntheticToken(TokenType.OPEN_CURLY_BRACKET), null,
+ _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET));
}
/**
- * Validate that the given set of [modifiers] is appropriate for a getter,
- * setter, or method.
+ * Parse a list of class members. The [className] is the name of the class
+ * whose members are being parsed. The [closingBracket] is the closing bracket
+ * for the class, or `null` if the closing bracket is missing. Return the list
+ * of class members that were parsed.
+ *
+ * classMembers ::=
+ * (metadata memberDefinition)*
*/
- void _validateModifiersForGetterOrSetterOrMethod(Modifiers modifiers) {
- if (modifiers.abstractKeyword != null) {
- _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
- }
- if (modifiers.constKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.CONST_METHOD, modifiers.constKeyword);
- }
- if (modifiers.factoryKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
+ List<ClassMember> _parseClassMembers(String className, Token closingBracket) {
+ List<ClassMember> members = <ClassMember>[];
+ Token memberStart = _currentToken;
+ TokenType type = _currentToken.type;
+ Keyword keyword = _currentToken.keyword;
+ while (type != TokenType.EOF &&
+ type != TokenType.CLOSE_CURLY_BRACKET &&
+ (closingBracket != null ||
+ (keyword != Keyword.CLASS && keyword != Keyword.TYPEDEF))) {
+ if (type == TokenType.SEMICOLON) {
+ _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
+ [_currentToken.lexeme]);
+ _advance();
+ } else {
+ ClassMember member = parseClassMember(className);
+ if (member != null) {
+ members.add(member);
+ }
+ }
+ if (identical(_currentToken, memberStart)) {
+ _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
+ [_currentToken.lexeme]);
+ _advance();
+ }
+ memberStart = _currentToken;
+ type = _currentToken.type;
+ keyword = _currentToken.keyword;
}
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword);
+ return members;
+ }
+
+ /**
+ * Parse a class type alias. The [commentAndMetadata] is the metadata to be
+ * associated with the member. The [abstractKeyword] is the token representing
+ * the 'abstract' keyword. The [classKeyword] is the token representing the
+ * 'class' keyword. The [className] is the name of the alias, and the
+ * [typeParameters] are the type parameters following the name. Return the
+ * class type alias that was parsed.
+ *
+ * classTypeAlias ::=
+ * identifier typeParameters? '=' 'abstract'? mixinApplication
+ *
+ * mixinApplication ::=
+ * type withClause implementsClause? ';'
+ */
+ ClassTypeAlias _parseClassTypeAliasAfterName(
+ CommentAndMetadata commentAndMetadata,
+ Token abstractKeyword,
+ Token classKeyword,
+ SimpleIdentifier className,
+ TypeParameterList typeParameters) {
+ Token equals = _expect(TokenType.EQ);
+ TypeName superclass = parseTypeName(false);
+ WithClause withClause = null;
+ if (_matchesKeyword(Keyword.WITH)) {
+ withClause = parseWithClause();
+ } else {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [Keyword.WITH.syntax]);
}
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
+ ImplementsClause implementsClause = null;
+ if (_matchesKeyword(Keyword.IMPLEMENTS)) {
+ implementsClause = parseImplementsClause();
}
- Token externalKeyword = modifiers.externalKeyword;
- Token staticKeyword = modifiers.staticKeyword;
- if (externalKeyword != null &&
- staticKeyword != null &&
- staticKeyword.offset < externalKeyword.offset) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_AFTER_STATIC, externalKeyword);
+ Token semicolon;
+ if (_matches(TokenType.SEMICOLON)) {
+ semicolon = getAndAdvance();
+ } else {
+ if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [TokenType.SEMICOLON.lexeme]);
+ Token leftBracket = getAndAdvance();
+ _parseClassMembers(className.name, _getEndToken(leftBracket));
+ _expect(TokenType.CLOSE_CURLY_BRACKET);
+ } else {
+ _reportErrorForToken(ParserErrorCode.EXPECTED_TOKEN,
+ _currentToken.previous, [TokenType.SEMICOLON.lexeme]);
+ }
+ semicolon = _createSyntheticToken(TokenType.SEMICOLON);
}
+ return new ClassTypeAlias(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ classKeyword,
+ className,
+ typeParameters,
+ equals,
+ abstractKeyword,
+ superclass,
+ withClause,
+ implementsClause,
+ semicolon);
}
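
A class type alias of the kind handled here looks like the following (S, M, and I are assumed to be a class, a mixin, and an interface declared elsewhere):

    class Base = S with M implements I;
    abstract class Generic<T> = S with M;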
/**
- * Validate that the given set of [modifiers] is appropriate for a getter,
- * setter, or method.
+ * Parse a list of configurations. Return the configurations that were parsed,
+ * or `null` if there are no configurations.
*/
- void _validateModifiersForOperator(Modifiers modifiers) {
- if (modifiers.abstractKeyword != null) {
- _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
- }
- if (modifiers.constKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.CONST_METHOD, modifiers.constKeyword);
- }
- if (modifiers.factoryKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
- }
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword);
- }
- if (modifiers.staticKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.STATIC_OPERATOR, modifiers.staticKeyword);
- }
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
+ List<Configuration> _parseConfigurations() {
+ List<Configuration> configurations = null;
+ while (_matchesKeyword(Keyword.IF)) {
+ configurations ??= <Configuration>[];
+ configurations.add(parseConfiguration());
}
+ return configurations;
}
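
Each configuration is an 'if (...)' clause on an import or export directive, so a directive carrying configurations (hypothetical URIs) looks like:

    import 'src/networking_stub.dart'
        if (dart.library.io) 'src/networking_io.dart'
        if (dart.library.html) 'src/networking_html.dart';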
- /**
- * Validate that the given set of [modifiers] is appropriate for a top-level
- * declaration.
- */
- void _validateModifiersForTopLevelDeclaration(Modifiers modifiers) {
- if (modifiers.factoryKeyword != null) {
- _reportErrorForToken(ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION,
- modifiers.factoryKeyword);
+ ConstructorDeclaration _parseConstructor(
+ CommentAndMetadata commentAndMetadata,
+ Token externalKeyword,
+ Token constKeyword,
+ Token factoryKeyword,
+ SimpleIdentifier returnType,
+ Token period,
+ SimpleIdentifier name,
+ FormalParameterList parameters) {
+ bool bodyAllowed = externalKeyword == null;
+ Token separator = null;
+ List<ConstructorInitializer> initializers = null;
+ if (_matches(TokenType.COLON)) {
+ separator = getAndAdvance();
+ initializers = <ConstructorInitializer>[];
+ do {
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.THIS) {
+ TokenType nextType = _peek().type;
+ if (nextType == TokenType.OPEN_PAREN) {
+ bodyAllowed = false;
+ initializers.add(parseRedirectingConstructorInvocation(false));
+ } else if (nextType == TokenType.PERIOD &&
+ _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) {
+ bodyAllowed = false;
+ initializers.add(parseRedirectingConstructorInvocation(true));
+ } else {
+ initializers.add(parseConstructorFieldInitializer(true));
+ }
+ } else if (keyword == Keyword.SUPER) {
+ initializers.add(parseSuperConstructorInvocation());
+ } else if (_matches(TokenType.OPEN_CURLY_BRACKET) ||
+ _matches(TokenType.FUNCTION)) {
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER);
+ } else if (_enableAssertInitializer &&
+ _matchesKeyword(Keyword.ASSERT)) {
+ _parseAssertInitializer();
+ } else {
+ initializers.add(parseConstructorFieldInitializer(false));
+ }
+ } while (_optional(TokenType.COMMA));
+ if (factoryKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.FACTORY_WITH_INITIALIZERS, factoryKeyword);
+ }
}
- if (modifiers.staticKeyword != null) {
- _reportErrorForToken(ParserErrorCode.STATIC_TOP_LEVEL_DECLARATION,
- modifiers.staticKeyword);
+ ConstructorName redirectedConstructor = null;
+ FunctionBody body;
+ if (_matches(TokenType.EQ)) {
+ separator = getAndAdvance();
+ redirectedConstructor = parseConstructorName();
+ body = new EmptyFunctionBody(_expect(TokenType.SEMICOLON));
+ if (factoryKeyword == null) {
+ _reportErrorForNode(
+ ParserErrorCode.REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR,
+ redirectedConstructor);
+ }
+ } else {
+ body =
+ parseFunctionBody(true, ParserErrorCode.MISSING_FUNCTION_BODY, false);
+ if (constKeyword != null &&
+ factoryKeyword != null &&
+ externalKeyword == null) {
+ _reportErrorForToken(ParserErrorCode.CONST_FACTORY, factoryKeyword);
+ } else if (body is EmptyFunctionBody) {
+ if (factoryKeyword != null &&
+ externalKeyword == null &&
+ _parseFunctionBodies) {
+ _reportErrorForToken(
+ ParserErrorCode.FACTORY_WITHOUT_BODY, factoryKeyword);
+ }
+ } else {
+ if (constKeyword != null) {
+ _reportErrorForNode(
+ ParserErrorCode.CONST_CONSTRUCTOR_WITH_BODY, body);
+ } else if (externalKeyword != null) {
+ _reportErrorForNode(
+ ParserErrorCode.EXTERNAL_CONSTRUCTOR_WITH_BODY, body);
+ } else if (!bodyAllowed) {
+ _reportErrorForNode(
+ ParserErrorCode.REDIRECTING_CONSTRUCTOR_WITH_BODY, body);
+ }
+ }
}
+ return new ConstructorDeclaration(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ externalKeyword,
+ constKeyword,
+ factoryKeyword,
+ returnType,
+ period,
+ name,
+ parameters,
+ separator,
+ initializers,
+ redirectedConstructor,
+ body);
}
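
An illustrative class exercising the branches above: field initializers and a redirecting 'this(...)' call go through the initializer loop, while the '=' form goes through the redirected-constructor branch (names are made up):

    class Rectangle {
      final int width;
      final int height;
      Rectangle(int w, int h)
          : width = w,
            height = h;
      // Redirecting generative constructor: no body is allowed after 'this(...)'.
      Rectangle.square(int size) : this(size, size);
      // Redirecting factory constructor, parsed through the '=' branch.
      factory Rectangle.unit() = Rectangle._unit;
      Rectangle._unit() : this(1, 1);
    }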
/**
- * Validate that the given set of [modifiers] is appropriate for a top-level
- * function.
+ * Parse an enum constant declaration. Return the enum constant declaration
+ * that was parsed.
+ *
+ * Specified:
+ *
+ * enumConstant ::=
+ * id
+ *
+ * Actual:
+ *
+ * enumConstant ::=
+ * metadata id
*/
- void _validateModifiersForTopLevelFunction(Modifiers modifiers) {
- _validateModifiersForTopLevelDeclaration(modifiers);
- if (modifiers.abstractKeyword != null) {
- _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_FUNCTION);
- }
- if (modifiers.constKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword);
- }
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword);
+ EnumConstantDeclaration _parseEnumConstantDeclaration() {
+ CommentAndMetadata commentAndMetadata = parseCommentAndMetadata();
+ SimpleIdentifier name;
+ if (_matchesIdentifier()) {
+ name = _parseSimpleIdentifierUnchecked(isDeclaration: true);
+ } else {
+ name = createSyntheticIdentifier();
}
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
+ if (commentAndMetadata.hasMetadata) {
+ _reportErrorForNode(ParserErrorCode.ANNOTATION_ON_ENUM_CONSTANT,
+ commentAndMetadata.metadata[0]);
}
+ return new EnumConstantDeclaration(
+ commentAndMetadata.comment, commentAndMetadata.metadata, name);
}
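
In other words, metadata on an enum constant is parsed and then flagged, e.g. (illustrative enum):

    enum Status {
      active,
      // Parsed, then reported as ANNOTATION_ON_ENUM_CONSTANT.
      @deprecated
      retired
    }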
/**
- * Validate that the given set of [modifiers] is appropriate for a field and
- * return the 'final', 'const' or 'var' keyword if there is one.
+ * Parse a list of formal parameters given that the list starts with the given
+ * [leftParenthesis]. Return the formal parameters that were parsed.
*/
- Token _validateModifiersForTopLevelVariable(Modifiers modifiers) {
- _validateModifiersForTopLevelDeclaration(modifiers);
- if (modifiers.abstractKeyword != null) {
- _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_VARIABLE);
- }
- if (modifiers.externalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword);
+ FormalParameterList _parseFormalParameterListAfterParen(
+ Token leftParenthesis) {
+ if (_matches(TokenType.CLOSE_PAREN)) {
+ return new FormalParameterList(
+ leftParenthesis, null, null, null, getAndAdvance());
}
- Token constKeyword = modifiers.constKeyword;
- Token finalKeyword = modifiers.finalKeyword;
- Token varKeyword = modifiers.varKeyword;
- if (constKeyword != null) {
- if (finalKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword);
+ //
+ // Even though it is invalid to have default parameters outside of brackets,
+ // required parameters inside of brackets, or multiple groups of default and
+ // named parameters, we allow all of these cases so that we can recover
+ // better.
+ //
+ List<FormalParameter> parameters = <FormalParameter>[];
+ Token leftSquareBracket = null;
+ Token rightSquareBracket = null;
+ Token leftCurlyBracket = null;
+ Token rightCurlyBracket = null;
+ ParameterKind kind = ParameterKind.REQUIRED;
+ bool firstParameter = true;
+ bool reportedMultiplePositionalGroups = false;
+ bool reportedMultipleNamedGroups = false;
+ bool reportedMixedGroups = false;
+ bool wasOptionalParameter = false;
+ Token initialToken = null;
+ do {
+ if (firstParameter) {
+ firstParameter = false;
+ } else if (!_optional(TokenType.COMMA)) {
+ // TODO(brianwilkerson) The token is wrong, we need to recover from this
+ // case.
+ if (_getEndToken(leftParenthesis) != null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]);
+ } else {
+ _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS,
+ _currentToken.previous);
+ break;
+ }
}
- if (varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword);
+ initialToken = _currentToken;
+ //
+ // Handle the beginning of parameter groups.
+ //
+ TokenType type = _currentToken.type;
+ if (type == TokenType.OPEN_SQUARE_BRACKET) {
+ wasOptionalParameter = true;
+ if (leftSquareBracket != null && !reportedMultiplePositionalGroups) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS);
+ reportedMultiplePositionalGroups = true;
+ }
+ if (leftCurlyBracket != null && !reportedMixedGroups) {
+ _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
+ reportedMixedGroups = true;
+ }
+ leftSquareBracket = getAndAdvance();
+ kind = ParameterKind.POSITIONAL;
+ } else if (type == TokenType.OPEN_CURLY_BRACKET) {
+ wasOptionalParameter = true;
+ if (leftCurlyBracket != null && !reportedMultipleNamedGroups) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS);
+ reportedMultipleNamedGroups = true;
+ }
+ if (leftSquareBracket != null && !reportedMixedGroups) {
+ _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
+ reportedMixedGroups = true;
+ }
+ leftCurlyBracket = getAndAdvance();
+ kind = ParameterKind.NAMED;
}
- } else if (finalKeyword != null) {
- if (varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword);
+ //
+ // Parse and record the parameter.
+ //
+ FormalParameter parameter = parseFormalParameter(kind);
+ parameters.add(parameter);
+ if (kind == ParameterKind.REQUIRED && wasOptionalParameter) {
+ _reportErrorForNode(
+ ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter);
+ }
+ //
+ // Handle the end of parameter groups.
+ //
+ // TODO(brianwilkerson) Improve the detection and reporting of missing and
+ // mismatched delimiters.
+ type = _currentToken.type;
+
+ // Advance past trailing commas as appropriate.
+ if (type == TokenType.COMMA) {
+ // Only parse commas trailing normal (non-positional/named) params.
+ if (rightSquareBracket == null && rightCurlyBracket == null) {
+ Token next = _peek();
+ if (next.type == TokenType.CLOSE_PAREN ||
+ next.type == TokenType.CLOSE_CURLY_BRACKET ||
+ next.type == TokenType.CLOSE_SQUARE_BRACKET) {
+ _advance();
+ type = _currentToken.type;
+ }
+ }
+ }
+
+ if (type == TokenType.CLOSE_SQUARE_BRACKET) {
+ rightSquareBracket = getAndAdvance();
+ if (leftSquareBracket == null) {
+ if (leftCurlyBracket != null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
+ rightCurlyBracket = rightSquareBracket;
+ rightSquareBracket = null;
+ } else {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
+ ["["]);
+ }
+ }
+ kind = ParameterKind.REQUIRED;
+ } else if (type == TokenType.CLOSE_CURLY_BRACKET) {
+ rightCurlyBracket = getAndAdvance();
+ if (leftCurlyBracket == null) {
+ if (leftSquareBracket != null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
+ rightSquareBracket = rightCurlyBracket;
+ rightCurlyBracket = null;
+ } else {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
+ ["{"]);
+ }
+ }
+ kind = ParameterKind.REQUIRED;
}
+ } while (!_matches(TokenType.CLOSE_PAREN) &&
+ !identical(initialToken, _currentToken));
+ Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
+ //
+ // Check that the groups were closed correctly.
+ //
+ if (leftSquareBracket != null && rightSquareBracket == null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
}
- return Token.lexicallyFirst([constKeyword, finalKeyword, varKeyword]);
- }
-
- /**
- * Validate that the given set of [modifiers] is appropriate for a class and
- * return the 'abstract' keyword if there is one.
- */
- void _validateModifiersForTypedef(Modifiers modifiers) {
- _validateModifiersForTopLevelDeclaration(modifiers);
- if (modifiers.abstractKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.ABSTRACT_TYPEDEF, modifiers.abstractKeyword);
- }
- if (modifiers.constKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.CONST_TYPEDEF, modifiers.constKeyword);
- }
- if (modifiers.externalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.EXTERNAL_TYPEDEF, modifiers.externalKeyword);
- }
- if (modifiers.finalKeyword != null) {
- _reportErrorForToken(
- ParserErrorCode.FINAL_TYPEDEF, modifiers.finalKeyword);
- }
- if (modifiers.varKeyword != null) {
- _reportErrorForToken(ParserErrorCode.VAR_TYPEDEF, modifiers.varKeyword);
+ if (leftCurlyBracket != null && rightCurlyBracket == null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
}
+ //
+ // Build the parameter list.
+ //
+ leftSquareBracket ??= leftCurlyBracket;
+ rightSquareBracket ??= rightCurlyBracket;
+ return new FormalParameterList(leftParenthesis, parameters,
+ leftSquareBracket, rightSquareBracket, rightParenthesis);
}
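
Illustrative signatures covering the three parameter kinds tracked above (required, optional positional in '[]', named in '{}'), plus a trailing comma that the comma handling consumes; all names are hypothetical:

    void connect(String host, int port) {}
    void retry(int attempts, [Duration delay]) {}
    void draw(String label, {double scale, bool antialias}) {}
    void log(
      String message,
      Object error,
    ) {}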
-}
-/**
- * A synthetic keyword token.
- */
-class Parser_SyntheticKeywordToken extends KeywordToken {
/**
- * Initialize a newly created token to represent the given [keyword] at the
- * given [offset].
+ * Parse a list of formal parameters. Return the formal parameters that were
+ * parsed.
+ *
+ * This method assumes that the current token matches `TokenType.OPEN_PAREN`.
*/
- Parser_SyntheticKeywordToken(Keyword keyword, int offset)
- : super(keyword, offset);
-
- @override
- int get length => 0;
-
- @override
- Token copy() => new Parser_SyntheticKeywordToken(keyword, offset);
-}
-
-/**
- * The error codes used for errors detected by the parser. The convention for
- * this class is for the name of the error code to indicate the problem that
- * caused the error to be generated and for the error message to explain what
- * is wrong and, when appropriate, how the problem can be corrected.
- */
-class ParserErrorCode extends ErrorCode {
- static const ParserErrorCode ABSTRACT_CLASS_MEMBER = const ParserErrorCode(
- 'ABSTRACT_CLASS_MEMBER',
- "Members of classes cannot be declared to be 'abstract'");
-
- static const ParserErrorCode ABSTRACT_ENUM = const ParserErrorCode(
- 'ABSTRACT_ENUM', "Enums cannot be declared to be 'abstract'");
-
- static const ParserErrorCode ABSTRACT_STATIC_METHOD = const ParserErrorCode(
- 'ABSTRACT_STATIC_METHOD',
- "Static methods cannot be declared to be 'abstract'");
-
- static const ParserErrorCode ABSTRACT_TOP_LEVEL_FUNCTION =
- const ParserErrorCode('ABSTRACT_TOP_LEVEL_FUNCTION',
- "Top-level functions cannot be declared to be 'abstract'");
-
- static const ParserErrorCode ABSTRACT_TOP_LEVEL_VARIABLE =
- const ParserErrorCode('ABSTRACT_TOP_LEVEL_VARIABLE',
- "Top-level variables cannot be declared to be 'abstract'");
-
- static const ParserErrorCode ABSTRACT_TYPEDEF = const ParserErrorCode(
- 'ABSTRACT_TYPEDEF', "Type aliases cannot be declared to be 'abstract'");
-
- static const ParserErrorCode ANNOTATION_ON_ENUM_CONSTANT =
- const ParserErrorCode('ANNOTATION_ON_ENUM_CONSTANT',
- "Enum constants cannot have annotations");
-
- static const ParserErrorCode ASSERT_DOES_NOT_TAKE_ASSIGNMENT =
- const ParserErrorCode('ASSERT_DOES_NOT_TAKE_ASSIGNMENT',
- "Assert cannot be called on an assignment");
-
- static const ParserErrorCode ASSERT_DOES_NOT_TAKE_CASCADE =
- const ParserErrorCode(
- 'ASSERT_DOES_NOT_TAKE_CASCADE', "Assert cannot be called on cascade");
-
- static const ParserErrorCode ASSERT_DOES_NOT_TAKE_THROW =
- const ParserErrorCode(
- 'ASSERT_DOES_NOT_TAKE_THROW', "Assert cannot be called on throws");
-
- static const ParserErrorCode ASSERT_DOES_NOT_TAKE_RETHROW =
- const ParserErrorCode('ASSERT_DOES_NOT_TAKE_RETHROW',
- "Assert cannot be called on rethrows");
+ FormalParameterList _parseFormalParameterListUnchecked() {
+ return _parseFormalParameterListAfterParen(getAndAdvance());
+ }
/**
- * 16.32 Identifier Reference: It is a compile-time error if any of the
- * identifiers async, await, or yield is used as an identifier in a function
- * body marked with either async, async*, or sync*.
+ * Parse a function declaration statement. The [commentAndMetadata] is the
+ * documentation comment and metadata to be associated with the declaration.
+ * The [returnType] is the return type, or `null` if there is no return type.
+ * Return the function declaration statement that was parsed.
+ *
+ * functionDeclarationStatement ::=
+ * functionSignature functionBody
*/
- static const ParserErrorCode ASYNC_KEYWORD_USED_AS_IDENTIFIER =
- const ParserErrorCode('ASYNC_KEYWORD_USED_AS_IDENTIFIER',
- "The keywords 'async', 'await', and 'yield' may not be used as identifiers in an asynchronous or generator function.");
-
- static const ParserErrorCode BREAK_OUTSIDE_OF_LOOP = const ParserErrorCode(
- 'BREAK_OUTSIDE_OF_LOOP',
- "A break statement cannot be used outside of a loop or switch statement");
-
- static const ParserErrorCode CLASS_IN_CLASS = const ParserErrorCode(
- 'CLASS_IN_CLASS', "Classes cannot be declared inside other classes");
-
- static const ParserErrorCode COLON_IN_PLACE_OF_IN = const ParserErrorCode(
- 'COLON_IN_PLACE_OF_IN', "For-in loops use 'in' rather than a colon");
-
- static const ParserErrorCode CONST_AND_FINAL = const ParserErrorCode(
- 'CONST_AND_FINAL',
- "Members cannot be declared to be both 'const' and 'final'");
-
- static const ParserErrorCode CONST_AND_VAR = const ParserErrorCode(
- 'CONST_AND_VAR',
- "Members cannot be declared to be both 'const' and 'var'");
-
- static const ParserErrorCode CONST_CLASS = const ParserErrorCode(
- 'CONST_CLASS', "Classes cannot be declared to be 'const'");
-
- static const ParserErrorCode CONST_CONSTRUCTOR_WITH_BODY =
- const ParserErrorCode('CONST_CONSTRUCTOR_WITH_BODY',
- "'const' constructors cannot have a body");
-
- static const ParserErrorCode CONST_ENUM = const ParserErrorCode(
- 'CONST_ENUM', "Enums cannot be declared to be 'const'");
-
- static const ParserErrorCode CONST_FACTORY = const ParserErrorCode(
- 'CONST_FACTORY',
- "Only redirecting factory constructors can be declared to be 'const'");
-
- static const ParserErrorCode CONST_METHOD = const ParserErrorCode(
- 'CONST_METHOD',
- "Getters, setters and methods cannot be declared to be 'const'");
-
- static const ParserErrorCode CONST_TYPEDEF = const ParserErrorCode(
- 'CONST_TYPEDEF', "Type aliases cannot be declared to be 'const'");
-
- static const ParserErrorCode CONSTRUCTOR_WITH_RETURN_TYPE =
- const ParserErrorCode('CONSTRUCTOR_WITH_RETURN_TYPE',
- "Constructors cannot have a return type");
-
- static const ParserErrorCode CONTINUE_OUTSIDE_OF_LOOP = const ParserErrorCode(
- 'CONTINUE_OUTSIDE_OF_LOOP',
- "A continue statement cannot be used outside of a loop or switch statement");
-
- static const ParserErrorCode CONTINUE_WITHOUT_LABEL_IN_CASE =
- const ParserErrorCode('CONTINUE_WITHOUT_LABEL_IN_CASE',
- "A continue statement in a switch statement must have a label as a target");
-
- static const ParserErrorCode DEPRECATED_CLASS_TYPE_ALIAS =
- const ParserErrorCode('DEPRECATED_CLASS_TYPE_ALIAS',
- "The 'typedef' mixin application was replaced with 'class'");
-
- static const ParserErrorCode DIRECTIVE_AFTER_DECLARATION =
- const ParserErrorCode('DIRECTIVE_AFTER_DECLARATION',
- "Directives must appear before any declarations");
-
- static const ParserErrorCode DUPLICATE_LABEL_IN_SWITCH_STATEMENT =
- const ParserErrorCode('DUPLICATE_LABEL_IN_SWITCH_STATEMENT',
- "The label {0} was already used in this switch statement");
-
- static const ParserErrorCode DUPLICATED_MODIFIER = const ParserErrorCode(
- 'DUPLICATED_MODIFIER', "The modifier '{0}' was already specified.");
-
- static const ParserErrorCode EMPTY_ENUM_BODY = const ParserErrorCode(
- 'EMPTY_ENUM_BODY', "An enum must declare at least one constant name");
-
- static const ParserErrorCode ENUM_IN_CLASS = const ParserErrorCode(
- 'ENUM_IN_CLASS', "Enums cannot be declared inside classes");
-
- static const ParserErrorCode EQUALITY_CANNOT_BE_EQUALITY_OPERAND =
- const ParserErrorCode('EQUALITY_CANNOT_BE_EQUALITY_OPERAND',
- "Equality expression cannot be operand of another equality expression.");
-
- static const ParserErrorCode EXPECTED_CASE_OR_DEFAULT = const ParserErrorCode(
- 'EXPECTED_CASE_OR_DEFAULT', "Expected 'case' or 'default'");
-
- static const ParserErrorCode EXPECTED_CLASS_MEMBER =
- const ParserErrorCode('EXPECTED_CLASS_MEMBER', "Expected a class member");
-
- static const ParserErrorCode EXPECTED_EXECUTABLE = const ParserErrorCode(
- 'EXPECTED_EXECUTABLE',
- "Expected a method, getter, setter or operator declaration");
-
- static const ParserErrorCode EXPECTED_LIST_OR_MAP_LITERAL =
- const ParserErrorCode(
- 'EXPECTED_LIST_OR_MAP_LITERAL', "Expected a list or map literal");
-
- static const ParserErrorCode EXPECTED_STRING_LITERAL = const ParserErrorCode(
- 'EXPECTED_STRING_LITERAL', "Expected a string literal");
-
- static const ParserErrorCode EXPECTED_TOKEN =
- const ParserErrorCode('EXPECTED_TOKEN', "Expected to find '{0}'");
-
- static const ParserErrorCode EXPECTED_TYPE_NAME =
- const ParserErrorCode('EXPECTED_TYPE_NAME', "Expected a type name");
-
- static const ParserErrorCode EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE =
- const ParserErrorCode('EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE',
- "Export directives must preceed part directives");
-
- static const ParserErrorCode EXTERNAL_AFTER_CONST = const ParserErrorCode(
- 'EXTERNAL_AFTER_CONST',
- "The modifier 'external' should be before the modifier 'const'");
-
- static const ParserErrorCode EXTERNAL_AFTER_FACTORY = const ParserErrorCode(
- 'EXTERNAL_AFTER_FACTORY',
- "The modifier 'external' should be before the modifier 'factory'");
-
- static const ParserErrorCode EXTERNAL_AFTER_STATIC = const ParserErrorCode(
- 'EXTERNAL_AFTER_STATIC',
- "The modifier 'external' should be before the modifier 'static'");
-
- static const ParserErrorCode EXTERNAL_CLASS = const ParserErrorCode(
- 'EXTERNAL_CLASS', "Classes cannot be declared to be 'external'");
-
- static const ParserErrorCode EXTERNAL_CONSTRUCTOR_WITH_BODY =
- const ParserErrorCode('EXTERNAL_CONSTRUCTOR_WITH_BODY',
- "External constructors cannot have a body");
-
- static const ParserErrorCode EXTERNAL_ENUM = const ParserErrorCode(
- 'EXTERNAL_ENUM', "Enums cannot be declared to be 'external'");
-
- static const ParserErrorCode EXTERNAL_FIELD = const ParserErrorCode(
- 'EXTERNAL_FIELD', "Fields cannot be declared to be 'external'");
-
- static const ParserErrorCode EXTERNAL_GETTER_WITH_BODY =
- const ParserErrorCode(
- 'EXTERNAL_GETTER_WITH_BODY', "External getters cannot have a body");
-
- static const ParserErrorCode EXTERNAL_METHOD_WITH_BODY =
- const ParserErrorCode(
- 'EXTERNAL_METHOD_WITH_BODY', "External methods cannot have a body");
-
- static const ParserErrorCode EXTERNAL_OPERATOR_WITH_BODY =
- const ParserErrorCode('EXTERNAL_OPERATOR_WITH_BODY',
- "External operators cannot have a body");
-
- static const ParserErrorCode EXTERNAL_SETTER_WITH_BODY =
- const ParserErrorCode(
- 'EXTERNAL_SETTER_WITH_BODY', "External setters cannot have a body");
-
- static const ParserErrorCode EXTERNAL_TYPEDEF = const ParserErrorCode(
- 'EXTERNAL_TYPEDEF', "Type aliases cannot be declared to be 'external'");
-
- static const ParserErrorCode FACTORY_TOP_LEVEL_DECLARATION =
- const ParserErrorCode('FACTORY_TOP_LEVEL_DECLARATION',
- "Top-level declarations cannot be declared to be 'factory'");
-
- static const ParserErrorCode FACTORY_WITH_INITIALIZERS =
- const ParserErrorCode(
- 'FACTORY_WITH_INITIALIZERS',
- "A 'factory' constructor cannot have initializers",
- "Either remove the 'factory' keyword to make this a generative "
- "constructor or remove the initializers.");
-
- static const ParserErrorCode FACTORY_WITHOUT_BODY = const ParserErrorCode(
- 'FACTORY_WITHOUT_BODY',
- "A non-redirecting 'factory' constructor must have a body");
-
- static const ParserErrorCode FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR =
- const ParserErrorCode('FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR',
- "Field initializers can only be used in a constructor");
-
- static const ParserErrorCode FINAL_AND_VAR = const ParserErrorCode(
- 'FINAL_AND_VAR',
- "Members cannot be declared to be both 'final' and 'var'");
-
- static const ParserErrorCode FINAL_CLASS = const ParserErrorCode(
- 'FINAL_CLASS', "Classes cannot be declared to be 'final'");
-
- static const ParserErrorCode FINAL_CONSTRUCTOR = const ParserErrorCode(
- 'FINAL_CONSTRUCTOR', "A constructor cannot be declared to be 'final'");
-
- static const ParserErrorCode FINAL_ENUM = const ParserErrorCode(
- 'FINAL_ENUM', "Enums cannot be declared to be 'final'");
-
- static const ParserErrorCode FINAL_METHOD = const ParserErrorCode(
- 'FINAL_METHOD',
- "Getters, setters and methods cannot be declared to be 'final'");
-
- static const ParserErrorCode FINAL_TYPEDEF = const ParserErrorCode(
- 'FINAL_TYPEDEF', "Type aliases cannot be declared to be 'final'");
-
- static const ParserErrorCode FUNCTION_TYPED_PARAMETER_VAR = const ParserErrorCode(
- 'FUNCTION_TYPED_PARAMETER_VAR',
- "Function typed parameters cannot specify 'const', 'final' or 'var' instead of return type");
-
- static const ParserErrorCode GETTER_IN_FUNCTION = const ParserErrorCode(
- 'GETTER_IN_FUNCTION',
- "Getters cannot be defined within methods or functions");
-
- static const ParserErrorCode GETTER_WITH_PARAMETERS = const ParserErrorCode(
- 'GETTER_WITH_PARAMETERS',
- "Getter should be declared without a parameter list");
-
- static const ParserErrorCode ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE =
- const ParserErrorCode('ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE',
- "Illegal assignment to non-assignable expression");
-
- static const ParserErrorCode IMPLEMENTS_BEFORE_EXTENDS =
- const ParserErrorCode('IMPLEMENTS_BEFORE_EXTENDS',
- "The extends clause must be before the implements clause");
-
- static const ParserErrorCode IMPLEMENTS_BEFORE_WITH = const ParserErrorCode(
- 'IMPLEMENTS_BEFORE_WITH',
- "The with clause must be before the implements clause");
-
- static const ParserErrorCode IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE =
- const ParserErrorCode('IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE',
- "Import directives must preceed part directives");
-
- static const ParserErrorCode INITIALIZED_VARIABLE_IN_FOR_EACH =
- const ParserErrorCode('INITIALIZED_VARIABLE_IN_FOR_EACH',
- "The loop variable in a for-each loop cannot be initialized");
-
- static const ParserErrorCode INVALID_AWAIT_IN_FOR = const ParserErrorCode(
- 'INVALID_AWAIT_IN_FOR',
- "The modifier 'await' is not allowed for a normal 'for' statement",
- "Remove the keyword or use a for-each statement.");
-
- static const ParserErrorCode INVALID_CODE_POINT = const ParserErrorCode(
- 'INVALID_CODE_POINT',
- "The escape sequence '{0}' is not a valid code point");
-
- static const ParserErrorCode INVALID_COMMENT_REFERENCE = const ParserErrorCode(
- 'INVALID_COMMENT_REFERENCE',
- "Comment references should contain a possibly prefixed identifier and can start with 'new', but should not contain anything else");
-
- static const ParserErrorCode INVALID_HEX_ESCAPE = const ParserErrorCode(
- 'INVALID_HEX_ESCAPE',
- "An escape sequence starting with '\\x' must be followed by 2 hexidecimal digits");
-
- static const ParserErrorCode INVALID_OPERATOR = const ParserErrorCode(
- 'INVALID_OPERATOR', "The string '{0}' is not a valid operator");
-
- static const ParserErrorCode INVALID_OPERATOR_FOR_SUPER =
- const ParserErrorCode('INVALID_OPERATOR_FOR_SUPER',
- "The operator '{0}' cannot be used with 'super'");
-
- static const ParserErrorCode INVALID_STAR_AFTER_ASYNC = const ParserErrorCode(
- 'INVALID_STAR_AFTER_ASYNC',
- "The modifier 'async*' is not allowed for an expression function body",
- "Convert the body to a block.");
-
- static const ParserErrorCode INVALID_SYNC = const ParserErrorCode(
- 'INVALID_SYNC',
- "The modifier 'sync' is not allowed for an exrpression function body",
- "Convert the body to a block.");
-
- static const ParserErrorCode INVALID_UNICODE_ESCAPE = const ParserErrorCode(
- 'INVALID_UNICODE_ESCAPE',
- "An escape sequence starting with '\\u' must be followed by 4 hexidecimal digits or from 1 to 6 digits between '{' and '}'");
-
- static const ParserErrorCode LIBRARY_DIRECTIVE_NOT_FIRST =
- const ParserErrorCode('LIBRARY_DIRECTIVE_NOT_FIRST',
- "The library directive must appear before all other directives");
-
- static const ParserErrorCode LOCAL_FUNCTION_DECLARATION_MODIFIER =
- const ParserErrorCode('LOCAL_FUNCTION_DECLARATION_MODIFIER',
- "Local function declarations cannot specify any modifier");
-
- static const ParserErrorCode MISSING_ASSIGNABLE_SELECTOR =
- const ParserErrorCode('MISSING_ASSIGNABLE_SELECTOR',
- "Missing selector such as \".<identifier>\" or \"[0]\"");
-
- static const ParserErrorCode MISSING_ASSIGNMENT_IN_INITIALIZER =
- const ParserErrorCode('MISSING_ASSIGNMENT_IN_INITIALIZER',
- "Expected an assignment after the field name");
-
- static const ParserErrorCode MISSING_CATCH_OR_FINALLY = const ParserErrorCode(
- 'MISSING_CATCH_OR_FINALLY',
- "A try statement must have either a catch or finally clause");
-
- static const ParserErrorCode MISSING_CLASS_BODY = const ParserErrorCode(
- 'MISSING_CLASS_BODY',
- "A class definition must have a body, even if it is empty");
-
- static const ParserErrorCode MISSING_CLOSING_PARENTHESIS =
- const ParserErrorCode(
- 'MISSING_CLOSING_PARENTHESIS', "The closing parenthesis is missing");
-
- static const ParserErrorCode MISSING_CONST_FINAL_VAR_OR_TYPE =
- const ParserErrorCode('MISSING_CONST_FINAL_VAR_OR_TYPE',
- "Variables must be declared using the keywords 'const', 'final', 'var' or a type name");
-
- static const ParserErrorCode MISSING_ENUM_BODY = const ParserErrorCode(
- 'MISSING_ENUM_BODY',
- "An enum definition must have a body with at least one constant name");
-
- static const ParserErrorCode MISSING_EXPRESSION_IN_INITIALIZER =
- const ParserErrorCode('MISSING_EXPRESSION_IN_INITIALIZER',
- "Expected an expression after the assignment operator");
-
- static const ParserErrorCode MISSING_EXPRESSION_IN_THROW =
- const ParserErrorCode('MISSING_EXPRESSION_IN_THROW',
- "Throw expressions must compute the object to be thrown");
-
- static const ParserErrorCode MISSING_FUNCTION_BODY = const ParserErrorCode(
- 'MISSING_FUNCTION_BODY', "A function body must be provided");
-
- static const ParserErrorCode MISSING_FUNCTION_PARAMETERS =
- const ParserErrorCode('MISSING_FUNCTION_PARAMETERS',
- "Functions must have an explicit list of parameters");
-
- static const ParserErrorCode MISSING_METHOD_PARAMETERS =
- const ParserErrorCode('MISSING_METHOD_PARAMETERS',
- "Methods must have an explicit list of parameters");
-
- static const ParserErrorCode MISSING_GET = const ParserErrorCode(
- 'MISSING_GET',
- "Getters must have the keyword 'get' before the getter name");
-
- static const ParserErrorCode MISSING_IDENTIFIER =
- const ParserErrorCode('MISSING_IDENTIFIER', "Expected an identifier");
-
- static const ParserErrorCode MISSING_INITIALIZER =
- const ParserErrorCode('MISSING_INITIALIZER', "Expected an initializer");
-
- static const ParserErrorCode MISSING_KEYWORD_OPERATOR = const ParserErrorCode(
- 'MISSING_KEYWORD_OPERATOR',
- "Operator declarations must be preceeded by the keyword 'operator'");
-
- static const ParserErrorCode MISSING_NAME_IN_LIBRARY_DIRECTIVE =
- const ParserErrorCode('MISSING_NAME_IN_LIBRARY_DIRECTIVE',
- "Library directives must include a library name");
-
- static const ParserErrorCode MISSING_NAME_IN_PART_OF_DIRECTIVE =
- const ParserErrorCode('MISSING_NAME_IN_PART_OF_DIRECTIVE',
- "Library directives must include a library name");
-
- static const ParserErrorCode MISSING_PREFIX_IN_DEFERRED_IMPORT =
- const ParserErrorCode('MISSING_PREFIX_IN_DEFERRED_IMPORT',
- "Deferred imports must have a prefix");
-
- static const ParserErrorCode MISSING_STAR_AFTER_SYNC = const ParserErrorCode(
- 'MISSING_STAR_AFTER_SYNC',
- "The modifier 'sync' must be followed by a star ('*')",
- "Remove the modifier or add a star.");
-
- static const ParserErrorCode MISSING_STATEMENT =
- const ParserErrorCode('MISSING_STATEMENT', "Expected a statement");
-
- static const ParserErrorCode MISSING_TERMINATOR_FOR_PARAMETER_GROUP =
- const ParserErrorCode('MISSING_TERMINATOR_FOR_PARAMETER_GROUP',
- "There is no '{0}' to close the parameter group");
-
- static const ParserErrorCode MISSING_TYPEDEF_PARAMETERS =
- const ParserErrorCode('MISSING_TYPEDEF_PARAMETERS',
- "Type aliases for functions must have an explicit list of parameters");
-
- static const ParserErrorCode MISSING_VARIABLE_IN_FOR_EACH = const ParserErrorCode(
- 'MISSING_VARIABLE_IN_FOR_EACH',
- "A loop variable must be declared in a for-each loop before the 'in', but none were found");
-
- static const ParserErrorCode MIXED_PARAMETER_GROUPS = const ParserErrorCode(
- 'MIXED_PARAMETER_GROUPS',
- "Cannot have both positional and named parameters in a single parameter list");
-
- static const ParserErrorCode MULTIPLE_EXTENDS_CLAUSES = const ParserErrorCode(
- 'MULTIPLE_EXTENDS_CLAUSES',
- "Each class definition can have at most one extends clause");
-
- static const ParserErrorCode MULTIPLE_IMPLEMENTS_CLAUSES =
- const ParserErrorCode('MULTIPLE_IMPLEMENTS_CLAUSES',
- "Each class definition can have at most one implements clause");
-
- static const ParserErrorCode MULTIPLE_LIBRARY_DIRECTIVES =
- const ParserErrorCode('MULTIPLE_LIBRARY_DIRECTIVES',
- "Only one library directive may be declared in a file");
-
- static const ParserErrorCode MULTIPLE_NAMED_PARAMETER_GROUPS =
- const ParserErrorCode('MULTIPLE_NAMED_PARAMETER_GROUPS',
- "Cannot have multiple groups of named parameters in a single parameter list");
-
- static const ParserErrorCode MULTIPLE_PART_OF_DIRECTIVES =
- const ParserErrorCode('MULTIPLE_PART_OF_DIRECTIVES',
- "Only one part-of directive may be declared in a file");
-
- static const ParserErrorCode MULTIPLE_POSITIONAL_PARAMETER_GROUPS =
- const ParserErrorCode('MULTIPLE_POSITIONAL_PARAMETER_GROUPS',
- "Cannot have multiple groups of positional parameters in a single parameter list");
-
- static const ParserErrorCode MULTIPLE_VARIABLES_IN_FOR_EACH =
- const ParserErrorCode('MULTIPLE_VARIABLES_IN_FOR_EACH',
- "A single loop variable must be declared in a for-each loop before the 'in', but {0} were found");
-
- static const ParserErrorCode MULTIPLE_WITH_CLAUSES = const ParserErrorCode(
- 'MULTIPLE_WITH_CLAUSES',
- "Each class definition can have at most one with clause");
-
- static const ParserErrorCode NAMED_FUNCTION_EXPRESSION =
- const ParserErrorCode(
- 'NAMED_FUNCTION_EXPRESSION', "Function expressions cannot be named");
-
- static const ParserErrorCode NAMED_PARAMETER_OUTSIDE_GROUP =
- const ParserErrorCode('NAMED_PARAMETER_OUTSIDE_GROUP',
- "Named parameters must be enclosed in curly braces ('{' and '}')");
-
- static const ParserErrorCode NATIVE_CLAUSE_IN_NON_SDK_CODE =
- const ParserErrorCode('NATIVE_CLAUSE_IN_NON_SDK_CODE',
- "Native clause can only be used in the SDK and code that is loaded through native extensions");
-
- static const ParserErrorCode NATIVE_FUNCTION_BODY_IN_NON_SDK_CODE =
- const ParserErrorCode('NATIVE_FUNCTION_BODY_IN_NON_SDK_CODE',
- "Native functions can only be declared in the SDK and code that is loaded through native extensions");
-
- static const ParserErrorCode NON_CONSTRUCTOR_FACTORY = const ParserErrorCode(
- 'NON_CONSTRUCTOR_FACTORY',
- "Only constructors can be declared to be a 'factory'");
-
- static const ParserErrorCode NON_IDENTIFIER_LIBRARY_NAME =
- const ParserErrorCode('NON_IDENTIFIER_LIBRARY_NAME',
- "The name of a library must be an identifier");
-
- static const ParserErrorCode NON_PART_OF_DIRECTIVE_IN_PART =
- const ParserErrorCode('NON_PART_OF_DIRECTIVE_IN_PART',
- "The part-of directive must be the only directive in a part");
-
- static const ParserErrorCode NON_STRING_LITERAL_AS_URI =
- const ParserErrorCode(
- 'NON_STRING_LITERAL_AS_URI',
- "The URI must be a string literal",
- "Enclose the URI in either single or double quotes.");
-
- static const ParserErrorCode NON_USER_DEFINABLE_OPERATOR =
- const ParserErrorCode('NON_USER_DEFINABLE_OPERATOR',
- "The operator '{0}' is not user definable");
-
- static const ParserErrorCode NORMAL_BEFORE_OPTIONAL_PARAMETERS =
- const ParserErrorCode('NORMAL_BEFORE_OPTIONAL_PARAMETERS',
- "Normal parameters must occur before optional parameters");
-
- static const ParserErrorCode POSITIONAL_AFTER_NAMED_ARGUMENT =
- const ParserErrorCode('POSITIONAL_AFTER_NAMED_ARGUMENT',
- "Positional arguments must occur before named arguments");
-
- static const ParserErrorCode POSITIONAL_PARAMETER_OUTSIDE_GROUP =
- const ParserErrorCode('POSITIONAL_PARAMETER_OUTSIDE_GROUP',
- "Positional parameters must be enclosed in square brackets ('[' and ']')");
-
- static const ParserErrorCode REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR =
- const ParserErrorCode('REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR',
- "Only factory constructor can specify '=' redirection.");
-
- static const ParserErrorCode SETTER_IN_FUNCTION = const ParserErrorCode(
- 'SETTER_IN_FUNCTION',
- "Setters cannot be defined within methods or functions");
-
- static const ParserErrorCode STATIC_AFTER_CONST = const ParserErrorCode(
- 'STATIC_AFTER_CONST',
- "The modifier 'static' should be before the modifier 'const'");
-
- static const ParserErrorCode STATIC_AFTER_FINAL = const ParserErrorCode(
- 'STATIC_AFTER_FINAL',
- "The modifier 'static' should be before the modifier 'final'");
-
- static const ParserErrorCode STATIC_AFTER_VAR = const ParserErrorCode(
- 'STATIC_AFTER_VAR',
- "The modifier 'static' should be before the modifier 'var'");
-
- static const ParserErrorCode STATIC_CONSTRUCTOR = const ParserErrorCode(
- 'STATIC_CONSTRUCTOR', "Constructors cannot be static");
-
- static const ParserErrorCode STATIC_GETTER_WITHOUT_BODY =
- const ParserErrorCode(
- 'STATIC_GETTER_WITHOUT_BODY', "A 'static' getter must have a body");
-
- static const ParserErrorCode STATIC_OPERATOR =
- const ParserErrorCode('STATIC_OPERATOR', "Operators cannot be static");
-
- static const ParserErrorCode STATIC_SETTER_WITHOUT_BODY =
- const ParserErrorCode(
- 'STATIC_SETTER_WITHOUT_BODY', "A 'static' setter must have a body");
-
- static const ParserErrorCode STATIC_TOP_LEVEL_DECLARATION =
- const ParserErrorCode('STATIC_TOP_LEVEL_DECLARATION',
- "Top-level declarations cannot be declared to be 'static'");
-
- static const ParserErrorCode SWITCH_HAS_CASE_AFTER_DEFAULT_CASE =
- const ParserErrorCode('SWITCH_HAS_CASE_AFTER_DEFAULT_CASE',
- "The 'default' case should be the last case in a switch statement");
-
- static const ParserErrorCode SWITCH_HAS_MULTIPLE_DEFAULT_CASES =
- const ParserErrorCode('SWITCH_HAS_MULTIPLE_DEFAULT_CASES',
- "The 'default' case can only be declared once");
-
- static const ParserErrorCode TOP_LEVEL_OPERATOR = const ParserErrorCode(
- 'TOP_LEVEL_OPERATOR', "Operators must be declared within a class");
-
- static const ParserErrorCode TYPEDEF_IN_CLASS = const ParserErrorCode(
- 'TYPEDEF_IN_CLASS',
- "Function type aliases cannot be declared inside classes");
-
- static const ParserErrorCode UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP =
- const ParserErrorCode('UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP',
- "There is no '{0}' to open a parameter group");
-
- static const ParserErrorCode UNEXPECTED_TOKEN =
- const ParserErrorCode('UNEXPECTED_TOKEN', "Unexpected token '{0}'");
-
- static const ParserErrorCode WITH_BEFORE_EXTENDS = const ParserErrorCode(
- 'WITH_BEFORE_EXTENDS',
- "The extends clause must be before the with clause");
-
- static const ParserErrorCode WITH_WITHOUT_EXTENDS = const ParserErrorCode(
- 'WITH_WITHOUT_EXTENDS',
- "The with clause cannot be used without an extends clause");
-
- static const ParserErrorCode WRONG_SEPARATOR_FOR_NAMED_PARAMETER =
- const ParserErrorCode('WRONG_SEPARATOR_FOR_NAMED_PARAMETER',
- "The default value of a named parameter should be preceeded by ':'");
-
- static const ParserErrorCode WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER =
- const ParserErrorCode('WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER',
- "The default value of a positional parameter should be preceeded by '='");
-
- static const ParserErrorCode WRONG_TERMINATOR_FOR_PARAMETER_GROUP =
- const ParserErrorCode('WRONG_TERMINATOR_FOR_PARAMETER_GROUP',
- "Expected '{0}' to close parameter group");
-
- static const ParserErrorCode VAR_AND_TYPE = const ParserErrorCode(
- 'VAR_AND_TYPE',
- "Variables cannot be declared using both 'var' and a type name; remove the 'var'");
-
- static const ParserErrorCode VAR_AS_TYPE_NAME = const ParserErrorCode(
- 'VAR_AS_TYPE_NAME', "The keyword 'var' cannot be used as a type name");
-
- static const ParserErrorCode VAR_CLASS = const ParserErrorCode(
- 'VAR_CLASS', "Classes cannot be declared to be 'var'");
-
- static const ParserErrorCode VAR_ENUM =
- const ParserErrorCode('VAR_ENUM', "Enums cannot be declared to be 'var'");
-
- static const ParserErrorCode VAR_RETURN_TYPE = const ParserErrorCode(
- 'VAR_RETURN_TYPE', "The return type cannot be 'var'");
-
- static const ParserErrorCode VAR_TYPEDEF = const ParserErrorCode(
- 'VAR_TYPEDEF', "Type aliases cannot be declared to be 'var'");
-
- static const ParserErrorCode VOID_PARAMETER = const ParserErrorCode(
- 'VOID_PARAMETER', "Parameters cannot have a type of 'void'");
-
- static const ParserErrorCode VOID_VARIABLE = const ParserErrorCode(
- 'VOID_VARIABLE', "Variables cannot have a type of 'void'");
+ Statement _parseFunctionDeclarationStatementAfterReturnType(
+ CommentAndMetadata commentAndMetadata, TypeName returnType) {
+ FunctionDeclaration declaration =
+ parseFunctionDeclaration(commentAndMetadata, null, returnType);
+ Token propertyKeyword = declaration.propertyKeyword;
+ if (propertyKeyword != null) {
+ if (propertyKeyword.keyword == Keyword.GET) {
+ _reportErrorForToken(
+ ParserErrorCode.GETTER_IN_FUNCTION, propertyKeyword);
+ } else {
+ _reportErrorForToken(
+ ParserErrorCode.SETTER_IN_FUNCTION, propertyKeyword);
+ }
+ }
+ return new FunctionDeclarationStatement(declaration);
+ }
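
A local function declaration statement of the kind parsed here, together with the getter form that would be reported, might look like (hypothetical):

    void outer() {
      int twice(int value) => value * 2;   // ordinary local function
      // A local getter such as `int get count => 0;` would be parsed but
      // reported as GETTER_IN_FUNCTION.
      print(twice(21));
    }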
/**
- * Initialize a newly created error code to have the given [name]. The message
- * associated with the error will be created from the given [message]
- * template. The correction associated with the error will be created from the
- * given [correction] template.
+ * Parse a function type alias. The [commentAndMetadata] is the metadata to be
+ * associated with the member. The [keyword] is the token representing the
+ * 'typedef' keyword. Return the function type alias that was parsed.
+ *
+ * functionTypeAlias ::=
+ * functionPrefix typeParameterList? formalParameterList ';'
+ *
+ * functionPrefix ::=
+ * returnType? name
*/
- const ParserErrorCode(String name, String message, [String correction])
- : super(name, message, correction);
+ FunctionTypeAlias _parseFunctionTypeAlias(
+ CommentAndMetadata commentAndMetadata, Token keyword) {
+ TypeName returnType = null;
+ if (hasReturnTypeInTypeAlias) {
+ returnType = parseReturnType();
+ }
+ SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
+ TypeParameterList typeParameters = null;
+ if (_matches(TokenType.LT)) {
+ typeParameters = parseTypeParameterList();
+ }
+ TokenType type = _currentToken.type;
+ if (type == TokenType.SEMICOLON || type == TokenType.EOF) {
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
+ FormalParameterList parameters = new FormalParameterList(
+ _createSyntheticToken(TokenType.OPEN_PAREN),
+ null,
+ null,
+ null,
+ _createSyntheticToken(TokenType.CLOSE_PAREN));
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new FunctionTypeAlias(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ keyword,
+ returnType,
+ name,
+ typeParameters,
+ parameters,
+ semicolon);
+ } else if (type == TokenType.OPEN_PAREN) {
+ FormalParameterList parameters = _parseFormalParameterListUnchecked();
+ _validateFormalParameterList(parameters);
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new FunctionTypeAlias(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ keyword,
+ returnType,
+ name,
+ typeParameters,
+ parameters,
+ semicolon);
+ } else {
+ _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
+ // Recovery: At the very least we should skip to the start of the next
+ // valid compilation unit member, allowing for the possibility of finding
+ // the typedef parameters before that point.
+ return new FunctionTypeAlias(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ keyword,
+ returnType,
+ name,
+ typeParameters,
+ new FormalParameterList(_createSyntheticToken(TokenType.OPEN_PAREN),
+ null, null, null, _createSyntheticToken(TokenType.CLOSE_PAREN)),
+ _createSyntheticToken(TokenType.SEMICOLON));
+ }
+ }
- @override
- ErrorSeverity get errorSeverity => ErrorSeverity.ERROR;
+ /**
+ * Parses generic type parameters from a comment.
+ *
+ * Normally this is handled by [_parseGenericMethodTypeParameters], but if the
+ * code already handles the normal generic type parameters, the comment
+ * matcher can be called directly. For example, we may have already tried
+ * matching `<` (less than sign) in a method declaration, and be currently
+ * on the `(` (open paren) because we didn't find it. In that case, this
+ * function will parse the preceding comment such as `/*<T, R>*/`.
+ */
+ TypeParameterList _parseGenericCommentTypeParameters() {
+ if (_injectGenericCommentTypeList()) {
+ return parseTypeParameterList();
+ }
+ return null;
+ }
- @override
- ErrorType get type => ErrorType.SYNTACTIC_ERROR;
-}
+ /**
+ * Parse the generic method or function's type parameters.
+ *
+ * For backwards compatibility this can optionally use comments.
+ * See [parseGenericMethodComments].
+ */
+ TypeParameterList _parseGenericMethodTypeParameters() {
+ if (parseGenericMethods && _matches(TokenType.LT) ||
+ _injectGenericCommentTypeList()) {
+ return parseTypeParameterList();
+ }
+ return null;
+ }
-/**
- * An object that copies resolution information from one AST structure to
- * another as long as the structures of the corresponding children of a pair of
- * nodes are the same.
- */
-class ResolutionCopier implements AstVisitor<bool> {
/**
- * The AST node with which the node being visited is to be compared. This is
- * only valid at the beginning of each visit method (until [isEqualNodes] is
- * invoked).
+ * Parse a library name. The [missingNameError] is the error code to be used
+ * if the library name is missing. The [missingNameToken] is the token
+ * associated with the error produced if the library name is missing. Return
+ * the library name that was parsed.
+ *
+ * libraryName ::=
+ * libraryIdentifier
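+   *
+   * For example (illustrative only), the name parsed here is a dotted
+   * identifier such as `my.package.utils`, as in `part of my.package.utils;`.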
*/
- AstNode _toNode;
+ LibraryIdentifier _parseLibraryName(
+ ParserErrorCode missingNameError, Token missingNameToken) {
+ if (_matchesIdentifier()) {
+ return parseLibraryIdentifier();
+ } else if (_matches(TokenType.STRING)) {
+ // Recovery: This should be extended to handle arbitrary tokens until we
+ // can find a token that can start a compilation unit member.
+ StringLiteral string = parseStringLiteral();
+ _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string);
+ } else {
+ _reportErrorForToken(missingNameError, missingNameToken);
+ }
+ return new LibraryIdentifier(
+ <SimpleIdentifier>[createSyntheticIdentifier()]);
+ }
- @override
- bool visitAdjacentStrings(AdjacentStrings node) {
- AdjacentStrings toNode = this._toNode as AdjacentStrings;
- return _isEqualNodeLists(node.strings, toNode.strings);
+ /**
+ * Parse a method declaration. The [commentAndMetadata] is the documentation
+ * comment and metadata to be associated with the declaration. The
+ * [externalKeyword] is the 'external' token. The [staticKeyword] is the
+   * static keyword, or `null` if the method is not static. The [returnType] is
+   * the return type of the method. The [name] is the name of the method. The
+   * [parameters] are the parameters to the method. Return the method declaration
+ * that was parsed.
+ *
+ * functionDeclaration ::=
+ * ('external' 'static'?)? functionSignature functionBody
+ * | 'external'? functionSignature ';'
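+   *
+   * For instance (hypothetical examples), this production covers class members
+   * such as:
+   *
+   *     static int max(int a, int b) => a < b ? b : a;
+   *     external void log(String message);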
+ */
+ MethodDeclaration _parseMethodDeclarationAfterParameters(
+ CommentAndMetadata commentAndMetadata,
+ Token externalKeyword,
+ Token staticKeyword,
+ TypeName returnType,
+ SimpleIdentifier name,
+ TypeParameterList typeParameters,
+ FormalParameterList parameters) {
+ FunctionBody body = parseFunctionBody(
+ externalKeyword != null || staticKeyword == null,
+ ParserErrorCode.MISSING_FUNCTION_BODY,
+ false);
+ if (externalKeyword != null) {
+ if (body is! EmptyFunctionBody) {
+ _reportErrorForNode(ParserErrorCode.EXTERNAL_METHOD_WITH_BODY, body);
+ }
+ } else if (staticKeyword != null) {
+ if (body is EmptyFunctionBody && _parseFunctionBodies) {
+ _reportErrorForNode(ParserErrorCode.ABSTRACT_STATIC_METHOD, body);
+ }
+ }
+ return new MethodDeclaration(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ externalKeyword,
+ staticKeyword,
+ returnType,
+ null,
+ null,
+ name,
+ typeParameters,
+ parameters,
+ body);
}
- @override
- bool visitAnnotation(Annotation node) {
- Annotation toNode = this._toNode as Annotation;
- if (_and(
- _isEqualTokens(node.atSign, toNode.atSign),
- _isEqualNodes(node.name, toNode.name),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.constructorName, toNode.constructorName),
- _isEqualNodes(node.arguments, toNode.arguments))) {
- toNode.element = node.element;
- return true;
+ /**
+ * Parse a method declaration. The [commentAndMetadata] is the documentation
+ * comment and metadata to be associated with the declaration. The
+ * [externalKeyword] is the 'external' token. The [staticKeyword] is the
+   * static keyword, or `null` if the method is not static. The [returnType] is
+ * the return type of the method. Return the method declaration that was
+ * parsed.
+ *
+ * functionDeclaration ::=
+ * 'external'? 'static'? functionSignature functionBody
+ * | 'external'? functionSignature ';'
+ */
+ MethodDeclaration _parseMethodDeclarationAfterReturnType(
+ CommentAndMetadata commentAndMetadata,
+ Token externalKeyword,
+ Token staticKeyword,
+ TypeName returnType) {
+ SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true);
+ TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
+ FormalParameterList parameters;
+ TokenType type = _currentToken.type;
+ // TODO(brianwilkerson) Figure out why we care what the current token is if
+ // it isn't a paren.
+ if (type != TokenType.OPEN_PAREN &&
+ (type == TokenType.OPEN_CURLY_BRACKET || type == TokenType.FUNCTION)) {
+ _reportErrorForToken(
+ ParserErrorCode.MISSING_METHOD_PARAMETERS, _currentToken.previous);
+ parameters = new FormalParameterList(
+ _createSyntheticToken(TokenType.OPEN_PAREN),
+ null,
+ null,
+ null,
+ _createSyntheticToken(TokenType.CLOSE_PAREN));
+ } else {
+ parameters = parseFormalParameterList();
}
- return false;
+ _validateFormalParameterList(parameters);
+ return _parseMethodDeclarationAfterParameters(
+ commentAndMetadata,
+ externalKeyword,
+ staticKeyword,
+ returnType,
+ methodName,
+ typeParameters,
+ parameters);
}
- @override
- bool visitArgumentList(ArgumentList node) {
- ArgumentList toNode = this._toNode as ArgumentList;
- return _and(
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodeLists(node.arguments, toNode.arguments),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis));
+ /**
+ * Parse a class native clause. Return the native clause that was parsed.
+ *
+ * This method assumes that the current token matches `_NATIVE`.
+ *
+ * classNativeClause ::=
+ * 'native' name
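+   *
+   * As an illustration (not from the analyzer sources), the clause parsed here
+   * is the trailing `native "A"` in a declaration such as:
+   *
+   *     class A native "A" {}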
+ */
+ NativeClause _parseNativeClause() {
+ Token keyword = getAndAdvance();
+ StringLiteral name = parseStringLiteral();
+ return new NativeClause(keyword, name);
}
- @override
- bool visitAsExpression(AsExpression node) {
- AsExpression toNode = this._toNode as AsExpression;
- if (_and(
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.asOperator, toNode.asOperator),
- _isEqualNodes(node.type, toNode.type))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Parse an operator declaration starting after the 'operator' keyword. The
+ * [commentAndMetadata] is the documentation comment and metadata to be
+ * associated with the declaration. The [externalKeyword] is the 'external'
+ * token. The [returnType] is the return type that has already been parsed, or
+ * `null` if there was no return type. The [operatorKeyword] is the 'operator'
+ * keyword. Return the operator declaration that was parsed.
+ *
+ * operatorDeclaration ::=
+ * operatorSignature (';' | functionBody)
+ *
+ * operatorSignature ::=
+ * 'external'? returnType? 'operator' operator formalParameterList
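+   *
+   * For example (illustrative only, assuming a user-defined `Vector` class),
+   * this production matches a declaration such as:
+   *
+   *     Vector operator +(Vector other) =>
+   *         new Vector(x + other.x, y + other.y);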
+ */
+ MethodDeclaration _parseOperatorAfterKeyword(
+ CommentAndMetadata commentAndMetadata,
+ Token externalKeyword,
+ TypeName returnType,
+ Token operatorKeyword) {
+ if (!_currentToken.isUserDefinableOperator) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.NON_USER_DEFINABLE_OPERATOR, [_currentToken.lexeme]);
+ }
+ SimpleIdentifier name =
+ new SimpleIdentifier(getAndAdvance(), isDeclaration: true);
+ if (_matches(TokenType.EQ)) {
+ Token previous = _currentToken.previous;
+ if ((_tokenMatches(previous, TokenType.EQ_EQ) ||
+ _tokenMatches(previous, TokenType.BANG_EQ)) &&
+ _currentToken.offset == previous.offset + 2) {
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_OPERATOR,
+ ["${previous.lexeme}${_currentToken.lexeme}"]);
+ _advance();
+ }
+ }
+ FormalParameterList parameters = parseFormalParameterList();
+ _validateFormalParameterList(parameters);
+ FunctionBody body =
+ parseFunctionBody(true, ParserErrorCode.MISSING_FUNCTION_BODY, false);
+ if (externalKeyword != null && body is! EmptyFunctionBody) {
+ _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_OPERATOR_WITH_BODY);
+ }
+ return new MethodDeclaration(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ externalKeyword,
+ null,
+ returnType,
+ null,
+ operatorKeyword,
+ name,
+ null,
+ parameters,
+ body);
+ }
+
+ /**
+ * Parse a return type if one is given, otherwise return `null` without
+ * advancing. Return the return type that was parsed.
+ */
+ TypeName _parseOptionalReturnType() {
+ TypeName typeComment = _parseOptionalTypeNameComment();
+ if (typeComment != null) {
+ return typeComment;
+ }
+ Keyword keyword = _currentToken.keyword;
+ if (keyword == Keyword.VOID) {
+ return new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+ } else if (_matchesIdentifier()) {
+ Token next = _peek();
+ if (keyword != Keyword.GET &&
+ keyword != Keyword.SET &&
+ keyword != Keyword.OPERATOR &&
+ (_tokenMatchesIdentifier(next) ||
+ _tokenMatches(next, TokenType.LT))) {
+ return parseReturnType();
+ }
+ Token next2 = next.next;
+ Token next3 = next2.next;
+ if (_tokenMatches(next, TokenType.PERIOD) &&
+ _tokenMatchesIdentifier(next2) &&
+ (_tokenMatchesIdentifier(next3) ||
+ _tokenMatches(next3, TokenType.LT))) {
+ return parseReturnType();
+ }
}
- return false;
+ return null;
}
- @override
- bool visitAssertStatement(AssertStatement node) {
- AssertStatement toNode = this._toNode as AssertStatement;
- return _and(
- _isEqualTokens(node.assertKeyword, toNode.assertKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.condition, toNode.condition),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualTokens(node.semicolon, toNode.semicolon));
+ /**
+ * Parse a [TypeArgumentList] if present, otherwise return null.
+ * This also supports the comment form, if enabled: `/*<T>*/`
+ */
+ TypeArgumentList _parseOptionalTypeArguments() {
+ if (_matches(TokenType.LT) || _injectGenericCommentTypeList()) {
+ return parseTypeArgumentList();
+ }
+ return null;
}
- @override
- bool visitAssignmentExpression(AssignmentExpression node) {
- AssignmentExpression toNode = this._toNode as AssignmentExpression;
- if (_and(
- _isEqualNodes(node.leftHandSide, toNode.leftHandSide),
- _isEqualTokens(node.operator, toNode.operator),
- _isEqualNodes(node.rightHandSide, toNode.rightHandSide))) {
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
+ TypeName _parseOptionalTypeNameComment() {
+ if (_injectGenericCommentTypeAssign()) {
+ return _parseTypeName(false);
}
- return false;
+ return null;
}
- @override
- bool visitAwaitExpression(AwaitExpression node) {
- AwaitExpression toNode = this._toNode as AwaitExpression;
- if (_and(_isEqualTokens(node.awaitKeyword, toNode.awaitKeyword),
- _isEqualNodes(node.expression, toNode.expression))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
- }
- return false;
+ /**
+ * Parse a part directive. The [commentAndMetadata] is the metadata to be
+ * associated with the directive. Return the part or part-of directive that
+ * was parsed.
+ *
+ * This method assumes that the current token matches `Keyword.PART`.
+ *
+ * partDirective ::=
+ * metadata 'part' stringLiteral ';'
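+   *
+   * For example (with a hypothetical file name):
+   *
+   *     part 'src/helpers.dart';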
+ */
+ Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) {
+ Token partKeyword = getAndAdvance();
+ StringLiteral partUri = _parseUri();
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new PartDirective(commentAndMetadata.comment,
+ commentAndMetadata.metadata, partKeyword, partUri, semicolon);
}
- @override
- bool visitBinaryExpression(BinaryExpression node) {
- BinaryExpression toNode = this._toNode as BinaryExpression;
- if (_and(
- _isEqualNodes(node.leftOperand, toNode.leftOperand),
- _isEqualTokens(node.operator, toNode.operator),
- _isEqualNodes(node.rightOperand, toNode.rightOperand))) {
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
- }
- return false;
+ /**
+ * Parse a part-of directive. The [commentAndMetadata] is the metadata to be
+ * associated with the directive. Return the part or part-of directive that
+ * was parsed.
+ *
+ * This method assumes that the current token matches [Keyword.PART] and that
+ * the following token matches the identifier 'of'.
+ *
+ * partOfDirective ::=
+ * metadata 'part' 'of' identifier ';'
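+   *
+   * For example (with a hypothetical library name):
+   *
+   *     part of my.library;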
+ */
+ Directive _parsePartOfDirective(CommentAndMetadata commentAndMetadata) {
+ Token partKeyword = getAndAdvance();
+ Token ofKeyword = getAndAdvance();
+ LibraryIdentifier libraryName = _parseLibraryName(
+ ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword);
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new PartOfDirective(
+ commentAndMetadata.comment,
+ commentAndMetadata.metadata,
+ partKeyword,
+ ofKeyword,
+ libraryName,
+ semicolon);
}
- @override
- bool visitBlock(Block node) {
- Block toNode = this._toNode as Block;
- return _and(
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.statements, toNode.statements),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
+ /**
+ * Parse a prefixed identifier given that the given [qualifier] was already
+ * parsed. Return the prefixed identifier that was parsed.
+ *
+ * prefixedIdentifier ::=
+ * identifier ('.' identifier)?
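+   *
+   * As an illustration (hypothetical identifiers), given that the qualifier
+   * `math` has already been parsed, this method consumes `.max` and produces
+   * the prefixed identifier `math.max`.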
+ */
+ Identifier _parsePrefixedIdentifierAfterIdentifier(
+ SimpleIdentifier qualifier) {
+ if (!_matches(TokenType.PERIOD) || _injectGenericCommentTypeList()) {
+ return qualifier;
+ }
+ Token period = getAndAdvance();
+ SimpleIdentifier qualified = parseSimpleIdentifier();
+ return new PrefixedIdentifier(qualifier, period, qualified);
}
- @override
- bool visitBlockFunctionBody(BlockFunctionBody node) {
- BlockFunctionBody toNode = this._toNode as BlockFunctionBody;
- return _isEqualNodes(node.block, toNode.block);
+ /**
+ * Parse a prefixed identifier. Return the prefixed identifier that was
+ * parsed.
+ *
+ * This method assumes that the current token matches an identifier.
+ *
+ * prefixedIdentifier ::=
+ * identifier ('.' identifier)?
+ */
+ Identifier _parsePrefixedIdentifierUnchecked() {
+ return _parsePrefixedIdentifierAfterIdentifier(
+ _parseSimpleIdentifierUnchecked());
}
- @override
- bool visitBooleanLiteral(BooleanLiteral node) {
- BooleanLiteral toNode = this._toNode as BooleanLiteral;
- if (_and(_isEqualTokens(node.literal, toNode.literal),
- node.value == toNode.value)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Parse a simple identifier. Return the simple identifier that was parsed.
+ *
+ * This method assumes that the current token matches an identifier.
+ *
+ * identifier ::=
+ * IDENTIFIER
+ */
+ SimpleIdentifier _parseSimpleIdentifierUnchecked(
+ {bool isDeclaration: false}) {
+ String lexeme = _currentToken.lexeme;
+ if ((_inAsync || _inGenerator) &&
+ (lexeme == ASYNC || lexeme == _AWAIT || lexeme == _YIELD)) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER);
}
- return false;
+ return new SimpleIdentifier(getAndAdvance(), isDeclaration: isDeclaration);
}
- @override
- bool visitBreakStatement(BreakStatement node) {
- BreakStatement toNode = this._toNode as BreakStatement;
- if (_and(
- _isEqualTokens(node.breakKeyword, toNode.breakKeyword),
- _isEqualNodes(node.label, toNode.label),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- // TODO(paulberry): map node.target to toNode.target.
- return true;
+ /**
+ * Parse a list of statements within a switch statement. Return the statements
+ * that were parsed.
+ *
+ * statements ::=
+ * statement*
+ */
+ List<Statement> _parseStatementList() {
+ List<Statement> statements = <Statement>[];
+ Token statementStart = _currentToken;
+ TokenType type = _currentToken.type;
+ while (type != TokenType.EOF &&
+ type != TokenType.CLOSE_CURLY_BRACKET &&
+ !isSwitchMember()) {
+ statements.add(parseStatement2());
+ if (identical(_currentToken, statementStart)) {
+ _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
+ [_currentToken.lexeme]);
+ _advance();
+ }
+ statementStart = _currentToken;
+ type = _currentToken.type;
}
- return false;
+ return statements;
}
- @override
- bool visitCascadeExpression(CascadeExpression node) {
- CascadeExpression toNode = this._toNode as CascadeExpression;
- if (_and(_isEqualNodes(node.target, toNode.target),
- _isEqualNodeLists(node.cascadeSections, toNode.cascadeSections))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Parse a string literal that contains interpolations. Return the string
+ * literal that was parsed.
+ *
+ * This method assumes that the current token matches either
+ * [TokenType.STRING_INTERPOLATION_EXPRESSION] or
+ * [TokenType.STRING_INTERPOLATION_IDENTIFIER].
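+   *
+   * For example (illustrative only), this method handles a literal such as
+   * `'Hello, $name: ${x + y}'`, alternating interpolation elements with the
+   * surrounding string segments.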
+ */
+ StringInterpolation _parseStringInterpolation(Token string) {
+ List<InterpolationElement> elements = <InterpolationElement>[
+ new InterpolationString(
+ string, computeStringValue(string.lexeme, true, false))
+ ];
+ bool hasMore = true;
+ bool isExpression = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION);
+ while (hasMore) {
+ if (isExpression) {
+ Token openToken = getAndAdvance();
+ bool wasInInitializer = _inInitializer;
+ _inInitializer = false;
+ try {
+ Expression expression = parseExpression2();
+ Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
+ elements.add(
+ new InterpolationExpression(openToken, expression, rightBracket));
+ } finally {
+ _inInitializer = wasInInitializer;
+ }
+ } else {
+ Token openToken = getAndAdvance();
+ Expression expression = null;
+ if (_matchesKeyword(Keyword.THIS)) {
+ expression = new ThisExpression(getAndAdvance());
+ } else {
+ expression = parseSimpleIdentifier();
+ }
+ elements.add(new InterpolationExpression(openToken, expression, null));
+ }
+ if (_matches(TokenType.STRING)) {
+ string = getAndAdvance();
+ isExpression = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION);
+ hasMore =
+ isExpression || _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER);
+ elements.add(new InterpolationString(
+ string, computeStringValue(string.lexeme, false, !hasMore)));
+ } else {
+ hasMore = false;
+ }
}
- return false;
+ return new StringInterpolation(elements);
}
- @override
- bool visitCatchClause(CatchClause node) {
- CatchClause toNode = this._toNode as CatchClause;
- return _and(
- _isEqualTokens(node.onKeyword, toNode.onKeyword),
- _isEqualNodes(node.exceptionType, toNode.exceptionType),
- _isEqualTokens(node.catchKeyword, toNode.catchKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.exceptionParameter, toNode.exceptionParameter),
- _isEqualTokens(node.comma, toNode.comma),
- _isEqualNodes(node.stackTraceParameter, toNode.stackTraceParameter),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualNodes(node.body, toNode.body));
+ /**
+ * Parse a string literal. Return the string literal that was parsed.
+ *
+ * This method assumes that the current token matches `TokenType.STRING`.
+ *
+ * stringLiteral ::=
+ * MULTI_LINE_STRING+
+ * | SINGLE_LINE_STRING+
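+   *
+   * For example (illustrative only), the adjacent literals `'abc' "def"` are
+   * combined into a single [AdjacentStrings] node.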
+ */
+ StringLiteral _parseStringLiteralUnchecked() {
+ List<StringLiteral> strings = <StringLiteral>[];
+ do {
+ Token string = getAndAdvance();
+ if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
+ _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) {
+ strings.add(_parseStringInterpolation(string));
+ } else {
+ strings.add(new SimpleStringLiteral(
+ string, computeStringValue(string.lexeme, true, true)));
+ }
+ } while (_matches(TokenType.STRING));
+ return strings.length == 1 ? strings[0] : new AdjacentStrings(strings);
}
- @override
- bool visitClassDeclaration(ClassDeclaration node) {
- ClassDeclaration toNode = this._toNode as ClassDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.abstractKeyword, toNode.abstractKeyword),
- _isEqualTokens(node.classKeyword, toNode.classKeyword),
- _isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.typeParameters, toNode.typeParameters),
- _isEqualNodes(node.extendsClause, toNode.extendsClause),
- _isEqualNodes(node.withClause, toNode.withClause),
- _isEqualNodes(node.implementsClause, toNode.implementsClause),
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.members, toNode.members),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
+ TypeName _parseTypeName(bool inExpression) {
+ Identifier typeName;
+ if (_matchesIdentifier()) {
+ typeName = _parsePrefixedIdentifierUnchecked();
+ } else if (_matchesKeyword(Keyword.VAR)) {
+ _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME);
+ typeName = new SimpleIdentifier(getAndAdvance());
+ } else {
+ typeName = createSyntheticIdentifier();
+ _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME);
+ }
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
+ Token question = null;
+ if (enableNnbd && _matches(TokenType.QUESTION)) {
+ if (!inExpression || !_isConditionalOperator()) {
+ question = getAndAdvance();
+ }
+ }
+ return new TypeName(typeName, typeArguments, question: question);
}
- @override
- bool visitClassTypeAlias(ClassTypeAlias node) {
- ClassTypeAlias toNode = this._toNode as ClassTypeAlias;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.typedefKeyword, toNode.typedefKeyword),
- _isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.typeParameters, toNode.typeParameters),
- _isEqualTokens(node.equals, toNode.equals),
- _isEqualTokens(node.abstractKeyword, toNode.abstractKeyword),
- _isEqualNodes(node.superclass, toNode.superclass),
- _isEqualNodes(node.withClause, toNode.withClause),
- _isEqualNodes(node.implementsClause, toNode.implementsClause),
- _isEqualTokens(node.semicolon, toNode.semicolon));
+ /**
+ * Parse a type name. Return the type name that was parsed.
+ *
+ * This method assumes that the current token is an identifier.
+ *
+ * type ::=
+ * qualified typeArguments?
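+   *
+   * For example (assuming a hypothetical import prefix named `async`), this
+   * production matches a type such as `async.Future<List<int>>`.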
+ */
+ TypeName _parseTypeNameAfterIdentifier() {
+ Identifier typeName = _parsePrefixedIdentifierUnchecked();
+ TypeArgumentList typeArguments = _parseOptionalTypeArguments();
+ // If this is followed by a generic method type comment, allow the comment
+ // type to replace the real type name.
+ // TODO(jmesserly): this feels like a big hammer. Can we restrict it to
+ // only work inside generic methods?
+ TypeName typeFromComment = _parseOptionalTypeNameComment();
+ return typeFromComment ?? new TypeName(typeName, typeArguments);
}
- @override
- bool visitComment(Comment node) {
- Comment toNode = this._toNode as Comment;
- return _isEqualNodeLists(node.references, toNode.references);
+ /**
+ * Parse a string literal representing a URI. Return the string literal that
+ * was parsed.
+ */
+ StringLiteral _parseUri() {
+ // TODO(brianwilkerson) Should this function also return true for valid
+ // top-level keywords?
+ bool isKeywordAfterUri(Token token) =>
+ token.lexeme == Keyword.AS.syntax ||
+ token.lexeme == _HIDE ||
+ token.lexeme == _SHOW;
+ TokenType type = _currentToken.type;
+ if (type != TokenType.STRING &&
+ type != TokenType.SEMICOLON &&
+ !isKeywordAfterUri(_currentToken)) {
+ // Attempt to recover in the case where the URI was not enclosed in
+ // quotes.
+ Token token = _currentToken;
+ bool isValidInUri(Token token) {
+ TokenType type = token.type;
+ return type == TokenType.COLON ||
+ type == TokenType.SLASH ||
+ type == TokenType.PERIOD ||
+ type == TokenType.PERIOD_PERIOD ||
+ type == TokenType.PERIOD_PERIOD_PERIOD ||
+ type == TokenType.INT ||
+ type == TokenType.DOUBLE;
+ }
+
+ while ((_tokenMatchesIdentifier(token) && !isKeywordAfterUri(token)) ||
+ isValidInUri(token)) {
+ token = token.next;
+ }
+ if (_tokenMatches(token, TokenType.SEMICOLON) ||
+ isKeywordAfterUri(token)) {
+ Token endToken = token.previous;
+ token = _currentToken;
+ int endOffset = token.end;
+ StringBuffer buffer = new StringBuffer();
+ buffer.write(token.lexeme);
+ while (token != endToken) {
+ token = token.next;
+ if (token.offset != endOffset || token.precedingComments != null) {
+ return parseStringLiteral();
+ }
+ buffer.write(token.lexeme);
+ endOffset = token.end;
+ }
+ String value = buffer.toString();
+ Token newToken =
+ new StringToken(TokenType.STRING, "'$value'", _currentToken.offset);
+ _reportErrorForToken(
+ ParserErrorCode.NON_STRING_LITERAL_AS_URI, newToken);
+ _currentToken = endToken.next;
+ return new SimpleStringLiteral(newToken, value);
+ }
+ }
+ return parseStringLiteral();
}
- @override
- bool visitCommentReference(CommentReference node) {
- CommentReference toNode = this._toNode as CommentReference;
- return _and(_isEqualTokens(node.newKeyword, toNode.newKeyword),
- _isEqualNodes(node.identifier, toNode.identifier));
+ /**
+ * Parse a variable declaration statement. The [commentAndMetadata] is the
+ * metadata to be associated with the variable declaration statement, or
+ * `null` if there is no attempt at parsing the comment and metadata. The
+ * [keyword] is the token representing the 'final', 'const' or 'var' keyword,
+ * or `null` if there is no keyword. The [type] is the type of the variables
+ * in the list. Return the variable declaration statement that was parsed.
+ *
+ * variableDeclarationStatement ::=
+ * variableDeclarationList ';'
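+   *
+   * For example (illustrative only), this production matches statements such
+   * as `final int count = 0;` or `var names = <String>[];`.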
+ */
+ VariableDeclarationStatement _parseVariableDeclarationStatementAfterType(
+ CommentAndMetadata commentAndMetadata, Token keyword, TypeName type) {
+ VariableDeclarationList variableList =
+ parseVariableDeclarationListAfterType(
+ commentAndMetadata, keyword, type);
+ Token semicolon = _expect(TokenType.SEMICOLON);
+ return new VariableDeclarationStatement(variableList, semicolon);
}
- @override
- bool visitCompilationUnit(CompilationUnit node) {
- CompilationUnit toNode = this._toNode as CompilationUnit;
- if (_and(
- _isEqualTokens(node.beginToken, toNode.beginToken),
- _isEqualNodes(node.scriptTag, toNode.scriptTag),
- _isEqualNodeLists(node.directives, toNode.directives),
- _isEqualNodeLists(node.declarations, toNode.declarations),
- _isEqualTokens(node.endToken, toNode.endToken))) {
- toNode.element = node.element;
- return true;
- }
- return false;
- }
+ /**
+ * Return the token that is immediately after the current token. This is
+ * equivalent to [_peekAt](1).
+ */
+ Token _peek() => _currentToken.next;
- @override
- bool visitConditionalExpression(ConditionalExpression node) {
- ConditionalExpression toNode = this._toNode as ConditionalExpression;
- if (_and(
- _isEqualNodes(node.condition, toNode.condition),
- _isEqualTokens(node.question, toNode.question),
- _isEqualNodes(node.thenExpression, toNode.thenExpression),
- _isEqualTokens(node.colon, toNode.colon),
- _isEqualNodes(node.elseExpression, toNode.elseExpression))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Return the token that is the given [distance] after the current token,
+ * where the distance is the number of tokens to look ahead. A distance of `0`
+ * is the current token, `1` is the next token, etc.
+ */
+ Token _peekAt(int distance) {
+ Token token = _currentToken;
+ for (int i = 0; i < distance; i++) {
+ token = token.next;
}
- return false;
+ return token;
}
- @override
- bool visitConstructorDeclaration(ConstructorDeclaration node) {
- ConstructorDeclaration toNode = this._toNode as ConstructorDeclaration;
- if (_and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.externalKeyword, toNode.externalKeyword),
- _isEqualTokens(node.constKeyword, toNode.constKeyword),
- _isEqualTokens(node.factoryKeyword, toNode.factoryKeyword),
- _isEqualNodes(node.returnType, toNode.returnType),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.parameters, toNode.parameters),
- _isEqualTokens(node.separator, toNode.separator),
- _isEqualNodeLists(node.initializers, toNode.initializers),
- _isEqualNodes(node.redirectedConstructor, toNode.redirectedConstructor),
- _isEqualNodes(node.body, toNode.body))) {
- toNode.element = node.element;
- return true;
+  /**
+   * Replace the contents of each backquote-delimited inline code span in the
+   * given [comment] with spaces, preserving the backquotes and the overall
+   * length of the comment. Return the resulting comment.
+   */
+  String _removeGitHubInlineCode(String comment) {
+ int index = 0;
+ while (true) {
+ int beginIndex = comment.indexOf('`', index);
+ if (beginIndex == -1) {
+ break;
+ }
+ int endIndex = comment.indexOf('`', beginIndex + 1);
+ if (endIndex == -1) {
+ break;
+ }
+ comment = comment.substring(0, beginIndex + 1) +
+ ' ' * (endIndex - beginIndex - 1) +
+ comment.substring(endIndex);
+ index = endIndex + 1;
}
- return false;
- }
-
- @override
- bool visitConstructorFieldInitializer(ConstructorFieldInitializer node) {
- ConstructorFieldInitializer toNode =
- this._toNode as ConstructorFieldInitializer;
- return _and(
- _isEqualTokens(node.thisKeyword, toNode.thisKeyword),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.fieldName, toNode.fieldName),
- _isEqualTokens(node.equals, toNode.equals),
- _isEqualNodes(node.expression, toNode.expression));
+ return comment;
}
- @override
- bool visitConstructorName(ConstructorName node) {
- ConstructorName toNode = this._toNode as ConstructorName;
- if (_and(
- _isEqualNodes(node.type, toNode.type),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.name, toNode.name))) {
- toNode.staticElement = node.staticElement;
- return true;
+ /**
+ * Report the given [error].
+ */
+ void _reportError(AnalysisError error) {
+ if (_errorListenerLock != 0) {
+ return;
}
- return false;
+ _errorListener.onError(error);
}
- @override
- bool visitContinueStatement(ContinueStatement node) {
- ContinueStatement toNode = this._toNode as ContinueStatement;
- if (_and(
- _isEqualTokens(node.continueKeyword, toNode.continueKeyword),
- _isEqualNodes(node.label, toNode.label),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- // TODO(paulberry): map node.target to toNode.target.
- return true;
- }
- return false;
+ /**
+ * Report an error with the given [errorCode] and [arguments] associated with
+ * the current token.
+ */
+ void _reportErrorForCurrentToken(ParserErrorCode errorCode,
+ [List<Object> arguments]) {
+ _reportErrorForToken(errorCode, _currentToken, arguments);
}
- @override
- bool visitDeclaredIdentifier(DeclaredIdentifier node) {
- DeclaredIdentifier toNode = this._toNode as DeclaredIdentifier;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.type, toNode.type),
- _isEqualNodes(node.identifier, toNode.identifier));
+ /**
+ * Report an error with the given [errorCode] and [arguments] associated with
+ * the given [node].
+ */
+ void _reportErrorForNode(ParserErrorCode errorCode, AstNode node,
+ [List<Object> arguments]) {
+ _reportError(new AnalysisError(
+ _source, node.offset, node.length, errorCode, arguments));
}
- @override
- bool visitDefaultFormalParameter(DefaultFormalParameter node) {
- DefaultFormalParameter toNode = this._toNode as DefaultFormalParameter;
- return _and(
- _isEqualNodes(node.parameter, toNode.parameter),
- node.kind == toNode.kind,
- _isEqualTokens(node.separator, toNode.separator),
- _isEqualNodes(node.defaultValue, toNode.defaultValue));
+ /**
+ * Report an error with the given [errorCode] and [arguments] associated with
+ * the given [token].
+ */
+ void _reportErrorForToken(ErrorCode errorCode, Token token,
+ [List<Object> arguments]) {
+ if (token.type == TokenType.EOF) {
+ token = token.previous;
+ }
+ _reportError(new AnalysisError(_source, token.offset,
+ math.max(token.length, 1), errorCode, arguments));
}
- @override
- bool visitDoStatement(DoStatement node) {
- DoStatement toNode = this._toNode as DoStatement;
- return _and(
- _isEqualTokens(node.doKeyword, toNode.doKeyword),
- _isEqualNodes(node.body, toNode.body),
- _isEqualTokens(node.whileKeyword, toNode.whileKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.condition, toNode.condition),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualTokens(node.semicolon, toNode.semicolon));
+ /**
+   * Scan the given [code] as a generic method comment, starting at the given
+   * [offset], and return the first token of the resulting token stream, or
+   * `null` if scanning produced any errors.
+ */
+ Token _scanGenericMethodComment(String code, int offset) {
+ BooleanErrorListener listener = new BooleanErrorListener();
+ Scanner scanner =
+ new Scanner(null, new SubSequenceReader(code, offset), listener);
+ scanner.setSourceStart(1, 1);
+ Token firstToken = scanner.tokenize();
+ if (listener.errorReported) {
+ return null;
+ }
+ return firstToken;
}
- @override
- bool visitDoubleLiteral(DoubleLiteral node) {
- DoubleLiteral toNode = this._toNode as DoubleLiteral;
- if (_and(_isEqualTokens(node.literal, toNode.literal),
- node.value == toNode.value)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Execute the given [parseOperation] in a temporary parser whose current
+ * token has been set to the given [startToken]. If the parse does not
+ * generate any errors or exceptions, then return the token following the
+ * matching portion of the token stream. Otherwise, return `null`.
+ *
+ * Note: This is an extremely inefficient way of testing whether the tokens in
+ * the token stream match a given production. It should not be used for
+ * production code.
+ */
+ Token _skip(Token startToken, parseOperation(Parser parser)) {
+ BooleanErrorListener listener = new BooleanErrorListener();
+ Parser parser = new Parser(_source, listener);
+ parser._currentToken = _cloneTokens(startToken);
+ parser._enableAssertInitializer = _enableAssertInitializer;
+ parser._enableNnbd = _enableNnbd;
+ parser._inAsync = _inAsync;
+ parser._inGenerator = _inGenerator;
+ parser._inInitializer = _inInitializer;
+ parser._inLoop = _inLoop;
+ parser._inSwitch = _inSwitch;
+ parser._parseFunctionBodies = _parseFunctionBodies;
+ try {
+ parseOperation(parser);
+ } catch (exception) {
+ return null;
}
- return false;
+ if (listener.errorReported) {
+ return null;
+ }
+ return parser._currentToken;
}
- @override
- bool visitEmptyFunctionBody(EmptyFunctionBody node) {
- EmptyFunctionBody toNode = this._toNode as EmptyFunctionBody;
- return _isEqualTokens(node.semicolon, toNode.semicolon);
+ /**
+ * Skips a block with all containing blocks.
+ */
+ void _skipBlock() {
+ Token endToken = (_currentToken as BeginToken).endToken;
+ if (endToken == null) {
+ endToken = _currentToken.next;
+ while (!identical(endToken, _currentToken)) {
+ _currentToken = endToken;
+ endToken = _currentToken.next;
+ }
+ _reportErrorForToken(
+ ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, ["}"]);
+ } else {
+ _currentToken = endToken.next;
+ }
}
- @override
- bool visitEmptyStatement(EmptyStatement node) {
- EmptyStatement toNode = this._toNode as EmptyStatement;
- return _isEqualTokens(node.semicolon, toNode.semicolon);
+ /**
+ * Parse the 'final', 'const', 'var' or type preceding a variable declaration,
+   * starting at the given [startToken], without actually creating a type or
+   * changing the current token. Return the token following the type that was
+   * parsed, or `null` if the given [startToken] is not the first token in a
+   * valid type.
+ *
+ * finalConstVarOrType ::=
+ * | 'final' type?
+ * | 'const' type?
+ * | 'var'
+ * | type
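+   *
+   * As an illustration (hypothetical declaration), given
+   * `final List<int> values = const [];` with [startToken] positioned at
+   * `final`, this method returns the token for `values`, having skipped over
+   * `final List<int>`.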
+ */
+ Token _skipFinalConstVarOrType(Token startToken) {
+ Keyword keyword = startToken.keyword;
+ if (keyword == Keyword.FINAL || keyword == Keyword.CONST) {
+ Token next = startToken.next;
+ if (_tokenMatchesIdentifier(next)) {
+ Token next2 = next.next;
+ // "Type parameter" or "Type<" or "prefix.Type"
+ if (_tokenMatchesIdentifier(next2) ||
+ _tokenMatches(next2, TokenType.LT) ||
+ _tokenMatches(next2, TokenType.PERIOD)) {
+ return skipTypeName(next);
+ }
+ // "parameter"
+ return next;
+ }
+ } else if (keyword == Keyword.VAR) {
+ return startToken.next;
+ } else if (_tokenMatchesIdentifier(startToken)) {
+ Token next = startToken.next;
+ if (_tokenMatchesIdentifier(next) ||
+ _tokenMatches(next, TokenType.LT) ||
+ _tokenMatchesKeyword(next, Keyword.THIS) ||
+ (_tokenMatches(next, TokenType.PERIOD) &&
+ _tokenMatchesIdentifier(next.next) &&
+ (_tokenMatchesIdentifier(next.next.next) ||
+ _tokenMatches(next.next.next, TokenType.LT) ||
+ _tokenMatchesKeyword(next.next.next, Keyword.THIS)))) {
+ return skipReturnType(startToken);
+ }
+ }
+ return null;
}
- @override
- bool visitEnumConstantDeclaration(EnumConstantDeclaration node) {
- EnumConstantDeclaration toNode = this._toNode as EnumConstantDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualNodes(node.name, toNode.name));
+ /**
+ * Parse a list of formal parameters, starting at the [startToken], without
+ * actually creating a formal parameter list or changing the current token.
+ * Return the token following the formal parameter list that was parsed, or
+ * `null` if the given token is not the first token in a valid list of formal
+   * parameters.
+ *
+ * Note that unlike other skip methods, this method uses a heuristic. In the
+ * worst case, the parameters could be prefixed by metadata, which would
+ * require us to be able to skip arbitrary expressions. Rather than duplicate
+   * the logic of most of the parse methods, we simply look for something that is
+ * likely to be a list of parameters and then skip to returning the token
+ * after the closing parenthesis.
+ *
+ * This method must be kept in sync with [parseFormalParameterList].
+ *
+ * formalParameterList ::=
+ * '(' ')'
+ * | '(' normalFormalParameters (',' optionalFormalParameters)? ')'
+ * | '(' optionalFormalParameters ')'
+ *
+ * normalFormalParameters ::=
+ * normalFormalParameter (',' normalFormalParameter)*
+ *
+ * optionalFormalParameters ::=
+ * optionalPositionalFormalParameters
+ * | namedFormalParameters
+ *
+ * optionalPositionalFormalParameters ::=
+ * '[' defaultFormalParameter (',' defaultFormalParameter)* ']'
+ *
+ * namedFormalParameters ::=
+ * '{' defaultNamedParameter (',' defaultNamedParameter)* '}'
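+   *
+   * For example (illustrative only), this method skips over a list such as
+   * `(int a, [int b = 0], {bool verbose: false})` and returns the token
+   * following the closing parenthesis.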
+ */
+ Token _skipFormalParameterList(Token startToken) {
+ if (!_tokenMatches(startToken, TokenType.OPEN_PAREN)) {
+ return null;
+ }
+ Token next = startToken.next;
+ if (_tokenMatches(next, TokenType.CLOSE_PAREN)) {
+ return next.next;
+ }
+ //
+ // Look to see whether the token after the open parenthesis is something
+ // that should only occur at the beginning of a parameter list.
+ //
+ if (next.matchesAny(const <TokenType>[
+ TokenType.AT,
+ TokenType.OPEN_SQUARE_BRACKET,
+ TokenType.OPEN_CURLY_BRACKET
+ ]) ||
+ _tokenMatchesKeyword(next, Keyword.VOID) ||
+ (_tokenMatchesIdentifier(next) &&
+ (next.next.matchesAny(
+ const <TokenType>[TokenType.COMMA, TokenType.CLOSE_PAREN])))) {
+ return _skipPastMatchingToken(startToken);
+ }
+ //
+ // Look to see whether the first parameter is a function typed parameter
+ // without a return type.
+ //
+ if (_tokenMatchesIdentifier(next) &&
+ _tokenMatches(next.next, TokenType.OPEN_PAREN)) {
+ Token afterParameters = _skipFormalParameterList(next.next);
+ if (afterParameters != null &&
+ afterParameters.matchesAny(
+ const <TokenType>[TokenType.COMMA, TokenType.CLOSE_PAREN])) {
+ return _skipPastMatchingToken(startToken);
+ }
+ }
+ //
+ // Look to see whether the first parameter has a type or is a function typed
+ // parameter with a return type.
+ //
+ Token afterType = _skipFinalConstVarOrType(next);
+ if (afterType == null) {
+ return null;
+ }
+ if (skipSimpleIdentifier(afterType) == null) {
+ return null;
+ }
+ return _skipPastMatchingToken(startToken);
}
- @override
- bool visitEnumDeclaration(EnumDeclaration node) {
- EnumDeclaration toNode = this._toNode as EnumDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.enumKeyword, toNode.enumKeyword),
- _isEqualNodes(node.name, toNode.name),
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.constants, toNode.constants),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
+ /**
+ * If the [startToken] is a begin token with an associated end token, then
+ * return the token following the end token. Otherwise, return `null`.
+ */
+ Token _skipPastMatchingToken(Token startToken) {
+ if (startToken is! BeginToken) {
+ return null;
+ }
+ Token closeParen = (startToken as BeginToken).endToken;
+ if (closeParen == null) {
+ return null;
+ }
+ return closeParen.next;
}
- @override
- bool visitExportDirective(ExportDirective node) {
- ExportDirective toNode = this._toNode as ExportDirective;
- if (_and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.uri, toNode.uri),
- _isEqualNodeLists(node.combinators, toNode.combinators),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- toNode.element = node.element;
- return true;
+ /**
+ * Parse a string literal that contains interpolations, starting at the
+ * [startToken], without actually creating a string literal or changing the
+ * current token. Return the token following the string literal that was
+ * parsed, or `null` if the given token is not the first token in a valid
+ * string literal.
+ *
+ * This method must be kept in sync with [parseStringInterpolation].
+ */
+ Token _skipStringInterpolation(Token startToken) {
+ Token token = startToken;
+ TokenType type = token.type;
+ while (type == TokenType.STRING_INTERPOLATION_EXPRESSION ||
+ type == TokenType.STRING_INTERPOLATION_IDENTIFIER) {
+ if (type == TokenType.STRING_INTERPOLATION_EXPRESSION) {
+ token = token.next;
+ type = token.type;
+ //
+ // Rather than verify that the following tokens represent a valid
+ // expression, we simply skip tokens until we reach the end of the
+ // interpolation, being careful to handle nested string literals.
+ //
+ int bracketNestingLevel = 1;
+ while (bracketNestingLevel > 0) {
+ if (type == TokenType.EOF) {
+ return null;
+ } else if (type == TokenType.OPEN_CURLY_BRACKET) {
+ bracketNestingLevel++;
+ token = token.next;
+ } else if (type == TokenType.CLOSE_CURLY_BRACKET) {
+ bracketNestingLevel--;
+ token = token.next;
+ } else if (type == TokenType.STRING) {
+ token = skipStringLiteral(token);
+ if (token == null) {
+ return null;
+ }
+ } else {
+ token = token.next;
+ }
+ type = token.type;
+ }
+ token = token.next;
+ type = token.type;
+ } else {
+ token = token.next;
+ if (token.type != TokenType.IDENTIFIER) {
+ return null;
+ }
+ token = token.next;
+ }
+ type = token.type;
+ if (type == TokenType.STRING) {
+ token = token.next;
+ type = token.type;
+ }
}
- return false;
- }
-
- @override
- bool visitExpressionFunctionBody(ExpressionFunctionBody node) {
- ExpressionFunctionBody toNode = this._toNode as ExpressionFunctionBody;
- return _and(
- _isEqualTokens(node.functionDefinition, toNode.functionDefinition),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitExpressionStatement(ExpressionStatement node) {
- ExpressionStatement toNode = this._toNode as ExpressionStatement;
- return _and(_isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitExtendsClause(ExtendsClause node) {
- ExtendsClause toNode = this._toNode as ExtendsClause;
- return _and(_isEqualTokens(node.extendsKeyword, toNode.extendsKeyword),
- _isEqualNodes(node.superclass, toNode.superclass));
- }
-
- @override
- bool visitFieldDeclaration(FieldDeclaration node) {
- FieldDeclaration toNode = this._toNode as FieldDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.staticKeyword, toNode.staticKeyword),
- _isEqualNodes(node.fields, toNode.fields),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitFieldFormalParameter(FieldFormalParameter node) {
- FieldFormalParameter toNode = this._toNode as FieldFormalParameter;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.type, toNode.type),
- _isEqualTokens(node.thisKeyword, toNode.thisKeyword),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.identifier, toNode.identifier));
- }
-
- @override
- bool visitForEachStatement(ForEachStatement node) {
- ForEachStatement toNode = this._toNode as ForEachStatement;
- return _and(
- _isEqualTokens(node.forKeyword, toNode.forKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.loopVariable, toNode.loopVariable),
- _isEqualTokens(node.inKeyword, toNode.inKeyword),
- _isEqualNodes(node.iterable, toNode.iterable),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualNodes(node.body, toNode.body));
- }
-
- @override
- bool visitFormalParameterList(FormalParameterList node) {
- FormalParameterList toNode = this._toNode as FormalParameterList;
- return _and(
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodeLists(node.parameters, toNode.parameters),
- _isEqualTokens(node.leftDelimiter, toNode.leftDelimiter),
- _isEqualTokens(node.rightDelimiter, toNode.rightDelimiter),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis));
- }
-
- @override
- bool visitForStatement(ForStatement node) {
- ForStatement toNode = this._toNode as ForStatement;
- return _and(
- _isEqualTokens(node.forKeyword, toNode.forKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.variables, toNode.variables),
- _isEqualNodes(node.initialization, toNode.initialization),
- _isEqualTokens(node.leftSeparator, toNode.leftSeparator),
- _isEqualNodes(node.condition, toNode.condition),
- _isEqualTokens(node.rightSeparator, toNode.rightSeparator),
- _isEqualNodeLists(node.updaters, toNode.updaters),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualNodes(node.body, toNode.body));
- }
-
- @override
- bool visitFunctionDeclaration(FunctionDeclaration node) {
- FunctionDeclaration toNode = this._toNode as FunctionDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.externalKeyword, toNode.externalKeyword),
- _isEqualNodes(node.returnType, toNode.returnType),
- _isEqualTokens(node.propertyKeyword, toNode.propertyKeyword),
- _isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.functionExpression, toNode.functionExpression));
- }
-
- @override
- bool visitFunctionDeclarationStatement(FunctionDeclarationStatement node) {
- FunctionDeclarationStatement toNode =
- this._toNode as FunctionDeclarationStatement;
- return _isEqualNodes(node.functionDeclaration, toNode.functionDeclaration);
+ return token;
}
- @override
- bool visitFunctionExpression(FunctionExpression node) {
- FunctionExpression toNode = this._toNode as FunctionExpression;
- if (_and(_isEqualNodes(node.parameters, toNode.parameters),
- _isEqualNodes(node.body, toNode.body))) {
- toNode.element = node.element;
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Parse a list of type parameters, starting at the [startToken], without
+ * actually creating a type parameter list or changing the current token.
+ * Return the token following the type parameter list that was parsed, or
+ * `null` if the given token is not the first token in a valid type parameter
+ * list.
+ *
+ * This method must be kept in sync with [parseTypeParameterList].
+ *
+ * typeParameterList ::=
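+   *
+   * For example (illustrative only), this method skips over a list such as
+   * `<K, V extends Comparable<V>>`.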
+ * '<' typeParameter (',' typeParameter)* '>'
+ */
+ Token _skipTypeParameterList(Token startToken) {
+ if (!_tokenMatches(startToken, TokenType.LT)) {
+ return null;
}
- return false;
- }
-
- @override
- bool visitFunctionExpressionInvocation(FunctionExpressionInvocation node) {
- FunctionExpressionInvocation toNode =
- this._toNode as FunctionExpressionInvocation;
- if (_and(_isEqualNodes(node.function, toNode.function),
- _isEqualNodes(node.argumentList, toNode.argumentList))) {
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
+ //
+    // We can't skip a type parameter because it can be preceded by metadata,
+ // so we just assume that everything before the matching end token is valid.
+ //
+ int depth = 1;
+ Token next = startToken.next;
+ while (depth > 0) {
+ if (_tokenMatches(next, TokenType.EOF)) {
+ return null;
+ } else if (_tokenMatches(next, TokenType.LT)) {
+ depth++;
+ } else if (_tokenMatches(next, TokenType.GT)) {
+ depth--;
+ } else if (_tokenMatches(next, TokenType.GT_EQ)) {
+ if (depth == 1) {
+ Token fakeEquals = new Token(TokenType.EQ, next.offset + 2);
+ fakeEquals.setNextWithoutSettingPrevious(next.next);
+ return fakeEquals;
+ }
+ depth--;
+ } else if (_tokenMatches(next, TokenType.GT_GT)) {
+ depth -= 2;
+ } else if (_tokenMatches(next, TokenType.GT_GT_EQ)) {
+ if (depth < 2) {
+ return null;
+ } else if (depth == 2) {
+ Token fakeEquals = new Token(TokenType.EQ, next.offset + 2);
+ fakeEquals.setNextWithoutSettingPrevious(next.next);
+ return fakeEquals;
+ }
+ depth -= 2;
+ }
+ next = next.next;
}
- return false;
+ return next;
}
- @override
- bool visitFunctionTypeAlias(FunctionTypeAlias node) {
- FunctionTypeAlias toNode = this._toNode as FunctionTypeAlias;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.typedefKeyword, toNode.typedefKeyword),
- _isEqualNodes(node.returnType, toNode.returnType),
- _isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.typeParameters, toNode.typeParameters),
- _isEqualNodes(node.parameters, toNode.parameters),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
+ /**
+ * Return `true` if the given [token] has the given [type].
+ */
+ bool _tokenMatches(Token token, TokenType type) => token.type == type;
- @override
- bool visitFunctionTypedFormalParameter(FunctionTypedFormalParameter node) {
- FunctionTypedFormalParameter toNode =
- this._toNode as FunctionTypedFormalParameter;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualNodes(node.returnType, toNode.returnType),
- _isEqualNodes(node.identifier, toNode.identifier),
- _isEqualNodes(node.parameters, toNode.parameters));
- }
+ /**
+ * Return `true` if the given [token] is a valid identifier. Valid identifiers
+ * include built-in identifiers (pseudo-keywords).
+ */
+ bool _tokenMatchesIdentifier(Token token) =>
+ _tokenMatches(token, TokenType.IDENTIFIER) ||
+ _tokenMatchesPseudoKeyword(token);
- @override
- bool visitHideCombinator(HideCombinator node) {
- HideCombinator toNode = this._toNode as HideCombinator;
- return _and(_isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodeLists(node.hiddenNames, toNode.hiddenNames));
- }
+ /**
+ * Return `true` if the given [token] matches the given [keyword].
+ */
+ bool _tokenMatchesKeyword(Token token, Keyword keyword) =>
+ token.keyword == keyword;
- @override
- bool visitIfStatement(IfStatement node) {
- IfStatement toNode = this._toNode as IfStatement;
- return _and(
- _isEqualTokens(node.ifKeyword, toNode.ifKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.condition, toNode.condition),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualNodes(node.thenStatement, toNode.thenStatement),
- _isEqualTokens(node.elseKeyword, toNode.elseKeyword),
- _isEqualNodes(node.elseStatement, toNode.elseStatement));
- }
+ /**
+ * Return `true` if the given [token] matches a pseudo keyword.
+ */
+ bool _tokenMatchesPseudoKeyword(Token token) =>
+ token.keyword?.isPseudoKeyword ?? false;
- @override
- bool visitImplementsClause(ImplementsClause node) {
- ImplementsClause toNode = this._toNode as ImplementsClause;
- return _and(
- _isEqualTokens(node.implementsKeyword, toNode.implementsKeyword),
- _isEqualNodeLists(node.interfaces, toNode.interfaces));
- }
+ /**
+ * Return `true` if the given [token] matches the given [identifier].
+ */
+ bool _tokenMatchesString(Token token, String identifier) =>
+ token.type == TokenType.IDENTIFIER && token.lexeme == identifier;
- @override
- bool visitImportDirective(ImportDirective node) {
- ImportDirective toNode = this._toNode as ImportDirective;
- if (_and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.uri, toNode.uri),
- _isEqualTokens(node.asKeyword, toNode.asKeyword),
- _isEqualNodes(node.prefix, toNode.prefix),
- _isEqualNodeLists(node.combinators, toNode.combinators),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- toNode.element = node.element;
- return true;
+ /**
+   * Translate the character (or escape sequence) at the given [index] in the
+   * given [lexeme], appending the translated character to the given [buffer].
+   * Return the index of the next character to be translated. The index is
+   * assumed to be valid.
+ */
+ int _translateCharacter(StringBuffer buffer, String lexeme, int index) {
+ int currentChar = lexeme.codeUnitAt(index);
+ if (currentChar != 0x5C) {
+ buffer.writeCharCode(currentChar);
+ return index + 1;
+ }
+ //
+ // We have found an escape sequence, so we parse the string to determine
+ // what kind of escape sequence it is and what character to add to the buffer.
+ //
+ int length = lexeme.length;
+ int currentIndex = index + 1;
+ if (currentIndex >= length) {
+ // Illegal escape sequence: no char after escape.
+ // This cannot actually happen because it would require the escape
+ // character to be the last character in the string, but if it were it
+ // would escape the closing quote, leaving the string unclosed.
+ // reportError(ParserErrorCode.MISSING_CHAR_IN_ESCAPE_SEQUENCE);
+ return length;
+ }
+ currentChar = lexeme.codeUnitAt(currentIndex);
+ if (currentChar == 0x6E) {
+ buffer.writeCharCode(0xA);
+ // newline
+ } else if (currentChar == 0x72) {
+ buffer.writeCharCode(0xD);
+ // carriage return
+ } else if (currentChar == 0x66) {
+ buffer.writeCharCode(0xC);
+ // form feed
+ } else if (currentChar == 0x62) {
+ buffer.writeCharCode(0x8);
+ // backspace
+ } else if (currentChar == 0x74) {
+ buffer.writeCharCode(0x9);
+ // tab
+ } else if (currentChar == 0x76) {
+ buffer.writeCharCode(0xB);
+ // vertical tab
+ } else if (currentChar == 0x78) {
+ if (currentIndex + 2 >= length) {
+ // Illegal escape sequence: not enough hex digits
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE);
+ return length;
+ }
+ int firstDigit = lexeme.codeUnitAt(currentIndex + 1);
+ int secondDigit = lexeme.codeUnitAt(currentIndex + 2);
+ if (!_isHexDigit(firstDigit) || !_isHexDigit(secondDigit)) {
+ // Illegal escape sequence: invalid hex digit
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE);
+ } else {
+ int charCode = (Character.digit(firstDigit, 16) << 4) +
+ Character.digit(secondDigit, 16);
+ buffer.writeCharCode(charCode);
+ }
+ return currentIndex + 3;
+ } else if (currentChar == 0x75) {
+ currentIndex++;
+ if (currentIndex >= length) {
+ // Illegal escape sequence: not enough hex digits
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ return length;
+ }
+ currentChar = lexeme.codeUnitAt(currentIndex);
+ if (currentChar == 0x7B) {
+ currentIndex++;
+ if (currentIndex >= length) {
+ // Illegal escape sequence: incomplete escape
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ return length;
+ }
+ currentChar = lexeme.codeUnitAt(currentIndex);
+ int digitCount = 0;
+ int value = 0;
+ while (currentChar != 0x7D) {
+ if (!_isHexDigit(currentChar)) {
+ // Illegal escape sequence: invalid hex digit
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ currentIndex++;
+ while (currentIndex < length &&
+ lexeme.codeUnitAt(currentIndex) != 0x7D) {
+ currentIndex++;
+ }
+ return currentIndex + 1;
+ }
+ digitCount++;
+ value = (value << 4) + Character.digit(currentChar, 16);
+ currentIndex++;
+ if (currentIndex >= length) {
+ // Illegal escape sequence: incomplete escape
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ return length;
+ }
+ currentChar = lexeme.codeUnitAt(currentIndex);
+ }
+ if (digitCount < 1 || digitCount > 6) {
+ // Illegal escape sequence: not enough or too many hex digits
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ }
+ _appendCodePoint(buffer, lexeme, value, index, currentIndex);
+ return currentIndex + 1;
+ } else {
+ if (currentIndex + 3 >= length) {
+ // Illegal escape sequence: not enough hex digits
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ return length;
+ }
+ int firstDigit = currentChar;
+ int secondDigit = lexeme.codeUnitAt(currentIndex + 1);
+ int thirdDigit = lexeme.codeUnitAt(currentIndex + 2);
+ int fourthDigit = lexeme.codeUnitAt(currentIndex + 3);
+ if (!_isHexDigit(firstDigit) ||
+ !_isHexDigit(secondDigit) ||
+ !_isHexDigit(thirdDigit) ||
+ !_isHexDigit(fourthDigit)) {
+ // Illegal escape sequence: invalid hex digits
+ _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
+ } else {
+ _appendCodePoint(
+ buffer,
+ lexeme,
+ (((((Character.digit(firstDigit, 16) << 4) +
+ Character.digit(secondDigit, 16)) <<
+ 4) +
+ Character.digit(thirdDigit, 16)) <<
+ 4) +
+ Character.digit(fourthDigit, 16),
+ index,
+ currentIndex + 3);
+ }
+ return currentIndex + 4;
+ }
+ } else {
+ buffer.writeCharCode(currentChar);
}
- return false;
+ return currentIndex + 1;
}
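
For reference, a minimal standalone sketch of the same escape-translation idea, restricted to `\n`, `\t`, and `\xHH` escapes (hypothetical helper name, well-formed input assumed; this is not the parser's actual implementation):

    // Simplified sketch: translate the character or escape sequence starting at
    // [index] in [lexeme], append the result to [buffer], and return the index
    // of the next character to translate. Assumes the input is well formed.
    int translateSimpleEscape(StringBuffer buffer, String lexeme, int index) {
      int current = lexeme.codeUnitAt(index);
      if (current != 0x5C) {
        // Not a backslash, so copy the character through unchanged.
        buffer.writeCharCode(current);
        return index + 1;
      }
      int next = lexeme.codeUnitAt(index + 1);
      if (next == 0x6E) {
        buffer.writeCharCode(0xA); // \n -> newline
        return index + 2;
      } else if (next == 0x74) {
        buffer.writeCharCode(0x9); // \t -> tab
        return index + 2;
      } else if (next == 0x78) {
        // \xHH: the next two characters are hex digits.
        int value = int.parse(lexeme.substring(index + 2, index + 4), radix: 16);
        buffer.writeCharCode(value);
        return index + 4;
      }
      buffer.writeCharCode(next); // unrecognized escape: keep the character
      return index + 2;
    }

    void main() {
      var buffer = new StringBuffer();
      var lexeme = r'a\n\x41';
      int i = 0;
      while (i < lexeme.length) {
        i = translateSimpleEscape(buffer, lexeme, i);
      }
      print(buffer); // "a", a newline, then "A"
    }
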
- @override
- bool visitIndexExpression(IndexExpression node) {
- IndexExpression toNode = this._toNode as IndexExpression;
- if (_and(
- _isEqualNodes(node.target, toNode.target),
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodes(node.index, toNode.index),
- _isEqualTokens(node.rightBracket, toNode.rightBracket))) {
- toNode.auxiliaryElements = node.auxiliaryElements;
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Decrements the error reporting lock level. While the level is greater than
+ * `0`, [reportError] won't report any error.
+ */
+ void _unlockErrorListener() {
+ if (_errorListenerLock == 0) {
+ throw new StateError("Attempt to unlock not locked error listener.");
}
- return false;
+ _errorListenerLock--;
}
- @override
- bool visitInstanceCreationExpression(InstanceCreationExpression node) {
- InstanceCreationExpression toNode =
- this._toNode as InstanceCreationExpression;
- if (_and(
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.constructorName, toNode.constructorName),
- _isEqualNodes(node.argumentList, toNode.argumentList))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Validate that the given [parameterList] does not contain any field
+ * initializers.
+ */
+ void _validateFormalParameterList(FormalParameterList parameterList) {
+ for (FormalParameter parameter in parameterList.parameters) {
+ if (parameter is FieldFormalParameter) {
+ _reportErrorForNode(
+ ParserErrorCode.FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR,
+ parameter.identifier);
+ }
}
- return false;
}
- @override
- bool visitIntegerLiteral(IntegerLiteral node) {
- IntegerLiteral toNode = this._toNode as IntegerLiteral;
- if (_and(_isEqualTokens(node.literal, toNode.literal),
- node.value == toNode.value)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for a class and
+ * return the 'abstract' keyword if there is one.
+ */
+ Token _validateModifiersForClass(Modifiers modifiers) {
+ _validateModifiersForTopLevelDeclaration(modifiers);
+ if (modifiers.constKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword);
}
- return false;
- }
-
- @override
- bool visitInterpolationExpression(InterpolationExpression node) {
- InterpolationExpression toNode = this._toNode as InterpolationExpression;
- return _and(
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
- }
-
- @override
- bool visitInterpolationString(InterpolationString node) {
- InterpolationString toNode = this._toNode as InterpolationString;
- return _and(_isEqualTokens(node.contents, toNode.contents),
- node.value == toNode.value);
- }
-
- @override
- bool visitIsExpression(IsExpression node) {
- IsExpression toNode = this._toNode as IsExpression;
- if (_and(
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.isOperator, toNode.isOperator),
- _isEqualTokens(node.notOperator, toNode.notOperator),
- _isEqualNodes(node.type, toNode.type))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.externalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_CLASS, modifiers.externalKeyword);
}
- return false;
- }
-
- @override
- bool visitLabel(Label node) {
- Label toNode = this._toNode as Label;
- return _and(_isEqualNodes(node.label, toNode.label),
- _isEqualTokens(node.colon, toNode.colon));
- }
-
- @override
- bool visitLabeledStatement(LabeledStatement node) {
- LabeledStatement toNode = this._toNode as LabeledStatement;
- return _and(_isEqualNodeLists(node.labels, toNode.labels),
- _isEqualNodes(node.statement, toNode.statement));
- }
-
- @override
- bool visitLibraryDirective(LibraryDirective node) {
- LibraryDirective toNode = this._toNode as LibraryDirective;
- if (_and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.libraryKeyword, toNode.libraryKeyword),
- _isEqualNodes(node.name, toNode.name),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- toNode.element = node.element;
- return true;
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword);
}
- return false;
- }
-
- @override
- bool visitLibraryIdentifier(LibraryIdentifier node) {
- LibraryIdentifier toNode = this._toNode as LibraryIdentifier;
- if (_isEqualNodeLists(node.components, toNode.components)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.VAR_CLASS, modifiers.varKeyword);
}
- return false;
+ return modifiers.abstractKeyword;
}
- @override
- bool visitListLiteral(ListLiteral node) {
- ListLiteral toNode = this._toNode as ListLiteral;
- if (_and(
- _isEqualTokens(node.constKeyword, toNode.constKeyword),
- _isEqualNodes(node.typeArguments, toNode.typeArguments),
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.elements, toNode.elements),
- _isEqualTokens(node.rightBracket, toNode.rightBracket))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for a constructor
+ * and return the 'const' keyword if there is one.
+ */
+ Token _validateModifiersForConstructor(Modifiers modifiers) {
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.ABSTRACT_CLASS_MEMBER, modifiers.abstractKeyword);
}
- return false;
- }
-
- @override
- bool visitMapLiteral(MapLiteral node) {
- MapLiteral toNode = this._toNode as MapLiteral;
- if (_and(
- _isEqualTokens(node.constKeyword, toNode.constKeyword),
- _isEqualNodes(node.typeArguments, toNode.typeArguments),
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.entries, toNode.entries),
- _isEqualTokens(node.rightBracket, toNode.rightBracket))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.FINAL_CONSTRUCTOR, modifiers.finalKeyword);
}
- return false;
- }
-
- @override
- bool visitMapLiteralEntry(MapLiteralEntry node) {
- MapLiteralEntry toNode = this._toNode as MapLiteralEntry;
- return _and(
- _isEqualNodes(node.key, toNode.key),
- _isEqualTokens(node.separator, toNode.separator),
- _isEqualNodes(node.value, toNode.value));
- }
-
- @override
- bool visitMethodDeclaration(MethodDeclaration node) {
- MethodDeclaration toNode = this._toNode as MethodDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.externalKeyword, toNode.externalKeyword),
- _isEqualTokens(node.modifierKeyword, toNode.modifierKeyword),
- _isEqualNodes(node.returnType, toNode.returnType),
- _isEqualTokens(node.propertyKeyword, toNode.propertyKeyword),
- _isEqualTokens(node.propertyKeyword, toNode.propertyKeyword),
- _isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.parameters, toNode.parameters),
- _isEqualNodes(node.body, toNode.body));
- }
-
- @override
- bool visitMethodInvocation(MethodInvocation node) {
- MethodInvocation toNode = this._toNode as MethodInvocation;
- if (_and(
- _isEqualNodes(node.target, toNode.target),
- _isEqualTokens(node.operator, toNode.operator),
- _isEqualNodes(node.methodName, toNode.methodName),
- _isEqualNodes(node.argumentList, toNode.argumentList))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.staticKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.STATIC_CONSTRUCTOR, modifiers.staticKeyword);
}
- return false;
- }
-
- @override
- bool visitNamedExpression(NamedExpression node) {
- NamedExpression toNode = this._toNode as NamedExpression;
- if (_and(_isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.expression, toNode.expression))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, modifiers.varKeyword);
}
- return false;
- }
-
- @override
- bool visitNativeClause(NativeClause node) {
- NativeClause toNode = this._toNode as NativeClause;
- return _and(_isEqualTokens(node.nativeKeyword, toNode.nativeKeyword),
- _isEqualNodes(node.name, toNode.name));
- }
-
- @override
- bool visitNativeFunctionBody(NativeFunctionBody node) {
- NativeFunctionBody toNode = this._toNode as NativeFunctionBody;
- return _and(
- _isEqualTokens(node.nativeKeyword, toNode.nativeKeyword),
- _isEqualNodes(node.stringLiteral, toNode.stringLiteral),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitNullLiteral(NullLiteral node) {
- NullLiteral toNode = this._toNode as NullLiteral;
- if (_isEqualTokens(node.literal, toNode.literal)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ Token externalKeyword = modifiers.externalKeyword;
+ Token constKeyword = modifiers.constKeyword;
+ Token factoryKeyword = modifiers.factoryKeyword;
+ if (externalKeyword != null &&
+ constKeyword != null &&
+ constKeyword.offset < externalKeyword.offset) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_AFTER_CONST, externalKeyword);
}
- return false;
- }
-
- @override
- bool visitParenthesizedExpression(ParenthesizedExpression node) {
- ParenthesizedExpression toNode = this._toNode as ParenthesizedExpression;
- if (_and(
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (externalKeyword != null &&
+ factoryKeyword != null &&
+ factoryKeyword.offset < externalKeyword.offset) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_AFTER_FACTORY, externalKeyword);
}
- return false;
+ return constKeyword;
}
- @override
- bool visitPartDirective(PartDirective node) {
- PartDirective toNode = this._toNode as PartDirective;
- if (_and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.partKeyword, toNode.partKeyword),
- _isEqualNodes(node.uri, toNode.uri),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- toNode.element = node.element;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for an enum
+ * declaration.
+ */
+ void _validateModifiersForEnum(Modifiers modifiers) {
+ _validateModifiersForTopLevelDeclaration(modifiers);
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.ABSTRACT_ENUM, modifiers.abstractKeyword);
}
- return false;
- }
-
- @override
- bool visitPartOfDirective(PartOfDirective node) {
- PartOfDirective toNode = this._toNode as PartOfDirective;
- if (_and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.partKeyword, toNode.partKeyword),
- _isEqualTokens(node.ofKeyword, toNode.ofKeyword),
- _isEqualNodes(node.libraryName, toNode.libraryName),
- _isEqualTokens(node.semicolon, toNode.semicolon))) {
- toNode.element = node.element;
- return true;
+ if (modifiers.constKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_ENUM, modifiers.constKeyword);
}
- return false;
- }
-
- @override
- bool visitPostfixExpression(PostfixExpression node) {
- PostfixExpression toNode = this._toNode as PostfixExpression;
- if (_and(_isEqualNodes(node.operand, toNode.operand),
- _isEqualTokens(node.operator, toNode.operator))) {
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.externalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_ENUM, modifiers.externalKeyword);
}
- return false;
- }
-
- @override
- bool visitPrefixedIdentifier(PrefixedIdentifier node) {
- PrefixedIdentifier toNode = this._toNode as PrefixedIdentifier;
- if (_and(
- _isEqualNodes(node.prefix, toNode.prefix),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.identifier, toNode.identifier))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.FINAL_ENUM, modifiers.finalKeyword);
+ }
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.VAR_ENUM, modifiers.varKeyword);
}
- return false;
}
- @override
- bool visitPrefixExpression(PrefixExpression node) {
- PrefixExpression toNode = this._toNode as PrefixExpression;
- if (_and(_isEqualTokens(node.operator, toNode.operator),
- _isEqualNodes(node.operand, toNode.operand))) {
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for a field and
+ * return the 'final', 'const' or 'var' keyword if there is one.
+ */
+ Token _validateModifiersForField(Modifiers modifiers) {
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
}
- return false;
+ if (modifiers.externalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword);
+ }
+ if (modifiers.factoryKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
+ }
+ Token staticKeyword = modifiers.staticKeyword;
+ Token constKeyword = modifiers.constKeyword;
+ Token finalKeyword = modifiers.finalKeyword;
+ Token varKeyword = modifiers.varKeyword;
+ if (constKeyword != null) {
+ if (finalKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword);
+ }
+ if (varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword);
+ }
+ if (staticKeyword != null && constKeyword.offset < staticKeyword.offset) {
+ _reportErrorForToken(ParserErrorCode.STATIC_AFTER_CONST, staticKeyword);
+ }
+ } else if (finalKeyword != null) {
+ if (varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword);
+ }
+ if (staticKeyword != null && finalKeyword.offset < staticKeyword.offset) {
+ _reportErrorForToken(ParserErrorCode.STATIC_AFTER_FINAL, staticKeyword);
+ }
+ } else if (varKeyword != null &&
+ staticKeyword != null &&
+ varKeyword.offset < staticKeyword.offset) {
+ _reportErrorForToken(ParserErrorCode.STATIC_AFTER_VAR, staticKeyword);
+ }
+ return Token.lexicallyFirst([constKeyword, finalKeyword, varKeyword]);
}
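
The keyword returned above is whichever of `const`, `final`, or `var` occurs first in the source, and the ordering checks compare raw token offsets. A small self-contained sketch of that selection, using a hypothetical token stand-in rather than the analyzer's `Token` class:

    // Hypothetical stand-in for a scanner token: just a lexeme and an offset.
    class FakeToken {
      final String lexeme;
      final int offset;
      FakeToken(this.lexeme, this.offset);
    }

    // Return the non-null token with the smallest offset, or null if all are null.
    FakeToken lexicallyFirst(List<FakeToken> tokens) {
      FakeToken first;
      for (FakeToken token in tokens) {
        if (token != null && (first == null || token.offset < first.offset)) {
          first = token;
        }
      }
      return first;
    }

    void main() {
      // Tokens as they might appear in `static const int x = 0;`.
      var staticKeyword = new FakeToken('static', 0);
      var constKeyword = new FakeToken('const', 7);
      print(lexicallyFirst([constKeyword, null, null]).lexeme); // const
      // STATIC_AFTER_CONST would only fire if const preceded static:
      print(constKeyword.offset < staticKeyword.offset); // false
    }
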
- @override
- bool visitPropertyAccess(PropertyAccess node) {
- PropertyAccess toNode = this._toNode as PropertyAccess;
- if (_and(
- _isEqualNodes(node.target, toNode.target),
- _isEqualTokens(node.operator, toNode.operator),
- _isEqualNodes(node.propertyName, toNode.propertyName))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for a local
+ * function.
+ */
+ void _validateModifiersForFunctionDeclarationStatement(Modifiers modifiers) {
+ if (modifiers.abstractKeyword != null ||
+ modifiers.constKeyword != null ||
+ modifiers.externalKeyword != null ||
+ modifiers.factoryKeyword != null ||
+ modifiers.finalKeyword != null ||
+ modifiers.staticKeyword != null ||
+ modifiers.varKeyword != null) {
+ _reportErrorForCurrentToken(
+ ParserErrorCode.LOCAL_FUNCTION_DECLARATION_MODIFIER);
}
- return false;
}
- @override
- bool visitRedirectingConstructorInvocation(
- RedirectingConstructorInvocation node) {
- RedirectingConstructorInvocation toNode =
- this._toNode as RedirectingConstructorInvocation;
- if (_and(
- _isEqualTokens(node.thisKeyword, toNode.thisKeyword),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.constructorName, toNode.constructorName),
- _isEqualNodes(node.argumentList, toNode.argumentList))) {
- toNode.staticElement = node.staticElement;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for a getter,
+ * setter, or method.
+ */
+ void _validateModifiersForGetterOrSetterOrMethod(Modifiers modifiers) {
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
}
- return false;
- }
-
- @override
- bool visitRethrowExpression(RethrowExpression node) {
- RethrowExpression toNode = this._toNode as RethrowExpression;
- if (_isEqualTokens(node.rethrowKeyword, toNode.rethrowKeyword)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.constKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.CONST_METHOD, modifiers.constKeyword);
}
- return false;
- }
-
- @override
- bool visitReturnStatement(ReturnStatement node) {
- ReturnStatement toNode = this._toNode as ReturnStatement;
- return _and(
- _isEqualTokens(node.returnKeyword, toNode.returnKeyword),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitScriptTag(ScriptTag node) {
- ScriptTag toNode = this._toNode as ScriptTag;
- return _isEqualTokens(node.scriptTag, toNode.scriptTag);
- }
-
- @override
- bool visitShowCombinator(ShowCombinator node) {
- ShowCombinator toNode = this._toNode as ShowCombinator;
- return _and(_isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodeLists(node.shownNames, toNode.shownNames));
- }
-
- @override
- bool visitSimpleFormalParameter(SimpleFormalParameter node) {
- SimpleFormalParameter toNode = this._toNode as SimpleFormalParameter;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.type, toNode.type),
- _isEqualNodes(node.identifier, toNode.identifier));
- }
-
- @override
- bool visitSimpleIdentifier(SimpleIdentifier node) {
- SimpleIdentifier toNode = this._toNode as SimpleIdentifier;
- if (_isEqualTokens(node.token, toNode.token)) {
- toNode.staticElement = node.staticElement;
- toNode.staticType = node.staticType;
- toNode.propagatedElement = node.propagatedElement;
- toNode.propagatedType = node.propagatedType;
- toNode.auxiliaryElements = node.auxiliaryElements;
- return true;
+ if (modifiers.factoryKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
}
- return false;
- }
-
- @override
- bool visitSimpleStringLiteral(SimpleStringLiteral node) {
- SimpleStringLiteral toNode = this._toNode as SimpleStringLiteral;
- if (_and(_isEqualTokens(node.literal, toNode.literal),
- node.value == toNode.value)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword);
}
- return false;
- }
-
- @override
- bool visitStringInterpolation(StringInterpolation node) {
- StringInterpolation toNode = this._toNode as StringInterpolation;
- if (_isEqualNodeLists(node.elements, toNode.elements)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
}
- return false;
- }
-
- @override
- bool visitSuperConstructorInvocation(SuperConstructorInvocation node) {
- SuperConstructorInvocation toNode =
- this._toNode as SuperConstructorInvocation;
- if (_and(
- _isEqualTokens(node.superKeyword, toNode.superKeyword),
- _isEqualTokens(node.period, toNode.period),
- _isEqualNodes(node.constructorName, toNode.constructorName),
- _isEqualNodes(node.argumentList, toNode.argumentList))) {
- toNode.staticElement = node.staticElement;
- return true;
+ Token externalKeyword = modifiers.externalKeyword;
+ Token staticKeyword = modifiers.staticKeyword;
+ if (externalKeyword != null &&
+ staticKeyword != null &&
+ staticKeyword.offset < externalKeyword.offset) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_AFTER_STATIC, externalKeyword);
}
- return false;
}
- @override
- bool visitSuperExpression(SuperExpression node) {
- SuperExpression toNode = this._toNode as SuperExpression;
- if (_isEqualTokens(node.superKeyword, toNode.superKeyword)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for an operator
+ * declaration.
+ */
+ void _validateModifiersForOperator(Modifiers modifiers) {
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
}
- return false;
- }
-
- @override
- bool visitSwitchCase(SwitchCase node) {
- SwitchCase toNode = this._toNode as SwitchCase;
- return _and(
- _isEqualNodeLists(node.labels, toNode.labels),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.colon, toNode.colon),
- _isEqualNodeLists(node.statements, toNode.statements));
- }
-
- @override
- bool visitSwitchDefault(SwitchDefault node) {
- SwitchDefault toNode = this._toNode as SwitchDefault;
- return _and(
- _isEqualNodeLists(node.labels, toNode.labels),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualTokens(node.colon, toNode.colon),
- _isEqualNodeLists(node.statements, toNode.statements));
- }
-
- @override
- bool visitSwitchStatement(SwitchStatement node) {
- SwitchStatement toNode = this._toNode as SwitchStatement;
- return _and(
- _isEqualTokens(node.switchKeyword, toNode.switchKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.members, toNode.members),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
- }
-
- @override
- bool visitSymbolLiteral(SymbolLiteral node) {
- SymbolLiteral toNode = this._toNode as SymbolLiteral;
- if (_and(_isEqualTokens(node.poundSign, toNode.poundSign),
- _isEqualTokenLists(node.components, toNode.components))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.constKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.CONST_METHOD, modifiers.constKeyword);
}
- return false;
- }
-
- @override
- bool visitThisExpression(ThisExpression node) {
- ThisExpression toNode = this._toNode as ThisExpression;
- if (_isEqualTokens(node.thisKeyword, toNode.thisKeyword)) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.factoryKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
}
- return false;
- }
-
- @override
- bool visitThrowExpression(ThrowExpression node) {
- ThrowExpression toNode = this._toNode as ThrowExpression;
- if (_and(_isEqualTokens(node.throwKeyword, toNode.throwKeyword),
- _isEqualNodes(node.expression, toNode.expression))) {
- toNode.propagatedType = node.propagatedType;
- toNode.staticType = node.staticType;
- return true;
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword);
}
- return false;
- }
-
- @override
- bool visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) {
- TopLevelVariableDeclaration toNode =
- this._toNode as TopLevelVariableDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualNodes(node.variables, toNode.variables),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitTryStatement(TryStatement node) {
- TryStatement toNode = this._toNode as TryStatement;
- return _and(
- _isEqualTokens(node.tryKeyword, toNode.tryKeyword),
- _isEqualNodes(node.body, toNode.body),
- _isEqualNodeLists(node.catchClauses, toNode.catchClauses),
- _isEqualTokens(node.finallyKeyword, toNode.finallyKeyword),
- _isEqualNodes(node.finallyBlock, toNode.finallyBlock));
- }
-
- @override
- bool visitTypeArgumentList(TypeArgumentList node) {
- TypeArgumentList toNode = this._toNode as TypeArgumentList;
- return _and(
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.arguments, toNode.arguments),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
- }
-
- @override
- bool visitTypeName(TypeName node) {
- TypeName toNode = this._toNode as TypeName;
- if (_and(_isEqualNodes(node.name, toNode.name),
- _isEqualNodes(node.typeArguments, toNode.typeArguments))) {
- toNode.type = node.type;
- return true;
+ if (modifiers.staticKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.STATIC_OPERATOR, modifiers.staticKeyword);
+ }
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
}
- return false;
- }
-
- @override
- bool visitTypeParameter(TypeParameter node) {
- TypeParameter toNode = this._toNode as TypeParameter;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualNodes(node.name, toNode.name),
- _isEqualTokens(node.extendsKeyword, toNode.extendsKeyword),
- _isEqualNodes(node.bound, toNode.bound));
- }
-
- @override
- bool visitTypeParameterList(TypeParameterList node) {
- TypeParameterList toNode = this._toNode as TypeParameterList;
- return _and(
- _isEqualTokens(node.leftBracket, toNode.leftBracket),
- _isEqualNodeLists(node.typeParameters, toNode.typeParameters),
- _isEqualTokens(node.rightBracket, toNode.rightBracket));
- }
-
- @override
- bool visitVariableDeclaration(VariableDeclaration node) {
- VariableDeclaration toNode = this._toNode as VariableDeclaration;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualNodes(node.name, toNode.name),
- _isEqualTokens(node.equals, toNode.equals),
- _isEqualNodes(node.initializer, toNode.initializer));
- }
-
- @override
- bool visitVariableDeclarationList(VariableDeclarationList node) {
- VariableDeclarationList toNode = this._toNode as VariableDeclarationList;
- return _and(
- _isEqualNodes(node.documentationComment, toNode.documentationComment),
- _isEqualNodeLists(node.metadata, toNode.metadata),
- _isEqualTokens(node.keyword, toNode.keyword),
- _isEqualNodes(node.type, toNode.type),
- _isEqualNodeLists(node.variables, toNode.variables));
- }
-
- @override
- bool visitVariableDeclarationStatement(VariableDeclarationStatement node) {
- VariableDeclarationStatement toNode =
- this._toNode as VariableDeclarationStatement;
- return _and(_isEqualNodes(node.variables, toNode.variables),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- @override
- bool visitWhileStatement(WhileStatement node) {
- WhileStatement toNode = this._toNode as WhileStatement;
- return _and(
- _isEqualTokens(node.whileKeyword, toNode.whileKeyword),
- _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis),
- _isEqualNodes(node.condition, toNode.condition),
- _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis),
- _isEqualNodes(node.body, toNode.body));
- }
-
- @override
- bool visitWithClause(WithClause node) {
- WithClause toNode = this._toNode as WithClause;
- return _and(_isEqualTokens(node.withKeyword, toNode.withKeyword),
- _isEqualNodeLists(node.mixinTypes, toNode.mixinTypes));
}
- @override
- bool visitYieldStatement(YieldStatement node) {
- YieldStatement toNode = this._toNode as YieldStatement;
- return _and(
- _isEqualTokens(node.yieldKeyword, toNode.yieldKeyword),
- _isEqualNodes(node.expression, toNode.expression),
- _isEqualTokens(node.semicolon, toNode.semicolon));
- }
-
- /**
- * Return `true` if all of the parameters are `true`.
- */
- bool _and(bool b1, bool b2,
- [bool b3 = true,
- bool b4 = true,
- bool b5 = true,
- bool b6 = true,
- bool b7 = true,
- bool b8 = true,
- bool b9 = true,
- bool b10 = true,
- bool b11 = true,
- bool b12 = true,
- bool b13 = true]) {
- // TODO(brianwilkerson) Inline this method.
- return b1 &&
- b2 &&
- b3 &&
- b4 &&
- b5 &&
- b6 &&
- b7 &&
- b8 &&
- b9 &&
- b10 &&
- b11 &&
- b12 &&
- b13;
- }
-
- /**
- * Return `true` if the [first] and [second] lists of AST nodes have the same
- * size and corresponding elements are equal.
- */
- bool _isEqualNodeLists(NodeList first, NodeList second) {
- if (first == null) {
- return second == null;
- } else if (second == null) {
- return false;
- }
- int size = first.length;
- if (second.length != size) {
- return false;
+ /**
+ * Validate that the given set of [modifiers] is appropriate for a top-level
+ * declaration.
+ */
+ void _validateModifiersForTopLevelDeclaration(Modifiers modifiers) {
+ if (modifiers.factoryKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION,
+ modifiers.factoryKeyword);
}
- bool equal = true;
- for (int i = 0; i < size; i++) {
- if (!_isEqualNodes(first[i], second[i])) {
- equal = false;
- }
+ if (modifiers.staticKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.STATIC_TOP_LEVEL_DECLARATION,
+ modifiers.staticKeyword);
}
- return equal;
}
/**
- * Return `true` if the [fromNode] and [toNode] have the same structure. As a
- * side-effect, if the nodes do have the same structure, any resolution data
- * from the first node will be copied to the second node.
+ * Validate that the given set of [modifiers] is appropriate for a top-level
+ * function.
*/
- bool _isEqualNodes(AstNode fromNode, AstNode toNode) {
- if (fromNode == null) {
- return toNode == null;
- } else if (toNode == null) {
- return false;
- } else if (fromNode.runtimeType == toNode.runtimeType) {
- this._toNode = toNode;
- return fromNode.accept(this);
+ void _validateModifiersForTopLevelFunction(Modifiers modifiers) {
+ _validateModifiersForTopLevelDeclaration(modifiers);
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_FUNCTION);
}
- //
- // Check for a simple transformation caused by entering a period.
- //
- if (toNode is PrefixedIdentifier) {
- SimpleIdentifier prefix = toNode.prefix;
- if (fromNode.runtimeType == prefix.runtimeType) {
- this._toNode = prefix;
- return fromNode.accept(this);
- }
- } else if (toNode is PropertyAccess) {
- Expression target = toNode.target;
- if (fromNode.runtimeType == target.runtimeType) {
- this._toNode = target;
- return fromNode.accept(this);
- }
+ if (modifiers.constKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword);
+ }
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword);
+ }
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
}
- return false;
}
/**
- * Return `true` if the [first] and [second] arrays of tokens have the same
- * length and corresponding elements are equal.
+ * Validate that the given set of [modifiers] is appropriate for a top-level
+ * variable and return the 'final', 'const', or 'var' keyword if there is one.
*/
- bool _isEqualTokenLists(List<Token> first, List<Token> second) {
- int length = first.length;
- if (second.length != length) {
- return false;
+ Token _validateModifiersForTopLevelVariable(Modifiers modifiers) {
+ _validateModifiersForTopLevelDeclaration(modifiers);
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_VARIABLE);
}
- for (int i = 0; i < length; i++) {
- if (!_isEqualTokens(first[i], second[i])) {
- return false;
+ if (modifiers.externalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword);
+ }
+ Token constKeyword = modifiers.constKeyword;
+ Token finalKeyword = modifiers.finalKeyword;
+ Token varKeyword = modifiers.varKeyword;
+ if (constKeyword != null) {
+ if (finalKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword);
+ }
+ if (varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword);
+ }
+ } else if (finalKeyword != null) {
+ if (varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword);
}
}
- return true;
+ return Token.lexicallyFirst([constKeyword, finalKeyword, varKeyword]);
}
/**
- * Return `true` if the [first] and [second] tokens have the same structure.
+ * Validate that the given set of [modifiers] is appropriate for a typedef
+ * declaration.
*/
- bool _isEqualTokens(Token first, Token second) {
- if (first == null) {
- return second == null;
- } else if (second == null) {
- return false;
+ void _validateModifiersForTypedef(Modifiers modifiers) {
+ _validateModifiersForTopLevelDeclaration(modifiers);
+ if (modifiers.abstractKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.ABSTRACT_TYPEDEF, modifiers.abstractKeyword);
+ }
+ if (modifiers.constKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.CONST_TYPEDEF, modifiers.constKeyword);
+ }
+ if (modifiers.externalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.EXTERNAL_TYPEDEF, modifiers.externalKeyword);
+ }
+ if (modifiers.finalKeyword != null) {
+ _reportErrorForToken(
+ ParserErrorCode.FINAL_TYPEDEF, modifiers.finalKeyword);
+ }
+ if (modifiers.varKeyword != null) {
+ _reportErrorForToken(ParserErrorCode.VAR_TYPEDEF, modifiers.varKeyword);
}
- return first.lexeme == second.lexeme;
}
+}
+/**
+ * A synthetic keyword token.
+ */
+class Parser_SyntheticKeywordToken extends KeywordToken {
/**
- * Copy resolution data from the [fromNode] to the [toNode].
+ * Initialize a newly created token to represent the given [keyword] at the
+ * given [offset].
*/
- static void copyResolutionData(AstNode fromNode, AstNode toNode) {
- ResolutionCopier copier = new ResolutionCopier();
- copier._isEqualNodes(fromNode, toNode);
- }
+ Parser_SyntheticKeywordToken(Keyword keyword, int offset)
+ : super(keyword, offset);
+
+ @override
+ int get length => 0;
+
+ @override
+ Token copy() => new Parser_SyntheticKeywordToken(keyword, offset);
}