| Index: pkg/analyzer/lib/src/generated/parser.dart | 
| diff --git a/pkg/analyzer/lib/src/generated/parser.dart b/pkg/analyzer/lib/src/generated/parser.dart | 
| index f63edfa49cf1b6af5fd63d53d368778a6cbcde31..c90928f8fc26bd1fafc715eb46010b02bfd9d3c7 100644 | 
| --- a/pkg/analyzer/lib/src/generated/parser.dart | 
| +++ b/pkg/analyzer/lib/src/generated/parser.dart | 
| @@ -216,16 +216,19 @@ Map<String, MethodTrampoline> methodTable_Parser = <String, MethodTrampoline>{ | 
| 8, | 
| (Parser target, arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) => target | 
| ._parseConstructor(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)), | 
| -  'parseConstructorFieldInitializer_0': new MethodTrampoline( | 
| -      0, (Parser target) => target._parseConstructorFieldInitializer()), | 
| +  'parseConstructorFieldInitializer_1': new MethodTrampoline(1, | 
| +      (Parser target, arg0) => target._parseConstructorFieldInitializer(arg0)), | 
| 'parseContinueStatement_0': new MethodTrampoline( | 
| 0, (Parser target) => target._parseContinueStatement()), | 
| 'parseDirective_1': new MethodTrampoline( | 
| 1, (Parser target, arg0) => target._parseDirective(arg0)), | 
| 'parseDirectives_0': | 
| new MethodTrampoline(0, (Parser target) => target._parseDirectives()), | 
| -  'parseDocumentationComment_0': new MethodTrampoline( | 
| -      0, (Parser target) => target._parseDocumentationComment()), | 
| +  'parseDocumentationComment_0': new MethodTrampoline(0, (Parser target) { | 
| +    List<DocumentationCommentToken> tokens = | 
| +        target._parseDocumentationCommentTokens(); | 
| +    return target._parseDocumentationComment(tokens); | 
| +  }), | 
| 'parseDoStatement_0': | 
| new MethodTrampoline(0, (Parser target) => target._parseDoStatement()), | 
| 'parseDottedName_0': | 
| @@ -320,13 +323,15 @@ Map<String, MethodTrampoline> methodTable_Parser = <String, MethodTrampoline>{ | 
| 'parseOptionalReturnType_0': new MethodTrampoline( | 
| 0, (Parser target) => target._parseOptionalReturnType()), | 
| 'parsePartDirective_1': new MethodTrampoline( | 
| -      1, (Parser target, arg0) => target._parsePartDirective(arg0)), | 
| +      1, (Parser target, arg0) => target._parsePartOrPartOfDirective(arg0)), | 
| 'parsePostfixExpression_0': new MethodTrampoline( | 
| 0, (Parser target) => target._parsePostfixExpression()), | 
| 'parsePrimaryExpression_0': new MethodTrampoline( | 
| 0, (Parser target) => target._parsePrimaryExpression()), | 
| -  'parseRedirectingConstructorInvocation_0': new MethodTrampoline( | 
| -      0, (Parser target) => target._parseRedirectingConstructorInvocation()), | 
| +  'parseRedirectingConstructorInvocation_1': new MethodTrampoline( | 
| +      1, | 
| +      (Parser target, arg0) => | 
| +          target._parseRedirectingConstructorInvocation(arg0)), | 
| 'parseRelationalExpression_0': new MethodTrampoline( | 
| 0, (Parser target) => target._parseRelationalExpression()), | 
| 'parseRethrowExpression_0': new MethodTrampoline( | 
| @@ -493,7 +498,7 @@ class CommentAndMetadata { | 
| final Comment comment; | 
|  | 
| /** | 
| -   * The metadata that was parsed. | 
| +   * The metadata that was parsed, or `null` if none was given. | 
| */ | 
| final List<Annotation> metadata; | 
|  | 
| @@ -501,6 +506,11 @@ class CommentAndMetadata { | 
| * Initialize a newly created holder with the given [comment] and [metadata]. | 
| */ | 
| CommentAndMetadata(this.comment, this.metadata); | 
| + | 
| +  /** | 
| +   * Return `true` if some metadata was parsed. | 
| +   */ | 
| +  bool get hasMetadata => metadata != null && metadata.isNotEmpty; | 
| } | 
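A minimal standalone sketch (illustrative names, not the analyzer's classes) of the null-tolerant accessor pattern behind the new hasMetadata getter, given that the metadata list may now be null:

class Holder {
  final List<String> metadata;

  Holder(this.metadata);

  /// Return `true` if some metadata was recorded.
  bool get hasMetadata => metadata != null && metadata.isNotEmpty;
}

void main() {
  print(new Holder(null).hasMetadata); // false
  print(new Holder(<String>[]).hasMetadata); // false
  print(new Holder(<String>['@deprecated']).hasMetadata); // true
}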
|  | 
| /** | 
| @@ -2147,7 +2157,7 @@ class Parser { | 
|  | 
| /** | 
| * A flag indicating whether the parser is currently in a function body marked | 
| -   * as being 'async'. | 
| +   * (by a star) as being a generator. | 
| */ | 
| bool _inGenerator = false; | 
|  | 
| @@ -2163,7 +2173,7 @@ class Parser { | 
|  | 
| /** | 
| * A flag indicating whether the parser is currently in a constructor field | 
| -   * initializer, with no intervening parens, braces, or brackets. | 
| +   * initializer, with no intervening parentheses, braces, or brackets. | 
| */ | 
| bool _inInitializer = false; | 
|  | 
| @@ -2179,14 +2189,16 @@ class Parser { | 
| bool parseGenericMethodComments = false; | 
|  | 
| /** | 
| -   * Initialize a newly created parser to parse the content of the given | 
| -   * [_source] and to report any errors that are found to the given | 
| -   * [_errorListener]. | 
| +   * Initialize a newly created parser to parse tokens in the given [_source] | 
| +   * and to report any errors that are found to the given [_errorListener]. | 
| */ | 
| Parser(this._source, this._errorListener); | 
|  | 
| -  void set currentToken(Token currentToken) { | 
| -    this._currentToken = currentToken; | 
| +  /** | 
| +   * Set the token with which the parse is to begin to the given [token]. | 
| +   */ | 
| +  void set currentToken(Token token) { | 
| +    this._currentToken = token; | 
| } | 
|  | 
| /** | 
| @@ -2195,6 +2207,7 @@ class Parser { | 
| * parameters, followed by a left-parenthesis. This is used by | 
| * [_parseTypeAlias] to determine whether or not to parse a return type. | 
| */ | 
| +  @deprecated | 
| bool get hasReturnTypeInTypeAlias { | 
| Token next = _skipReturnType(_currentToken); | 
| if (next == null) { | 
| @@ -2229,19 +2242,20 @@ class Parser { | 
| */ | 
| Token getAndAdvance() { | 
| Token token = _currentToken; | 
| -    _advance(); | 
| +    _currentToken = _currentToken.next; | 
| return token; | 
| } | 
|  | 
| /** | 
| * Parse an annotation. Return the annotation that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [TokenType.AT]. | 
| +   * | 
| *     annotation ::= | 
| *         '@' qualified ('.' identifier)? arguments? | 
| -   * | 
| */ | 
| Annotation parseAnnotation() { | 
| -    Token atSign = _expect(TokenType.AT); | 
| +    Token atSign = getAndAdvance(); | 
| Identifier name = parsePrefixedIdentifier(); | 
| Token period = null; | 
| SimpleIdentifier constructorName = null; | 
| @@ -2267,6 +2281,9 @@ class Parser { | 
| *         label expression | 
| */ | 
| Expression parseArgument() { | 
| +    // TODO(brianwilkerson) Consider returning a wrapper indicating whether the | 
| +    // expression is a named expression in order to remove the 'is' check in | 
| +    // 'parseArgumentList'. | 
| // | 
| // Both namedArgument and expression can start with an identifier, but only | 
| // namedArgument can have an identifier followed by a colon. | 
| @@ -2281,6 +2298,8 @@ class Parser { | 
| /** | 
| * Parse a list of arguments. Return the argument list that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [TokenType.OPEN_PAREN]. | 
| +   * | 
| *     arguments ::= | 
| *         '(' argumentList? ')' | 
| * | 
| @@ -2289,10 +2308,9 @@ class Parser { | 
| *       | expressionList (',' namedArgument)* | 
| */ | 
| ArgumentList parseArgumentList() { | 
| -    Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | 
| -    List<Expression> arguments = new List<Expression>(); | 
| +    Token leftParenthesis = getAndAdvance(); | 
| if (_matches(TokenType.CLOSE_PAREN)) { | 
| -      return new ArgumentList(leftParenthesis, arguments, getAndAdvance()); | 
| +      return new ArgumentList(leftParenthesis, null, getAndAdvance()); | 
| } | 
| // | 
| // Even though unnamed arguments must all appear before any named arguments, | 
| @@ -2302,30 +2320,30 @@ class Parser { | 
| _inInitializer = false; | 
| try { | 
| Expression argument = parseArgument(); | 
| -      arguments.add(argument); | 
| +      List<Expression> arguments = <Expression>[argument]; | 
| bool foundNamedArgument = argument is NamedExpression; | 
| bool generatedError = false; | 
| while (_optional(TokenType.COMMA)) { | 
| argument = parseArgument(); | 
| arguments.add(argument); | 
| -        if (foundNamedArgument) { | 
| -          bool blankArgument = | 
| -              argument is SimpleIdentifier && argument.name.isEmpty; | 
| -          if (!generatedError && | 
| -              !(argument is NamedExpression && !blankArgument)) { | 
| -            // Report the error, once, but allow the arguments to be in any | 
| -            // order in the AST. | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT); | 
| -            generatedError = true; | 
| -          } | 
| -        } else if (argument is NamedExpression) { | 
| +        if (argument is NamedExpression) { | 
| foundNamedArgument = true; | 
| +        } else if (foundNamedArgument) { | 
| +          if (!generatedError) { | 
| +            if (!argument.isSynthetic) { | 
| +              // Report the error, once, but allow the arguments to be in any | 
| +              // order in the AST. | 
| +              _reportErrorForCurrentToken( | 
| +                  ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT); | 
| +              generatedError = true; | 
| +            } | 
| +          } | 
| } | 
| } | 
| -      // TODO(brianwilkerson) Recovery: Look at the left parenthesis to see | 
| -      // whether there is a matching right parenthesis. If there is, then we're | 
| -      // more likely missing a comma and should go back to parsing arguments. | 
| +      // Recovery: If the next token is not a right parenthesis, look at the | 
| +      // left parenthesis to see whether there is a matching right parenthesis. | 
| +      // If there is, then we're more likely missing a comma and should go back | 
| +      // to parsing arguments. | 
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| return new ArgumentList(leftParenthesis, arguments, rightParenthesis); | 
| } finally { | 
| @@ -2343,16 +2361,15 @@ class Parser { | 
| */ | 
| Expression parseBitwiseOrExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| -        _tokenMatches(_peek(), TokenType.BAR)) { | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| +        _currentToken.next.type == TokenType.BAR) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| expression = _parseBitwiseXorExpression(); | 
| } | 
| -    while (_matches(TokenType.BAR)) { | 
| -      Token operator = getAndAdvance(); | 
| +    while (_currentToken.type == TokenType.BAR) { | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseBitwiseXorExpression()); | 
| +          expression, getAndAdvance(), _parseBitwiseXorExpression()); | 
| } | 
| return expression; | 
| } | 
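The rewritten loop keeps the left-associative shape shared by the binary-operator parsers in this file (bitwise-or here; additive, logical-or, and if-null below): each iteration folds the expression parsed so far into a new node. A minimal sketch of that folding, with strings standing in for AST nodes:

String foldLeft(List<String> operands, String operator) {
  String expression = operands.first;
  for (String operand in operands.skip(1)) {
    expression = '($expression $operator $operand)';
  }
  return expression;
}

void main() {
  print(foldLeft(<String>['a', 'b', 'c'], '|')); // ((a | b) | c)
}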
| @@ -2360,27 +2377,37 @@ class Parser { | 
| /** | 
| * Parse a block. Return the block that was parsed. | 
| * | 
| +   * This method assumes that the current token matches | 
| +   * [TokenType.OPEN_CURLY_BRACKET]. | 
| +   * | 
| *     block ::= | 
| *         '{' statements '}' | 
| */ | 
| Block parseBlock() { | 
| -    Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | 
| -    List<Statement> statements = new List<Statement>(); | 
| +    bool isEndOfBlock() { | 
| +      TokenType type = _currentToken.type; | 
| +      return type == TokenType.EOF || type == TokenType.CLOSE_CURLY_BRACKET; | 
| +    } | 
| + | 
| +    Token leftBracket = getAndAdvance(); | 
| +    List<Statement> statements = <Statement>[]; | 
| Token statementStart = _currentToken; | 
| -    while ( | 
| -        !_matches(TokenType.EOF) && !_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| +    while (!isEndOfBlock()) { | 
| Statement statement = parseStatement2(); | 
| -      if (statement != null) { | 
| -        statements.add(statement); | 
| -      } | 
| if (identical(_currentToken, statementStart)) { | 
| // Ensure that we are making progress and report an error if we're not. | 
| _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | 
| [_currentToken.lexeme]); | 
| _advance(); | 
| +      } else if (statement != null) { | 
| +        statements.add(statement); | 
| } | 
| statementStart = _currentToken; | 
| } | 
| +    // Recovery: If the next token is not a right curly bracket, look at the | 
| +    // left curly bracket to see whether there is a matching right bracket. If | 
| +    // there is, then we're more likely missing a semi-colon and should go back | 
| +    // to parsing statements. | 
| Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | 
| return new Block(leftBracket, statements, rightBracket); | 
| } | 
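parseBlock retains the guard that guarantees forward progress: when parseStatement2 consumes no tokens, the unexpected token is reported and skipped so the loop cannot spin. A standalone sketch of the same guard, using a hypothetical token cursor rather than the analyzer's API:

class Cursor {
  final List<String> tokens;
  int index = 0;

  Cursor(this.tokens);

  String get current => tokens[index];
}

List<String> parseStatements(Cursor cursor) {
  List<String> statements = <String>[];
  while (cursor.current != '}') {
    int start = cursor.index;
    if (cursor.current.endsWith(';')) {
      statements.add(cursor.tokens[cursor.index++]);
    }
    if (cursor.index == start) {
      // No progress was made: report the token and skip it.
      print('unexpected token: ${cursor.current}');
      cursor.index++;
    }
  }
  return statements;
}

void main() {
  print(parseStatements(new Cursor(<String>['a;', '???', 'b;', '}'])));
  // unexpected token: ???
  // [a;, b;]
}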
| @@ -2397,21 +2424,25 @@ class Parser { | 
| ClassMember parseClassMember(String className) { | 
| CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | 
| Modifiers modifiers = _parseModifiers(); | 
| -    if (_matchesKeyword(Keyword.VOID)) { | 
| -      TypeName returnType = parseReturnType(); | 
| -      if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.VOID) { | 
| +      TypeName returnType = | 
| +          new TypeName(new SimpleIdentifier(getAndAdvance()), null); | 
| +      keyword = _currentToken.keyword; | 
| +      Token next = _peek(); | 
| +      bool isFollowedByIdentifier = _tokenMatchesIdentifier(next); | 
| +      if (keyword == Keyword.GET && isFollowedByIdentifier) { | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | 
| modifiers.staticKeyword, returnType); | 
| -      } else if (_matchesKeyword(Keyword.SET) && | 
| -          _tokenMatchesIdentifier(_peek())) { | 
| +      } else if (keyword == Keyword.SET && isFollowedByIdentifier) { | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| return _parseSetter(commentAndMetadata, modifiers.externalKeyword, | 
| modifiers.staticKeyword, returnType); | 
| -      } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | 
| +      } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { | 
| _validateModifiersForOperator(modifiers); | 
| -        return _parseOperator( | 
| -            commentAndMetadata, modifiers.externalKeyword, returnType); | 
| +        return _parseOperatorAfterKeyword(commentAndMetadata, | 
| +            modifiers.externalKeyword, returnType, getAndAdvance()); | 
| } else if (_matchesIdentifier() && | 
| _peek().matchesAny(const <TokenType>[ | 
| TokenType.OPEN_PAREN, | 
| @@ -2456,20 +2487,21 @@ class Parser { | 
| ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); | 
| return null; | 
| } | 
| -    } else if (_matchesKeyword(Keyword.GET) && | 
| -        _tokenMatchesIdentifier(_peek())) { | 
| +    } | 
| +    Token next = _peek(); | 
| +    bool isFollowedByIdentifier = _tokenMatchesIdentifier(next); | 
| +    if (keyword == Keyword.GET && isFollowedByIdentifier) { | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | 
| modifiers.staticKeyword, null); | 
| -    } else if (_matchesKeyword(Keyword.SET) && | 
| -        _tokenMatchesIdentifier(_peek())) { | 
| +    } else if (keyword == Keyword.SET && isFollowedByIdentifier) { | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| return _parseSetter(commentAndMetadata, modifiers.externalKeyword, | 
| modifiers.staticKeyword, null); | 
| -    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | 
| +    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { | 
| _validateModifiersForOperator(modifiers); | 
| -      return _parseOperator( | 
| -          commentAndMetadata, modifiers.externalKeyword, null); | 
| +      return _parseOperatorAfterKeyword( | 
| +          commentAndMetadata, modifiers.externalKeyword, null, getAndAdvance()); | 
| } else if (!_matchesIdentifier()) { | 
| // | 
| // Recover from an error. | 
| @@ -2514,9 +2546,9 @@ class Parser { | 
| // We appear to have found an incomplete field declaration. | 
| // | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
| -        List<VariableDeclaration> variables = new List<VariableDeclaration>(); | 
| -        variables.add( | 
| -            new VariableDeclaration(_createSyntheticIdentifier(), null, null)); | 
| +        VariableDeclaration variable = | 
| +            new VariableDeclaration(_createSyntheticIdentifier(), null, null); | 
| +        List<VariableDeclaration> variables = <VariableDeclaration>[variable]; | 
| return new FieldDeclaration( | 
| commentAndMetadata.comment, | 
| commentAndMetadata.metadata, | 
| @@ -2527,7 +2559,7 @@ class Parser { | 
| _reportErrorForToken( | 
| ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken); | 
| if (commentAndMetadata.comment != null || | 
| -          !commentAndMetadata.metadata.isEmpty) { | 
| +          commentAndMetadata.hasMetadata) { | 
| // | 
| // We appear to have found an incomplete declaration at the end of the | 
| // class. At this point it consists of a metadata, which we don't want | 
| @@ -2549,7 +2581,7 @@ class Parser { | 
| new EmptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON))); | 
| } | 
| return null; | 
| -    } else if (_tokenMatches(_peek(), TokenType.PERIOD) && | 
| +    } else if (_tokenMatches(next, TokenType.PERIOD) && | 
| _tokenMatchesIdentifier(_peekAt(2)) && | 
| _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) { | 
| return _parseConstructor( | 
| @@ -2561,7 +2593,7 @@ class Parser { | 
| getAndAdvance(), | 
| parseSimpleIdentifier(isDeclaration: true), | 
| parseFormalParameterList()); | 
| -    } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +    } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) { | 
| TypeName returnType = _parseOptionalTypeNameComment(); | 
| SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true); | 
| TypeParameterList typeParameters = _parseGenericCommentTypeParameters(); | 
| @@ -2589,7 +2621,7 @@ class Parser { | 
| methodName, | 
| typeParameters, | 
| parameters); | 
| -    } else if (_peek().matchesAny(const <TokenType>[ | 
| +    } else if (next.matchesAny(const <TokenType>[ | 
| TokenType.EQ, | 
| TokenType.COMMA, | 
| TokenType.SEMICOLON | 
| @@ -2602,7 +2634,7 @@ class Parser { | 
| } | 
| return _parseInitializedIdentifierList(commentAndMetadata, | 
| modifiers.staticKeyword, _validateModifiersForField(modifiers), null); | 
| -    } else if (_matchesKeyword(Keyword.TYPEDEF)) { | 
| +    } else if (keyword == Keyword.TYPEDEF) { | 
| _reportErrorForCurrentToken(ParserErrorCode.TYPEDEF_IN_CLASS); | 
| // TODO(brianwilkerson) We don't currently have any way to capture the | 
| // function type alias that was parsed. | 
| @@ -2615,20 +2647,22 @@ class Parser { | 
| modifiers.externalKeyword, modifiers.staticKeyword, null); | 
| } | 
| } | 
| -    TypeName type = parseTypeName(); | 
| -    if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) { | 
| +    TypeName type = _parseTypeNameAfterIdentifier(); | 
| +    keyword = _currentToken.keyword; | 
| +    next = _peek(); | 
| +    isFollowedByIdentifier = _tokenMatchesIdentifier(next); | 
| +    if (keyword == Keyword.GET && isFollowedByIdentifier) { | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | 
| modifiers.staticKeyword, type); | 
| -    } else if (_matchesKeyword(Keyword.SET) && | 
| -        _tokenMatchesIdentifier(_peek())) { | 
| +    } else if (keyword == Keyword.SET && isFollowedByIdentifier) { | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| return _parseSetter(commentAndMetadata, modifiers.externalKeyword, | 
| modifiers.staticKeyword, type); | 
| -    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | 
| +    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { | 
| _validateModifiersForOperator(modifiers); | 
| -      return _parseOperator( | 
| -          commentAndMetadata, modifiers.externalKeyword, type); | 
| +      return _parseOperatorAfterKeyword( | 
| +          commentAndMetadata, modifiers.externalKeyword, type, getAndAdvance()); | 
| } else if (!_matchesIdentifier()) { | 
| if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| // | 
| @@ -2668,8 +2702,9 @@ class Parser { | 
| } finally { | 
| _unlockErrorListener(); | 
| } | 
| -    } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| -      SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true); | 
| +    } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) { | 
| +      SimpleIdentifier methodName = | 
| +          _parseSimpleIdentifierUnchecked(isDeclaration: true); | 
| TypeParameterList typeParameters = _parseGenericCommentTypeParameters(); | 
| FormalParameterList parameters = parseFormalParameterList(); | 
| if (methodName.name == className) { | 
| @@ -2694,10 +2729,10 @@ class Parser { | 
| methodName, | 
| typeParameters, | 
| parameters); | 
| -    } else if (parseGenericMethods && _tokenMatches(_peek(), TokenType.LT)) { | 
| +    } else if (parseGenericMethods && _tokenMatches(next, TokenType.LT)) { | 
| return _parseMethodDeclarationAfterReturnType(commentAndMetadata, | 
| modifiers.externalKeyword, modifiers.staticKeyword, type); | 
| -    } else if (_tokenMatches(_peek(), TokenType.OPEN_CURLY_BRACKET)) { | 
| +    } else if (_tokenMatches(next, TokenType.OPEN_CURLY_BRACKET)) { | 
| // We have found "TypeName identifier {", and are guessing that this is a | 
| // getter without the keyword 'get'. | 
| _validateModifiersForGetterOrSetterOrMethod(modifiers); | 
| @@ -2720,14 +2755,10 @@ class Parser { | 
| *       | 'hide' identifier (',' identifier)* | 
| */ | 
| Combinator parseCombinator() { | 
| -    if (_matchesString(_SHOW) || _matchesString(_HIDE)) { | 
| -      Token keyword = getAndAdvance(); | 
| -      List<SimpleIdentifier> names = _parseIdentifierList(); | 
| -      if (keyword.lexeme == _SHOW) { | 
| -        return new ShowCombinator(keyword, names); | 
| -      } else { | 
| -        return new HideCombinator(keyword, names); | 
| -      } | 
| +    if (_matchesString(_SHOW)) { | 
| +      return new ShowCombinator(getAndAdvance(), _parseIdentifierList()); | 
| +    } else if (_matchesString(_HIDE)) { | 
| +      return new HideCombinator(getAndAdvance(), _parseIdentifierList()); | 
| } | 
| return null; | 
| } | 
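For reference, the 'show' and 'hide' combinators recognized here appear in import and export directives; a small runnable example:

import 'dart:math' show max, min;
// 'hide' has the same shape: import 'dart:math' hide Random;

void main() {
  print(max(1, 2)); // 2
  print(min(1, 2)); // 1
}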
| @@ -2773,70 +2804,72 @@ class Parser { | 
| bool partOfDirectiveFound = false; | 
| bool partDirectiveFound = false; | 
| bool directiveFoundAfterDeclaration = false; | 
| -    List<Directive> directives = new List<Directive>(); | 
| -    List<CompilationUnitMember> declarations = | 
| -        new List<CompilationUnitMember>(); | 
| +    List<Directive> directives = <Directive>[]; | 
| +    List<CompilationUnitMember> declarations = <CompilationUnitMember>[]; | 
| Token memberStart = _currentToken; | 
| -    while (!_matches(TokenType.EOF)) { | 
| +    TokenType type = _currentToken.type; | 
| +    while (type != TokenType.EOF) { | 
| CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | 
| -      if ((_matchesKeyword(Keyword.IMPORT) || | 
| -              _matchesKeyword(Keyword.EXPORT) || | 
| -              _matchesKeyword(Keyword.LIBRARY) || | 
| -              _matchesKeyword(Keyword.PART)) && | 
| -          !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -          !_tokenMatches(_peek(), TokenType.LT) && | 
| -          !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| -        Directive directive = _parseDirective(commentAndMetadata); | 
| +      Keyword keyword = _currentToken.keyword; | 
| +      TokenType nextType = _currentToken.next.type; | 
| +      if ((keyword == Keyword.IMPORT || | 
| +              keyword == Keyword.EXPORT || | 
| +              keyword == Keyword.LIBRARY || | 
| +              keyword == Keyword.PART) && | 
| +          nextType != TokenType.PERIOD && | 
| +          nextType != TokenType.LT && | 
| +          nextType != TokenType.OPEN_PAREN) { | 
| +        Directive parseDirective() { | 
| +          if (keyword == Keyword.IMPORT) { | 
| +            if (partDirectiveFound) { | 
| +              _reportErrorForCurrentToken( | 
| +                  ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE); | 
| +            } | 
| +            return _parseImportDirective(commentAndMetadata); | 
| +          } else if (keyword == Keyword.EXPORT) { | 
| +            if (partDirectiveFound) { | 
| +              _reportErrorForCurrentToken( | 
| +                  ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE); | 
| +            } | 
| +            return _parseExportDirective(commentAndMetadata); | 
| +          } else if (keyword == Keyword.LIBRARY) { | 
| +            if (libraryDirectiveFound) { | 
| +              _reportErrorForCurrentToken( | 
| +                  ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES); | 
| +            } else { | 
| +              if (directives.length > 0) { | 
| +                _reportErrorForCurrentToken( | 
| +                    ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST); | 
| +              } | 
| +              libraryDirectiveFound = true; | 
| +            } | 
| +            return _parseLibraryDirective(commentAndMetadata); | 
| +          } else if (keyword == Keyword.PART) { | 
| +            if (_tokenMatchesString(_peek(), _OF)) { | 
| +              partOfDirectiveFound = true; | 
| +              return _parsePartOfDirective(commentAndMetadata); | 
| +            } else { | 
| +              partDirectiveFound = true; | 
| +              return _parsePartDirective(commentAndMetadata); | 
| +            } | 
| +          } else { | 
| +            // Internal error: this method should not have been invoked if the | 
| +            // current token was something other than one of the above. | 
| +            throw new IllegalStateException( | 
| +                "parseDirective invoked in an invalid state (currentToken = $_currentToken)"); | 
| +          } | 
| +        } | 
| +        Directive directive = parseDirective(); | 
| if (declarations.length > 0 && !directiveFoundAfterDeclaration) { | 
| _reportErrorForToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION, | 
| directive.beginToken); | 
| directiveFoundAfterDeclaration = true; | 
| } | 
| -        if (directive is LibraryDirective) { | 
| -          if (libraryDirectiveFound) { | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES); | 
| -          } else { | 
| -            if (directives.length > 0) { | 
| -              _reportErrorForToken(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST, | 
| -                  directive.libraryKeyword); | 
| -            } | 
| -            libraryDirectiveFound = true; | 
| -          } | 
| -        } else if (directive is PartDirective) { | 
| -          partDirectiveFound = true; | 
| -        } else if (partDirectiveFound) { | 
| -          if (directive is ExportDirective) { | 
| -            _reportErrorForToken( | 
| -                ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, | 
| -                directive.keyword); | 
| -          } else if (directive is ImportDirective) { | 
| -            _reportErrorForToken( | 
| -                ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, | 
| -                directive.keyword); | 
| -          } | 
| -        } | 
| -        if (directive is PartOfDirective) { | 
| -          if (partOfDirectiveFound) { | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES); | 
| -          } else { | 
| -            int directiveCount = directives.length; | 
| -            for (int i = 0; i < directiveCount; i++) { | 
| -              _reportErrorForToken( | 
| -                  ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, | 
| -                  directives[i].keyword); | 
| -            } | 
| -            partOfDirectiveFound = true; | 
| -          } | 
| -        } else { | 
| -          if (partOfDirectiveFound) { | 
| -            _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, | 
| -                directive.keyword); | 
| -          } | 
| -        } | 
| directives.add(directive); | 
| -      } else if (_matches(TokenType.SEMICOLON)) { | 
| +      } else if (type == TokenType.SEMICOLON) { | 
| +        // TODO(brianwilkerson) Consider moving this error detection into | 
| +        // _parseCompilationUnitMember (in the places where EXPECTED_EXECUTABLE | 
| +        // is being generated). | 
| _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | 
| [_currentToken.lexeme]); | 
| _advance(); | 
| @@ -2857,6 +2890,38 @@ class Parser { | 
| } | 
| } | 
| memberStart = _currentToken; | 
| +      type = _currentToken.type; | 
| +    } | 
| +    if (partOfDirectiveFound && directives.length > 1) { | 
| +      // TODO(brianwilkerson) Improve error reporting when both a library and | 
| +      // part-of directive are found. | 
| +//      if (libraryDirectiveFound) { | 
| +//        int directiveCount = directives.length; | 
| +//        for (int i = 0; i < directiveCount; i++) { | 
| +//          Directive directive = directives[i]; | 
| +//          if (directive is PartOfDirective) { | 
| +//            _reportErrorForToken( | 
| +//                ParserErrorCode.PART_OF_IN_LIBRARY, directive.partKeyword); | 
| +//          } | 
| +//        } | 
| +//      } else { | 
| +      bool firstPartOf = true; | 
| +      int directiveCount = directives.length; | 
| +      for (int i = 0; i < directiveCount; i++) { | 
| +        Directive directive = directives[i]; | 
| +        if (directive is PartOfDirective) { | 
| +          if (firstPartOf) { | 
| +            firstPartOf = false; | 
| +          } else { | 
| +            _reportErrorForToken(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES, | 
| +                directive.partKeyword); | 
| +          } | 
| +        } else { | 
| +          _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, | 
| +              directives[i].keyword); | 
| +        } | 
| +//        } | 
| +      } | 
| } | 
| return new CompilationUnit( | 
| firstToken, scriptTag, directives, declarations, _currentToken); | 
| @@ -2871,7 +2936,7 @@ class Parser { | 
| */ | 
| Expression parseConditionalExpression() { | 
| Expression condition = parseIfNullExpression(); | 
| -    if (!_matches(TokenType.QUESTION)) { | 
| +    if (_currentToken.type != TokenType.QUESTION) { | 
| return condition; | 
| } | 
| Token question = getAndAdvance(); | 
| @@ -2932,9 +2997,10 @@ class Parser { | 
| *       | throwExpression | 
| */ | 
| Expression parseExpression2() { | 
| -    if (_matchesKeyword(Keyword.THROW)) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.THROW) { | 
| return _parseThrowExpression(); | 
| -    } else if (_matchesKeyword(Keyword.RETHROW)) { | 
| +    } else if (keyword == Keyword.RETHROW) { | 
| // TODO(brianwilkerson) Rethrow is a statement again. | 
| return _parseRethrowExpression(); | 
| } | 
| @@ -2945,18 +3011,17 @@ class Parser { | 
| // grammar after making that determination. | 
| // | 
| Expression expression = parseConditionalExpression(); | 
| -    TokenType tokenType = _currentToken.type; | 
| -    if (tokenType == TokenType.PERIOD_PERIOD) { | 
| -      List<Expression> cascadeSections = new List<Expression>(); | 
| -      while (tokenType == TokenType.PERIOD_PERIOD) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.PERIOD_PERIOD) { | 
| +      List<Expression> cascadeSections = <Expression>[]; | 
| +      do { | 
| Expression section = _parseCascadeSection(); | 
| if (section != null) { | 
| cascadeSections.add(section); | 
| } | 
| -        tokenType = _currentToken.type; | 
| -      } | 
| +      } while (_currentToken.type == TokenType.PERIOD_PERIOD); | 
| return new CascadeExpression(expression, cascadeSections); | 
| -    } else if (tokenType.isAssignmentOperator) { | 
| +    } else if (type.isAssignmentOperator) { | 
| Token operator = getAndAdvance(); | 
| _ensureAssignable(expression); | 
| return new AssignmentExpression(expression, operator, parseExpression2()); | 
| @@ -2999,11 +3064,13 @@ class Parser { | 
| * Parse a class extends clause. Return the class extends clause that was | 
| * parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.EXTENDS`. | 
| +   * | 
| *     classExtendsClause ::= | 
| *         'extends' type | 
| */ | 
| ExtendsClause parseExtendsClause() { | 
| -    Token keyword = _expectKeyword(Keyword.EXTENDS); | 
| +    Token keyword = getAndAdvance(); | 
| TypeName superclass = parseTypeName(); | 
| return new ExtendsClause(keyword, superclass); | 
| } | 
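This illustrates the pattern applied throughout the change: the caller has already matched the keyword, so the method consumes it with getAndAdvance() instead of re-checking via _expectKeyword. A minimal standalone sketch with a hypothetical cursor, not the analyzer's classes:

class TokenCursor {
  final List<String> tokens;
  int index = 0;

  TokenCursor(this.tokens);

  String get current => tokens[index];

  String getAndAdvance() => tokens[index++];
}

/// Assumes the current token is 'extends'; callers check before invoking.
String parseExtendsClause(TokenCursor cursor) {
  String keyword = cursor.getAndAdvance();
  String superclass = cursor.getAndAdvance();
  return '$keyword $superclass';
}

void main() {
  TokenCursor cursor = new TokenCursor(<String>['extends', 'Object']);
  if (cursor.current == 'extends') {
    print(parseExtendsClause(cursor)); // extends Object
  }
}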
| @@ -3031,145 +3098,16 @@ class Parser { | 
| *         '{' defaultNamedParameter (',' defaultNamedParameter)* '}' | 
| */ | 
| FormalParameterList parseFormalParameterList() { | 
| -    Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | 
| -    if (_matches(TokenType.CLOSE_PAREN)) { | 
| -      return new FormalParameterList( | 
| -          leftParenthesis, null, null, null, getAndAdvance()); | 
| -    } | 
| -    // | 
| -    // Even though it is invalid to have default parameters outside of brackets, | 
| -    // required parameters inside of brackets, or multiple groups of default and | 
| -    // named parameters, we allow all of these cases so that we can recover | 
| -    // better. | 
| -    // | 
| -    List<FormalParameter> parameters = new List<FormalParameter>(); | 
| -    Token leftSquareBracket = null; | 
| -    Token rightSquareBracket = null; | 
| -    Token leftCurlyBracket = null; | 
| -    Token rightCurlyBracket = null; | 
| -    ParameterKind kind = ParameterKind.REQUIRED; | 
| -    bool firstParameter = true; | 
| -    bool reportedMultiplePositionalGroups = false; | 
| -    bool reportedMultipleNamedGroups = false; | 
| -    bool reportedMixedGroups = false; | 
| -    bool wasOptionalParameter = false; | 
| -    Token initialToken = null; | 
| -    do { | 
| -      if (firstParameter) { | 
| -        firstParameter = false; | 
| -      } else if (!_optional(TokenType.COMMA)) { | 
| -        // TODO(brianwilkerson) The token is wrong, we need to recover from this | 
| -        // case. | 
| -        if (_getEndToken(leftParenthesis) != null) { | 
| -          _reportErrorForCurrentToken( | 
| -              ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]); | 
| -        } else { | 
| -          _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS, | 
| -              _currentToken.previous); | 
| -          break; | 
| -        } | 
| -      } | 
| -      initialToken = _currentToken; | 
| -      // | 
| -      // Handle the beginning of parameter groups. | 
| -      // | 
| -      if (_matches(TokenType.OPEN_SQUARE_BRACKET)) { | 
| -        wasOptionalParameter = true; | 
| -        if (leftSquareBracket != null && !reportedMultiplePositionalGroups) { | 
| -          _reportErrorForCurrentToken( | 
| -              ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS); | 
| -          reportedMultiplePositionalGroups = true; | 
| -        } | 
| -        if (leftCurlyBracket != null && !reportedMixedGroups) { | 
| -          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS); | 
| -          reportedMixedGroups = true; | 
| -        } | 
| -        leftSquareBracket = getAndAdvance(); | 
| -        kind = ParameterKind.POSITIONAL; | 
| -      } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| -        wasOptionalParameter = true; | 
| -        if (leftCurlyBracket != null && !reportedMultipleNamedGroups) { | 
| -          _reportErrorForCurrentToken( | 
| -              ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS); | 
| -          reportedMultipleNamedGroups = true; | 
| -        } | 
| -        if (leftSquareBracket != null && !reportedMixedGroups) { | 
| -          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS); | 
| -          reportedMixedGroups = true; | 
| -        } | 
| -        leftCurlyBracket = getAndAdvance(); | 
| -        kind = ParameterKind.NAMED; | 
| -      } | 
| -      // | 
| -      // Parse and record the parameter. | 
| -      // | 
| -      FormalParameter parameter = _parseFormalParameter(kind); | 
| -      parameters.add(parameter); | 
| -      if (kind == ParameterKind.REQUIRED && wasOptionalParameter) { | 
| -        _reportErrorForNode( | 
| -            ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter); | 
| -      } | 
| -      // | 
| -      // Handle the end of parameter groups. | 
| -      // | 
| -      // TODO(brianwilkerson) Improve the detection and reporting of missing and | 
| -      // mismatched delimiters. | 
| -      if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) { | 
| -        rightSquareBracket = getAndAdvance(); | 
| -        if (leftSquareBracket == null) { | 
| -          if (leftCurlyBracket != null) { | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); | 
| -            rightCurlyBracket = rightSquareBracket; | 
| -            rightSquareBracket = null; | 
| -          } else { | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, | 
| -                ["["]); | 
| -          } | 
| -        } | 
| -        kind = ParameterKind.REQUIRED; | 
| -      } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| -        rightCurlyBracket = getAndAdvance(); | 
| -        if (leftCurlyBracket == null) { | 
| -          if (leftSquareBracket != null) { | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); | 
| -            rightSquareBracket = rightCurlyBracket; | 
| -            rightCurlyBracket = null; | 
| -          } else { | 
| -            _reportErrorForCurrentToken( | 
| -                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, | 
| -                ["{"]); | 
| -          } | 
| -        } | 
| -        kind = ParameterKind.REQUIRED; | 
| -      } | 
| -    } while (!_matches(TokenType.CLOSE_PAREN) && | 
| -        !identical(initialToken, _currentToken)); | 
| -    Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| -    // | 
| -    // Check that the groups were closed correctly. | 
| -    // | 
| -    if (leftSquareBracket != null && rightSquareBracket == null) { | 
| -      _reportErrorForCurrentToken( | 
| -          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); | 
| -    } | 
| -    if (leftCurlyBracket != null && rightCurlyBracket == null) { | 
| -      _reportErrorForCurrentToken( | 
| -          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); | 
| -    } | 
| -    // | 
| -    // Build the parameter list. | 
| -    // | 
| -    if (leftSquareBracket == null) { | 
| -      leftSquareBracket = leftCurlyBracket; | 
| -    } | 
| -    if (rightSquareBracket == null) { | 
| -      rightSquareBracket = rightCurlyBracket; | 
| +    if (_matches(TokenType.OPEN_PAREN)) { | 
| +      return _parseFormalParameterListUnchecked(); | 
| } | 
| -    return new FormalParameterList(leftParenthesis, parameters, | 
| -        leftSquareBracket, rightSquareBracket, rightParenthesis); | 
| +    // TODO(brianwilkerson) Improve the error message. | 
| +    _reportErrorForCurrentToken( | 
| +        ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]); | 
| +    // Recovery: Check for an unmatched closing paren and parse parameters until | 
| +    // it is reached. | 
| +    return _parseFormalParameterListAfterParen( | 
| +        _createSyntheticToken(TokenType.OPEN_PAREN)); | 
| } | 
|  | 
| /** | 
| @@ -3196,10 +3134,9 @@ class Parser { | 
| */ | 
| Expression parseIfNullExpression() { | 
| Expression expression = parseLogicalOrExpression(); | 
| -    while (_matches(TokenType.QUESTION_QUESTION)) { | 
| -      Token operator = getAndAdvance(); | 
| +    while (_currentToken.type == TokenType.QUESTION_QUESTION) { | 
| expression = new BinaryExpression( | 
| -          expression, operator, parseLogicalOrExpression()); | 
| +          expression, getAndAdvance(), parseLogicalOrExpression()); | 
| } | 
| return expression; | 
| } | 
| @@ -3207,12 +3144,14 @@ class Parser { | 
| /** | 
| * Parse an implements clause. Return the implements clause that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.IMPLEMENTS`. | 
| +   * | 
| *     implementsClause ::= | 
| *         'implements' type (',' type)* | 
| */ | 
| ImplementsClause parseImplementsClause() { | 
| -    Token keyword = _expectKeyword(Keyword.IMPLEMENTS); | 
| -    List<TypeName> interfaces = new List<TypeName>(); | 
| +    Token keyword = getAndAdvance(); | 
| +    List<TypeName> interfaces = <TypeName>[]; | 
| interfaces.add(parseTypeName()); | 
| while (_optional(TokenType.COMMA)) { | 
| interfaces.add(parseTypeName()); | 
| @@ -3223,13 +3162,16 @@ class Parser { | 
| /** | 
| * Parse a label. Return the label that was parsed. | 
| * | 
| +   * This method assumes that the current token matches an identifier and that | 
| +   * the following token matches `TokenType.COLON`. | 
| +   * | 
| *     label ::= | 
| *         identifier ':' | 
| */ | 
| Label parseLabel({bool isDeclaration: false}) { | 
| SimpleIdentifier label = | 
| -        parseSimpleIdentifier(isDeclaration: isDeclaration); | 
| -    Token colon = _expect(TokenType.COLON); | 
| +        _parseSimpleIdentifierUnchecked(isDeclaration: isDeclaration); | 
| +    Token colon = getAndAdvance(); | 
| return new Label(label, colon); | 
| } | 
|  | 
| @@ -3240,10 +3182,9 @@ class Parser { | 
| *         identifier ('.' identifier)* | 
| */ | 
| LibraryIdentifier parseLibraryIdentifier() { | 
| -    List<SimpleIdentifier> components = new List<SimpleIdentifier>(); | 
| +    List<SimpleIdentifier> components = <SimpleIdentifier>[]; | 
| components.add(parseSimpleIdentifier()); | 
| -    while (_matches(TokenType.PERIOD)) { | 
| -      _advance(); | 
| +    while (_optional(TokenType.PERIOD)) { | 
| components.add(parseSimpleIdentifier()); | 
| } | 
| return new LibraryIdentifier(components); | 
| @@ -3258,10 +3199,9 @@ class Parser { | 
| */ | 
| Expression parseLogicalOrExpression() { | 
| Expression expression = _parseLogicalAndExpression(); | 
| -    while (_matches(TokenType.BAR_BAR)) { | 
| -      Token operator = getAndAdvance(); | 
| +    while (_currentToken.type == TokenType.BAR_BAR) { | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseLogicalAndExpression()); | 
| +          expression, getAndAdvance(), _parseLogicalAndExpression()); | 
| } | 
| return expression; | 
| } | 
| @@ -3310,7 +3250,7 @@ class Parser { | 
| SimpleIdentifier identifier = parseSimpleIdentifier(); | 
| TypeParameterList typeParameters = _parseGenericMethodTypeParameters(); | 
| if (_matches(TokenType.OPEN_PAREN)) { | 
| -      FormalParameterList parameters = parseFormalParameterList(); | 
| +      FormalParameterList parameters = _parseFormalParameterListUnchecked(); | 
| if (thisKeyword == null) { | 
| if (holder.keyword != null) { | 
| _reportErrorForToken( | 
| @@ -3381,13 +3321,7 @@ class Parser { | 
| *         identifier ('.' identifier)? | 
| */ | 
| Identifier parsePrefixedIdentifier() { | 
| -    SimpleIdentifier qualifier = parseSimpleIdentifier(); | 
| -    if (!_matches(TokenType.PERIOD) || _injectGenericCommentTypeList()) { | 
| -      return qualifier; | 
| -    } | 
| -    Token period = getAndAdvance(); | 
| -    SimpleIdentifier qualified = parseSimpleIdentifier(); | 
| -    return new PrefixedIdentifier(qualifier, period, qualified); | 
| +    return _parsePrefixedIdentifierAfterIdentifier(parseSimpleIdentifier()); | 
| } | 
|  | 
| /** | 
| @@ -3398,7 +3332,7 @@ class Parser { | 
| *       | type | 
| */ | 
| TypeName parseReturnType() { | 
| -    if (_matchesKeyword(Keyword.VOID)) { | 
| +    if (_currentToken.keyword == Keyword.VOID) { | 
| return new TypeName(new SimpleIdentifier(getAndAdvance()), null); | 
| } else { | 
| return parseTypeName(); | 
| @@ -3413,14 +3347,7 @@ class Parser { | 
| */ | 
| SimpleIdentifier parseSimpleIdentifier({bool isDeclaration: false}) { | 
| if (_matchesIdentifier()) { | 
| -      String lexeme = _currentToken.lexeme; | 
| -      if ((_inAsync || _inGenerator) && | 
| -          (lexeme == 'async' || lexeme == 'await' || lexeme == 'yield')) { | 
| -        _reportErrorForCurrentToken( | 
| -            ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER); | 
| -      } | 
| -      return new SimpleIdentifier(getAndAdvance(), | 
| -          isDeclaration: isDeclaration); | 
| +      return _parseSimpleIdentifierUnchecked(isDeclaration: isDeclaration); | 
| } | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
| return _createSyntheticIdentifier(isDeclaration: isDeclaration); | 
| @@ -3444,7 +3371,7 @@ class Parser { | 
| */ | 
| Statement parseStatement2() { | 
| List<Label> labels = null; | 
| -    while (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) { | 
| +    while (_matchesIdentifier() && _currentToken.next.type == TokenType.COLON) { | 
| Label label = parseLabel(isDeclaration: true); | 
| if (labels == null) { | 
| labels = <Label>[label]; | 
| @@ -3477,31 +3404,19 @@ class Parser { | 
| *       | SINGLE_LINE_STRING+ | 
| */ | 
| StringLiteral parseStringLiteral() { | 
| -    List<StringLiteral> strings = new List<StringLiteral>(); | 
| -    while (_matches(TokenType.STRING)) { | 
| -      Token string = getAndAdvance(); | 
| -      if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | 
| -          _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) { | 
| -        strings.add(_parseStringInterpolation(string)); | 
| -      } else { | 
| -        strings.add(new SimpleStringLiteral( | 
| -            string, _computeStringValue(string.lexeme, true, true))); | 
| -      } | 
| -    } | 
| -    if (strings.length < 1) { | 
| -      _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL); | 
| -      return _createSyntheticStringLiteral(); | 
| -    } else if (strings.length == 1) { | 
| -      return strings[0]; | 
| -    } else { | 
| -      return new AdjacentStrings(strings); | 
| +    if (_matches(TokenType.STRING)) { | 
| +      return _parseStringLiteralUnchecked(); | 
| } | 
| +    _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL); | 
| +    return _createSyntheticStringLiteral(); | 
| } | 
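For reference, the adjacent-string form that produces an AdjacentStrings node:

void main() {
  String greeting = 'Hello, '
      'world'; // two STRING tokens, one AdjacentStrings node
  print(greeting); // Hello, world
}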
|  | 
| /** | 
| * Parse a list of type arguments. Return the type argument list that was | 
| * parsed. | 
| * | 
| +   * This method assumes that the current token matches `TokenType.LT`. | 
| +   * | 
| *     typeArguments ::= | 
| *         '<' typeList '>' | 
| * | 
| @@ -3509,9 +3424,8 @@ class Parser { | 
| *         type (',' type)* | 
| */ | 
| TypeArgumentList parseTypeArgumentList() { | 
| -    Token leftBracket = _expect(TokenType.LT); | 
| -    List<TypeName> arguments = new List<TypeName>(); | 
| -    arguments.add(parseTypeName()); | 
| +    Token leftBracket = getAndAdvance(); | 
| +    List<TypeName> arguments = <TypeName>[parseTypeName()]; | 
| while (_optional(TokenType.COMMA)) { | 
| arguments.add(parseTypeName()); | 
| } | 
| @@ -3531,8 +3445,8 @@ class Parser { | 
| // type to replace the real type name. | 
| // TODO(jmesserly): this feels like a big hammer. Can we restrict it to | 
| // only work inside generic methods? | 
| -    TypeName typeComment = _parseOptionalTypeNameComment(); | 
| -    return typeComment ?? realType; | 
| +    TypeName typeFromComment = _parseOptionalTypeNameComment(); | 
| +    return typeFromComment ?? realType; | 
| } | 
|  | 
| /** | 
| @@ -3558,13 +3472,14 @@ class Parser { | 
| * Parse a list of type parameters. Return the list of type parameters that | 
| * were parsed. | 
| * | 
| +   * This method assumes that the current token matches `TokenType.LT`. | 
| +   * | 
| *     typeParameterList ::= | 
| *         '<' typeParameter (',' typeParameter)* '>' | 
| */ | 
| TypeParameterList parseTypeParameterList() { | 
| -    Token leftBracket = _expect(TokenType.LT); | 
| -    List<TypeParameter> typeParameters = new List<TypeParameter>(); | 
| -    typeParameters.add(parseTypeParameter()); | 
| +    Token leftBracket = getAndAdvance(); | 
| +    List<TypeParameter> typeParameters = <TypeParameter>[parseTypeParameter()]; | 
| while (_optional(TokenType.COMMA)) { | 
| typeParameters.add(parseTypeParameter()); | 
| } | 
| @@ -3575,17 +3490,18 @@ class Parser { | 
| /** | 
| * Parse a with clause. Return the with clause that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.WITH`. | 
| +   * | 
| *     withClause ::= | 
| *         'with' typeName (',' typeName)* | 
| */ | 
| WithClause parseWithClause() { | 
| -    Token with2 = _expectKeyword(Keyword.WITH); | 
| -    List<TypeName> types = new List<TypeName>(); | 
| -    types.add(parseTypeName()); | 
| +    Token withKeyword = getAndAdvance(); | 
| +    List<TypeName> types = <TypeName>[parseTypeName()]; | 
| while (_optional(TokenType.COMMA)) { | 
| types.add(parseTypeName()); | 
| } | 
| -    return new WithClause(with2, types); | 
| +    return new WithClause(withKeyword, types); | 
| } | 
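For reference, the clause forms handled by parseExtendsClause, parseWithClause, and parseImplementsClause:

class A {}
class M {}
abstract class I {}

class B extends A with M implements I {}

void main() {
  B b = new B();
  print(b is A && b is M && b is I); // true
}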
|  | 
| /** | 
| @@ -3669,30 +3585,33 @@ class Parser { | 
| * member. | 
| */ | 
| bool _couldBeStartOfCompilationUnitMember() { | 
| -    if ((_matchesKeyword(Keyword.IMPORT) || | 
| -            _matchesKeyword(Keyword.EXPORT) || | 
| -            _matchesKeyword(Keyword.LIBRARY) || | 
| -            _matchesKeyword(Keyword.PART)) && | 
| -        !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -        !_tokenMatches(_peek(), TokenType.LT)) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    Token next = _currentToken.next; | 
| +    TokenType nextType = next.type; | 
| +    if ((keyword == Keyword.IMPORT || | 
| +            keyword == Keyword.EXPORT || | 
| +            keyword == Keyword.LIBRARY || | 
| +            keyword == Keyword.PART) && | 
| +        nextType != TokenType.PERIOD && | 
| +        nextType != TokenType.LT) { | 
| // This looks like the start of a directive | 
| return true; | 
| -    } else if (_matchesKeyword(Keyword.CLASS)) { | 
| +    } else if (keyword == Keyword.CLASS) { | 
| // This looks like the start of a class definition | 
| return true; | 
| -    } else if (_matchesKeyword(Keyword.TYPEDEF) && | 
| -        !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -        !_tokenMatches(_peek(), TokenType.LT)) { | 
| +    } else if (keyword == Keyword.TYPEDEF && | 
| +        nextType != TokenType.PERIOD && | 
| +        nextType != TokenType.LT) { | 
| // This looks like the start of a typedef | 
| return true; | 
| -    } else if (_matchesKeyword(Keyword.VOID) || | 
| -        ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | 
| -            _tokenMatchesIdentifier(_peek())) || | 
| -        (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek()))) { | 
| +    } else if (keyword == Keyword.VOID || | 
| +        ((keyword == Keyword.GET || keyword == Keyword.SET) && | 
| +            _tokenMatchesIdentifier(next)) || | 
| +        (keyword == Keyword.OPERATOR && _isOperator(next))) { | 
| // This looks like the start of a function | 
| return true; | 
| } else if (_matchesIdentifier()) { | 
| -      if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +      if (nextType == TokenType.OPEN_PAREN) { | 
| // This looks like the start of a function | 
| return true; | 
| } | 
| @@ -3700,9 +3619,10 @@ class Parser { | 
| if (token == null) { | 
| return false; | 
| } | 
| -      if (_matchesKeyword(Keyword.GET) || | 
| -          _matchesKeyword(Keyword.SET) || | 
| -          (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) || | 
| +      // TODO(brianwilkerson) This looks wrong; should we be checking 'token'? | 
| +      if (keyword == Keyword.GET || | 
| +          keyword == Keyword.SET || | 
| +          (keyword == Keyword.OPERATOR && _isOperator(next)) || | 
| _matchesIdentifier()) { | 
| return true; | 
| } | 
| @@ -3872,7 +3792,7 @@ class Parser { | 
| * should be treated as code blocks. | 
| */ | 
| List<List<int>> _getCodeBlockRanges(String comment) { | 
| -    List<List<int>> ranges = new List<List<int>>(); | 
| +    List<List<int>> ranges = <List<int>>[]; | 
| int length = comment.length; | 
| if (length < 3) { | 
| return ranges; | 
| @@ -4015,7 +3935,8 @@ class Parser { | 
| * function declaration. | 
| */ | 
| bool _isFunctionDeclaration() { | 
| -    if (_matchesKeyword(Keyword.VOID)) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.VOID) { | 
| return true; | 
| } | 
| Token afterReturnType = _skipTypeName(_currentToken); | 
| @@ -4038,20 +3959,20 @@ class Parser { | 
| } | 
| // It's possible that we have found a getter. While this isn't valid at this | 
| // point we test for it in order to recover better. | 
| -    if (_matchesKeyword(Keyword.GET)) { | 
| +    if (keyword == Keyword.GET) { | 
| Token afterName = _skipSimpleIdentifier(_currentToken.next); | 
| if (afterName == null) { | 
| return false; | 
| } | 
| -      return _tokenMatches(afterName, TokenType.FUNCTION) || | 
| -          _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET); | 
| +      TokenType type = afterName.type; | 
| +      return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET; | 
| } else if (_tokenMatchesKeyword(afterReturnType, Keyword.GET)) { | 
| Token afterName = _skipSimpleIdentifier(afterReturnType.next); | 
| if (afterName == null) { | 
| return false; | 
| } | 
| -      return _tokenMatches(afterName, TokenType.FUNCTION) || | 
| -          _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET); | 
| +      TokenType type = afterName.type; | 
| +      return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET; | 
| } | 
| return false; | 
| } | 
| @@ -4114,12 +4035,13 @@ class Parser { | 
| *         identifier ('=' expression)? | 
| */ | 
| bool _isInitializedVariableDeclaration() { | 
| -    if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.VAR)) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.FINAL || keyword == Keyword.VAR) { | 
| // An expression cannot start with a keyword other than 'const', | 
| // 'rethrow', or 'throw'. | 
| return true; | 
| } | 
| -    if (_matchesKeyword(Keyword.CONST)) { | 
| +    if (keyword == Keyword.CONST) { | 
| // Look to see whether we might be at the start of a list or map literal, | 
| // otherwise this should be the start of a variable declaration. | 
| return !_peek().matchesAny(const <TokenType>[ | 
| @@ -4156,7 +4078,7 @@ class Parser { | 
| if (type == TokenType.EQ || | 
| type == TokenType.COMMA || | 
| type == TokenType.SEMICOLON || | 
| -        _tokenMatchesKeyword(token, Keyword.IN)) { | 
| +        token.keyword == Keyword.IN) { | 
| return true; | 
| } | 
| // It is OK to parse as a variable declaration in these cases: | 
| @@ -4178,6 +4100,8 @@ class Parser { | 
| } | 
|  | 
| bool _isLikelyArgumentList() { | 
| +    // Try to reduce the amount of lookahead required here before enabling | 
| +    // generic methods. | 
| if (_matches(TokenType.OPEN_PAREN)) { | 
| return true; | 
| } | 
| @@ -4228,7 +4152,7 @@ class Parser { | 
| if (!startToken.isOperator) { | 
| return false; | 
| } | 
| -    // Token "=" means that it is actually field initializer. | 
| +    // Token "=" means that it is actually a field initializer. | 
| if (startToken.type == TokenType.EQ) { | 
| return false; | 
| } | 
| @@ -4372,7 +4296,7 @@ class Parser { | 
| * should not be invoked with an argument value of [TokenType.GT]. | 
| */ | 
| bool _optional(TokenType type) { | 
| -    if (_matches(type)) { | 
| +    if (_currentToken.type == type) { | 
| _advance(); | 
| return true; | 
| } | 
| @@ -4389,28 +4313,45 @@ class Parser { | 
| */ | 
| Expression _parseAdditiveExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| _currentToken.next.type.isAdditiveOperator) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| expression = _parseMultiplicativeExpression(); | 
| } | 
| while (_currentToken.type.isAdditiveOperator) { | 
| -      Token operator = getAndAdvance(); | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseMultiplicativeExpression()); | 
| +          expression, getAndAdvance(), _parseMultiplicativeExpression()); | 
| } | 
| return expression; | 
| } | 
|  | 
| /** | 
| +   * Parse an argument list when we need to check for an open paren and recover | 
| +   * when there isn't one. Return the argument list that was parsed. | 
| +   */ | 
| +  ArgumentList _parseArgumentListChecked() { | 
| +    if (_matches(TokenType.OPEN_PAREN)) { | 
| +      return parseArgumentList(); | 
| +    } | 
| +    _reportErrorForCurrentToken( | 
| +        ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]); | 
| +    // Recovery: Look to see whether there is a close paren that isn't matched | 
| +    // to an open paren and if so parse the list of arguments as normal. | 
| +    return new ArgumentList(_createSyntheticToken(TokenType.OPEN_PAREN), null, | 
| +        _createSyntheticToken(TokenType.CLOSE_PAREN)); | 
| +  } | 
| + | 
| +  /** | 
| * Parse an assert statement. Return the assert statement. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.ASSERT`. | 
| +   * | 
| *     assertStatement ::= | 
| *         'assert' '(' conditionalExpression ')' ';' | 
| */ | 
| AssertStatement _parseAssertStatement() { | 
| -    Token keyword = _expectKeyword(Keyword.ASSERT); | 
| +    Token keyword = getAndAdvance(); | 
| Token leftParen = _expect(TokenType.OPEN_PAREN); | 
| Expression expression = parseExpression2(); | 
| if (expression is AssignmentExpression) { | 
| @@ -4454,6 +4395,17 @@ class Parser { | 
| new SuperExpression(getAndAdvance()), false, | 
| allowConditional: false); | 
| } | 
| +    return _parseAssignableExpressionNotStartingWithSuper(primaryAllowed); | 
| +  } | 
| + | 
| +  /** | 
| +   * Parse an assignable expression given that the current token is not 'super'. | 
| +   * The [primaryAllowed] is `true` if the expression is allowed to be a primary | 
| +   * without any assignable selector. Return the assignable expression that was | 
| +   * parsed. | 
| +   */ | 
| +  Expression _parseAssignableExpressionNotStartingWithSuper( | 
| +      bool primaryAllowed) { | 
| // | 
| // A primary expression can start with an identifier. We resolve the | 
| // ambiguity by determining whether the primary consists of anything other | 
| @@ -4523,7 +4475,8 @@ class Parser { | 
| */ | 
| Expression _parseAssignableSelector(Expression prefix, bool optional, | 
| {bool allowConditional: true}) { | 
| -    if (_matches(TokenType.OPEN_SQUARE_BRACKET)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.OPEN_SQUARE_BRACKET) { | 
| Token leftBracket = getAndAdvance(); | 
| bool wasInInitializer = _inInitializer; | 
| _inInitializer = false; | 
| @@ -4535,27 +4488,32 @@ class Parser { | 
| } finally { | 
| _inInitializer = wasInInitializer; | 
| } | 
| -    } else if (_matches(TokenType.PERIOD) || | 
| -        _matches(TokenType.QUESTION_PERIOD)) { | 
| -      if (_matches(TokenType.QUESTION_PERIOD) && !allowConditional) { | 
| -        _reportErrorForCurrentToken( | 
| -            ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [_currentToken.lexeme]); | 
| -      } | 
| -      Token operator = getAndAdvance(); | 
| -      return new PropertyAccess(prefix, operator, parseSimpleIdentifier()); | 
| } else { | 
| -      if (!optional) { | 
| -        // Report the missing selector. | 
| -        _reportErrorForCurrentToken( | 
| -            ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR); | 
| +      bool isQuestionPeriod = type == TokenType.QUESTION_PERIOD; | 
| +      if (type == TokenType.PERIOD || isQuestionPeriod) { | 
| +        if (isQuestionPeriod && !allowConditional) { | 
| +          _reportErrorForCurrentToken( | 
| +              ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, | 
| +              [_currentToken.lexeme]); | 
| +        } | 
| +        Token operator = getAndAdvance(); | 
| +        return new PropertyAccess(prefix, operator, parseSimpleIdentifier()); | 
| +      } else { | 
| +        if (!optional) { | 
| +          // Report the missing selector. | 
| +          _reportErrorForCurrentToken( | 
| +              ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR); | 
| +        } | 
| +        return prefix; | 
| } | 
| -      return prefix; | 
| } | 
| } | 
|  | 
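| For illustration (not part of the change), the selector forms handled by | 
| _parseAssignableSelector are an index expression, a property access, and a | 
| null-aware property access; the `?.` form is the one reported as | 
| INVALID_OPERATOR_FOR_SUPER when `allowConditional` is false: | 
|  | 
|     void main() { | 
|       var list = [1, 2, 3]; | 
|       list[0] = 4;        // '[' expression ']' selector | 
|       print(list.length); // '.' identifier selector | 
|       list?.clear();      // '?.' identifier selector | 
|     } | 
|  | 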
| /** | 
| * Parse an await expression. Return the await expression that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `_AWAIT`. | 
| +   * | 
| *     awaitExpression ::= | 
| *         'await' unaryExpression | 
| */ | 
| @@ -4575,16 +4533,15 @@ class Parser { | 
| */ | 
| Expression _parseBitwiseAndExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| -        _tokenMatches(_peek(), TokenType.AMPERSAND)) { | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| +        _currentToken.next.type == TokenType.AMPERSAND) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| expression = _parseShiftExpression(); | 
| } | 
| -    while (_matches(TokenType.AMPERSAND)) { | 
| -      Token operator = getAndAdvance(); | 
| -      expression = | 
| -          new BinaryExpression(expression, operator, _parseShiftExpression()); | 
| +    while (_currentToken.type == TokenType.AMPERSAND) { | 
| +      expression = new BinaryExpression( | 
| +          expression, getAndAdvance(), _parseShiftExpression()); | 
| } | 
| return expression; | 
| } | 
| @@ -4599,31 +4556,52 @@ class Parser { | 
| */ | 
| Expression _parseBitwiseXorExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| -        _tokenMatches(_peek(), TokenType.CARET)) { | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| +        _currentToken.next.type == TokenType.CARET) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| expression = _parseBitwiseAndExpression(); | 
| } | 
| -    while (_matches(TokenType.CARET)) { | 
| -      Token operator = getAndAdvance(); | 
| +    while (_currentToken.type == TokenType.CARET) { | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseBitwiseAndExpression()); | 
| +          expression, getAndAdvance(), _parseBitwiseAndExpression()); | 
| } | 
| return expression; | 
| } | 
|  | 
| /** | 
| +   * Parse a block when we need to check for an open curly brace and recover | 
| +   * when there isn't one. Return the block that was parsed. | 
| +   * | 
| +   *     block ::= | 
| +   *         '{' statements '}' | 
| +   */ | 
| +  Block _parseBlockChecked() { | 
| +    if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| +      return parseBlock(); | 
| +    } | 
| +    // TODO(brianwilkerson) Improve the error message. | 
| +    _reportErrorForCurrentToken( | 
| +        ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_CURLY_BRACKET.lexeme]); | 
| +    // Recovery: Check for an unmatched closing curly bracket and parse | 
| +    // statements until it is reached. | 
| +    return new Block(_createSyntheticToken(TokenType.OPEN_CURLY_BRACKET), null, | 
| +        _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET)); | 
| +  } | 
| + | 
| +  /** | 
| * Parse a break statement. Return the break statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.BREAK`. | 
| +   * | 
| *     breakStatement ::= | 
| *         'break' identifier? ';' | 
| */ | 
| Statement _parseBreakStatement() { | 
| -    Token breakKeyword = _expectKeyword(Keyword.BREAK); | 
| +    Token breakKeyword = getAndAdvance(); | 
| SimpleIdentifier label = null; | 
| if (_matchesIdentifier()) { | 
| -      label = parseSimpleIdentifier(); | 
| +      label = _parseSimpleIdentifierUnchecked(); | 
| } | 
| if (!_inLoop && !_inSwitch && label == null) { | 
| _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword); | 
| @@ -4636,6 +4614,9 @@ class Parser { | 
| * Parse a cascade section. Return the expression representing the cascaded | 
| * method invocation. | 
| * | 
| +   * This method assumes that the current token matches | 
| +   * `TokenType.PERIOD_PERIOD`. | 
| +   * | 
| *     cascadeSection ::= | 
| *         '..' (cascadeSelector typeArguments? arguments*) | 
| *         (assignableSelector typeArguments? arguments*)* cascadeAssignment? | 
| @@ -4648,11 +4629,11 @@ class Parser { | 
| *         assignmentOperator expressionWithoutCascade | 
| */ | 
| Expression _parseCascadeSection() { | 
| -    Token period = _expect(TokenType.PERIOD_PERIOD); | 
| +    Token period = getAndAdvance(); | 
| Expression expression = null; | 
| SimpleIdentifier functionName = null; | 
| if (_matchesIdentifier()) { | 
| -      functionName = parseSimpleIdentifier(); | 
| +      functionName = _parseSimpleIdentifierUnchecked(); | 
| } else if (_currentToken.type == TokenType.OPEN_SQUARE_BRACKET) { | 
| Token leftBracket = getAndAdvance(); | 
| bool wasInInitializer = _inInitializer; | 
| @@ -4674,7 +4655,7 @@ class Parser { | 
| assert((expression == null && functionName != null) || | 
| (expression != null && functionName == null)); | 
| if (_isLikelyArgumentList()) { | 
| -      while (_isLikelyArgumentList()) { | 
| +      do { | 
| TypeArgumentList typeArguments = _parseOptionalTypeArguments(); | 
| if (functionName != null) { | 
| expression = new MethodInvocation(expression, period, functionName, | 
| @@ -4689,7 +4670,7 @@ class Parser { | 
| expression = new FunctionExpressionInvocation( | 
| expression, typeArguments, parseArgumentList()); | 
| } | 
| -      } | 
| +      } while (_isLikelyArgumentList()); | 
| } else if (functionName != null) { | 
| expression = new PropertyAccess(expression, period, functionName); | 
| period = null; | 
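|  | 
| For illustration (not part of the change), a cascade section is `..` followed | 
| by a selector and then any number of argument lists, which is why the rewrite | 
| above switches to a do-while once the first argument list has been seen: | 
|  | 
|     void main() { | 
|       var buffer = new StringBuffer() | 
|         ..write('a')                 // cascadeSelector plus an argument list | 
|         ..write('b') | 
|         ..writeAll(['c', 'd'], ','); // another selector and argument list | 
|       print(buffer.toString()); | 
|     } | 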
| @@ -4734,31 +4715,33 @@ class Parser { | 
| * keyword 'abstract', or `null` if the keyword was not given. Return the | 
| * class declaration that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.CLASS`. | 
| +   * | 
| *     classDeclaration ::= | 
| *         metadata 'abstract'? 'class' name typeParameterList? (extendsClause withClause?)? implementsClause? '{' classMembers '}' | | 
| *         metadata 'abstract'? 'class' mixinApplicationClass | 
| */ | 
| CompilationUnitMember _parseClassDeclaration( | 
| CommentAndMetadata commentAndMetadata, Token abstractKeyword) { | 
| -    Token keyword = _expectKeyword(Keyword.CLASS); | 
| -    if (_matchesIdentifier()) { | 
| -      Token next = _peek(); | 
| -      if (_tokenMatches(next, TokenType.LT)) { | 
| -        next = _skipTypeParameterList(next); | 
| -        if (next != null && _tokenMatches(next, TokenType.EQ)) { | 
| -          return _parseClassTypeAlias( | 
| -              commentAndMetadata, abstractKeyword, keyword); | 
| -        } | 
| -      } else if (_tokenMatches(next, TokenType.EQ)) { | 
| -        return _parseClassTypeAlias( | 
| -            commentAndMetadata, abstractKeyword, keyword); | 
| -      } | 
| -    } | 
| +    // | 
| +    // Parse the name and type parameters. | 
| +    // | 
| +    Token keyword = getAndAdvance(); | 
| SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); | 
| String className = name.name; | 
| TypeParameterList typeParameters = null; | 
| -    if (_matches(TokenType.LT)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.LT) { | 
| typeParameters = parseTypeParameterList(); | 
| +      type = _currentToken.type; | 
| +    } | 
| +    // | 
| +    // Check to see whether this might be a class type alias rather than a class | 
| +    // declaration. | 
| +    // | 
| +    if (type == TokenType.EQ) { | 
| +      return _parseClassTypeAliasAfterName( | 
| +          commentAndMetadata, abstractKeyword, keyword, name, typeParameters); | 
| } | 
| // | 
| // Parse the clauses. The parser accepts clauses in any order, but will | 
| @@ -4770,7 +4753,8 @@ class Parser { | 
| ImplementsClause implementsClause = null; | 
| bool foundClause = true; | 
| while (foundClause) { | 
| -      if (_matchesKeyword(Keyword.EXTENDS)) { | 
| +      Keyword keyword = _currentToken.keyword; | 
| +      if (keyword == Keyword.EXTENDS) { | 
| if (extendsClause == null) { | 
| extendsClause = parseExtendsClause(); | 
| if (withClause != null) { | 
| @@ -4785,7 +4769,7 @@ class Parser { | 
| extendsClause.extendsKeyword); | 
| parseExtendsClause(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.WITH)) { | 
| +      } else if (keyword == Keyword.WITH) { | 
| if (withClause == null) { | 
| withClause = parseWithClause(); | 
| if (implementsClause != null) { | 
| @@ -4799,7 +4783,7 @@ class Parser { | 
| // TODO(brianwilkerson) Should we merge the list of applied mixins | 
| // into a single list? | 
| } | 
| -      } else if (_matchesKeyword(Keyword.IMPLEMENTS)) { | 
| +      } else if (keyword == Keyword.IMPLEMENTS) { | 
| if (implementsClause == null) { | 
| implementsClause = parseImplementsClause(); | 
| } else { | 
| @@ -4831,10 +4815,12 @@ class Parser { | 
| List<ClassMember> members = null; | 
| Token rightBracket = null; | 
| if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| -      leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | 
| +      leftBracket = getAndAdvance(); | 
| members = _parseClassMembers(className, _getEndToken(leftBracket)); | 
| rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | 
| } else { | 
| +      // Recovery: Check for an unmatched closing curly bracket and parse | 
| +      // members until it is reached. | 
| leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET); | 
| rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET); | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY); | 
| @@ -4866,14 +4852,15 @@ class Parser { | 
| *         (metadata memberDefinition)* | 
| */ | 
| List<ClassMember> _parseClassMembers(String className, Token closingBracket) { | 
| -    List<ClassMember> members = new List<ClassMember>(); | 
| +    List<ClassMember> members = <ClassMember>[]; | 
| Token memberStart = _currentToken; | 
| -    while (!_matches(TokenType.EOF) && | 
| -        !_matches(TokenType.CLOSE_CURLY_BRACKET) && | 
| +    TokenType type = _currentToken.type; | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    while (type != TokenType.EOF && | 
| +        type != TokenType.CLOSE_CURLY_BRACKET && | 
| (closingBracket != null || | 
| -            (!_matchesKeyword(Keyword.CLASS) && | 
| -                !_matchesKeyword(Keyword.TYPEDEF)))) { | 
| -      if (_matches(TokenType.SEMICOLON)) { | 
| +            (keyword != Keyword.CLASS && keyword != Keyword.TYPEDEF))) { | 
| +      if (type == TokenType.SEMICOLON) { | 
| _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | 
| [_currentToken.lexeme]); | 
| _advance(); | 
| @@ -4889,6 +4876,8 @@ class Parser { | 
| _advance(); | 
| } | 
| memberStart = _currentToken; | 
| +      type = _currentToken.type; | 
| +      keyword = _currentToken.keyword; | 
| } | 
| return members; | 
| } | 
| @@ -4899,6 +4888,8 @@ class Parser { | 
| * the 'abstract' keyword. The [classKeyword] is the token representing the | 
| * 'class' keyword. Return the class type alias that was parsed. | 
| * | 
| +   * This method assumes that the current token matches an identifier. | 
| +   * | 
| *     classTypeAlias ::= | 
| *         identifier typeParameters? '=' 'abstract'? mixinApplication | 
| * | 
| @@ -4907,11 +4898,36 @@ class Parser { | 
| */ | 
| ClassTypeAlias _parseClassTypeAlias(CommentAndMetadata commentAndMetadata, | 
| Token abstractKeyword, Token classKeyword) { | 
| -    SimpleIdentifier className = parseSimpleIdentifier(isDeclaration: true); | 
| +    SimpleIdentifier className = | 
| +        _parseSimpleIdentifierUnchecked(isDeclaration: true); | 
| TypeParameterList typeParameters = null; | 
| if (_matches(TokenType.LT)) { | 
| typeParameters = parseTypeParameterList(); | 
| } | 
| +    return _parseClassTypeAliasAfterName(commentAndMetadata, abstractKeyword, | 
| +        classKeyword, className, typeParameters); | 
| +  } | 
| + | 
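| For illustration (not part of the change), the `=` that | 
| _parseClassTypeAliasAfterName expects after the name and type parameters is | 
| what distinguishes a class type alias from an ordinary class declaration: | 
|  | 
|     class S {} | 
|     class M {} | 
|     class C = S with M;          // classTypeAlias | 
|     class D extends S with M {}  // ordinary classDeclaration | 
|  | 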
| +  /** | 
| +   * Parse a class type alias. The [commentAndMetadata] is the metadata to be | 
| +   * associated with the member. The [abstractKeyword] is the token representing | 
| +   * the 'abstract' keyword. The [classKeyword] is the token representing the | 
| +   * 'class' keyword. The [className] is the name of the alias, and the | 
| +   * [typeParameters] are the type parameters following the name. Return the | 
| +   * class type alias that was parsed. | 
| +   * | 
| +   *     classTypeAlias ::= | 
| +   *         identifier typeParameters? '=' 'abstract'? mixinApplication | 
| +   * | 
| +   *     mixinApplication ::= | 
| +   *         type withClause implementsClause? ';' | 
| +   */ | 
| +  ClassTypeAlias _parseClassTypeAliasAfterName( | 
| +      CommentAndMetadata commentAndMetadata, | 
| +      Token abstractKeyword, | 
| +      Token classKeyword, | 
| +      SimpleIdentifier className, | 
| +      TypeParameterList typeParameters) { | 
| Token equals = _expect(TokenType.EQ); | 
| TypeName superclass = parseTypeName(); | 
| WithClause withClause = null; | 
| @@ -4957,19 +4973,20 @@ class Parser { | 
|  | 
| /** | 
| * Parse a list of combinators in a directive. Return the combinators that | 
| -   * were parsed. | 
| +   * were parsed, or `null` if there are no combinators. | 
| * | 
| *     combinator ::= | 
| *         'show' identifier (',' identifier)* | 
| *       | 'hide' identifier (',' identifier)* | 
| */ | 
| List<Combinator> _parseCombinators() { | 
| -    List<Combinator> combinators = new List<Combinator>(); | 
| +    List<Combinator> combinators = null; | 
| while (true) { | 
| Combinator combinator = parseCombinator(); | 
| if (combinator == null) { | 
| break; | 
| } | 
| +      combinators ??= <Combinator>[]; | 
| combinators.add(combinator); | 
| } | 
| return combinators; | 
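|  | 
| For illustration (not part of the change), each `show` or `hide` clause below | 
| is one combinator, so a directive with none now yields `null` rather than an | 
| empty list: | 
|  | 
|     import 'dart:math' show max, min; // one 'show' combinator, two names | 
|     export 'dart:async' hide Timer;   // one 'hide' combinator | 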
| @@ -4986,18 +5003,20 @@ class Parser { | 
| *         annotation* | 
| */ | 
| CommentAndMetadata _parseCommentAndMetadata() { | 
| -    Comment comment = _parseDocumentationComment(); | 
| +    // TODO(brianwilkerson) Consider making the creation of documentation | 
| +    // comments be lazy. | 
| +    List<DocumentationCommentToken> tokens = _parseDocumentationCommentTokens(); | 
| List<Annotation> metadata = null; | 
| while (_matches(TokenType.AT)) { | 
| -      metadata ??= new List<Annotation>(); | 
| +      metadata ??= <Annotation>[]; | 
| metadata.add(parseAnnotation()); | 
| -      Comment optionalComment = _parseDocumentationComment(); | 
| -      if (optionalComment != null) { | 
| -        comment = optionalComment; | 
| +      List<DocumentationCommentToken> optionalTokens = | 
| +          _parseDocumentationCommentTokens(); | 
| +      if (optionalTokens != null) { | 
| +        tokens = optionalTokens; | 
| } | 
| } | 
| -    metadata ??= const <Annotation>[]; | 
| -    return new CommentAndMetadata(comment, metadata); | 
| +    return new CommentAndMetadata(_parseDocumentationComment(tokens), metadata); | 
| } | 
|  | 
| /** | 
| @@ -5051,16 +5070,19 @@ class Parser { | 
| return null; | 
| } | 
| return new CommentReference(newKeyword, identifier); | 
| -      } else if (_tokenMatchesKeyword(firstToken, Keyword.THIS) || | 
| -          _tokenMatchesKeyword(firstToken, Keyword.NULL) || | 
| -          _tokenMatchesKeyword(firstToken, Keyword.TRUE) || | 
| -          _tokenMatchesKeyword(firstToken, Keyword.FALSE)) { | 
| -        // TODO(brianwilkerson) If we want to support this we will need to | 
| -        // extend the definition of CommentReference to take an expression | 
| -        // rather than an identifier. For now we just ignore it to reduce the | 
| -        // number of errors produced, but that's probably not a valid long term | 
| -        // approach. | 
| -        return null; | 
| +      } else { | 
| +        Keyword keyword = firstToken.keyword; | 
| +        if (keyword == Keyword.THIS || | 
| +            keyword == Keyword.NULL || | 
| +            keyword == Keyword.TRUE || | 
| +            keyword == Keyword.FALSE) { | 
| +          // TODO(brianwilkerson) If we want to support this we will need to | 
| +          // extend the definition of CommentReference to take an expression | 
| +          // rather than an identifier. For now we just ignore it to reduce the | 
| +          // number of errors produced, but that's probably not a valid long term | 
| +          // approach. | 
| +          return null; | 
| +        } | 
| } | 
| } catch (exception) { | 
| // Ignored because we assume that it wasn't a real comment reference. | 
| @@ -5082,7 +5104,7 @@ class Parser { | 
| */ | 
| List<CommentReference> _parseCommentReferences( | 
| List<DocumentationCommentToken> tokens) { | 
| -    List<CommentReference> references = new List<CommentReference>(); | 
| +    List<CommentReference> references = <CommentReference>[]; | 
| for (DocumentationCommentToken token in tokens) { | 
| String comment = token.lexeme; | 
| comment = _removeCodeBlocksGitHub(comment); | 
| @@ -5158,32 +5180,41 @@ class Parser { | 
| CompilationUnitMember _parseCompilationUnitMember( | 
| CommentAndMetadata commentAndMetadata) { | 
| Modifiers modifiers = _parseModifiers(); | 
| -    if (_matchesKeyword(Keyword.CLASS)) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.CLASS) { | 
| return _parseClassDeclaration( | 
| commentAndMetadata, _validateModifiersForClass(modifiers)); | 
| -    } else if (_matchesKeyword(Keyword.TYPEDEF) && | 
| -        !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -        !_tokenMatches(_peek(), TokenType.LT) && | 
| -        !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +    } | 
| +    Token next = _peek(); | 
| +    TokenType nextType = next.type; | 
| +    if (keyword == Keyword.TYPEDEF && | 
| +        nextType != TokenType.PERIOD && | 
| +        nextType != TokenType.LT && | 
| +        nextType != TokenType.OPEN_PAREN) { | 
| _validateModifiersForTypedef(modifiers); | 
| return _parseTypeAlias(commentAndMetadata); | 
| -    } else if (_matchesKeyword(Keyword.ENUM)) { | 
| +    } else if (keyword == Keyword.ENUM) { | 
| _validateModifiersForEnum(modifiers); | 
| return _parseEnumDeclaration(commentAndMetadata); | 
| -    } | 
| -    if (_matchesKeyword(Keyword.VOID)) { | 
| -      TypeName returnType = parseReturnType(); | 
| -      if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | 
| -          _tokenMatchesIdentifier(_peek())) { | 
| +    } else if (keyword == Keyword.VOID) { | 
| +      TypeName returnType = | 
| +          new TypeName(new SimpleIdentifier(getAndAdvance()), null); | 
| +      keyword = _currentToken.keyword; | 
| +      next = _peek(); | 
| +      if ((keyword == Keyword.GET || keyword == Keyword.SET) && | 
| +          _tokenMatchesIdentifier(next)) { | 
| _validateModifiersForTopLevelFunction(modifiers); | 
| return _parseFunctionDeclaration( | 
| commentAndMetadata, modifiers.externalKeyword, returnType); | 
| -      } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | 
| +      } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { | 
| _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); | 
| -        return _convertToFunctionDeclaration(_parseOperator( | 
| -            commentAndMetadata, modifiers.externalKeyword, returnType)); | 
| +        return _convertToFunctionDeclaration(_parseOperatorAfterKeyword( | 
| +            commentAndMetadata, | 
| +            modifiers.externalKeyword, | 
| +            returnType, | 
| +            getAndAdvance())); | 
| } else if (_matchesIdentifier() && | 
| -          _peek().matchesAny(const <TokenType>[ | 
| +          next.matchesAny(const <TokenType>[ | 
| TokenType.OPEN_PAREN, | 
| TokenType.OPEN_CURLY_BRACKET, | 
| TokenType.FUNCTION, | 
| @@ -5197,7 +5228,7 @@ class Parser { | 
| // We have found an error of some kind. Try to recover. | 
| // | 
| if (_matchesIdentifier()) { | 
| -          if (_peek().matchesAny(const <TokenType>[ | 
| +          if (next.matchesAny(const <TokenType>[ | 
| TokenType.EQ, | 
| TokenType.COMMA, | 
| TokenType.SEMICOLON | 
| @@ -5218,15 +5249,18 @@ class Parser { | 
| ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); | 
| return null; | 
| } | 
| -    } else if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | 
| -        _tokenMatchesIdentifier(_peek())) { | 
| +    } else if ((keyword == Keyword.GET || keyword == Keyword.SET) && | 
| +        _tokenMatchesIdentifier(next)) { | 
| _validateModifiersForTopLevelFunction(modifiers); | 
| return _parseFunctionDeclaration( | 
| commentAndMetadata, modifiers.externalKeyword, null); | 
| -    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | 
| +    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { | 
| _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); | 
| -      return _convertToFunctionDeclaration( | 
| -          _parseOperator(commentAndMetadata, modifiers.externalKeyword, null)); | 
| +      return _convertToFunctionDeclaration(_parseOperatorAfterKeyword( | 
| +          commentAndMetadata, | 
| +          modifiers.externalKeyword, | 
| +          null, | 
| +          getAndAdvance())); | 
| } else if (!_matchesIdentifier()) { | 
| Token keyword = modifiers.varKeyword; | 
| if (keyword == null) { | 
| @@ -5240,9 +5274,9 @@ class Parser { | 
| // We appear to have found an incomplete top-level variable declaration. | 
| // | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
| -        List<VariableDeclaration> variables = new List<VariableDeclaration>(); | 
| -        variables.add( | 
| -            new VariableDeclaration(_createSyntheticIdentifier(), null, null)); | 
| +        VariableDeclaration variable = | 
| +            new VariableDeclaration(_createSyntheticIdentifier(), null, null); | 
| +        List<VariableDeclaration> variables = <VariableDeclaration>[variable]; | 
| return new TopLevelVariableDeclaration( | 
| commentAndMetadata.comment, | 
| commentAndMetadata.metadata, | 
| @@ -5254,12 +5288,12 @@ class Parser { | 
| } else if (_isPeekGenericTypeParametersAndOpenParen()) { | 
| return _parseFunctionDeclaration( | 
| commentAndMetadata, modifiers.externalKeyword, null); | 
| -    } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +    } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) { | 
| TypeName returnType = _parseOptionalTypeNameComment(); | 
| _validateModifiersForTopLevelFunction(modifiers); | 
| return _parseFunctionDeclaration( | 
| commentAndMetadata, modifiers.externalKeyword, returnType); | 
| -    } else if (_peek().matchesAny(const <TokenType>[ | 
| +    } else if (next.matchesAny(const <TokenType>[ | 
| TokenType.EQ, | 
| TokenType.COMMA, | 
| TokenType.SEMICOLON | 
| @@ -5278,15 +5312,20 @@ class Parser { | 
| _expect(TokenType.SEMICOLON)); | 
| } | 
| TypeName returnType = parseReturnType(); | 
| -    if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | 
| -        _tokenMatchesIdentifier(_peek())) { | 
| +    keyword = _currentToken.keyword; | 
| +    next = _peek(); | 
| +    if ((keyword == Keyword.GET || keyword == Keyword.SET) && | 
| +        _tokenMatchesIdentifier(next)) { | 
| _validateModifiersForTopLevelFunction(modifiers); | 
| return _parseFunctionDeclaration( | 
| commentAndMetadata, modifiers.externalKeyword, returnType); | 
| -    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | 
| +    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) { | 
| _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); | 
| -      return _convertToFunctionDeclaration(_parseOperator( | 
| -          commentAndMetadata, modifiers.externalKeyword, returnType)); | 
| +      return _convertToFunctionDeclaration(_parseOperatorAfterKeyword( | 
| +          commentAndMetadata, | 
| +          modifiers.externalKeyword, | 
| +          returnType, | 
| +          getAndAdvance())); | 
| } else if (_matches(TokenType.AT)) { | 
| return new TopLevelVariableDeclaration( | 
| commentAndMetadata.comment, | 
| @@ -5304,16 +5343,15 @@ class Parser { | 
| } else { | 
| semicolon = _createSyntheticToken(TokenType.SEMICOLON); | 
| } | 
| -      List<VariableDeclaration> variables = new List<VariableDeclaration>(); | 
| -      variables.add( | 
| -          new VariableDeclaration(_createSyntheticIdentifier(), null, null)); | 
| +      VariableDeclaration variable = | 
| +          new VariableDeclaration(_createSyntheticIdentifier(), null, null); | 
| +      List<VariableDeclaration> variables = <VariableDeclaration>[variable]; | 
| return new TopLevelVariableDeclaration( | 
| commentAndMetadata.comment, | 
| commentAndMetadata.metadata, | 
| new VariableDeclarationList(null, null, null, returnType, variables), | 
| semicolon); | 
| -    } | 
| -    if (_peek().matchesAny(const <TokenType>[ | 
| +    } else if (next.matchesAny(const <TokenType>[ | 
| TokenType.OPEN_PAREN, | 
| TokenType.FUNCTION, | 
| TokenType.OPEN_CURLY_BRACKET, | 
| @@ -5334,6 +5372,8 @@ class Parser { | 
| /** | 
| * Parse a configuration in either an import or export directive. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.IF`. | 
| +   * | 
| *     configuration ::= | 
| *         'if' '(' test ')' uri | 
| * | 
| @@ -5344,7 +5384,7 @@ class Parser { | 
| *         identifier ('.' identifier)* | 
| */ | 
| Configuration _parseConfiguration() { | 
| -    Token ifKeyword = _expectKeyword(Keyword.IF); | 
| +    Token ifKeyword = getAndAdvance(); | 
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | 
| DottedName name = _parseDottedName(); | 
| Token equalToken = null; | 
| @@ -5364,12 +5404,13 @@ class Parser { | 
| } | 
|  | 
| /** | 
| -   * Parse a list of configurations. If conditional directives are not | 
| -   * supported, return an empty list without attempting to parse anything. | 
| +   * Parse a list of configurations. Return the configurations that were parsed, | 
| +   * or `null` if there are no configurations. | 
| */ | 
| List<Configuration> _parseConfigurations() { | 
| -    List<Configuration> configurations = <Configuration>[]; | 
| +    List<Configuration> configurations = null; | 
| while (_matchesKeyword(Keyword.IF)) { | 
| +      configurations ??= <Configuration>[]; | 
| configurations.add(_parseConfiguration()); | 
| } | 
| return configurations; | 
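|  | 
| For illustration (the URIs below are hypothetical), each `if (...)` clause of | 
| a conditional import or export is one configuration, and the dotted test name | 
| is what _parseDottedName consumes: | 
|  | 
|     import 'src/stub.dart' | 
|         if (dart.library.io) 'src/io_impl.dart' | 
|         if (dart.library.html) 'src/html_impl.dart'; | 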
| @@ -5378,19 +5419,22 @@ class Parser { | 
| /** | 
| * Parse a const expression. Return the const expression that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.CONST`. | 
| +   * | 
| *     constExpression ::= | 
| *         instanceCreationExpression | 
| *       | listLiteral | 
| *       | mapLiteral | 
| */ | 
| Expression _parseConstExpression() { | 
| -    Token keyword = _expectKeyword(Keyword.CONST); | 
| -    if (_matches(TokenType.LT) || _injectGenericCommentTypeList()) { | 
| +    Token keyword = getAndAdvance(); | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.LT || _injectGenericCommentTypeList()) { | 
| return _parseListOrMapLiteral(keyword); | 
| -    } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) || | 
| -        _matches(TokenType.INDEX)) { | 
| +    } else if (type == TokenType.OPEN_SQUARE_BRACKET || | 
| +        type == TokenType.INDEX) { | 
| return _parseListLiteral(keyword, null); | 
| -    } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| +    } else if (type == TokenType.OPEN_CURLY_BRACKET) { | 
| return _parseMapLiteral(keyword, null); | 
| } | 
| return _parseInstanceCreationExpression(keyword); | 
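|  | 
| For illustration (not part of the change), the token after `const` determines | 
| which branch above is taken: | 
|  | 
|     void main() { | 
|       var list = const [1, 2];           // listLiteral | 
|       var typed = const <int>[3];        // '<' starts typeArguments | 
|       var map = const {'x': 1};          // mapLiteral | 
|       var day = const Duration(days: 1); // instanceCreationExpression | 
|       print([list, typed, map, day]); | 
|     } | 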
| @@ -5410,26 +5454,28 @@ class Parser { | 
| List<ConstructorInitializer> initializers = null; | 
| if (_matches(TokenType.COLON)) { | 
| separator = getAndAdvance(); | 
| -      initializers = new List<ConstructorInitializer>(); | 
| +      initializers = <ConstructorInitializer>[]; | 
| do { | 
| -        if (_matchesKeyword(Keyword.THIS)) { | 
| -          if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +        Keyword keyword = _currentToken.keyword; | 
| +        if (keyword == Keyword.THIS) { | 
| +          TokenType nextType = _peek().type; | 
| +          if (nextType == TokenType.OPEN_PAREN) { | 
| bodyAllowed = false; | 
| -            initializers.add(_parseRedirectingConstructorInvocation()); | 
| -          } else if (_tokenMatches(_peek(), TokenType.PERIOD) && | 
| +            initializers.add(_parseRedirectingConstructorInvocation(false)); | 
| +          } else if (nextType == TokenType.PERIOD && | 
| _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) { | 
| bodyAllowed = false; | 
| -            initializers.add(_parseRedirectingConstructorInvocation()); | 
| +            initializers.add(_parseRedirectingConstructorInvocation(true)); | 
| } else { | 
| -            initializers.add(_parseConstructorFieldInitializer()); | 
| +            initializers.add(_parseConstructorFieldInitializer(true)); | 
| } | 
| -        } else if (_matchesKeyword(Keyword.SUPER)) { | 
| +        } else if (keyword == Keyword.SUPER) { | 
| initializers.add(_parseSuperConstructorInvocation()); | 
| } else if (_matches(TokenType.OPEN_CURLY_BRACKET) || | 
| _matches(TokenType.FUNCTION)) { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER); | 
| } else { | 
| -          initializers.add(_parseConstructorFieldInitializer()); | 
| +          initializers.add(_parseConstructorFieldInitializer(false)); | 
| } | 
| } while (_optional(TokenType.COMMA)); | 
| if (factoryKeyword != null) { | 
| @@ -5492,54 +5538,55 @@ class Parser { | 
| } | 
|  | 
| /** | 
| -   * Parse a field initializer within a constructor. Return the field | 
| -   * initializer that was parsed. | 
| +   * Parse a field initializer within a constructor. The flag [hasThis] should | 
| +   * be true if the current token is `this`. Return the field initializer that | 
| +   * was parsed. | 
| * | 
| *     fieldInitializer: | 
| *         ('this' '.')? identifier '=' conditionalExpression cascadeSection* | 
| */ | 
| -  ConstructorFieldInitializer _parseConstructorFieldInitializer() { | 
| -    Token keyword = null; | 
| +  ConstructorFieldInitializer _parseConstructorFieldInitializer(bool hasThis) { | 
| +    Token keywordToken = null; | 
| Token period = null; | 
| -    if (_matchesKeyword(Keyword.THIS)) { | 
| -      keyword = getAndAdvance(); | 
| +    if (hasThis) { | 
| +      keywordToken = getAndAdvance(); | 
| period = _expect(TokenType.PERIOD); | 
| } | 
| SimpleIdentifier fieldName = parseSimpleIdentifier(); | 
| Token equals = null; | 
| -    if (_matches(TokenType.EQ)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.EQ) { | 
| equals = getAndAdvance(); | 
| -    } else if (!_matchesKeyword(Keyword.THIS) && | 
| -        !_matchesKeyword(Keyword.SUPER) && | 
| -        !_matches(TokenType.OPEN_CURLY_BRACKET) && | 
| -        !_matches(TokenType.FUNCTION)) { | 
| -      _reportErrorForCurrentToken( | 
| -          ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER); | 
| -      equals = _createSyntheticToken(TokenType.EQ); | 
| } else { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER); | 
| -      return new ConstructorFieldInitializer(keyword, period, fieldName, | 
| -          _createSyntheticToken(TokenType.EQ), _createSyntheticIdentifier()); | 
| +      Keyword keyword = _currentToken.keyword; | 
| +      if (keyword != Keyword.THIS && | 
| +          keyword != Keyword.SUPER && | 
| +          type != TokenType.OPEN_CURLY_BRACKET && | 
| +          type != TokenType.FUNCTION) { | 
| +        equals = _createSyntheticToken(TokenType.EQ); | 
| +      } else { | 
| +        return new ConstructorFieldInitializer(keywordToken, period, fieldName, | 
| +            _createSyntheticToken(TokenType.EQ), _createSyntheticIdentifier()); | 
| +      } | 
| } | 
| bool wasInInitializer = _inInitializer; | 
| _inInitializer = true; | 
| try { | 
| Expression expression = parseConditionalExpression(); | 
| -      TokenType tokenType = _currentToken.type; | 
| -      if (tokenType == TokenType.PERIOD_PERIOD) { | 
| -        List<Expression> cascadeSections = new List<Expression>(); | 
| -        while (tokenType == TokenType.PERIOD_PERIOD) { | 
| +      if (_matches(TokenType.PERIOD_PERIOD)) { | 
| +        List<Expression> cascadeSections = <Expression>[]; | 
| +        do { | 
| Expression section = _parseCascadeSection(); | 
| if (section != null) { | 
| cascadeSections.add(section); | 
| } | 
| -          tokenType = _currentToken.type; | 
| -        } | 
| +        } while (_matches(TokenType.PERIOD_PERIOD)); | 
| expression = new CascadeExpression(expression, cascadeSections); | 
| } | 
| return new ConstructorFieldInitializer( | 
| -          keyword, period, fieldName, equals, expression); | 
| +          keywordToken, period, fieldName, equals, expression); | 
| } finally { | 
| _inInitializer = wasInInitializer; | 
| } | 
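|  | 
| For illustration (not part of the change), both initializer forms below are | 
| handled by _parseConstructorFieldInitializer; the first takes the | 
| `hasThis == true` path and the second the `hasThis == false` path: | 
|  | 
|     class Point { | 
|       num x, y; | 
|       Point(num a, num b) | 
|           : this.x = a, // 'this' '.' identifier '=' expression | 
|             y = b;      // identifier '=' expression | 
|     } | 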
| @@ -5548,18 +5595,20 @@ class Parser { | 
| /** | 
| * Parse a continue statement. Return the continue statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.CONTINUE`. | 
| +   * | 
| *     continueStatement ::= | 
| *         'continue' identifier? ';' | 
| */ | 
| Statement _parseContinueStatement() { | 
| -    Token continueKeyword = _expectKeyword(Keyword.CONTINUE); | 
| +    Token continueKeyword = getAndAdvance(); | 
| if (!_inLoop && !_inSwitch) { | 
| _reportErrorForToken( | 
| ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword); | 
| } | 
| SimpleIdentifier label = null; | 
| if (_matchesIdentifier()) { | 
| -      label = parseSimpleIdentifier(); | 
| +      label = _parseSimpleIdentifierUnchecked(); | 
| } | 
| if (_inSwitch && !_inLoop && label == null) { | 
| _reportErrorForToken( | 
| @@ -5587,7 +5636,7 @@ class Parser { | 
| } else if (_matchesKeyword(Keyword.LIBRARY)) { | 
| return _parseLibraryDirective(commentAndMetadata); | 
| } else if (_matchesKeyword(Keyword.PART)) { | 
| -      return _parsePartDirective(commentAndMetadata); | 
| +      return _parsePartOrPartOfDirective(commentAndMetadata); | 
| } else { | 
| // Internal error: this method should not have been invoked if the current | 
| // token was something other than one of the above. | 
| @@ -5609,16 +5658,18 @@ class Parser { | 
| if (_matches(TokenType.SCRIPT_TAG)) { | 
| scriptTag = new ScriptTag(getAndAdvance()); | 
| } | 
| -    List<Directive> directives = new List<Directive>(); | 
| +    List<Directive> directives = <Directive>[]; | 
| while (!_matches(TokenType.EOF)) { | 
| CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | 
| -      if ((_matchesKeyword(Keyword.IMPORT) || | 
| -              _matchesKeyword(Keyword.EXPORT) || | 
| -              _matchesKeyword(Keyword.LIBRARY) || | 
| -              _matchesKeyword(Keyword.PART)) && | 
| -          !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -          !_tokenMatches(_peek(), TokenType.LT) && | 
| -          !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +      Keyword keyword = _currentToken.keyword; | 
| +      TokenType type = _peek().type; | 
| +      if ((keyword == Keyword.IMPORT || | 
| +              keyword == Keyword.EXPORT || | 
| +              keyword == Keyword.LIBRARY || | 
| +              keyword == Keyword.PART) && | 
| +          type != TokenType.PERIOD && | 
| +          type != TokenType.LT && | 
| +          type != TokenType.OPEN_PAREN) { | 
| directives.add(_parseDirective(commentAndMetadata)); | 
| } else if (_matches(TokenType.SEMICOLON)) { | 
| _advance(); | 
| @@ -5626,12 +5677,29 @@ class Parser { | 
| while (!_matches(TokenType.EOF)) { | 
| _advance(); | 
| } | 
| -        return new CompilationUnit(firstToken, scriptTag, directives, | 
| -            new List<CompilationUnitMember>(), _currentToken); | 
| +        return new CompilationUnit( | 
| +            firstToken, scriptTag, directives, null, _currentToken); | 
| } | 
| } | 
| -    return new CompilationUnit(firstToken, scriptTag, directives, | 
| -        new List<CompilationUnitMember>(), _currentToken); | 
| +    return new CompilationUnit( | 
| +        firstToken, scriptTag, directives, null, _currentToken); | 
| +  } | 
| + | 
| +  /** | 
| +   * Parse a documentation comment based on the given list of documentation | 
| +   * comment tokens. Return the documentation comment that was parsed, or `null` | 
| +   * if there was no comment. | 
| +   * | 
| +   *     documentationComment ::= | 
| +   *         multiLineComment? | 
| +   *       | singleLineComment* | 
| +   */ | 
| +  Comment _parseDocumentationComment(List<DocumentationCommentToken> tokens) { | 
| +    if (tokens == null) { | 
| +      return null; | 
| +    } | 
| +    List<CommentReference> references = _parseCommentReferences(tokens); | 
| +    return Comment.createDocumentationCommentWithReferences(tokens, references); | 
| } | 
|  | 
| /** | 
| @@ -5642,37 +5710,32 @@ class Parser { | 
| *         multiLineComment? | 
| *       | singleLineComment* | 
| */ | 
| -  Comment _parseDocumentationComment() { | 
| -    List<DocumentationCommentToken> documentationTokens = | 
| -        <DocumentationCommentToken>[]; | 
| +  List<DocumentationCommentToken> _parseDocumentationCommentTokens() { | 
| +    List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[]; | 
| CommentToken commentToken = _currentToken.precedingComments; | 
| while (commentToken != null) { | 
| if (commentToken is DocumentationCommentToken) { | 
| -        if (documentationTokens.isNotEmpty) { | 
| +        if (tokens.isNotEmpty) { | 
| if (commentToken.type == TokenType.SINGLE_LINE_COMMENT) { | 
| -            if (documentationTokens[0].type != TokenType.SINGLE_LINE_COMMENT) { | 
| -              documentationTokens.clear(); | 
| +            if (tokens[0].type != TokenType.SINGLE_LINE_COMMENT) { | 
| +              tokens.clear(); | 
| } | 
| } else { | 
| -            documentationTokens.clear(); | 
| +            tokens.clear(); | 
| } | 
| } | 
| -        documentationTokens.add(commentToken); | 
| +        tokens.add(commentToken); | 
| } | 
| commentToken = commentToken.next; | 
| } | 
| -    if (documentationTokens.isEmpty) { | 
| -      return null; | 
| -    } | 
| -    List<CommentReference> references = | 
| -        _parseCommentReferences(documentationTokens); | 
| -    return Comment.createDocumentationCommentWithReferences( | 
| -        documentationTokens, references); | 
| +    return tokens.isEmpty ? null : tokens; | 
| } | 
|  | 
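| For illustration (not part of the change), consecutive `///` lines are | 
| grouped into a single token list, while a `/** ... */` block found later on | 
| the same declaration discards any earlier single-line tokens: | 
|  | 
|     /// Dropped, because a block doc comment follows on this declaration. | 
|     /** Only this block comment becomes the documentation for [A]. */ | 
|     class A {} | 
|  | 
|     /// First line of the grouped single-line doc comment for [B]. | 
|     /// Second line, kept together with the first. | 
|     class B {} | 
|  | 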
| /** | 
| * Parse a do statement. Return the do statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.DO`. | 
| +   * | 
| *     doStatement ::= | 
| *         'do' statement 'while' '(' expression ')' ';' | 
| */ | 
| @@ -5680,7 +5743,7 @@ class Parser { | 
| bool wasInLoop = _inLoop; | 
| _inLoop = true; | 
| try { | 
| -      Token doKeyword = _expectKeyword(Keyword.DO); | 
| +      Token doKeyword = getAndAdvance(); | 
| Statement body = parseStatement2(); | 
| Token whileKeyword = _expectKeyword(Keyword.WHILE); | 
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | 
| @@ -5701,10 +5764,10 @@ class Parser { | 
| *         identifier ('.' identifier)* | 
| */ | 
| DottedName _parseDottedName() { | 
| -    List<SimpleIdentifier> components = new List<SimpleIdentifier>(); | 
| -    components.add(parseSimpleIdentifier()); | 
| -    while (_matches(TokenType.PERIOD)) { | 
| -      _advance(); | 
| +    List<SimpleIdentifier> components = <SimpleIdentifier>[ | 
| +      parseSimpleIdentifier() | 
| +    ]; | 
| +    while (_optional(TokenType.PERIOD)) { | 
| components.add(parseSimpleIdentifier()); | 
| } | 
| return new DottedName(components); | 
| @@ -5713,20 +5776,36 @@ class Parser { | 
| /** | 
| * Parse an empty statement. Return the empty statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `TokenType.SEMICOLON`. | 
| +   * | 
| *     emptyStatement ::= | 
| *         ';' | 
| */ | 
| Statement _parseEmptyStatement() => new EmptyStatement(getAndAdvance()); | 
|  | 
| +  /** | 
| +   * Parse an enum constant declaration. Return the enum constant declaration | 
| +   * that was parsed. | 
| +   * | 
| +   * Specified: | 
| +   * | 
| +   *     enumConstant ::= | 
| +   *         id | 
| +   * | 
| +   * Actual: | 
| +   * | 
| +   *     enumConstant ::= | 
| +   *         metadata id | 
| +   */ | 
| EnumConstantDeclaration _parseEnumConstantDeclaration() { | 
| CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | 
| SimpleIdentifier name; | 
| if (_matchesIdentifier()) { | 
| -      name = parseSimpleIdentifier(isDeclaration: true); | 
| +      name = _parseSimpleIdentifierUnchecked(isDeclaration: true); | 
| } else { | 
| name = _createSyntheticIdentifier(); | 
| } | 
| -    if (commentAndMetadata.metadata.isNotEmpty) { | 
| +    if (commentAndMetadata.hasMetadata) { | 
| _reportErrorForNode(ParserErrorCode.ANNOTATION_ON_ENUM_CONSTANT, | 
| commentAndMetadata.metadata[0]); | 
| } | 
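|  | 
| For illustration (not part of the change), the 'Actual' production above lets | 
| the parser accept metadata on an enum constant, and the new `hasMetadata` | 
| check is what reports ANNOTATION_ON_ENUM_CONSTANT for it: | 
|  | 
|     enum Suit { | 
|       clubs, | 
|       hearts, | 
|       spades | 
|       // '@deprecated diamonds,' here would parse, but would be reported | 
|       // with ANNOTATION_ON_ENUM_CONSTANT. | 
|     } | 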
| @@ -5738,18 +5817,19 @@ class Parser { | 
| * Parse an enum declaration. The [commentAndMetadata] is the metadata to be | 
| * associated with the member. Return the enum declaration that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.ENUM`. | 
| +   * | 
| *     enumType ::= | 
| *         metadata 'enum' id '{' id (',' id)* (',')? '}' | 
| */ | 
| EnumDeclaration _parseEnumDeclaration(CommentAndMetadata commentAndMetadata) { | 
| -    Token keyword = _expectKeyword(Keyword.ENUM); | 
| +    Token keyword = getAndAdvance(); | 
| SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); | 
| Token leftBracket = null; | 
| -    List<EnumConstantDeclaration> constants = | 
| -        new List<EnumConstantDeclaration>(); | 
| +    List<EnumConstantDeclaration> constants = <EnumConstantDeclaration>[]; | 
| Token rightBracket = null; | 
| if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| -      leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | 
| +      leftBracket = getAndAdvance(); | 
| if (_matchesIdentifier() || _matches(TokenType.AT)) { | 
| constants.add(_parseEnumConstantDeclaration()); | 
| } else if (_matches(TokenType.COMMA) && | 
| @@ -5792,7 +5872,7 @@ class Parser { | 
| */ | 
| Expression _parseEqualityExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| _currentToken.next.type.isEqualityOperator) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| @@ -5800,13 +5880,12 @@ class Parser { | 
| } | 
| bool leftEqualityExpression = false; | 
| while (_currentToken.type.isEqualityOperator) { | 
| -      Token operator = getAndAdvance(); | 
| if (leftEqualityExpression) { | 
| _reportErrorForNode( | 
| ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression); | 
| } | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseRelationalExpression()); | 
| +          expression, getAndAdvance(), _parseRelationalExpression()); | 
| leftEqualityExpression = true; | 
| } | 
| return expression; | 
| @@ -5816,11 +5895,13 @@ class Parser { | 
| * Parse an export directive. The [commentAndMetadata] is the metadata to be | 
| * associated with the directive. Return the export directive that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.EXPORT`. | 
| +   * | 
| *     exportDirective ::= | 
| *         metadata 'export' stringLiteral configuration* combinator*';' | 
| */ | 
| ExportDirective _parseExportDirective(CommentAndMetadata commentAndMetadata) { | 
| -    Token exportKeyword = _expectKeyword(Keyword.EXPORT); | 
| +    Token exportKeyword = getAndAdvance(); | 
| StringLiteral libraryUri = _parseUri(); | 
| List<Configuration> configurations = _parseConfigurations(); | 
| List<Combinator> combinators = _parseCombinators(); | 
| @@ -5842,8 +5923,7 @@ class Parser { | 
| *         expression (',' expression)* | 
| */ | 
| List<Expression> _parseExpressionList() { | 
| -    List<Expression> expressions = new List<Expression>(); | 
| -    expressions.add(parseExpression2()); | 
| +    List<Expression> expressions = <Expression>[parseExpression2()]; | 
| while (_optional(TokenType.COMMA)) { | 
| expressions.add(parseExpression2()); | 
| } | 
| @@ -5862,23 +5942,24 @@ class Parser { | 
| *       | type | 
| */ | 
| FinalConstVarOrType _parseFinalConstVarOrType(bool optional) { | 
| -    Token keyword = null; | 
| +    Token keywordToken = null; | 
| TypeName type = null; | 
| -    if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.CONST)) { | 
| -      keyword = getAndAdvance(); | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.FINAL || keyword == Keyword.CONST) { | 
| +      keywordToken = getAndAdvance(); | 
| if (_isTypedIdentifier(_currentToken)) { | 
| type = parseTypeName(); | 
| } else { | 
| // Support `final/*=T*/ x;` | 
| type = _parseOptionalTypeNameComment(); | 
| } | 
| -    } else if (_matchesKeyword(Keyword.VAR)) { | 
| -      keyword = getAndAdvance(); | 
| +    } else if (keyword == Keyword.VAR) { | 
| +      keywordToken = getAndAdvance(); | 
| // Support `var/*=T*/ x;` | 
| type = _parseOptionalTypeNameComment(); | 
| if (type != null) { | 
| // Clear the keyword to prevent an error. | 
| -        keyword = null; | 
| +        keywordToken = null; | 
| } | 
| } else if (_isTypedIdentifier(_currentToken)) { | 
| type = parseReturnType(); | 
| @@ -5890,7 +5971,7 @@ class Parser { | 
| // This is not supported if the type is required. | 
| type = _parseOptionalTypeNameComment(); | 
| } | 
| -    return new FinalConstVarOrType(keyword, type); | 
| +    return new FinalConstVarOrType(keywordToken, type); | 
| } | 
|  | 
| /** | 
| @@ -5907,31 +5988,32 @@ class Parser { | 
| */ | 
| FormalParameter _parseFormalParameter(ParameterKind kind) { | 
| NormalFormalParameter parameter = parseNormalFormalParameter(); | 
| -    if (_matches(TokenType.EQ)) { | 
| -      Token seperator = getAndAdvance(); | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.EQ) { | 
| +      Token separator = getAndAdvance(); | 
| Expression defaultValue = parseExpression2(); | 
| if (kind == ParameterKind.NAMED) { | 
| _reportErrorForToken( | 
| -            ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, seperator); | 
| +            ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, separator); | 
| } else if (kind == ParameterKind.REQUIRED) { | 
| _reportErrorForNode( | 
| ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter); | 
| } | 
| return new DefaultFormalParameter( | 
| -          parameter, kind, seperator, defaultValue); | 
| -    } else if (_matches(TokenType.COLON)) { | 
| -      Token seperator = getAndAdvance(); | 
| +          parameter, kind, separator, defaultValue); | 
| +    } else if (type == TokenType.COLON) { | 
| +      Token separator = getAndAdvance(); | 
| Expression defaultValue = parseExpression2(); | 
| if (kind == ParameterKind.POSITIONAL) { | 
| _reportErrorForToken( | 
| ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER, | 
| -            seperator); | 
| +            separator); | 
| } else if (kind == ParameterKind.REQUIRED) { | 
| _reportErrorForNode( | 
| ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter); | 
| } | 
| return new DefaultFormalParameter( | 
| -          parameter, kind, seperator, defaultValue); | 
| +          parameter, kind, separator, defaultValue); | 
| } else if (kind != ParameterKind.REQUIRED) { | 
| return new DefaultFormalParameter(parameter, kind, null, null); | 
| } | 
| @@ -5939,6 +6021,160 @@ class Parser { | 
| } | 
|  | 
| /** | 
| +   * Parse a list of formal parameters given that the list starts with the given | 
| +   * [leftParenthesis]. Return the formal parameters that were parsed. | 
| +   */ | 
| +  FormalParameterList _parseFormalParameterListAfterParen( | 
| +      Token leftParenthesis) { | 
| +    if (_matches(TokenType.CLOSE_PAREN)) { | 
| +      return new FormalParameterList( | 
| +          leftParenthesis, null, null, null, getAndAdvance()); | 
| +    } | 
| +    // | 
| +    // Even though it is invalid to have default parameters outside of brackets, | 
| +    // required parameters inside of brackets, or multiple groups of default and | 
| +    // named parameters, we allow all of these cases so that we can recover | 
| +    // better. | 
| +    // | 
| +    List<FormalParameter> parameters = <FormalParameter>[]; | 
| +    Token leftSquareBracket = null; | 
| +    Token rightSquareBracket = null; | 
| +    Token leftCurlyBracket = null; | 
| +    Token rightCurlyBracket = null; | 
| +    ParameterKind kind = ParameterKind.REQUIRED; | 
| +    bool firstParameter = true; | 
| +    bool reportedMultiplePositionalGroups = false; | 
| +    bool reportedMultipleNamedGroups = false; | 
| +    bool reportedMixedGroups = false; | 
| +    bool wasOptionalParameter = false; | 
| +    Token initialToken = null; | 
| +    do { | 
| +      if (firstParameter) { | 
| +        firstParameter = false; | 
| +      } else if (!_optional(TokenType.COMMA)) { | 
| +        // TODO(brianwilkerson) The token is wrong; we need to recover from this | 
| +        // case. | 
| +        if (_getEndToken(leftParenthesis) != null) { | 
| +          _reportErrorForCurrentToken( | 
| +              ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]); | 
| +        } else { | 
| +          _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS, | 
| +              _currentToken.previous); | 
| +          break; | 
| +        } | 
| +      } | 
| +      initialToken = _currentToken; | 
| +      // | 
| +      // Handle the beginning of parameter groups. | 
| +      // | 
| +      TokenType type = _currentToken.type; | 
| +      if (type == TokenType.OPEN_SQUARE_BRACKET) { | 
| +        wasOptionalParameter = true; | 
| +        if (leftSquareBracket != null && !reportedMultiplePositionalGroups) { | 
| +          _reportErrorForCurrentToken( | 
| +              ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS); | 
| +          reportedMultiplePositionalGroups = true; | 
| +        } | 
| +        if (leftCurlyBracket != null && !reportedMixedGroups) { | 
| +          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS); | 
| +          reportedMixedGroups = true; | 
| +        } | 
| +        leftSquareBracket = getAndAdvance(); | 
| +        kind = ParameterKind.POSITIONAL; | 
| +      } else if (type == TokenType.OPEN_CURLY_BRACKET) { | 
| +        wasOptionalParameter = true; | 
| +        if (leftCurlyBracket != null && !reportedMultipleNamedGroups) { | 
| +          _reportErrorForCurrentToken( | 
| +              ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS); | 
| +          reportedMultipleNamedGroups = true; | 
| +        } | 
| +        if (leftSquareBracket != null && !reportedMixedGroups) { | 
| +          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS); | 
| +          reportedMixedGroups = true; | 
| +        } | 
| +        leftCurlyBracket = getAndAdvance(); | 
| +        kind = ParameterKind.NAMED; | 
| +      } | 
| +      // | 
| +      // Parse and record the parameter. | 
| +      // | 
| +      FormalParameter parameter = _parseFormalParameter(kind); | 
| +      parameters.add(parameter); | 
| +      if (kind == ParameterKind.REQUIRED && wasOptionalParameter) { | 
| +        _reportErrorForNode( | 
| +            ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter); | 
| +      } | 
| +      // | 
| +      // Handle the end of parameter groups. | 
| +      // | 
| +      // TODO(brianwilkerson) Improve the detection and reporting of missing and | 
| +      // mismatched delimiters. | 
| +      type = _currentToken.type; | 
| +      if (type == TokenType.CLOSE_SQUARE_BRACKET) { | 
| +        rightSquareBracket = getAndAdvance(); | 
| +        if (leftSquareBracket == null) { | 
| +          if (leftCurlyBracket != null) { | 
| +            _reportErrorForCurrentToken( | 
| +                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); | 
| +            rightCurlyBracket = rightSquareBracket; | 
| +            rightSquareBracket = null; | 
| +          } else { | 
| +            _reportErrorForCurrentToken( | 
| +                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, | 
| +                ["["]); | 
| +          } | 
| +        } | 
| +        kind = ParameterKind.REQUIRED; | 
| +      } else if (type == TokenType.CLOSE_CURLY_BRACKET) { | 
| +        rightCurlyBracket = getAndAdvance(); | 
| +        if (leftCurlyBracket == null) { | 
| +          if (leftSquareBracket != null) { | 
| +            _reportErrorForCurrentToken( | 
| +                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); | 
| +            rightSquareBracket = rightCurlyBracket; | 
| +            rightCurlyBracket = null; | 
| +          } else { | 
| +            _reportErrorForCurrentToken( | 
| +                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, | 
| +                ["{"]); | 
| +          } | 
| +        } | 
| +        kind = ParameterKind.REQUIRED; | 
| +      } | 
| +    } while (!_matches(TokenType.CLOSE_PAREN) && | 
| +        !identical(initialToken, _currentToken)); | 
| +    Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| +    // | 
| +    // Check that the groups were closed correctly. | 
| +    // | 
| +    if (leftSquareBracket != null && rightSquareBracket == null) { | 
| +      _reportErrorForCurrentToken( | 
| +          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); | 
| +    } | 
| +    if (leftCurlyBracket != null && rightCurlyBracket == null) { | 
| +      _reportErrorForCurrentToken( | 
| +          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); | 
| +    } | 
| +    // | 
| +    // Build the parameter list. | 
| +    // | 
| +    leftSquareBracket ??= leftCurlyBracket; | 
| +    rightSquareBracket ??= rightCurlyBracket; | 
| +    return new FormalParameterList(leftParenthesis, parameters, | 
| +        leftSquareBracket, rightSquareBracket, rightParenthesis); | 
| +  } | 
| + | 
| +  /** | 
| +   * Parse a list of formal parameters. Return the formal parameters that were | 
| +   * parsed. | 
| +   * | 
| +   * This method assumes that the current token matches `TokenType.OPEN_PAREN`. | 
| +   */ | 
| +  FormalParameterList _parseFormalParameterListUnchecked() { | 
| +    return _parseFormalParameterListAfterParen(getAndAdvance()); | 
| +  } | 
| + | 
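Editorial aside, not part of the patch: a minimal sketch, assuming ordinary Dart 1.x declarations, of the parameter-group syntax the loop above distinguishes. Required parameters come first, optional positional parameters sit in square brackets, and named parameters sit in curly braces; combining the two bracketed forms in one list is the MIXED_PARAMETER_GROUPS case reported by this method.

    // Required, optional positional, and named parameters, respectively.
    int addAll(int a, [int b = 0]) => a + b;

    String greet(String name, {String greeting: 'Hello'}) => '$greeting, $name!';

    // int bad(int a, [int b = 0], {int c}) => a; // MIXED_PARAMETER_GROUPS

    void main() {
      print(addAll(1));                     // 1
      print(addAll(1, 2));                  // 3
      print(greet('Dart'));                 // Hello, Dart!
      print(greet('Dart', greeting: 'Hi')); // Hi, Dart!
    }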
| +  /** | 
| * Parse a for statement. Return the for statement that was parsed. | 
| * | 
| *     forStatement ::= | 
| @@ -5970,19 +6206,20 @@ class Parser { | 
| if (_matchesIdentifier() && | 
| (_tokenMatchesKeyword(_peek(), Keyword.IN) || | 
| _tokenMatches(_peek(), TokenType.COLON))) { | 
| -          List<VariableDeclaration> variables = new List<VariableDeclaration>(); | 
| -          SimpleIdentifier variableName = parseSimpleIdentifier(); | 
| -          variables.add(new VariableDeclaration(variableName, null, null)); | 
| +          SimpleIdentifier variableName = _parseSimpleIdentifierUnchecked(); | 
| variableList = new VariableDeclarationList(commentAndMetadata.comment, | 
| -              commentAndMetadata.metadata, null, null, variables); | 
| +              commentAndMetadata.metadata, null, null, <VariableDeclaration>[ | 
| +            new VariableDeclaration(variableName, null, null) | 
| +          ]); | 
| } else if (_isInitializedVariableDeclaration()) { | 
| variableList = | 
| _parseVariableDeclarationListAfterMetadata(commentAndMetadata); | 
| } else { | 
| initialization = parseExpression2(); | 
| } | 
| -        if (_matchesKeyword(Keyword.IN) || _matches(TokenType.COLON)) { | 
| -          if (_matches(TokenType.COLON)) { | 
| +        TokenType type = _currentToken.type; | 
| +        if (_matchesKeyword(Keyword.IN) || type == TokenType.COLON) { | 
| +          if (type == TokenType.COLON) { | 
| _reportErrorForCurrentToken(ParserErrorCode.COLON_IN_PLACE_OF_IN); | 
| } | 
| DeclaredIdentifier loopVariable = null; | 
| @@ -6014,7 +6251,7 @@ class Parser { | 
| new SimpleIdentifier(variable.name.token, | 
| isDeclaration: true)); | 
| } else { | 
| -              if (!commentAndMetadata.metadata.isEmpty) { | 
| +              if (commentAndMetadata.hasMetadata) { | 
| // TODO(jwren) metadata isn't allowed before the identifier in | 
| // "identifier in expression", add warning if commentAndMetadata | 
| // has content | 
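Editorial aside, not part of the patch: a small Dart sketch of the `for` shapes this hunk distinguishes. Writing `:` where `in` belongs is the recovery case reported as COLON_IN_PLACE_OF_IN.

    void main() {
      var items = [1, 2, 3];
      for (int i = 0; i < items.length; i++) {
        print(items[i]); // classic C-style for loop
      }
      for (var item in items) {
        print(item); // for-in with a declared loop variable
      }
      var x;
      for (x in items) {
        print(x); // for-in over an existing identifier
      }
      // for (var item : items) {} // parsed with recovery: ':' in place of 'in'
    }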
| @@ -6106,45 +6343,47 @@ class Parser { | 
| _inLoop = false; | 
| _inSwitch = false; | 
| try { | 
| -      if (_matches(TokenType.SEMICOLON)) { | 
| +      TokenType type = _currentToken.type; | 
| +      if (type == TokenType.SEMICOLON) { | 
| if (!mayBeEmpty) { | 
| _reportErrorForCurrentToken(emptyErrorCode); | 
| } | 
| return new EmptyFunctionBody(getAndAdvance()); | 
| -      } else if (_matchesString(_NATIVE)) { | 
| -        Token nativeToken = getAndAdvance(); | 
| -        StringLiteral stringLiteral = null; | 
| -        if (_matches(TokenType.STRING)) { | 
| -          stringLiteral = parseStringLiteral(); | 
| -        } | 
| -        return new NativeFunctionBody( | 
| -            nativeToken, stringLiteral, _expect(TokenType.SEMICOLON)); | 
| } | 
| Token keyword = null; | 
| Token star = null; | 
| -      if (_matchesString(ASYNC)) { | 
| -        keyword = getAndAdvance(); | 
| -        if (!_parseAsync) { | 
| -          _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword); | 
| -        } | 
| -        if (_matches(TokenType.STAR)) { | 
| -          star = getAndAdvance(); | 
| -          _inGenerator = true; | 
| -        } | 
| -        _inAsync = true; | 
| -      } else if (_matchesString(SYNC)) { | 
| -        keyword = getAndAdvance(); | 
| -        if (!_parseAsync) { | 
| -          _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword); | 
| -        } | 
| -        if (_matches(TokenType.STAR)) { | 
| -          star = getAndAdvance(); | 
| -          _inGenerator = true; | 
| +      bool foundAsync = false; | 
| +      bool foundSync = false; | 
| +      if (type == TokenType.IDENTIFIER) { | 
| +        String lexeme = _currentToken.lexeme; | 
| +        if (lexeme == ASYNC) { | 
| +          foundAsync = true; | 
| +          keyword = getAndAdvance(); | 
| +          if (!_parseAsync) { | 
| +            _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword); | 
| +          } | 
| +          if (_matches(TokenType.STAR)) { | 
| +            star = getAndAdvance(); | 
| +            _inGenerator = true; | 
| +          } | 
| +          type = _currentToken.type; | 
| +          _inAsync = true; | 
| +        } else if (lexeme == SYNC) { | 
| +          foundSync = true; | 
| +          keyword = getAndAdvance(); | 
| +          if (!_parseAsync) { | 
| +            _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword); | 
| +          } | 
| +          if (_matches(TokenType.STAR)) { | 
| +            star = getAndAdvance(); | 
| +            _inGenerator = true; | 
| +          } | 
| +          type = _currentToken.type; | 
| } | 
| } | 
| -      if (_matches(TokenType.FUNCTION)) { | 
| +      if (type == TokenType.FUNCTION) { | 
| if (keyword != null) { | 
| -          if (!_tokenMatchesString(keyword, ASYNC)) { | 
| +          if (!foundAsync) { | 
| _reportErrorForToken(ParserErrorCode.INVALID_SYNC, keyword); | 
| keyword = null; | 
| } else if (star != null) { | 
| @@ -6169,9 +6408,9 @@ class Parser { | 
| } | 
| return new ExpressionFunctionBody( | 
| keyword, functionDefinition, expression, semicolon); | 
| -      } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| +      } else if (type == TokenType.OPEN_CURLY_BRACKET) { | 
| if (keyword != null) { | 
| -          if (_tokenMatchesString(keyword, SYNC) && star == null) { | 
| +          if (foundSync && star == null) { | 
| _reportErrorForToken( | 
| ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword); | 
| } | 
| @@ -6182,6 +6421,14 @@ class Parser { | 
| _createSyntheticToken(TokenType.SEMICOLON)); | 
| } | 
| return new BlockFunctionBody(keyword, star, parseBlock()); | 
| +      } else if (_matchesString(_NATIVE)) { | 
| +        Token nativeToken = getAndAdvance(); | 
| +        StringLiteral stringLiteral = null; | 
| +        if (_matches(TokenType.STRING)) { | 
| +          stringLiteral = _parseStringLiteralUnchecked(); | 
| +        } | 
| +        return new NativeFunctionBody( | 
| +            nativeToken, stringLiteral, _expect(TokenType.SEMICOLON)); | 
| } else { | 
| // Invalid function body | 
| _reportErrorForCurrentToken(emptyErrorCode); | 
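Editorial aside, not part of the patch: the function body forms this branch chain recognizes, sketched in plain Dart. The empty (`;`) body applies to abstract and external members, and the `native` form is specific to SDK and embedder code, so both are omitted here.

    import 'dart:async';

    int square(int x) => x * x; // expression body

    int cube(int x) {           // block body
      return x * x * x;
    }

    Future<int> later(int x) async => x; // async body

    Stream<int> count(int n) async* {    // async generator body
      for (int i = 0; i < n; i++) yield i;
    }

    Iterable<int> upTo(int n) sync* {    // sync generator body
      for (int i = 0; i < n; i++) yield i;
    }

    void main() {
      print(square(3)); // 9
      print(cube(3));   // 27
    }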
| @@ -6213,22 +6460,28 @@ class Parser { | 
| CommentAndMetadata commentAndMetadata, | 
| Token externalKeyword, | 
| TypeName returnType) { | 
| -    Token keyword = null; | 
| +    Token keywordToken = null; | 
| bool isGetter = false; | 
| -    if (_matchesKeyword(Keyword.GET) && | 
| -        !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| -      keyword = getAndAdvance(); | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    SimpleIdentifier name = null; | 
| +    if (keyword == Keyword.GET) { | 
| +      keywordToken = getAndAdvance(); | 
| isGetter = true; | 
| -    } else if (_matchesKeyword(Keyword.SET) && | 
| -        !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| -      keyword = getAndAdvance(); | 
| +    } else if (keyword == Keyword.SET) { | 
| +      keywordToken = getAndAdvance(); | 
| +    } | 
| +    if (keywordToken != null && _matches(TokenType.OPEN_PAREN)) { | 
| +      name = new SimpleIdentifier(keywordToken, isDeclaration: true); | 
| +      keywordToken = null; | 
| +      isGetter = false; | 
| +    } else { | 
| +      name = parseSimpleIdentifier(isDeclaration: true); | 
| } | 
| -    SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); | 
| TypeParameterList typeParameters = _parseGenericMethodTypeParameters(); | 
| FormalParameterList parameters = null; | 
| if (!isGetter) { | 
| if (_matches(TokenType.OPEN_PAREN)) { | 
| -        parameters = parseFormalParameterList(); | 
| +        parameters = _parseFormalParameterListUnchecked(); | 
| _validateFormalParameterList(parameters); | 
| } else { | 
| _reportErrorForCurrentToken( | 
| @@ -6242,7 +6495,7 @@ class Parser { | 
| } | 
| } else if (_matches(TokenType.OPEN_PAREN)) { | 
| _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS); | 
| -      parseFormalParameterList(); | 
| +      _parseFormalParameterListUnchecked(); | 
| } | 
| FunctionBody body; | 
| if (externalKeyword == null) { | 
| @@ -6261,7 +6514,7 @@ class Parser { | 
| commentAndMetadata.metadata, | 
| externalKeyword, | 
| returnType, | 
| -        keyword, | 
| +        keywordToken, | 
| name, | 
| new FunctionExpression(typeParameters, parameters, body)); | 
| } | 
| @@ -6328,7 +6581,8 @@ class Parser { | 
| if (_matches(TokenType.LT)) { | 
| typeParameters = parseTypeParameterList(); | 
| } | 
| -    if (_matches(TokenType.SEMICOLON) || _matches(TokenType.EOF)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.SEMICOLON || type == TokenType.EOF) { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS); | 
| FormalParameterList parameters = new FormalParameterList( | 
| _createSyntheticToken(TokenType.OPEN_PAREN), | 
| @@ -6346,12 +6600,24 @@ class Parser { | 
| typeParameters, | 
| parameters, | 
| semicolon); | 
| -    } else if (!_matches(TokenType.OPEN_PAREN)) { | 
| +    } else if (type == TokenType.OPEN_PAREN) { | 
| +      FormalParameterList parameters = _parseFormalParameterListUnchecked(); | 
| +      _validateFormalParameterList(parameters); | 
| +      Token semicolon = _expect(TokenType.SEMICOLON); | 
| +      return new FunctionTypeAlias( | 
| +          commentAndMetadata.comment, | 
| +          commentAndMetadata.metadata, | 
| +          keyword, | 
| +          returnType, | 
| +          name, | 
| +          typeParameters, | 
| +          parameters, | 
| +          semicolon); | 
| +    } else { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS); | 
| -      // TODO(brianwilkerson) Recover from this error. At the very least we | 
| -      // should skip to the start of the next valid compilation unit member, | 
| -      // allowing for the possibility of finding the typedef parameters before | 
| -      // that point. | 
| +      // Recovery: At the very least we should skip to the start of the next | 
| +      // valid compilation unit member, allowing for the possibility of finding | 
| +      // the typedef parameters before that point. | 
| return new FunctionTypeAlias( | 
| commentAndMetadata.comment, | 
| commentAndMetadata.metadata, | 
| @@ -6363,18 +6629,6 @@ class Parser { | 
| null, null, null, _createSyntheticToken(TokenType.CLOSE_PAREN)), | 
| _createSyntheticToken(TokenType.SEMICOLON)); | 
| } | 
| -    FormalParameterList parameters = parseFormalParameterList(); | 
| -    _validateFormalParameterList(parameters); | 
| -    Token semicolon = _expect(TokenType.SEMICOLON); | 
| -    return new FunctionTypeAlias( | 
| -        commentAndMetadata.comment, | 
| -        commentAndMetadata.metadata, | 
| -        keyword, | 
| -        returnType, | 
| -        name, | 
| -        typeParameters, | 
| -        parameters, | 
| -        semicolon); | 
| } | 
|  | 
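Editorial aside, not part of the patch: a well-formed function type alias of the kind parsed above, plus (commented out) the parameterless form that triggers MISSING_TYPEDEF_PARAMETERS and the synthetic-token recovery.

    // A function type alias with a formal parameter list.
    typedef int Comparison(String a, String b);

    int byLength(String a, String b) => a.length - b.length;

    void main() {
      Comparison compare = byLength;
      print(compare('ab', 'a')); // 1
    }

    // typedef int Broken; // missing parameters: MISSING_TYPEDEF_PARAMETERS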
| /** | 
| @@ -6416,6 +6670,8 @@ class Parser { | 
| * been parsed, or `null` if there was no return type. Return the getter that | 
| * was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.GET`. | 
| +   * | 
| *     getter ::= | 
| *         getterSignature functionBody? | 
| * | 
| @@ -6424,7 +6680,7 @@ class Parser { | 
| */ | 
| MethodDeclaration _parseGetter(CommentAndMetadata commentAndMetadata, | 
| Token externalKeyword, Token staticKeyword, TypeName returnType) { | 
| -    Token propertyKeyword = _expectKeyword(Keyword.GET); | 
| +    Token propertyKeyword = getAndAdvance(); | 
| SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); | 
| if (_matches(TokenType.OPEN_PAREN) && | 
| _tokenMatches(_peek(), TokenType.CLOSE_PAREN)) { | 
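Editorial aside, not part of the patch: a short Dart sketch of the getter and setter declarations this code parses. A getter takes no parameter list (the GETTER_WITH_PARAMETERS case reported above); a setter takes exactly one required parameter.

    class Temperature {
      double _celsius = 0.0;

      // Getter declaration: no parameter list is allowed.
      double get celsius => _celsius;

      // Setter declaration: exactly one required parameter.
      void set celsius(double value) {
        _celsius = value;
      }
    }

    void main() {
      var t = new Temperature()..celsius = 21.5;
      print(t.celsius); // 21.5
    }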
| @@ -6461,10 +6717,10 @@ class Parser { | 
| *         identifier (',' identifier)* | 
| */ | 
| List<SimpleIdentifier> _parseIdentifierList() { | 
| -    List<SimpleIdentifier> identifiers = new List<SimpleIdentifier>(); | 
| -    identifiers.add(parseSimpleIdentifier()); | 
| -    while (_matches(TokenType.COMMA)) { | 
| -      _advance(); | 
| +    List<SimpleIdentifier> identifiers = <SimpleIdentifier>[ | 
| +      parseSimpleIdentifier() | 
| +    ]; | 
| +    while (_optional(TokenType.COMMA)) { | 
| identifiers.add(parseSimpleIdentifier()); | 
| } | 
| return identifiers; | 
| @@ -6473,11 +6729,13 @@ class Parser { | 
| /** | 
| * Parse an if statement. Return the if statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.IF`. | 
| +   * | 
| *     ifStatement ::= | 
| *         'if' '(' expression ')' statement ('else' statement)? | 
| */ | 
| Statement _parseIfStatement() { | 
| -    Token ifKeyword = _expectKeyword(Keyword.IF); | 
| +    Token ifKeyword = getAndAdvance(); | 
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | 
| Expression condition = parseExpression2(); | 
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| @@ -6496,11 +6754,13 @@ class Parser { | 
| * Parse an import directive. The [commentAndMetadata] is the metadata to be | 
| * associated with the directive. Return the import directive that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.IMPORT`. | 
| +   * | 
| *     importDirective ::= | 
| *         metadata 'import' stringLiteral configuration* (deferred)? ('as' identifier)? combinator*';' | 
| */ | 
| ImportDirective _parseImportDirective(CommentAndMetadata commentAndMetadata) { | 
| -    Token importKeyword = _expectKeyword(Keyword.IMPORT); | 
| +    Token importKeyword = getAndAdvance(); | 
| StringLiteral libraryUri = _parseUri(); | 
| List<Configuration> configurations = _parseConfigurations(); | 
| Token deferredToken = null; | 
| @@ -6590,7 +6850,7 @@ class Parser { | 
| */ | 
| InstanceCreationExpression _parseInstanceCreationExpression(Token keyword) { | 
| ConstructorName constructorName = parseConstructorName(); | 
| -    ArgumentList argumentList = parseArgumentList(); | 
| +    ArgumentList argumentList = _parseArgumentListChecked(); | 
| return new InstanceCreationExpression( | 
| keyword, constructorName, argumentList); | 
| } | 
| @@ -6600,12 +6860,14 @@ class Parser { | 
| * associated with the directive. Return the library directive that was | 
| * parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.LIBRARY`. | 
| +   * | 
| *     libraryDirective ::= | 
| *         metadata 'library' identifier ';' | 
| */ | 
| LibraryDirective _parseLibraryDirective( | 
| CommentAndMetadata commentAndMetadata) { | 
| -    Token keyword = _expectKeyword(Keyword.LIBRARY); | 
| +    Token keyword = getAndAdvance(); | 
| LibraryIdentifier libraryName = _parseLibraryName( | 
| ParserErrorCode.MISSING_NAME_IN_LIBRARY_DIRECTIVE, keyword); | 
| Token semicolon = _expect(TokenType.SEMICOLON); | 
| @@ -6627,17 +6889,15 @@ class Parser { | 
| if (_matchesIdentifier()) { | 
| return parseLibraryIdentifier(); | 
| } else if (_matches(TokenType.STRING)) { | 
| -      // TODO(brianwilkerson) Recovery: This should be extended to handle | 
| -      // arbitrary tokens until we can find a token that can start a compilation | 
| -      // unit member. | 
| +      // Recovery: This should be extended to handle arbitrary tokens until we | 
| +      // can find a token that can start a compilation unit member. | 
| StringLiteral string = parseStringLiteral(); | 
| _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string); | 
| } else { | 
| _reportErrorForToken(missingNameError, missingNameToken); | 
| } | 
| -    List<SimpleIdentifier> components = new List<SimpleIdentifier>(); | 
| -    components.add(_createSyntheticIdentifier()); | 
| -    return new LibraryIdentifier(components); | 
| +    return new LibraryIdentifier( | 
| +        <SimpleIdentifier>[_createSyntheticIdentifier()]); | 
| } | 
|  | 
| /** | 
| @@ -6646,13 +6906,16 @@ class Parser { | 
| * is the type arguments appearing before the literal, or `null` if there are | 
| * no type arguments. Return the list literal that was parsed. | 
| * | 
| +   * This method assumes that the current token matches either | 
| +   * `TokenType.OPEN_SQUARE_BRACKET` or `TokenType.INDEX`. | 
| +   * | 
| *     listLiteral ::= | 
| *         'const'? typeArguments? '[' (expressionList ','?)? ']' | 
| */ | 
| ListLiteral _parseListLiteral( | 
| Token modifier, TypeArgumentList typeArguments) { | 
| -    // may be empty list literal | 
| if (_matches(TokenType.INDEX)) { | 
| +      // Split the token into two separate tokens. | 
| BeginToken leftBracket = _createToken( | 
| _currentToken, TokenType.OPEN_SQUARE_BRACKET, | 
| isBegin: true); | 
| @@ -6666,8 +6929,7 @@ class Parser { | 
| return new ListLiteral( | 
| modifier, typeArguments, leftBracket, null, rightBracket); | 
| } | 
| -    // open | 
| -    Token leftBracket = _expect(TokenType.OPEN_SQUARE_BRACKET); | 
| +    Token leftBracket = getAndAdvance(); | 
| if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) { | 
| return new ListLiteral( | 
| modifier, typeArguments, leftBracket, null, getAndAdvance()); | 
| @@ -6675,8 +6937,7 @@ class Parser { | 
| bool wasInInitializer = _inInitializer; | 
| _inInitializer = false; | 
| try { | 
| -      List<Expression> elements = new List<Expression>(); | 
| -      elements.add(parseExpression2()); | 
| +      List<Expression> elements = <Expression>[parseExpression2()]; | 
| while (_optional(TokenType.COMMA)) { | 
| if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) { | 
| return new ListLiteral( | 
| @@ -6727,10 +6988,9 @@ class Parser { | 
| */ | 
| Expression _parseLogicalAndExpression() { | 
| Expression expression = _parseEqualityExpression(); | 
| -    while (_matches(TokenType.AMPERSAND_AMPERSAND)) { | 
| -      Token operator = getAndAdvance(); | 
| +    while (_currentToken.type == TokenType.AMPERSAND_AMPERSAND) { | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseEqualityExpression()); | 
| +          expression, getAndAdvance(), _parseEqualityExpression()); | 
| } | 
| return expression; | 
| } | 
| @@ -6741,20 +7001,22 @@ class Parser { | 
| * is the type arguments that were declared, or `null` if there are no type | 
| * arguments. Return the map literal that was parsed. | 
| * | 
| +   * This method assumes that the current token matches | 
| +   * `TokenType.OPEN_CURLY_BRACKET`. | 
| +   * | 
| *     mapLiteral ::= | 
| *         'const'? typeArguments? '{' (mapLiteralEntry (',' mapLiteralEntry)* ','?)? '}' | 
| */ | 
| MapLiteral _parseMapLiteral(Token modifier, TypeArgumentList typeArguments) { | 
| -    Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | 
| -    List<MapLiteralEntry> entries = new List<MapLiteralEntry>(); | 
| +    Token leftBracket = getAndAdvance(); | 
| if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| return new MapLiteral( | 
| -          modifier, typeArguments, leftBracket, entries, getAndAdvance()); | 
| +          modifier, typeArguments, leftBracket, null, getAndAdvance()); | 
| } | 
| bool wasInInitializer = _inInitializer; | 
| _inInitializer = false; | 
| try { | 
| -      entries.add(parseMapLiteralEntry()); | 
| +      List<MapLiteralEntry> entries = <MapLiteralEntry>[parseMapLiteralEntry()]; | 
| while (_optional(TokenType.COMMA)) { | 
| if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| return new MapLiteral( | 
| @@ -6838,9 +7100,11 @@ class Parser { | 
| SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true); | 
| TypeParameterList typeParameters = _parseGenericMethodTypeParameters(); | 
| FormalParameterList parameters; | 
| -    if (!_matches(TokenType.OPEN_PAREN) && | 
| -        (_matches(TokenType.OPEN_CURLY_BRACKET) || | 
| -            _matches(TokenType.FUNCTION))) { | 
| +    TokenType type = _currentToken.type; | 
| +    // TODO(brianwilkerson) Figure out why we care what the current token is if | 
| +    // it isn't a paren. | 
| +    if (type != TokenType.OPEN_PAREN && | 
| +        (type == TokenType.OPEN_CURLY_BRACKET || type == TokenType.FUNCTION)) { | 
| _reportErrorForToken( | 
| ParserErrorCode.MISSING_METHOD_PARAMETERS, _currentToken.previous); | 
| parameters = new FormalParameterList( | 
| @@ -6878,12 +7142,14 @@ class Parser { | 
| Modifiers modifiers = new Modifiers(); | 
| bool progress = true; | 
| while (progress) { | 
| -      if (_tokenMatches(_peek(), TokenType.PERIOD) || | 
| -          _tokenMatches(_peek(), TokenType.LT) || | 
| -          _tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | 
| +      TokenType nextType = _peek().type; | 
| +      if (nextType == TokenType.PERIOD || | 
| +          nextType == TokenType.LT || | 
| +          nextType == TokenType.OPEN_PAREN) { | 
| return modifiers; | 
| } | 
| -      if (_matchesKeyword(Keyword.ABSTRACT)) { | 
| +      Keyword keyword = _currentToken.keyword; | 
| +      if (keyword == Keyword.ABSTRACT) { | 
| if (modifiers.abstractKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6891,7 +7157,7 @@ class Parser { | 
| } else { | 
| modifiers.abstractKeyword = getAndAdvance(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.CONST)) { | 
| +      } else if (keyword == Keyword.CONST) { | 
| if (modifiers.constKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6899,9 +7165,7 @@ class Parser { | 
| } else { | 
| modifiers.constKeyword = getAndAdvance(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.EXTERNAL) && | 
| -          !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -          !_tokenMatches(_peek(), TokenType.LT)) { | 
| +      } else if (keyword == Keyword.EXTERNAL) { | 
| if (modifiers.externalKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6909,9 +7173,7 @@ class Parser { | 
| } else { | 
| modifiers.externalKeyword = getAndAdvance(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.FACTORY) && | 
| -          !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -          !_tokenMatches(_peek(), TokenType.LT)) { | 
| +      } else if (keyword == Keyword.FACTORY) { | 
| if (modifiers.factoryKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6919,7 +7181,7 @@ class Parser { | 
| } else { | 
| modifiers.factoryKeyword = getAndAdvance(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.FINAL)) { | 
| +      } else if (keyword == Keyword.FINAL) { | 
| if (modifiers.finalKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6927,9 +7189,7 @@ class Parser { | 
| } else { | 
| modifiers.finalKeyword = getAndAdvance(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.STATIC) && | 
| -          !_tokenMatches(_peek(), TokenType.PERIOD) && | 
| -          !_tokenMatches(_peek(), TokenType.LT)) { | 
| +      } else if (keyword == Keyword.STATIC) { | 
| if (modifiers.staticKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6937,7 +7197,7 @@ class Parser { | 
| } else { | 
| modifiers.staticKeyword = getAndAdvance(); | 
| } | 
| -      } else if (_matchesKeyword(Keyword.VAR)) { | 
| +      } else if (keyword == Keyword.VAR) { | 
| if (modifiers.varKeyword != null) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | 
| @@ -6962,16 +7222,15 @@ class Parser { | 
| */ | 
| Expression _parseMultiplicativeExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| _currentToken.next.type.isMultiplicativeOperator) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| expression = _parseUnaryExpression(); | 
| } | 
| while (_currentToken.type.isMultiplicativeOperator) { | 
| -      Token operator = getAndAdvance(); | 
| -      expression = | 
| -          new BinaryExpression(expression, operator, _parseUnaryExpression()); | 
| +      expression = new BinaryExpression( | 
| +          expression, getAndAdvance(), _parseUnaryExpression()); | 
| } | 
| return expression; | 
| } | 
| @@ -6979,6 +7238,8 @@ class Parser { | 
| /** | 
| * Parse a class native clause. Return the native clause that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `_NATIVE`. | 
| +   * | 
| *     classNativeClause ::= | 
| *         'native' name | 
| */ | 
| @@ -6991,11 +7252,13 @@ class Parser { | 
| /** | 
| * Parse a new expression. Return the new expression that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.NEW`. | 
| +   * | 
| *     newExpression ::= | 
| *         instanceCreationExpression | 
| */ | 
| InstanceCreationExpression _parseNewExpression() => | 
| -      _parseInstanceCreationExpression(_expectKeyword(Keyword.NEW)); | 
| +      _parseInstanceCreationExpression(getAndAdvance()); | 
|  | 
| /** | 
| * Parse a non-labeled statement. Return the non-labeled statement that was | 
| @@ -7020,7 +7283,8 @@ class Parser { | 
| Statement _parseNonLabeledStatement() { | 
| // TODO(brianwilkerson) Pass the comment and metadata on where appropriate. | 
| CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | 
| -    if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.OPEN_CURLY_BRACKET) { | 
| if (_tokenMatches(_peek(), TokenType.STRING)) { | 
| Token afterString = _skipStringLiteral(_currentToken.next); | 
| if (afterString != null && afterString.type == TokenType.COLON) { | 
| @@ -7029,7 +7293,7 @@ class Parser { | 
| } | 
| } | 
| return parseBlock(); | 
| -    } else if (_matches(TokenType.KEYWORD) && | 
| +    } else if (type == TokenType.KEYWORD && | 
| !_currentToken.keyword.isPseudoKeyword) { | 
| Keyword keyword = _currentToken.keyword; | 
| // TODO(jwren) compute some metrics to figure out a better order for this | 
| @@ -7064,9 +7328,11 @@ class Parser { | 
| return _parseVariableDeclarationStatementAfterMetadata( | 
| commentAndMetadata); | 
| } else if (keyword == Keyword.VOID) { | 
| -        TypeName returnType = parseReturnType(); | 
| +        TypeName returnType = | 
| +            new TypeName(new SimpleIdentifier(getAndAdvance()), null); | 
| +        Token next = _currentToken.next; | 
| if (_matchesIdentifier() && | 
| -            _peek().matchesAny(const <TokenType>[ | 
| +            next.matchesAny(const <TokenType>[ | 
| TokenType.OPEN_PAREN, | 
| TokenType.OPEN_CURLY_BRACKET, | 
| TokenType.FUNCTION, | 
| @@ -7079,7 +7345,7 @@ class Parser { | 
| // We have found an error of some kind. Try to recover. | 
| // | 
| if (_matchesIdentifier()) { | 
| -            if (_peek().matchesAny(const <TokenType>[ | 
| +            if (next.matchesAny(const <TokenType>[ | 
| TokenType.EQ, | 
| TokenType.COMMA, | 
| TokenType.SEMICOLON | 
| @@ -7104,7 +7370,8 @@ class Parser { | 
| return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON)); | 
| } | 
| } else if (keyword == Keyword.CONST) { | 
| -        if (_peek().matchesAny(const <TokenType>[ | 
| +        Token next = _currentToken.next; | 
| +        if (next.matchesAny(const <TokenType>[ | 
| TokenType.LT, | 
| TokenType.OPEN_CURLY_BRACKET, | 
| TokenType.OPEN_SQUARE_BRACKET, | 
| @@ -7112,8 +7379,8 @@ class Parser { | 
| ])) { | 
| return new ExpressionStatement( | 
| parseExpression2(), _expect(TokenType.SEMICOLON)); | 
| -        } else if (_tokenMatches(_peek(), TokenType.IDENTIFIER)) { | 
| -          Token afterType = _skipTypeName(_peek()); | 
| +        } else if (_tokenMatches(next, TokenType.IDENTIFIER)) { | 
| +          Token afterType = _skipTypeName(next); | 
| if (afterType != null) { | 
| if (_tokenMatches(afterType, TokenType.OPEN_PAREN) || | 
| (_tokenMatches(afterType, TokenType.PERIOD) && | 
| @@ -7158,14 +7425,14 @@ class Parser { | 
| CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken); | 
| } | 
| return statement; | 
| -    } else if (_matches(TokenType.SEMICOLON)) { | 
| +    } else if (type == TokenType.SEMICOLON) { | 
| return _parseEmptyStatement(); | 
| } else if (_isInitializedVariableDeclaration()) { | 
| return _parseVariableDeclarationStatementAfterMetadata( | 
| commentAndMetadata); | 
| } else if (_isFunctionDeclaration()) { | 
| return _parseFunctionDeclarationStatement(); | 
| -    } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| +    } else if (type == TokenType.CLOSE_CURLY_BRACKET) { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT); | 
| return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON)); | 
| } else { | 
| @@ -7197,6 +7464,29 @@ class Parser { | 
| ParserErrorCode.MISSING_KEYWORD_OPERATOR, _currentToken); | 
| operatorKeyword = _createSyntheticKeyword(Keyword.OPERATOR); | 
| } | 
| +    return _parseOperatorAfterKeyword( | 
| +        commentAndMetadata, externalKeyword, returnType, operatorKeyword); | 
| +  } | 
| + | 
| +  /** | 
| +   * Parse an operator declaration starting after the 'operator' keyword. The | 
| +   * [commentAndMetadata] is the documentation comment and metadata to be | 
| +   * associated with the declaration. The [externalKeyword] is the 'external' | 
| +   * token. The [returnType] is the return type that has already been parsed, or | 
| +   * `null` if there was no return type. The [operatorKeyword] is the 'operator' | 
| +   * keyword. Return the operator declaration that was parsed. | 
| +   * | 
| +   *     operatorDeclaration ::= | 
| +   *         operatorSignature (';' | functionBody) | 
| +   * | 
| +   *     operatorSignature ::= | 
| +   *         'external'? returnType? 'operator' operator formalParameterList | 
| +   */ | 
| +  MethodDeclaration _parseOperatorAfterKeyword( | 
| +      CommentAndMetadata commentAndMetadata, | 
| +      Token externalKeyword, | 
| +      TypeName returnType, | 
| +      Token operatorKeyword) { | 
| if (!_currentToken.isUserDefinableOperator) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.NON_USER_DEFINABLE_OPERATOR, [_currentToken.lexeme]); | 
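Editorial aside, not part of the patch: a user-definable operator declaration of the shape `_parseOperatorAfterKeyword` handles, sketched in plain Dart.

    class Vector {
      final num x, y;
      const Vector(this.x, this.y);

      // 'operator', a user-definable operator token, then a parameter list.
      Vector operator +(Vector other) => new Vector(x + other.x, y + other.y);

      String toString() => '($x, $y)';
    }

    void main() {
      print(new Vector(1, 2) + new Vector(3, 4)); // (4, 6)
    }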
| @@ -7242,21 +7532,27 @@ class Parser { | 
| TypeName typeComment = _parseOptionalTypeNameComment(); | 
| if (typeComment != null) { | 
| return typeComment; | 
| -    } else if (_matchesKeyword(Keyword.VOID)) { | 
| -      return parseReturnType(); | 
| -    } else if (_matchesIdentifier() && | 
| -        !_matchesKeyword(Keyword.GET) && | 
| -        !_matchesKeyword(Keyword.SET) && | 
| -        !_matchesKeyword(Keyword.OPERATOR) && | 
| -        (_tokenMatchesIdentifier(_peek()) || | 
| -            _tokenMatches(_peek(), TokenType.LT))) { | 
| -      return parseReturnType(); | 
| -    } else if (_matchesIdentifier() && | 
| -        _tokenMatches(_peek(), TokenType.PERIOD) && | 
| -        _tokenMatchesIdentifier(_peekAt(2)) && | 
| -        (_tokenMatchesIdentifier(_peekAt(3)) || | 
| -            _tokenMatches(_peekAt(3), TokenType.LT))) { | 
| -      return parseReturnType(); | 
| +    } | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.VOID) { | 
| +      return new TypeName(new SimpleIdentifier(getAndAdvance()), null); | 
| +    } else if (_matchesIdentifier()) { | 
| +      Token next = _peek(); | 
| +      if (keyword != Keyword.GET && | 
| +          keyword != Keyword.SET && | 
| +          keyword != Keyword.OPERATOR && | 
| +          (_tokenMatchesIdentifier(next) || | 
| +              _tokenMatches(next, TokenType.LT))) { | 
| +        return parseReturnType(); | 
| +      } | 
| +      Token next2 = next.next; | 
| +      Token next3 = next2.next; | 
| +      if (_tokenMatches(next, TokenType.PERIOD) && | 
| +          _tokenMatchesIdentifier(next2) && | 
| +          (_tokenMatchesIdentifier(next3) || | 
| +              _tokenMatches(next3, TokenType.LT))) { | 
| +        return parseReturnType(); | 
| +      } | 
| } | 
| return null; | 
| } | 
| @@ -7280,35 +7576,67 @@ class Parser { | 
| } | 
|  | 
| /** | 
| +   * Parse a part directive. The [commentAndMetadata] is the metadata to be | 
| +   * associated with the directive. Return the part directive that was | 
| +   * parsed. | 
| +   * | 
| +   * This method assumes that the current token matches `Keyword.PART`. | 
| +   * | 
| +   *     partDirective ::= | 
| +   *         metadata 'part' stringLiteral ';' | 
| +   */ | 
| +  Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) { | 
| +    Token partKeyword = getAndAdvance(); | 
| +    StringLiteral partUri = _parseUri(); | 
| +    Token semicolon = _expect(TokenType.SEMICOLON); | 
| +    return new PartDirective(commentAndMetadata.comment, | 
| +        commentAndMetadata.metadata, partKeyword, partUri, semicolon); | 
| +  } | 
| + | 
| +  /** | 
| +   * Parse a part-of directive. The [commentAndMetadata] is the metadata to be | 
| +   * associated with the directive. Return the part-of directive that was | 
| +   * parsed. | 
| +   * | 
| +   * This method assumes that the current token matches [Keyword.PART] and that | 
| +   * the following token matches the identifier 'of'. | 
| +   * | 
| +   *     partOfDirective ::= | 
| +   *         metadata 'part' 'of' identifier ';' | 
| +   */ | 
| +  Directive _parsePartOfDirective(CommentAndMetadata commentAndMetadata) { | 
| +    Token partKeyword = getAndAdvance(); | 
| +    Token ofKeyword = getAndAdvance(); | 
| +    LibraryIdentifier libraryName = _parseLibraryName( | 
| +        ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword); | 
| +    Token semicolon = _expect(TokenType.SEMICOLON); | 
| +    return new PartOfDirective( | 
| +        commentAndMetadata.comment, | 
| +        commentAndMetadata.metadata, | 
| +        partKeyword, | 
| +        ofKeyword, | 
| +        libraryName, | 
| +        semicolon); | 
| +  } | 
| + | 
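Editorial aside, not part of the patch: a two-file sketch (hypothetical file names) of the two directives the dispatch below chooses between.

    // lib/my_library.dart
    library my.library;

    part 'src/helpers.dart';

    void main() => helper();

    // lib/src/helpers.dart
    part of my.library;

    void helper() => print('from the part file');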
| +  /** | 
| * Parse a part or part-of directive. The [commentAndMetadata] is the metadata | 
| * to be associated with the directive. Return the part or part-of directive | 
| * that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.PART`. | 
| +   * | 
| *     partDirective ::= | 
| *         metadata 'part' stringLiteral ';' | 
| * | 
| *     partOfDirective ::= | 
| *         metadata 'part' 'of' identifier ';' | 
| */ | 
| -  Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) { | 
| -    Token partKeyword = _expectKeyword(Keyword.PART); | 
| -    if (_matchesString(_OF)) { | 
| -      Token ofKeyword = getAndAdvance(); | 
| -      LibraryIdentifier libraryName = _parseLibraryName( | 
| -          ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword); | 
| -      Token semicolon = _expect(TokenType.SEMICOLON); | 
| -      return new PartOfDirective( | 
| -          commentAndMetadata.comment, | 
| -          commentAndMetadata.metadata, | 
| -          partKeyword, | 
| -          ofKeyword, | 
| -          libraryName, | 
| -          semicolon); | 
| +  Directive _parsePartOrPartOfDirective(CommentAndMetadata commentAndMetadata) { | 
| +    if (_tokenMatchesString(_peek(), _OF)) { | 
| +      return _parsePartOfDirective(commentAndMetadata); | 
| } | 
| -    StringLiteral partUri = _parseUri(); | 
| -    Token semicolon = _expect(TokenType.SEMICOLON); | 
| -    return new PartDirective(commentAndMetadata.comment, | 
| -        commentAndMetadata.metadata, partKeyword, partUri, semicolon); | 
| +    return _parsePartDirective(commentAndMetadata); | 
| } | 
|  | 
| /** | 
| @@ -7324,11 +7652,12 @@ class Parser { | 
| */ | 
| Expression _parsePostfixExpression() { | 
| Expression operand = _parseAssignableExpression(true); | 
| -    if (_matches(TokenType.OPEN_SQUARE_BRACKET) || | 
| -        _matches(TokenType.PERIOD) || | 
| -        _matches(TokenType.QUESTION_PERIOD) || | 
| -        _matches(TokenType.OPEN_PAREN) || | 
| -        (parseGenericMethods && _matches(TokenType.LT))) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.OPEN_SQUARE_BRACKET || | 
| +        type == TokenType.PERIOD || | 
| +        type == TokenType.QUESTION_PERIOD || | 
| +        type == TokenType.OPEN_PAREN || | 
| +        (parseGenericMethods && type == TokenType.LT)) { | 
| do { | 
| if (_isLikelyArgumentList()) { | 
| TypeArgumentList typeArguments = _parseOptionalTypeArguments(); | 
| @@ -7348,10 +7677,11 @@ class Parser { | 
| } else { | 
| operand = _parseAssignableSelector(operand, true); | 
| } | 
| -      } while (_matches(TokenType.OPEN_SQUARE_BRACKET) || | 
| -          _matches(TokenType.PERIOD) || | 
| -          _matches(TokenType.QUESTION_PERIOD) || | 
| -          _matches(TokenType.OPEN_PAREN)); | 
| +        type = _currentToken.type; | 
| +      } while (type == TokenType.OPEN_SQUARE_BRACKET || | 
| +          type == TokenType.PERIOD || | 
| +          type == TokenType.QUESTION_PERIOD || | 
| +          type == TokenType.OPEN_PAREN); | 
| return operand; | 
| } | 
| if (!_currentToken.type.isIncrementOperator) { | 
| @@ -7363,6 +7693,37 @@ class Parser { | 
| } | 
|  | 
| /** | 
| +   * Parse a prefixed identifier, given that the [qualifier] has already been | 
| +   * parsed. Return the prefixed identifier that was parsed. | 
| +   * | 
| +   *     prefixedIdentifier ::= | 
| +   *         identifier ('.' identifier)? | 
| +   */ | 
| +  Identifier _parsePrefixedIdentifierAfterIdentifier( | 
| +      SimpleIdentifier qualifier) { | 
| +    if (!_matches(TokenType.PERIOD) || _injectGenericCommentTypeList()) { | 
| +      return qualifier; | 
| +    } | 
| +    Token period = getAndAdvance(); | 
| +    SimpleIdentifier qualified = parseSimpleIdentifier(); | 
| +    return new PrefixedIdentifier(qualifier, period, qualified); | 
| +  } | 
| + | 
| +  /** | 
| +   * Parse a prefixed identifier. Return the prefixed identifier that was | 
| +   * parsed. | 
| +   * | 
| +   * This method assumes that the current token matches an identifier. | 
| +   * | 
| +   *     prefixedIdentifier ::= | 
| +   *         identifier ('.' identifier)? | 
| +   */ | 
| +  Identifier _parsePrefixedIdentifierUnchecked() { | 
| +    return _parsePrefixedIdentifierAfterIdentifier( | 
| +        _parseSimpleIdentifierUnchecked()); | 
| +  } | 
| + | 
| +  /** | 
| * Parse a primary expression. Return the primary expression that was parsed. | 
| * | 
| *     primary ::= | 
| @@ -7386,21 +7747,52 @@ class Parser { | 
| *       | listLiteral | 
| */ | 
| Expression _parsePrimaryExpression() { | 
| -    if (_matchesKeyword(Keyword.THIS)) { | 
| +    if (_matchesIdentifier()) { | 
| +      // TODO(brianwilkerson) The code below was an attempt to recover from an | 
| +      // error case, but it needs to be applied as a recovery only after we | 
| +      // know that parsing it as an identifier doesn't work. Leaving the code as | 
| +      // a reminder of how to recover. | 
| +//      if (isFunctionExpression(_peek())) { | 
| +//        // | 
| +//        // Function expressions were allowed to have names at one point, but this is now illegal. | 
| +//        // | 
| +//        reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdvance()); | 
| +//        return parseFunctionExpression(); | 
| +//      } | 
| +      return _parsePrefixedIdentifierUnchecked(); | 
| +    } | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.STRING) { | 
| +      return parseStringLiteral(); | 
| +    } else if (type == TokenType.INT) { | 
| +      Token token = getAndAdvance(); | 
| +      int value = null; | 
| +      try { | 
| +        value = int.parse(token.lexeme); | 
| +      } on FormatException { | 
| +        // The invalid format should have been reported by the scanner. | 
| +      } | 
| +      return new IntegerLiteral(token, value); | 
| +    } | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.NULL) { | 
| +      return new NullLiteral(getAndAdvance()); | 
| +    } else if (keyword == Keyword.NEW) { | 
| +      return _parseNewExpression(); | 
| +    } else if (keyword == Keyword.THIS) { | 
| return new ThisExpression(getAndAdvance()); | 
| -    } else if (_matchesKeyword(Keyword.SUPER)) { | 
| +    } else if (keyword == Keyword.SUPER) { | 
| // TODO(paulberry): verify with Gilad that "super" must be followed by | 
| // unconditionalAssignableSelector in this case. | 
| return _parseAssignableSelector( | 
| new SuperExpression(getAndAdvance()), false, | 
| allowConditional: false); | 
| -    } else if (_matchesKeyword(Keyword.NULL)) { | 
| -      return new NullLiteral(getAndAdvance()); | 
| -    } else if (_matchesKeyword(Keyword.FALSE)) { | 
| +    } else if (keyword == Keyword.FALSE) { | 
| return new BooleanLiteral(getAndAdvance(), false); | 
| -    } else if (_matchesKeyword(Keyword.TRUE)) { | 
| +    } else if (keyword == Keyword.TRUE) { | 
| return new BooleanLiteral(getAndAdvance(), true); | 
| -    } else if (_matches(TokenType.DOUBLE)) { | 
| +    } | 
| +    if (type == TokenType.DOUBLE) { | 
| Token token = getAndAdvance(); | 
| double value = 0.0; | 
| try { | 
| @@ -7409,7 +7801,7 @@ class Parser { | 
| // The invalid format should have been reported by the scanner. | 
| } | 
| return new DoubleLiteral(token, value); | 
| -    } else if (_matches(TokenType.HEXADECIMAL)) { | 
| +    } else if (type == TokenType.HEXADECIMAL) { | 
| Token token = getAndAdvance(); | 
| int value = null; | 
| try { | 
| @@ -7418,35 +7810,9 @@ class Parser { | 
| // The invalid format should have been reported by the scanner. | 
| } | 
| return new IntegerLiteral(token, value); | 
| -    } else if (_matches(TokenType.INT)) { | 
| -      Token token = getAndAdvance(); | 
| -      int value = null; | 
| -      try { | 
| -        value = int.parse(token.lexeme); | 
| -      } on FormatException { | 
| -        // The invalid format should have been reported by the scanner. | 
| -      } | 
| -      return new IntegerLiteral(token, value); | 
| -    } else if (_matches(TokenType.STRING)) { | 
| -      return parseStringLiteral(); | 
| -    } else if (_matchesIdentifier()) { | 
| -      // TODO(brianwilkerson) The code below was an attempt to recover from an | 
| -      // error case, but it needs to be applied as a recovery only after we | 
| -      // know that parsing it as an identifier doesn't work. Leaving the code as | 
| -      // a reminder of how to recover. | 
| -//            if (isFunctionExpression(peek())) { | 
| -//              // | 
| -//              // Function expressions were allowed to have names at one point, but this is now illegal. | 
| -//              // | 
| -//              reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdvance()); | 
| -//              return parseFunctionExpression(); | 
| -//            } | 
| -      return parsePrefixedIdentifier(); | 
| -    } else if (_matchesKeyword(Keyword.NEW)) { | 
| -      return _parseNewExpression(); | 
| -    } else if (_matchesKeyword(Keyword.CONST)) { | 
| +    } else if (keyword == Keyword.CONST) { | 
| return _parseConstExpression(); | 
| -    } else if (_matches(TokenType.OPEN_PAREN)) { | 
| +    } else if (type == TokenType.OPEN_PAREN) { | 
| if (_isFunctionExpression(_currentToken)) { | 
| return parseFunctionExpression(); | 
| } | 
| @@ -7461,20 +7827,20 @@ class Parser { | 
| } finally { | 
| _inInitializer = wasInInitializer; | 
| } | 
| -    } else if (_matches(TokenType.LT) || _injectGenericCommentTypeList()) { | 
| +    } else if (type == TokenType.LT || _injectGenericCommentTypeList()) { | 
| return _parseListOrMapLiteral(null); | 
| -    } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | 
| +    } else if (type == TokenType.OPEN_CURLY_BRACKET) { | 
| return _parseMapLiteral(null, null); | 
| -    } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) || | 
| -        _matches(TokenType.INDEX)) { | 
| +    } else if (type == TokenType.OPEN_SQUARE_BRACKET || | 
| +        type == TokenType.INDEX) { | 
| return _parseListLiteral(null, null); | 
| -    } else if (_matches(TokenType.QUESTION) && | 
| +    } else if (type == TokenType.QUESTION && | 
| _tokenMatches(_peek(), TokenType.IDENTIFIER)) { | 
| _reportErrorForCurrentToken( | 
| ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]); | 
| _advance(); | 
| return _parsePrimaryExpression(); | 
| -    } else if (_matchesKeyword(Keyword.VOID)) { | 
| +    } else if (keyword == Keyword.VOID) { | 
| // | 
| // Recover from having a return type of "void" where a return type is not | 
| // expected. | 
| @@ -7484,7 +7850,7 @@ class Parser { | 
| ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]); | 
| _advance(); | 
| return _parsePrimaryExpression(); | 
| -    } else if (_matches(TokenType.HASH)) { | 
| +    } else if (type == TokenType.HASH) { | 
| return _parseSymbolLiteral(); | 
| } else { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
| @@ -7493,21 +7859,31 @@ class Parser { | 
| } | 
|  | 
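Editorial aside, not part of the patch: the primary expression forms handled by the rewritten branch chain above, gathered into one small Dart program.

    class Counter {
      int value = 0;
      void bump() {
        this.value++; // 'this' as a primary expression
      }
    }

    void main() {
      var c = new Counter()..bump();
      var primaries = [
        42,         // integer literal
        0x2A,       // hexadecimal literal
        3.14,       // double literal
        'text',     // string literal
        true,       // boolean literal
        null,       // null literal
        #aSymbol,   // symbol literal
        [1, 2, 3],  // list literal
        {'a': 1},   // map literal
        (1 + 2),    // parenthesized expression
        c.value,    // (prefixed) identifier
      ];
      print(primaries.length); // 11
    }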
| /** | 
| -   * Parse a redirecting constructor invocation. Return the redirecting | 
| +   * Parse a redirecting constructor invocation. The flag [hasPeriod] should be | 
| +   * `true` if the `this` is followed by a period. Return the redirecting | 
| * constructor invocation that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.THIS`. | 
| +   * | 
| *     redirectingConstructorInvocation ::= | 
| *         'this' ('.' identifier)? arguments | 
| */ | 
| -  RedirectingConstructorInvocation _parseRedirectingConstructorInvocation() { | 
| -    Token keyword = _expectKeyword(Keyword.THIS); | 
| +  RedirectingConstructorInvocation _parseRedirectingConstructorInvocation( | 
| +      bool hasPeriod) { | 
| +    Token keyword = getAndAdvance(); | 
| Token period = null; | 
| SimpleIdentifier constructorName = null; | 
| -    if (_matches(TokenType.PERIOD)) { | 
| +    if (hasPeriod) { | 
| period = getAndAdvance(); | 
| -      constructorName = parseSimpleIdentifier(); | 
| +      if (_matchesIdentifier()) { | 
| +        constructorName = _parseSimpleIdentifierUnchecked(isDeclaration: false); | 
| +      } else { | 
| +        _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
| +        constructorName = _createSyntheticIdentifier(isDeclaration: false); | 
| +        _advance(); | 
| +      } | 
| } | 
| -    ArgumentList argumentList = parseArgumentList(); | 
| +    ArgumentList argumentList = _parseArgumentListChecked(); | 
| return new RedirectingConstructorInvocation( | 
| keyword, period, constructorName, argumentList); | 
| } | 
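Editorial aside, not part of the patch: redirecting constructor invocations, with and without a named target, in the form this method parses.

    class Point {
      final num x, y;

      Point(this.x, this.y);

      // Redirects to the unnamed constructor: 'this(...)', no period.
      Point.origin() : this(0, 0);

      // Redirects to a named constructor: 'this' '.' identifier '(...)'.
      Point.onXAxis(num x) : this.fromX(x);

      Point.fromX(num x) : this(x, 0);
    }

    void main() {
      print(new Point.origin().x);   // 0
      print(new Point.onXAxis(3).y); // 0
    }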
| @@ -7521,29 +7897,29 @@ class Parser { | 
| *       | 'super' relationalOperator bitwiseOrExpression | 
| */ | 
| Expression _parseRelationalExpression() { | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| _currentToken.next.type.isRelationalOperator) { | 
| Expression expression = new SuperExpression(getAndAdvance()); | 
| Token operator = getAndAdvance(); | 
| -      expression = new BinaryExpression( | 
| +      return new BinaryExpression( | 
| expression, operator, parseBitwiseOrExpression()); | 
| -      return expression; | 
| } | 
| Expression expression = parseBitwiseOrExpression(); | 
| -    if (_matchesKeyword(Keyword.AS)) { | 
| +    Keyword keyword = _currentToken.keyword; | 
| +    if (keyword == Keyword.AS) { | 
| Token asOperator = getAndAdvance(); | 
| -      expression = new AsExpression(expression, asOperator, parseTypeName()); | 
| -    } else if (_matchesKeyword(Keyword.IS)) { | 
| +      return new AsExpression(expression, asOperator, parseTypeName()); | 
| +    } else if (keyword == Keyword.IS) { | 
| Token isOperator = getAndAdvance(); | 
| Token notOperator = null; | 
| if (_matches(TokenType.BANG)) { | 
| notOperator = getAndAdvance(); | 
| } | 
| -      expression = new IsExpression( | 
| +      return new IsExpression( | 
| expression, isOperator, notOperator, parseTypeName()); | 
| } else if (_currentToken.type.isRelationalOperator) { | 
| Token operator = getAndAdvance(); | 
| -      expression = new BinaryExpression( | 
| +      return new BinaryExpression( | 
| expression, operator, parseBitwiseOrExpression()); | 
| } | 
| return expression; | 
| @@ -7552,20 +7928,24 @@ class Parser { | 
| /** | 
| * Parse a rethrow expression. Return the rethrow expression that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.RETHROW`. | 
| +   * | 
| *     rethrowExpression ::= | 
| *         'rethrow' | 
| */ | 
| Expression _parseRethrowExpression() => | 
| -      new RethrowExpression(_expectKeyword(Keyword.RETHROW)); | 
| +      new RethrowExpression(getAndAdvance()); | 
|  | 
| /** | 
| * Parse a return statement. Return the return statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.RETURN`. | 
| +   * | 
| *     returnStatement ::= | 
| *         'return' expression? ';' | 
| */ | 
| Statement _parseReturnStatement() { | 
| -    Token returnKeyword = _expectKeyword(Keyword.RETURN); | 
| +    Token returnKeyword = getAndAdvance(); | 
| if (_matches(TokenType.SEMICOLON)) { | 
| return new ReturnStatement(returnKeyword, null, getAndAdvance()); | 
| } | 
| @@ -7582,6 +7962,8 @@ class Parser { | 
| * already been parsed, or `null` if there was no return type. Return the | 
| * setter that was parsed. | 
| * | 
| +   * This method assumes that the current token matches `Keyword.SET`. | 
| +   * | 
| *     setter ::= | 
| *         setterSignature functionBody? | 
| * | 
| @@ -7590,7 +7972,7 @@ class Parser { | 
| */ | 
| MethodDeclaration _parseSetter(CommentAndMetadata commentAndMetadata, | 
| Token externalKeyword, Token staticKeyword, TypeName returnType) { | 
| -    Token propertyKeyword = _expectKeyword(Keyword.SET); | 
| +    Token propertyKeyword = getAndAdvance(); | 
| SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true); | 
| FormalParameterList parameters = parseFormalParameterList(); | 
| _validateFormalParameterList(parameters); | 
| @@ -7624,21 +8006,39 @@ class Parser { | 
| */ | 
| Expression _parseShiftExpression() { | 
| Expression expression; | 
| -    if (_matchesKeyword(Keyword.SUPER) && | 
| +    if (_currentToken.keyword == Keyword.SUPER && | 
| _currentToken.next.type.isShiftOperator) { | 
| expression = new SuperExpression(getAndAdvance()); | 
| } else { | 
| expression = _parseAdditiveExpression(); | 
| } | 
| while (_currentToken.type.isShiftOperator) { | 
| -      Token operator = getAndAdvance(); | 
| expression = new BinaryExpression( | 
| -          expression, operator, _parseAdditiveExpression()); | 
| +          expression, getAndAdvance(), _parseAdditiveExpression()); | 
| } | 
| return expression; | 
| } | 
|  | 
| /** | 
| +   * Parse a simple identifier. Return the simple identifier that was parsed. | 
| +   * | 
| +   * This method assumes that the current token matches an identifier. | 
| +   * | 
| +   *     identifier ::= | 
| +   *         IDENTIFIER | 
| +   */ | 
| +  SimpleIdentifier _parseSimpleIdentifierUnchecked( | 
| +      {bool isDeclaration: false}) { | 
| +    String lexeme = _currentToken.lexeme; | 
| +    if ((_inAsync || _inGenerator) && | 
| +        (lexeme == ASYNC || lexeme == _AWAIT || lexeme == _YIELD)) { | 
| +      _reportErrorForCurrentToken( | 
| +          ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER); | 
| +    } | 
| +    return new SimpleIdentifier(getAndAdvance(), isDeclaration: isDeclaration); | 
| +  } | 
| + | 
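Editorial aside, not part of the patch, and assuming Dart 1.x contextual-keyword rules: outside an asynchronous or generator body, `async`, `await`, and `yield` are ordinary identifiers; using them as names inside one is what ASYNC_KEYWORD_USED_AS_IDENTIFIER reports.

    // Legal outside an async or generator body (Dart 1.x).
    int async = 0;

    void main() {
      print(async); // 0
    }

    // Inside an async body the same name would be reported, e.g.:
    //   f() async { var await = 0; }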
| +  /** | 
| * Parse a list of statements within a switch statement. Return the statements | 
| * that were parsed. | 
| * | 
| @@ -7646,10 +8046,11 @@ class Parser { | 
| *         statement* | 
| */ | 
| List<Statement> _parseStatementList() { | 
| -    List<Statement> statements = new List<Statement>(); | 
| +    List<Statement> statements = <Statement>[]; | 
| Token statementStart = _currentToken; | 
| -    while (!_matches(TokenType.EOF) && | 
| -        !_matches(TokenType.CLOSE_CURLY_BRACKET) && | 
| +    TokenType type = _currentToken.type; | 
| +    while (type != TokenType.EOF && | 
| +        type != TokenType.CLOSE_CURLY_BRACKET && | 
| !_isSwitchMember()) { | 
| statements.add(parseStatement2()); | 
| if (identical(_currentToken, statementStart)) { | 
| @@ -7658,6 +8059,7 @@ class Parser { | 
| _advance(); | 
| } | 
| statementStart = _currentToken; | 
| +      type = _currentToken.type; | 
| } | 
| return statements; | 
| } | 
| @@ -7665,15 +8067,20 @@ class Parser { | 
| /** | 
| * Parse a string literal that contains interpolations. Return the string | 
| * literal that was parsed. | 
| +   * | 
| +   * This method assumes that the current token matches either | 
| +   * [TokenType.STRING_INTERPOLATION_EXPRESSION] or | 
| +   * [TokenType.STRING_INTERPOLATION_IDENTIFIER]. | 
| */ | 
| StringInterpolation _parseStringInterpolation(Token string) { | 
| -    List<InterpolationElement> elements = new List<InterpolationElement>(); | 
| -    bool hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | 
| -        _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER); | 
| -    elements.add(new InterpolationString( | 
| -        string, _computeStringValue(string.lexeme, true, !hasMore))); | 
| +    List<InterpolationElement> elements = <InterpolationElement>[ | 
| +      new InterpolationString( | 
| +          string, _computeStringValue(string.lexeme, true, false)) | 
| +    ]; | 
| +    bool hasMore = true; | 
| +    bool isExpression = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION); | 
| while (hasMore) { | 
| -      if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION)) { | 
| +      if (isExpression) { | 
| Token openToken = getAndAdvance(); | 
| bool wasInInitializer = _inInitializer; | 
| _inInitializer = false; | 
| @@ -7697,8 +8104,9 @@ class Parser { | 
| } | 
| if (_matches(TokenType.STRING)) { | 
| string = getAndAdvance(); | 
| -        hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | 
| -            _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER); | 
| +        isExpression = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION); | 
| +        hasMore = | 
| +            isExpression || _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER); | 
| elements.add(new InterpolationString( | 
| string, _computeStringValue(string.lexeme, false, !hasMore))); | 
| } else { | 
| @@ -7709,21 +8117,47 @@ class Parser { | 
| } | 
|  | 
| /** | 
| +   * Parse a string literal. Return the string literal that was parsed. | 
| +   * | 
| +   * This method assumes that the current token matches [TokenType.STRING]. |
| +   * | 
| +   *     stringLiteral ::= | 
| +   *         MULTI_LINE_STRING+ | 
| +   *       | SINGLE_LINE_STRING+ | 
| +   */ | 
| +  StringLiteral _parseStringLiteralUnchecked() { | 
| +    List<StringLiteral> strings = <StringLiteral>[]; | 
| +    do { | 
| +      Token string = getAndAdvance(); | 
| +      if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | 
| +          _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) { | 
| +        strings.add(_parseStringInterpolation(string)); | 
| +      } else { | 
| +        strings.add(new SimpleStringLiteral( | 
| +            string, _computeStringValue(string.lexeme, true, true))); | 
| +      } | 
| +    } while (_matches(TokenType.STRING)); | 
| +    return strings.length == 1 ? strings[0] : new AdjacentStrings(strings); | 
| +  } | 
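A sketch (illustrative only) of the three result shapes named in `_parseStringLiteralUnchecked`: a lone STRING token, a string containing interpolation, and adjacent STRING tokens folded into a single `AdjacentStrings` node.

    var n = 1;
    var a = 'plain';            // one STRING token      -> SimpleStringLiteral
    var b = 'n is $n';          // has interpolation     -> StringInterpolation
    var c = 'first ' 'second';  // two adjacent STRINGs  -> AdjacentStrings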
| + | 
| +  /** | 
| * Parse a super constructor invocation. Return the super constructor | 
| * invocation that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.SUPER]. | 
| +   * | 
| *     superConstructorInvocation ::= | 
| *         'super' ('.' identifier)? arguments | 
| */ | 
| SuperConstructorInvocation _parseSuperConstructorInvocation() { | 
| -    Token keyword = _expectKeyword(Keyword.SUPER); | 
| +    Token keyword = getAndAdvance(); | 
| Token period = null; | 
| SimpleIdentifier constructorName = null; | 
| if (_matches(TokenType.PERIOD)) { | 
| period = getAndAdvance(); | 
| constructorName = parseSimpleIdentifier(); | 
| } | 
| -    ArgumentList argumentList = parseArgumentList(); | 
| +    ArgumentList argumentList = _parseArgumentListChecked(); | 
| return new SuperConstructorInvocation( | 
| keyword, period, constructorName, argumentList); | 
| } | 
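A sketch of the two initializer forms covered by the grammar above (class names are made up): a plain `super(...)` call and one that names a specific super constructor.

    class Base {
      Base(int x);
      Base.named(int x);
    }

    class Derived extends Base {
      Derived() : super(0);             // 'super' arguments
      Derived.other() : super.named(1); // 'super' '.' identifier arguments
    }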
| @@ -7751,14 +8185,14 @@ class Parser { | 
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | 
| Token defaultKeyword = null; | 
| -      List<SwitchMember> members = new List<SwitchMember>(); | 
| -      while (!_matches(TokenType.EOF) && | 
| -          !_matches(TokenType.CLOSE_CURLY_BRACKET)) { | 
| -        List<Label> labels = new List<Label>(); | 
| +      List<SwitchMember> members = <SwitchMember>[]; | 
| +      TokenType type = _currentToken.type; | 
| +      while (type != TokenType.EOF && type != TokenType.CLOSE_CURLY_BRACKET) { | 
| +        List<Label> labels = <Label>[]; | 
| while ( | 
| _matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) { | 
| SimpleIdentifier identifier = | 
| -              parseSimpleIdentifier(isDeclaration: true); | 
| +              _parseSimpleIdentifierUnchecked(isDeclaration: true); | 
| String label = identifier.token.lexeme; | 
| if (definedLabels.contains(label)) { | 
| _reportErrorForToken( | 
| @@ -7768,10 +8202,11 @@ class Parser { | 
| } else { | 
| definedLabels.add(label); | 
| } | 
| -          Token colon = _expect(TokenType.COLON); | 
| +          Token colon = getAndAdvance(); | 
| labels.add(new Label(identifier, colon)); | 
| } | 
| -        if (_matchesKeyword(Keyword.CASE)) { | 
| +        Keyword keyword = _currentToken.keyword; | 
| +        if (keyword == Keyword.CASE) { | 
| Token caseKeyword = getAndAdvance(); | 
| Expression caseExpression = parseExpression2(); | 
| Token colon = _expect(TokenType.COLON); | 
| @@ -7782,7 +8217,7 @@ class Parser { | 
| ParserErrorCode.SWITCH_HAS_CASE_AFTER_DEFAULT_CASE, | 
| caseKeyword); | 
| } | 
| -        } else if (_matchesKeyword(Keyword.DEFAULT)) { | 
| +        } else if (keyword == Keyword.DEFAULT) { | 
| if (defaultKeyword != null) { | 
| _reportErrorForToken( | 
| ParserErrorCode.SWITCH_HAS_MULTIPLE_DEFAULT_CASES, _peek()); | 
| @@ -7795,13 +8230,20 @@ class Parser { | 
| // We need to advance, otherwise we could end up in an infinite loop, | 
| // but this could be a lot smarter about recovering from the error. | 
| _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_CASE_OR_DEFAULT); | 
| -          while (!_matches(TokenType.EOF) && | 
| -              !_matches(TokenType.CLOSE_CURLY_BRACKET) && | 
| -              !_matchesKeyword(Keyword.CASE) && | 
| -              !_matchesKeyword(Keyword.DEFAULT)) { | 
| +          bool atEndOrNextMember() { | 
| +            TokenType type = _currentToken.type; | 
| +            if (type == TokenType.EOF || | 
| +                type == TokenType.CLOSE_CURLY_BRACKET) { | 
| +              return true; | 
| +            } | 
| +            Keyword keyword = _currentToken.keyword; | 
| +            return keyword == Keyword.CASE || keyword == Keyword.DEFAULT; | 
| +          } | 
| +          while (!atEndOrNextMember()) { | 
| _advance(); | 
| } | 
| } | 
| +        type = _currentToken.type; | 
| } | 
| Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | 
| return new SwitchStatement(keyword, leftParenthesis, expression, | 
| @@ -7814,16 +8256,17 @@ class Parser { | 
| /** | 
| * Parse a symbol literal. Return the symbol literal that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [TokenType.HASH]. | 
| +   * | 
| *     symbolLiteral ::= | 
| *         '#' identifier ('.' identifier)* | 
| */ | 
| SymbolLiteral _parseSymbolLiteral() { | 
| Token poundSign = getAndAdvance(); | 
| -    List<Token> components = new List<Token>(); | 
| +    List<Token> components = <Token>[]; | 
| if (_matchesIdentifier()) { | 
| components.add(getAndAdvance()); | 
| -      while (_matches(TokenType.PERIOD)) { | 
| -        _advance(); | 
| +      while (_optional(TokenType.PERIOD)) { | 
| if (_matchesIdentifier()) { | 
| components.add(getAndAdvance()); | 
| } else { | 
| @@ -7834,7 +8277,7 @@ class Parser { | 
| } | 
| } else if (_currentToken.isOperator) { | 
| components.add(getAndAdvance()); | 
| -    } else if (_tokenMatchesKeyword(_currentToken, Keyword.VOID)) { | 
| +    } else if (_matchesKeyword(Keyword.VOID)) { | 
| components.add(getAndAdvance()); | 
| } else { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
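A sketch of symbol literals matching the grammar above; the operator and `void` forms correspond to the two extra branches in `_parseSymbolLiteral`.

    var a = #length;           // '#' identifier
    var b = #dart.core.print;  // '#' identifier ('.' identifier)*
    var c = #+;                // '#' operator
    var d = #void;             // '#' 'void' (accepted by the branch above)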
| @@ -7846,12 +8289,15 @@ class Parser { | 
| /** | 
| * Parse a throw expression. Return the throw expression that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.THROW]. | 
| +   * | 
| *     throwExpression ::= | 
| *         'throw' expression | 
| */ | 
| Expression _parseThrowExpression() { | 
| -    Token keyword = _expectKeyword(Keyword.THROW); | 
| -    if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) { | 
| +    Token keyword = getAndAdvance(); | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.SEMICOLON || type == TokenType.CLOSE_PAREN) { | 
| _reportErrorForToken( | 
| ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken); | 
| return new ThrowExpression(keyword, _createSyntheticIdentifier()); | 
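A deliberately broken sketch of the recovery above: a bare `throw` immediately followed by `;` or `)` is reported as MISSING_EXPRESSION_IN_THROW, and a synthetic identifier stands in for the missing expression.

    f(bool bad) {
      if (bad) {
        throw;   // error: MISSING_EXPRESSION_IN_THROW; the AST still gets a
      }          // ThrowExpression with a synthetic identifier
    }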
| @@ -7863,12 +8309,15 @@ class Parser { | 
| /** | 
| * Parse a throw expression. Return the throw expression that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.THROW]. | 
| +   * | 
| *     throwExpressionWithoutCascade ::= | 
| *         'throw' expressionWithoutCascade | 
| */ | 
| Expression _parseThrowExpressionWithoutCascade() { | 
| -    Token keyword = _expectKeyword(Keyword.THROW); | 
| -    if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) { | 
| +    Token keyword = getAndAdvance(); | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.SEMICOLON || type == TokenType.CLOSE_PAREN) { | 
| _reportErrorForToken( | 
| ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken); | 
| return new ThrowExpression(keyword, _createSyntheticIdentifier()); | 
| @@ -7880,6 +8329,8 @@ class Parser { | 
| /** | 
| * Parse a try statement. Return the try statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.TRY]. | 
| +   * | 
| *     tryStatement ::= | 
| *         'try' block (onPart+ finallyPart? | finallyPart) | 
| * | 
| @@ -7894,9 +8345,9 @@ class Parser { | 
| *         'finally' block | 
| */ | 
| Statement _parseTryStatement() { | 
| -    Token tryKeyword = _expectKeyword(Keyword.TRY); | 
| -    Block body = parseBlock(); | 
| -    List<CatchClause> catchClauses = new List<CatchClause>(); | 
| +    Token tryKeyword = getAndAdvance(); | 
| +    Block body = _parseBlockChecked(); | 
| +    List<CatchClause> catchClauses = <CatchClause>[]; | 
| Block finallyClause = null; | 
| while (_matchesString(_ON) || _matchesKeyword(Keyword.CATCH)) { | 
| Token onKeyword = null; | 
| @@ -7921,7 +8372,7 @@ class Parser { | 
| } | 
| rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| } | 
| -      Block catchBody = parseBlock(); | 
| +      Block catchBody = _parseBlockChecked(); | 
| catchClauses.add(new CatchClause( | 
| onKeyword, | 
| exceptionType, | 
| @@ -7936,11 +8387,9 @@ class Parser { | 
| Token finallyKeyword = null; | 
| if (_matchesKeyword(Keyword.FINALLY)) { | 
| finallyKeyword = getAndAdvance(); | 
| -      finallyClause = parseBlock(); | 
| -    } else { | 
| -      if (catchClauses.isEmpty) { | 
| -        _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY); | 
| -      } | 
| +      finallyClause = _parseBlockChecked(); | 
| +    } else if (catchClauses.isEmpty) { | 
| +      _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY); | 
| } | 
| return new TryStatement( | 
| tryKeyword, body, catchClauses, finallyKeyword, finallyClause); | 
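A sketch of inputs for `_parseTryStatement` (the last statement is deliberately incomplete): an on-only clause, a catch clause with a stack trace plus a finally block, and a bare `try` block that triggers MISSING_CATCH_OR_FINALLY.

    h() {
      try {} on StateError {}            // onPart without a parameter list
      try {} catch (e, st) {} finally {} // catchPart with stack trace, then finallyPart
      try {}                             // error: MISSING_CATCH_OR_FINALLY
    }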
| @@ -7950,6 +8399,8 @@ class Parser { | 
| * Parse a type alias. The [commentAndMetadata] is the metadata to be | 
| * associated with the member. Return the type alias that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.TYPEDEF]. | 
| +   * | 
| *     typeAlias ::= | 
| *         'typedef' typeAliasBody | 
| * | 
| @@ -7970,7 +8421,7 @@ class Parser { | 
| *         returnType? name | 
| */ | 
| TypeAlias _parseTypeAlias(CommentAndMetadata commentAndMetadata) { | 
| -    Token keyword = _expectKeyword(Keyword.TYPEDEF); | 
| +    Token keyword = getAndAdvance(); | 
| if (_matchesIdentifier()) { | 
| Token next = _peek(); | 
| if (_tokenMatches(next, TokenType.LT)) { | 
| @@ -7995,11 +8446,11 @@ class Parser { | 
|  | 
| TypeName _parseTypeName() { | 
| Identifier typeName; | 
| -    if (_matchesKeyword(Keyword.VAR)) { | 
| +    if (_matchesIdentifier()) { | 
| +      typeName = _parsePrefixedIdentifierUnchecked(); | 
| +    } else if (_matchesKeyword(Keyword.VAR)) { | 
| _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME); | 
| typeName = new SimpleIdentifier(getAndAdvance()); | 
| -    } else if (_matchesIdentifier()) { | 
| -      typeName = parsePrefixedIdentifier(); | 
| } else { | 
| typeName = _createSyntheticIdentifier(); | 
| _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME); | 
| @@ -8009,6 +8460,25 @@ class Parser { | 
| } | 
|  | 
| /** | 
| +   * Parse a type name. Return the type name that was parsed. | 
| +   * | 
| +   * This method assumes that the current token is an identifier. | 
| +   * | 
| +   *     type ::= | 
| +   *         qualified typeArguments? | 
| +   */ | 
| +  TypeName _parseTypeNameAfterIdentifier() { | 
| +    Identifier typeName = _parsePrefixedIdentifierUnchecked(); | 
| +    TypeArgumentList typeArguments = _parseOptionalTypeArguments(); | 
| +    // If this is followed by a generic method type comment, allow the comment | 
| +    // type to replace the real type name. | 
| +    // TODO(jmesserly): this feels like a big hammer. Can we restrict it to | 
| +    // only work inside generic methods? | 
| +    TypeName typeFromComment = _parseOptionalTypeNameComment(); | 
| +    return typeFromComment ?? new TypeName(typeName, typeArguments); | 
| +  } | 
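A sketch of the `qualified typeArguments?` shapes the helper above handles: a bare identifier, an identifier with type arguments, and a prefixed name with type arguments (the library prefix is made up). The comment-type override it mentions is for generic-method types written in comments, per the TODO.

    import 'dart:collection' as collection;

    num n = 0;                              // identifier only
    List<int> xs = <int>[];                 // identifier with type arguments
    collection.HashMap<String, int> map =   // prefixed identifier with type arguments
        new collection.HashMap<String, int>();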
| + | 
| +  /** | 
| * Parse a unary expression. Return the unary expression that was parsed. | 
| * | 
| *     unaryExpression ::= | 
| @@ -8020,13 +8490,15 @@ class Parser { | 
| *       | incrementOperator assignableExpression | 
| */ | 
| Expression _parseUnaryExpression() { | 
| -    if (_matches(TokenType.MINUS) || | 
| -        _matches(TokenType.BANG) || | 
| -        _matches(TokenType.TILDE)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type == TokenType.MINUS || | 
| +        type == TokenType.BANG || | 
| +        type == TokenType.TILDE) { | 
| Token operator = getAndAdvance(); | 
| if (_matchesKeyword(Keyword.SUPER)) { | 
| -        if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) || | 
| -            _tokenMatches(_peek(), TokenType.PERIOD)) { | 
| +        TokenType nextType = _peek().type; | 
| +        if (nextType == TokenType.OPEN_SQUARE_BRACKET || | 
| +            nextType == TokenType.PERIOD) { | 
| //     "prefixOperator unaryExpression" | 
| // --> "prefixOperator postfixExpression" | 
| // --> "prefixOperator primary                    selector*" | 
| @@ -8040,8 +8512,9 @@ class Parser { | 
| } else if (_currentToken.type.isIncrementOperator) { | 
| Token operator = getAndAdvance(); | 
| if (_matchesKeyword(Keyword.SUPER)) { | 
| -        if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) || | 
| -            _tokenMatches(_peek(), TokenType.PERIOD)) { | 
| +        TokenType nextType = _peek().type; | 
| +        if (nextType == TokenType.OPEN_SQUARE_BRACKET || | 
| +            nextType == TokenType.PERIOD) { | 
| // --> "prefixOperator 'super' assignableSelector selector*" | 
| return new PrefixExpression(operator, _parseUnaryExpression()); | 
| } | 
| @@ -8052,7 +8525,7 @@ class Parser { | 
| // we cannot do the same for "++super" because "+super" is also not | 
| // valid. | 
| // | 
| -        if (operator.type == TokenType.MINUS_MINUS) { | 
| +        if (type == TokenType.MINUS_MINUS) { | 
| Token firstOperator = _createToken(operator, TokenType.MINUS); | 
| Token secondOperator = | 
| new Token(TokenType.MINUS, operator.offset + 1); | 
| @@ -8063,16 +8536,16 @@ class Parser { | 
| firstOperator, | 
| new PrefixExpression( | 
| secondOperator, new SuperExpression(getAndAdvance()))); | 
| -        } else { | 
| -          // Invalid operator before 'super' | 
| -          _reportErrorForCurrentToken( | 
| -              ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]); | 
| -          return new PrefixExpression( | 
| -              operator, new SuperExpression(getAndAdvance())); | 
| } | 
| +        // Invalid operator before 'super' | 
| +        _reportErrorForCurrentToken( | 
| +            ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]); | 
| +        return new PrefixExpression( | 
| +            operator, new SuperExpression(getAndAdvance())); | 
| } | 
| -      return new PrefixExpression(operator, _parseAssignableExpression(false)); | 
| -    } else if (_matches(TokenType.PLUS)) { | 
| +      return new PrefixExpression( | 
| +          operator, _parseAssignableExpressionNotStartingWithSuper(false)); | 
| +    } else if (type == TokenType.PLUS) { | 
| _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | 
| return _createSyntheticIdentifier(); | 
| } else if (_inAsync && _matchesString(_AWAIT)) { | 
| @@ -8086,28 +8559,35 @@ class Parser { | 
| * was parsed. | 
| */ | 
| StringLiteral _parseUri() { | 
| -    bool iskeywordAfterUri(Token token) => | 
| +    // TODO(brianwilkerson) Should this function also return true for valid | 
| +    // top-level keywords? | 
| +    bool isKeywordAfterUri(Token token) => | 
| token.lexeme == Keyword.AS.syntax || | 
| token.lexeme == _HIDE || | 
| token.lexeme == _SHOW; | 
| -    if (!_matches(TokenType.STRING) && | 
| -        !_matches(TokenType.SEMICOLON) && | 
| -        !iskeywordAfterUri(_currentToken)) { | 
| +    TokenType type = _currentToken.type; | 
| +    if (type != TokenType.STRING && | 
| +        type != TokenType.SEMICOLON && | 
| +        !isKeywordAfterUri(_currentToken)) { | 
| // Attempt to recover in the case where the URI was not enclosed in | 
| // quotes. | 
| Token token = _currentToken; | 
| -      while ((_tokenMatchesIdentifier(token) && !iskeywordAfterUri(token)) || | 
| -          _tokenMatches(token, TokenType.COLON) || | 
| -          _tokenMatches(token, TokenType.SLASH) || | 
| -          _tokenMatches(token, TokenType.PERIOD) || | 
| -          _tokenMatches(token, TokenType.PERIOD_PERIOD) || | 
| -          _tokenMatches(token, TokenType.PERIOD_PERIOD_PERIOD) || | 
| -          _tokenMatches(token, TokenType.INT) || | 
| -          _tokenMatches(token, TokenType.DOUBLE)) { | 
| +      bool isValidInUri(Token token) { | 
| +        TokenType type = token.type; | 
| +        return type == TokenType.COLON || | 
| +            type == TokenType.SLASH || | 
| +            type == TokenType.PERIOD || | 
| +            type == TokenType.PERIOD_PERIOD || | 
| +            type == TokenType.PERIOD_PERIOD_PERIOD || | 
| +            type == TokenType.INT || | 
| +            type == TokenType.DOUBLE; | 
| +      } | 
| +      while ((_tokenMatchesIdentifier(token) && !isKeywordAfterUri(token)) || | 
| +          isValidInUri(token)) { | 
| token = token.next; | 
| } | 
| if (_tokenMatches(token, TokenType.SEMICOLON) || | 
| -          iskeywordAfterUri(token)) { | 
| +          isKeywordAfterUri(token)) { | 
| Token endToken = token.previous; | 
| token = _currentToken; | 
| int endOffset = token.end; | 
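A deliberately malformed sketch of the recovery above: directive URIs written without quotes are scanned forward over identifiers, `:`, `/`, `.`, and number tokens, then stitched into a synthetic string if a `;` or a keyword such as `as`, `show`, or `hide` follows.

    import dart:math;              // recovered as if written 'dart:math'
    import package:foo/bar.dart;   // recovered as if written 'package:foo/bar.dart'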
| @@ -8193,10 +8673,10 @@ class Parser { | 
| _tokenMatchesKeyword(keyword, Keyword.VAR)) { | 
| _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, keyword); | 
| } | 
| -    List<VariableDeclaration> variables = new List<VariableDeclaration>(); | 
| -    variables.add(_parseVariableDeclaration()); | 
| -    while (_matches(TokenType.COMMA)) { | 
| -      _advance(); | 
| +    List<VariableDeclaration> variables = <VariableDeclaration>[ | 
| +      _parseVariableDeclaration() | 
| +    ]; | 
| +    while (_optional(TokenType.COMMA)) { | 
| variables.add(_parseVariableDeclaration()); | 
| } | 
| return new VariableDeclarationList(commentAndMetadata?.comment, | 
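A sketch of declaration lists handled by the loop above (the last line is intentionally wrong): a comma-separated list shares one keyword and type, and combining `var` with a type is reported as VAR_AND_TYPE.

    final int a = 1, b = 2;  // one 'final int', two declarations
    var c, d = 3;            // uninitialized and initialized names in one list
    var int e = 4;           // error: VAR_AND_TYPE ('var' combined with a type)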
| @@ -8250,6 +8730,8 @@ class Parser { | 
| /** | 
| * Parse a while statement. Return the while statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.WHILE]. | 
| +   * | 
| *     whileStatement ::= | 
| *         'while' '(' expression ')' statement | 
| */ | 
| @@ -8257,7 +8739,7 @@ class Parser { | 
| bool wasInLoop = _inLoop; | 
| _inLoop = true; | 
| try { | 
| -      Token keyword = _expectKeyword(Keyword.WHILE); | 
| +      Token keyword = getAndAdvance(); | 
| Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | 
| Expression condition = parseExpression2(); | 
| Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | 
| @@ -8272,6 +8754,8 @@ class Parser { | 
| /** | 
| * Parse a yield statement. Return the yield statement that was parsed. | 
| * | 
| +   * This method assumes that the current token matches [Keyword.YIELD]. | 
| +   * | 
| *     yieldStatement ::= | 
| *         'yield' '*'? expression ';' | 
| */ | 
| @@ -8415,8 +8899,8 @@ class Parser { | 
| *   | type | 
| */ | 
| Token _skipFinalConstVarOrType(Token startToken) { | 
| -    if (_tokenMatchesKeyword(startToken, Keyword.FINAL) || | 
| -        _tokenMatchesKeyword(startToken, Keyword.CONST)) { | 
| +    Keyword keyword = startToken.keyword; | 
| +    if (keyword == Keyword.FINAL || keyword == Keyword.CONST) { | 
| Token next = startToken.next; | 
| if (_tokenMatchesIdentifier(next)) { | 
| Token next2 = next.next; | 
| @@ -8429,7 +8913,7 @@ class Parser { | 
| // "parameter" | 
| return next; | 
| } | 
| -    } else if (_tokenMatchesKeyword(startToken, Keyword.VAR)) { | 
| +    } else if (keyword == Keyword.VAR) { | 
| return startToken.next; | 
| } else if (_tokenMatchesIdentifier(startToken)) { | 
| Token next = startToken.next; | 
|  |