| Index: pkg/analyzer/lib/src/generated/parser.dart
|
| diff --git a/pkg/analyzer/lib/src/generated/parser.dart b/pkg/analyzer/lib/src/generated/parser.dart
|
| index 7354c0a9237dd7da2fabff53b3cb19b18e6b34c6..6f6f2b61fb0de56bcf144fd25a7df3e77029d647 100644
|
| --- a/pkg/analyzer/lib/src/generated/parser.dart
|
| +++ b/pkg/analyzer/lib/src/generated/parser.dart
|
| @@ -1776,7 +1776,7 @@ class Parser {
|
| bool blankArgument = argument is SimpleIdentifier && (argument as SimpleIdentifier).name.isEmpty;
|
| if (!generatedError && !(argument is NamedExpression && !blankArgument)) {
|
| // Report the error, once, but allow the arguments to be in any order in the AST.
|
| - _reportErrorForCurrentToken(ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT);
|
| generatedError = true;
|
| }
|
| } else if (argument is NamedExpression) {
|
| @@ -1890,7 +1890,7 @@ class Parser {
|
| //
|
| // We appear to have a variable declaration with a type of "void".
|
| //
|
| - _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType, []);
|
| + _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType);
|
| return _parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, _validateModifiersForField(modifiers), returnType);
|
| }
|
| }
|
| @@ -1901,7 +1901,7 @@ class Parser {
|
| _validateModifiersForOperator(modifiers);
|
| return _parseOperator(commentAndMetadata, modifiers.externalKeyword, returnType);
|
| }
|
| - _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
|
| return null;
|
| }
|
| } else if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) {
|
| @@ -1932,12 +1932,12 @@ class Parser {
|
| //
|
| // We appear to have found an incomplete field declaration.
|
| //
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| List<VariableDeclaration> variables = new List<VariableDeclaration>();
|
| variables.add(new VariableDeclaration(null, null, _createSyntheticIdentifier(), null, null));
|
| return new FieldDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, null, new VariableDeclarationList(null, null, keyword, null, variables), _expectSemicolon());
|
| }
|
| - _reportErrorForToken(ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
|
| if (commentAndMetadata.comment != null || !commentAndMetadata.metadata.isEmpty) {
|
| //
|
| // We appear to have found an incomplete declaration at the end of the class. At this point
|
| @@ -1960,7 +1960,7 @@ class Parser {
|
| return _parseMethodDeclarationAfterParameters(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, null, methodName, parameters);
|
| } else if (_peek().matchesAny([TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
|
| if (modifiers.constKeyword == null && modifiers.finalKeyword == null && modifiers.varKeyword == null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
|
| }
|
| return _parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, _validateModifiersForField(modifiers), null);
|
| }
|
| @@ -1995,7 +1995,7 @@ class Parser {
|
| // At this point it consists of a type name, so we'll treat it as a field declaration
|
| // with a missing field name and semicolon.
|
| //
|
| - _reportErrorForToken(ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
|
| try {
|
| _lockErrorListener();
|
| return _parseInitializedIdentifierList(commentAndMetadata, modifiers.staticKeyword, _validateModifiersForField(modifiers), type);
|
| @@ -2006,7 +2006,7 @@ class Parser {
|
| SimpleIdentifier methodName = parseSimpleIdentifier();
|
| FormalParameterList parameters = parseFormalParameterList();
|
| if (methodName.name == className) {
|
| - _reportErrorForNode(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, type, []);
|
| + _reportErrorForNode(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, type);
|
| return _parseConstructor(commentAndMetadata, modifiers.externalKeyword, _validateModifiersForConstructor(modifiers), modifiers.factoryKeyword, methodName, null, null, parameters);
|
| }
|
| _validateModifiersForGetterOrSetterOrMethod(modifiers);
|
| @@ -2016,7 +2016,7 @@ class Parser {
|
| // We have found "TypeName identifier {", and are guessing that this is a getter without the
|
| // keyword 'get'.
|
| _validateModifiersForGetterOrSetterOrMethod(modifiers);
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_GET, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_GET);
|
| _currentToken = _injectToken(new Parser_SyntheticKeywordToken(Keyword.GET, _currentToken.offset));
|
| return _parseGetter(commentAndMetadata, modifiers.externalKeyword, modifiers.staticKeyword, type);
|
| }
|
| @@ -2067,15 +2067,15 @@ class Parser {
|
| if ((_matchesKeyword(Keyword.IMPORT) || _matchesKeyword(Keyword.EXPORT) || _matchesKeyword(Keyword.LIBRARY) || _matchesKeyword(Keyword.PART)) && !_tokenMatches(_peek(), TokenType.PERIOD) && !_tokenMatches(_peek(), TokenType.LT) && !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
|
| Directive directive = _parseDirective(commentAndMetadata);
|
| if (declarations.length > 0 && !directiveFoundAfterDeclaration) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION);
|
| directiveFoundAfterDeclaration = true;
|
| }
|
| if (directive is LibraryDirective) {
|
| if (libraryDirectiveFound) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES);
|
| } else {
|
| if (directives.length > 0) {
|
| - _reportErrorForToken(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST, directive.libraryToken, []);
|
| + _reportErrorForToken(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST, directive.libraryToken);
|
| }
|
| libraryDirectiveFound = true;
|
| }
|
| @@ -2083,24 +2083,24 @@ class Parser {
|
| partDirectiveFound = true;
|
| } else if (partDirectiveFound) {
|
| if (directive is ExportDirective) {
|
| - _reportErrorForToken(ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, directive.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, directive.keyword);
|
| } else if (directive is ImportDirective) {
|
| - _reportErrorForToken(ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, directive.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, directive.keyword);
|
| }
|
| }
|
| if (directive is PartOfDirective) {
|
| if (partOfDirectiveFound) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES);
|
| } else {
|
| int directiveCount = directives.length;
|
| for (int i = 0; i < directiveCount; i++) {
|
| - _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, directives[i].keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, directives[i].keyword);
|
| }
|
| partOfDirectiveFound = true;
|
| }
|
| } else {
|
| if (partOfDirectiveFound) {
|
| - _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, directive.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, directive.keyword);
|
| }
|
| }
|
| directives.add(directive);
|
| @@ -2320,7 +2320,7 @@ class Parser {
|
| if (_getEndToken(leftParenthesis) != null) {
|
| _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]);
|
| } else {
|
| - _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS, _currentToken.previous, []);
|
| + _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS, _currentToken.previous);
|
| break;
|
| }
|
| }
|
| @@ -2331,11 +2331,11 @@ class Parser {
|
| if (_matches(TokenType.OPEN_SQUARE_BRACKET)) {
|
| wasOptionalParameter = true;
|
| if (leftSquareBracket != null && !reportedMuliplePositionalGroups) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS);
|
| reportedMuliplePositionalGroups = true;
|
| }
|
| if (leftCurlyBracket != null && !reportedMixedGroups) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
|
| reportedMixedGroups = true;
|
| }
|
| leftSquareBracket = andAdvance;
|
| @@ -2344,11 +2344,11 @@ class Parser {
|
| } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
|
| wasOptionalParameter = true;
|
| if (leftCurlyBracket != null && !reportedMulipleNamedGroups) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS);
|
| reportedMulipleNamedGroups = true;
|
| }
|
| if (leftSquareBracket != null && !reportedMixedGroups) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
|
| reportedMixedGroups = true;
|
| }
|
| leftCurlyBracket = andAdvance;
|
| @@ -2362,7 +2362,7 @@ class Parser {
|
| parameters.add(parameter);
|
| currentParameters.add(parameter);
|
| if (kind == ParameterKind.REQUIRED && wasOptionalParameter) {
|
| - _reportErrorForNode(ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter, []);
|
| + _reportErrorForNode(ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter);
|
| }
|
| //
|
| // Handle the end of parameter groups.
|
| @@ -2562,7 +2562,7 @@ class Parser {
|
| FormalParameterList parameters = parseFormalParameterList();
|
| if (thisKeyword == null) {
|
| if (holder.keyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FUNCTION_TYPED_PARAMETER_VAR, holder.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FUNCTION_TYPED_PARAMETER_VAR, holder.keyword);
|
| }
|
| return new FunctionTypedFormalParameter(commentAndMetadata.comment, commentAndMetadata.metadata, holder.type, identifier, parameters);
|
| } else {
|
| @@ -2572,9 +2572,9 @@ class Parser {
|
| TypeName type = holder.type;
|
| if (type != null) {
|
| if (_tokenMatchesKeyword(type.name.beginToken, Keyword.VOID)) {
|
| - _reportErrorForToken(ParserErrorCode.VOID_PARAMETER, type.name.beginToken, []);
|
| + _reportErrorForToken(ParserErrorCode.VOID_PARAMETER, type.name.beginToken);
|
| } else if (holder.keyword != null && _tokenMatchesKeyword(holder.keyword, Keyword.VAR)) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, holder.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, holder.keyword);
|
| }
|
| }
|
| if (thisKeyword != null) {
|
| @@ -2636,7 +2636,7 @@ class Parser {
|
| if (_matchesIdentifier()) {
|
| return new SimpleIdentifier(andAdvance);
|
| }
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| return _createSyntheticIdentifier();
|
| }
|
|
|
| @@ -2684,7 +2684,7 @@ class Parser {
|
| }
|
| }
|
| if (strings.length < 1) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL);
|
| return _createSyntheticStringLiteral();
|
| } else if (strings.length == 1) {
|
| return strings[0];
|
| @@ -2730,13 +2730,13 @@ class Parser {
|
| TypeName parseTypeName() {
|
| Identifier typeName;
|
| if (_matchesKeyword(Keyword.VAR)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME);
|
| typeName = new SimpleIdentifier(andAdvance);
|
| } else if (_matchesIdentifier()) {
|
| typeName = parsePrefixedIdentifier();
|
| } else {
|
| typeName = _createSyntheticIdentifier();
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME);
|
| }
|
| TypeArgumentList typeArguments = null;
|
| if (_matches(TokenType.LT)) {
|
| @@ -2991,7 +2991,7 @@ class Parser {
|
| */
|
| void _ensureAssignable(Expression expression) {
|
| if (expression != null && !expression.isAssignable) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE);
|
| }
|
| }
|
|
|
| @@ -3591,13 +3591,13 @@ class Parser {
|
| Token leftParen = _expect(TokenType.OPEN_PAREN);
|
| Expression expression = parseExpression2();
|
| if (expression is AssignmentExpression) {
|
| - _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_ASSIGNMENT, expression, []);
|
| + _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_ASSIGNMENT, expression);
|
| } else if (expression is CascadeExpression) {
|
| - _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_CASCADE, expression, []);
|
| + _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_CASCADE, expression);
|
| } else if (expression is ThrowExpression) {
|
| - _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_THROW, expression, []);
|
| + _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_THROW, expression);
|
| } else if (expression is RethrowExpression) {
|
| - _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_RETHROW, expression, []);
|
| + _reportErrorForNode(ParserErrorCode.ASSERT_DOES_NOT_TAKE_RETHROW, expression);
|
| }
|
| Token rightParen = _expect(TokenType.CLOSE_PAREN);
|
| Token semicolon = _expect(TokenType.SEMICOLON);
|
| @@ -3692,7 +3692,7 @@ class Parser {
|
| } else {
|
| if (!optional) {
|
| // Report the missing selector.
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR);
|
| }
|
| return prefix;
|
| }
|
| @@ -3781,7 +3781,7 @@ class Parser {
|
| label = parseSimpleIdentifier();
|
| }
|
| if (!_inLoop && !_inSwitch && label == null) {
|
| - _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword);
|
| }
|
| Token semicolon = _expect(TokenType.SEMICOLON);
|
| return new BreakStatement(breakKeyword, label, semicolon);
|
| @@ -3916,22 +3916,22 @@ class Parser {
|
| if (extendsClause == null) {
|
| extendsClause = parseExtendsClause();
|
| if (withClause != null) {
|
| - _reportErrorForToken(ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword);
|
| } else if (implementsClause != null) {
|
| - _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS, implementsClause.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS, implementsClause.keyword);
|
| }
|
| } else {
|
| - _reportErrorForToken(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES, extendsClause.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES, extendsClause.keyword);
|
| parseExtendsClause();
|
| }
|
| } else if (_matchesKeyword(Keyword.WITH)) {
|
| if (withClause == null) {
|
| withClause = parseWithClause();
|
| if (implementsClause != null) {
|
| - _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_WITH, implementsClause.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_WITH, implementsClause.keyword);
|
| }
|
| } else {
|
| - _reportErrorForToken(ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword);
|
| parseWithClause();
|
| // TODO(brianwilkerson) Should we merge the list of applied mixins into a single list?
|
| }
|
| @@ -3939,7 +3939,7 @@ class Parser {
|
| if (implementsClause == null) {
|
| implementsClause = parseImplementsClause();
|
| } else {
|
| - _reportErrorForToken(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES, implementsClause.keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES, implementsClause.keyword);
|
| parseImplementsClause();
|
| // TODO(brianwilkerson) Should we merge the list of implemented classes into a single list?
|
| }
|
| @@ -3948,7 +3948,7 @@ class Parser {
|
| }
|
| }
|
| if (withClause != null && extendsClause == null) {
|
| - _reportErrorForToken(ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword);
|
| }
|
| //
|
| // Look for and skip over the extra-lingual 'native' specification.
|
| @@ -3970,7 +3970,7 @@ class Parser {
|
| } else {
|
| leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET);
|
| rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET);
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY);
|
| }
|
| ClassDeclaration classDeclaration = new ClassDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, abstractKeyword, keyword, name, typeParameters, extendsClause, withClause, implementsClause, leftBracket, members, rightBracket);
|
| classDeclaration.nativeClause = nativeClause;
|
| @@ -4274,7 +4274,7 @@ class Parser {
|
| _validateModifiersForTopLevelFunction(modifiers);
|
| return _parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, returnType);
|
| } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
|
| - _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
|
| return _convertToFunctionDeclaration(_parseOperator(commentAndMetadata, modifiers.externalKeyword, returnType));
|
| } else if (_matchesIdentifier() && _peek().matchesAny([
|
| TokenType.OPEN_PAREN,
|
| @@ -4291,18 +4291,18 @@ class Parser {
|
| //
|
| // We appear to have a variable declaration with a type of "void".
|
| //
|
| - _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType, []);
|
| + _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType);
|
| return new TopLevelVariableDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, _parseVariableDeclarationListAfterType(null, _validateModifiersForTopLevelVariable(modifiers), null), _expect(TokenType.SEMICOLON));
|
| }
|
| }
|
| - _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
|
| return null;
|
| }
|
| } else if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && _tokenMatchesIdentifier(_peek())) {
|
| _validateModifiersForTopLevelFunction(modifiers);
|
| return _parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, null);
|
| } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
|
| - _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
|
| return _convertToFunctionDeclaration(_parseOperator(commentAndMetadata, modifiers.externalKeyword, null));
|
| } else if (!_matchesIdentifier()) {
|
| Token keyword = modifiers.varKeyword;
|
| @@ -4316,19 +4316,19 @@ class Parser {
|
| //
|
| // We appear to have found an incomplete top-level variable declaration.
|
| //
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| List<VariableDeclaration> variables = new List<VariableDeclaration>();
|
| variables.add(new VariableDeclaration(null, null, _createSyntheticIdentifier(), null, null));
|
| return new TopLevelVariableDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, new VariableDeclarationList(null, null, keyword, null, variables), _expectSemicolon());
|
| }
|
| - _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
|
| return null;
|
| } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
|
| _validateModifiersForTopLevelFunction(modifiers);
|
| return _parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, null);
|
| } else if (_peek().matchesAny([TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) {
|
| if (modifiers.constKeyword == null && modifiers.finalKeyword == null && modifiers.varKeyword == null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
|
| }
|
| return new TopLevelVariableDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, _parseVariableDeclarationListAfterType(null, _validateModifiersForTopLevelVariable(modifiers), null), _expect(TokenType.SEMICOLON));
|
| }
|
| @@ -4337,13 +4337,13 @@ class Parser {
|
| _validateModifiersForTopLevelFunction(modifiers);
|
| return _parseFunctionDeclaration(commentAndMetadata, modifiers.externalKeyword, returnType);
|
| } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
|
| - _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
|
| return _convertToFunctionDeclaration(_parseOperator(commentAndMetadata, modifiers.externalKeyword, returnType));
|
| } else if (_matches(TokenType.AT)) {
|
| return new TopLevelVariableDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, _parseVariableDeclarationListAfterType(null, _validateModifiersForTopLevelVariable(modifiers), returnType), _expect(TokenType.SEMICOLON));
|
| } else if (!_matchesIdentifier()) {
|
| // TODO(brianwilkerson) Generalize this error. We could also be parsing a top-level variable at this point.
|
| - _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
|
| Token semicolon;
|
| if (_matches(TokenType.SEMICOLON)) {
|
| semicolon = andAdvance;
|
| @@ -4410,13 +4410,13 @@ class Parser {
|
| initializers.add(_parseSuperConstructorInvocation());
|
| } else if (_matches(TokenType.OPEN_CURLY_BRACKET)
|
| || _matches(TokenType.FUNCTION)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER);
|
| } else {
|
| initializers.add(_parseConstructorFieldInitializer());
|
| }
|
| } while (_optional(TokenType.COMMA));
|
| if (factoryKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FACTORY_WITH_INITIALIZERS, factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FACTORY_WITH_INITIALIZERS, factoryKeyword);
|
| }
|
| }
|
| ConstructorName redirectedConstructor = null;
|
| @@ -4426,21 +4426,21 @@ class Parser {
|
| redirectedConstructor = parseConstructorName();
|
| body = new EmptyFunctionBody(_expect(TokenType.SEMICOLON));
|
| if (factoryKeyword == null) {
|
| - _reportErrorForNode(ParserErrorCode.REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR, redirectedConstructor, []);
|
| + _reportErrorForNode(ParserErrorCode.REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR, redirectedConstructor);
|
| }
|
| } else {
|
| body = _parseFunctionBody(true, ParserErrorCode.MISSING_FUNCTION_BODY, false);
|
| if (constKeyword != null && factoryKeyword != null && externalKeyword == null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_FACTORY, factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_FACTORY, factoryKeyword);
|
| } else if (body is EmptyFunctionBody) {
|
| if (factoryKeyword != null && externalKeyword == null) {
|
| - _reportErrorForToken(ParserErrorCode.FACTORY_WITHOUT_BODY, factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FACTORY_WITHOUT_BODY, factoryKeyword);
|
| }
|
| } else {
|
| if (constKeyword != null) {
|
| - _reportErrorForNode(ParserErrorCode.CONST_CONSTRUCTOR_WITH_BODY, body, []);
|
| + _reportErrorForNode(ParserErrorCode.CONST_CONSTRUCTOR_WITH_BODY, body);
|
| } else if (!bodyAllowed) {
|
| - _reportErrorForNode(ParserErrorCode.EXTERNAL_CONSTRUCTOR_WITH_BODY, body, []);
|
| + _reportErrorForNode(ParserErrorCode.EXTERNAL_CONSTRUCTOR_WITH_BODY, body);
|
| }
|
| }
|
| }
|
| @@ -4466,7 +4466,7 @@ class Parser {
|
| }
|
| SimpleIdentifier fieldName = parseSimpleIdentifier();
|
| if (!_matches(TokenType.EQ)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER);
|
| return new ConstructorFieldInitializer(
|
| keyword,
|
| period,
|
| @@ -4510,14 +4510,14 @@ class Parser {
|
| Statement _parseContinueStatement() {
|
| Token continueKeyword = _expectKeyword(Keyword.CONTINUE);
|
| if (!_inLoop && !_inSwitch) {
|
| - _reportErrorForToken(ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword);
|
| }
|
| SimpleIdentifier label = null;
|
| if (_matchesIdentifier()) {
|
| label = parseSimpleIdentifier();
|
| }
|
| if (_inSwitch && !_inLoop && label == null) {
|
| - _reportErrorForToken(ParserErrorCode.CONTINUE_WITHOUT_LABEL_IN_CASE, continueKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONTINUE_WITHOUT_LABEL_IN_CASE, continueKeyword);
|
| }
|
| Token semicolon = _expect(TokenType.SEMICOLON);
|
| return new ContinueStatement(continueKeyword, label, semicolon);
|
| @@ -4698,10 +4698,10 @@ class Parser {
|
| constants.add(_parseEnumConstantDeclaration());
|
| } else if (_matches(TokenType.COMMA) && _tokenMatchesIdentifier(_peek())) {
|
| constants.add(_parseEnumConstantDeclaration());
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| } else {
|
| constants.add(_parseEnumConstantDeclaration());
|
| - _reportErrorForCurrentToken(ParserErrorCode.EMPTY_ENUM_BODY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EMPTY_ENUM_BODY);
|
| }
|
| while (_optional(TokenType.COMMA)) {
|
| if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
|
| @@ -4713,7 +4713,7 @@ class Parser {
|
| } else {
|
| leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET);
|
| rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET);
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_ENUM_BODY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_ENUM_BODY);
|
| }
|
| return new EnumDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, keyword, name, leftBracket, constants, rightBracket);
|
| }
|
| @@ -4740,7 +4740,7 @@ class Parser {
|
| while (_currentToken.type.isEqualityOperator) {
|
| Token operator = andAdvance;
|
| if (leftEqualityExpression) {
|
| - _reportErrorForNode(ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression, []);
|
| + _reportErrorForNode(ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression);
|
| }
|
| expression = new BinaryExpression(expression, operator, _parseRelationalExpression());
|
| leftEqualityExpression = true;
|
| @@ -4814,7 +4814,7 @@ class Parser {
|
| if (_isTypedIdentifier(_currentToken)) {
|
| type = parseReturnType();
|
| } else if (!optional) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE);
|
| }
|
| }
|
| return new FinalConstVarOrType(keyword, type);
|
| @@ -4842,18 +4842,18 @@ class Parser {
|
| Token seperator = andAdvance;
|
| Expression defaultValue = parseExpression2();
|
| if (kind == ParameterKind.NAMED) {
|
| - _reportErrorForToken(ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, seperator, []);
|
| + _reportErrorForToken(ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, seperator);
|
| } else if (kind == ParameterKind.REQUIRED) {
|
| - _reportErrorForNode(ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter, []);
|
| + _reportErrorForNode(ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter);
|
| }
|
| return new DefaultFormalParameter(parameter, kind, seperator, defaultValue);
|
| } else if (_matches(TokenType.COLON)) {
|
| Token seperator = andAdvance;
|
| Expression defaultValue = parseExpression2();
|
| if (kind == ParameterKind.POSITIONAL) {
|
| - _reportErrorForToken(ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER, seperator, []);
|
| + _reportErrorForToken(ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER, seperator);
|
| } else if (kind == ParameterKind.REQUIRED) {
|
| - _reportErrorForNode(ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter, []);
|
| + _reportErrorForNode(ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter);
|
| }
|
| return new DefaultFormalParameter(parameter, kind, seperator, defaultValue);
|
| } else if (kind != ParameterKind.REQUIRED) {
|
| @@ -4910,7 +4910,7 @@ class Parser {
|
| SimpleIdentifier identifier = null;
|
| if (variableList == null) {
|
| // We found: <expression> 'in'
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_VARIABLE_IN_FOR_EACH, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_VARIABLE_IN_FOR_EACH);
|
| } else {
|
| NodeList<VariableDeclaration> variables = variableList.variables;
|
| if (variables.length > 1) {
|
| @@ -4918,7 +4918,7 @@ class Parser {
|
| }
|
| VariableDeclaration variable = variables[0];
|
| if (variable.initializer != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.INITIALIZED_VARIABLE_IN_FOR_EACH, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INITIALIZED_VARIABLE_IN_FOR_EACH);
|
| }
|
| Token keyword = variableList.keyword;
|
| TypeName type = variableList.type;
|
| @@ -4943,7 +4943,7 @@ class Parser {
|
| }
|
| }
|
| if (awaitKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.INVALID_AWAIT_IN_FOR, awaitKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.INVALID_AWAIT_IN_FOR, awaitKeyword);
|
| }
|
| Token leftSeparator = _expect(TokenType.SEMICOLON);
|
| Expression condition = null;
|
| @@ -4994,7 +4994,7 @@ class Parser {
|
| try {
|
| if (_matches(TokenType.SEMICOLON)) {
|
| if (!mayBeEmpty) {
|
| - _reportErrorForCurrentToken(emptyErrorCode, []);
|
| + _reportErrorForCurrentToken(emptyErrorCode);
|
| }
|
| return new EmptyFunctionBody(andAdvance);
|
| } else if (_matchesString(_NATIVE)) {
|
| @@ -5026,10 +5026,10 @@ class Parser {
|
| if (_matches(TokenType.FUNCTION)) {
|
| if (keyword != null) {
|
| if (!_tokenMatchesString(keyword, ASYNC)) {
|
| - _reportErrorForToken(ParserErrorCode.INVALID_SYNC, keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.INVALID_SYNC, keyword);
|
| keyword = null;
|
| } else if (star != null) {
|
| - _reportErrorForToken(ParserErrorCode.INVALID_STAR_AFTER_ASYNC, star, []);
|
| + _reportErrorForToken(ParserErrorCode.INVALID_STAR_AFTER_ASYNC, star);
|
| }
|
| }
|
| Token functionDefinition = andAdvance;
|
| @@ -5045,7 +5045,7 @@ class Parser {
|
| } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
|
| if (keyword != null) {
|
| if (_tokenMatchesString(keyword, SYNC) && star == null) {
|
| - _reportErrorForToken(ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword);
|
| }
|
| }
|
| if (!_parseFunctionBodies) {
|
| @@ -5055,7 +5055,7 @@ class Parser {
|
| return new BlockFunctionBody(keyword, star, parseBlock());
|
| } else {
|
| // Invalid function body
|
| - _reportErrorForCurrentToken(emptyErrorCode, []);
|
| + _reportErrorForCurrentToken(emptyErrorCode);
|
| return new EmptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON));
|
| }
|
| } finally {
|
| @@ -5098,10 +5098,10 @@ class Parser {
|
| parameters = parseFormalParameterList();
|
| _validateFormalParameterList(parameters);
|
| } else {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_FUNCTION_PARAMETERS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_FUNCTION_PARAMETERS);
|
| }
|
| } else if (_matches(TokenType.OPEN_PAREN)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS);
|
| parseFormalParameterList();
|
| }
|
| FunctionBody body;
|
| @@ -5152,9 +5152,9 @@ class Parser {
|
| Token propertyKeyword = declaration.propertyKeyword;
|
| if (propertyKeyword != null) {
|
| if ((propertyKeyword as KeywordToken).keyword == Keyword.GET) {
|
| - _reportErrorForToken(ParserErrorCode.GETTER_IN_FUNCTION, propertyKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.GETTER_IN_FUNCTION, propertyKeyword);
|
| } else {
|
| - _reportErrorForToken(ParserErrorCode.SETTER_IN_FUNCTION, propertyKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.SETTER_IN_FUNCTION, propertyKeyword);
|
| }
|
| }
|
| return new FunctionDeclarationStatement(declaration);
|
| @@ -5186,12 +5186,12 @@ class Parser {
|
| typeParameters = parseTypeParameterList();
|
| }
|
| if (_matches(TokenType.SEMICOLON) || _matches(TokenType.EOF)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
|
| FormalParameterList parameters = new FormalParameterList(_createSyntheticToken(TokenType.OPEN_PAREN), null, null, null, _createSyntheticToken(TokenType.CLOSE_PAREN));
|
| Token semicolon = _expect(TokenType.SEMICOLON);
|
| return new FunctionTypeAlias(commentAndMetadata.comment, commentAndMetadata.metadata, keyword, returnType, name, typeParameters, parameters, semicolon);
|
| } else if (!_matches(TokenType.OPEN_PAREN)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
|
| // TODO(brianwilkerson) Recover from this error. At the very least we should skip to the start
|
| // of the next valid compilation unit member, allowing for the possibility of finding the
|
| // typedef parameters before that point.
|
| @@ -5226,13 +5226,13 @@ class Parser {
|
| Token propertyKeyword = _expectKeyword(Keyword.GET);
|
| SimpleIdentifier name = parseSimpleIdentifier();
|
| if (_matches(TokenType.OPEN_PAREN) && _tokenMatches(_peek(), TokenType.CLOSE_PAREN)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS);
|
| _advance();
|
| _advance();
|
| }
|
| FunctionBody body = _parseFunctionBody(externalKeyword != null || staticKeyword == null, ParserErrorCode.STATIC_GETTER_WITHOUT_BODY, false);
|
| if (externalKeyword != null && body is! EmptyFunctionBody) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_GETTER_WITH_BODY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_GETTER_WITH_BODY);
|
| }
|
| return new MethodDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, propertyKeyword, null, name, null, body);
|
| }
|
| @@ -5303,7 +5303,7 @@ class Parser {
|
| if (_parseDeferredLibraries) {
|
| deferredToken = andAdvance;
|
| } else {
|
| - _reportErrorForCurrentToken(ParserErrorCode.DEFERRED_IMPORTS_NOT_SUPPORTED, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.DEFERRED_IMPORTS_NOT_SUPPORTED);
|
| _advance();
|
| }
|
| }
|
| @@ -5311,7 +5311,7 @@ class Parser {
|
| asToken = andAdvance;
|
| prefix = parseSimpleIdentifier();
|
| } else if (deferredToken != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_PREFIX_IN_DEFERRED_IMPORT, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_PREFIX_IN_DEFERRED_IMPORT);
|
| }
|
| List<Combinator> combinators = _parseCombinators();
|
| Token semicolon = _expectSemicolon();
|
| @@ -5401,9 +5401,9 @@ class Parser {
|
| // TODO(brianwilkerson) Recovery: This should be extended to handle arbitrary tokens until we
|
| // can find a token that can start a compilation unit member.
|
| StringLiteral string = parseStringLiteral();
|
| - _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string, []);
|
| + _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string);
|
| } else {
|
| - _reportErrorForToken(missingNameError, missingNameToken, []);
|
| + _reportErrorForToken(missingNameError, missingNameToken);
|
| }
|
| List<SimpleIdentifier> components = new List<SimpleIdentifier>();
|
| components.add(_createSyntheticIdentifier());
|
| @@ -5482,7 +5482,7 @@ class Parser {
|
| } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) || _matches(TokenType.INDEX)) {
|
| return _parseListLiteral(modifier, typeArguments);
|
| }
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_LIST_OR_MAP_LITERAL, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_LIST_OR_MAP_LITERAL);
|
| return new ListLiteral(modifier, typeArguments, _createSyntheticToken(TokenType.OPEN_SQUARE_BRACKET), null, _createSyntheticToken(TokenType.CLOSE_SQUARE_BRACKET));
|
| }
|
|
|
| @@ -5564,11 +5564,11 @@ class Parser {
|
| FunctionBody body = _parseFunctionBody(externalKeyword != null || staticKeyword == null, ParserErrorCode.MISSING_FUNCTION_BODY, false);
|
| if (externalKeyword != null) {
|
| if (body is! EmptyFunctionBody) {
|
| - _reportErrorForNode(ParserErrorCode.EXTERNAL_METHOD_WITH_BODY, body, []);
|
| + _reportErrorForNode(ParserErrorCode.EXTERNAL_METHOD_WITH_BODY, body);
|
| }
|
| } else if (staticKeyword != null) {
|
| if (body is EmptyFunctionBody) {
|
| - _reportErrorForNode(ParserErrorCode.ABSTRACT_STATIC_METHOD, body, []);
|
| + _reportErrorForNode(ParserErrorCode.ABSTRACT_STATIC_METHOD, body);
|
| }
|
| }
|
| return new MethodDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, null, null, name, parameters, body);
|
| @@ -5805,7 +5805,7 @@ class Parser {
|
| //
|
| // We appear to have a variable declaration with a type of "void".
|
| //
|
| - _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType, []);
|
| + _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType);
|
| return _parseVariableDeclarationStatementAfterMetadata(commentAndMetadata);
|
| }
|
| } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
|
| @@ -5815,7 +5815,7 @@ class Parser {
|
| //
|
| return _parseVariableDeclarationStatementAfterType(commentAndMetadata, null, returnType);
|
| }
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
|
| // TODO(brianwilkerson) Recover from this error.
|
| return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
|
| }
|
| @@ -5841,7 +5841,7 @@ class Parser {
|
| //
|
| // We have found an error of some kind. Try to recover.
|
| //
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
|
| return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
|
| }
|
| } else if (_inGenerator && _matchesString(_YIELD)) {
|
| @@ -5855,7 +5855,7 @@ class Parser {
|
| Token awaitToken = _currentToken;
|
| Statement statement = _parseForStatement();
|
| if (statement is! ForStatement) {
|
| - _reportErrorForToken(CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken, []);
|
| + _reportErrorForToken(CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken);
|
| }
|
| return statement;
|
| } else if (_matches(TokenType.SEMICOLON)) {
|
| @@ -5865,7 +5865,7 @@ class Parser {
|
| } else if (_isFunctionDeclaration()) {
|
| return _parseFunctionDeclarationStatement();
|
| } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
|
| return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
|
| } else {
|
| return new ExpressionStatement(parseExpression2(), _expect(TokenType.SEMICOLON));
|
| @@ -5895,7 +5895,7 @@ class Parser {
|
| if (_matchesKeyword(Keyword.OPERATOR)) {
|
| operatorKeyword = andAdvance;
|
| } else {
|
| - _reportErrorForToken(ParserErrorCode.MISSING_KEYWORD_OPERATOR, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.MISSING_KEYWORD_OPERATOR, _currentToken);
|
| operatorKeyword = _createSyntheticKeyword(Keyword.OPERATOR);
|
| }
|
| if (!_currentToken.isUserDefinableOperator) {
|
| @@ -5913,7 +5913,7 @@ class Parser {
|
| _validateFormalParameterList(parameters);
|
| FunctionBody body = _parseFunctionBody(true, ParserErrorCode.MISSING_FUNCTION_BODY, false);
|
| if (externalKeyword != null && body is! EmptyFunctionBody) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_OPERATOR_WITH_BODY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_OPERATOR_WITH_BODY);
|
| }
|
| return new MethodDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, null, returnType, null, operatorKeyword, name, parameters, body);
|
| }
|
| @@ -6120,7 +6120,7 @@ class Parser {
|
| } else if (_matches(TokenType.HASH)) {
|
| return _parseSymbolLiteral();
|
| } else {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| return _createSyntheticIdentifier();
|
| }
|
| }
|
| @@ -6241,7 +6241,7 @@ class Parser {
|
| _validateFormalParameterList(parameters);
|
| FunctionBody body = _parseFunctionBody(externalKeyword != null || staticKeyword == null, ParserErrorCode.STATIC_SETTER_WITHOUT_BODY, false);
|
| if (externalKeyword != null && body is! EmptyFunctionBody) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_SETTER_WITH_BODY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_SETTER_WITH_BODY);
|
| }
|
| return new MethodDeclaration(commentAndMetadata.comment, commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, propertyKeyword, null, name, parameters, body);
|
| }
|
| @@ -6406,11 +6406,11 @@ class Parser {
|
| Token colon = _expect(TokenType.COLON);
|
| members.add(new SwitchCase(labels, caseKeyword, caseExpression, colon, _parseStatementList()));
|
| if (defaultKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.SWITCH_HAS_CASE_AFTER_DEFAULT_CASE, caseKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.SWITCH_HAS_CASE_AFTER_DEFAULT_CASE, caseKeyword);
|
| }
|
| } else if (_matchesKeyword(Keyword.DEFAULT)) {
|
| if (defaultKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.SWITCH_HAS_MULTIPLE_DEFAULT_CASES, _peek(), []);
|
| + _reportErrorForToken(ParserErrorCode.SWITCH_HAS_MULTIPLE_DEFAULT_CASES, _peek());
|
| }
|
| defaultKeyword = andAdvance;
|
| Token colon = _expect(TokenType.COLON);
|
| @@ -6418,7 +6418,7 @@ class Parser {
|
| } else {
|
| // We need to advance, otherwise we could end up in an infinite loop, but this could be a
|
| // lot smarter about recovering from the error.
|
| - _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_CASE_OR_DEFAULT, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_CASE_OR_DEFAULT);
|
| while (!_matches(TokenType.EOF) && !_matches(TokenType.CLOSE_CURLY_BRACKET) && !_matchesKeyword(Keyword.CASE) && !_matchesKeyword(Keyword.DEFAULT)) {
|
| _advance();
|
| }
|
| @@ -6451,7 +6451,7 @@ class Parser {
|
| if (_matchesIdentifier()) {
|
| components.add(andAdvance);
|
| } else {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| components.add(_createSyntheticToken(TokenType.IDENTIFIER));
|
| break;
|
| }
|
| @@ -6461,7 +6461,7 @@ class Parser {
|
| } else if (_tokenMatchesKeyword(_currentToken, Keyword.VOID)) {
|
| components.add(andAdvance);
|
| } else {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| components.add(_createSyntheticToken(TokenType.IDENTIFIER));
|
| }
|
| return new SymbolLiteral(poundSign, components);
|
| @@ -6480,7 +6480,7 @@ class Parser {
|
| Expression _parseThrowExpression() {
|
| Token keyword = _expectKeyword(Keyword.THROW);
|
| if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) {
|
| - _reportErrorForToken(ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken);
|
| return new ThrowExpression(keyword, _createSyntheticIdentifier());
|
| }
|
| Expression expression = parseExpression2();
|
| @@ -6500,7 +6500,7 @@ class Parser {
|
| Expression _parseThrowExpressionWithoutCascade() {
|
| Token keyword = _expectKeyword(Keyword.THROW);
|
| if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) {
|
| - _reportErrorForToken(ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken, []);
|
| + _reportErrorForToken(ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken);
|
| return new ThrowExpression(keyword, _createSyntheticIdentifier());
|
| }
|
| Expression expression = parseExpressionWithoutCascade();
|
| @@ -6564,7 +6564,7 @@ class Parser {
|
| finallyClause = parseBlock();
|
| } else {
|
| if (catchClauses.isEmpty) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY);
|
| }
|
| }
|
| return new TryStatement(tryKeyword, body, catchClauses, finallyKeyword, finallyClause);
|
| @@ -6605,12 +6605,12 @@ class Parser {
|
| next = _skipTypeParameterList(next);
|
| if (next != null && _tokenMatches(next, TokenType.EQ)) {
|
| TypeAlias typeAlias = _parseClassTypeAlias(commentAndMetadata, null, keyword);
|
| - _reportErrorForToken(ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword);
|
| return typeAlias;
|
| }
|
| } else if (_tokenMatches(next, TokenType.EQ)) {
|
| TypeAlias typeAlias = _parseClassTypeAlias(commentAndMetadata, null, keyword);
|
| - _reportErrorForToken(ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword);
|
| return typeAlias;
|
| }
|
| }
|
| @@ -6674,7 +6674,7 @@ class Parser {
|
| }
|
| return new PrefixExpression(operator, _parseAssignableExpression(false));
|
| } else if (_matches(TokenType.PLUS)) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
|
| return _createSyntheticIdentifier();
|
| } else if (_matchesString(_AWAIT)) {
|
| return _parseAwaitExpression();
|
| @@ -6737,7 +6737,7 @@ class Parser {
|
| */
|
| VariableDeclarationList _parseVariableDeclarationListAfterType(CommentAndMetadata commentAndMetadata, Token keyword, TypeName type) {
|
| if (type != null && keyword != null && _tokenMatchesKeyword(keyword, Keyword.VAR)) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, keyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, keyword);
|
| }
|
| List<VariableDeclaration> variables = new List<VariableDeclaration>();
|
| variables.add(_parseVariableDeclaration());
|
| @@ -6881,7 +6881,7 @@ class Parser {
|
| * @param errorCode the error code of the error to be reported
|
| * @param arguments the arguments to the error, used to compose the error message
|
| */
|
| - void _reportErrorForCurrentToken(ParserErrorCode errorCode, List<Object> arguments) {
|
| + void _reportErrorForCurrentToken(ParserErrorCode errorCode, [List<Object> arguments]) {
|
| _reportErrorForToken(errorCode, _currentToken, arguments);
|
| }
|
|
|
| @@ -6892,7 +6892,7 @@ class Parser {
|
| * @param node the node specifying the location of the error
|
| * @param arguments the arguments to the error, used to compose the error message
|
| */
|
| - void _reportErrorForNode(ParserErrorCode errorCode, AstNode node, List<Object> arguments) {
|
| + void _reportErrorForNode(ParserErrorCode errorCode, AstNode node, [List<Object> arguments]) {
|
| _reportError(new AnalysisError.con2(_source, node.offset, node.length, errorCode, arguments));
|
| }
|
|
|
| @@ -6903,7 +6903,7 @@ class Parser {
|
| * @param token the token specifying the location of the error
|
| * @param arguments the arguments to the error, used to compose the error message
|
| */
|
| - void _reportErrorForToken(ErrorCode errorCode, Token token, List<Object> arguments) {
|
| + void _reportErrorForToken(ErrorCode errorCode, Token token, [List<Object> arguments]) {
|
| if (token.type == TokenType.EOF) {
|
| token = token.previous;
|
| }
|
| @@ -7434,14 +7434,14 @@ class Parser {
|
| } else if (currentChar == 0x78) {
|
| if (currentIndex + 2 >= length) {
|
| // Illegal escape sequence: not enough hex digits
|
| - _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE);
|
| return length;
|
| }
|
| int firstDigit = lexeme.codeUnitAt(currentIndex + 1);
|
| int secondDigit = lexeme.codeUnitAt(currentIndex + 2);
|
| if (!_isHexDigit(firstDigit) || !_isHexDigit(secondDigit)) {
|
| // Illegal escape sequence: invalid hex digit
|
| - _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE);
|
| } else {
|
| int charCode = (Character.digit(firstDigit, 16) << 4)
|
| + Character.digit(secondDigit, 16);
|
| @@ -7452,7 +7452,7 @@ class Parser {
|
| currentIndex++;
|
| if (currentIndex >= length) {
|
| // Illegal escape sequence: not enough hex digits
|
| - _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| return length;
|
| }
|
| currentChar = lexeme.codeUnitAt(currentIndex);
|
| @@ -7460,9 +7460,7 @@ class Parser {
|
| currentIndex++;
|
| if (currentIndex >= length) {
|
| // Illegal escape sequence: incomplete escape
|
| - _reportErrorForCurrentToken(
|
| - ParserErrorCode.INVALID_UNICODE_ESCAPE,
|
| - []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| return length;
|
| }
|
| currentChar = lexeme.codeUnitAt(currentIndex);
|
| @@ -7471,9 +7469,7 @@ class Parser {
|
| while (currentChar != 0x7D) {
|
| if (!_isHexDigit(currentChar)) {
|
| // Illegal escape sequence: invalid hex digit
|
| - _reportErrorForCurrentToken(
|
| - ParserErrorCode.INVALID_UNICODE_ESCAPE,
|
| - []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| currentIndex++;
|
| while (currentIndex < length
|
| && lexeme.codeUnitAt(currentIndex) != 0x7D) {
|
| @@ -7486,18 +7482,14 @@ class Parser {
|
| currentIndex++;
|
| if (currentIndex >= length) {
|
| // Illegal escape sequence: incomplete escape
|
| - _reportErrorForCurrentToken(
|
| - ParserErrorCode.INVALID_UNICODE_ESCAPE,
|
| - []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| return length;
|
| }
|
| currentChar = lexeme.codeUnitAt(currentIndex);
|
| }
|
| if (digitCount < 1 || digitCount > 6) {
|
| // Illegal escape sequence: not enough or too many hex digits
|
| - _reportErrorForCurrentToken(
|
| - ParserErrorCode.INVALID_UNICODE_ESCAPE,
|
| - []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| }
|
| _appendScalarValue(
|
| buffer,
|
| @@ -7509,9 +7501,7 @@ class Parser {
|
| } else {
|
| if (currentIndex + 3 >= length) {
|
| // Illegal escape sequence: not enough hex digits
|
| - _reportErrorForCurrentToken(
|
| - ParserErrorCode.INVALID_UNICODE_ESCAPE,
|
| - []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| return length;
|
| }
|
| int firstDigit = currentChar;
|
| @@ -7523,7 +7513,7 @@ class Parser {
|
| || !_isHexDigit(thirdDigit)
|
| || !_isHexDigit(fourthDigit)) {
|
| // Illegal escape sequence: invalid hex digits
|
| - _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE);
|
| } else {
|
| _appendScalarValue(
|
| buffer,
|
| @@ -7562,7 +7552,7 @@ class Parser {
|
| void _validateFormalParameterList(FormalParameterList parameterList) {
|
| for (FormalParameter parameter in parameterList.parameters) {
|
| if (parameter is FieldFormalParameter) {
|
| - _reportErrorForNode(ParserErrorCode.FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR, parameter.identifier, []);
|
| + _reportErrorForNode(ParserErrorCode.FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR, parameter.identifier);
|
| }
|
| }
|
| }
|
| @@ -7576,16 +7566,16 @@ class Parser {
|
| Token _validateModifiersForClass(Modifiers modifiers) {
|
| _validateModifiersForTopLevelDeclaration(modifiers);
|
| if (modifiers.constKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword);
|
| }
|
| if (modifiers.externalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_CLASS, modifiers.externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_CLASS, modifiers.externalKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_CLASS, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_CLASS, modifiers.varKeyword);
|
| }
|
| return modifiers.abstractKeyword;
|
| }
|
| @@ -7599,25 +7589,25 @@ class Parser {
|
| */
|
| Token _validateModifiersForConstructor(Modifiers modifiers) {
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER, modifiers.abstractKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER, modifiers.abstractKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_CONSTRUCTOR, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_CONSTRUCTOR, modifiers.finalKeyword);
|
| }
|
| if (modifiers.staticKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.STATIC_CONSTRUCTOR, modifiers.staticKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.STATIC_CONSTRUCTOR, modifiers.staticKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, modifiers.varKeyword);
|
| }
|
| Token externalKeyword = modifiers.externalKeyword;
|
| Token constKeyword = modifiers.constKeyword;
|
| Token factoryKeyword = modifiers.factoryKeyword;
|
| if (externalKeyword != null && constKeyword != null && constKeyword.offset < externalKeyword.offset) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_AFTER_CONST, externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_AFTER_CONST, externalKeyword);
|
| }
|
| if (externalKeyword != null && factoryKeyword != null && factoryKeyword.offset < externalKeyword.offset) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_AFTER_FACTORY, externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_AFTER_FACTORY, externalKeyword);
|
| }
|
| return constKeyword;
|
| }
|
| @@ -7631,19 +7621,19 @@ class Parser {
|
| void _validateModifiersForEnum(Modifiers modifiers) {
|
| _validateModifiersForTopLevelDeclaration(modifiers);
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.ABSTRACT_ENUM, modifiers.abstractKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.ABSTRACT_ENUM, modifiers.abstractKeyword);
|
| }
|
| if (modifiers.constKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_ENUM, modifiers.constKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_ENUM, modifiers.constKeyword);
|
| }
|
| if (modifiers.externalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_ENUM, modifiers.externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_ENUM, modifiers.externalKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_ENUM, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_ENUM, modifiers.finalKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_ENUM, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_ENUM, modifiers.varKeyword);
|
| }
|
| }
|
|
|
| @@ -7656,13 +7646,13 @@ class Parser {
|
| */
|
| Token _validateModifiersForField(Modifiers modifiers) {
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
|
| }
|
| if (modifiers.externalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword);
|
| }
|
| if (modifiers.factoryKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
|
| }
|
| Token staticKeyword = modifiers.staticKeyword;
|
| Token constKeyword = modifiers.constKeyword;
|
| @@ -7670,23 +7660,23 @@ class Parser {
|
| Token varKeyword = modifiers.varKeyword;
|
| if (constKeyword != null) {
|
| if (finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword);
|
| }
|
| if (varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword);
|
| }
|
| if (staticKeyword != null && constKeyword.offset < staticKeyword.offset) {
|
| - _reportErrorForToken(ParserErrorCode.STATIC_AFTER_CONST, staticKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.STATIC_AFTER_CONST, staticKeyword);
|
| }
|
| } else if (finalKeyword != null) {
|
| if (varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword);
|
| }
|
| if (staticKeyword != null && finalKeyword.offset < staticKeyword.offset) {
|
| - _reportErrorForToken(ParserErrorCode.STATIC_AFTER_FINAL, staticKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.STATIC_AFTER_FINAL, staticKeyword);
|
| }
|
| } else if (varKeyword != null && staticKeyword != null && varKeyword.offset < staticKeyword.offset) {
|
| - _reportErrorForToken(ParserErrorCode.STATIC_AFTER_VAR, staticKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.STATIC_AFTER_VAR, staticKeyword);
|
| }
|
| return _lexicallyFirst([constKeyword, finalKeyword, varKeyword]);
|
| }
|
| @@ -7698,7 +7688,7 @@ class Parser {
|
| */
|
| void _validateModifiersForFunctionDeclarationStatement(Modifiers modifiers) {
|
| if (modifiers.abstractKeyword != null || modifiers.constKeyword != null || modifiers.externalKeyword != null || modifiers.factoryKeyword != null || modifiers.finalKeyword != null || modifiers.staticKeyword != null || modifiers.varKeyword != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.LOCAL_FUNCTION_DECLARATION_MODIFIER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.LOCAL_FUNCTION_DECLARATION_MODIFIER);
|
| }
|
| }
|
|
|
| @@ -7709,24 +7699,24 @@ class Parser {
|
| */
|
| void _validateModifiersForGetterOrSetterOrMethod(Modifiers modifiers) {
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
|
| }
|
| if (modifiers.constKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_METHOD, modifiers.constKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_METHOD, modifiers.constKeyword);
|
| }
|
| if (modifiers.factoryKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
|
| }
|
| Token externalKeyword = modifiers.externalKeyword;
|
| Token staticKeyword = modifiers.staticKeyword;
|
| if (externalKeyword != null && staticKeyword != null && staticKeyword.offset < externalKeyword.offset) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_AFTER_STATIC, externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_AFTER_STATIC, externalKeyword);
|
| }
|
| }
|
|
|
| @@ -7737,22 +7727,22 @@ class Parser {
|
| */
|
| void _validateModifiersForOperator(Modifiers modifiers) {
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER);
|
| }
|
| if (modifiers.constKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_METHOD, modifiers.constKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_METHOD, modifiers.constKeyword);
|
| }
|
| if (modifiers.factoryKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword);
|
| }
|
| if (modifiers.staticKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.STATIC_OPERATOR, modifiers.staticKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.STATIC_OPERATOR, modifiers.staticKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
|
| }
|
| }
|
|
|
| @@ -7763,10 +7753,10 @@ class Parser {
|
| */
|
| void _validateModifiersForTopLevelDeclaration(Modifiers modifiers) {
|
| if (modifiers.factoryKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION, modifiers.factoryKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION, modifiers.factoryKeyword);
|
| }
|
| if (modifiers.staticKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.STATIC_TOP_LEVEL_DECLARATION, modifiers.staticKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.STATIC_TOP_LEVEL_DECLARATION, modifiers.staticKeyword);
|
| }
|
| }
|
|
|
| @@ -7778,16 +7768,16 @@ class Parser {
|
| void _validateModifiersForTopLevelFunction(Modifiers modifiers) {
|
| _validateModifiersForTopLevelDeclaration(modifiers);
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_FUNCTION, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_FUNCTION);
|
| }
|
| if (modifiers.constKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword);
|
| }
|
| }
|
|
|
| @@ -7801,24 +7791,24 @@ class Parser {
|
| Token _validateModifiersForTopLevelVariable(Modifiers modifiers) {
|
| _validateModifiersForTopLevelDeclaration(modifiers);
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_VARIABLE, []);
|
| + _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_VARIABLE);
|
| }
|
| if (modifiers.externalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword);
|
| }
|
| Token constKeyword = modifiers.constKeyword;
|
| Token finalKeyword = modifiers.finalKeyword;
|
| Token varKeyword = modifiers.varKeyword;
|
| if (constKeyword != null) {
|
| if (finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword);
|
| }
|
| if (varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword);
|
| }
|
| } else if (finalKeyword != null) {
|
| if (varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword);
|
| }
|
| }
|
| return _lexicallyFirst([constKeyword, finalKeyword, varKeyword]);
|
| @@ -7833,19 +7823,19 @@ class Parser {
|
| void _validateModifiersForTypedef(Modifiers modifiers) {
|
| _validateModifiersForTopLevelDeclaration(modifiers);
|
| if (modifiers.abstractKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.ABSTRACT_TYPEDEF, modifiers.abstractKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.ABSTRACT_TYPEDEF, modifiers.abstractKeyword);
|
| }
|
| if (modifiers.constKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.CONST_TYPEDEF, modifiers.constKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.CONST_TYPEDEF, modifiers.constKeyword);
|
| }
|
| if (modifiers.externalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.EXTERNAL_TYPEDEF, modifiers.externalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.EXTERNAL_TYPEDEF, modifiers.externalKeyword);
|
| }
|
| if (modifiers.finalKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.FINAL_TYPEDEF, modifiers.finalKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.FINAL_TYPEDEF, modifiers.finalKeyword);
|
| }
|
| if (modifiers.varKeyword != null) {
|
| - _reportErrorForToken(ParserErrorCode.VAR_TYPEDEF, modifiers.varKeyword, []);
|
| + _reportErrorForToken(ParserErrorCode.VAR_TYPEDEF, modifiers.varKeyword);
|
| }
|
| }
|
| }
|
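Every call site in the hunks above drops a trailing empty argument list, which suggests the message-arguments parameter of the reporting helpers became optional (or is simply unused at these sites). The real declarations of _reportErrorForToken and _reportErrorForCurrentToken are not part of the hunks shown here, so the following is only a minimal standalone Dart sketch of that pattern, using hypothetical stand-in types (ErrorCode, Token, Reporter) rather than code from this change:

class ErrorCode {
  final String name;
  const ErrorCode(this.name);
}

class Token {
  final int offset;
  const Token(this.offset);
}

class Reporter {
  // Declaring `arguments` as an optional positional parameter with an empty
  // default is what lets call sites omit the explicit `[]`.
  void reportErrorForToken(ErrorCode code, Token token,
      [List<Object> arguments = const []]) {
    final suffix = arguments.isEmpty ? '' : ' $arguments';
    print('${code.name} at offset ${token.offset}$suffix');
  }
}

void main() {
  final reporter = Reporter();
  const token = Token(42);
  // Old style: an explicit empty list at every call site.
  reporter.reportErrorForToken(const ErrorCode('CONST_CLASS'), token, []);
  // New style: the empty list is omitted, as in the diff above.
  reporter.reportErrorForToken(const ErrorCode('CONST_CLASS'), token);
}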