| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | |
| 2 // for details. All rights reserved. Use of this source code is governed by a | |
| 3 // BSD-style license that can be found in the LICENSE file. | |
| 4 | |
| 5 // This code was auto-generated, is not intended to be edited, and is subject to | |
| 6 // significant change. Please see the README file for more information. | |
| 7 | |
| 8 library engine.parser; | |
| 9 | |
| 10 import 'dart:collection'; | |
| 11 import "dart:math" as math; | |
| 12 | |
| 13 import 'ast.dart'; | |
| 14 import 'engine.dart' show AnalysisEngine, AnalysisOptionsImpl; | |
| 15 import 'error.dart'; | |
| 16 import 'java_core.dart'; | |
| 17 import 'java_engine.dart'; | |
| 18 import 'scanner.dart'; | |
| 19 import 'source.dart'; | |
| 20 import 'utilities_collection.dart' show TokenMap; | |
| 21 import 'utilities_dart.dart'; | |
| 22 | |
| 23 Map<String, MethodTrampoline> methodTable_Parser = <String, MethodTrampoline>{ | |
| 24 'parseCompilationUnit_1': new MethodTrampoline( | |
| 25 1, (Parser target, arg0) => target.parseCompilationUnit(arg0)), | |
| 26 'parseDirectives_1': new MethodTrampoline( | |
| 27 1, (Parser target, arg0) => target.parseDirectives(arg0)), | |
| 28 'parseExpression_1': new MethodTrampoline( | |
| 29 1, (Parser target, arg0) => target.parseExpression(arg0)), | |
| 30 'parseStatement_1': new MethodTrampoline( | |
| 31 1, (Parser target, arg0) => target.parseStatement(arg0)), | |
| 32 'parseStatements_1': new MethodTrampoline( | |
| 33 1, (Parser target, arg0) => target.parseStatements(arg0)), | |
| 34 'parseAnnotation_0': | |
| 35 new MethodTrampoline(0, (Parser target) => target.parseAnnotation()), | |
| 36 'parseArgument_0': | |
| 37 new MethodTrampoline(0, (Parser target) => target.parseArgument()), | |
| 38 'parseArgumentList_0': | |
| 39 new MethodTrampoline(0, (Parser target) => target.parseArgumentList()), | |
| 40 'parseBitwiseOrExpression_0': new MethodTrampoline( | |
| 41 0, (Parser target) => target.parseBitwiseOrExpression()), | |
| 42 'parseBlock_0': | |
| 43 new MethodTrampoline(0, (Parser target) => target.parseBlock()), | |
| 44 'parseClassMember_1': new MethodTrampoline( | |
| 45 1, (Parser target, arg0) => target.parseClassMember(arg0)), | |
| 46 'parseCompilationUnit_0': new MethodTrampoline( | |
| 47 0, (Parser target) => target.parseCompilationUnit2()), | |
| 48 'parseConditionalExpression_0': new MethodTrampoline( | |
| 49 0, (Parser target) => target.parseConditionalExpression()), | |
| 50 'parseConstructorName_0': | |
| 51 new MethodTrampoline(0, (Parser target) => target.parseConstructorName()), | |
| 52 'parseExpression_0': | |
| 53 new MethodTrampoline(0, (Parser target) => target.parseExpression2()), | |
| 54 'parseExpressionWithoutCascade_0': new MethodTrampoline( | |
| 55 0, (Parser target) => target.parseExpressionWithoutCascade()), | |
| 56 'parseExtendsClause_0': | |
| 57 new MethodTrampoline(0, (Parser target) => target.parseExtendsClause()), | |
| 58 'parseFormalParameterList_0': new MethodTrampoline( | |
| 59 0, (Parser target) => target.parseFormalParameterList()), | |
| 60 'parseFunctionExpression_0': new MethodTrampoline( | |
| 61 0, (Parser target) => target.parseFunctionExpression()), | |
| 62 'parseImplementsClause_0': new MethodTrampoline( | |
| 63 0, (Parser target) => target.parseImplementsClause()), | |
| 64 'parseLabel_0': | |
| 65 new MethodTrampoline(0, (Parser target) => target.parseLabel()), | |
| 66 'parseLibraryIdentifier_0': new MethodTrampoline( | |
| 67 0, (Parser target) => target.parseLibraryIdentifier()), | |
| 68 'parseLogicalOrExpression_0': new MethodTrampoline( | |
| 69 0, (Parser target) => target.parseLogicalOrExpression()), | |
| 70 'parseMapLiteralEntry_0': | |
| 71 new MethodTrampoline(0, (Parser target) => target.parseMapLiteralEntry()), | |
| 72 'parseNormalFormalParameter_0': new MethodTrampoline( | |
| 73 0, (Parser target) => target.parseNormalFormalParameter()), | |
| 74 'parsePrefixedIdentifier_0': new MethodTrampoline( | |
| 75 0, (Parser target) => target.parsePrefixedIdentifier()), | |
| 76 'parseReturnType_0': | |
| 77 new MethodTrampoline(0, (Parser target) => target.parseReturnType()), | |
| 78 'parseSimpleIdentifier_0': new MethodTrampoline( | |
| 79 0, (Parser target) => target.parseSimpleIdentifier()), | |
| 80 'parseStatement_0': | |
| 81 new MethodTrampoline(0, (Parser target) => target.parseStatement2()), | |
| 82 'parseStringLiteral_0': | |
| 83 new MethodTrampoline(0, (Parser target) => target.parseStringLiteral()), | |
| 84 'parseTypeArgumentList_0': new MethodTrampoline( | |
| 85 0, (Parser target) => target.parseTypeArgumentList()), | |
| 86 'parseTypeName_0': | |
| 87 new MethodTrampoline(0, (Parser target) => target.parseTypeName()), | |
| 88 'parseTypeParameter_0': | |
| 89 new MethodTrampoline(0, (Parser target) => target.parseTypeParameter()), | |
| 90 'parseTypeParameterList_0': new MethodTrampoline( | |
| 91 0, (Parser target) => target.parseTypeParameterList()), | |
| 92 'parseWithClause_0': | |
| 93 new MethodTrampoline(0, (Parser target) => target.parseWithClause()), | |
| 94 'advance_0': new MethodTrampoline(0, (Parser target) => target._advance()), | |
| 95 'appendScalarValue_5': new MethodTrampoline(5, (Parser target, arg0, arg1, | |
| 96 arg2, arg3, | |
| 97 arg4) => target._appendScalarValue(arg0, arg1, arg2, arg3, arg4)), | |
| 98 'computeStringValue_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 99 arg2) => target._computeStringValue(arg0, arg1, arg2)), | |
| 100 'convertToFunctionDeclaration_1': new MethodTrampoline( | |
| 101 1, (Parser target, arg0) => target._convertToFunctionDeclaration(arg0)), | |
| 102 'couldBeStartOfCompilationUnitMember_0': new MethodTrampoline( | |
| 103 0, (Parser target) => target._couldBeStartOfCompilationUnitMember()), | |
| 104 'createSyntheticIdentifier_0': new MethodTrampoline( | |
| 105 0, (Parser target) => target._createSyntheticIdentifier()), | |
| 106 'createSyntheticKeyword_1': new MethodTrampoline( | |
| 107 1, (Parser target, arg0) => target._createSyntheticKeyword(arg0)), | |
| 108 'createSyntheticStringLiteral_0': new MethodTrampoline( | |
| 109 0, (Parser target) => target._createSyntheticStringLiteral()), | |
| 110 'createSyntheticToken_1': new MethodTrampoline( | |
| 111 1, (Parser target, arg0) => target._createSyntheticToken(arg0)), | |
| 112 'ensureAssignable_1': new MethodTrampoline( | |
| 113 1, (Parser target, arg0) => target._ensureAssignable(arg0)), | |
| 114 'expect_1': | |
| 115 new MethodTrampoline(1, (Parser target, arg0) => target._expect(arg0)), | |
| 116 'expectGt_0': new MethodTrampoline(0, (Parser target) => target._expectGt()), | |
| 117 'expectKeyword_1': new MethodTrampoline( | |
| 118 1, (Parser target, arg0) => target._expectKeyword(arg0)), | |
| 119 'expectSemicolon_0': | |
| 120 new MethodTrampoline(0, (Parser target) => target._expectSemicolon()), | |
| 121 'findRange_2': new MethodTrampoline( | |
| 122 2, (Parser target, arg0, arg1) => target._findRange(arg0, arg1)), | |
| 123 'getCodeBlockRanges_1': new MethodTrampoline( | |
| 124 1, (Parser target, arg0) => target._getCodeBlockRanges(arg0)), | |
| 125 'getEndToken_1': new MethodTrampoline( | |
| 126 1, (Parser target, arg0) => target._getEndToken(arg0)), | |
| 127 'injectToken_1': new MethodTrampoline( | |
| 128 1, (Parser target, arg0) => target._injectToken(arg0)), | |
| 129 'isFunctionDeclaration_0': new MethodTrampoline( | |
| 130 0, (Parser target) => target._isFunctionDeclaration()), | |
| 131 'isFunctionExpression_1': new MethodTrampoline( | |
| 132 1, (Parser target, arg0) => target._isFunctionExpression(arg0)), | |
| 133 'isHexDigit_1': new MethodTrampoline( | |
| 134 1, (Parser target, arg0) => target._isHexDigit(arg0)), | |
| 135 'isInitializedVariableDeclaration_0': new MethodTrampoline( | |
| 136 0, (Parser target) => target._isInitializedVariableDeclaration()), | |
| 137 'isLinkText_2': new MethodTrampoline( | |
| 138 2, (Parser target, arg0, arg1) => target._isLinkText(arg0, arg1)), | |
| 139 'isOperator_1': new MethodTrampoline( | |
| 140 1, (Parser target, arg0) => target._isOperator(arg0)), | |
| 141 'isSwitchMember_0': | |
| 142 new MethodTrampoline(0, (Parser target) => target._isSwitchMember()), | |
| 143 'isTypedIdentifier_1': new MethodTrampoline( | |
| 144 1, (Parser target, arg0) => target._isTypedIdentifier(arg0)), | |
| 145 'lockErrorListener_0': | |
| 146 new MethodTrampoline(0, (Parser target) => target._lockErrorListener()), | |
| 147 'matches_1': | |
| 148 new MethodTrampoline(1, (Parser target, arg0) => target._matches(arg0)), | |
| 149 'matchesGt_0': | |
| 150 new MethodTrampoline(0, (Parser target) => target._matchesGt()), | |
| 151 'matchesIdentifier_0': | |
| 152 new MethodTrampoline(0, (Parser target) => target._matchesIdentifier()), | |
| 153 'matchesKeyword_1': new MethodTrampoline( | |
| 154 1, (Parser target, arg0) => target._matchesKeyword(arg0)), | |
| 155 'matchesString_1': new MethodTrampoline( | |
| 156 1, (Parser target, arg0) => target._matchesString(arg0)), | |
| 157 'optional_1': | |
| 158 new MethodTrampoline(1, (Parser target, arg0) => target._optional(arg0)), | |
| 159 'parseAdditiveExpression_0': new MethodTrampoline( | |
| 160 0, (Parser target) => target._parseAdditiveExpression()), | |
| 161 'parseAssertStatement_0': new MethodTrampoline( | |
| 162 0, (Parser target) => target._parseAssertStatement()), | |
| 163 'parseAssignableExpression_1': new MethodTrampoline( | |
| 164 1, (Parser target, arg0) => target._parseAssignableExpression(arg0)), | |
| 165 'parseAssignableSelector_2': new MethodTrampoline(2, (Parser target, arg0, | |
| 166 arg1) => target._parseAssignableSelector(arg0, arg1)), | |
| 167 'parseAwaitExpression_0': new MethodTrampoline( | |
| 168 0, (Parser target) => target._parseAwaitExpression()), | |
| 169 'parseBitwiseAndExpression_0': new MethodTrampoline( | |
| 170 0, (Parser target) => target._parseBitwiseAndExpression()), | |
| 171 'parseBitwiseXorExpression_0': new MethodTrampoline( | |
| 172 0, (Parser target) => target._parseBitwiseXorExpression()), | |
| 173 'parseBreakStatement_0': | |
| 174 new MethodTrampoline(0, (Parser target) => target._parseBreakStatement()), | |
| 175 'parseCascadeSection_0': | |
| 176 new MethodTrampoline(0, (Parser target) => target._parseCascadeSection()), | |
| 177 'parseClassDeclaration_2': new MethodTrampoline(2, | |
| 178 (Parser target, arg0, arg1) => target._parseClassDeclaration(arg0, arg1)), | |
| 179 'parseClassMembers_2': new MethodTrampoline( | |
| 180 2, (Parser target, arg0, arg1) => target._parseClassMembers(arg0, arg1)), | |
| 181 'parseClassTypeAlias_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 182 arg2) => target._parseClassTypeAlias(arg0, arg1, arg2)), | |
| 183 'parseCombinator_0': | |
| 184 new MethodTrampoline(0, (Parser target) => target.parseCombinator()), | |
| 185 'parseCombinators_0': | |
| 186 new MethodTrampoline(0, (Parser target) => target._parseCombinators()), | |
| 187 'parseCommentAndMetadata_0': new MethodTrampoline( | |
| 188 0, (Parser target) => target._parseCommentAndMetadata()), | |
| 189 'parseCommentReference_2': new MethodTrampoline(2, | |
| 190 (Parser target, arg0, arg1) => target._parseCommentReference(arg0, arg1)), | |
| 191 'parseCommentReferences_1': new MethodTrampoline( | |
| 192 1, (Parser target, arg0) => target._parseCommentReferences(arg0)), | |
| 193 'parseCompilationUnitMember_1': new MethodTrampoline( | |
| 194 1, (Parser target, arg0) => target._parseCompilationUnitMember(arg0)), | |
| 195 'parseConstExpression_0': new MethodTrampoline( | |
| 196 0, (Parser target) => target._parseConstExpression()), | |
| 197 'parseConstructor_8': new MethodTrampoline(8, (Parser target, arg0, arg1, | |
| 198 arg2, arg3, arg4, arg5, arg6, arg7) => | |
| 199 target._parseConstructor(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)), | |
| 200 'parseConstructorFieldInitializer_0': new MethodTrampoline( | |
| 201 0, (Parser target) => target._parseConstructorFieldInitializer()), | |
| 202 'parseContinueStatement_0': new MethodTrampoline( | |
| 203 0, (Parser target) => target._parseContinueStatement()), | |
| 204 'parseDirective_1': new MethodTrampoline( | |
| 205 1, (Parser target, arg0) => target._parseDirective(arg0)), | |
| 206 'parseDirectives_0': | |
| 207 new MethodTrampoline(0, (Parser target) => target._parseDirectives()), | |
| 208 'parseDocumentationComment_0': new MethodTrampoline( | |
| 209 0, (Parser target) => target._parseDocumentationComment()), | |
| 210 'parseDoStatement_0': | |
| 211 new MethodTrampoline(0, (Parser target) => target._parseDoStatement()), | |
| 212 'parseEmptyStatement_0': | |
| 213 new MethodTrampoline(0, (Parser target) => target._parseEmptyStatement()), | |
| 214 'parseEnumConstantDeclaration_0': new MethodTrampoline( | |
| 215 0, (Parser target) => target._parseEnumConstantDeclaration()), | |
| 216 'parseEnumDeclaration_1': new MethodTrampoline( | |
| 217 1, (Parser target, arg0) => target._parseEnumDeclaration(arg0)), | |
| 218 'parseEqualityExpression_0': new MethodTrampoline( | |
| 219 0, (Parser target) => target._parseEqualityExpression()), | |
| 220 'parseExportDirective_1': new MethodTrampoline( | |
| 221 1, (Parser target, arg0) => target._parseExportDirective(arg0)), | |
| 222 'parseExpressionList_0': | |
| 223 new MethodTrampoline(0, (Parser target) => target._parseExpressionList()), | |
| 224 'parseFinalConstVarOrType_1': new MethodTrampoline( | |
| 225 1, (Parser target, arg0) => target._parseFinalConstVarOrType(arg0)), | |
| 226 'parseFormalParameter_1': new MethodTrampoline( | |
| 227 1, (Parser target, arg0) => target._parseFormalParameter(arg0)), | |
| 228 'parseForStatement_0': | |
| 229 new MethodTrampoline(0, (Parser target) => target._parseForStatement()), | |
| 230 'parseFunctionBody_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 231 arg2) => target._parseFunctionBody(arg0, arg1, arg2)), | |
| 232 'parseFunctionDeclaration_3': new MethodTrampoline(3, (Parser target, arg0, | |
| 233 arg1, arg2) => target._parseFunctionDeclaration(arg0, arg1, arg2)), | |
| 234 'parseFunctionDeclarationStatement_0': new MethodTrampoline( | |
| 235 0, (Parser target) => target._parseFunctionDeclarationStatement()), | |
| 236 'parseFunctionDeclarationStatementAfterReturnType_2': new MethodTrampoline(2, | |
| 237 (Parser target, arg0, arg1) => | |
| 238 target._parseFunctionDeclarationStatementAfterReturnType(arg0, arg1)), | |
| 239 'parseFunctionTypeAlias_2': new MethodTrampoline(2, (Parser target, arg0, | |
| 240 arg1) => target._parseFunctionTypeAlias(arg0, arg1)), | |
| 241 'parseGetter_4': new MethodTrampoline(4, (Parser target, arg0, arg1, arg2, | |
| 242 arg3) => target._parseGetter(arg0, arg1, arg2, arg3)), | |
| 243 'parseIdentifierList_0': | |
| 244 new MethodTrampoline(0, (Parser target) => target._parseIdentifierList()), | |
| 245 'parseIfStatement_0': | |
| 246 new MethodTrampoline(0, (Parser target) => target._parseIfStatement()), | |
| 247 'parseImportDirective_1': new MethodTrampoline( | |
| 248 1, (Parser target, arg0) => target._parseImportDirective(arg0)), | |
| 249 'parseInitializedIdentifierList_4': new MethodTrampoline(4, | |
| 250 (Parser target, arg0, arg1, arg2, arg3) => | |
| 251 target._parseInitializedIdentifierList(arg0, arg1, arg2, arg3)), | |
| 252 'parseInstanceCreationExpression_1': new MethodTrampoline(1, | |
| 253 (Parser target, arg0) => target._parseInstanceCreationExpression(arg0)), | |
| 254 'parseLibraryDirective_1': new MethodTrampoline( | |
| 255 1, (Parser target, arg0) => target._parseLibraryDirective(arg0)), | |
| 256 'parseLibraryName_2': new MethodTrampoline( | |
| 257 2, (Parser target, arg0, arg1) => target._parseLibraryName(arg0, arg1)), | |
| 258 'parseListLiteral_2': new MethodTrampoline( | |
| 259 2, (Parser target, arg0, arg1) => target._parseListLiteral(arg0, arg1)), | |
| 260 'parseListOrMapLiteral_1': new MethodTrampoline( | |
| 261 1, (Parser target, arg0) => target._parseListOrMapLiteral(arg0)), | |
| 262 'parseLogicalAndExpression_0': new MethodTrampoline( | |
| 263 0, (Parser target) => target._parseLogicalAndExpression()), | |
| 264 'parseMapLiteral_2': new MethodTrampoline( | |
| 265 2, (Parser target, arg0, arg1) => target._parseMapLiteral(arg0, arg1)), | |
| 266 'parseMethodDeclarationAfterParameters_7': new MethodTrampoline(7, | |
| 267 (Parser target, arg0, arg1, arg2, arg3, arg4, arg5, arg6) => target | |
| 268 ._parseMethodDeclarationAfterParameters( | |
| 269 arg0, arg1, arg2, arg3, arg4, arg5, arg6)), | |
| 270 'parseMethodDeclarationAfterReturnType_4': new MethodTrampoline(4, | |
| 271 (Parser target, arg0, arg1, arg2, arg3) => target | |
| 272 ._parseMethodDeclarationAfterReturnType(arg0, arg1, arg2, arg3)), | |
| 273 'parseModifiers_0': | |
| 274 new MethodTrampoline(0, (Parser target) => target._parseModifiers()), | |
| 275 'parseMultiplicativeExpression_0': new MethodTrampoline( | |
| 276 0, (Parser target) => target._parseMultiplicativeExpression()), | |
| 277 'parseNativeClause_0': | |
| 278 new MethodTrampoline(0, (Parser target) => target._parseNativeClause()), | |
| 279 'parseNewExpression_0': | |
| 280 new MethodTrampoline(0, (Parser target) => target._parseNewExpression()), | |
| 281 'parseNonLabeledStatement_0': new MethodTrampoline( | |
| 282 0, (Parser target) => target._parseNonLabeledStatement()), | |
| 283 'parseOperator_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 284 arg2) => target._parseOperator(arg0, arg1, arg2)), | |
| 285 'parseOptionalReturnType_0': new MethodTrampoline( | |
| 286 0, (Parser target) => target._parseOptionalReturnType()), | |
| 287 'parsePartDirective_1': new MethodTrampoline( | |
| 288 1, (Parser target, arg0) => target._parsePartDirective(arg0)), | |
| 289 'parsePostfixExpression_0': new MethodTrampoline( | |
| 290 0, (Parser target) => target._parsePostfixExpression()), | |
| 291 'parsePrimaryExpression_0': new MethodTrampoline( | |
| 292 0, (Parser target) => target._parsePrimaryExpression()), | |
| 293 'parseRedirectingConstructorInvocation_0': new MethodTrampoline( | |
| 294 0, (Parser target) => target._parseRedirectingConstructorInvocation()), | |
| 295 'parseRelationalExpression_0': new MethodTrampoline( | |
| 296 0, (Parser target) => target._parseRelationalExpression()), | |
| 297 'parseRethrowExpression_0': new MethodTrampoline( | |
| 298 0, (Parser target) => target._parseRethrowExpression()), | |
| 299 'parseReturnStatement_0': new MethodTrampoline( | |
| 300 0, (Parser target) => target._parseReturnStatement()), | |
| 301 'parseSetter_4': new MethodTrampoline(4, (Parser target, arg0, arg1, arg2, | |
| 302 arg3) => target._parseSetter(arg0, arg1, arg2, arg3)), | |
| 303 'parseShiftExpression_0': new MethodTrampoline( | |
| 304 0, (Parser target) => target._parseShiftExpression()), | |
| 305 'parseStatementList_0': | |
| 306 new MethodTrampoline(0, (Parser target) => target._parseStatementList()), | |
| 307 'parseStringInterpolation_1': new MethodTrampoline( | |
| 308 1, (Parser target, arg0) => target._parseStringInterpolation(arg0)), | |
| 309 'parseSuperConstructorInvocation_0': new MethodTrampoline( | |
| 310 0, (Parser target) => target._parseSuperConstructorInvocation()), | |
| 311 'parseSwitchStatement_0': new MethodTrampoline( | |
| 312 0, (Parser target) => target._parseSwitchStatement()), | |
| 313 'parseSymbolLiteral_0': | |
| 314 new MethodTrampoline(0, (Parser target) => target._parseSymbolLiteral()), | |
| 315 'parseThrowExpression_0': new MethodTrampoline( | |
| 316 0, (Parser target) => target._parseThrowExpression()), | |
| 317 'parseThrowExpressionWithoutCascade_0': new MethodTrampoline( | |
| 318 0, (Parser target) => target._parseThrowExpressionWithoutCascade()), | |
| 319 'parseTryStatement_0': | |
| 320 new MethodTrampoline(0, (Parser target) => target._parseTryStatement()), | |
| 321 'parseTypeAlias_1': new MethodTrampoline( | |
| 322 1, (Parser target, arg0) => target._parseTypeAlias(arg0)), | |
| 323 'parseUnaryExpression_0': new MethodTrampoline( | |
| 324 0, (Parser target) => target._parseUnaryExpression()), | |
| 325 'parseVariableDeclaration_0': new MethodTrampoline( | |
| 326 0, (Parser target) => target._parseVariableDeclaration()), | |
| 327 'parseVariableDeclarationListAfterMetadata_1': new MethodTrampoline(1, | |
| 328 (Parser target, arg0) => | |
| 329 target._parseVariableDeclarationListAfterMetadata(arg0)), | |
| 330 'parseVariableDeclarationListAfterType_3': new MethodTrampoline(3, | |
| 331 (Parser target, arg0, arg1, arg2) => | |
| 332 target._parseVariableDeclarationListAfterType(arg0, arg1, arg2)), | |
| 333 'parseVariableDeclarationStatementAfterMetadata_1': new MethodTrampoline(1, | |
| 334 (Parser target, arg0) => | |
| 335 target._parseVariableDeclarationStatementAfterMetadata(arg0)), | |
| 336 'parseVariableDeclarationStatementAfterType_3': new MethodTrampoline(3, | |
| 337 (Parser target, arg0, arg1, arg2) => | |
| 338 target._parseVariableDeclarationStatementAfterType(arg0, arg1, arg2)), | |
| 339 'parseWhileStatement_0': | |
| 340 new MethodTrampoline(0, (Parser target) => target._parseWhileStatement()), | |
| 341 'parseYieldStatement_0': | |
| 342 new MethodTrampoline(0, (Parser target) => target._parseYieldStatement()), | |
| 343 'peek_0': new MethodTrampoline(0, (Parser target) => target._peek()), | |
| 344 'peekAt_1': | |
| 345 new MethodTrampoline(1, (Parser target, arg0) => target._peekAt(arg0)), | |
| 346 'reportError_1': new MethodTrampoline( | |
| 347 1, (Parser target, arg0) => target._reportError(arg0)), | |
| 348 'reportErrorForCurrentToken_2': new MethodTrampoline(2, (Parser target, arg0, | |
| 349 arg1) => target._reportErrorForCurrentToken(arg0, arg1)), | |
| 350 'reportErrorForNode_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 351 arg2) => target._reportErrorForNode(arg0, arg1, arg2)), | |
| 352 'reportErrorForToken_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 353 arg2) => target._reportErrorForToken(arg0, arg1, arg2)), | |
| 354 'skipBlock_0': | |
| 355 new MethodTrampoline(0, (Parser target) => target._skipBlock()), | |
| 356 'skipFinalConstVarOrType_1': new MethodTrampoline( | |
| 357 1, (Parser target, arg0) => target._skipFinalConstVarOrType(arg0)), | |
| 358 'skipFormalParameterList_1': new MethodTrampoline( | |
| 359 1, (Parser target, arg0) => target._skipFormalParameterList(arg0)), | |
| 360 'skipPastMatchingToken_1': new MethodTrampoline( | |
| 361 1, (Parser target, arg0) => target._skipPastMatchingToken(arg0)), | |
| 362 'skipPrefixedIdentifier_1': new MethodTrampoline( | |
| 363 1, (Parser target, arg0) => target._skipPrefixedIdentifier(arg0)), | |
| 364 'skipReturnType_1': new MethodTrampoline( | |
| 365 1, (Parser target, arg0) => target._skipReturnType(arg0)), | |
| 366 'skipSimpleIdentifier_1': new MethodTrampoline( | |
| 367 1, (Parser target, arg0) => target._skipSimpleIdentifier(arg0)), | |
| 368 'skipStringInterpolation_1': new MethodTrampoline( | |
| 369 1, (Parser target, arg0) => target._skipStringInterpolation(arg0)), | |
| 370 'skipStringLiteral_1': new MethodTrampoline( | |
| 371 1, (Parser target, arg0) => target._skipStringLiteral(arg0)), | |
| 372 'skipTypeArgumentList_1': new MethodTrampoline( | |
| 373 1, (Parser target, arg0) => target._skipTypeArgumentList(arg0)), | |
| 374 'skipTypeName_1': new MethodTrampoline( | |
| 375 1, (Parser target, arg0) => target._skipTypeName(arg0)), | |
| 376 'skipTypeParameterList_1': new MethodTrampoline( | |
| 377 1, (Parser target, arg0) => target._skipTypeParameterList(arg0)), | |
| 378 'tokenMatches_2': new MethodTrampoline( | |
| 379 2, (Parser target, arg0, arg1) => target._tokenMatches(arg0, arg1)), | |
| 380 'tokenMatchesIdentifier_1': new MethodTrampoline( | |
| 381 1, (Parser target, arg0) => target._tokenMatchesIdentifier(arg0)), | |
| 382 'tokenMatchesKeyword_2': new MethodTrampoline(2, | |
| 383 (Parser target, arg0, arg1) => target._tokenMatchesKeyword(arg0, arg1)), | |
| 384 'tokenMatchesString_2': new MethodTrampoline( | |
| 385 2, (Parser target, arg0, arg1) => target._tokenMatchesString(arg0, arg1)), | |
| 386 'translateCharacter_3': new MethodTrampoline(3, (Parser target, arg0, arg1, | |
| 387 arg2) => target._translateCharacter(arg0, arg1, arg2)), | |
| 388 'unlockErrorListener_0': | |
| 389 new MethodTrampoline(0, (Parser target) => target._unlockErrorListener()), | |
| 390 'validateFormalParameterList_1': new MethodTrampoline( | |
| 391 1, (Parser target, arg0) => target._validateFormalParameterList(arg0)), | |
| 392 'validateModifiersForClass_1': new MethodTrampoline( | |
| 393 1, (Parser target, arg0) => target._validateModifiersForClass(arg0)), | |
| 394 'validateModifiersForConstructor_1': new MethodTrampoline(1, | |
| 395 (Parser target, arg0) => target._validateModifiersForConstructor(arg0)), | |
| 396 'validateModifiersForEnum_1': new MethodTrampoline( | |
| 397 1, (Parser target, arg0) => target._validateModifiersForEnum(arg0)), | |
| 398 'validateModifiersForField_1': new MethodTrampoline( | |
| 399 1, (Parser target, arg0) => target._validateModifiersForField(arg0)), | |
| 400 'validateModifiersForFunctionDeclarationStatement_1': new MethodTrampoline(1, | |
| 401 (Parser target, arg0) => | |
| 402 target._validateModifiersForFunctionDeclarationStatement(arg0)), | |
| 403 'validateModifiersForGetterOrSetterOrMethod_1': new MethodTrampoline(1, | |
| 404 (Parser target, arg0) => | |
| 405 target._validateModifiersForGetterOrSetterOrMethod(arg0)), | |
| 406 'validateModifiersForOperator_1': new MethodTrampoline( | |
| 407 1, (Parser target, arg0) => target._validateModifiersForOperator(arg0)), | |
| 408 'validateModifiersForTopLevelDeclaration_1': new MethodTrampoline(1, | |
| 409 (Parser target, arg0) => | |
| 410 target._validateModifiersForTopLevelDeclaration(arg0)), | |
| 411 'validateModifiersForTopLevelFunction_1': new MethodTrampoline(1, | |
| 412 (Parser target, arg0) => | |
| 413 target._validateModifiersForTopLevelFunction(arg0)), | |
| 414 'validateModifiersForTopLevelVariable_1': new MethodTrampoline(1, | |
| 415 (Parser target, arg0) => | |
| 416 target._validateModifiersForTopLevelVariable(arg0)), | |
| 417 'validateModifiersForTypedef_1': new MethodTrampoline( | |
| 418 1, (Parser target, arg0) => target._validateModifiersForTypedef(arg0)), | |
| 419 }; | |
| 420 | |
| 421 Object invokeParserMethodImpl( | |
| 422 Parser parser, String methodName, List<Object> objects, Token tokenStream) { | |
| 423 parser.currentToken = tokenStream; | |
| 424 MethodTrampoline method = | |
| 425 methodTable_Parser['${methodName}_${objects.length}']; | |
| 426 if (method == null) { | |
| 427 throw new IllegalArgumentException('There is no method named $methodName'); | |
| 428 } | |
| 429 return method.invoke(parser, objects); | |
| 430 } | |
| 431 | |
| 432 /** | |
| 433 * A simple data-holder for a method that needs to return multiple values. | |
| 434 */ | |
| 435 class CommentAndMetadata { | |
| 436 /** | |
| 437 * The documentation comment that was parsed, or `null` if none was given. | |
| 438 */ | |
| 439 final Comment comment; | |
| 440 | |
| 441 /** | |
| 442 * The metadata that was parsed. | |
| 443 */ | |
| 444 final List<Annotation> metadata; | |
| 445 | |
| 446 /** | |
| 447 * Initialize a newly created holder with the given [comment] and [metadata]. | |
| 448 */ | |
| 449 CommentAndMetadata(this.comment, this.metadata); | |
| 450 } | |
| 451 | |
| 452 /** | |
| 453 * A simple data-holder for a method that needs to return multiple values. | |
| 454 */ | |
| 455 class FinalConstVarOrType { | |
| 456 /** | |
| 457 * The 'final', 'const' or 'var' keyword, or `null` if none was given. | |
| 458 */ | |
| 459 final Token keyword; | |
| 460 | |
| 461 /** | |
| 462 * The type, of `null` if no type was specified. | |
| 463 */ | |
| 464 final TypeName type; | |
| 465 | |
| 466 /** | |
| 467 * Initialize a newly created holder with the given [keyword] and [type]. | |
| 468 */ | |
| 469 FinalConstVarOrType(this.keyword, this.type); | |
| 470 } | |
| 471 | |
| 472 /** | |
| 473 * A dispatcher that will invoke the right parse method when re-parsing a | |
| 474 * specified child of the visited node. All of the methods in this class assume | |
| 475 * that the parser is positioned to parse the replacement for the node. All of | |
| 476 * the methods will throw an [IncrementalParseException] if the node could not | |
| 477 * be parsed for some reason. | |
| 478 */ | |
| 479 class IncrementalParseDispatcher implements AstVisitor<AstNode> { | |
| 480 /** | |
| 481 * The parser used to parse the replacement for the node. | |
| 482 */ | |
| 483 final Parser _parser; | |
| 484 | |
| 485 /** | |
| 486 * The node that is to be replaced. | |
| 487 */ | |
| 488 final AstNode _oldNode; | |
| 489 | |
| 490 /** | |
| 491 * Initialize a newly created dispatcher to parse a single node that will | |
| 492 * use the [_parser] to replace the [_oldNode]. | |
| 493 */ | |
| 494 IncrementalParseDispatcher(this._parser, this._oldNode); | |
| 495 | |
| 496 @override | |
| 497 AstNode visitAdjacentStrings(AdjacentStrings node) { | |
| 498 if (node.strings.contains(_oldNode)) { | |
| 499 return _parser.parseStringLiteral(); | |
| 500 } | |
| 501 return _notAChild(node); | |
| 502 } | |
| 503 | |
| 504 @override | |
| 505 AstNode visitAnnotation(Annotation node) { | |
| 506 if (identical(_oldNode, node.name)) { | |
| 507 throw new InsufficientContextException(); | |
| 508 } else if (identical(_oldNode, node.constructorName)) { | |
| 509 throw new InsufficientContextException(); | |
| 510 } else if (identical(_oldNode, node.arguments)) { | |
| 511 return _parser.parseArgumentList(); | |
| 512 } | |
| 513 return _notAChild(node); | |
| 514 } | |
| 515 | |
| 516 @override | |
| 517 AstNode visitArgumentList(ArgumentList node) { | |
| 518 if (node.arguments.contains(_oldNode)) { | |
| 519 return _parser.parseArgument(); | |
| 520 } | |
| 521 return _notAChild(node); | |
| 522 } | |
| 523 | |
| 524 @override | |
| 525 AstNode visitAsExpression(AsExpression node) { | |
| 526 if (identical(_oldNode, node.expression)) { | |
| 527 return _parser.parseBitwiseOrExpression(); | |
| 528 } else if (identical(_oldNode, node.type)) { | |
| 529 return _parser.parseTypeName(); | |
| 530 } | |
| 531 return _notAChild(node); | |
| 532 } | |
| 533 | |
| 534 @override | |
| 535 AstNode visitAssertStatement(AssertStatement node) { | |
| 536 if (identical(_oldNode, node.condition)) { | |
| 537 return _parser.parseExpression2(); | |
| 538 } | |
| 539 return _notAChild(node); | |
| 540 } | |
| 541 | |
| 542 @override | |
| 543 AstNode visitAssignmentExpression(AssignmentExpression node) { | |
| 544 if (identical(_oldNode, node.leftHandSide)) { | |
| 545 // TODO(brianwilkerson) If the assignment is part of a cascade section, | |
| 546 // then we don't have a single parse method that will work. | |
| 547 // Otherwise, we can parse a conditional expression, but need to ensure | |
| 548 // that the resulting expression is assignable. | |
| 549 // return parser.parseConditionalExpression(); | |
| 550 throw new InsufficientContextException(); | |
| 551 } else if (identical(_oldNode, node.rightHandSide)) { | |
| 552 if (_isCascadeAllowedInAssignment(node)) { | |
| 553 return _parser.parseExpression2(); | |
| 554 } | |
| 555 return _parser.parseExpressionWithoutCascade(); | |
| 556 } | |
| 557 return _notAChild(node); | |
| 558 } | |
| 559 | |
| 560 @override | |
| 561 AstNode visitAwaitExpression(AwaitExpression node) { | |
| 562 if (identical(_oldNode, node.expression)) { | |
| 563 // TODO(brianwilkerson) Depending on precedence, | |
| 564 // this might not be sufficient. | |
| 565 return _parser.parseExpression2(); | |
| 566 } | |
| 567 return _notAChild(node); | |
| 568 } | |
| 569 | |
| 570 @override | |
| 571 AstNode visitBinaryExpression(BinaryExpression node) { | |
| 572 if (identical(_oldNode, node.leftOperand)) { | |
| 573 throw new InsufficientContextException(); | |
| 574 } else if (identical(_oldNode, node.rightOperand)) { | |
| 575 throw new InsufficientContextException(); | |
| 576 } | |
| 577 return _notAChild(node); | |
| 578 } | |
| 579 | |
| 580 @override | |
| 581 AstNode visitBlock(Block node) { | |
| 582 if (node.statements.contains(_oldNode)) { | |
| 583 return _parser.parseStatement2(); | |
| 584 } | |
| 585 return _notAChild(node); | |
| 586 } | |
| 587 | |
| 588 @override | |
| 589 AstNode visitBlockFunctionBody(BlockFunctionBody node) { | |
| 590 if (identical(_oldNode, node.block)) { | |
| 591 return _parser.parseBlock(); | |
| 592 } | |
| 593 return _notAChild(node); | |
| 594 } | |
| 595 | |
| 596 @override | |
| 597 AstNode visitBooleanLiteral(BooleanLiteral node) => _notAChild(node); | |
| 598 | |
| 599 @override | |
| 600 AstNode visitBreakStatement(BreakStatement node) { | |
| 601 if (identical(_oldNode, node.label)) { | |
| 602 return _parser.parseSimpleIdentifier(); | |
| 603 } | |
| 604 return _notAChild(node); | |
| 605 } | |
| 606 | |
| 607 @override | |
| 608 AstNode visitCascadeExpression(CascadeExpression node) { | |
| 609 if (identical(_oldNode, node.target)) { | |
| 610 return _parser.parseConditionalExpression(); | |
| 611 } else if (node.cascadeSections.contains(_oldNode)) { | |
| 612 throw new InsufficientContextException(); | |
| 613 } | |
| 614 return _notAChild(node); | |
| 615 } | |
| 616 | |
| 617 @override | |
| 618 AstNode visitCatchClause(CatchClause node) { | |
| 619 if (identical(_oldNode, node.exceptionType)) { | |
| 620 return _parser.parseTypeName(); | |
| 621 } else if (identical(_oldNode, node.exceptionParameter)) { | |
| 622 return _parser.parseSimpleIdentifier(); | |
| 623 } else if (identical(_oldNode, node.stackTraceParameter)) { | |
| 624 return _parser.parseSimpleIdentifier(); | |
| 625 } else if (identical(_oldNode, node.body)) { | |
| 626 return _parser.parseBlock(); | |
| 627 } | |
| 628 return _notAChild(node); | |
| 629 } | |
| 630 | |
| 631 @override | |
| 632 AstNode visitClassDeclaration(ClassDeclaration node) { | |
| 633 if (identical(_oldNode, node.documentationComment)) { | |
| 634 throw new InsufficientContextException(); | |
| 635 } else if (node.metadata.contains(_oldNode)) { | |
| 636 return _parser.parseAnnotation(); | |
| 637 } else if (identical(_oldNode, node.name)) { | |
| 638 // Changing the class name changes whether a member is interpreted as a | |
| 639 // constructor or not, so we'll just have to re-parse the entire class. | |
| 640 throw new InsufficientContextException(); | |
| 641 } else if (identical(_oldNode, node.typeParameters)) { | |
| 642 return _parser.parseTypeParameterList(); | |
| 643 } else if (identical(_oldNode, node.extendsClause)) { | |
| 644 return _parser.parseExtendsClause(); | |
| 645 } else if (identical(_oldNode, node.withClause)) { | |
| 646 return _parser.parseWithClause(); | |
| 647 } else if (identical(_oldNode, node.implementsClause)) { | |
| 648 return _parser.parseImplementsClause(); | |
| 649 } else if (node.members.contains(_oldNode)) { | |
| 650 ClassMember member = _parser.parseClassMember(node.name.name); | |
| 651 if (member == null) { | |
| 652 throw new InsufficientContextException(); | |
| 653 } | |
| 654 return member; | |
| 655 } | |
| 656 return _notAChild(node); | |
| 657 } | |
| 658 | |
| 659 @override | |
| 660 AstNode visitClassTypeAlias(ClassTypeAlias node) { | |
| 661 if (identical(_oldNode, node.documentationComment)) { | |
| 662 throw new InsufficientContextException(); | |
| 663 } else if (node.metadata.contains(_oldNode)) { | |
| 664 return _parser.parseAnnotation(); | |
| 665 } else if (identical(_oldNode, node.name)) { | |
| 666 return _parser.parseSimpleIdentifier(); | |
| 667 } else if (identical(_oldNode, node.typeParameters)) { | |
| 668 return _parser.parseTypeParameterList(); | |
| 669 } else if (identical(_oldNode, node.superclass)) { | |
| 670 return _parser.parseTypeName(); | |
| 671 } else if (identical(_oldNode, node.withClause)) { | |
| 672 return _parser.parseWithClause(); | |
| 673 } else if (identical(_oldNode, node.implementsClause)) { | |
| 674 return _parser.parseImplementsClause(); | |
| 675 } | |
| 676 return _notAChild(node); | |
| 677 } | |
| 678 | |
| 679 @override | |
| 680 AstNode visitComment(Comment node) { | |
| 681 throw new InsufficientContextException(); | |
| 682 } | |
| 683 | |
| 684 @override | |
| 685 AstNode visitCommentReference(CommentReference node) { | |
| 686 if (identical(_oldNode, node.identifier)) { | |
| 687 return _parser.parsePrefixedIdentifier(); | |
| 688 } | |
| 689 return _notAChild(node); | |
| 690 } | |
| 691 | |
| 692 @override | |
| 693 AstNode visitCompilationUnit(CompilationUnit node) { | |
| 694 throw new InsufficientContextException(); | |
| 695 } | |
| 696 | |
| 697 @override | |
| 698 AstNode visitConditionalExpression(ConditionalExpression node) { | |
| 699 if (identical(_oldNode, node.condition)) { | |
| 700 return _parser.parseIfNullExpression(); | |
| 701 } else if (identical(_oldNode, node.thenExpression)) { | |
| 702 return _parser.parseExpressionWithoutCascade(); | |
| 703 } else if (identical(_oldNode, node.elseExpression)) { | |
| 704 return _parser.parseExpressionWithoutCascade(); | |
| 705 } | |
| 706 return _notAChild(node); | |
| 707 } | |
| 708 | |
| 709 @override | |
| 710 AstNode visitConstructorDeclaration(ConstructorDeclaration node) { | |
| 711 if (identical(_oldNode, node.documentationComment)) { | |
| 712 throw new InsufficientContextException(); | |
| 713 } else if (node.metadata.contains(_oldNode)) { | |
| 714 return _parser.parseAnnotation(); | |
| 715 } else if (identical(_oldNode, node.returnType)) { | |
| 716 throw new InsufficientContextException(); | |
| 717 } else if (identical(_oldNode, node.name)) { | |
| 718 throw new InsufficientContextException(); | |
| 719 } else if (identical(_oldNode, node.parameters)) { | |
| 720 return _parser.parseFormalParameterList(); | |
| 721 } else if (identical(_oldNode, node.redirectedConstructor)) { | |
| 722 throw new InsufficientContextException(); | |
| 723 } else if (node.initializers.contains(_oldNode)) { | |
| 724 throw new InsufficientContextException(); | |
| 725 } else if (identical(_oldNode, node.body)) { | |
| 726 throw new InsufficientContextException(); | |
| 727 } | |
| 728 return _notAChild(node); | |
| 729 } | |
| 730 | |
| 731 @override | |
| 732 AstNode visitConstructorFieldInitializer(ConstructorFieldInitializer node) { | |
| 733 if (identical(_oldNode, node.fieldName)) { | |
| 734 return _parser.parseSimpleIdentifier(); | |
| 735 } else if (identical(_oldNode, node.expression)) { | |
| 736 throw new InsufficientContextException(); | |
| 737 } | |
| 738 return _notAChild(node); | |
| 739 } | |
| 740 | |
| 741 @override | |
| 742 AstNode visitConstructorName(ConstructorName node) { | |
| 743 if (identical(_oldNode, node.type)) { | |
| 744 return _parser.parseTypeName(); | |
| 745 } else if (identical(_oldNode, node.name)) { | |
| 746 return _parser.parseSimpleIdentifier(); | |
| 747 } | |
| 748 return _notAChild(node); | |
| 749 } | |
| 750 | |
| 751 @override | |
| 752 AstNode visitContinueStatement(ContinueStatement node) { | |
| 753 if (identical(_oldNode, node.label)) { | |
| 754 return _parser.parseSimpleIdentifier(); | |
| 755 } | |
| 756 return _notAChild(node); | |
| 757 } | |
| 758 | |
| 759 @override | |
| 760 AstNode visitDeclaredIdentifier(DeclaredIdentifier node) { | |
| 761 if (identical(_oldNode, node.documentationComment)) { | |
| 762 throw new InsufficientContextException(); | |
| 763 } else if (node.metadata.contains(_oldNode)) { | |
| 764 return _parser.parseAnnotation(); | |
| 765 } else if (identical(_oldNode, node.type)) { | |
| 766 throw new InsufficientContextException(); | |
| 767 } else if (identical(_oldNode, node.identifier)) { | |
| 768 return _parser.parseSimpleIdentifier(); | |
| 769 } | |
| 770 return _notAChild(node); | |
| 771 } | |
| 772 | |
| 773 @override | |
| 774 AstNode visitDefaultFormalParameter(DefaultFormalParameter node) { | |
| 775 if (identical(_oldNode, node.parameter)) { | |
| 776 return _parser.parseNormalFormalParameter(); | |
| 777 } else if (identical(_oldNode, node.defaultValue)) { | |
| 778 return _parser.parseExpression2(); | |
| 779 } | |
| 780 return _notAChild(node); | |
| 781 } | |
| 782 | |
| 783 @override | |
| 784 AstNode visitDoStatement(DoStatement node) { | |
| 785 if (identical(_oldNode, node.body)) { | |
| 786 return _parser.parseStatement2(); | |
| 787 } else if (identical(_oldNode, node.condition)) { | |
| 788 return _parser.parseExpression2(); | |
| 789 } | |
| 790 return _notAChild(node); | |
| 791 } | |
| 792 | |
| 793 @override | |
| 794 AstNode visitDoubleLiteral(DoubleLiteral node) => _notAChild(node); | |
| 795 | |
| 796 @override | |
| 797 AstNode visitEmptyFunctionBody(EmptyFunctionBody node) => _notAChild(node); | |
| 798 | |
| 799 @override | |
| 800 AstNode visitEmptyStatement(EmptyStatement node) => _notAChild(node); | |
| 801 | |
| 802 @override | |
| 803 AstNode visitEnumConstantDeclaration(EnumConstantDeclaration node) { | |
| 804 if (identical(_oldNode, node.documentationComment)) { | |
| 805 throw new InsufficientContextException(); | |
| 806 } else if (node.metadata.contains(_oldNode)) { | |
| 807 return _parser.parseAnnotation(); | |
| 808 } else if (identical(_oldNode, node.name)) { | |
| 809 return _parser.parseSimpleIdentifier(); | |
| 810 } | |
| 811 return _notAChild(node); | |
| 812 } | |
| 813 | |
| 814 @override | |
| 815 AstNode visitEnumDeclaration(EnumDeclaration node) { | |
| 816 if (identical(_oldNode, node.documentationComment)) { | |
| 817 throw new InsufficientContextException(); | |
| 818 } else if (node.metadata.contains(_oldNode)) { | |
| 819 return _parser.parseAnnotation(); | |
| 820 } else if (identical(_oldNode, node.name)) { | |
| 821 return _parser.parseSimpleIdentifier(); | |
| 822 } else if (node.constants.contains(_oldNode)) { | |
| 823 throw new InsufficientContextException(); | |
| 824 } | |
| 825 return _notAChild(node); | |
| 826 } | |
| 827 | |
| 828 @override | |
| 829 AstNode visitExportDirective(ExportDirective node) { | |
| 830 if (identical(_oldNode, node.documentationComment)) { | |
| 831 throw new InsufficientContextException(); | |
| 832 } else if (node.metadata.contains(_oldNode)) { | |
| 833 return _parser.parseAnnotation(); | |
| 834 } else if (identical(_oldNode, node.uri)) { | |
| 835 return _parser.parseStringLiteral(); | |
| 836 } else if (node.combinators.contains(_oldNode)) { | |
| 837 throw new IncrementalParseException(); | |
| 838 //return parser.parseCombinator(); | |
| 839 } | |
| 840 return _notAChild(node); | |
| 841 } | |
| 842 | |
| 843 @override | |
| 844 AstNode visitExpressionFunctionBody(ExpressionFunctionBody node) { | |
| 845 if (identical(_oldNode, node.expression)) { | |
| 846 return _parser.parseExpression2(); | |
| 847 } | |
| 848 return _notAChild(node); | |
| 849 } | |
| 850 | |
| 851 @override | |
| 852 AstNode visitExpressionStatement(ExpressionStatement node) { | |
| 853 if (identical(_oldNode, node.expression)) { | |
| 854 return _parser.parseExpression2(); | |
| 855 } | |
| 856 return _notAChild(node); | |
| 857 } | |
| 858 | |
| 859 @override | |
| 860 AstNode visitExtendsClause(ExtendsClause node) { | |
| 861 if (identical(_oldNode, node.superclass)) { | |
| 862 return _parser.parseTypeName(); | |
| 863 } | |
| 864 return _notAChild(node); | |
| 865 } | |
| 866 | |
| 867 @override | |
| 868 AstNode visitFieldDeclaration(FieldDeclaration node) { | |
| 869 if (identical(_oldNode, node.documentationComment)) { | |
| 870 throw new InsufficientContextException(); | |
| 871 } else if (node.metadata.contains(_oldNode)) { | |
| 872 return _parser.parseAnnotation(); | |
| 873 } else if (identical(_oldNode, node.fields)) { | |
| 874 throw new InsufficientContextException(); | |
| 875 } | |
| 876 return _notAChild(node); | |
| 877 } | |
| 878 | |
| 879 @override | |
| 880 AstNode visitFieldFormalParameter(FieldFormalParameter node) { | |
| 881 if (identical(_oldNode, node.documentationComment)) { | |
| 882 throw new InsufficientContextException(); | |
| 883 } else if (node.metadata.contains(_oldNode)) { | |
| 884 return _parser.parseAnnotation(); | |
| 885 } else if (identical(_oldNode, node.type)) { | |
| 886 return _parser.parseTypeName(); | |
| 887 } else if (identical(_oldNode, node.identifier)) { | |
| 888 return _parser.parseSimpleIdentifier(); | |
| 889 } else if (identical(_oldNode, node.parameters)) { | |
| 890 return _parser.parseFormalParameterList(); | |
| 891 } | |
| 892 return _notAChild(node); | |
| 893 } | |
| 894 | |
| 895 @override | |
| 896 AstNode visitForEachStatement(ForEachStatement node) { | |
| 897 if (identical(_oldNode, node.loopVariable)) { | |
| 898 throw new InsufficientContextException(); | |
| 899 //return parser.parseDeclaredIdentifier(); | |
| 900 } else if (identical(_oldNode, node.identifier)) { | |
| 901 return _parser.parseSimpleIdentifier(); | |
| 902 } else if (identical(_oldNode, node.body)) { | |
| 903 return _parser.parseStatement2(); | |
| 904 } | |
| 905 return _notAChild(node); | |
| 906 } | |
| 907 | |
| 908 @override | |
| 909 AstNode visitFormalParameterList(FormalParameterList node) { | |
| 910 // We don't know which kind of parameter to parse. | |
| 911 throw new InsufficientContextException(); | |
| 912 } | |
| 913 | |
| 914 @override | |
| 915 AstNode visitForStatement(ForStatement node) { | |
| 916 if (identical(_oldNode, node.variables)) { | |
| 917 throw new InsufficientContextException(); | |
| 918 } else if (identical(_oldNode, node.initialization)) { | |
| 919 throw new InsufficientContextException(); | |
| 920 } else if (identical(_oldNode, node.condition)) { | |
| 921 return _parser.parseExpression2(); | |
| 922 } else if (node.updaters.contains(_oldNode)) { | |
| 923 return _parser.parseExpression2(); | |
| 924 } else if (identical(_oldNode, node.body)) { | |
| 925 return _parser.parseStatement2(); | |
| 926 } | |
| 927 return _notAChild(node); | |
| 928 } | |
| 929 | |
| 930 @override | |
| 931 AstNode visitFunctionDeclaration(FunctionDeclaration node) { | |
| 932 if (identical(_oldNode, node.documentationComment)) { | |
| 933 throw new InsufficientContextException(); | |
| 934 } else if (node.metadata.contains(_oldNode)) { | |
| 935 return _parser.parseAnnotation(); | |
| 936 } else if (identical(_oldNode, node.returnType)) { | |
| 937 return _parser.parseReturnType(); | |
| 938 } else if (identical(_oldNode, node.name)) { | |
| 939 return _parser.parseSimpleIdentifier(); | |
| 940 } else if (identical(_oldNode, node.functionExpression)) { | |
| 941 throw new InsufficientContextException(); | |
| 942 } | |
| 943 return _notAChild(node); | |
| 944 } | |
| 945 | |
| 946 @override | |
| 947 AstNode visitFunctionDeclarationStatement(FunctionDeclarationStatement node) { | |
| 948 if (identical(_oldNode, node.functionDeclaration)) { | |
| 949 throw new InsufficientContextException(); | |
| 950 } | |
| 951 return _notAChild(node); | |
| 952 } | |
| 953 | |
| 954 @override | |
| 955 AstNode visitFunctionExpression(FunctionExpression node) { | |
| 956 if (identical(_oldNode, node.parameters)) { | |
| 957 return _parser.parseFormalParameterList(); | |
| 958 } else if (identical(_oldNode, node.body)) { | |
| 959 throw new InsufficientContextException(); | |
| 960 } | |
| 961 return _notAChild(node); | |
| 962 } | |
| 963 | |
| 964 @override | |
| 965 AstNode visitFunctionExpressionInvocation(FunctionExpressionInvocation node) { | |
| 966 if (identical(_oldNode, node.function)) { | |
| 967 throw new InsufficientContextException(); | |
| 968 } else if (identical(_oldNode, node.argumentList)) { | |
| 969 return _parser.parseArgumentList(); | |
| 970 } | |
| 971 return _notAChild(node); | |
| 972 } | |
| 973 | |
| 974 @override | |
| 975 AstNode visitFunctionTypeAlias(FunctionTypeAlias node) { | |
| 976 if (identical(_oldNode, node.documentationComment)) { | |
| 977 throw new InsufficientContextException(); | |
| 978 } else if (node.metadata.contains(_oldNode)) { | |
| 979 return _parser.parseAnnotation(); | |
| 980 } else if (identical(_oldNode, node.returnType)) { | |
| 981 return _parser.parseReturnType(); | |
| 982 } else if (identical(_oldNode, node.name)) { | |
| 983 return _parser.parseSimpleIdentifier(); | |
| 984 } else if (identical(_oldNode, node.typeParameters)) { | |
| 985 return _parser.parseTypeParameterList(); | |
| 986 } else if (identical(_oldNode, node.parameters)) { | |
| 987 return _parser.parseFormalParameterList(); | |
| 988 } | |
| 989 return _notAChild(node); | |
| 990 } | |
| 991 | |
| 992 @override | |
| 993 AstNode visitFunctionTypedFormalParameter(FunctionTypedFormalParameter node) { | |
| 994 if (identical(_oldNode, node.documentationComment)) { | |
| 995 throw new InsufficientContextException(); | |
| 996 } else if (node.metadata.contains(_oldNode)) { | |
| 997 return _parser.parseAnnotation(); | |
| 998 } else if (identical(_oldNode, node.returnType)) { | |
| 999 return _parser.parseReturnType(); | |
| 1000 } else if (identical(_oldNode, node.identifier)) { | |
| 1001 return _parser.parseSimpleIdentifier(); | |
| 1002 } else if (identical(_oldNode, node.parameters)) { | |
| 1003 return _parser.parseFormalParameterList(); | |
| 1004 } | |
| 1005 return _notAChild(node); | |
| 1006 } | |
| 1007 | |
| 1008 @override | |
| 1009 AstNode visitHideCombinator(HideCombinator node) { | |
| 1010 if (node.hiddenNames.contains(_oldNode)) { | |
| 1011 return _parser.parseSimpleIdentifier(); | |
| 1012 } | |
| 1013 return _notAChild(node); | |
| 1014 } | |
| 1015 | |
| 1016 @override | |
| 1017 AstNode visitIfStatement(IfStatement node) { | |
| 1018 if (identical(_oldNode, node.condition)) { | |
| 1019 return _parser.parseExpression2(); | |
| 1020 } else if (identical(_oldNode, node.thenStatement)) { | |
| 1021 return _parser.parseStatement2(); | |
| 1022 } else if (identical(_oldNode, node.elseStatement)) { | |
| 1023 return _parser.parseStatement2(); | |
| 1024 } | |
| 1025 return _notAChild(node); | |
| 1026 } | |
| 1027 | |
| 1028 @override | |
| 1029 AstNode visitImplementsClause(ImplementsClause node) { | |
| 1030 if (node.interfaces.contains(node)) { | |
| 1031 return _parser.parseTypeName(); | |
| 1032 } | |
| 1033 return _notAChild(node); | |
| 1034 } | |
| 1035 | |
| 1036 @override | |
| 1037 AstNode visitImportDirective(ImportDirective node) { | |
| 1038 if (identical(_oldNode, node.documentationComment)) { | |
| 1039 throw new InsufficientContextException(); | |
| 1040 } else if (node.metadata.contains(_oldNode)) { | |
| 1041 return _parser.parseAnnotation(); | |
| 1042 } else if (identical(_oldNode, node.uri)) { | |
| 1043 return _parser.parseStringLiteral(); | |
| 1044 } else if (identical(_oldNode, node.prefix)) { | |
| 1045 return _parser.parseSimpleIdentifier(); | |
| 1046 } else if (node.combinators.contains(_oldNode)) { | |
| 1047 return _parser.parseCombinator(); | |
| 1048 } | |
| 1049 return _notAChild(node); | |
| 1050 } | |
| 1051 | |
| 1052 @override | |
| 1053 AstNode visitIndexExpression(IndexExpression node) { | |
| 1054 if (identical(_oldNode, node.target)) { | |
| 1055 throw new InsufficientContextException(); | |
| 1056 } else if (identical(_oldNode, node.index)) { | |
| 1057 return _parser.parseExpression2(); | |
| 1058 } | |
| 1059 return _notAChild(node); | |
| 1060 } | |
| 1061 | |
| 1062 @override | |
| 1063 AstNode visitInstanceCreationExpression(InstanceCreationExpression node) { | |
| 1064 if (identical(_oldNode, node.constructorName)) { | |
| 1065 return _parser.parseConstructorName(); | |
| 1066 } else if (identical(_oldNode, node.argumentList)) { | |
| 1067 return _parser.parseArgumentList(); | |
| 1068 } | |
| 1069 return _notAChild(node); | |
| 1070 } | |
| 1071 | |
| 1072 @override | |
| 1073 AstNode visitIntegerLiteral(IntegerLiteral node) => _notAChild(node); | |
| 1074 | |
| 1075 @override | |
| 1076 AstNode visitInterpolationExpression(InterpolationExpression node) { | |
| 1077 if (identical(_oldNode, node.expression)) { | |
| 1078 if (node.leftBracket == null) { | |
| 1079 throw new InsufficientContextException(); | |
| 1080 //return parser.parseThisOrSimpleIdentifier(); | |
| 1081 } | |
| 1082 return _parser.parseExpression2(); | |
| 1083 } | |
| 1084 return _notAChild(node); | |
| 1085 } | |
| 1086 | |
| 1087 @override | |
| 1088 AstNode visitInterpolationString(InterpolationString node) { | |
| 1089 throw new InsufficientContextException(); | |
| 1090 } | |
| 1091 | |
| 1092 @override | |
| 1093 AstNode visitIsExpression(IsExpression node) { | |
| 1094 if (identical(_oldNode, node.expression)) { | |
| 1095 return _parser.parseBitwiseOrExpression(); | |
| 1096 } else if (identical(_oldNode, node.type)) { | |
| 1097 return _parser.parseTypeName(); | |
| 1098 } | |
| 1099 return _notAChild(node); | |
| 1100 } | |
| 1101 | |
| 1102 @override | |
| 1103 AstNode visitLabel(Label node) { | |
| 1104 if (identical(_oldNode, node.label)) { | |
| 1105 return _parser.parseSimpleIdentifier(); | |
| 1106 } | |
| 1107 return _notAChild(node); | |
| 1108 } | |
| 1109 | |
| 1110 @override | |
| 1111 AstNode visitLabeledStatement(LabeledStatement node) { | |
| 1112 if (node.labels.contains(_oldNode)) { | |
| 1113 return _parser.parseLabel(); | |
| 1114 } else if (identical(_oldNode, node.statement)) { | |
| 1115 return _parser.parseStatement2(); | |
| 1116 } | |
| 1117 return _notAChild(node); | |
| 1118 } | |
| 1119 | |
| 1120 @override | |
| 1121 AstNode visitLibraryDirective(LibraryDirective node) { | |
| 1122 if (identical(_oldNode, node.documentationComment)) { | |
| 1123 throw new InsufficientContextException(); | |
| 1124 } else if (node.metadata.contains(_oldNode)) { | |
| 1125 return _parser.parseAnnotation(); | |
| 1126 } else if (identical(_oldNode, node.name)) { | |
| 1127 return _parser.parseLibraryIdentifier(); | |
| 1128 } | |
| 1129 return _notAChild(node); | |
| 1130 } | |
| 1131 | |
| 1132 @override | |
| 1133 AstNode visitLibraryIdentifier(LibraryIdentifier node) { | |
| 1134 if (node.components.contains(_oldNode)) { | |
| 1135 return _parser.parseSimpleIdentifier(); | |
| 1136 } | |
| 1137 return _notAChild(node); | |
| 1138 } | |
| 1139 | |
| 1140 @override | |
| 1141 AstNode visitListLiteral(ListLiteral node) { | |
| 1142 if (identical(_oldNode, node.typeArguments)) { | |
| 1143 return _parser.parseTypeArgumentList(); | |
| 1144 } else if (node.elements.contains(_oldNode)) { | |
| 1145 return _parser.parseExpression2(); | |
| 1146 } | |
| 1147 return _notAChild(node); | |
| 1148 } | |
| 1149 | |
| 1150 @override | |
| 1151 AstNode visitMapLiteral(MapLiteral node) { | |
| 1152 if (identical(_oldNode, node.typeArguments)) { | |
| 1153 return _parser.parseTypeArgumentList(); | |
| 1154 } else if (node.entries.contains(_oldNode)) { | |
| 1155 return _parser.parseMapLiteralEntry(); | |
| 1156 } | |
| 1157 return _notAChild(node); | |
| 1158 } | |
| 1159 | |
| 1160 @override | |
| 1161 AstNode visitMapLiteralEntry(MapLiteralEntry node) { | |
| 1162 if (identical(_oldNode, node.key)) { | |
| 1163 return _parser.parseExpression2(); | |
| 1164 } else if (identical(_oldNode, node.value)) { | |
| 1165 return _parser.parseExpression2(); | |
| 1166 } | |
| 1167 return _notAChild(node); | |
| 1168 } | |
| 1169 | |
| 1170 @override | |
| 1171 AstNode visitMethodDeclaration(MethodDeclaration node) { | |
| 1172 if (identical(_oldNode, node.documentationComment)) { | |
| 1173 throw new InsufficientContextException(); | |
| 1174 } else if (node.metadata.contains(_oldNode)) { | |
| 1175 return _parser.parseAnnotation(); | |
| 1176 } else if (identical(_oldNode, node.returnType)) { | |
| 1177 throw new InsufficientContextException(); | |
| 1178 //return parser.parseTypeName(); | |
| 1179 //return parser.parseReturnType(); | |
| 1180 } else if (identical(_oldNode, node.name)) { | |
| 1181 if (node.operatorKeyword != null) { | |
| 1182 throw new InsufficientContextException(); | |
| 1183 } | |
| 1184 return _parser.parseSimpleIdentifier(); | |
| 1185 } else if (identical(_oldNode, node.body)) { | |
| 1186 //return parser.parseFunctionBody(); | |
| 1187 throw new InsufficientContextException(); | |
| 1188 } else if (identical(_oldNode, node.parameters)) { | |
| 1189 // TODO(paulberry): if we want errors to be correct, we'll need to also | |
| 1190 // call _validateFormalParameterList, and sometimes | |
| 1191 // _validateModifiersForGetterOrSetterOrMethod. | |
| 1192 return _parser.parseFormalParameterList(); | |
| 1193 } | |
| 1194 return _notAChild(node); | |
| 1195 } | |
| 1196 | |
| 1197 @override | |
| 1198 AstNode visitMethodInvocation(MethodInvocation node) { | |
| 1199 if (identical(_oldNode, node.target)) { | |
| 1200 throw new IncrementalParseException(); | |
| 1201 } else if (identical(_oldNode, node.methodName)) { | |
| 1202 return _parser.parseSimpleIdentifier(); | |
| 1203 } else if (identical(_oldNode, node.argumentList)) { | |
| 1204 return _parser.parseArgumentList(); | |
| 1205 } | |
| 1206 return _notAChild(node); | |
| 1207 } | |
| 1208 | |
| 1209 @override | |
| 1210 AstNode visitNamedExpression(NamedExpression node) { | |
| 1211 if (identical(_oldNode, node.name)) { | |
| 1212 return _parser.parseLabel(); | |
| 1213 } else if (identical(_oldNode, node.expression)) { | |
| 1214 return _parser.parseExpression2(); | |
| 1215 } | |
| 1216 return _notAChild(node); | |
| 1217 } | |
| 1218 | |
| 1219 @override | |
| 1220 AstNode visitNativeClause(NativeClause node) { | |
| 1221 if (identical(_oldNode, node.name)) { | |
| 1222 return _parser.parseStringLiteral(); | |
| 1223 } | |
| 1224 return _notAChild(node); | |
| 1225 } | |
| 1226 | |
| 1227 @override | |
| 1228 AstNode visitNativeFunctionBody(NativeFunctionBody node) { | |
| 1229 if (identical(_oldNode, node.stringLiteral)) { | |
| 1230 return _parser.parseStringLiteral(); | |
| 1231 } | |
| 1232 return _notAChild(node); | |
| 1233 } | |
| 1234 | |
| 1235 @override | |
| 1236 AstNode visitNullLiteral(NullLiteral node) => _notAChild(node); | |
| 1237 | |
| 1238 @override | |
| 1239 AstNode visitParenthesizedExpression(ParenthesizedExpression node) { | |
| 1240 if (identical(_oldNode, node.expression)) { | |
| 1241 return _parser.parseExpression2(); | |
| 1242 } | |
| 1243 return _notAChild(node); | |
| 1244 } | |
| 1245 | |
| 1246 @override | |
| 1247 AstNode visitPartDirective(PartDirective node) { | |
| 1248 if (identical(_oldNode, node.documentationComment)) { | |
| 1249 throw new InsufficientContextException(); | |
| 1250 } else if (node.metadata.contains(_oldNode)) { | |
| 1251 return _parser.parseAnnotation(); | |
| 1252 } else if (identical(_oldNode, node.uri)) { | |
| 1253 return _parser.parseStringLiteral(); | |
| 1254 } | |
| 1255 return _notAChild(node); | |
| 1256 } | |
| 1257 | |
| 1258 @override | |
| 1259 AstNode visitPartOfDirective(PartOfDirective node) { | |
| 1260 if (identical(_oldNode, node.documentationComment)) { | |
| 1261 throw new InsufficientContextException(); | |
| 1262 } else if (node.metadata.contains(_oldNode)) { | |
| 1263 return _parser.parseAnnotation(); | |
| 1264 } else if (identical(_oldNode, node.libraryName)) { | |
| 1265 return _parser.parseLibraryIdentifier(); | |
| 1266 } | |
| 1267 return _notAChild(node); | |
| 1268 } | |
| 1269 | |
| 1270 @override | |
| 1271 AstNode visitPostfixExpression(PostfixExpression node) { | |
| 1272 if (identical(_oldNode, node.operand)) { | |
| 1273 throw new InsufficientContextException(); | |
| 1274 } | |
| 1275 return _notAChild(node); | |
| 1276 } | |
| 1277 | |
| 1278 @override | |
| 1279 AstNode visitPrefixedIdentifier(PrefixedIdentifier node) { | |
| 1280 if (identical(_oldNode, node.prefix)) { | |
| 1281 return _parser.parseSimpleIdentifier(); | |
| 1282 } else if (identical(_oldNode, node.identifier)) { | |
| 1283 return _parser.parseSimpleIdentifier(); | |
| 1284 } | |
| 1285 return _notAChild(node); | |
| 1286 } | |
| 1287 | |
| 1288 @override | |
| 1289 AstNode visitPrefixExpression(PrefixExpression node) { | |
| 1290 if (identical(_oldNode, node.operand)) { | |
| 1291 throw new InsufficientContextException(); | |
| 1292 } | |
| 1293 return _notAChild(node); | |
| 1294 } | |
| 1295 | |
| 1296 @override | |
| 1297 AstNode visitPropertyAccess(PropertyAccess node) { | |
| 1298 if (identical(_oldNode, node.target)) { | |
| 1299 throw new InsufficientContextException(); | |
| 1300 } else if (identical(_oldNode, node.propertyName)) { | |
| 1301 return _parser.parseSimpleIdentifier(); | |
| 1302 } | |
| 1303 return _notAChild(node); | |
| 1304 } | |
| 1305 | |
| 1306 @override | |
| 1307 AstNode visitRedirectingConstructorInvocation( | |
| 1308 RedirectingConstructorInvocation node) { | |
| 1309 if (identical(_oldNode, node.constructorName)) { | |
| 1310 return _parser.parseSimpleIdentifier(); | |
| 1311 } else if (identical(_oldNode, node.argumentList)) { | |
| 1312 return _parser.parseArgumentList(); | |
| 1313 } | |
| 1314 return _notAChild(node); | |
| 1315 } | |
| 1316 | |
| 1317 @override | |
| 1318 AstNode visitRethrowExpression(RethrowExpression node) => _notAChild(node); | |
| 1319 | |
| 1320 @override | |
| 1321 AstNode visitReturnStatement(ReturnStatement node) { | |
| 1322 if (identical(_oldNode, node.expression)) { | |
| 1323 return _parser.parseExpression2(); | |
| 1324 } | |
| 1325 return _notAChild(node); | |
| 1326 } | |
| 1327 | |
| 1328 @override | |
| 1329 AstNode visitScriptTag(ScriptTag node) => _notAChild(node); | |
| 1330 | |
| 1331 @override | |
| 1332 AstNode visitShowCombinator(ShowCombinator node) { | |
| 1333 if (node.shownNames.contains(_oldNode)) { | |
| 1334 return _parser.parseSimpleIdentifier(); | |
| 1335 } | |
| 1336 return _notAChild(node); | |
| 1337 } | |
| 1338 | |
| 1339 @override | |
| 1340 AstNode visitSimpleFormalParameter(SimpleFormalParameter node) { | |
| 1341 if (identical(_oldNode, node.documentationComment)) { | |
| 1342 throw new InsufficientContextException(); | |
| 1343 } else if (node.metadata.contains(_oldNode)) { | |
| 1344 return _parser.parseAnnotation(); | |
| 1345 } else if (identical(_oldNode, node.type)) { | |
| 1346 throw new InsufficientContextException(); | |
| 1347 } else if (identical(_oldNode, node.identifier)) { | |
| 1348 throw new InsufficientContextException(); | |
| 1349 } | |
| 1350 return _notAChild(node); | |
| 1351 } | |
| 1352 | |
| 1353 @override | |
| 1354 AstNode visitSimpleIdentifier(SimpleIdentifier node) => _notAChild(node); | |
| 1355 | |
| 1356 @override | |
| 1357 AstNode visitSimpleStringLiteral(SimpleStringLiteral node) => | |
| 1358 _notAChild(node); | |
| 1359 | |
| 1360 @override | |
| 1361 AstNode visitStringInterpolation(StringInterpolation node) { | |
| 1362 if (node.elements.contains(_oldNode)) { | |
| 1363 throw new InsufficientContextException(); | |
| 1364 } | |
| 1365 return _notAChild(node); | |
| 1366 } | |
| 1367 | |
| 1368 @override | |
| 1369 AstNode visitSuperConstructorInvocation(SuperConstructorInvocation node) { | |
| 1370 if (identical(_oldNode, node.constructorName)) { | |
| 1371 return _parser.parseSimpleIdentifier(); | |
| 1372 } else if (identical(_oldNode, node.argumentList)) { | |
| 1373 return _parser.parseArgumentList(); | |
| 1374 } | |
| 1375 return _notAChild(node); | |
| 1376 } | |
| 1377 | |
| 1378 @override | |
| 1379 AstNode visitSuperExpression(SuperExpression node) => _notAChild(node); | |
| 1380 | |
| 1381 @override | |
| 1382 AstNode visitSwitchCase(SwitchCase node) { | |
| 1383 if (node.labels.contains(_oldNode)) { | |
| 1384 return _parser.parseLabel(); | |
| 1385 } else if (identical(_oldNode, node.expression)) { | |
| 1386 return _parser.parseExpression2(); | |
| 1387 } else if (node.statements.contains(_oldNode)) { | |
| 1388 return _parser.parseStatement2(); | |
| 1389 } | |
| 1390 return _notAChild(node); | |
| 1391 } | |
| 1392 | |
| 1393 @override | |
| 1394 AstNode visitSwitchDefault(SwitchDefault node) { | |
| 1395 if (node.labels.contains(_oldNode)) { | |
| 1396 return _parser.parseLabel(); | |
| 1397 } else if (node.statements.contains(_oldNode)) { | |
| 1398 return _parser.parseStatement2(); | |
| 1399 } | |
| 1400 return _notAChild(node); | |
| 1401 } | |
| 1402 | |
| 1403 @override | |
| 1404 AstNode visitSwitchStatement(SwitchStatement node) { | |
| 1405 if (identical(_oldNode, node.expression)) { | |
| 1406 return _parser.parseExpression2(); | |
| 1407 } else if (node.members.contains(_oldNode)) { | |
| 1408 throw new InsufficientContextException(); | |
| 1409 } | |
| 1410 return _notAChild(node); | |
| 1411 } | |
| 1412 | |
| 1413 @override | |
| 1414 AstNode visitSymbolLiteral(SymbolLiteral node) => _notAChild(node); | |
| 1415 | |
| 1416 @override | |
| 1417 AstNode visitThisExpression(ThisExpression node) => _notAChild(node); | |
| 1418 | |
| 1419 @override | |
| 1420 AstNode visitThrowExpression(ThrowExpression node) { | |
| 1421 if (identical(_oldNode, node.expression)) { | |
| 1422 if (_isCascadeAllowedInThrow(node)) { | |
| 1423 return _parser.parseExpression2(); | |
| 1424 } | |
| 1425 return _parser.parseExpressionWithoutCascade(); | |
| 1426 } | |
| 1427 return _notAChild(node); | |
| 1428 } | |
| 1429 | |
| 1430 @override | |
| 1431 AstNode visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) { | |
| 1432 if (identical(_oldNode, node.documentationComment)) { | |
| 1433 throw new InsufficientContextException(); | |
| 1434 } else if (node.metadata.contains(_oldNode)) { | |
| 1435 return _parser.parseAnnotation(); | |
| 1436 } else if (identical(_oldNode, node.variables)) { | |
| 1437 throw new InsufficientContextException(); | |
| 1438 } | |
| 1439 return _notAChild(node); | |
| 1440 } | |
| 1441 | |
| 1442 @override | |
| 1443 AstNode visitTryStatement(TryStatement node) { | |
| 1444 if (identical(_oldNode, node.body)) { | |
| 1445 return _parser.parseBlock(); | |
| 1446 } else if (node.catchClauses.contains(_oldNode)) { | |
| 1447 throw new InsufficientContextException(); | |
| 1448 } else if (identical(_oldNode, node.finallyBlock)) { | |
| 1449 throw new InsufficientContextException(); | |
| 1450 } | |
| 1451 return _notAChild(node); | |
| 1452 } | |
| 1453 | |
| 1454 @override | |
| 1455 AstNode visitTypeArgumentList(TypeArgumentList node) { | |
| 1456 if (node.arguments.contains(_oldNode)) { | |
| 1457 return _parser.parseTypeName(); | |
| 1458 } | |
| 1459 return _notAChild(node); | |
| 1460 } | |
| 1461 | |
| 1462 @override | |
| 1463 AstNode visitTypeName(TypeName node) { | |
| 1464 if (identical(_oldNode, node.name)) { | |
| 1465 return _parser.parsePrefixedIdentifier(); | |
| 1466 } else if (identical(_oldNode, node.typeArguments)) { | |
| 1467 return _parser.parseTypeArgumentList(); | |
| 1468 } | |
| 1469 return _notAChild(node); | |
| 1470 } | |
| 1471 | |
| 1472 @override | |
| 1473 AstNode visitTypeParameter(TypeParameter node) { | |
| 1474 if (identical(_oldNode, node.documentationComment)) { | |
| 1475 throw new InsufficientContextException(); | |
| 1476 } else if (node.metadata.contains(_oldNode)) { | |
| 1477 return _parser.parseAnnotation(); | |
| 1478 } else if (identical(_oldNode, node.name)) { | |
| 1479 return _parser.parseSimpleIdentifier(); | |
| 1480 } else if (identical(_oldNode, node.bound)) { | |
| 1481 return _parser.parseTypeName(); | |
| 1482 } | |
| 1483 return _notAChild(node); | |
| 1484 } | |
| 1485 | |
| 1486 @override | |
| 1487 AstNode visitTypeParameterList(TypeParameterList node) { | |
| 1488 if (node.typeParameters.contains(node)) { | |
| 1489 return _parser.parseTypeParameter(); | |
| 1490 } | |
| 1491 return _notAChild(node); | |
| 1492 } | |
| 1493 | |
| 1494 @override | |
| 1495 AstNode visitVariableDeclaration(VariableDeclaration node) { | |
| 1496 if (identical(_oldNode, node.documentationComment)) { | |
| 1497 throw new InsufficientContextException(); | |
| 1498 } else if (node.metadata.contains(_oldNode)) { | |
| 1499 return _parser.parseAnnotation(); | |
| 1500 } else if (identical(_oldNode, node.name)) { | |
| 1501 throw new InsufficientContextException(); | |
| 1502 } else if (identical(_oldNode, node.initializer)) { | |
| 1503 throw new InsufficientContextException(); | |
| 1504 } | |
| 1505 return _notAChild(node); | |
| 1506 } | |
| 1507 | |
| 1508 @override | |
| 1509 AstNode visitVariableDeclarationList(VariableDeclarationList node) { | |
| 1510 if (identical(_oldNode, node.documentationComment)) { | |
| 1511 throw new InsufficientContextException(); | |
| 1512 } else if (node.metadata.contains(_oldNode)) { | |
| 1513 return _parser.parseAnnotation(); | |
| 1514 } else if (identical(_oldNode, node.type)) { | |
| 1515 // There is not enough context to know whether we should reparse the type | |
| 1516 // using parseReturnType() (which allows 'void') or parseTypeName() | |
| 1517 // (which doesn't). Note that even though the language disallows | |
| 1518 // variables of type 'void', the parser sometimes accepts them in the | |
| 1519 // course of error recovery (e.g. "class C { void v; }" | |
| 1520 throw new InsufficientContextException(); | |
| 1521 } else if (node.variables.contains(_oldNode)) { | |
| 1522 throw new InsufficientContextException(); | |
| 1523 } | |
| 1524 return _notAChild(node); | |
| 1525 } | |
| 1526 | |
| 1527 @override | |
| 1528 AstNode visitVariableDeclarationStatement(VariableDeclarationStatement node) { | |
| 1529 if (identical(_oldNode, node.variables)) { | |
| 1530 throw new InsufficientContextException(); | |
| 1531 } | |
| 1532 return _notAChild(node); | |
| 1533 } | |
| 1534 | |
| 1535 @override | |
| 1536 AstNode visitWhileStatement(WhileStatement node) { | |
| 1537 if (identical(_oldNode, node.condition)) { | |
| 1538 return _parser.parseExpression2(); | |
| 1539 } else if (identical(_oldNode, node.body)) { | |
| 1540 return _parser.parseStatement2(); | |
| 1541 } | |
| 1542 return _notAChild(node); | |
| 1543 } | |
| 1544 | |
| 1545 @override | |
| 1546 AstNode visitWithClause(WithClause node) { | |
| 1547 if (node.mixinTypes.contains(node)) { | |
| 1548 return _parser.parseTypeName(); | |
| 1549 } | |
| 1550 return _notAChild(node); | |
| 1551 } | |
| 1552 | |
| 1553 @override | |
| 1554 AstNode visitYieldStatement(YieldStatement node) { | |
| 1555 if (identical(_oldNode, node.expression)) { | |
| 1556 return _parser.parseExpression2(); | |
| 1557 } | |
| 1558 return _notAChild(node); | |
| 1559 } | |
| 1560 | |
| 1561 /** | |
| 1562 * Return `true` if the given assignment [expression] can have a cascade | |
| 1563 * expression on the right-hand side. | |
| 1564 */ | |
| 1565 bool _isCascadeAllowedInAssignment(AssignmentExpression expression) { | |
| 1566 // TODO(brianwilkerson) Implement this method. | |
| 1567 throw new InsufficientContextException(); | |
| 1568 } | |
| 1569 | |
| 1570 /** | |
| 1571 * Return `true` if the given throw [expression] can have a cascade | |
| 1572 * expression. | |
| 1573 */ | |
| 1574 bool _isCascadeAllowedInThrow(ThrowExpression expression) { | |
| 1575 // TODO(brianwilkerson) Implement this method. | |
| 1576 throw new InsufficientContextException(); | |
| 1577 } | |
| 1578 | |
| 1579 /** | |
| 1580 * Throw an exception indicating that the visited [node] was not the parent of | |
| 1581 * the node to be replaced. | |
| 1582 */ | |
| 1583 AstNode _notAChild(AstNode node) { | |
| 1584 throw new IncrementalParseException.con1( | |
| 1585 "Internal error: the visited node (a ${node.runtimeType}) was not the pa
rent of the node to be replaced (a ${_oldNode.runtimeType})"); | |
| 1586 } | |
| 1587 } | |
| 1588 | |
| 1589 /** | |
| 1590 * An exception that occurred while attempting to parse a replacement for a | |
| 1591 * specified node in an existing AST structure. | |
| 1592 */ | |
| 1593 class IncrementalParseException extends RuntimeException { | |
| 1594 /** | |
| 1595 * Initialize a newly created exception to have no message and to be its own | |
| 1596 * cause. | |
| 1597 */ | |
| 1598 IncrementalParseException() : super(); | |
| 1599 | |
| 1600 /** | |
| 1601 * Initialize a newly created exception to have the given [message] and to be | |
| 1602 * its own cause. | |
| 1603 */ | |
| 1604 IncrementalParseException.con1(String message) : super(message: message); | |
| 1605 | |
| 1606 /** | |
| 1607 * Initialize a newly created exception to have no message and to have the | |
| 1608 * given [cause]. | |
| 1609 */ | |
| 1610 IncrementalParseException.con2(Exception cause) : super(cause: cause); | |
| 1611 } | |
| 1612 | |
| 1613 /** | |
| 1614 * An object used to re-parse a single AST structure within a larger AST | |
| 1615 * structure. | |
| 1616 */ | |
| 1617 class IncrementalParser { | |
| 1618 /** | |
| 1619 * The source being parsed. | |
| 1620 */ | |
| 1621 final Source _source; | |
| 1622 | |
| 1623 /** | |
| 1624 * A map from old tokens to new tokens used during the cloning process. | |
| 1625 */ | |
| 1626 final TokenMap _tokenMap; | |
| 1627 | |
| 1628 /** | |
| 1629 * The error listener that will be informed of any errors that are found | |
| 1630 * during the parse. | |
| 1631 */ | |
| 1632 final AnalysisErrorListener _errorListener; | |
| 1633 | |
| 1634 /** | |
| 1635 * The node in the AST structure that contains the revised content. | |
| 1636 */ | |
| 1637 AstNode _updatedNode; | |
| 1638 | |
| 1639 /** | |
| 1640 * Initialize a newly created incremental parser to parse a portion of the | |
| 1641 * content of the given [_source]. The [_tokenMap] is a map from old tokens to | |
| 1642 * new tokens that is used during the cloning process. The [_errorListener] | |
| 1643 * will be informed of any errors that are found during the parse. | |
| 1644 */ | |
| 1645 IncrementalParser(this._source, this._tokenMap, this._errorListener); | |
| 1646 | |
| 1647 /** | |
| 1648 * Return the node in the AST structure that contains the revised content. | |
| 1649 */ | |
| 1650 AstNode get updatedNode => _updatedNode; | |
| 1651 | |
| 1652 /** | |
| 1653 * Given a range of tokens that were re-scanned, re-parse the minimum number | |
| 1654 * of tokens to produce a consistent AST structure. The range is represented | |
| 1655 * by the first and last tokens in the range. | |
| 1656 * | |
| 1657 * More specifically, the [leftToken] is the token in the new token stream | |
| 1658 * immediately to the left of the range of tokens that were inserted and the | |
| 1659 * [rightToken] is the token in the new token stream immediately to the right | |
| 1660 * of the range of tokens that were inserted. The [originalStart] and | |
| 1661 * [originalEnd] are the offsets in the original source of the first and last | |
| 1662 * characters that were modified. | |
| 1663 * | |
| 1664 * The tokens are assumed to be contained in the same token stream. | |
| 1665 */ | |
| 1666 AstNode reparse(AstNode originalStructure, Token leftToken, Token rightToken, | |
| 1667 int originalStart, int originalEnd) { | |
| 1668 AstNode oldNode = null; | |
| 1669 AstNode newNode = null; | |
| 1670 // | |
| 1671 // Find the first token that needs to be re-parsed. | |
| 1672 // | |
| 1673 Token firstToken = leftToken.next; | |
| 1674 if (identical(firstToken, rightToken)) { | |
| 1675 // If there are no new tokens, then we need to include at least one copied | |
| 1676 // node in the range. | |
| 1677 firstToken = leftToken; | |
| 1678 } | |
| 1679 // | |
| 1680 // Find the smallest AST node that encompasses the range of re-scanned | |
| 1681 // tokens. | |
| 1682 // | |
| 1683 if (originalEnd < originalStart) { | |
| 1684 oldNode = new NodeLocator(originalStart).searchWithin(originalStructure); | |
| 1685 } else { | |
| 1686 oldNode = new NodeLocator(originalStart, originalEnd) | |
| 1687 .searchWithin(originalStructure); | |
| 1688 } | |
| 1689 // | |
| 1690 // Find the token at which parsing is to begin. | |
| 1691 // | |
| 1692 int originalOffset = oldNode.offset; | |
| 1693 Token parseToken = _findTokenAt(firstToken, originalOffset); | |
| 1694 if (parseToken == null) { | |
| 1695 return null; | |
| 1696 } | |
| 1697 // | |
| 1698 // Parse the appropriate AST structure starting at the appropriate place. | |
| 1699 // | |
| 1700 Parser parser = new Parser(_source, _errorListener); | |
| 1701 parser.currentToken = parseToken; | |
| 1702 while (newNode == null) { | |
| 1703 AstNode parent = oldNode.parent; | |
| 1704 if (parent == null) { | |
| 1705 parseToken = _findFirstToken(parseToken); | |
| 1706 parser.currentToken = parseToken; | |
| 1707 return parser.parseCompilationUnit2(); | |
| 1708 } | |
| 1709 bool advanceToParent = false; | |
| 1710 try { | |
| 1711 IncrementalParseDispatcher dispatcher = | |
| 1712 new IncrementalParseDispatcher(parser, oldNode); | |
| 1713 IncrementalParseStateBuilder contextBuilder = | |
| 1714 new IncrementalParseStateBuilder(parser); | |
| 1715 contextBuilder.buildState(oldNode); | |
| 1716 newNode = parent.accept(dispatcher); | |
| 1717 // | |
| 1718 // Validate that the new node can replace the old node. | |
| 1719 // | |
| 1720 Token mappedToken = _tokenMap.get(oldNode.endToken.next); | |
| 1721 if (mappedToken == null || | |
| 1722 newNode == null || | |
| 1723 mappedToken.offset != newNode.endToken.next.offset || | |
| 1724 newNode.offset != oldNode.offset) { | |
| 1725 advanceToParent = true; | |
| 1726 } | |
| 1727 } on InsufficientContextException { | |
| 1728 advanceToParent = true; | |
| 1729 } catch (exception) { | |
| 1730 return null; | |
| 1731 } | |
| 1732 if (advanceToParent) { | |
| 1733 newNode = null; | |
| 1734 oldNode = parent; | |
| 1735 originalOffset = oldNode.offset; | |
| 1736 parseToken = _findTokenAt(parseToken, originalOffset); | |
| 1737 parser.currentToken = parseToken; | |
| 1738 } | |
| 1739 } | |
| 1740 _updatedNode = newNode; | |
| 1741 // | |
| 1742 // Replace the old node with the new node in a copy of the original AST | |
| 1743 // structure. | |
| 1744 // | |
| 1745 if (identical(oldNode, originalStructure)) { | |
| 1746 // We ended up re-parsing the whole structure, so there's no need for a | |
| 1747 // copy. | |
| 1748 ResolutionCopier.copyResolutionData(oldNode, newNode); | |
| 1749 return newNode; | |
| 1750 } | |
| 1751 ResolutionCopier.copyResolutionData(oldNode, newNode); | |
| 1752 IncrementalAstCloner cloner = | |
| 1753 new IncrementalAstCloner(oldNode, newNode, _tokenMap); | |
| 1754 return originalStructure.accept(cloner) as AstNode; | |
| 1755 } | |
| 1756 | |
| 1757 /** | |
| 1758 * Return the first (non-EOF) token in the token stream containing the | |
| 1759 * [firstToken]. | |
| 1760 */ | |
| 1761 Token _findFirstToken(Token firstToken) { | |
| 1762 while (firstToken.type != TokenType.EOF) { | |
| 1763 firstToken = firstToken.previous; | |
| 1764 } | |
| 1765 return firstToken.next; | |
| 1766 } | |
| 1767 | |
| 1768 /** | |
| 1769 * Find the token at or before the [firstToken] with the given [offset], or | |
| 1770 * `null` if there is no such token. | |
| 1771 */ | |
| 1772 Token _findTokenAt(Token firstToken, int offset) { | |
| 1773 while (firstToken.offset > offset && firstToken.type != TokenType.EOF) { | |
| 1774 firstToken = firstToken.previous; | |
| 1775 } | |
| 1776 return firstToken; | |
| 1777 } | |
| 1778 } | |
| 1779 | |
| 1780 /** | |
| 1781 * A visitor capable of inferring the correct parser state for incremental | |
| 1782 * parsing. This visitor visits each parent/child relationship in the chain of | |
| 1783 * ancestors of the node to be replaced (starting with the root of the parse | |
| 1784 * tree), updating the parser to the correct state for parsing the child of the | |
| 1785 * given parent. Once it has visited all of these relationships, the parser | |
| 1786 * will be in the correct state for reparsing the node to be replaced. | |
| 1787 */ | |
| 1788 class IncrementalParseStateBuilder extends SimpleAstVisitor { | |
| 1789 // TODO(paulberry): add support for other pieces of parser state (_inAsync, | |
| 1790 // _inGenerator, _inLoop, and _inSwitch). Note that _inLoop and _inSwitch | |
| 1791 // only affect error message generation. | |
| 1792 | |
| 1793 /** | |
| 1794 * The parser whose state should be built. | |
| 1795 */ | |
| 1796 final Parser _parser; | |
| 1797 | |
| 1798 /** | |
| 1799 * The child node in the parent/child relationship currently being visited. | |
| 1800 * (The corresponding parent is the node passed to the visit...() function.) | |
| 1801 */ | |
| 1802 AstNode _childNode; | |
| 1803 | |
| 1804 /** | |
| 1805 * Create an IncrementalParseStateBuilder which will build the correct state | |
| 1806 * for [_parser]. | |
| 1807 */ | |
| 1808 IncrementalParseStateBuilder(this._parser); | |
| 1809 | |
| 1810 /** | |
| 1811 * Build the correct parser state for parsing a replacement for [node]. | |
| 1812 */ | |
| 1813 void buildState(AstNode node) { | |
| 1814 List<AstNode> ancestors = <AstNode>[]; | |
| 1815 while (node != null) { | |
| 1816 ancestors.add(node); | |
| 1817 node = node.parent; | |
| 1818 } | |
| 1819 _parser._inInitializer = false; | |
| 1820 for (int i = ancestors.length - 2; i >= 0; i--) { | |
| 1821 _childNode = ancestors[i]; | |
| 1822 ancestors[i + 1].accept(this); | |
| 1823 } | |
| 1824 } | |
| 1825 | |
| 1826 @override | |
| 1827 void visitArgumentList(ArgumentList node) { | |
| 1828 _parser._inInitializer = false; | |
| 1829 } | |
| 1830 | |
| 1831 @override | |
| 1832 void visitConstructorFieldInitializer(ConstructorFieldInitializer node) { | |
| 1833 if (identical(_childNode, node.expression)) { | |
| 1834 _parser._inInitializer = true; | |
| 1835 } | |
| 1836 } | |
| 1837 | |
| 1838 @override | |
| 1839 void visitIndexExpression(IndexExpression node) { | |
| 1840 if (identical(_childNode, node.index)) { | |
| 1841 _parser._inInitializer = false; | |
| 1842 } | |
| 1843 } | |
| 1844 | |
| 1845 @override | |
| 1846 void visitInterpolationExpression(InterpolationExpression node) { | |
| 1847 if (identical(_childNode, node.expression)) { | |
| 1848 _parser._inInitializer = false; | |
| 1849 } | |
| 1850 } | |
| 1851 | |
| 1852 @override | |
| 1853 void visitListLiteral(ListLiteral node) { | |
| 1854 if (node.elements.contains(_childNode)) { | |
| 1855 _parser._inInitializer = false; | |
| 1856 } | |
| 1857 } | |
| 1858 | |
| 1859 @override | |
| 1860 void visitMapLiteral(MapLiteral node) { | |
| 1861 if (node.entries.contains(_childNode)) { | |
| 1862 _parser._inInitializer = false; | |
| 1863 } | |
| 1864 } | |
| 1865 | |
| 1866 @override | |
| 1867 void visitParenthesizedExpression(ParenthesizedExpression node) { | |
| 1868 if (identical(_childNode, node.expression)) { | |
| 1869 _parser._inInitializer = false; | |
| 1870 } | |
| 1871 } | |
| 1872 } | |
| 1873 | |
| 1874 /** | |
| 1875 * An exception indicating that an AST node cannot be re-parsed because there is | |
| 1876 * not enough context to know how to re-parse the node. Clients can attempt to | |
| 1877 * re-parse the parent of the node. | |
| 1878 */ | |
| 1879 class InsufficientContextException extends IncrementalParseException { | |
| 1880 /** | |
| 1881 * Initialize a newly created exception to have no message and to be its own | |
| 1882 * cause. | |
| 1883 */ | |
| 1884 InsufficientContextException() : super(); | |
| 1885 | |
| 1886 /** | |
| 1887 * Initialize a newly created exception to have the given [message] and to be | |
| 1888 * its own cause. | |
| 1889 */ | |
| 1890 InsufficientContextException.con1(String message) : super.con1(message); | |
| 1891 | |
| 1892 /** | |
| 1893 * Initialize a newly created exception to have no message and to have the | |
| 1894 * given [cause]. | |
| 1895 */ | |
| 1896 InsufficientContextException.con2(Exception cause) : super.con2(cause); | |
| 1897 } | |
| 1898 | |
| 1899 /** | |
| 1900 * Wrapper around [Function] which should be called with "target" and | |
| 1901 * "arguments". | |
| 1902 */ | |
| 1903 class MethodTrampoline { | |
| 1904 int parameterCount; | |
| 1905 Function trampoline; | |
| 1906 MethodTrampoline(this.parameterCount, this.trampoline); | |
| 1907 Object invoke(target, List arguments) { | |
| 1908 if (arguments.length != parameterCount) { | |
| 1909 throw new IllegalArgumentException( | |
| 1910 "${arguments.length} != $parameterCount"); | |
| 1911 } | |
| 1912 switch (parameterCount) { | |
| 1913 case 0: | |
| 1914 return trampoline(target); | |
| 1915 case 1: | |
| 1916 return trampoline(target, arguments[0]); | |
| 1917 case 2: | |
| 1918 return trampoline(target, arguments[0], arguments[1]); | |
| 1919 case 3: | |
| 1920 return trampoline(target, arguments[0], arguments[1], arguments[2]); | |
| 1921 case 4: | |
| 1922 return trampoline( | |
| 1923 target, arguments[0], arguments[1], arguments[2], arguments[3]); | |
| 1924 default: | |
| 1925 throw new IllegalArgumentException("Not implemented for > 4 arguments"); | |
| 1926 } | |
| 1927 } | |
| 1928 } | |
| 1929 | |
| 1930 /** | |
| 1931 * A simple data-holder for a method that needs to return multiple values. | |
| 1932 */ | |
| 1933 class Modifiers { | |
| 1934 /** | |
| 1935 * The token representing the keyword 'abstract', or `null` if the keyword was | |
| 1936 * not found. | |
| 1937 */ | |
| 1938 Token abstractKeyword; | |
| 1939 | |
| 1940 /** | |
| 1941 * The token representing the keyword 'const', or `null` if the keyword was | |
| 1942 * not found. | |
| 1943 */ | |
| 1944 Token constKeyword; | |
| 1945 | |
| 1946 /** | |
| 1947 * The token representing the keyword 'external', or `null` if the keyword was | |
| 1948 * not found. | |
| 1949 */ | |
| 1950 Token externalKeyword; | |
| 1951 | |
| 1952 /** | |
| 1953 * The token representing the keyword 'factory', or `null` if the keyword was | |
| 1954 * not found. | |
| 1955 */ | |
| 1956 Token factoryKeyword; | |
| 1957 | |
| 1958 /** | |
| 1959 * The token representing the keyword 'final', or `null` if the keyword was | |
| 1960 * not found. | |
| 1961 */ | |
| 1962 Token finalKeyword; | |
| 1963 | |
| 1964 /** | |
| 1965 * The token representing the keyword 'static', or `null` if the keyword was | |
| 1966 * not found. | |
| 1967 */ | |
| 1968 Token staticKeyword; | |
| 1969 | |
| 1970 /** | |
| 1971 * The token representing the keyword 'var', or `null` if the keyword was not | |
| 1972 * found. | |
| 1973 */ | |
| 1974 Token varKeyword; | |
| 1975 | |
| 1976 @override | |
| 1977 String toString() { | |
| 1978 StringBuffer buffer = new StringBuffer(); | |
| 1979 bool needsSpace = _appendKeyword(buffer, false, abstractKeyword); | |
| 1980 needsSpace = _appendKeyword(buffer, needsSpace, constKeyword); | |
| 1981 needsSpace = _appendKeyword(buffer, needsSpace, externalKeyword); | |
| 1982 needsSpace = _appendKeyword(buffer, needsSpace, factoryKeyword); | |
| 1983 needsSpace = _appendKeyword(buffer, needsSpace, finalKeyword); | |
| 1984 needsSpace = _appendKeyword(buffer, needsSpace, staticKeyword); | |
| 1985 _appendKeyword(buffer, needsSpace, varKeyword); | |
| 1986 return buffer.toString(); | |
| 1987 } | |
| 1988 | |
| 1989 /** | |
| 1990 * If the given [keyword] is not `null`, append it to the given [builder], | |
| 1991 * prefixing it with a space if [needsSpace] is `true`. Return `true` if | |
| 1992 * subsequent keywords need to be prefixed with a space. | |
| 1993 */ | |
| 1994 bool _appendKeyword(StringBuffer buffer, bool needsSpace, Token keyword) { | |
| 1995 if (keyword != null) { | |
| 1996 if (needsSpace) { | |
| 1997 buffer.writeCharCode(0x20); | |
| 1998 } | |
| 1999 buffer.write(keyword.lexeme); | |
| 2000 return true; | |
| 2001 } | |
| 2002 return needsSpace; | |
| 2003 } | |
| 2004 } | |
| 2005 | |
| 2006 /** | |
| 2007 * A parser used to parse tokens into an AST structure. | |
| 2008 */ | |
| 2009 class Parser { | |
| 2010 static String ASYNC = "async"; | |
| 2011 | |
| 2012 static String _AWAIT = "await"; | |
| 2013 | |
| 2014 static String _HIDE = "hide"; | |
| 2015 | |
| 2016 static String _OF = "of"; | |
| 2017 | |
| 2018 static String _ON = "on"; | |
| 2019 | |
| 2020 static String _NATIVE = "native"; | |
| 2021 | |
| 2022 static String _SHOW = "show"; | |
| 2023 | |
| 2024 static String SYNC = "sync"; | |
| 2025 | |
| 2026 static String _YIELD = "yield"; | |
| 2027 | |
| 2028 /** | |
| 2029 * The source being parsed. | |
| 2030 */ | |
| 2031 final Source _source; | |
| 2032 | |
| 2033 /** | |
| 2034 * The error listener that will be informed of any errors that are found | |
| 2035 * during the parse. | |
| 2036 */ | |
| 2037 final AnalysisErrorListener _errorListener; | |
| 2038 | |
| 2039 /** | |
| 2040 * An [errorListener] lock, if more than `0`, then errors are not reported. | |
| 2041 */ | |
| 2042 int _errorListenerLock = 0; | |
| 2043 | |
| 2044 /** | |
| 2045 * A flag indicating whether parser is to parse function bodies. | |
| 2046 */ | |
| 2047 bool _parseFunctionBodies = true; | |
| 2048 | |
| 2049 /** | |
| 2050 * The next token to be parsed. | |
| 2051 */ | |
| 2052 Token _currentToken; | |
| 2053 | |
| 2054 /** | |
| 2055 * A flag indicating whether the parser is currently in a function body marked | |
| 2056 * as being 'async'. | |
| 2057 */ | |
| 2058 bool _inAsync = false; | |
| 2059 | |
| 2060 /** | |
| 2061 * A flag indicating whether the parser is currently in a function body marked | |
| 2062 * as being 'async'. | |
| 2063 */ | |
| 2064 bool _inGenerator = false; | |
| 2065 | |
| 2066 /** | |
| 2067 * A flag indicating whether the parser is currently in the body of a loop. | |
| 2068 */ | |
| 2069 bool _inLoop = false; | |
| 2070 | |
| 2071 /** | |
| 2072 * A flag indicating whether the parser is currently in a switch statement. | |
| 2073 */ | |
| 2074 bool _inSwitch = false; | |
| 2075 | |
| 2076 /** | |
| 2077 * A flag indicating whether the parser is currently in a constructor field | |
| 2078 * initializer, with no intervening parens, braces, or brackets. | |
| 2079 */ | |
| 2080 bool _inInitializer = false; | |
| 2081 | |
| 2082 /** | |
| 2083 * A flag indicating whether the parser is to parse generic method syntax. | |
| 2084 */ | |
| 2085 bool parseGenericMethods = false; | |
| 2086 | |
| 2087 /** | |
| 2088 * Initialize a newly created parser to parse the content of the given | |
| 2089 * [_source] and to report any errors that are found to the given | |
| 2090 * [_errorListener]. | |
| 2091 */ | |
| 2092 Parser(this._source, this._errorListener); | |
| 2093 | |
| 2094 void set currentToken(Token currentToken) { | |
| 2095 this._currentToken = currentToken; | |
| 2096 } | |
| 2097 | |
| 2098 /** | |
| 2099 * Return `true` if the current token is the first token of a return type that | |
| 2100 * is followed by an identifier, possibly followed by a list of type | |
| 2101 * parameters, followed by a left-parenthesis. This is used by | |
| 2102 * [_parseTypeAlias] to determine whether or not to parse a return type. | |
| 2103 */ | |
| 2104 bool get hasReturnTypeInTypeAlias { | |
| 2105 Token next = _skipReturnType(_currentToken); | |
| 2106 if (next == null) { | |
| 2107 return false; | |
| 2108 } | |
| 2109 return _tokenMatchesIdentifier(next); | |
| 2110 } | |
| 2111 | |
| 2112 /** | |
| 2113 * Set whether the parser is to parse the async support. | |
| 2114 */ | |
| 2115 @deprecated | |
| 2116 void set parseAsync(bool parseAsync) { | |
| 2117 // Async support cannot be disabled | |
| 2118 } | |
| 2119 | |
| 2120 /** | |
| 2121 * Set whether the parser is to parse deferred libraries. | |
| 2122 */ | |
| 2123 @deprecated | |
| 2124 void set parseDeferredLibraries(bool parseDeferredLibraries) { | |
| 2125 // Deferred libraries support cannot be disabled | |
| 2126 } | |
| 2127 | |
| 2128 /** | |
| 2129 * Set whether the parser is to parse enum declarations. | |
| 2130 */ | |
| 2131 @deprecated | |
| 2132 void set parseEnum(bool parseEnum) { | |
| 2133 // Enum support cannot be disabled | |
| 2134 } | |
| 2135 | |
| 2136 /** | |
| 2137 * Set whether parser is to parse function bodies. | |
| 2138 */ | |
| 2139 void set parseFunctionBodies(bool parseFunctionBodies) { | |
| 2140 this._parseFunctionBodies = parseFunctionBodies; | |
| 2141 } | |
| 2142 | |
| 2143 /** | |
| 2144 * Advance to the next token in the token stream, making it the new current | |
| 2145 * token and return the token that was current before this method was invoked. | |
| 2146 */ | |
| 2147 Token getAndAdvance() { | |
| 2148 Token token = _currentToken; | |
| 2149 _advance(); | |
| 2150 return token; | |
| 2151 } | |
| 2152 | |
| 2153 /** | |
| 2154 * Parse an annotation. Return the annotation that was parsed. | |
| 2155 * | |
| 2156 * annotation ::= | |
| 2157 * '@' qualified ('.' identifier)? arguments? | |
| 2158 * | |
| 2159 */ | |
| 2160 Annotation parseAnnotation() { | |
| 2161 Token atSign = _expect(TokenType.AT); | |
| 2162 Identifier name = parsePrefixedIdentifier(); | |
| 2163 Token period = null; | |
| 2164 SimpleIdentifier constructorName = null; | |
| 2165 if (_matches(TokenType.PERIOD)) { | |
| 2166 period = getAndAdvance(); | |
| 2167 constructorName = parseSimpleIdentifier(); | |
| 2168 } | |
| 2169 ArgumentList arguments = null; | |
| 2170 if (_matches(TokenType.OPEN_PAREN)) { | |
| 2171 arguments = parseArgumentList(); | |
| 2172 } | |
| 2173 return new Annotation(atSign, name, period, constructorName, arguments); | |
| 2174 } | |
| 2175 | |
| 2176 /** | |
| 2177 * Parse an argument. Return the argument that was parsed. | |
| 2178 * | |
| 2179 * argument ::= | |
| 2180 * namedArgument | |
| 2181 * | expression | |
| 2182 * | |
| 2183 * namedArgument ::= | |
| 2184 * label expression | |
| 2185 */ | |
| 2186 Expression parseArgument() { | |
| 2187 // | |
| 2188 // Both namedArgument and expression can start with an identifier, but only | |
| 2189 // namedArgument can have an identifier followed by a colon. | |
| 2190 // | |
| 2191 if (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) { | |
| 2192 return new NamedExpression(parseLabel(), parseExpression2()); | |
| 2193 } else { | |
| 2194 return parseExpression2(); | |
| 2195 } | |
| 2196 } | |
| 2197 | |
| 2198 /** | |
| 2199 * Parse a list of arguments. Return the argument list that was parsed. | |
| 2200 * | |
| 2201 * arguments ::= | |
| 2202 * '(' argumentList? ')' | |
| 2203 * | |
| 2204 * argumentList ::= | |
| 2205 * namedArgument (',' namedArgument)* | |
| 2206 * | expressionList (',' namedArgument)* | |
| 2207 */ | |
| 2208 ArgumentList parseArgumentList() { | |
| 2209 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 2210 List<Expression> arguments = new List<Expression>(); | |
| 2211 if (_matches(TokenType.CLOSE_PAREN)) { | |
| 2212 return new ArgumentList(leftParenthesis, arguments, getAndAdvance()); | |
| 2213 } | |
| 2214 // | |
| 2215 // Even though unnamed arguments must all appear before any named arguments, | |
| 2216 // we allow them to appear in any order so that we can recover faster. | |
| 2217 // | |
| 2218 bool wasInInitializer = _inInitializer; | |
| 2219 _inInitializer = false; | |
| 2220 try { | |
| 2221 Expression argument = parseArgument(); | |
| 2222 arguments.add(argument); | |
| 2223 bool foundNamedArgument = argument is NamedExpression; | |
| 2224 bool generatedError = false; | |
| 2225 while (_optional(TokenType.COMMA)) { | |
| 2226 argument = parseArgument(); | |
| 2227 arguments.add(argument); | |
| 2228 if (foundNamedArgument) { | |
| 2229 bool blankArgument = | |
| 2230 argument is SimpleIdentifier && argument.name.isEmpty; | |
| 2231 if (!generatedError && | |
| 2232 !(argument is NamedExpression && !blankArgument)) { | |
| 2233 // Report the error, once, but allow the arguments to be in any | |
| 2234 // order in the AST. | |
| 2235 _reportErrorForCurrentToken( | |
| 2236 ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT); | |
| 2237 generatedError = true; | |
| 2238 } | |
| 2239 } else if (argument is NamedExpression) { | |
| 2240 foundNamedArgument = true; | |
| 2241 } | |
| 2242 } | |
| 2243 // TODO(brianwilkerson) Recovery: Look at the left parenthesis to see | |
| 2244 // whether there is a matching right parenthesis. If there is, then we're | |
| 2245 // more likely missing a comma and should go back to parsing arguments. | |
| 2246 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 2247 return new ArgumentList(leftParenthesis, arguments, rightParenthesis); | |
| 2248 } finally { | |
| 2249 _inInitializer = wasInInitializer; | |
| 2250 } | |
| 2251 } | |
| 2252 | |
| 2253 /** | |
| 2254 * Parse a bitwise or expression. Return the bitwise or expression that was | |
| 2255 * parsed. | |
| 2256 * | |
| 2257 * bitwiseOrExpression ::= | |
| 2258 * bitwiseXorExpression ('|' bitwiseXorExpression)* | |
| 2259 * | 'super' ('|' bitwiseXorExpression)+ | |
| 2260 */ | |
| 2261 Expression parseBitwiseOrExpression() { | |
| 2262 Expression expression; | |
| 2263 if (_matchesKeyword(Keyword.SUPER) && | |
| 2264 _tokenMatches(_peek(), TokenType.BAR)) { | |
| 2265 expression = new SuperExpression(getAndAdvance()); | |
| 2266 } else { | |
| 2267 expression = _parseBitwiseXorExpression(); | |
| 2268 } | |
| 2269 while (_matches(TokenType.BAR)) { | |
| 2270 Token operator = getAndAdvance(); | |
| 2271 expression = new BinaryExpression( | |
| 2272 expression, operator, _parseBitwiseXorExpression()); | |
| 2273 } | |
| 2274 return expression; | |
| 2275 } | |
| 2276 | |
| 2277 /** | |
| 2278 * Parse a block. Return the block that was parsed. | |
| 2279 * | |
| 2280 * block ::= | |
| 2281 * '{' statements '}' | |
| 2282 */ | |
| 2283 Block parseBlock() { | |
| 2284 Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | |
| 2285 List<Statement> statements = new List<Statement>(); | |
| 2286 Token statementStart = _currentToken; | |
| 2287 while ( | |
| 2288 !_matches(TokenType.EOF) && !_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 2289 Statement statement = parseStatement2(); | |
| 2290 if (statement != null) { | |
| 2291 statements.add(statement); | |
| 2292 } | |
| 2293 if (identical(_currentToken, statementStart)) { | |
| 2294 // Ensure that we are making progress and report an error if we're not. | |
| 2295 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 2296 [_currentToken.lexeme]); | |
| 2297 _advance(); | |
| 2298 } | |
| 2299 statementStart = _currentToken; | |
| 2300 } | |
| 2301 Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 2302 return new Block(leftBracket, statements, rightBracket); | |
| 2303 } | |
| 2304 | |
| 2305 /** | |
| 2306 * Parse a class member. The [className] is the name of the class containing | |
| 2307 * the member being parsed. Return the class member that was parsed, or `null` | |
| 2308 * if what was found was not a valid class member. | |
| 2309 * | |
| 2310 * classMemberDefinition ::= | |
| 2311 * declaration ';' | |
| 2312 * | methodSignature functionBody | |
| 2313 */ | |
| 2314 ClassMember parseClassMember(String className) { | |
| 2315 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 2316 Modifiers modifiers = _parseModifiers(); | |
| 2317 if (_matchesKeyword(Keyword.VOID)) { | |
| 2318 TypeName returnType = parseReturnType(); | |
| 2319 if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) { | |
| 2320 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2321 return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2322 modifiers.staticKeyword, returnType); | |
| 2323 } else if (_matchesKeyword(Keyword.SET) && | |
| 2324 _tokenMatchesIdentifier(_peek())) { | |
| 2325 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2326 return _parseSetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2327 modifiers.staticKeyword, returnType); | |
| 2328 } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | |
| 2329 _validateModifiersForOperator(modifiers); | |
| 2330 return _parseOperator( | |
| 2331 commentAndMetadata, modifiers.externalKeyword, returnType); | |
| 2332 } else if (_matchesIdentifier() && | |
| 2333 _peek().matchesAny([ | |
| 2334 TokenType.OPEN_PAREN, | |
| 2335 TokenType.OPEN_CURLY_BRACKET, | |
| 2336 TokenType.FUNCTION, | |
| 2337 TokenType.LT | |
| 2338 ])) { | |
| 2339 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2340 return _parseMethodDeclarationAfterReturnType(commentAndMetadata, | |
| 2341 modifiers.externalKeyword, modifiers.staticKeyword, returnType); | |
| 2342 } else { | |
| 2343 // | |
| 2344 // We have found an error of some kind. Try to recover. | |
| 2345 // | |
| 2346 if (_matchesIdentifier()) { | |
| 2347 if (_peek().matchesAny( | |
| 2348 [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { | |
| 2349 // | |
| 2350 // We appear to have a variable declaration with a type of "void". | |
| 2351 // | |
| 2352 _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType); | |
| 2353 return _parseInitializedIdentifierList(commentAndMetadata, | |
| 2354 modifiers.staticKeyword, _validateModifiersForField(modifiers), | |
| 2355 returnType); | |
| 2356 } | |
| 2357 } | |
| 2358 if (_isOperator(_currentToken)) { | |
| 2359 // | |
| 2360 // We appear to have found an operator declaration without the | |
| 2361 // 'operator' keyword. | |
| 2362 // | |
| 2363 _validateModifiersForOperator(modifiers); | |
| 2364 return _parseOperator( | |
| 2365 commentAndMetadata, modifiers.externalKeyword, returnType); | |
| 2366 } | |
| 2367 _reportErrorForToken( | |
| 2368 ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); | |
| 2369 return null; | |
| 2370 } | |
| 2371 } else if (_matchesKeyword(Keyword.GET) && | |
| 2372 _tokenMatchesIdentifier(_peek())) { | |
| 2373 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2374 return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2375 modifiers.staticKeyword, null); | |
| 2376 } else if (_matchesKeyword(Keyword.SET) && | |
| 2377 _tokenMatchesIdentifier(_peek())) { | |
| 2378 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2379 return _parseSetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2380 modifiers.staticKeyword, null); | |
| 2381 } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | |
| 2382 _validateModifiersForOperator(modifiers); | |
| 2383 return _parseOperator( | |
| 2384 commentAndMetadata, modifiers.externalKeyword, null); | |
| 2385 } else if (!_matchesIdentifier()) { | |
| 2386 // | |
| 2387 // Recover from an error. | |
| 2388 // | |
| 2389 if (_matchesKeyword(Keyword.CLASS)) { | |
| 2390 _reportErrorForCurrentToken(ParserErrorCode.CLASS_IN_CLASS); | |
| 2391 // TODO(brianwilkerson) We don't currently have any way to capture the | |
| 2392 // class that was parsed. | |
| 2393 _parseClassDeclaration(commentAndMetadata, null); | |
| 2394 return null; | |
| 2395 } else if (_matchesKeyword(Keyword.ABSTRACT) && | |
| 2396 _tokenMatchesKeyword(_peek(), Keyword.CLASS)) { | |
| 2397 _reportErrorForToken(ParserErrorCode.CLASS_IN_CLASS, _peek()); | |
| 2398 // TODO(brianwilkerson) We don't currently have any way to capture the | |
| 2399 // class that was parsed. | |
| 2400 _parseClassDeclaration(commentAndMetadata, getAndAdvance()); | |
| 2401 return null; | |
| 2402 } else if (_matchesKeyword(Keyword.ENUM)) { | |
| 2403 _reportErrorForToken(ParserErrorCode.ENUM_IN_CLASS, _peek()); | |
| 2404 // TODO(brianwilkerson) We don't currently have any way to capture the | |
| 2405 // enum that was parsed. | |
| 2406 _parseEnumDeclaration(commentAndMetadata); | |
| 2407 return null; | |
| 2408 } else if (_isOperator(_currentToken)) { | |
| 2409 // | |
| 2410 // We appear to have found an operator declaration without the | |
| 2411 // 'operator' keyword. | |
| 2412 // | |
| 2413 _validateModifiersForOperator(modifiers); | |
| 2414 return _parseOperator( | |
| 2415 commentAndMetadata, modifiers.externalKeyword, null); | |
| 2416 } | |
| 2417 Token keyword = modifiers.varKeyword; | |
| 2418 if (keyword == null) { | |
| 2419 keyword = modifiers.finalKeyword; | |
| 2420 } | |
| 2421 if (keyword == null) { | |
| 2422 keyword = modifiers.constKeyword; | |
| 2423 } | |
| 2424 if (keyword != null) { | |
| 2425 // | |
| 2426 // We appear to have found an incomplete field declaration. | |
| 2427 // | |
| 2428 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 2429 List<VariableDeclaration> variables = new List<VariableDeclaration>(); | |
| 2430 variables.add( | |
| 2431 new VariableDeclaration(_createSyntheticIdentifier(), null, null)); | |
| 2432 return new FieldDeclaration(commentAndMetadata.comment, | |
| 2433 commentAndMetadata.metadata, null, | |
| 2434 new VariableDeclarationList(null, null, keyword, null, variables), | |
| 2435 _expectSemicolon()); | |
| 2436 } | |
| 2437 _reportErrorForToken( | |
| 2438 ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken); | |
| 2439 if (commentAndMetadata.comment != null || | |
| 2440 !commentAndMetadata.metadata.isEmpty) { | |
| 2441 // | |
| 2442 // We appear to have found an incomplete declaration at the end of the | |
| 2443 // class. At this point it consists of a metadata, which we don't want | |
| 2444 // to loose, so we'll treat it as a method declaration with a missing | |
| 2445 // name, parameters and empty body. | |
| 2446 // | |
| 2447 return new MethodDeclaration(commentAndMetadata.comment, | |
| 2448 commentAndMetadata.metadata, null, null, null, null, null, | |
| 2449 _createSyntheticIdentifier(), null, new FormalParameterList( | |
| 2450 null, new List<FormalParameter>(), null, null, null), | |
| 2451 new EmptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON))); | |
| 2452 } | |
| 2453 return null; | |
| 2454 } else if (_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 2455 _tokenMatchesIdentifier(_peekAt(2)) && | |
| 2456 _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) { | |
| 2457 return _parseConstructor(commentAndMetadata, modifiers.externalKeyword, | |
| 2458 _validateModifiersForConstructor(modifiers), modifiers.factoryKeyword, | |
| 2459 parseSimpleIdentifier(), getAndAdvance(), parseSimpleIdentifier(), | |
| 2460 parseFormalParameterList()); | |
| 2461 } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 2462 SimpleIdentifier methodName = parseSimpleIdentifier(); | |
| 2463 FormalParameterList parameters = parseFormalParameterList(); | |
| 2464 if (_matches(TokenType.COLON) || | |
| 2465 modifiers.factoryKeyword != null || | |
| 2466 methodName.name == className) { | |
| 2467 return _parseConstructor(commentAndMetadata, modifiers.externalKeyword, | |
| 2468 _validateModifiersForConstructor(modifiers), | |
| 2469 modifiers.factoryKeyword, methodName, null, null, parameters); | |
| 2470 } | |
| 2471 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2472 _validateFormalParameterList(parameters); | |
| 2473 return _parseMethodDeclarationAfterParameters(commentAndMetadata, | |
| 2474 modifiers.externalKeyword, modifiers.staticKeyword, null, methodName, | |
| 2475 null, parameters); | |
| 2476 } else if (_peek() | |
| 2477 .matchesAny([TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { | |
| 2478 if (modifiers.constKeyword == null && | |
| 2479 modifiers.finalKeyword == null && | |
| 2480 modifiers.varKeyword == null) { | |
| 2481 _reportErrorForCurrentToken( | |
| 2482 ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE); | |
| 2483 } | |
| 2484 return _parseInitializedIdentifierList(commentAndMetadata, | |
| 2485 modifiers.staticKeyword, _validateModifiersForField(modifiers), null); | |
| 2486 } else if (_matchesKeyword(Keyword.TYPEDEF)) { | |
| 2487 _reportErrorForCurrentToken(ParserErrorCode.TYPEDEF_IN_CLASS); | |
| 2488 // TODO(brianwilkerson) We don't currently have any way to capture the | |
| 2489 // function type alias that was parsed. | |
| 2490 _parseFunctionTypeAlias(commentAndMetadata, getAndAdvance()); | |
| 2491 return null; | |
| 2492 } else if (parseGenericMethods) { | |
| 2493 Token token = _skipTypeParameterList(_peek()); | |
| 2494 if (token != null && _tokenMatches(token, TokenType.OPEN_PAREN)) { | |
| 2495 return _parseMethodDeclarationAfterReturnType(commentAndMetadata, | |
| 2496 modifiers.externalKeyword, modifiers.staticKeyword, null); | |
| 2497 } | |
| 2498 } | |
| 2499 TypeName type = parseTypeName(); | |
| 2500 if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) { | |
| 2501 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2502 return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2503 modifiers.staticKeyword, type); | |
| 2504 } else if (_matchesKeyword(Keyword.SET) && | |
| 2505 _tokenMatchesIdentifier(_peek())) { | |
| 2506 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2507 return _parseSetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2508 modifiers.staticKeyword, type); | |
| 2509 } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | |
| 2510 _validateModifiersForOperator(modifiers); | |
| 2511 return _parseOperator( | |
| 2512 commentAndMetadata, modifiers.externalKeyword, type); | |
| 2513 } else if (!_matchesIdentifier()) { | |
| 2514 if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 2515 // | |
| 2516 // We appear to have found an incomplete declaration at the end of the | |
| 2517 // class. At this point it consists of a type name, so we'll treat it as | |
| 2518 // a field declaration with a missing field name and semicolon. | |
| 2519 // | |
| 2520 return _parseInitializedIdentifierList(commentAndMetadata, | |
| 2521 modifiers.staticKeyword, _validateModifiersForField(modifiers), | |
| 2522 type); | |
| 2523 } | |
| 2524 if (_isOperator(_currentToken)) { | |
| 2525 // | |
| 2526 // We appear to have found an operator declaration without the | |
| 2527 // 'operator' keyword. | |
| 2528 // | |
| 2529 _validateModifiersForOperator(modifiers); | |
| 2530 return _parseOperator( | |
| 2531 commentAndMetadata, modifiers.externalKeyword, type); | |
| 2532 } | |
| 2533 // | |
| 2534 // We appear to have found an incomplete declaration before another | |
| 2535 // declaration. At this point it consists of a type name, so we'll treat | |
| 2536 // it as a field declaration with a missing field name and semicolon. | |
| 2537 // | |
| 2538 _reportErrorForToken( | |
| 2539 ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken); | |
| 2540 try { | |
| 2541 _lockErrorListener(); | |
| 2542 return _parseInitializedIdentifierList(commentAndMetadata, | |
| 2543 modifiers.staticKeyword, _validateModifiersForField(modifiers), | |
| 2544 type); | |
| 2545 } finally { | |
| 2546 _unlockErrorListener(); | |
| 2547 } | |
| 2548 } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 2549 SimpleIdentifier methodName = parseSimpleIdentifier(); | |
| 2550 FormalParameterList parameters = parseFormalParameterList(); | |
| 2551 if (methodName.name == className) { | |
| 2552 _reportErrorForNode(ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, type); | |
| 2553 return _parseConstructor(commentAndMetadata, modifiers.externalKeyword, | |
| 2554 _validateModifiersForConstructor(modifiers), | |
| 2555 modifiers.factoryKeyword, methodName, null, null, parameters); | |
| 2556 } | |
| 2557 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2558 _validateFormalParameterList(parameters); | |
| 2559 return _parseMethodDeclarationAfterParameters(commentAndMetadata, | |
| 2560 modifiers.externalKeyword, modifiers.staticKeyword, type, methodName, | |
| 2561 null, parameters); | |
| 2562 } else if (parseGenericMethods && _tokenMatches(_peek(), TokenType.LT)) { | |
| 2563 return _parseMethodDeclarationAfterReturnType(commentAndMetadata, | |
| 2564 modifiers.externalKeyword, modifiers.staticKeyword, type); | |
| 2565 } else if (_tokenMatches(_peek(), TokenType.OPEN_CURLY_BRACKET)) { | |
| 2566 // We have found "TypeName identifier {", and are guessing that this is a | |
| 2567 // getter without the keyword 'get'. | |
| 2568 _validateModifiersForGetterOrSetterOrMethod(modifiers); | |
| 2569 _reportErrorForCurrentToken(ParserErrorCode.MISSING_GET); | |
| 2570 _currentToken = _injectToken( | |
| 2571 new Parser_SyntheticKeywordToken(Keyword.GET, _currentToken.offset)); | |
| 2572 return _parseGetter(commentAndMetadata, modifiers.externalKeyword, | |
| 2573 modifiers.staticKeyword, type); | |
| 2574 } | |
| 2575 return _parseInitializedIdentifierList(commentAndMetadata, | |
| 2576 modifiers.staticKeyword, _validateModifiersForField(modifiers), type); | |
| 2577 } | |
| 2578 | |
| 2579 /** | |
| 2580 * Parse a single combinator. Return the combinator that was parsed, or `null` | |
| 2581 * if no combinator is found. | |
| 2582 * | |
| 2583 * combinator ::= | |
| 2584 * 'show' identifier (',' identifier)* | |
| 2585 * | 'hide' identifier (',' identifier)* | |
| 2586 */ | |
| 2587 Combinator parseCombinator() { | |
| 2588 if (_matchesString(_SHOW) || _matchesString(_HIDE)) { | |
| 2589 Token keyword = getAndAdvance(); | |
| 2590 List<SimpleIdentifier> names = _parseIdentifierList(); | |
| 2591 if (keyword.lexeme == _SHOW) { | |
| 2592 return new ShowCombinator(keyword, names); | |
| 2593 } else { | |
| 2594 return new HideCombinator(keyword, names); | |
| 2595 } | |
| 2596 } | |
| 2597 return null; | |
| 2598 } | |
| 2599 | |
| 2600 /** | |
| 2601 * Parse a compilation unit, starting with the given [token]. Return the | |
| 2602 * compilation unit that was parsed. | |
| 2603 */ | |
| 2604 CompilationUnit parseCompilationUnit(Token token) { | |
| 2605 _currentToken = token; | |
| 2606 return parseCompilationUnit2(); | |
| 2607 } | |
| 2608 | |
| 2609 /** | |
| 2610 * Parse a compilation unit. Return the compilation unit that was parsed. | |
| 2611 * | |
| 2612 * Specified: | |
| 2613 * | |
| 2614 * compilationUnit ::= | |
| 2615 * scriptTag? directive* topLevelDeclaration* | |
| 2616 * | |
| 2617 * Actual: | |
| 2618 * | |
| 2619 * compilationUnit ::= | |
| 2620 * scriptTag? topLevelElement* | |
| 2621 * | |
| 2622 * topLevelElement ::= | |
| 2623 * directive | |
| 2624 * | topLevelDeclaration | |
| 2625 */ | |
| 2626 CompilationUnit parseCompilationUnit2() { | |
| 2627 Token firstToken = _currentToken; | |
| 2628 ScriptTag scriptTag = null; | |
| 2629 if (_matches(TokenType.SCRIPT_TAG)) { | |
| 2630 scriptTag = new ScriptTag(getAndAdvance()); | |
| 2631 } | |
| 2632 // | |
| 2633 // Even though all directives must appear before declarations and must occur | |
| 2634 // in a given order, we allow directives and declarations to occur in any | |
| 2635 // order so that we can recover better. | |
| 2636 // | |
| 2637 bool libraryDirectiveFound = false; | |
| 2638 bool partOfDirectiveFound = false; | |
| 2639 bool partDirectiveFound = false; | |
| 2640 bool directiveFoundAfterDeclaration = false; | |
| 2641 List<Directive> directives = new List<Directive>(); | |
| 2642 List<CompilationUnitMember> declarations = | |
| 2643 new List<CompilationUnitMember>(); | |
| 2644 Token memberStart = _currentToken; | |
| 2645 while (!_matches(TokenType.EOF)) { | |
| 2646 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 2647 if ((_matchesKeyword(Keyword.IMPORT) || | |
| 2648 _matchesKeyword(Keyword.EXPORT) || | |
| 2649 _matchesKeyword(Keyword.LIBRARY) || | |
| 2650 _matchesKeyword(Keyword.PART)) && | |
| 2651 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 2652 !_tokenMatches(_peek(), TokenType.LT) && | |
| 2653 !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 2654 Directive directive = _parseDirective(commentAndMetadata); | |
| 2655 if (declarations.length > 0 && !directiveFoundAfterDeclaration) { | |
| 2656 _reportErrorForToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION, | |
| 2657 directive.beginToken); | |
| 2658 directiveFoundAfterDeclaration = true; | |
| 2659 } | |
| 2660 if (directive is LibraryDirective) { | |
| 2661 if (libraryDirectiveFound) { | |
| 2662 _reportErrorForCurrentToken( | |
| 2663 ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES); | |
| 2664 } else { | |
| 2665 if (directives.length > 0) { | |
| 2666 _reportErrorForToken(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST, | |
| 2667 directive.libraryKeyword); | |
| 2668 } | |
| 2669 libraryDirectiveFound = true; | |
| 2670 } | |
| 2671 } else if (directive is PartDirective) { | |
| 2672 partDirectiveFound = true; | |
| 2673 } else if (partDirectiveFound) { | |
| 2674 if (directive is ExportDirective) { | |
| 2675 _reportErrorForToken( | |
| 2676 ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, | |
| 2677 directive.keyword); | |
| 2678 } else if (directive is ImportDirective) { | |
| 2679 _reportErrorForToken( | |
| 2680 ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE, | |
| 2681 directive.keyword); | |
| 2682 } | |
| 2683 } | |
| 2684 if (directive is PartOfDirective) { | |
| 2685 if (partOfDirectiveFound) { | |
| 2686 _reportErrorForCurrentToken( | |
| 2687 ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES); | |
| 2688 } else { | |
| 2689 int directiveCount = directives.length; | |
| 2690 for (int i = 0; i < directiveCount; i++) { | |
| 2691 _reportErrorForToken( | |
| 2692 ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, | |
| 2693 directives[i].keyword); | |
| 2694 } | |
| 2695 partOfDirectiveFound = true; | |
| 2696 } | |
| 2697 } else { | |
| 2698 if (partOfDirectiveFound) { | |
| 2699 _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART, | |
| 2700 directive.keyword); | |
| 2701 } | |
| 2702 } | |
| 2703 directives.add(directive); | |
| 2704 } else if (_matches(TokenType.SEMICOLON)) { | |
| 2705 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 2706 [_currentToken.lexeme]); | |
| 2707 _advance(); | |
| 2708 } else { | |
| 2709 CompilationUnitMember member = | |
| 2710 _parseCompilationUnitMember(commentAndMetadata); | |
| 2711 if (member != null) { | |
| 2712 declarations.add(member); | |
| 2713 } | |
| 2714 } | |
| 2715 if (identical(_currentToken, memberStart)) { | |
| 2716 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 2717 [_currentToken.lexeme]); | |
| 2718 _advance(); | |
| 2719 while (!_matches(TokenType.EOF) && | |
| 2720 !_couldBeStartOfCompilationUnitMember()) { | |
| 2721 _advance(); | |
| 2722 } | |
| 2723 } | |
| 2724 memberStart = _currentToken; | |
| 2725 } | |
| 2726 return new CompilationUnit( | |
| 2727 firstToken, scriptTag, directives, declarations, _currentToken); | |
| 2728 } | |
| 2729 | |
| 2730 /** | |
| 2731 * Parse a conditional expression. Return the conditional expression that was | |
| 2732 * parsed. | |
| 2733 * | |
| 2734 * conditionalExpression ::= | |
| 2735 * ifNullExpression ('?' expressionWithoutCascade ':' expressionWithou
tCascade)? | |
| 2736 */ | |
| 2737 Expression parseConditionalExpression() { | |
| 2738 Expression condition = parseIfNullExpression(); | |
| 2739 if (!_matches(TokenType.QUESTION)) { | |
| 2740 return condition; | |
| 2741 } | |
| 2742 Token question = getAndAdvance(); | |
| 2743 Expression thenExpression = parseExpressionWithoutCascade(); | |
| 2744 Token colon = _expect(TokenType.COLON); | |
| 2745 Expression elseExpression = parseExpressionWithoutCascade(); | |
| 2746 return new ConditionalExpression( | |
| 2747 condition, question, thenExpression, colon, elseExpression); | |
| 2748 } | |
| 2749 | |
| 2750 /** | |
| 2751 * Parse the name of a constructor. Return the constructor name that was | |
| 2752 * parsed. | |
| 2753 * | |
| 2754 * constructorName: | |
| 2755 * type ('.' identifier)? | |
| 2756 */ | |
| 2757 ConstructorName parseConstructorName() { | |
| 2758 TypeName type = parseTypeName(); | |
| 2759 Token period = null; | |
| 2760 SimpleIdentifier name = null; | |
| 2761 if (_matches(TokenType.PERIOD)) { | |
| 2762 period = getAndAdvance(); | |
| 2763 name = parseSimpleIdentifier(); | |
| 2764 } | |
| 2765 return new ConstructorName(type, period, name); | |
| 2766 } | |
| 2767 | |
| 2768 /** | |
| 2769 * Parse the script tag and directives in a compilation unit, starting with | |
| 2770 * the given [token], until the first non-directive is encountered. The | |
| 2771 * remainder of the compilation unit will not be parsed. Specifically, if | |
| 2772 * there are directives later in the file, they will not be parsed. Return the | |
| 2773 * compilation unit that was parsed. | |
| 2774 */ | |
| 2775 CompilationUnit parseDirectives(Token token) { | |
| 2776 _currentToken = token; | |
| 2777 return _parseDirectives(); | |
| 2778 } | |
| 2779 | |
| 2780 /** | |
| 2781 * Parse an expression, starting with the given [token]. Return the expression | |
| 2782 * that was parsed, or `null` if the tokens do not represent a recognizable | |
| 2783 * expression. | |
| 2784 */ | |
| 2785 Expression parseExpression(Token token) { | |
| 2786 _currentToken = token; | |
| 2787 return parseExpression2(); | |
| 2788 } | |
| 2789 | |
| 2790 /** | |
| 2791 * Parse an expression that might contain a cascade. Return the expression | |
| 2792 * that was parsed. | |
| 2793 * | |
| 2794 * expression ::= | |
| 2795 * assignableExpression assignmentOperator expression | |
| 2796 * | conditionalExpression cascadeSection* | |
| 2797 * | throwExpression | |
| 2798 */ | |
| 2799 Expression parseExpression2() { | |
| 2800 if (_matchesKeyword(Keyword.THROW)) { | |
| 2801 return _parseThrowExpression(); | |
| 2802 } else if (_matchesKeyword(Keyword.RETHROW)) { | |
| 2803 // TODO(brianwilkerson) Rethrow is a statement again. | |
| 2804 return _parseRethrowExpression(); | |
| 2805 } | |
| 2806 // | |
| 2807 // assignableExpression is a subset of conditionalExpression, so we can | |
| 2808 // parse a conditional expression and then determine whether it is followed | |
| 2809 // by an assignmentOperator, checking for conformance to the restricted | |
| 2810 // grammar after making that determination. | |
| 2811 // | |
| 2812 Expression expression = parseConditionalExpression(); | |
| 2813 TokenType tokenType = _currentToken.type; | |
| 2814 if (tokenType == TokenType.PERIOD_PERIOD) { | |
| 2815 List<Expression> cascadeSections = new List<Expression>(); | |
| 2816 while (tokenType == TokenType.PERIOD_PERIOD) { | |
| 2817 Expression section = _parseCascadeSection(); | |
| 2818 if (section != null) { | |
| 2819 cascadeSections.add(section); | |
| 2820 } | |
| 2821 tokenType = _currentToken.type; | |
| 2822 } | |
| 2823 return new CascadeExpression(expression, cascadeSections); | |
| 2824 } else if (tokenType.isAssignmentOperator) { | |
| 2825 Token operator = getAndAdvance(); | |
| 2826 _ensureAssignable(expression); | |
| 2827 return new AssignmentExpression(expression, operator, parseExpression2()); | |
| 2828 } | |
| 2829 return expression; | |
| 2830 } | |
| 2831 | |
| 2832 /** | |
| 2833 * Parse an expression that does not contain any cascades. Return the | |
| 2834 * expression that was parsed. | |
| 2835 * | |
| 2836 * expressionWithoutCascade ::= | |
| 2837 * assignableExpression assignmentOperator expressionWithoutCascade | |
| 2838 * | conditionalExpression | |
| 2839 * | throwExpressionWithoutCascade | |
| 2840 */ | |
| 2841 Expression parseExpressionWithoutCascade() { | |
| 2842 if (_matchesKeyword(Keyword.THROW)) { | |
| 2843 return _parseThrowExpressionWithoutCascade(); | |
| 2844 } else if (_matchesKeyword(Keyword.RETHROW)) { | |
| 2845 return _parseRethrowExpression(); | |
| 2846 } | |
| 2847 // | |
| 2848 // assignableExpression is a subset of conditionalExpression, so we can | |
| 2849 // parse a conditional expression and then determine whether it is followed | |
| 2850 // by an assignmentOperator, checking for conformance to the restricted | |
| 2851 // grammar after making that determination. | |
| 2852 // | |
| 2853 Expression expression = parseConditionalExpression(); | |
| 2854 if (_currentToken.type.isAssignmentOperator) { | |
| 2855 Token operator = getAndAdvance(); | |
| 2856 _ensureAssignable(expression); | |
| 2857 expression = new AssignmentExpression( | |
| 2858 expression, operator, parseExpressionWithoutCascade()); | |
| 2859 } | |
| 2860 return expression; | |
| 2861 } | |
| 2862 | |
| 2863 /** | |
| 2864 * Parse a class extends clause. Return the class extends clause that was | |
| 2865 * parsed. | |
| 2866 * | |
| 2867 * classExtendsClause ::= | |
| 2868 * 'extends' type | |
| 2869 */ | |
| 2870 ExtendsClause parseExtendsClause() { | |
| 2871 Token keyword = _expectKeyword(Keyword.EXTENDS); | |
| 2872 TypeName superclass = parseTypeName(); | |
| 2873 return new ExtendsClause(keyword, superclass); | |
| 2874 } | |
| 2875 | |
| 2876 /** | |
| 2877 * Parse a list of formal parameters. Return the formal parameters that were | |
| 2878 * parsed. | |
| 2879 * | |
| 2880 * formalParameterList ::= | |
| 2881 * '(' ')' | |
| 2882 * | '(' normalFormalParameters (',' optionalFormalParameters)? ')' | |
| 2883 * | '(' optionalFormalParameters ')' | |
| 2884 * | |
| 2885 * normalFormalParameters ::= | |
| 2886 * normalFormalParameter (',' normalFormalParameter)* | |
| 2887 * | |
| 2888 * optionalFormalParameters ::= | |
| 2889 * optionalPositionalFormalParameters | |
| 2890 * | namedFormalParameters | |
| 2891 * | |
| 2892 * optionalPositionalFormalParameters ::= | |
| 2893 * '[' defaultFormalParameter (',' defaultFormalParameter)* ']' | |
| 2894 * | |
| 2895 * namedFormalParameters ::= | |
| 2896 * '{' defaultNamedParameter (',' defaultNamedParameter)* '}' | |
| 2897 */ | |
| 2898 FormalParameterList parseFormalParameterList() { | |
| 2899 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 2900 if (_matches(TokenType.CLOSE_PAREN)) { | |
| 2901 return new FormalParameterList( | |
| 2902 leftParenthesis, null, null, null, getAndAdvance()); | |
| 2903 } | |
| 2904 // | |
| 2905 // Even though it is invalid to have default parameters outside of brackets, | |
| 2906 // required parameters inside of brackets, or multiple groups of default and | |
| 2907 // named parameters, we allow all of these cases so that we can recover | |
| 2908 // better. | |
| 2909 // | |
| 2910 List<FormalParameter> parameters = new List<FormalParameter>(); | |
| 2911 List<FormalParameter> normalParameters = new List<FormalParameter>(); | |
| 2912 List<FormalParameter> positionalParameters = new List<FormalParameter>(); | |
| 2913 List<FormalParameter> namedParameters = new List<FormalParameter>(); | |
| 2914 List<FormalParameter> currentParameters = normalParameters; | |
| 2915 Token leftSquareBracket = null; | |
| 2916 Token rightSquareBracket = null; | |
| 2917 Token leftCurlyBracket = null; | |
| 2918 Token rightCurlyBracket = null; | |
| 2919 ParameterKind kind = ParameterKind.REQUIRED; | |
| 2920 bool firstParameter = true; | |
| 2921 bool reportedMuliplePositionalGroups = false; | |
| 2922 bool reportedMulipleNamedGroups = false; | |
| 2923 bool reportedMixedGroups = false; | |
| 2924 bool wasOptionalParameter = false; | |
| 2925 Token initialToken = null; | |
| 2926 do { | |
| 2927 if (firstParameter) { | |
| 2928 firstParameter = false; | |
| 2929 } else if (!_optional(TokenType.COMMA)) { | |
| 2930 // TODO(brianwilkerson) The token is wrong, we need to recover from this | |
| 2931 // case. | |
| 2932 if (_getEndToken(leftParenthesis) != null) { | |
| 2933 _reportErrorForCurrentToken( | |
| 2934 ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]); | |
| 2935 } else { | |
| 2936 _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS, | |
| 2937 _currentToken.previous); | |
| 2938 break; | |
| 2939 } | |
| 2940 } | |
| 2941 initialToken = _currentToken; | |
| 2942 // | |
| 2943 // Handle the beginning of parameter groups. | |
| 2944 // | |
| 2945 if (_matches(TokenType.OPEN_SQUARE_BRACKET)) { | |
| 2946 wasOptionalParameter = true; | |
| 2947 if (leftSquareBracket != null && !reportedMuliplePositionalGroups) { | |
| 2948 _reportErrorForCurrentToken( | |
| 2949 ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS); | |
| 2950 reportedMuliplePositionalGroups = true; | |
| 2951 } | |
| 2952 if (leftCurlyBracket != null && !reportedMixedGroups) { | |
| 2953 _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS); | |
| 2954 reportedMixedGroups = true; | |
| 2955 } | |
| 2956 leftSquareBracket = getAndAdvance(); | |
| 2957 currentParameters = positionalParameters; | |
| 2958 kind = ParameterKind.POSITIONAL; | |
| 2959 } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 2960 wasOptionalParameter = true; | |
| 2961 if (leftCurlyBracket != null && !reportedMulipleNamedGroups) { | |
| 2962 _reportErrorForCurrentToken( | |
| 2963 ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS); | |
| 2964 reportedMulipleNamedGroups = true; | |
| 2965 } | |
| 2966 if (leftSquareBracket != null && !reportedMixedGroups) { | |
| 2967 _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS); | |
| 2968 reportedMixedGroups = true; | |
| 2969 } | |
| 2970 leftCurlyBracket = getAndAdvance(); | |
| 2971 currentParameters = namedParameters; | |
| 2972 kind = ParameterKind.NAMED; | |
| 2973 } | |
| 2974 // | |
| 2975 // Parse and record the parameter. | |
| 2976 // | |
| 2977 FormalParameter parameter = _parseFormalParameter(kind); | |
| 2978 parameters.add(parameter); | |
| 2979 currentParameters.add(parameter); | |
| 2980 if (kind == ParameterKind.REQUIRED && wasOptionalParameter) { | |
| 2981 _reportErrorForNode( | |
| 2982 ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter); | |
| 2983 } | |
| 2984 // | |
| 2985 // Handle the end of parameter groups. | |
| 2986 // | |
| 2987 // TODO(brianwilkerson) Improve the detection and reporting of missing and | |
| 2988 // mismatched delimiters. | |
| 2989 if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) { | |
| 2990 rightSquareBracket = getAndAdvance(); | |
| 2991 currentParameters = normalParameters; | |
| 2992 if (leftSquareBracket == null) { | |
| 2993 if (leftCurlyBracket != null) { | |
| 2994 _reportErrorForCurrentToken( | |
| 2995 ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); | |
| 2996 rightCurlyBracket = rightSquareBracket; | |
| 2997 rightSquareBracket = null; | |
| 2998 } else { | |
| 2999 _reportErrorForCurrentToken( | |
| 3000 ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, | |
| 3001 ["["]); | |
| 3002 } | |
| 3003 } | |
| 3004 kind = ParameterKind.REQUIRED; | |
| 3005 } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 3006 rightCurlyBracket = getAndAdvance(); | |
| 3007 currentParameters = normalParameters; | |
| 3008 if (leftCurlyBracket == null) { | |
| 3009 if (leftSquareBracket != null) { | |
| 3010 _reportErrorForCurrentToken( | |
| 3011 ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); | |
| 3012 rightSquareBracket = rightCurlyBracket; | |
| 3013 rightCurlyBracket = null; | |
| 3014 } else { | |
| 3015 _reportErrorForCurrentToken( | |
| 3016 ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP, | |
| 3017 ["{"]); | |
| 3018 } | |
| 3019 } | |
| 3020 kind = ParameterKind.REQUIRED; | |
| 3021 } | |
| 3022 } while (!_matches(TokenType.CLOSE_PAREN) && | |
| 3023 !identical(initialToken, _currentToken)); | |
| 3024 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 3025 // | |
| 3026 // Check that the groups were closed correctly. | |
| 3027 // | |
| 3028 if (leftSquareBracket != null && rightSquareBracket == null) { | |
| 3029 _reportErrorForCurrentToken( | |
| 3030 ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]); | |
| 3031 } | |
| 3032 if (leftCurlyBracket != null && rightCurlyBracket == null) { | |
| 3033 _reportErrorForCurrentToken( | |
| 3034 ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]); | |
| 3035 } | |
| 3036 // | |
| 3037 // Build the parameter list. | |
| 3038 // | |
| 3039 if (leftSquareBracket == null) { | |
| 3040 leftSquareBracket = leftCurlyBracket; | |
| 3041 } | |
| 3042 if (rightSquareBracket == null) { | |
| 3043 rightSquareBracket = rightCurlyBracket; | |
| 3044 } | |
| 3045 return new FormalParameterList(leftParenthesis, parameters, | |
| 3046 leftSquareBracket, rightSquareBracket, rightParenthesis); | |
| 3047 } | |
| 3048 | |
| 3049 /** | |
| 3050 * Parse a function expression. Return the function expression that was | |
| 3051 * parsed. | |
| 3052 * | |
| 3053 * functionExpression ::= | |
| 3054 * typeParameters? formalParameterList functionExpressionBody | |
| 3055 */ | |
| 3056 FunctionExpression parseFunctionExpression() { | |
| 3057 TypeParameterList typeParameters = null; | |
| 3058 if (parseGenericMethods && _matches(TokenType.LT)) { | |
| 3059 typeParameters = parseTypeParameterList(); | |
| 3060 } | |
| 3061 FormalParameterList parameters = parseFormalParameterList(); | |
| 3062 _validateFormalParameterList(parameters); | |
| 3063 FunctionBody body = | |
| 3064 _parseFunctionBody(false, ParserErrorCode.MISSING_FUNCTION_BODY, true); | |
| 3065 return new FunctionExpression(typeParameters, parameters, body); | |
| 3066 } | |
| 3067 | |
| 3068 /** | |
| 3069 * Parse an if-null expression. Return the if-null expression that was | |
| 3070 * parsed. | |
| 3071 * | |
| 3072 * ifNullExpression ::= logicalOrExpression ('??' logicalOrExpression)* | |
| 3073 */ | |
| 3074 Expression parseIfNullExpression() { | |
| 3075 Expression expression = parseLogicalOrExpression(); | |
| 3076 while (_matches(TokenType.QUESTION_QUESTION)) { | |
| 3077 Token operator = getAndAdvance(); | |
| 3078 expression = new BinaryExpression( | |
| 3079 expression, operator, parseLogicalOrExpression()); | |
| 3080 } | |
| 3081 return expression; | |
| 3082 } | |
| 3083 | |
| 3084 /** | |
| 3085 * Parse an implements clause. Return the implements clause that was parsed. | |
| 3086 * | |
| 3087 * implementsClause ::= | |
| 3088 * 'implements' type (',' type)* | |
| 3089 */ | |
| 3090 ImplementsClause parseImplementsClause() { | |
| 3091 Token keyword = _expectKeyword(Keyword.IMPLEMENTS); | |
| 3092 List<TypeName> interfaces = new List<TypeName>(); | |
| 3093 interfaces.add(parseTypeName()); | |
| 3094 while (_optional(TokenType.COMMA)) { | |
| 3095 interfaces.add(parseTypeName()); | |
| 3096 } | |
| 3097 return new ImplementsClause(keyword, interfaces); | |
| 3098 } | |
| 3099 | |
| 3100 /** | |
| 3101 * Parse a label. Return the label that was parsed. | |
| 3102 * | |
| 3103 * label ::= | |
| 3104 * identifier ':' | |
| 3105 */ | |
| 3106 Label parseLabel() { | |
| 3107 SimpleIdentifier label = parseSimpleIdentifier(); | |
| 3108 Token colon = _expect(TokenType.COLON); | |
| 3109 return new Label(label, colon); | |
| 3110 } | |
| 3111 | |
| 3112 /** | |
| 3113 * Parse a library identifier. Return the library identifier that was parsed. | |
| 3114 * | |
| 3115 * libraryIdentifier ::= | |
| 3116 * identifier ('.' identifier)* | |
| 3117 */ | |
| 3118 LibraryIdentifier parseLibraryIdentifier() { | |
| 3119 List<SimpleIdentifier> components = new List<SimpleIdentifier>(); | |
| 3120 components.add(parseSimpleIdentifier()); | |
| 3121 while (_matches(TokenType.PERIOD)) { | |
| 3122 _advance(); | |
| 3123 components.add(parseSimpleIdentifier()); | |
| 3124 } | |
| 3125 return new LibraryIdentifier(components); | |
| 3126 } | |
| 3127 | |
| 3128 /** | |
| 3129 * Parse a logical or expression. Return the logical or expression that was | |
| 3130 * parsed. | |
| 3131 * | |
| 3132 * logicalOrExpression ::= | |
| 3133 * logicalAndExpression ('||' logicalAndExpression)* | |
| 3134 */ | |
| 3135 Expression parseLogicalOrExpression() { | |
| 3136 Expression expression = _parseLogicalAndExpression(); | |
| 3137 while (_matches(TokenType.BAR_BAR)) { | |
| 3138 Token operator = getAndAdvance(); | |
| 3139 expression = new BinaryExpression( | |
| 3140 expression, operator, _parseLogicalAndExpression()); | |
| 3141 } | |
| 3142 return expression; | |
| 3143 } | |
| 3144 | |
| 3145 /** | |
| 3146 * Parse a map literal entry. Return the map literal entry that was parsed. | |
| 3147 * | |
| 3148 * mapLiteralEntry ::= | |
| 3149 * expression ':' expression | |
| 3150 */ | |
| 3151 MapLiteralEntry parseMapLiteralEntry() { | |
| 3152 Expression key = parseExpression2(); | |
| 3153 Token separator = _expect(TokenType.COLON); | |
| 3154 Expression value = parseExpression2(); | |
| 3155 return new MapLiteralEntry(key, separator, value); | |
| 3156 } | |
| 3157 | |
| 3158 /** | |
| 3159 * Parse a normal formal parameter. Return the normal formal parameter that | |
| 3160 * was parsed. | |
| 3161 * | |
| 3162 * normalFormalParameter ::= | |
| 3163 * functionSignature | |
| 3164 * | fieldFormalParameter | |
| 3165 * | simpleFormalParameter | |
| 3166 * | |
| 3167 * functionSignature: | |
| 3168 * metadata returnType? identifier typeParameters? formalParameterList | |
| 3169 * | |
| 3170 * fieldFormalParameter ::= | |
| 3171 * metadata finalConstVarOrType? 'this' '.' identifier | |
| 3172 * | |
| 3173 * simpleFormalParameter ::= | |
| 3174 * declaredIdentifier | |
| 3175 * | metadata identifier | |
| 3176 */ | |
| 3177 NormalFormalParameter parseNormalFormalParameter() { | |
| 3178 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 3179 FinalConstVarOrType holder = _parseFinalConstVarOrType(true); | |
| 3180 Token thisKeyword = null; | |
| 3181 Token period = null; | |
| 3182 if (_matchesKeyword(Keyword.THIS)) { | |
| 3183 thisKeyword = getAndAdvance(); | |
| 3184 period = _expect(TokenType.PERIOD); | |
| 3185 } | |
| 3186 SimpleIdentifier identifier = parseSimpleIdentifier(); | |
| 3187 TypeParameterList typeParameters = null; | |
| 3188 if (parseGenericMethods && _matches(TokenType.LT)) { | |
| 3189 typeParameters = parseTypeParameterList(); | |
| 3190 } | |
| 3191 if (_matches(TokenType.OPEN_PAREN)) { | |
| 3192 FormalParameterList parameters = parseFormalParameterList(); | |
| 3193 if (thisKeyword == null) { | |
| 3194 if (holder.keyword != null) { | |
| 3195 _reportErrorForToken( | |
| 3196 ParserErrorCode.FUNCTION_TYPED_PARAMETER_VAR, holder.keyword); | |
| 3197 } | |
| 3198 return new FunctionTypedFormalParameter(commentAndMetadata.comment, | |
| 3199 commentAndMetadata.metadata, holder.type, identifier, | |
| 3200 typeParameters, parameters); | |
| 3201 } else { | |
| 3202 return new FieldFormalParameter(commentAndMetadata.comment, | |
| 3203 commentAndMetadata.metadata, holder.keyword, holder.type, | |
| 3204 thisKeyword, period, identifier, typeParameters, parameters); | |
| 3205 } | |
| 3206 } else if (typeParameters != null) { | |
| 3207 // TODO(brianwilkerson) Report an error. It looks like a function-typed | |
| 3208 // parameter with no parameter list. | |
| 3209 //_reportErrorForToken(ParserErrorCode.MISSING_PARAMETERS, typeParameters.
endToken); | |
| 3210 } | |
| 3211 TypeName type = holder.type; | |
| 3212 if (type != null) { | |
| 3213 if (_tokenMatchesKeyword(type.name.beginToken, Keyword.VOID)) { | |
| 3214 _reportErrorForToken( | |
| 3215 ParserErrorCode.VOID_PARAMETER, type.name.beginToken); | |
| 3216 } else if (holder.keyword != null && | |
| 3217 _tokenMatchesKeyword(holder.keyword, Keyword.VAR)) { | |
| 3218 _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, holder.keyword); | |
| 3219 } | |
| 3220 } | |
| 3221 if (thisKeyword != null) { | |
| 3222 // TODO(brianwilkerson) If there are type parameters but no parameters, | |
| 3223 // should we create a synthetic empty parameter list here so we can | |
| 3224 // capture the type parameters? | |
| 3225 return new FieldFormalParameter(commentAndMetadata.comment, | |
| 3226 commentAndMetadata.metadata, holder.keyword, holder.type, thisKeyword, | |
| 3227 period, identifier, null, null); | |
| 3228 } | |
| 3229 return new SimpleFormalParameter(commentAndMetadata.comment, | |
| 3230 commentAndMetadata.metadata, holder.keyword, holder.type, identifier); | |
| 3231 } | |
| 3232 | |
| 3233 /** | |
| 3234 * Parse a prefixed identifier. Return the prefixed identifier that was | |
| 3235 * parsed. | |
| 3236 * | |
| 3237 * prefixedIdentifier ::= | |
| 3238 * identifier ('.' identifier)? | |
| 3239 */ | |
| 3240 Identifier parsePrefixedIdentifier() { | |
| 3241 SimpleIdentifier qualifier = parseSimpleIdentifier(); | |
| 3242 if (!_matches(TokenType.PERIOD)) { | |
| 3243 return qualifier; | |
| 3244 } | |
| 3245 Token period = getAndAdvance(); | |
| 3246 SimpleIdentifier qualified = parseSimpleIdentifier(); | |
| 3247 return new PrefixedIdentifier(qualifier, period, qualified); | |
| 3248 } | |
| 3249 | |
| 3250 /** | |
| 3251 * Parse a return type. Return the return type that was parsed. | |
| 3252 * | |
| 3253 * returnType ::= | |
| 3254 * 'void' | |
| 3255 * | type | |
| 3256 */ | |
| 3257 TypeName parseReturnType() { | |
| 3258 if (_matchesKeyword(Keyword.VOID)) { | |
| 3259 return new TypeName(new SimpleIdentifier(getAndAdvance()), null); | |
| 3260 } else { | |
| 3261 return parseTypeName(); | |
| 3262 } | |
| 3263 } | |
| 3264 | |
| 3265 /** | |
| 3266 * Parse a simple identifier. Return the simple identifier that was parsed. | |
| 3267 * | |
| 3268 * identifier ::= | |
| 3269 * IDENTIFIER | |
| 3270 */ | |
| 3271 SimpleIdentifier parseSimpleIdentifier() { | |
| 3272 if (_matchesIdentifier()) { | |
| 3273 String lexeme = _currentToken.lexeme; | |
| 3274 if ((_inAsync || _inGenerator) && | |
| 3275 (lexeme == 'async' || lexeme == 'await' || lexeme == 'yield')) { | |
| 3276 _reportErrorForCurrentToken( | |
| 3277 ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER); | |
| 3278 } | |
| 3279 return new SimpleIdentifier(getAndAdvance()); | |
| 3280 } | |
| 3281 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 3282 return _createSyntheticIdentifier(); | |
| 3283 } | |
| 3284 | |
| 3285 /** | |
| 3286 * Parse a statement, starting with the given [token]. Return the statement | |
| 3287 * that was parsed, or `null` if the tokens do not represent a recognizable | |
| 3288 * statement. | |
| 3289 */ | |
| 3290 Statement parseStatement(Token token) { | |
| 3291 _currentToken = token; | |
| 3292 return parseStatement2(); | |
| 3293 } | |
| 3294 | |
| 3295 /** | |
| 3296 * Parse a statement. Return the statement that was parsed. | |
| 3297 * | |
| 3298 * statement ::= | |
| 3299 * label* nonLabeledStatement | |
| 3300 */ | |
| 3301 Statement parseStatement2() { | |
| 3302 List<Label> labels = new List<Label>(); | |
| 3303 while (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) { | |
| 3304 labels.add(parseLabel()); | |
| 3305 } | |
| 3306 Statement statement = _parseNonLabeledStatement(); | |
| 3307 if (labels.isEmpty) { | |
| 3308 return statement; | |
| 3309 } | |
| 3310 return new LabeledStatement(labels, statement); | |
| 3311 } | |
| 3312 | |
| 3313 /** | |
| 3314 * Parse a sequence of statements, starting with the given [token]. Return the | |
| 3315 * statements that were parsed, or `null` if the tokens do not represent a | |
| 3316 * recognizable sequence of statements. | |
| 3317 */ | |
| 3318 List<Statement> parseStatements(Token token) { | |
| 3319 _currentToken = token; | |
| 3320 return _parseStatementList(); | |
| 3321 } | |
| 3322 | |
| 3323 /** | |
| 3324 * Parse a string literal. Return the string literal that was parsed. | |
| 3325 * | |
| 3326 * stringLiteral ::= | |
| 3327 * MULTI_LINE_STRING+ | |
| 3328 * | SINGLE_LINE_STRING+ | |
| 3329 */ | |
| 3330 StringLiteral parseStringLiteral() { | |
| 3331 List<StringLiteral> strings = new List<StringLiteral>(); | |
| 3332 while (_matches(TokenType.STRING)) { | |
| 3333 Token string = getAndAdvance(); | |
| 3334 if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | |
| 3335 _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) { | |
| 3336 strings.add(_parseStringInterpolation(string)); | |
| 3337 } else { | |
| 3338 strings.add(new SimpleStringLiteral( | |
| 3339 string, _computeStringValue(string.lexeme, true, true))); | |
| 3340 } | |
| 3341 } | |
| 3342 if (strings.length < 1) { | |
| 3343 _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL); | |
| 3344 return _createSyntheticStringLiteral(); | |
| 3345 } else if (strings.length == 1) { | |
| 3346 return strings[0]; | |
| 3347 } else { | |
| 3348 return new AdjacentStrings(strings); | |
| 3349 } | |
| 3350 } | |
| 3351 | |
| 3352 /** | |
| 3353 * Parse a list of type arguments. Return the type argument list that was | |
| 3354 * parsed. | |
| 3355 * | |
| 3356 * typeArguments ::= | |
| 3357 * '<' typeList '>' | |
| 3358 * | |
| 3359 * typeList ::= | |
| 3360 * type (',' type)* | |
| 3361 */ | |
| 3362 TypeArgumentList parseTypeArgumentList() { | |
| 3363 Token leftBracket = _expect(TokenType.LT); | |
| 3364 List<TypeName> arguments = new List<TypeName>(); | |
| 3365 arguments.add(parseTypeName()); | |
| 3366 while (_optional(TokenType.COMMA)) { | |
| 3367 arguments.add(parseTypeName()); | |
| 3368 } | |
| 3369 Token rightBracket = _expectGt(); | |
| 3370 return new TypeArgumentList(leftBracket, arguments, rightBracket); | |
| 3371 } | |
| 3372 | |
| 3373 /** | |
| 3374 * Parse a type name. Return the type name that was parsed. | |
| 3375 * | |
| 3376 * type ::= | |
| 3377 * qualified typeArguments? | |
| 3378 */ | |
| 3379 TypeName parseTypeName() { | |
| 3380 Identifier typeName; | |
| 3381 if (_matchesKeyword(Keyword.VAR)) { | |
| 3382 _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME); | |
| 3383 typeName = new SimpleIdentifier(getAndAdvance()); | |
| 3384 } else if (_matchesIdentifier()) { | |
| 3385 typeName = parsePrefixedIdentifier(); | |
| 3386 } else { | |
| 3387 typeName = _createSyntheticIdentifier(); | |
| 3388 _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME); | |
| 3389 } | |
| 3390 TypeArgumentList typeArguments = null; | |
| 3391 if (_matches(TokenType.LT)) { | |
| 3392 typeArguments = parseTypeArgumentList(); | |
| 3393 } | |
| 3394 return new TypeName(typeName, typeArguments); | |
| 3395 } | |
| 3396 | |
| 3397 /** | |
| 3398 * Parse a type parameter. Return the type parameter that was parsed. | |
| 3399 * | |
| 3400 * typeParameter ::= | |
| 3401 * metadata name ('extends' bound)? | |
| 3402 */ | |
| 3403 TypeParameter parseTypeParameter() { | |
| 3404 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 3405 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 3406 if (_matchesKeyword(Keyword.EXTENDS)) { | |
| 3407 Token keyword = getAndAdvance(); | |
| 3408 TypeName bound = parseTypeName(); | |
| 3409 return new TypeParameter(commentAndMetadata.comment, | |
| 3410 commentAndMetadata.metadata, name, keyword, bound); | |
| 3411 } | |
| 3412 return new TypeParameter(commentAndMetadata.comment, | |
| 3413 commentAndMetadata.metadata, name, null, null); | |
| 3414 } | |
| 3415 | |
| 3416 /** | |
| 3417 * Parse a list of type parameters. Return the list of type parameters that | |
| 3418 * were parsed. | |
| 3419 * | |
| 3420 * typeParameterList ::= | |
| 3421 * '<' typeParameter (',' typeParameter)* '>' | |
| 3422 */ | |
| 3423 TypeParameterList parseTypeParameterList() { | |
| 3424 Token leftBracket = _expect(TokenType.LT); | |
| 3425 List<TypeParameter> typeParameters = new List<TypeParameter>(); | |
| 3426 typeParameters.add(parseTypeParameter()); | |
| 3427 while (_optional(TokenType.COMMA)) { | |
| 3428 typeParameters.add(parseTypeParameter()); | |
| 3429 } | |
| 3430 Token rightBracket = _expectGt(); | |
| 3431 return new TypeParameterList(leftBracket, typeParameters, rightBracket); | |
| 3432 } | |
| 3433 | |
| 3434 /** | |
| 3435 * Parse a with clause. Return the with clause that was parsed. | |
| 3436 * | |
| 3437 * withClause ::= | |
| 3438 * 'with' typeName (',' typeName)* | |
| 3439 */ | |
| 3440 WithClause parseWithClause() { | |
| 3441 Token with2 = _expectKeyword(Keyword.WITH); | |
| 3442 List<TypeName> types = new List<TypeName>(); | |
| 3443 types.add(parseTypeName()); | |
| 3444 while (_optional(TokenType.COMMA)) { | |
| 3445 types.add(parseTypeName()); | |
| 3446 } | |
| 3447 return new WithClause(with2, types); | |
| 3448 } | |
| 3449 | |
| 3450 /** | |
| 3451 * Advance to the next token in the token stream. | |
| 3452 */ | |
| 3453 void _advance() { | |
| 3454 _currentToken = _currentToken.next; | |
| 3455 } | |
| 3456 | |
| 3457 /** | |
| 3458 * Append the character equivalent of the given [scalarValue] to the given | |
| 3459 * [builder]. Use the [startIndex] and [endIndex] to report an error, and | |
| 3460 * don't append anything to the builder, if the scalar value is invalid. The | |
| 3461 * [escapeSequence] is the escape sequence that was parsed to produce the | |
| 3462 * scalar value (used for error reporting). | |
| 3463 */ | |
| 3464 void _appendScalarValue(StringBuffer buffer, String escapeSequence, | |
| 3465 int scalarValue, int startIndex, int endIndex) { | |
| 3466 if (scalarValue < 0 || | |
| 3467 scalarValue > Character.MAX_CODE_POINT || | |
| 3468 (scalarValue >= 0xD800 && scalarValue <= 0xDFFF)) { | |
| 3469 _reportErrorForCurrentToken( | |
| 3470 ParserErrorCode.INVALID_CODE_POINT, [escapeSequence]); | |
| 3471 return; | |
| 3472 } | |
| 3473 if (scalarValue < Character.MAX_VALUE) { | |
| 3474 buffer.writeCharCode(scalarValue); | |
| 3475 } else { | |
| 3476 buffer.write(Character.toChars(scalarValue)); | |
| 3477 } | |
| 3478 } | |
| 3479 | |
| 3480 /** | |
| 3481 * Return the content of a string with the given literal representation. The | |
| 3482 * [lexeme] is the literal representation of the string. The flag [isFirst] is | |
| 3483 * `true` if this is the first token in a string literal. The flag [isLast] is | |
| 3484 * `true` if this is the last token in a string literal. | |
| 3485 */ | |
| 3486 String _computeStringValue(String lexeme, bool isFirst, bool isLast) { | |
| 3487 StringLexemeHelper helper = new StringLexemeHelper(lexeme, isFirst, isLast); | |
| 3488 int start = helper.start; | |
| 3489 int end = helper.end; | |
| 3490 bool stringEndsAfterStart = end >= start; | |
| 3491 assert(stringEndsAfterStart); | |
| 3492 if (!stringEndsAfterStart) { | |
| 3493 AnalysisEngine.instance.logger.logError( | |
| 3494 "Internal error: computeStringValue($lexeme, $isFirst, $isLast)"); | |
| 3495 return ""; | |
| 3496 } | |
| 3497 if (helper.isRaw) { | |
| 3498 return lexeme.substring(start, end); | |
| 3499 } | |
| 3500 StringBuffer buffer = new StringBuffer(); | |
| 3501 int index = start; | |
| 3502 while (index < end) { | |
| 3503 index = _translateCharacter(buffer, lexeme, index); | |
| 3504 } | |
| 3505 return buffer.toString(); | |
| 3506 } | |
| 3507 | |
| 3508 /** | |
| 3509 * Convert the given [method] declaration into the nearest valid top-level | |
| 3510 * function declaration (that is, the function declaration that most closely | |
| 3511 * captures the components of the given method declaration). | |
| 3512 */ | |
| 3513 FunctionDeclaration _convertToFunctionDeclaration(MethodDeclaration method) => | |
| 3514 new FunctionDeclaration(method.documentationComment, method.metadata, | |
| 3515 method.externalKeyword, method.returnType, method.propertyKeyword, | |
| 3516 method.name, new FunctionExpression( | |
| 3517 method.typeParameters, method.parameters, method.body)); | |
| 3518 | |
| 3519 /** | |
| 3520 * Return `true` if the current token could be the start of a compilation unit | |
| 3521 * member. This method is used for recovery purposes to decide when to stop | |
| 3522 * skipping tokens after finding an error while parsing a compilation unit | |
| 3523 * member. | |
| 3524 */ | |
| 3525 bool _couldBeStartOfCompilationUnitMember() { | |
| 3526 if ((_matchesKeyword(Keyword.IMPORT) || | |
| 3527 _matchesKeyword(Keyword.EXPORT) || | |
| 3528 _matchesKeyword(Keyword.LIBRARY) || | |
| 3529 _matchesKeyword(Keyword.PART)) && | |
| 3530 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 3531 !_tokenMatches(_peek(), TokenType.LT)) { | |
| 3532 // This looks like the start of a directive | |
| 3533 return true; | |
| 3534 } else if (_matchesKeyword(Keyword.CLASS)) { | |
| 3535 // This looks like the start of a class definition | |
| 3536 return true; | |
| 3537 } else if (_matchesKeyword(Keyword.TYPEDEF) && | |
| 3538 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 3539 !_tokenMatches(_peek(), TokenType.LT)) { | |
| 3540 // This looks like the start of a typedef | |
| 3541 return true; | |
| 3542 } else if (_matchesKeyword(Keyword.VOID) || | |
| 3543 ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | |
| 3544 _tokenMatchesIdentifier(_peek())) || | |
| 3545 (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek()))) { | |
| 3546 // This looks like the start of a function | |
| 3547 return true; | |
| 3548 } else if (_matchesIdentifier()) { | |
| 3549 if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 3550 // This looks like the start of a function | |
| 3551 return true; | |
| 3552 } | |
| 3553 Token token = _skipReturnType(_currentToken); | |
| 3554 if (token == null) { | |
| 3555 return false; | |
| 3556 } | |
| 3557 if (_matchesKeyword(Keyword.GET) || | |
| 3558 _matchesKeyword(Keyword.SET) || | |
| 3559 (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) || | |
| 3560 _matchesIdentifier()) { | |
| 3561 return true; | |
| 3562 } | |
| 3563 } | |
| 3564 return false; | |
| 3565 } | |
| 3566 | |
| 3567 /** | |
| 3568 * Return a synthetic identifier. | |
| 3569 */ | |
| 3570 SimpleIdentifier _createSyntheticIdentifier() { | |
| 3571 Token syntheticToken; | |
| 3572 if (_currentToken.type == TokenType.KEYWORD) { | |
| 3573 // Consider current keyword token as an identifier. | |
| 3574 // It is not always true, e.g. "^is T" where "^" is place the place for | |
| 3575 // synthetic identifier. By creating SyntheticStringToken we can | |
| 3576 // distinguish a real identifier from synthetic. In the code completion | |
| 3577 // behavior will depend on a cursor position - before or on "is". | |
| 3578 syntheticToken = _injectToken(new SyntheticStringToken( | |
| 3579 TokenType.IDENTIFIER, _currentToken.lexeme, _currentToken.offset)); | |
| 3580 } else { | |
| 3581 syntheticToken = _createSyntheticToken(TokenType.IDENTIFIER); | |
| 3582 } | |
| 3583 return new SimpleIdentifier(syntheticToken); | |
| 3584 } | |
| 3585 | |
| 3586 /** | |
| 3587 * Return a synthetic token representing the given [keyword]. | |
| 3588 */ | |
| 3589 Token _createSyntheticKeyword(Keyword keyword) => _injectToken( | |
| 3590 new Parser_SyntheticKeywordToken(keyword, _currentToken.offset)); | |
| 3591 | |
| 3592 /** | |
| 3593 * Return a synthetic string literal. | |
| 3594 */ | |
| 3595 SimpleStringLiteral _createSyntheticStringLiteral() => | |
| 3596 new SimpleStringLiteral(_createSyntheticToken(TokenType.STRING), ""); | |
| 3597 | |
| 3598 /** | |
| 3599 * Return a synthetic token with the given [type]. | |
| 3600 */ | |
| 3601 Token _createSyntheticToken(TokenType type) => | |
| 3602 _injectToken(new StringToken(type, "", _currentToken.offset)); | |
| 3603 | |
| 3604 /** | |
| 3605 * Create and return a new token with the given [type]. The token will replace | |
| 3606 * the first portion of the given [token], so it will have the same offset and | |
| 3607 * will have any comments that might have preceeded the token. | |
| 3608 */ | |
| 3609 Token _createToken(Token token, TokenType type, {bool isBegin: false}) { | |
| 3610 CommentToken comments = token.precedingComments; | |
| 3611 if (comments == null) { | |
| 3612 if (isBegin) { | |
| 3613 return new BeginToken(type, token.offset); | |
| 3614 } | |
| 3615 return new Token(type, token.offset); | |
| 3616 } else if (isBegin) { | |
| 3617 return new BeginTokenWithComment(type, token.offset, comments); | |
| 3618 } | |
| 3619 return new TokenWithComment(type, token.offset, comments); | |
| 3620 } | |
| 3621 | |
| 3622 /** | |
| 3623 * Check that the given [expression] is assignable and report an error if it | |
| 3624 * isn't. | |
| 3625 * | |
| 3626 * assignableExpression ::= | |
| 3627 * primary (arguments* assignableSelector)+ | |
| 3628 * | 'super' unconditionalAssignableSelector | |
| 3629 * | identifier | |
| 3630 * | |
| 3631 * unconditionalAssignableSelector ::= | |
| 3632 * '[' expression ']' | |
| 3633 * | '.' identifier | |
| 3634 * | |
| 3635 * assignableSelector ::= | |
| 3636 * unconditionalAssignableSelector | |
| 3637 * | '?.' identifier | |
| 3638 */ | |
| 3639 void _ensureAssignable(Expression expression) { | |
| 3640 if (expression != null && !expression.isAssignable) { | |
| 3641 _reportErrorForCurrentToken( | |
| 3642 ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE); | |
| 3643 } | |
| 3644 } | |
| 3645 | |
| 3646 /** | |
| 3647 * If the current token has the expected type, return it after advancing to | |
| 3648 * the next token. Otherwise report an error and return the current token | |
| 3649 * without advancing. | |
| 3650 * | |
| 3651 * Note that the method [_expectGt] should be used if the argument to this | |
| 3652 * method would be [TokenType.GT]. | |
| 3653 * | |
| 3654 * The [type] is the type of token that is expected. | |
| 3655 */ | |
| 3656 Token _expect(TokenType type) { | |
| 3657 if (_matches(type)) { | |
| 3658 return getAndAdvance(); | |
| 3659 } | |
| 3660 // Remove uses of this method in favor of matches? | |
| 3661 // Pass in the error code to use to report the error? | |
| 3662 if (type == TokenType.SEMICOLON) { | |
| 3663 if (_tokenMatches(_currentToken.next, TokenType.SEMICOLON)) { | |
| 3664 _reportErrorForCurrentToken( | |
| 3665 ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]); | |
| 3666 _advance(); | |
| 3667 return getAndAdvance(); | |
| 3668 } | |
| 3669 _reportErrorForToken(ParserErrorCode.EXPECTED_TOKEN, | |
| 3670 _currentToken.previous, [type.lexeme]); | |
| 3671 } else { | |
| 3672 _reportErrorForCurrentToken( | |
| 3673 ParserErrorCode.EXPECTED_TOKEN, [type.lexeme]); | |
| 3674 } | |
| 3675 return _currentToken; | |
| 3676 } | |
| 3677 | |
| 3678 /** | |
| 3679 * If the current token has the type [TokenType.GT], return it after advancing | |
| 3680 * to the next token. Otherwise report an error and return the current token | |
| 3681 * without advancing. | |
| 3682 */ | |
| 3683 Token _expectGt() { | |
| 3684 if (_matchesGt()) { | |
| 3685 return getAndAdvance(); | |
| 3686 } | |
| 3687 _reportErrorForCurrentToken( | |
| 3688 ParserErrorCode.EXPECTED_TOKEN, [TokenType.GT.lexeme]); | |
| 3689 return _currentToken; | |
| 3690 } | |
| 3691 | |
| 3692 /** | |
| 3693 * If the current token is a keyword matching the given [keyword], return it | |
| 3694 * after advancing to the next token. Otherwise report an error and return the | |
| 3695 * current token without advancing. | |
| 3696 */ | |
| 3697 Token _expectKeyword(Keyword keyword) { | |
| 3698 if (_matchesKeyword(keyword)) { | |
| 3699 return getAndAdvance(); | |
| 3700 } | |
| 3701 // Remove uses of this method in favor of matches? | |
| 3702 // Pass in the error code to use to report the error? | |
| 3703 _reportErrorForCurrentToken( | |
| 3704 ParserErrorCode.EXPECTED_TOKEN, [keyword.syntax]); | |
| 3705 return _currentToken; | |
| 3706 } | |
| 3707 | |
| 3708 /** | |
| 3709 * If the current token is a semicolon, return it after advancing to the next | |
| 3710 * token. Otherwise report an error and create a synthetic semicolon. | |
| 3711 */ | |
| 3712 Token _expectSemicolon() { | |
| 3713 // TODO(scheglov) consider pushing this behavior into [_expect] | |
| 3714 if (_matches(TokenType.SEMICOLON)) { | |
| 3715 return getAndAdvance(); | |
| 3716 } else { | |
| 3717 _reportErrorForToken( | |
| 3718 ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, [";"]); | |
| 3719 return _createSyntheticToken(TokenType.SEMICOLON); | |
| 3720 } | |
| 3721 } | |
| 3722 | |
| 3723 /** | |
| 3724 * Search the given list of [ranges] for a range that contains the given | |
| 3725 * [index]. Return the range that was found, or `null` if none of the ranges | |
| 3726 * contain the index. | |
| 3727 */ | |
| 3728 List<int> _findRange(List<List<int>> ranges, int index) { | |
| 3729 int rangeCount = ranges.length; | |
| 3730 for (int i = 0; i < rangeCount; i++) { | |
| 3731 List<int> range = ranges[i]; | |
| 3732 if (range[0] <= index && index <= range[1]) { | |
| 3733 return range; | |
| 3734 } else if (index < range[0]) { | |
| 3735 return null; | |
| 3736 } | |
| 3737 } | |
| 3738 return null; | |
| 3739 } | |
| 3740 | |
| 3741 /** | |
| 3742 * Return a list of the ranges of characters in the given [comment] that | |
| 3743 * should be treated as code blocks. | |
| 3744 */ | |
| 3745 List<List<int>> _getCodeBlockRanges(String comment) { | |
| 3746 List<List<int>> ranges = new List<List<int>>(); | |
| 3747 int length = comment.length; | |
| 3748 if (length < 3) { | |
| 3749 return ranges; | |
| 3750 } | |
| 3751 int index = 0; | |
| 3752 int firstChar = comment.codeUnitAt(0); | |
| 3753 if (firstChar == 0x2F) { | |
| 3754 int secondChar = comment.codeUnitAt(1); | |
| 3755 int thirdChar = comment.codeUnitAt(2); | |
| 3756 if ((secondChar == 0x2A && thirdChar == 0x2A) || | |
| 3757 (secondChar == 0x2F && thirdChar == 0x2F)) { | |
| 3758 index = 3; | |
| 3759 } | |
| 3760 } | |
| 3761 while (index < length) { | |
| 3762 int currentChar = comment.codeUnitAt(index); | |
| 3763 if (currentChar == 0xD || currentChar == 0xA) { | |
| 3764 index = index + 1; | |
| 3765 while (index < length && | |
| 3766 Character.isWhitespace(comment.codeUnitAt(index))) { | |
| 3767 index = index + 1; | |
| 3768 } | |
| 3769 if (StringUtilities.startsWith6( | |
| 3770 comment, index, 0x2A, 0x20, 0x20, 0x20, 0x20, 0x20)) { | |
| 3771 int end = index + 6; | |
| 3772 while (end < length && | |
| 3773 comment.codeUnitAt(end) != 0xD && | |
| 3774 comment.codeUnitAt(end) != 0xA) { | |
| 3775 end = end + 1; | |
| 3776 } | |
| 3777 ranges.add(<int>[index, end]); | |
| 3778 index = end; | |
| 3779 } | |
| 3780 } else if (index + 1 < length && | |
| 3781 currentChar == 0x5B && | |
| 3782 comment.codeUnitAt(index + 1) == 0x3A) { | |
| 3783 int end = StringUtilities.indexOf2(comment, index + 2, 0x3A, 0x5D); | |
| 3784 if (end < 0) { | |
| 3785 end = length; | |
| 3786 } | |
| 3787 ranges.add(<int>[index, end]); | |
| 3788 index = end + 1; | |
| 3789 } else { | |
| 3790 index = index + 1; | |
| 3791 } | |
| 3792 } | |
| 3793 return ranges; | |
| 3794 } | |
| 3795 | |
| 3796 /** | |
| 3797 * Return the end token associated with the given [beginToken], or `null` if | |
| 3798 * either the given token is not a begin token or it does not have an end | |
| 3799 * token associated with it. | |
| 3800 */ | |
| 3801 Token _getEndToken(Token beginToken) { | |
| 3802 if (beginToken is BeginToken) { | |
| 3803 return beginToken.endToken; | |
| 3804 } | |
| 3805 return null; | |
| 3806 } | |
| 3807 | |
| 3808 /** | |
| 3809 * Inject the given [token] into the token stream immediately before the | |
| 3810 * current token. | |
| 3811 */ | |
| 3812 Token _injectToken(Token token) { | |
| 3813 Token previous = _currentToken.previous; | |
| 3814 token.setNext(_currentToken); | |
| 3815 previous.setNext(token); | |
| 3816 return token; | |
| 3817 } | |
| 3818 | |
| 3819 /** | |
| 3820 * Return `true` if the current token appears to be the beginning of a | |
| 3821 * function declaration. | |
| 3822 */ | |
| 3823 bool _isFunctionDeclaration() { | |
| 3824 if (_matchesKeyword(Keyword.VOID)) { | |
| 3825 return true; | |
| 3826 } | |
| 3827 Token afterReturnType = _skipTypeName(_currentToken); | |
| 3828 if (afterReturnType == null) { | |
| 3829 // There was no return type, but it is optional, so go back to where we | |
| 3830 // started. | |
| 3831 afterReturnType = _currentToken; | |
| 3832 } | |
| 3833 Token afterIdentifier = _skipSimpleIdentifier(afterReturnType); | |
| 3834 if (afterIdentifier == null) { | |
| 3835 // It's possible that we parsed the function name as if it were a type | |
| 3836 // name, so see whether it makes sense if we assume that there is no type. | |
| 3837 afterIdentifier = _skipSimpleIdentifier(_currentToken); | |
| 3838 } | |
| 3839 if (afterIdentifier == null) { | |
| 3840 return false; | |
| 3841 } | |
| 3842 if (_isFunctionExpression(afterIdentifier)) { | |
| 3843 return true; | |
| 3844 } | |
| 3845 // It's possible that we have found a getter. While this isn't valid at this | |
| 3846 // point we test for it in order to recover better. | |
| 3847 if (_matchesKeyword(Keyword.GET)) { | |
| 3848 Token afterName = _skipSimpleIdentifier(_currentToken.next); | |
| 3849 if (afterName == null) { | |
| 3850 return false; | |
| 3851 } | |
| 3852 return _tokenMatches(afterName, TokenType.FUNCTION) || | |
| 3853 _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET); | |
| 3854 } else if (_tokenMatchesKeyword(afterReturnType, Keyword.GET)) { | |
| 3855 Token afterName = _skipSimpleIdentifier(afterReturnType.next); | |
| 3856 if (afterName == null) { | |
| 3857 return false; | |
| 3858 } | |
| 3859 return _tokenMatches(afterName, TokenType.FUNCTION) || | |
| 3860 _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET); | |
| 3861 } | |
| 3862 return false; | |
| 3863 } | |
| 3864 | |
| 3865 /** | |
| 3866 * Return `true` if the given [token] appears to be the beginning of a | |
| 3867 * function expression. | |
| 3868 */ | |
| 3869 bool _isFunctionExpression(Token token) { | |
| 3870 // Function expressions aren't allowed in initializer lists. | |
| 3871 if (_inInitializer) { | |
| 3872 return false; | |
| 3873 } | |
| 3874 Token afterTypeParameters = _skipTypeParameterList(token); | |
| 3875 if (afterTypeParameters == null) { | |
| 3876 afterTypeParameters = token; | |
| 3877 } | |
| 3878 Token afterParameters = _skipFormalParameterList(afterTypeParameters); | |
| 3879 if (afterParameters == null) { | |
| 3880 return false; | |
| 3881 } | |
| 3882 if (afterParameters | |
| 3883 .matchesAny([TokenType.OPEN_CURLY_BRACKET, TokenType.FUNCTION])) { | |
| 3884 return true; | |
| 3885 } | |
| 3886 String lexeme = afterParameters.lexeme; | |
| 3887 return lexeme == ASYNC || lexeme == SYNC; | |
| 3888 } | |
| 3889 | |
| 3890 /** | |
| 3891 * Return `true` if the given [character] is a valid hexadecimal digit. | |
| 3892 */ | |
| 3893 bool _isHexDigit(int character) => (0x30 <= character && character <= 0x39) || | |
| 3894 (0x41 <= character && character <= 0x46) || | |
| 3895 (0x61 <= character && character <= 0x66); | |
| 3896 | |
| 3897 /** | |
| 3898 * Return `true` if the current token is the first token in an initialized | |
| 3899 * variable declaration rather than an expression. This method assumes that we | |
| 3900 * have already skipped past any metadata that might be associated with the | |
| 3901 * declaration. | |
| 3902 * | |
| 3903 * initializedVariableDeclaration ::= | |
| 3904 * declaredIdentifier ('=' expression)? (',' initializedIdentifier)* | |
| 3905 * | |
| 3906 * declaredIdentifier ::= | |
| 3907 * metadata finalConstVarOrType identifier | |
| 3908 * | |
| 3909 * finalConstVarOrType ::= | |
| 3910 * 'final' type? | |
| 3911 * | 'const' type? | |
| 3912 * | 'var' | |
| 3913 * | type | |
| 3914 * | |
| 3915 * type ::= | |
| 3916 * qualified typeArguments? | |
| 3917 * | |
| 3918 * initializedIdentifier ::= | |
| 3919 * identifier ('=' expression)? | |
| 3920 */ | |
| 3921 bool _isInitializedVariableDeclaration() { | |
| 3922 if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.VAR)) { | |
| 3923 // An expression cannot start with a keyword other than 'const', | |
| 3924 // 'rethrow', or 'throw'. | |
| 3925 return true; | |
| 3926 } | |
| 3927 if (_matchesKeyword(Keyword.CONST)) { | |
| 3928 // Look to see whether we might be at the start of a list or map literal, | |
| 3929 // otherwise this should be the start of a variable declaration. | |
| 3930 return !_peek().matchesAny([ | |
| 3931 TokenType.LT, | |
| 3932 TokenType.OPEN_CURLY_BRACKET, | |
| 3933 TokenType.OPEN_SQUARE_BRACKET, | |
| 3934 TokenType.INDEX | |
| 3935 ]); | |
| 3936 } | |
| 3937 // We know that we have an identifier, and need to see whether it might be | |
| 3938 // a type name. | |
| 3939 Token token = _skipTypeName(_currentToken); | |
| 3940 if (token == null) { | |
| 3941 // There was no type name, so this can't be a declaration. | |
| 3942 return false; | |
| 3943 } | |
| 3944 token = _skipSimpleIdentifier(token); | |
| 3945 if (token == null) { | |
| 3946 return false; | |
| 3947 } | |
| 3948 TokenType type = token.type; | |
| 3949 return type == TokenType.EQ || | |
| 3950 type == TokenType.COMMA || | |
| 3951 type == TokenType.SEMICOLON || | |
| 3952 _tokenMatchesKeyword(token, Keyword.IN); | |
| 3953 } | |
| 3954 | |
| 3955 bool _isLikelyParameterList() { | |
| 3956 if (_matches(TokenType.OPEN_PAREN)) { | |
| 3957 return true; | |
| 3958 } | |
| 3959 if (!parseGenericMethods) { | |
| 3960 return false; | |
| 3961 } | |
| 3962 Token token = _skipTypeArgumentList(_currentToken); | |
| 3963 return token != null && _tokenMatches(token, TokenType.OPEN_PAREN); | |
| 3964 } | |
| 3965 | |
| 3966 /** | |
| 3967 * Given that we have just found bracketed text within the given [comment], | |
| 3968 * look to see whether that text is (a) followed by a parenthesized link | |
| 3969 * address, (b) followed by a colon, or (c) followed by optional whitespace | |
| 3970 * and another square bracket. The [rightIndex] is the index of the right | |
| 3971 * bracket. Return `true` if the bracketed text is followed by a link address. | |
| 3972 * | |
| 3973 * This method uses the syntax described by the | |
| 3974 * <a href="http://daringfireball.net/projects/markdown/syntax">markdown</a> | |
| 3975 * project. | |
| 3976 */ | |
| 3977 bool _isLinkText(String comment, int rightIndex) { | |
| 3978 int length = comment.length; | |
| 3979 int index = rightIndex + 1; | |
| 3980 if (index >= length) { | |
| 3981 return false; | |
| 3982 } | |
| 3983 int nextChar = comment.codeUnitAt(index); | |
| 3984 if (nextChar == 0x28 || nextChar == 0x3A) { | |
| 3985 return true; | |
| 3986 } | |
| 3987 while (Character.isWhitespace(nextChar)) { | |
| 3988 index = index + 1; | |
| 3989 if (index >= length) { | |
| 3990 return false; | |
| 3991 } | |
| 3992 nextChar = comment.codeUnitAt(index); | |
| 3993 } | |
| 3994 return nextChar == 0x5B; | |
| 3995 } | |
| 3996 | |
| 3997 /** | |
| 3998 * Return `true` if the given [startToken] appears to be the beginning of an | |
| 3999 * operator declaration. | |
| 4000 */ | |
| 4001 bool _isOperator(Token startToken) { | |
| 4002 // Accept any operator here, even if it is not user definable. | |
| 4003 if (!startToken.isOperator) { | |
| 4004 return false; | |
| 4005 } | |
| 4006 // Token "=" means that it is actually field initializer. | |
| 4007 if (startToken.type == TokenType.EQ) { | |
| 4008 return false; | |
| 4009 } | |
| 4010 // Consume all operator tokens. | |
| 4011 Token token = startToken.next; | |
| 4012 while (token.isOperator) { | |
| 4013 token = token.next; | |
| 4014 } | |
| 4015 // Formal parameter list is expect now. | |
| 4016 return _tokenMatches(token, TokenType.OPEN_PAREN); | |
| 4017 } | |
| 4018 | |
| 4019 /** | |
| 4020 * Return `true` if the current token appears to be the beginning of a switch | |
| 4021 * member. | |
| 4022 */ | |
| 4023 bool _isSwitchMember() { | |
| 4024 Token token = _currentToken; | |
| 4025 while (_tokenMatches(token, TokenType.IDENTIFIER) && | |
| 4026 _tokenMatches(token.next, TokenType.COLON)) { | |
| 4027 token = token.next.next; | |
| 4028 } | |
| 4029 if (token.type == TokenType.KEYWORD) { | |
| 4030 Keyword keyword = (token as KeywordToken).keyword; | |
| 4031 return keyword == Keyword.CASE || keyword == Keyword.DEFAULT; | |
| 4032 } | |
| 4033 return false; | |
| 4034 } | |
| 4035 | |
| 4036 /** | |
| 4037 * Return `true` if the [startToken] appears to be the first token of a type | |
| 4038 * name that is followed by a variable or field formal parameter. | |
| 4039 */ | |
| 4040 bool _isTypedIdentifier(Token startToken) { | |
| 4041 Token token = _skipReturnType(startToken); | |
| 4042 if (token == null) { | |
| 4043 return false; | |
| 4044 } else if (_tokenMatchesIdentifier(token)) { | |
| 4045 return true; | |
| 4046 } else if (_tokenMatchesKeyword(token, Keyword.THIS) && | |
| 4047 _tokenMatches(token.next, TokenType.PERIOD) && | |
| 4048 _tokenMatchesIdentifier(token.next.next)) { | |
| 4049 return true; | |
| 4050 } else if (_tokenMatchesKeyword(startToken, Keyword.VOID)) { | |
| 4051 // The keyword 'void' isn't a valid identifier, so it should be assumed to | |
| 4052 // be a type name. | |
| 4053 return true; | |
| 4054 } else if (startToken.next != token && | |
| 4055 !_tokenMatches(token, TokenType.OPEN_PAREN)) { | |
| 4056 // The type is more than a simple identifier, so it should be assumed to | |
| 4057 // be a type name. | |
| 4058 return true; | |
| 4059 } | |
| 4060 return false; | |
| 4061 } | |
| 4062 | |
| 4063 /** | |
| 4064 * Increments the error reporting lock level. If level is more than `0`, then | |
| 4065 * [reportError] wont report any error. | |
| 4066 */ | |
| 4067 void _lockErrorListener() { | |
| 4068 _errorListenerLock++; | |
| 4069 } | |
| 4070 | |
| 4071 /** | |
| 4072 * Return `true` if the current token has the given [type]. Note that the | |
| 4073 * method [_matchesGt] should be used if the argument to this method would be | |
| 4074 * [TokenType.GT]. | |
| 4075 */ | |
| 4076 bool _matches(TokenType type) => _currentToken.type == type; | |
| 4077 | |
| 4078 /** | |
| 4079 * Return `true` if the current token has a type of [TokenType.GT]. Note that | |
| 4080 * this method, unlike other variants, will modify the token stream if | |
| 4081 * possible to match desired type. In particular, if the next token is either | |
| 4082 * a '>>' or '>>>', the token stream will be re-written and `true` will be | |
| 4083 * returned. | |
| 4084 */ | |
| 4085 bool _matchesGt() { | |
| 4086 TokenType currentType = _currentToken.type; | |
| 4087 if (currentType == TokenType.GT) { | |
| 4088 return true; | |
| 4089 } else if (currentType == TokenType.GT_GT) { | |
| 4090 Token first = _createToken(_currentToken, TokenType.GT); | |
| 4091 Token second = new Token(TokenType.GT, _currentToken.offset + 1); | |
| 4092 second.setNext(_currentToken.next); | |
| 4093 first.setNext(second); | |
| 4094 _currentToken.previous.setNext(first); | |
| 4095 _currentToken = first; | |
| 4096 return true; | |
| 4097 } else if (currentType == TokenType.GT_EQ) { | |
| 4098 Token first = _createToken(_currentToken, TokenType.GT); | |
| 4099 Token second = new Token(TokenType.EQ, _currentToken.offset + 1); | |
| 4100 second.setNext(_currentToken.next); | |
| 4101 first.setNext(second); | |
| 4102 _currentToken.previous.setNext(first); | |
| 4103 _currentToken = first; | |
| 4104 return true; | |
| 4105 } else if (currentType == TokenType.GT_GT_EQ) { | |
| 4106 int offset = _currentToken.offset; | |
| 4107 Token first = _createToken(_currentToken, TokenType.GT); | |
| 4108 Token second = new Token(TokenType.GT, offset + 1); | |
| 4109 Token third = new Token(TokenType.EQ, offset + 2); | |
| 4110 third.setNext(_currentToken.next); | |
| 4111 second.setNext(third); | |
| 4112 first.setNext(second); | |
| 4113 _currentToken.previous.setNext(first); | |
| 4114 _currentToken = first; | |
| 4115 return true; | |
| 4116 } | |
| 4117 return false; | |
| 4118 } | |
| 4119 | |
| 4120 /** | |
| 4121 * Return `true` if the current token is a valid identifier. Valid identifiers | |
| 4122 * include built-in identifiers (pseudo-keywords). | |
| 4123 */ | |
| 4124 bool _matchesIdentifier() => _tokenMatchesIdentifier(_currentToken); | |
| 4125 | |
| 4126 /** | |
| 4127 * Return `true` if the current token matches the given [keyword]. | |
| 4128 */ | |
| 4129 bool _matchesKeyword(Keyword keyword) => | |
| 4130 _tokenMatchesKeyword(_currentToken, keyword); | |
| 4131 | |
| 4132 /** | |
| 4133 * Return `true` if the current token matches the given [identifier]. | |
| 4134 */ | |
| 4135 bool _matchesString(String identifier) => | |
| 4136 _currentToken.type == TokenType.IDENTIFIER && | |
| 4137 _currentToken.lexeme == identifier; | |
| 4138 | |
| 4139 /** | |
| 4140 * If the current token has the given [type], then advance to the next token | |
| 4141 * and return `true`. Otherwise, return `false` without advancing. This method | |
| 4142 * should not be invoked with an argument value of [TokenType.GT]. | |
| 4143 */ | |
| 4144 bool _optional(TokenType type) { | |
| 4145 if (_matches(type)) { | |
| 4146 _advance(); | |
| 4147 return true; | |
| 4148 } | |
| 4149 return false; | |
| 4150 } | |
| 4151 | |
| 4152 /** | |
| 4153 * Parse an additive expression. Return the additive expression that was | |
| 4154 * parsed. | |
| 4155 * | |
| 4156 * additiveExpression ::= | |
| 4157 * multiplicativeExpression (additiveOperator multiplicativeExpression
)* | |
| 4158 * | 'super' (additiveOperator multiplicativeExpression)+ | |
| 4159 */ | |
| 4160 Expression _parseAdditiveExpression() { | |
| 4161 Expression expression; | |
| 4162 if (_matchesKeyword(Keyword.SUPER) && | |
| 4163 _currentToken.next.type.isAdditiveOperator) { | |
| 4164 expression = new SuperExpression(getAndAdvance()); | |
| 4165 } else { | |
| 4166 expression = _parseMultiplicativeExpression(); | |
| 4167 } | |
| 4168 while (_currentToken.type.isAdditiveOperator) { | |
| 4169 Token operator = getAndAdvance(); | |
| 4170 expression = new BinaryExpression( | |
| 4171 expression, operator, _parseMultiplicativeExpression()); | |
| 4172 } | |
| 4173 return expression; | |
| 4174 } | |
| 4175 | |
| 4176 /** | |
| 4177 * Parse an assert statement. Return the assert statement. | |
| 4178 * | |
| 4179 * assertStatement ::= | |
| 4180 * 'assert' '(' conditionalExpression ')' ';' | |
| 4181 */ | |
| 4182 AssertStatement _parseAssertStatement() { | |
| 4183 Token keyword = _expectKeyword(Keyword.ASSERT); | |
| 4184 Token leftParen = _expect(TokenType.OPEN_PAREN); | |
| 4185 Expression expression = parseExpression2(); | |
| 4186 if (expression is AssignmentExpression) { | |
| 4187 _reportErrorForNode( | |
| 4188 ParserErrorCode.ASSERT_DOES_NOT_TAKE_ASSIGNMENT, expression); | |
| 4189 } else if (expression is CascadeExpression) { | |
| 4190 _reportErrorForNode( | |
| 4191 ParserErrorCode.ASSERT_DOES_NOT_TAKE_CASCADE, expression); | |
| 4192 } else if (expression is ThrowExpression) { | |
| 4193 _reportErrorForNode( | |
| 4194 ParserErrorCode.ASSERT_DOES_NOT_TAKE_THROW, expression); | |
| 4195 } else if (expression is RethrowExpression) { | |
| 4196 _reportErrorForNode( | |
| 4197 ParserErrorCode.ASSERT_DOES_NOT_TAKE_RETHROW, expression); | |
| 4198 } | |
| 4199 Token rightParen = _expect(TokenType.CLOSE_PAREN); | |
| 4200 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 4201 return new AssertStatement( | |
| 4202 keyword, leftParen, expression, rightParen, semicolon); | |
| 4203 } | |
| 4204 | |
| 4205 /** | |
| 4206 * Parse an assignable expression. The [primaryAllowed] is `true` if the | |
| 4207 * expression is allowed to be a primary without any assignable selector. | |
| 4208 * Return the assignable expression that was parsed. | |
| 4209 * | |
| 4210 * assignableExpression ::= | |
| 4211 * primary (arguments* assignableSelector)+ | |
| 4212 * | 'super' unconditionalAssignableSelector | |
| 4213 * | identifier | |
| 4214 */ | |
| 4215 Expression _parseAssignableExpression(bool primaryAllowed) { | |
| 4216 if (_matchesKeyword(Keyword.SUPER)) { | |
| 4217 return _parseAssignableSelector( | |
| 4218 new SuperExpression(getAndAdvance()), false, allowConditional: false); | |
| 4219 } | |
| 4220 // | |
| 4221 // A primary expression can start with an identifier. We resolve the | |
| 4222 // ambiguity by determining whether the primary consists of anything other | |
| 4223 // than an identifier and/or is followed by an assignableSelector. | |
| 4224 // | |
| 4225 Expression expression = _parsePrimaryExpression(); | |
| 4226 bool isOptional = primaryAllowed || expression is SimpleIdentifier; | |
| 4227 while (true) { | |
| 4228 while (_isLikelyParameterList()) { | |
| 4229 TypeArgumentList typeArguments = null; | |
| 4230 if (_matches(TokenType.LT)) { | |
| 4231 typeArguments = parseTypeArgumentList(); | |
| 4232 } | |
| 4233 ArgumentList argumentList = parseArgumentList(); | |
| 4234 if (expression is SimpleIdentifier) { | |
| 4235 expression = new MethodInvocation(null, null, | |
| 4236 expression as SimpleIdentifier, typeArguments, argumentList); | |
| 4237 } else if (expression is PrefixedIdentifier) { | |
| 4238 PrefixedIdentifier identifier = expression as PrefixedIdentifier; | |
| 4239 expression = new MethodInvocation(identifier.prefix, | |
| 4240 identifier.period, identifier.identifier, typeArguments, | |
| 4241 argumentList); | |
| 4242 } else if (expression is PropertyAccess) { | |
| 4243 PropertyAccess access = expression as PropertyAccess; | |
| 4244 expression = new MethodInvocation(access.target, access.operator, | |
| 4245 access.propertyName, typeArguments, argumentList); | |
| 4246 } else { | |
| 4247 expression = new FunctionExpressionInvocation( | |
| 4248 expression, typeArguments, argumentList); | |
| 4249 } | |
| 4250 if (!primaryAllowed) { | |
| 4251 isOptional = false; | |
| 4252 } | |
| 4253 } | |
| 4254 Expression selectorExpression = _parseAssignableSelector( | |
| 4255 expression, isOptional || (expression is PrefixedIdentifier)); | |
| 4256 if (identical(selectorExpression, expression)) { | |
| 4257 if (!isOptional && (expression is PrefixedIdentifier)) { | |
| 4258 PrefixedIdentifier identifier = expression as PrefixedIdentifier; | |
| 4259 expression = new PropertyAccess( | |
| 4260 identifier.prefix, identifier.period, identifier.identifier); | |
| 4261 } | |
| 4262 return expression; | |
| 4263 } | |
| 4264 expression = selectorExpression; | |
| 4265 isOptional = true; | |
| 4266 } | |
| 4267 } | |
| 4268 | |
| 4269 /** | |
| 4270 * Parse an assignable selector. The [prefix] is the expression preceding the | |
| 4271 * selector. The [optional] is `true` if the selector is optional. Return the | |
| 4272 * assignable selector that was parsed, or the original prefix if there was no | |
| 4273 * assignable selector. If [allowConditional] is false, then the '?.' | |
| 4274 * operator will still be parsed, but a parse error will be generated. | |
| 4275 * | |
| 4276 * unconditionalAssignableSelector ::= | |
| 4277 * '[' expression ']' | |
| 4278 * | '.' identifier | |
| 4279 * | |
| 4280 * assignableSelector ::= | |
| 4281 * unconditionalAssignableSelector | |
| 4282 * | '?.' identifier | |
| 4283 */ | |
| 4284 Expression _parseAssignableSelector(Expression prefix, bool optional, | |
| 4285 {bool allowConditional: true}) { | |
| 4286 if (_matches(TokenType.OPEN_SQUARE_BRACKET)) { | |
| 4287 Token leftBracket = getAndAdvance(); | |
| 4288 bool wasInInitializer = _inInitializer; | |
| 4289 _inInitializer = false; | |
| 4290 try { | |
| 4291 Expression index = parseExpression2(); | |
| 4292 Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET); | |
| 4293 return new IndexExpression.forTarget( | |
| 4294 prefix, leftBracket, index, rightBracket); | |
| 4295 } finally { | |
| 4296 _inInitializer = wasInInitializer; | |
| 4297 } | |
| 4298 } else if (_matches(TokenType.PERIOD) || | |
| 4299 _matches(TokenType.QUESTION_PERIOD)) { | |
| 4300 if (_matches(TokenType.QUESTION_PERIOD) && !allowConditional) { | |
| 4301 _reportErrorForCurrentToken( | |
| 4302 ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [_currentToken.lexeme]); | |
| 4303 } | |
| 4304 Token operator = getAndAdvance(); | |
| 4305 return new PropertyAccess(prefix, operator, parseSimpleIdentifier()); | |
| 4306 } else { | |
| 4307 if (!optional) { | |
| 4308 // Report the missing selector. | |
| 4309 _reportErrorForCurrentToken( | |
| 4310 ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR); | |
| 4311 } | |
| 4312 return prefix; | |
| 4313 } | |
| 4314 } | |
| 4315 | |
| 4316 /** | |
| 4317 * Parse a await expression. Return the await expression that was parsed. | |
| 4318 * | |
| 4319 * awaitExpression ::= | |
| 4320 * 'await' unaryExpression | |
| 4321 */ | |
| 4322 AwaitExpression _parseAwaitExpression() { | |
| 4323 Token awaitToken = getAndAdvance(); | |
| 4324 Expression expression = _parseUnaryExpression(); | |
| 4325 return new AwaitExpression(awaitToken, expression); | |
| 4326 } | |
| 4327 | |
| 4328 /** | |
| 4329 * Parse a bitwise and expression. Return the bitwise and expression that was | |
| 4330 * parsed. | |
| 4331 * | |
| 4332 * bitwiseAndExpression ::= | |
| 4333 * shiftExpression ('&' shiftExpression)* | |
| 4334 * | 'super' ('&' shiftExpression)+ | |
| 4335 */ | |
| 4336 Expression _parseBitwiseAndExpression() { | |
| 4337 Expression expression; | |
| 4338 if (_matchesKeyword(Keyword.SUPER) && | |
| 4339 _tokenMatches(_peek(), TokenType.AMPERSAND)) { | |
| 4340 expression = new SuperExpression(getAndAdvance()); | |
| 4341 } else { | |
| 4342 expression = _parseShiftExpression(); | |
| 4343 } | |
| 4344 while (_matches(TokenType.AMPERSAND)) { | |
| 4345 Token operator = getAndAdvance(); | |
| 4346 expression = | |
| 4347 new BinaryExpression(expression, operator, _parseShiftExpression()); | |
| 4348 } | |
| 4349 return expression; | |
| 4350 } | |
| 4351 | |
| 4352 /** | |
| 4353 * Parse a bitwise exclusive-or expression. Return the bitwise exclusive-or | |
| 4354 * expression that was parsed. | |
| 4355 * | |
| 4356 * bitwiseXorExpression ::= | |
| 4357 * bitwiseAndExpression ('^' bitwiseAndExpression)* | |
| 4358 * | 'super' ('^' bitwiseAndExpression)+ | |
| 4359 */ | |
| 4360 Expression _parseBitwiseXorExpression() { | |
| 4361 Expression expression; | |
| 4362 if (_matchesKeyword(Keyword.SUPER) && | |
| 4363 _tokenMatches(_peek(), TokenType.CARET)) { | |
| 4364 expression = new SuperExpression(getAndAdvance()); | |
| 4365 } else { | |
| 4366 expression = _parseBitwiseAndExpression(); | |
| 4367 } | |
| 4368 while (_matches(TokenType.CARET)) { | |
| 4369 Token operator = getAndAdvance(); | |
| 4370 expression = new BinaryExpression( | |
| 4371 expression, operator, _parseBitwiseAndExpression()); | |
| 4372 } | |
| 4373 return expression; | |
| 4374 } | |
| 4375 | |
| 4376 /** | |
| 4377 * Parse a break statement. Return the break statement that was parsed. | |
| 4378 * | |
| 4379 * breakStatement ::= | |
| 4380 * 'break' identifier? ';' | |
| 4381 */ | |
| 4382 Statement _parseBreakStatement() { | |
| 4383 Token breakKeyword = _expectKeyword(Keyword.BREAK); | |
| 4384 SimpleIdentifier label = null; | |
| 4385 if (_matchesIdentifier()) { | |
| 4386 label = parseSimpleIdentifier(); | |
| 4387 } | |
| 4388 if (!_inLoop && !_inSwitch && label == null) { | |
| 4389 _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword); | |
| 4390 } | |
| 4391 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 4392 return new BreakStatement(breakKeyword, label, semicolon); | |
| 4393 } | |
| 4394 | |
| 4395 /** | |
| 4396 * Parse a cascade section. Return the expression representing the cascaded | |
| 4397 * method invocation. | |
| 4398 * | |
| 4399 * cascadeSection ::= | |
| 4400 * '..' (cascadeSelector typeArguments? arguments*) | |
| 4401 * (assignableSelector typeArguments? arguments*)* cascadeAssignment? | |
| 4402 * | |
| 4403 * cascadeSelector ::= | |
| 4404 * '[' expression ']' | |
| 4405 * | identifier | |
| 4406 * | |
| 4407 * cascadeAssignment ::= | |
| 4408 * assignmentOperator expressionWithoutCascade | |
| 4409 */ | |
| 4410 Expression _parseCascadeSection() { | |
| 4411 Token period = _expect(TokenType.PERIOD_PERIOD); | |
| 4412 Expression expression = null; | |
| 4413 SimpleIdentifier functionName = null; | |
| 4414 if (_matchesIdentifier()) { | |
| 4415 functionName = parseSimpleIdentifier(); | |
| 4416 } else if (_currentToken.type == TokenType.OPEN_SQUARE_BRACKET) { | |
| 4417 Token leftBracket = getAndAdvance(); | |
| 4418 bool wasInInitializer = _inInitializer; | |
| 4419 _inInitializer = false; | |
| 4420 try { | |
| 4421 Expression index = parseExpression2(); | |
| 4422 Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET); | |
| 4423 expression = new IndexExpression.forCascade( | |
| 4424 period, leftBracket, index, rightBracket); | |
| 4425 period = null; | |
| 4426 } finally { | |
| 4427 _inInitializer = wasInInitializer; | |
| 4428 } | |
| 4429 } else { | |
| 4430 _reportErrorForToken(ParserErrorCode.MISSING_IDENTIFIER, _currentToken, | |
| 4431 [_currentToken.lexeme]); | |
| 4432 functionName = _createSyntheticIdentifier(); | |
| 4433 } | |
| 4434 assert((expression == null && functionName != null) || | |
| 4435 (expression != null && functionName == null)); | |
| 4436 if (_isLikelyParameterList()) { | |
| 4437 while (_isLikelyParameterList()) { | |
| 4438 TypeArgumentList typeArguments = null; | |
| 4439 if (_matches(TokenType.LT)) { | |
| 4440 typeArguments = parseTypeArgumentList(); | |
| 4441 } | |
| 4442 if (functionName != null) { | |
| 4443 expression = new MethodInvocation(expression, period, functionName, | |
| 4444 typeArguments, parseArgumentList()); | |
| 4445 period = null; | |
| 4446 functionName = null; | |
| 4447 } else if (expression == null) { | |
| 4448 // It should not be possible to get here. | |
| 4449 expression = new MethodInvocation(expression, period, | |
| 4450 _createSyntheticIdentifier(), typeArguments, parseArgumentList()); | |
| 4451 } else { | |
| 4452 expression = new FunctionExpressionInvocation( | |
| 4453 expression, typeArguments, parseArgumentList()); | |
| 4454 } | |
| 4455 } | |
| 4456 } else if (functionName != null) { | |
| 4457 expression = new PropertyAccess(expression, period, functionName); | |
| 4458 period = null; | |
| 4459 } | |
| 4460 assert(expression != null); | |
| 4461 bool progress = true; | |
| 4462 while (progress) { | |
| 4463 progress = false; | |
| 4464 Expression selector = _parseAssignableSelector(expression, true); | |
| 4465 if (!identical(selector, expression)) { | |
| 4466 expression = selector; | |
| 4467 progress = true; | |
| 4468 while (_isLikelyParameterList()) { | |
| 4469 TypeArgumentList typeArguments = null; | |
| 4470 if (_matches(TokenType.LT)) { | |
| 4471 typeArguments = parseTypeArgumentList(); | |
| 4472 } | |
| 4473 if (expression is PropertyAccess) { | |
| 4474 PropertyAccess propertyAccess = expression as PropertyAccess; | |
| 4475 expression = new MethodInvocation(propertyAccess.target, | |
| 4476 propertyAccess.operator, propertyAccess.propertyName, | |
| 4477 typeArguments, parseArgumentList()); | |
| 4478 } else { | |
| 4479 expression = new FunctionExpressionInvocation( | |
| 4480 expression, typeArguments, parseArgumentList()); | |
| 4481 } | |
| 4482 } | |
| 4483 } | |
| 4484 } | |
| 4485 if (_currentToken.type.isAssignmentOperator) { | |
| 4486 Token operator = getAndAdvance(); | |
| 4487 _ensureAssignable(expression); | |
| 4488 expression = new AssignmentExpression( | |
| 4489 expression, operator, parseExpressionWithoutCascade()); | |
| 4490 } | |
| 4491 return expression; | |
| 4492 } | |
| 4493 | |
| 4494 /** | |
| 4495 * Parse a class declaration. The [commentAndMetadata] is the metadata to be | |
| 4496 * associated with the member. The [abstractKeyword] is the token for the | |
| 4497 * keyword 'abstract', or `null` if the keyword was not given. Return the | |
| 4498 * class declaration that was parsed. | |
| 4499 * | |
| 4500 * classDeclaration ::= | |
| 4501 * metadata 'abstract'? 'class' name typeParameterList? (extendsClause
withClause?)? implementsClause? '{' classMembers '}' | | |
| 4502 * metadata 'abstract'? 'class' mixinApplicationClass | |
| 4503 */ | |
| 4504 CompilationUnitMember _parseClassDeclaration( | |
| 4505 CommentAndMetadata commentAndMetadata, Token abstractKeyword) { | |
| 4506 Token keyword = _expectKeyword(Keyword.CLASS); | |
| 4507 if (_matchesIdentifier()) { | |
| 4508 Token next = _peek(); | |
| 4509 if (_tokenMatches(next, TokenType.LT)) { | |
| 4510 next = _skipTypeParameterList(next); | |
| 4511 if (next != null && _tokenMatches(next, TokenType.EQ)) { | |
| 4512 return _parseClassTypeAlias( | |
| 4513 commentAndMetadata, abstractKeyword, keyword); | |
| 4514 } | |
| 4515 } else if (_tokenMatches(next, TokenType.EQ)) { | |
| 4516 return _parseClassTypeAlias( | |
| 4517 commentAndMetadata, abstractKeyword, keyword); | |
| 4518 } | |
| 4519 } | |
| 4520 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 4521 String className = name.name; | |
| 4522 TypeParameterList typeParameters = null; | |
| 4523 if (_matches(TokenType.LT)) { | |
| 4524 typeParameters = parseTypeParameterList(); | |
| 4525 } | |
| 4526 // | |
| 4527 // Parse the clauses. The parser accepts clauses in any order, but will | |
| 4528 // generate errors if they are not in the order required by the | |
| 4529 // specification. | |
| 4530 // | |
| 4531 ExtendsClause extendsClause = null; | |
| 4532 WithClause withClause = null; | |
| 4533 ImplementsClause implementsClause = null; | |
| 4534 bool foundClause = true; | |
| 4535 while (foundClause) { | |
| 4536 if (_matchesKeyword(Keyword.EXTENDS)) { | |
| 4537 if (extendsClause == null) { | |
| 4538 extendsClause = parseExtendsClause(); | |
| 4539 if (withClause != null) { | |
| 4540 _reportErrorForToken( | |
| 4541 ParserErrorCode.WITH_BEFORE_EXTENDS, withClause.withKeyword); | |
| 4542 } else if (implementsClause != null) { | |
| 4543 _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_EXTENDS, | |
| 4544 implementsClause.implementsKeyword); | |
| 4545 } | |
| 4546 } else { | |
| 4547 _reportErrorForToken(ParserErrorCode.MULTIPLE_EXTENDS_CLAUSES, | |
| 4548 extendsClause.extendsKeyword); | |
| 4549 parseExtendsClause(); | |
| 4550 } | |
| 4551 } else if (_matchesKeyword(Keyword.WITH)) { | |
| 4552 if (withClause == null) { | |
| 4553 withClause = parseWithClause(); | |
| 4554 if (implementsClause != null) { | |
| 4555 _reportErrorForToken(ParserErrorCode.IMPLEMENTS_BEFORE_WITH, | |
| 4556 implementsClause.implementsKeyword); | |
| 4557 } | |
| 4558 } else { | |
| 4559 _reportErrorForToken( | |
| 4560 ParserErrorCode.MULTIPLE_WITH_CLAUSES, withClause.withKeyword); | |
| 4561 parseWithClause(); | |
| 4562 // TODO(brianwilkerson) Should we merge the list of applied mixins | |
| 4563 // into a single list? | |
| 4564 } | |
| 4565 } else if (_matchesKeyword(Keyword.IMPLEMENTS)) { | |
| 4566 if (implementsClause == null) { | |
| 4567 implementsClause = parseImplementsClause(); | |
| 4568 } else { | |
| 4569 _reportErrorForToken(ParserErrorCode.MULTIPLE_IMPLEMENTS_CLAUSES, | |
| 4570 implementsClause.implementsKeyword); | |
| 4571 parseImplementsClause(); | |
| 4572 // TODO(brianwilkerson) Should we merge the list of implemented | |
| 4573 // classes into a single list? | |
| 4574 } | |
| 4575 } else { | |
| 4576 foundClause = false; | |
| 4577 } | |
| 4578 } | |
| 4579 if (withClause != null && extendsClause == null) { | |
| 4580 _reportErrorForToken( | |
| 4581 ParserErrorCode.WITH_WITHOUT_EXTENDS, withClause.withKeyword); | |
| 4582 } | |
| 4583 // | |
| 4584 // Look for and skip over the extra-lingual 'native' specification. | |
| 4585 // | |
| 4586 NativeClause nativeClause = null; | |
| 4587 if (_matchesString(_NATIVE) && _tokenMatches(_peek(), TokenType.STRING)) { | |
| 4588 nativeClause = _parseNativeClause(); | |
| 4589 } | |
| 4590 // | |
| 4591 // Parse the body of the class. | |
| 4592 // | |
| 4593 Token leftBracket = null; | |
| 4594 List<ClassMember> members = null; | |
| 4595 Token rightBracket = null; | |
| 4596 if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 4597 leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | |
| 4598 members = _parseClassMembers(className, _getEndToken(leftBracket)); | |
| 4599 rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 4600 } else { | |
| 4601 leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET); | |
| 4602 rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET); | |
| 4603 _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY); | |
| 4604 } | |
| 4605 ClassDeclaration classDeclaration = new ClassDeclaration( | |
| 4606 commentAndMetadata.comment, commentAndMetadata.metadata, | |
| 4607 abstractKeyword, keyword, name, typeParameters, extendsClause, | |
| 4608 withClause, implementsClause, leftBracket, members, rightBracket); | |
| 4609 classDeclaration.nativeClause = nativeClause; | |
| 4610 return classDeclaration; | |
| 4611 } | |
| 4612 | |
| 4613 /** | |
| 4614 * Parse a list of class members. The [className] is the name of the class | |
| 4615 * whose members are being parsed. The [closingBracket] is the closing bracket | |
| 4616 * for the class, or `null` if the closing bracket is missing. Return the list | |
| 4617 * of class members that were parsed. | |
| 4618 * | |
| 4619 * classMembers ::= | |
| 4620 * (metadata memberDefinition)* | |
| 4621 */ | |
| 4622 List<ClassMember> _parseClassMembers(String className, Token closingBracket) { | |
| 4623 List<ClassMember> members = new List<ClassMember>(); | |
| 4624 Token memberStart = _currentToken; | |
| 4625 while (!_matches(TokenType.EOF) && | |
| 4626 !_matches(TokenType.CLOSE_CURLY_BRACKET) && | |
| 4627 (closingBracket != null || | |
| 4628 (!_matchesKeyword(Keyword.CLASS) && | |
| 4629 !_matchesKeyword(Keyword.TYPEDEF)))) { | |
| 4630 if (_matches(TokenType.SEMICOLON)) { | |
| 4631 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 4632 [_currentToken.lexeme]); | |
| 4633 _advance(); | |
| 4634 } else { | |
| 4635 ClassMember member = parseClassMember(className); | |
| 4636 if (member != null) { | |
| 4637 members.add(member); | |
| 4638 } | |
| 4639 } | |
| 4640 if (identical(_currentToken, memberStart)) { | |
| 4641 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 4642 [_currentToken.lexeme]); | |
| 4643 _advance(); | |
| 4644 } | |
| 4645 memberStart = _currentToken; | |
| 4646 } | |
| 4647 return members; | |
| 4648 } | |
| 4649 | |
| 4650 /** | |
| 4651 * Parse a class type alias. The [commentAndMetadata] is the metadata to be | |
| 4652 * associated with the member. The [abstractKeyword] is the token representing | |
| 4653 * the 'abstract' keyword. The [classKeyword] is the token representing the | |
| 4654 * 'class' keyword. Return the class type alias that was parsed. | |
| 4655 * | |
| 4656 * classTypeAlias ::= | |
| 4657 * identifier typeParameters? '=' 'abstract'? mixinApplication | |
| 4658 * | |
| 4659 * mixinApplication ::= | |
| 4660 * type withClause implementsClause? ';' | |
| 4661 */ | |
| 4662 ClassTypeAlias _parseClassTypeAlias(CommentAndMetadata commentAndMetadata, | |
| 4663 Token abstractKeyword, Token classKeyword) { | |
| 4664 SimpleIdentifier className = parseSimpleIdentifier(); | |
| 4665 TypeParameterList typeParameters = null; | |
| 4666 if (_matches(TokenType.LT)) { | |
| 4667 typeParameters = parseTypeParameterList(); | |
| 4668 } | |
| 4669 Token equals = _expect(TokenType.EQ); | |
| 4670 TypeName superclass = parseTypeName(); | |
| 4671 WithClause withClause = null; | |
| 4672 if (_matchesKeyword(Keyword.WITH)) { | |
| 4673 withClause = parseWithClause(); | |
| 4674 } else { | |
| 4675 _reportErrorForCurrentToken( | |
| 4676 ParserErrorCode.EXPECTED_TOKEN, [Keyword.WITH.syntax]); | |
| 4677 } | |
| 4678 ImplementsClause implementsClause = null; | |
| 4679 if (_matchesKeyword(Keyword.IMPLEMENTS)) { | |
| 4680 implementsClause = parseImplementsClause(); | |
| 4681 } | |
| 4682 Token semicolon; | |
| 4683 if (_matches(TokenType.SEMICOLON)) { | |
| 4684 semicolon = getAndAdvance(); | |
| 4685 } else { | |
| 4686 if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 4687 _reportErrorForCurrentToken( | |
| 4688 ParserErrorCode.EXPECTED_TOKEN, [TokenType.SEMICOLON.lexeme]); | |
| 4689 Token leftBracket = getAndAdvance(); | |
| 4690 _parseClassMembers(className.name, _getEndToken(leftBracket)); | |
| 4691 _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 4692 } else { | |
| 4693 _reportErrorForToken(ParserErrorCode.EXPECTED_TOKEN, | |
| 4694 _currentToken.previous, [TokenType.SEMICOLON.lexeme]); | |
| 4695 } | |
| 4696 semicolon = _createSyntheticToken(TokenType.SEMICOLON); | |
| 4697 } | |
| 4698 return new ClassTypeAlias(commentAndMetadata.comment, | |
| 4699 commentAndMetadata.metadata, classKeyword, className, typeParameters, | |
| 4700 equals, abstractKeyword, superclass, withClause, implementsClause, | |
| 4701 semicolon); | |
| 4702 } | |
| 4703 | |
| 4704 /** | |
| 4705 * Parse a list of combinators in a directive. Return the combinators that | |
| 4706 * were parsed. | |
| 4707 * | |
| 4708 * combinator ::= | |
| 4709 * 'show' identifier (',' identifier)* | |
| 4710 * | 'hide' identifier (',' identifier)* | |
| 4711 */ | |
| 4712 List<Combinator> _parseCombinators() { | |
| 4713 List<Combinator> combinators = new List<Combinator>(); | |
| 4714 while (true) { | |
| 4715 Combinator combinator = parseCombinator(); | |
| 4716 if (combinator == null) { | |
| 4717 break; | |
| 4718 } | |
| 4719 combinators.add(combinator); | |
| 4720 } | |
| 4721 return combinators; | |
| 4722 } | |
| 4723 | |
| 4724 /** | |
| 4725 * Parse the documentation comment and metadata preceding a declaration. This | |
| 4726 * method allows any number of documentation comments to occur before, after | |
| 4727 * or between the metadata, but only returns the last (right-most) | |
| 4728 * documentation comment that is found. Return the documentation comment and | |
| 4729 * metadata that were parsed. | |
| 4730 * | |
| 4731 * metadata ::= | |
| 4732 * annotation* | |
| 4733 */ | |
| 4734 CommentAndMetadata _parseCommentAndMetadata() { | |
| 4735 Comment comment = _parseDocumentationComment(); | |
| 4736 List<Annotation> metadata = new List<Annotation>(); | |
| 4737 while (_matches(TokenType.AT)) { | |
| 4738 metadata.add(parseAnnotation()); | |
| 4739 Comment optionalComment = _parseDocumentationComment(); | |
| 4740 if (optionalComment != null) { | |
| 4741 comment = optionalComment; | |
| 4742 } | |
| 4743 } | |
| 4744 return new CommentAndMetadata(comment, metadata); | |
| 4745 } | |
| 4746 | |
| 4747 /** | |
| 4748 * Parse a comment reference from the source between square brackets. The | |
| 4749 * [referenceSource] is the source occurring between the square brackets | |
| 4750 * within a documentation comment. The [sourceOffset] is the offset of the | |
| 4751 * first character of the reference source. Return the comment reference that | |
| 4752 * was parsed, or `null` if no reference could be found. | |
| 4753 * | |
| 4754 * commentReference ::= | |
| 4755 * 'new'? prefixedIdentifier | |
| 4756 */ | |
| 4757 CommentReference _parseCommentReference( | |
| 4758 String referenceSource, int sourceOffset) { | |
| 4759 // TODO(brianwilkerson) The errors are not getting the right offset/length | |
| 4760 // and are being duplicated. | |
| 4761 if (referenceSource.length == 0) { | |
| 4762 Token syntheticToken = | |
| 4763 new SyntheticStringToken(TokenType.IDENTIFIER, "", sourceOffset); | |
| 4764 return new CommentReference(null, new SimpleIdentifier(syntheticToken)); | |
| 4765 } | |
| 4766 try { | |
| 4767 BooleanErrorListener listener = new BooleanErrorListener(); | |
| 4768 Scanner scanner = new Scanner( | |
| 4769 null, new SubSequenceReader(referenceSource, sourceOffset), listener); | |
| 4770 scanner.setSourceStart(1, 1); | |
| 4771 Token firstToken = scanner.tokenize(); | |
| 4772 if (listener.errorReported) { | |
| 4773 return null; | |
| 4774 } | |
| 4775 Token newKeyword = null; | |
| 4776 if (_tokenMatchesKeyword(firstToken, Keyword.NEW)) { | |
| 4777 newKeyword = firstToken; | |
| 4778 firstToken = firstToken.next; | |
| 4779 } | |
| 4780 if (_tokenMatchesIdentifier(firstToken)) { | |
| 4781 Token secondToken = firstToken.next; | |
| 4782 Token thirdToken = secondToken.next; | |
| 4783 Token nextToken; | |
| 4784 Identifier identifier; | |
| 4785 if (_tokenMatches(secondToken, TokenType.PERIOD) && | |
| 4786 _tokenMatchesIdentifier(thirdToken)) { | |
| 4787 identifier = new PrefixedIdentifier(new SimpleIdentifier(firstToken), | |
| 4788 secondToken, new SimpleIdentifier(thirdToken)); | |
| 4789 nextToken = thirdToken.next; | |
| 4790 } else { | |
| 4791 identifier = new SimpleIdentifier(firstToken); | |
| 4792 nextToken = firstToken.next; | |
| 4793 } | |
| 4794 if (nextToken.type != TokenType.EOF) { | |
| 4795 return null; | |
| 4796 } | |
| 4797 return new CommentReference(newKeyword, identifier); | |
| 4798 } else if (_tokenMatchesKeyword(firstToken, Keyword.THIS) || | |
| 4799 _tokenMatchesKeyword(firstToken, Keyword.NULL) || | |
| 4800 _tokenMatchesKeyword(firstToken, Keyword.TRUE) || | |
| 4801 _tokenMatchesKeyword(firstToken, Keyword.FALSE)) { | |
| 4802 // TODO(brianwilkerson) If we want to support this we will need to | |
| 4803 // extend the definition of CommentReference to take an expression | |
| 4804 // rather than an identifier. For now we just ignore it to reduce the | |
| 4805 // number of errors produced, but that's probably not a valid long term | |
| 4806 // approach. | |
| 4807 return null; | |
| 4808 } | |
| 4809 } catch (exception) { | |
| 4810 // Ignored because we assume that it wasn't a real comment reference. | |
| 4811 } | |
| 4812 return null; | |
| 4813 } | |
| 4814 | |
| 4815 /** | |
| 4816 * Parse all of the comment references occurring in the given array of | |
| 4817 * documentation comments. The [tokens] are the comment tokens representing | |
| 4818 * the documentation comments to be parsed. Return the comment references that | |
| 4819 * were parsed. | |
| 4820 * | |
| 4821 * commentReference ::= | |
| 4822 * '[' 'new'? qualified ']' libraryReference? | |
| 4823 * | |
| 4824 * libraryReference ::= | |
| 4825 * '(' stringLiteral ')' | |
| 4826 */ | |
| 4827 List<CommentReference> _parseCommentReferences( | |
| 4828 List<DocumentationCommentToken> tokens) { | |
| 4829 List<CommentReference> references = new List<CommentReference>(); | |
| 4830 for (DocumentationCommentToken token in tokens) { | |
| 4831 String comment = token.lexeme; | |
| 4832 int length = comment.length; | |
| 4833 List<List<int>> codeBlockRanges = _getCodeBlockRanges(comment); | |
| 4834 int leftIndex = comment.indexOf('['); | |
| 4835 while (leftIndex >= 0 && leftIndex + 1 < length) { | |
| 4836 List<int> range = _findRange(codeBlockRanges, leftIndex); | |
| 4837 if (range == null) { | |
| 4838 int nameOffset = token.offset + leftIndex + 1; | |
| 4839 int rightIndex = JavaString.indexOf(comment, ']', leftIndex); | |
| 4840 if (rightIndex >= 0) { | |
| 4841 int firstChar = comment.codeUnitAt(leftIndex + 1); | |
| 4842 if (firstChar != 0x27 && firstChar != 0x22) { | |
| 4843 if (_isLinkText(comment, rightIndex)) { | |
| 4844 // TODO(brianwilkerson) Handle the case where there's a library | |
| 4845 // URI in the link text. | |
| 4846 } else { | |
| 4847 CommentReference reference = _parseCommentReference( | |
| 4848 comment.substring(leftIndex + 1, rightIndex), nameOffset); | |
| 4849 if (reference != null) { | |
| 4850 references.add(reference); | |
| 4851 token.references.add(reference.beginToken); | |
| 4852 } | |
| 4853 } | |
| 4854 } | |
| 4855 } else { | |
| 4856 // terminating ']' is not typed yet | |
| 4857 int charAfterLeft = comment.codeUnitAt(leftIndex + 1); | |
| 4858 if (Character.isLetterOrDigit(charAfterLeft)) { | |
| 4859 int nameEnd = StringUtilities.indexOfFirstNotLetterDigit( | |
| 4860 comment, leftIndex + 1); | |
| 4861 String name = comment.substring(leftIndex + 1, nameEnd); | |
| 4862 Token nameToken = | |
| 4863 new StringToken(TokenType.IDENTIFIER, name, nameOffset); | |
| 4864 references.add( | |
| 4865 new CommentReference(null, new SimpleIdentifier(nameToken))); | |
| 4866 } else { | |
| 4867 Token nameToken = new SyntheticStringToken( | |
| 4868 TokenType.IDENTIFIER, "", nameOffset); | |
| 4869 references.add( | |
| 4870 new CommentReference(null, new SimpleIdentifier(nameToken))); | |
| 4871 } | |
| 4872 // next character | |
| 4873 rightIndex = leftIndex + 1; | |
| 4874 } | |
| 4875 leftIndex = JavaString.indexOf(comment, '[', rightIndex); | |
| 4876 } else { | |
| 4877 leftIndex = JavaString.indexOf(comment, '[', range[1] + 1); | |
| 4878 } | |
| 4879 } | |
| 4880 } | |
| 4881 return references; | |
| 4882 } | |
| 4883 | |
| 4884 /** | |
| 4885 * Parse a compilation unit member. The [commentAndMetadata] is the metadata | |
| 4886 * to be associated with the member. Return the compilation unit member that | |
| 4887 * was parsed, or `null` if what was parsed could not be represented as a | |
| 4888 * compilation unit member. | |
| 4889 * | |
| 4890 * compilationUnitMember ::= | |
| 4891 * classDefinition | |
| 4892 * | functionTypeAlias | |
| 4893 * | external functionSignature | |
| 4894 * | external getterSignature | |
| 4895 * | external setterSignature | |
| 4896 * | functionSignature functionBody | |
| 4897 * | returnType? getOrSet identifier formalParameterList functionBody | |
| 4898 * | (final | const) type? staticFinalDeclarationList ';' | |
| 4899 * | variableDeclaration ';' | |
| 4900 */ | |
| 4901 CompilationUnitMember _parseCompilationUnitMember( | |
| 4902 CommentAndMetadata commentAndMetadata) { | |
| 4903 Modifiers modifiers = _parseModifiers(); | |
| 4904 if (_matchesKeyword(Keyword.CLASS)) { | |
| 4905 return _parseClassDeclaration( | |
| 4906 commentAndMetadata, _validateModifiersForClass(modifiers)); | |
| 4907 } else if (_matchesKeyword(Keyword.TYPEDEF) && | |
| 4908 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 4909 !_tokenMatches(_peek(), TokenType.LT) && | |
| 4910 !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 4911 _validateModifiersForTypedef(modifiers); | |
| 4912 return _parseTypeAlias(commentAndMetadata); | |
| 4913 } else if (_matchesKeyword(Keyword.ENUM)) { | |
| 4914 _validateModifiersForEnum(modifiers); | |
| 4915 return _parseEnumDeclaration(commentAndMetadata); | |
| 4916 } | |
| 4917 if (_matchesKeyword(Keyword.VOID)) { | |
| 4918 TypeName returnType = parseReturnType(); | |
| 4919 if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | |
| 4920 _tokenMatchesIdentifier(_peek())) { | |
| 4921 _validateModifiersForTopLevelFunction(modifiers); | |
| 4922 return _parseFunctionDeclaration( | |
| 4923 commentAndMetadata, modifiers.externalKeyword, returnType); | |
| 4924 } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | |
| 4925 _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); | |
| 4926 return _convertToFunctionDeclaration(_parseOperator( | |
| 4927 commentAndMetadata, modifiers.externalKeyword, returnType)); | |
| 4928 } else if (_matchesIdentifier() && | |
| 4929 _peek().matchesAny([ | |
| 4930 TokenType.OPEN_PAREN, | |
| 4931 TokenType.OPEN_CURLY_BRACKET, | |
| 4932 TokenType.FUNCTION | |
| 4933 ])) { | |
| 4934 _validateModifiersForTopLevelFunction(modifiers); | |
| 4935 return _parseFunctionDeclaration( | |
| 4936 commentAndMetadata, modifiers.externalKeyword, returnType); | |
| 4937 } else { | |
| 4938 // | |
| 4939 // We have found an error of some kind. Try to recover. | |
| 4940 // | |
| 4941 if (_matchesIdentifier()) { | |
| 4942 if (_peek().matchesAny( | |
| 4943 [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { | |
| 4944 // | |
| 4945 // We appear to have a variable declaration with a type of "void". | |
| 4946 // | |
| 4947 _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType); | |
| 4948 return new TopLevelVariableDeclaration(commentAndMetadata.comment, | |
| 4949 commentAndMetadata.metadata, | |
| 4950 _parseVariableDeclarationListAfterType(null, | |
| 4951 _validateModifiersForTopLevelVariable(modifiers), null), | |
| 4952 _expect(TokenType.SEMICOLON)); | |
| 4953 } | |
| 4954 } | |
| 4955 _reportErrorForToken( | |
| 4956 ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); | |
| 4957 return null; | |
| 4958 } | |
| 4959 } else if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | |
| 4960 _tokenMatchesIdentifier(_peek())) { | |
| 4961 _validateModifiersForTopLevelFunction(modifiers); | |
| 4962 return _parseFunctionDeclaration( | |
| 4963 commentAndMetadata, modifiers.externalKeyword, null); | |
| 4964 } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | |
| 4965 _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); | |
| 4966 return _convertToFunctionDeclaration( | |
| 4967 _parseOperator(commentAndMetadata, modifiers.externalKeyword, null)); | |
| 4968 } else if (!_matchesIdentifier()) { | |
| 4969 Token keyword = modifiers.varKeyword; | |
| 4970 if (keyword == null) { | |
| 4971 keyword = modifiers.finalKeyword; | |
| 4972 } | |
| 4973 if (keyword == null) { | |
| 4974 keyword = modifiers.constKeyword; | |
| 4975 } | |
| 4976 if (keyword != null) { | |
| 4977 // | |
| 4978 // We appear to have found an incomplete top-level variable declaration. | |
| 4979 // | |
| 4980 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 4981 List<VariableDeclaration> variables = new List<VariableDeclaration>(); | |
| 4982 variables.add( | |
| 4983 new VariableDeclaration(_createSyntheticIdentifier(), null, null)); | |
| 4984 return new TopLevelVariableDeclaration(commentAndMetadata.comment, | |
| 4985 commentAndMetadata.metadata, | |
| 4986 new VariableDeclarationList(null, null, keyword, null, variables), | |
| 4987 _expectSemicolon()); | |
| 4988 } | |
| 4989 _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); | |
| 4990 return null; | |
| 4991 } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 4992 _validateModifiersForTopLevelFunction(modifiers); | |
| 4993 return _parseFunctionDeclaration( | |
| 4994 commentAndMetadata, modifiers.externalKeyword, null); | |
| 4995 } else if (_peek() | |
| 4996 .matchesAny([TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { | |
| 4997 if (modifiers.constKeyword == null && | |
| 4998 modifiers.finalKeyword == null && | |
| 4999 modifiers.varKeyword == null) { | |
| 5000 _reportErrorForCurrentToken( | |
| 5001 ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE); | |
| 5002 } | |
| 5003 return new TopLevelVariableDeclaration(commentAndMetadata.comment, | |
| 5004 commentAndMetadata.metadata, _parseVariableDeclarationListAfterType( | |
| 5005 null, _validateModifiersForTopLevelVariable(modifiers), null), | |
| 5006 _expect(TokenType.SEMICOLON)); | |
| 5007 } | |
| 5008 TypeName returnType = parseReturnType(); | |
| 5009 if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) && | |
| 5010 _tokenMatchesIdentifier(_peek())) { | |
| 5011 _validateModifiersForTopLevelFunction(modifiers); | |
| 5012 return _parseFunctionDeclaration( | |
| 5013 commentAndMetadata, modifiers.externalKeyword, returnType); | |
| 5014 } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) { | |
| 5015 _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken); | |
| 5016 return _convertToFunctionDeclaration(_parseOperator( | |
| 5017 commentAndMetadata, modifiers.externalKeyword, returnType)); | |
| 5018 } else if (_matches(TokenType.AT)) { | |
| 5019 return new TopLevelVariableDeclaration(commentAndMetadata.comment, | |
| 5020 commentAndMetadata.metadata, _parseVariableDeclarationListAfterType( | |
| 5021 null, _validateModifiersForTopLevelVariable(modifiers), | |
| 5022 returnType), _expect(TokenType.SEMICOLON)); | |
| 5023 } else if (!_matchesIdentifier()) { | |
| 5024 // TODO(brianwilkerson) Generalize this error. We could also be parsing a | |
| 5025 // top-level variable at this point. | |
| 5026 _reportErrorForToken(ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken); | |
| 5027 Token semicolon; | |
| 5028 if (_matches(TokenType.SEMICOLON)) { | |
| 5029 semicolon = getAndAdvance(); | |
| 5030 } else { | |
| 5031 semicolon = _createSyntheticToken(TokenType.SEMICOLON); | |
| 5032 } | |
| 5033 List<VariableDeclaration> variables = new List<VariableDeclaration>(); | |
| 5034 variables.add( | |
| 5035 new VariableDeclaration(_createSyntheticIdentifier(), null, null)); | |
| 5036 return new TopLevelVariableDeclaration(commentAndMetadata.comment, | |
| 5037 commentAndMetadata.metadata, | |
| 5038 new VariableDeclarationList(null, null, null, returnType, variables), | |
| 5039 semicolon); | |
| 5040 } | |
| 5041 if (_peek().matchesAny([ | |
| 5042 TokenType.OPEN_PAREN, | |
| 5043 TokenType.FUNCTION, | |
| 5044 TokenType.OPEN_CURLY_BRACKET | |
| 5045 ])) { | |
| 5046 _validateModifiersForTopLevelFunction(modifiers); | |
| 5047 return _parseFunctionDeclaration( | |
| 5048 commentAndMetadata, modifiers.externalKeyword, returnType); | |
| 5049 } | |
| 5050 return new TopLevelVariableDeclaration(commentAndMetadata.comment, | |
| 5051 commentAndMetadata.metadata, _parseVariableDeclarationListAfterType( | |
| 5052 null, _validateModifiersForTopLevelVariable(modifiers), returnType), | |
| 5053 _expect(TokenType.SEMICOLON)); | |
| 5054 } | |
| 5055 | |
| 5056 /** | |
| 5057 * Parse a const expression. Return the const expression that was parsed. | |
| 5058 * | |
| 5059 * constExpression ::= | |
| 5060 * instanceCreationExpression | |
| 5061 * | listLiteral | |
| 5062 * | mapLiteral | |
| 5063 */ | |
| 5064 Expression _parseConstExpression() { | |
| 5065 Token keyword = _expectKeyword(Keyword.CONST); | |
| 5066 if (_matches(TokenType.OPEN_SQUARE_BRACKET) || _matches(TokenType.INDEX)) { | |
| 5067 return _parseListLiteral(keyword, null); | |
| 5068 } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 5069 return _parseMapLiteral(keyword, null); | |
| 5070 } else if (_matches(TokenType.LT)) { | |
| 5071 return _parseListOrMapLiteral(keyword); | |
| 5072 } | |
| 5073 return _parseInstanceCreationExpression(keyword); | |
| 5074 } | |
| 5075 | |
| 5076 ConstructorDeclaration _parseConstructor( | |
| 5077 CommentAndMetadata commentAndMetadata, Token externalKeyword, | |
| 5078 Token constKeyword, Token factoryKeyword, SimpleIdentifier returnType, | |
| 5079 Token period, SimpleIdentifier name, FormalParameterList parameters) { | |
| 5080 bool bodyAllowed = externalKeyword == null; | |
| 5081 Token separator = null; | |
| 5082 List<ConstructorInitializer> initializers = null; | |
| 5083 if (_matches(TokenType.COLON)) { | |
| 5084 separator = getAndAdvance(); | |
| 5085 initializers = new List<ConstructorInitializer>(); | |
| 5086 do { | |
| 5087 if (_matchesKeyword(Keyword.THIS)) { | |
| 5088 if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 5089 bodyAllowed = false; | |
| 5090 initializers.add(_parseRedirectingConstructorInvocation()); | |
| 5091 } else if (_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 5092 _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) { | |
| 5093 bodyAllowed = false; | |
| 5094 initializers.add(_parseRedirectingConstructorInvocation()); | |
| 5095 } else { | |
| 5096 initializers.add(_parseConstructorFieldInitializer()); | |
| 5097 } | |
| 5098 } else if (_matchesKeyword(Keyword.SUPER)) { | |
| 5099 initializers.add(_parseSuperConstructorInvocation()); | |
| 5100 } else if (_matches(TokenType.OPEN_CURLY_BRACKET) || | |
| 5101 _matches(TokenType.FUNCTION)) { | |
| 5102 _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER); | |
| 5103 } else { | |
| 5104 initializers.add(_parseConstructorFieldInitializer()); | |
| 5105 } | |
| 5106 } while (_optional(TokenType.COMMA)); | |
| 5107 if (factoryKeyword != null) { | |
| 5108 _reportErrorForToken( | |
| 5109 ParserErrorCode.FACTORY_WITH_INITIALIZERS, factoryKeyword); | |
| 5110 } | |
| 5111 } | |
| 5112 ConstructorName redirectedConstructor = null; | |
| 5113 FunctionBody body; | |
| 5114 if (_matches(TokenType.EQ)) { | |
| 5115 separator = getAndAdvance(); | |
| 5116 redirectedConstructor = parseConstructorName(); | |
| 5117 body = new EmptyFunctionBody(_expect(TokenType.SEMICOLON)); | |
| 5118 if (factoryKeyword == null) { | |
| 5119 _reportErrorForNode( | |
| 5120 ParserErrorCode.REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR, | |
| 5121 redirectedConstructor); | |
| 5122 } | |
| 5123 } else { | |
| 5124 body = _parseFunctionBody( | |
| 5125 true, ParserErrorCode.MISSING_FUNCTION_BODY, false); | |
| 5126 if (constKeyword != null && | |
| 5127 factoryKeyword != null && | |
| 5128 externalKeyword == null) { | |
| 5129 _reportErrorForToken(ParserErrorCode.CONST_FACTORY, factoryKeyword); | |
| 5130 } else if (body is EmptyFunctionBody) { | |
| 5131 if (factoryKeyword != null && | |
| 5132 externalKeyword == null && | |
| 5133 _parseFunctionBodies) { | |
| 5134 _reportErrorForToken( | |
| 5135 ParserErrorCode.FACTORY_WITHOUT_BODY, factoryKeyword); | |
| 5136 } | |
| 5137 } else { | |
| 5138 if (constKeyword != null) { | |
| 5139 _reportErrorForNode( | |
| 5140 ParserErrorCode.CONST_CONSTRUCTOR_WITH_BODY, body); | |
| 5141 } else if (!bodyAllowed) { | |
| 5142 _reportErrorForNode( | |
| 5143 ParserErrorCode.EXTERNAL_CONSTRUCTOR_WITH_BODY, body); | |
| 5144 } | |
| 5145 } | |
| 5146 } | |
| 5147 return new ConstructorDeclaration(commentAndMetadata.comment, | |
| 5148 commentAndMetadata.metadata, externalKeyword, constKeyword, | |
| 5149 factoryKeyword, returnType, period, name, parameters, separator, | |
| 5150 initializers, redirectedConstructor, body); | |
| 5151 } | |
| 5152 | |
| 5153 /** | |
| 5154 * Parse a field initializer within a constructor. Return the field | |
| 5155 * initializer that was parsed. | |
| 5156 * | |
| 5157 * fieldInitializer: | |
| 5158 * ('this' '.')? identifier '=' conditionalExpression cascadeSection* | |
| 5159 */ | |
| 5160 ConstructorFieldInitializer _parseConstructorFieldInitializer() { | |
| 5161 Token keyword = null; | |
| 5162 Token period = null; | |
| 5163 if (_matchesKeyword(Keyword.THIS)) { | |
| 5164 keyword = getAndAdvance(); | |
| 5165 period = _expect(TokenType.PERIOD); | |
| 5166 } | |
| 5167 SimpleIdentifier fieldName = parseSimpleIdentifier(); | |
| 5168 Token equals = null; | |
| 5169 if (_matches(TokenType.EQ)) { | |
| 5170 equals = getAndAdvance(); | |
| 5171 } else if (!_matchesKeyword(Keyword.THIS) && | |
| 5172 !_matchesKeyword(Keyword.SUPER) && | |
| 5173 !_matches(TokenType.OPEN_CURLY_BRACKET) && | |
| 5174 !_matches(TokenType.FUNCTION)) { | |
| 5175 _reportErrorForCurrentToken( | |
| 5176 ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER); | |
| 5177 equals = _createSyntheticToken(TokenType.EQ); | |
| 5178 } else { | |
| 5179 _reportErrorForCurrentToken( | |
| 5180 ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER); | |
| 5181 return new ConstructorFieldInitializer(keyword, period, fieldName, | |
| 5182 _createSyntheticToken(TokenType.EQ), _createSyntheticIdentifier()); | |
| 5183 } | |
| 5184 bool wasInInitializer = _inInitializer; | |
| 5185 _inInitializer = true; | |
| 5186 try { | |
| 5187 Expression expression = parseConditionalExpression(); | |
| 5188 TokenType tokenType = _currentToken.type; | |
| 5189 if (tokenType == TokenType.PERIOD_PERIOD) { | |
| 5190 List<Expression> cascadeSections = new List<Expression>(); | |
| 5191 while (tokenType == TokenType.PERIOD_PERIOD) { | |
| 5192 Expression section = _parseCascadeSection(); | |
| 5193 if (section != null) { | |
| 5194 cascadeSections.add(section); | |
| 5195 } | |
| 5196 tokenType = _currentToken.type; | |
| 5197 } | |
| 5198 expression = new CascadeExpression(expression, cascadeSections); | |
| 5199 } | |
| 5200 return new ConstructorFieldInitializer( | |
| 5201 keyword, period, fieldName, equals, expression); | |
| 5202 } finally { | |
| 5203 _inInitializer = wasInInitializer; | |
| 5204 } | |
| 5205 } | |
| 5206 | |
| 5207 /** | |
| 5208 * Parse a continue statement. Return the continue statement that was parsed. | |
| 5209 * | |
| 5210 * continueStatement ::= | |
| 5211 * 'continue' identifier? ';' | |
| 5212 */ | |
| 5213 Statement _parseContinueStatement() { | |
| 5214 Token continueKeyword = _expectKeyword(Keyword.CONTINUE); | |
| 5215 if (!_inLoop && !_inSwitch) { | |
| 5216 _reportErrorForToken( | |
| 5217 ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword); | |
| 5218 } | |
| 5219 SimpleIdentifier label = null; | |
| 5220 if (_matchesIdentifier()) { | |
| 5221 label = parseSimpleIdentifier(); | |
| 5222 } | |
| 5223 if (_inSwitch && !_inLoop && label == null) { | |
| 5224 _reportErrorForToken( | |
| 5225 ParserErrorCode.CONTINUE_WITHOUT_LABEL_IN_CASE, continueKeyword); | |
| 5226 } | |
| 5227 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 5228 return new ContinueStatement(continueKeyword, label, semicolon); | |
| 5229 } | |
| 5230 | |
| 5231 /** | |
| 5232 * Parse a directive. The [commentAndMetadata] is the metadata to be | |
| 5233 * associated with the directive. Return the directive that was parsed. | |
| 5234 * | |
| 5235 * directive ::= | |
| 5236 * exportDirective | |
| 5237 * | libraryDirective | |
| 5238 * | importDirective | |
| 5239 * | partDirective | |
| 5240 */ | |
| 5241 Directive _parseDirective(CommentAndMetadata commentAndMetadata) { | |
| 5242 if (_matchesKeyword(Keyword.IMPORT)) { | |
| 5243 return _parseImportDirective(commentAndMetadata); | |
| 5244 } else if (_matchesKeyword(Keyword.EXPORT)) { | |
| 5245 return _parseExportDirective(commentAndMetadata); | |
| 5246 } else if (_matchesKeyword(Keyword.LIBRARY)) { | |
| 5247 return _parseLibraryDirective(commentAndMetadata); | |
| 5248 } else if (_matchesKeyword(Keyword.PART)) { | |
| 5249 return _parsePartDirective(commentAndMetadata); | |
| 5250 } else { | |
| 5251 // Internal error: this method should not have been invoked if the current | |
| 5252 // token was something other than one of the above. | |
| 5253 throw new IllegalStateException( | |
| 5254 "parseDirective invoked in an invalid state; currentToken = $_currentT
oken"); | |
| 5255 } | |
| 5256 } | |
| 5257 | |
| 5258 /** | |
| 5259 * Parse the script tag and directives in a compilation unit until the first | |
| 5260 * non-directive is encountered. Return the compilation unit that was parsed. | |
| 5261 * | |
| 5262 * compilationUnit ::= | |
| 5263 * scriptTag? directive* | |
| 5264 */ | |
| 5265 CompilationUnit _parseDirectives() { | |
| 5266 Token firstToken = _currentToken; | |
| 5267 ScriptTag scriptTag = null; | |
| 5268 if (_matches(TokenType.SCRIPT_TAG)) { | |
| 5269 scriptTag = new ScriptTag(getAndAdvance()); | |
| 5270 } | |
| 5271 List<Directive> directives = new List<Directive>(); | |
| 5272 while (!_matches(TokenType.EOF)) { | |
| 5273 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 5274 if ((_matchesKeyword(Keyword.IMPORT) || | |
| 5275 _matchesKeyword(Keyword.EXPORT) || | |
| 5276 _matchesKeyword(Keyword.LIBRARY) || | |
| 5277 _matchesKeyword(Keyword.PART)) && | |
| 5278 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 5279 !_tokenMatches(_peek(), TokenType.LT) && | |
| 5280 !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 5281 directives.add(_parseDirective(commentAndMetadata)); | |
| 5282 } else if (_matches(TokenType.SEMICOLON)) { | |
| 5283 _advance(); | |
| 5284 } else { | |
| 5285 while (!_matches(TokenType.EOF)) { | |
| 5286 _advance(); | |
| 5287 } | |
| 5288 return new CompilationUnit(firstToken, scriptTag, directives, | |
| 5289 new List<CompilationUnitMember>(), _currentToken); | |
| 5290 } | |
| 5291 } | |
| 5292 return new CompilationUnit(firstToken, scriptTag, directives, | |
| 5293 new List<CompilationUnitMember>(), _currentToken); | |
| 5294 } | |
| 5295 | |
| 5296 /** | |
| 5297 * Parse a documentation comment. Return the documentation comment that was | |
| 5298 * parsed, or `null` if there was no comment. | |
| 5299 * | |
| 5300 * documentationComment ::= | |
| 5301 * multiLineComment? | |
| 5302 * | singleLineComment* | |
| 5303 */ | |
| 5304 Comment _parseDocumentationComment() { | |
| 5305 List<DocumentationCommentToken> documentationTokens = | |
| 5306 <DocumentationCommentToken>[]; | |
| 5307 CommentToken commentToken = _currentToken.precedingComments; | |
| 5308 while (commentToken != null) { | |
| 5309 if (commentToken is DocumentationCommentToken) { | |
| 5310 if (documentationTokens.isNotEmpty) { | |
| 5311 if (commentToken.type == TokenType.SINGLE_LINE_COMMENT) { | |
| 5312 if (documentationTokens[0].type != TokenType.SINGLE_LINE_COMMENT) { | |
| 5313 documentationTokens.clear(); | |
| 5314 } | |
| 5315 } else { | |
| 5316 documentationTokens.clear(); | |
| 5317 } | |
| 5318 } | |
| 5319 documentationTokens.add(commentToken); | |
| 5320 } | |
| 5321 commentToken = commentToken.next; | |
| 5322 } | |
| 5323 if (documentationTokens.isEmpty) { | |
| 5324 return null; | |
| 5325 } | |
| 5326 List<CommentReference> references = | |
| 5327 _parseCommentReferences(documentationTokens); | |
| 5328 return Comment.createDocumentationCommentWithReferences( | |
| 5329 documentationTokens, references); | |
| 5330 } | |
| 5331 | |
| 5332 /** | |
| 5333 * Parse a do statement. Return the do statement that was parsed. | |
| 5334 * | |
| 5335 * doStatement ::= | |
| 5336 * 'do' statement 'while' '(' expression ')' ';' | |
| 5337 */ | |
| 5338 Statement _parseDoStatement() { | |
| 5339 bool wasInLoop = _inLoop; | |
| 5340 _inLoop = true; | |
| 5341 try { | |
| 5342 Token doKeyword = _expectKeyword(Keyword.DO); | |
| 5343 Statement body = parseStatement2(); | |
| 5344 Token whileKeyword = _expectKeyword(Keyword.WHILE); | |
| 5345 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 5346 Expression condition = parseExpression2(); | |
| 5347 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 5348 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 5349 return new DoStatement(doKeyword, body, whileKeyword, leftParenthesis, | |
| 5350 condition, rightParenthesis, semicolon); | |
| 5351 } finally { | |
| 5352 _inLoop = wasInLoop; | |
| 5353 } | |
| 5354 } | |
| 5355 | |
| 5356 /** | |
| 5357 * Parse an empty statement. Return the empty statement that was parsed. | |
| 5358 * | |
| 5359 * emptyStatement ::= | |
| 5360 * ';' | |
| 5361 */ | |
| 5362 Statement _parseEmptyStatement() => new EmptyStatement(getAndAdvance()); | |
| 5363 | |
| 5364 EnumConstantDeclaration _parseEnumConstantDeclaration() { | |
| 5365 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 5366 SimpleIdentifier name; | |
| 5367 if (_matchesIdentifier()) { | |
| 5368 name = parseSimpleIdentifier(); | |
| 5369 } else { | |
| 5370 name = _createSyntheticIdentifier(); | |
| 5371 } | |
| 5372 if (commentAndMetadata.metadata.isNotEmpty) { | |
| 5373 _reportErrorForNode(ParserErrorCode.ANNOTATION_ON_ENUM_CONSTANT, | |
| 5374 commentAndMetadata.metadata[0]); | |
| 5375 } | |
| 5376 return new EnumConstantDeclaration( | |
| 5377 commentAndMetadata.comment, commentAndMetadata.metadata, name); | |
| 5378 } | |
| 5379 | |
| 5380 /** | |
| 5381 * Parse an enum declaration. The [commentAndMetadata] is the metadata to be | |
| 5382 * associated with the member. Return the enum declaration that was parsed. | |
| 5383 * | |
| 5384 * enumType ::= | |
| 5385 * metadata 'enum' id '{' id (',' id)* (',')? '}' | |
| 5386 */ | |
| 5387 EnumDeclaration _parseEnumDeclaration(CommentAndMetadata commentAndMetadata) { | |
| 5388 Token keyword = _expectKeyword(Keyword.ENUM); | |
| 5389 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 5390 Token leftBracket = null; | |
| 5391 List<EnumConstantDeclaration> constants = | |
| 5392 new List<EnumConstantDeclaration>(); | |
| 5393 Token rightBracket = null; | |
| 5394 if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 5395 leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | |
| 5396 if (_matchesIdentifier() || _matches(TokenType.AT)) { | |
| 5397 constants.add(_parseEnumConstantDeclaration()); | |
| 5398 } else if (_matches(TokenType.COMMA) && | |
| 5399 _tokenMatchesIdentifier(_peek())) { | |
| 5400 constants.add(_parseEnumConstantDeclaration()); | |
| 5401 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 5402 } else { | |
| 5403 constants.add(_parseEnumConstantDeclaration()); | |
| 5404 _reportErrorForCurrentToken(ParserErrorCode.EMPTY_ENUM_BODY); | |
| 5405 } | |
| 5406 while (_optional(TokenType.COMMA)) { | |
| 5407 if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 5408 break; | |
| 5409 } | |
| 5410 constants.add(_parseEnumConstantDeclaration()); | |
| 5411 } | |
| 5412 rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 5413 } else { | |
| 5414 leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET); | |
| 5415 rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET); | |
| 5416 _reportErrorForCurrentToken(ParserErrorCode.MISSING_ENUM_BODY); | |
| 5417 } | |
| 5418 return new EnumDeclaration(commentAndMetadata.comment, | |
| 5419 commentAndMetadata.metadata, keyword, name, leftBracket, constants, | |
| 5420 rightBracket); | |
| 5421 } | |
| 5422 | |
| 5423 /** | |
| 5424 * Parse an equality expression. Return the equality expression that was | |
| 5425 * parsed. | |
| 5426 * | |
| 5427 * equalityExpression ::= | |
| 5428 * relationalExpression (equalityOperator relationalExpression)? | |
| 5429 * | 'super' equalityOperator relationalExpression | |
| 5430 */ | |
| 5431 Expression _parseEqualityExpression() { | |
| 5432 Expression expression; | |
| 5433 if (_matchesKeyword(Keyword.SUPER) && | |
| 5434 _currentToken.next.type.isEqualityOperator) { | |
| 5435 expression = new SuperExpression(getAndAdvance()); | |
| 5436 } else { | |
| 5437 expression = _parseRelationalExpression(); | |
| 5438 } | |
| 5439 bool leftEqualityExpression = false; | |
| 5440 while (_currentToken.type.isEqualityOperator) { | |
| 5441 Token operator = getAndAdvance(); | |
| 5442 if (leftEqualityExpression) { | |
| 5443 _reportErrorForNode( | |
| 5444 ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression); | |
| 5445 } | |
| 5446 expression = new BinaryExpression( | |
| 5447 expression, operator, _parseRelationalExpression()); | |
| 5448 leftEqualityExpression = true; | |
| 5449 } | |
| 5450 return expression; | |
| 5451 } | |
| 5452 | |
| 5453 /** | |
| 5454 * Parse an export directive. The [commentAndMetadata] is the metadata to be | |
| 5455 * associated with the directive. Return the export directive that was parsed. | |
| 5456 * | |
| 5457 * exportDirective ::= | |
| 5458 * metadata 'export' stringLiteral combinator*';' | |
| 5459 */ | |
| 5460 ExportDirective _parseExportDirective(CommentAndMetadata commentAndMetadata) { | |
| 5461 Token exportKeyword = _expectKeyword(Keyword.EXPORT); | |
| 5462 StringLiteral libraryUri = _parseUri(); | |
| 5463 List<Combinator> combinators = _parseCombinators(); | |
| 5464 Token semicolon = _expectSemicolon(); | |
| 5465 return new ExportDirective(commentAndMetadata.comment, | |
| 5466 commentAndMetadata.metadata, exportKeyword, libraryUri, combinators, | |
| 5467 semicolon); | |
| 5468 } | |
| 5469 | |
| 5470 /** | |
| 5471 * Parse a list of expressions. Return the expression that was parsed. | |
| 5472 * | |
| 5473 * expressionList ::= | |
| 5474 * expression (',' expression)* | |
| 5475 */ | |
| 5476 List<Expression> _parseExpressionList() { | |
| 5477 List<Expression> expressions = new List<Expression>(); | |
| 5478 expressions.add(parseExpression2()); | |
| 5479 while (_optional(TokenType.COMMA)) { | |
| 5480 expressions.add(parseExpression2()); | |
| 5481 } | |
| 5482 return expressions; | |
| 5483 } | |
| 5484 | |
| 5485 /** | |
| 5486 * Parse the 'final', 'const', 'var' or type preceding a variable declaration. | |
| 5487 * The [optional] is `true` if the keyword and type are optional. Return the | |
| 5488 * 'final', 'const', 'var' or type that was parsed. | |
| 5489 * | |
| 5490 * finalConstVarOrType ::= | |
| 5491 * 'final' type? | |
| 5492 * | 'const' type? | |
| 5493 * | 'var' | |
| 5494 * | type | |
| 5495 */ | |
| 5496 FinalConstVarOrType _parseFinalConstVarOrType(bool optional) { | |
| 5497 Token keyword = null; | |
| 5498 TypeName type = null; | |
| 5499 if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.CONST)) { | |
| 5500 keyword = getAndAdvance(); | |
| 5501 if (_isTypedIdentifier(_currentToken)) { | |
| 5502 type = parseTypeName(); | |
| 5503 } | |
| 5504 } else if (_matchesKeyword(Keyword.VAR)) { | |
| 5505 keyword = getAndAdvance(); | |
| 5506 } else { | |
| 5507 if (_isTypedIdentifier(_currentToken)) { | |
| 5508 type = parseReturnType(); | |
| 5509 } else if (!optional) { | |
| 5510 _reportErrorForCurrentToken( | |
| 5511 ParserErrorCode.MISSING_CONST_FINAL_VAR_OR_TYPE); | |
| 5512 } | |
| 5513 } | |
| 5514 return new FinalConstVarOrType(keyword, type); | |
| 5515 } | |
| 5516 | |
| 5517 /** | |
| 5518 * Parse a formal parameter. At most one of `isOptional` and `isNamed` can be | |
| 5519 * `true`. The [kind] is the kind of parameter being expected based on the | |
| 5520 * presence or absence of group delimiters. Return the formal parameter that | |
| 5521 * was parsed. | |
| 5522 * | |
| 5523 * defaultFormalParameter ::= | |
| 5524 * normalFormalParameter ('=' expression)? | |
| 5525 * | |
| 5526 * defaultNamedParameter ::= | |
| 5527 * normalFormalParameter (':' expression)? | |
| 5528 */ | |
| 5529 FormalParameter _parseFormalParameter(ParameterKind kind) { | |
| 5530 NormalFormalParameter parameter = parseNormalFormalParameter(); | |
| 5531 if (_matches(TokenType.EQ)) { | |
| 5532 Token seperator = getAndAdvance(); | |
| 5533 Expression defaultValue = parseExpression2(); | |
| 5534 if (kind == ParameterKind.NAMED) { | |
| 5535 _reportErrorForToken( | |
| 5536 ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, seperator); | |
| 5537 } else if (kind == ParameterKind.REQUIRED) { | |
| 5538 _reportErrorForNode( | |
| 5539 ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter); | |
| 5540 } | |
| 5541 return new DefaultFormalParameter( | |
| 5542 parameter, kind, seperator, defaultValue); | |
| 5543 } else if (_matches(TokenType.COLON)) { | |
| 5544 Token seperator = getAndAdvance(); | |
| 5545 Expression defaultValue = parseExpression2(); | |
| 5546 if (kind == ParameterKind.POSITIONAL) { | |
| 5547 _reportErrorForToken( | |
| 5548 ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER, | |
| 5549 seperator); | |
| 5550 } else if (kind == ParameterKind.REQUIRED) { | |
| 5551 _reportErrorForNode( | |
| 5552 ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter); | |
| 5553 } | |
| 5554 return new DefaultFormalParameter( | |
| 5555 parameter, kind, seperator, defaultValue); | |
| 5556 } else if (kind != ParameterKind.REQUIRED) { | |
| 5557 return new DefaultFormalParameter(parameter, kind, null, null); | |
| 5558 } | |
| 5559 return parameter; | |
| 5560 } | |
| 5561 | |
| 5562 /** | |
| 5563 * Parse a for statement. Return the for statement that was parsed. | |
| 5564 * | |
| 5565 * forStatement ::= | |
| 5566 * 'for' '(' forLoopParts ')' statement | |
| 5567 * | |
| 5568 * forLoopParts ::= | |
| 5569 * forInitializerStatement expression? ';' expressionList? | |
| 5570 * | declaredIdentifier 'in' expression | |
| 5571 * | identifier 'in' expression | |
| 5572 * | |
| 5573 * forInitializerStatement ::= | |
| 5574 * localVariableDeclaration ';' | |
| 5575 * | expression? ';' | |
| 5576 */ | |
| 5577 Statement _parseForStatement() { | |
| 5578 bool wasInLoop = _inLoop; | |
| 5579 _inLoop = true; | |
| 5580 try { | |
| 5581 Token awaitKeyword = null; | |
| 5582 if (_matchesString(_AWAIT)) { | |
| 5583 awaitKeyword = getAndAdvance(); | |
| 5584 } | |
| 5585 Token forKeyword = _expectKeyword(Keyword.FOR); | |
| 5586 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 5587 VariableDeclarationList variableList = null; | |
| 5588 Expression initialization = null; | |
| 5589 if (!_matches(TokenType.SEMICOLON)) { | |
| 5590 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 5591 if (_matchesIdentifier() && | |
| 5592 (_tokenMatchesKeyword(_peek(), Keyword.IN) || | |
| 5593 _tokenMatches(_peek(), TokenType.COLON))) { | |
| 5594 List<VariableDeclaration> variables = new List<VariableDeclaration>(); | |
| 5595 SimpleIdentifier variableName = parseSimpleIdentifier(); | |
| 5596 variables.add(new VariableDeclaration(variableName, null, null)); | |
| 5597 variableList = new VariableDeclarationList(commentAndMetadata.comment, | |
| 5598 commentAndMetadata.metadata, null, null, variables); | |
| 5599 } else if (_isInitializedVariableDeclaration()) { | |
| 5600 variableList = | |
| 5601 _parseVariableDeclarationListAfterMetadata(commentAndMetadata); | |
| 5602 } else { | |
| 5603 initialization = parseExpression2(); | |
| 5604 } | |
| 5605 if (_matchesKeyword(Keyword.IN) || _matches(TokenType.COLON)) { | |
| 5606 if (_matches(TokenType.COLON)) { | |
| 5607 _reportErrorForCurrentToken(ParserErrorCode.COLON_IN_PLACE_OF_IN); | |
| 5608 } | |
| 5609 DeclaredIdentifier loopVariable = null; | |
| 5610 SimpleIdentifier identifier = null; | |
| 5611 if (variableList == null) { | |
| 5612 // We found: <expression> 'in' | |
| 5613 _reportErrorForCurrentToken( | |
| 5614 ParserErrorCode.MISSING_VARIABLE_IN_FOR_EACH); | |
| 5615 } else { | |
| 5616 NodeList<VariableDeclaration> variables = variableList.variables; | |
| 5617 if (variables.length > 1) { | |
| 5618 _reportErrorForCurrentToken( | |
| 5619 ParserErrorCode.MULTIPLE_VARIABLES_IN_FOR_EACH, | |
| 5620 [variables.length.toString()]); | |
| 5621 } | |
| 5622 VariableDeclaration variable = variables[0]; | |
| 5623 if (variable.initializer != null) { | |
| 5624 _reportErrorForCurrentToken( | |
| 5625 ParserErrorCode.INITIALIZED_VARIABLE_IN_FOR_EACH); | |
| 5626 } | |
| 5627 Token keyword = variableList.keyword; | |
| 5628 TypeName type = variableList.type; | |
| 5629 if (keyword != null || type != null) { | |
| 5630 loopVariable = new DeclaredIdentifier(commentAndMetadata.comment, | |
| 5631 commentAndMetadata.metadata, keyword, type, variable.name); | |
| 5632 } else { | |
| 5633 if (!commentAndMetadata.metadata.isEmpty) { | |
| 5634 // TODO(jwren) metadata isn't allowed before the identifier in | |
| 5635 // "identifier in expression", add warning if commentAndMetadata | |
| 5636 // has content | |
| 5637 } | |
| 5638 identifier = variable.name; | |
| 5639 } | |
| 5640 } | |
| 5641 Token inKeyword = getAndAdvance(); | |
| 5642 Expression iterator = parseExpression2(); | |
| 5643 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 5644 Statement body = parseStatement2(); | |
| 5645 if (loopVariable == null) { | |
| 5646 return new ForEachStatement.withReference(awaitKeyword, forKeyword, | |
| 5647 leftParenthesis, identifier, inKeyword, iterator, | |
| 5648 rightParenthesis, body); | |
| 5649 } | |
| 5650 return new ForEachStatement.withDeclaration(awaitKeyword, forKeyword, | |
| 5651 leftParenthesis, loopVariable, inKeyword, iterator, | |
| 5652 rightParenthesis, body); | |
| 5653 } | |
| 5654 } | |
| 5655 if (awaitKeyword != null) { | |
| 5656 _reportErrorForToken( | |
| 5657 ParserErrorCode.INVALID_AWAIT_IN_FOR, awaitKeyword); | |
| 5658 } | |
| 5659 Token leftSeparator = _expect(TokenType.SEMICOLON); | |
| 5660 Expression condition = null; | |
| 5661 if (!_matches(TokenType.SEMICOLON)) { | |
| 5662 condition = parseExpression2(); | |
| 5663 } | |
| 5664 Token rightSeparator = _expect(TokenType.SEMICOLON); | |
| 5665 List<Expression> updaters = null; | |
| 5666 if (!_matches(TokenType.CLOSE_PAREN)) { | |
| 5667 updaters = _parseExpressionList(); | |
| 5668 } | |
| 5669 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 5670 Statement body = parseStatement2(); | |
| 5671 return new ForStatement(forKeyword, leftParenthesis, variableList, | |
| 5672 initialization, leftSeparator, condition, rightSeparator, updaters, | |
| 5673 rightParenthesis, body); | |
| 5674 } finally { | |
| 5675 _inLoop = wasInLoop; | |
| 5676 } | |
| 5677 } | |
| 5678 | |
| 5679 /** | |
| 5680 * Parse a function body. The [mayBeEmpty] is `true` if the function body is | |
| 5681 * allowed to be empty. The [emptyErrorCode] is the error code to report if | |
| 5682 * function body expected, but not found. The [inExpression] is `true` if the | |
| 5683 * function body is being parsed as part of an expression and therefore does | |
| 5684 * not have a terminating semicolon. Return the function body that was parsed. | |
| 5685 * | |
| 5686 * functionBody ::= | |
| 5687 * '=>' expression ';' | |
| 5688 * | block | |
| 5689 * | |
| 5690 * functionExpressionBody ::= | |
| 5691 * '=>' expression | |
| 5692 * | block | |
| 5693 */ | |
| 5694 FunctionBody _parseFunctionBody( | |
| 5695 bool mayBeEmpty, ParserErrorCode emptyErrorCode, bool inExpression) { | |
| 5696 bool wasInAsync = _inAsync; | |
| 5697 bool wasInGenerator = _inGenerator; | |
| 5698 bool wasInLoop = _inLoop; | |
| 5699 bool wasInSwitch = _inSwitch; | |
| 5700 _inAsync = false; | |
| 5701 _inGenerator = false; | |
| 5702 _inLoop = false; | |
| 5703 _inSwitch = false; | |
| 5704 try { | |
| 5705 if (_matches(TokenType.SEMICOLON)) { | |
| 5706 if (!mayBeEmpty) { | |
| 5707 _reportErrorForCurrentToken(emptyErrorCode); | |
| 5708 } | |
| 5709 return new EmptyFunctionBody(getAndAdvance()); | |
| 5710 } else if (_matchesString(_NATIVE)) { | |
| 5711 Token nativeToken = getAndAdvance(); | |
| 5712 StringLiteral stringLiteral = null; | |
| 5713 if (_matches(TokenType.STRING)) { | |
| 5714 stringLiteral = parseStringLiteral(); | |
| 5715 } | |
| 5716 return new NativeFunctionBody( | |
| 5717 nativeToken, stringLiteral, _expect(TokenType.SEMICOLON)); | |
| 5718 } | |
| 5719 Token keyword = null; | |
| 5720 Token star = null; | |
| 5721 if (_matchesString(ASYNC)) { | |
| 5722 keyword = getAndAdvance(); | |
| 5723 if (_matches(TokenType.STAR)) { | |
| 5724 star = getAndAdvance(); | |
| 5725 _inGenerator = true; | |
| 5726 } | |
| 5727 _inAsync = true; | |
| 5728 } else if (_matchesString(SYNC)) { | |
| 5729 keyword = getAndAdvance(); | |
| 5730 if (_matches(TokenType.STAR)) { | |
| 5731 star = getAndAdvance(); | |
| 5732 _inGenerator = true; | |
| 5733 } | |
| 5734 } | |
| 5735 if (_matches(TokenType.FUNCTION)) { | |
| 5736 if (keyword != null) { | |
| 5737 if (!_tokenMatchesString(keyword, ASYNC)) { | |
| 5738 _reportErrorForToken(ParserErrorCode.INVALID_SYNC, keyword); | |
| 5739 keyword = null; | |
| 5740 } else if (star != null) { | |
| 5741 _reportErrorForToken( | |
| 5742 ParserErrorCode.INVALID_STAR_AFTER_ASYNC, star); | |
| 5743 } | |
| 5744 } | |
| 5745 Token functionDefinition = getAndAdvance(); | |
| 5746 if (_matchesKeyword(Keyword.RETURN)) { | |
| 5747 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 5748 [_currentToken.lexeme]); | |
| 5749 _advance(); | |
| 5750 } | |
| 5751 Expression expression = parseExpression2(); | |
| 5752 Token semicolon = null; | |
| 5753 if (!inExpression) { | |
| 5754 semicolon = _expect(TokenType.SEMICOLON); | |
| 5755 } | |
| 5756 if (!_parseFunctionBodies) { | |
| 5757 return new EmptyFunctionBody( | |
| 5758 _createSyntheticToken(TokenType.SEMICOLON)); | |
| 5759 } | |
| 5760 return new ExpressionFunctionBody( | |
| 5761 keyword, functionDefinition, expression, semicolon); | |
| 5762 } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 5763 if (keyword != null) { | |
| 5764 if (_tokenMatchesString(keyword, SYNC) && star == null) { | |
| 5765 _reportErrorForToken( | |
| 5766 ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword); | |
| 5767 } | |
| 5768 } | |
| 5769 if (!_parseFunctionBodies) { | |
| 5770 _skipBlock(); | |
| 5771 return new EmptyFunctionBody( | |
| 5772 _createSyntheticToken(TokenType.SEMICOLON)); | |
| 5773 } | |
| 5774 return new BlockFunctionBody(keyword, star, parseBlock()); | |
| 5775 } else { | |
| 5776 // Invalid function body | |
| 5777 _reportErrorForCurrentToken(emptyErrorCode); | |
| 5778 return new EmptyFunctionBody( | |
| 5779 _createSyntheticToken(TokenType.SEMICOLON)); | |
| 5780 } | |
| 5781 } finally { | |
| 5782 _inAsync = wasInAsync; | |
| 5783 _inGenerator = wasInGenerator; | |
| 5784 _inLoop = wasInLoop; | |
| 5785 _inSwitch = wasInSwitch; | |
| 5786 } | |
| 5787 } | |
| 5788 | |
| 5789 /** | |
| 5790 * Parse a function declaration. The [commentAndMetadata] is the documentation | |
| 5791 * comment and metadata to be associated with the declaration. The | |
| 5792 * [externalKeyword] is the 'external' keyword, or `null` if the function is | |
| 5793 * not external. The [returnType] is the return type, or `null` if there is no | |
| 5794 * return type. The [isStatement] is `true` if the function declaration is | |
| 5795 * being parsed as a statement. Return the function declaration that was | |
| 5796 * parsed. | |
| 5797 * | |
| 5798 * functionDeclaration ::= | |
| 5799 * functionSignature functionBody | |
| 5800 * | returnType? getOrSet identifier formalParameterList functionBody | |
| 5801 */ | |
| 5802 FunctionDeclaration _parseFunctionDeclaration( | |
| 5803 CommentAndMetadata commentAndMetadata, Token externalKeyword, | |
| 5804 TypeName returnType) { | |
| 5805 Token keyword = null; | |
| 5806 bool isGetter = false; | |
| 5807 if (_matchesKeyword(Keyword.GET) && | |
| 5808 !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 5809 keyword = getAndAdvance(); | |
| 5810 isGetter = true; | |
| 5811 } else if (_matchesKeyword(Keyword.SET) && | |
| 5812 !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 5813 keyword = getAndAdvance(); | |
| 5814 } | |
| 5815 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 5816 TypeParameterList typeParameters = null; | |
| 5817 if (parseGenericMethods && _matches(TokenType.LT)) { | |
| 5818 typeParameters = parseTypeParameterList(); | |
| 5819 } | |
| 5820 FormalParameterList parameters = null; | |
| 5821 if (!isGetter) { | |
| 5822 if (_matches(TokenType.OPEN_PAREN)) { | |
| 5823 parameters = parseFormalParameterList(); | |
| 5824 _validateFormalParameterList(parameters); | |
| 5825 } else { | |
| 5826 _reportErrorForCurrentToken( | |
| 5827 ParserErrorCode.MISSING_FUNCTION_PARAMETERS); | |
| 5828 parameters = new FormalParameterList( | |
| 5829 _createSyntheticToken(TokenType.OPEN_PAREN), null, null, null, | |
| 5830 _createSyntheticToken(TokenType.CLOSE_PAREN)); | |
| 5831 } | |
| 5832 } else if (_matches(TokenType.OPEN_PAREN)) { | |
| 5833 _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS); | |
| 5834 parseFormalParameterList(); | |
| 5835 } | |
| 5836 FunctionBody body; | |
| 5837 if (externalKeyword == null) { | |
| 5838 body = _parseFunctionBody( | |
| 5839 false, ParserErrorCode.MISSING_FUNCTION_BODY, false); | |
| 5840 } else { | |
| 5841 body = new EmptyFunctionBody(_expect(TokenType.SEMICOLON)); | |
| 5842 } | |
| 5843 // if (!isStatement && matches(TokenType.SEMICOLON)) { | |
| 5844 // // TODO(brianwilkerson) Improve this error message. | |
| 5845 // reportError(ParserErrorCode.UNEXPECTED_TOKEN, currentToken.getLexeme
()); | |
| 5846 // advance(); | |
| 5847 // } | |
| 5848 return new FunctionDeclaration(commentAndMetadata.comment, | |
| 5849 commentAndMetadata.metadata, externalKeyword, returnType, keyword, name, | |
| 5850 new FunctionExpression(typeParameters, parameters, body)); | |
| 5851 } | |
| 5852 | |
| 5853 /** | |
| 5854 * Parse a function declaration statement. Return the function declaration | |
| 5855 * statement that was parsed. | |
| 5856 * | |
| 5857 * functionDeclarationStatement ::= | |
| 5858 * functionSignature functionBody | |
| 5859 */ | |
| 5860 Statement _parseFunctionDeclarationStatement() { | |
| 5861 Modifiers modifiers = _parseModifiers(); | |
| 5862 _validateModifiersForFunctionDeclarationStatement(modifiers); | |
| 5863 return _parseFunctionDeclarationStatementAfterReturnType( | |
| 5864 _parseCommentAndMetadata(), _parseOptionalReturnType()); | |
| 5865 } | |
| 5866 | |
| 5867 /** | |
| 5868 * Parse a function declaration statement. The [commentAndMetadata] is the | |
| 5869 * documentation comment and metadata to be associated with the declaration. | |
| 5870 * The [returnType] is the return type, or `null` if there is no return type. | |
| 5871 * Return the function declaration statement that was parsed. | |
| 5872 * | |
| 5873 * functionDeclarationStatement ::= | |
| 5874 * functionSignature functionBody | |
| 5875 */ | |
| 5876 Statement _parseFunctionDeclarationStatementAfterReturnType( | |
| 5877 CommentAndMetadata commentAndMetadata, TypeName returnType) { | |
| 5878 FunctionDeclaration declaration = | |
| 5879 _parseFunctionDeclaration(commentAndMetadata, null, returnType); | |
| 5880 Token propertyKeyword = declaration.propertyKeyword; | |
| 5881 if (propertyKeyword != null) { | |
| 5882 if ((propertyKeyword as KeywordToken).keyword == Keyword.GET) { | |
| 5883 _reportErrorForToken( | |
| 5884 ParserErrorCode.GETTER_IN_FUNCTION, propertyKeyword); | |
| 5885 } else { | |
| 5886 _reportErrorForToken( | |
| 5887 ParserErrorCode.SETTER_IN_FUNCTION, propertyKeyword); | |
| 5888 } | |
| 5889 } | |
| 5890 return new FunctionDeclarationStatement(declaration); | |
| 5891 } | |
| 5892 | |
| 5893 /** | |
| 5894 * Parse a function type alias. The [commentAndMetadata] is the metadata to be | |
| 5895 * associated with the member. The [keyword] is the token representing the | |
| 5896 * 'typedef' keyword. Return the function type alias that was parsed. | |
| 5897 * | |
| 5898 * functionTypeAlias ::= | |
| 5899 * functionPrefix typeParameterList? formalParameterList ';' | |
| 5900 * | |
| 5901 * functionPrefix ::= | |
| 5902 * returnType? name | |
| 5903 */ | |
| 5904 FunctionTypeAlias _parseFunctionTypeAlias( | |
| 5905 CommentAndMetadata commentAndMetadata, Token keyword) { | |
| 5906 TypeName returnType = null; | |
| 5907 if (hasReturnTypeInTypeAlias) { | |
| 5908 returnType = parseReturnType(); | |
| 5909 } | |
| 5910 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 5911 TypeParameterList typeParameters = null; | |
| 5912 if (_matches(TokenType.LT)) { | |
| 5913 typeParameters = parseTypeParameterList(); | |
| 5914 } | |
| 5915 if (_matches(TokenType.SEMICOLON) || _matches(TokenType.EOF)) { | |
| 5916 _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS); | |
| 5917 FormalParameterList parameters = new FormalParameterList( | |
| 5918 _createSyntheticToken(TokenType.OPEN_PAREN), null, null, null, | |
| 5919 _createSyntheticToken(TokenType.CLOSE_PAREN)); | |
| 5920 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 5921 return new FunctionTypeAlias(commentAndMetadata.comment, | |
| 5922 commentAndMetadata.metadata, keyword, returnType, name, | |
| 5923 typeParameters, parameters, semicolon); | |
| 5924 } else if (!_matches(TokenType.OPEN_PAREN)) { | |
| 5925 _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS); | |
| 5926 // TODO(brianwilkerson) Recover from this error. At the very least we | |
| 5927 // should skip to the start of the next valid compilation unit member, | |
| 5928 // allowing for the possibility of finding the typedef parameters before | |
| 5929 // that point. | |
| 5930 return new FunctionTypeAlias(commentAndMetadata.comment, | |
| 5931 commentAndMetadata.metadata, keyword, returnType, name, | |
| 5932 typeParameters, new FormalParameterList( | |
| 5933 _createSyntheticToken(TokenType.OPEN_PAREN), null, null, null, | |
| 5934 _createSyntheticToken(TokenType.CLOSE_PAREN)), | |
| 5935 _createSyntheticToken(TokenType.SEMICOLON)); | |
| 5936 } | |
| 5937 FormalParameterList parameters = parseFormalParameterList(); | |
| 5938 _validateFormalParameterList(parameters); | |
| 5939 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 5940 return new FunctionTypeAlias(commentAndMetadata.comment, | |
| 5941 commentAndMetadata.metadata, keyword, returnType, name, typeParameters, | |
| 5942 parameters, semicolon); | |
| 5943 } | |
| 5944 | |
| 5945 /** | |
| 5946 * Parse a getter. The [commentAndMetadata] is the documentation comment and | |
| 5947 * metadata to be associated with the declaration. The externalKeyword] is the | |
| 5948 * 'external' token. The staticKeyword] is the static keyword, or `null` if | |
| 5949 * the getter is not static. The [returnType] the return type that has already | |
| 5950 * been parsed, or `null` if there was no return type. Return the getter that | |
| 5951 * was parsed. | |
| 5952 * | |
| 5953 * getter ::= | |
| 5954 * getterSignature functionBody? | |
| 5955 * | |
| 5956 * getterSignature ::= | |
| 5957 * 'external'? 'static'? returnType? 'get' identifier | |
| 5958 */ | |
| 5959 MethodDeclaration _parseGetter(CommentAndMetadata commentAndMetadata, | |
| 5960 Token externalKeyword, Token staticKeyword, TypeName returnType) { | |
| 5961 Token propertyKeyword = _expectKeyword(Keyword.GET); | |
| 5962 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 5963 if (_matches(TokenType.OPEN_PAREN) && | |
| 5964 _tokenMatches(_peek(), TokenType.CLOSE_PAREN)) { | |
| 5965 _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS); | |
| 5966 _advance(); | |
| 5967 _advance(); | |
| 5968 } | |
| 5969 FunctionBody body = _parseFunctionBody( | |
| 5970 externalKeyword != null || staticKeyword == null, | |
| 5971 ParserErrorCode.STATIC_GETTER_WITHOUT_BODY, false); | |
| 5972 if (externalKeyword != null && body is! EmptyFunctionBody) { | |
| 5973 _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_GETTER_WITH_BODY); | |
| 5974 } | |
| 5975 return new MethodDeclaration(commentAndMetadata.comment, | |
| 5976 commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, | |
| 5977 propertyKeyword, null, name, null, null, body); | |
| 5978 } | |
| 5979 | |
| 5980 /** | |
| 5981 * Parse a list of identifiers. Return the list of identifiers that were | |
| 5982 * parsed. | |
| 5983 * | |
| 5984 * identifierList ::= | |
| 5985 * identifier (',' identifier)* | |
| 5986 */ | |
| 5987 List<SimpleIdentifier> _parseIdentifierList() { | |
| 5988 List<SimpleIdentifier> identifiers = new List<SimpleIdentifier>(); | |
| 5989 identifiers.add(parseSimpleIdentifier()); | |
| 5990 while (_matches(TokenType.COMMA)) { | |
| 5991 _advance(); | |
| 5992 identifiers.add(parseSimpleIdentifier()); | |
| 5993 } | |
| 5994 return identifiers; | |
| 5995 } | |
| 5996 | |
| 5997 /** | |
| 5998 * Parse an if statement. Return the if statement that was parsed. | |
| 5999 * | |
| 6000 * ifStatement ::= | |
| 6001 * 'if' '(' expression ')' statement ('else' statement)? | |
| 6002 */ | |
| 6003 Statement _parseIfStatement() { | |
| 6004 Token ifKeyword = _expectKeyword(Keyword.IF); | |
| 6005 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 6006 Expression condition = parseExpression2(); | |
| 6007 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 6008 Statement thenStatement = parseStatement2(); | |
| 6009 Token elseKeyword = null; | |
| 6010 Statement elseStatement = null; | |
| 6011 if (_matchesKeyword(Keyword.ELSE)) { | |
| 6012 elseKeyword = getAndAdvance(); | |
| 6013 elseStatement = parseStatement2(); | |
| 6014 } | |
| 6015 return new IfStatement(ifKeyword, leftParenthesis, condition, | |
| 6016 rightParenthesis, thenStatement, elseKeyword, elseStatement); | |
| 6017 } | |
| 6018 | |
| 6019 /** | |
| 6020 * Parse an import directive. The [commentAndMetadata] is the metadata to be | |
| 6021 * associated with the directive. Return the import directive that was parsed. | |
| 6022 * | |
| 6023 * importDirective ::= | |
| 6024 * metadata 'import' stringLiteral (deferred)? ('as' identifier)? comb
inator*';' | |
| 6025 */ | |
| 6026 ImportDirective _parseImportDirective(CommentAndMetadata commentAndMetadata) { | |
| 6027 Token importKeyword = _expectKeyword(Keyword.IMPORT); | |
| 6028 StringLiteral libraryUri = _parseUri(); | |
| 6029 Token deferredToken = null; | |
| 6030 Token asToken = null; | |
| 6031 SimpleIdentifier prefix = null; | |
| 6032 if (_matchesKeyword(Keyword.DEFERRED)) { | |
| 6033 deferredToken = getAndAdvance(); | |
| 6034 } | |
| 6035 if (_matchesKeyword(Keyword.AS)) { | |
| 6036 asToken = getAndAdvance(); | |
| 6037 prefix = parseSimpleIdentifier(); | |
| 6038 } else if (deferredToken != null) { | |
| 6039 _reportErrorForCurrentToken( | |
| 6040 ParserErrorCode.MISSING_PREFIX_IN_DEFERRED_IMPORT); | |
| 6041 } else if (!_matches(TokenType.SEMICOLON) && | |
| 6042 !_matchesString(_SHOW) && | |
| 6043 !_matchesString(_HIDE)) { | |
| 6044 Token nextToken = _peek(); | |
| 6045 if (_tokenMatchesKeyword(nextToken, Keyword.AS) || | |
| 6046 _tokenMatchesString(nextToken, _SHOW) || | |
| 6047 _tokenMatchesString(nextToken, _HIDE)) { | |
| 6048 _reportErrorForCurrentToken( | |
| 6049 ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken]); | |
| 6050 _advance(); | |
| 6051 if (_matchesKeyword(Keyword.AS)) { | |
| 6052 asToken = getAndAdvance(); | |
| 6053 prefix = parseSimpleIdentifier(); | |
| 6054 } | |
| 6055 } | |
| 6056 } | |
| 6057 List<Combinator> combinators = _parseCombinators(); | |
| 6058 Token semicolon = _expectSemicolon(); | |
| 6059 return new ImportDirective(commentAndMetadata.comment, | |
| 6060 commentAndMetadata.metadata, importKeyword, libraryUri, deferredToken, | |
| 6061 asToken, prefix, combinators, semicolon); | |
| 6062 } | |
| 6063 | |
| 6064 /** | |
| 6065 * Parse a list of initialized identifiers. The [commentAndMetadata] is the | |
| 6066 * documentation comment and metadata to be associated with the declaration. | |
| 6067 * The [staticKeyword] is the static keyword, or `null` if the getter is not | |
| 6068 * static. The [keyword] is the token representing the 'final', 'const' or | |
| 6069 * 'var' keyword, or `null` if there is no keyword. The [type] is the type | |
| 6070 * that has already been parsed, or `null` if 'var' was provided. Return the | |
| 6071 * getter that was parsed. | |
| 6072 * | |
| 6073 * ?? ::= | |
| 6074 * 'static'? ('var' | type) initializedIdentifierList ';' | |
| 6075 * | 'final' type? initializedIdentifierList ';' | |
| 6076 * | |
| 6077 * initializedIdentifierList ::= | |
| 6078 * initializedIdentifier (',' initializedIdentifier)* | |
| 6079 * | |
| 6080 * initializedIdentifier ::= | |
| 6081 * identifier ('=' expression)? | |
| 6082 */ | |
| 6083 FieldDeclaration _parseInitializedIdentifierList( | |
| 6084 CommentAndMetadata commentAndMetadata, Token staticKeyword, Token keyword, | |
| 6085 TypeName type) { | |
| 6086 VariableDeclarationList fieldList = | |
| 6087 _parseVariableDeclarationListAfterType(null, keyword, type); | |
| 6088 return new FieldDeclaration(commentAndMetadata.comment, | |
| 6089 commentAndMetadata.metadata, staticKeyword, fieldList, | |
| 6090 _expect(TokenType.SEMICOLON)); | |
| 6091 } | |
| 6092 | |
| 6093 /** | |
| 6094 * Parse an instance creation expression. The [keyword] is the 'new' or | |
| 6095 * 'const' keyword that introduces the expression. Return the instance | |
| 6096 * creation expression that was parsed. | |
| 6097 * | |
| 6098 * instanceCreationExpression ::= | |
| 6099 * ('new' | 'const') type ('.' identifier)? argumentList | |
| 6100 */ | |
| 6101 InstanceCreationExpression _parseInstanceCreationExpression(Token keyword) { | |
| 6102 ConstructorName constructorName = parseConstructorName(); | |
| 6103 ArgumentList argumentList = parseArgumentList(); | |
| 6104 return new InstanceCreationExpression( | |
| 6105 keyword, constructorName, argumentList); | |
| 6106 } | |
| 6107 | |
| 6108 /** | |
| 6109 * Parse a library directive. The [commentAndMetadata] is the metadata to be | |
| 6110 * associated with the directive. Return the library directive that was | |
| 6111 * parsed. | |
| 6112 * | |
| 6113 * libraryDirective ::= | |
| 6114 * metadata 'library' identifier ';' | |
| 6115 */ | |
| 6116 LibraryDirective _parseLibraryDirective( | |
| 6117 CommentAndMetadata commentAndMetadata) { | |
| 6118 Token keyword = _expectKeyword(Keyword.LIBRARY); | |
| 6119 LibraryIdentifier libraryName = _parseLibraryName( | |
| 6120 ParserErrorCode.MISSING_NAME_IN_LIBRARY_DIRECTIVE, keyword); | |
| 6121 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 6122 return new LibraryDirective(commentAndMetadata.comment, | |
| 6123 commentAndMetadata.metadata, keyword, libraryName, semicolon); | |
| 6124 } | |
| 6125 | |
| 6126 /** | |
| 6127 * Parse a library name. The [missingNameError] is the error code to be used | |
| 6128 * if the library name is missing. The [missingNameToken] is the token | |
| 6129 * associated with the error produced if the library name is missing. Return | |
| 6130 * the library name that was parsed. | |
| 6131 * | |
| 6132 * libraryName ::= | |
| 6133 * libraryIdentifier | |
| 6134 */ | |
| 6135 LibraryIdentifier _parseLibraryName( | |
| 6136 ParserErrorCode missingNameError, Token missingNameToken) { | |
| 6137 if (_matchesIdentifier()) { | |
| 6138 return parseLibraryIdentifier(); | |
| 6139 } else if (_matches(TokenType.STRING)) { | |
| 6140 // TODO(brianwilkerson) Recovery: This should be extended to handle | |
| 6141 // arbitrary tokens until we can find a token that can start a compilation | |
| 6142 // unit member. | |
| 6143 StringLiteral string = parseStringLiteral(); | |
| 6144 _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string); | |
| 6145 } else { | |
| 6146 _reportErrorForToken(missingNameError, missingNameToken); | |
| 6147 } | |
| 6148 List<SimpleIdentifier> components = new List<SimpleIdentifier>(); | |
| 6149 components.add(_createSyntheticIdentifier()); | |
| 6150 return new LibraryIdentifier(components); | |
| 6151 } | |
| 6152 | |
| 6153 /** | |
| 6154 * Parse a list literal. The [modifier] is the 'const' modifier appearing | |
| 6155 * before the literal, or `null` if there is no modifier. The [typeArguments] | |
| 6156 * is the type arguments appearing before the literal, or `null` if there are | |
| 6157 * no type arguments. Return the list literal that was parsed. | |
| 6158 * | |
| 6159 * listLiteral ::= | |
| 6160 * 'const'? typeArguments? '[' (expressionList ','?)? ']' | |
| 6161 */ | |
| 6162 ListLiteral _parseListLiteral( | |
| 6163 Token modifier, TypeArgumentList typeArguments) { | |
| 6164 // may be empty list literal | |
| 6165 if (_matches(TokenType.INDEX)) { | |
| 6166 BeginToken leftBracket = _createToken( | |
| 6167 _currentToken, TokenType.OPEN_SQUARE_BRACKET, isBegin: true); | |
| 6168 Token rightBracket = | |
| 6169 new Token(TokenType.CLOSE_SQUARE_BRACKET, _currentToken.offset + 1); | |
| 6170 leftBracket.endToken = rightBracket; | |
| 6171 rightBracket.setNext(_currentToken.next); | |
| 6172 leftBracket.setNext(rightBracket); | |
| 6173 _currentToken.previous.setNext(leftBracket); | |
| 6174 _currentToken = _currentToken.next; | |
| 6175 return new ListLiteral( | |
| 6176 modifier, typeArguments, leftBracket, null, rightBracket); | |
| 6177 } | |
| 6178 // open | |
| 6179 Token leftBracket = _expect(TokenType.OPEN_SQUARE_BRACKET); | |
| 6180 if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) { | |
| 6181 return new ListLiteral( | |
| 6182 modifier, typeArguments, leftBracket, null, getAndAdvance()); | |
| 6183 } | |
| 6184 bool wasInInitializer = _inInitializer; | |
| 6185 _inInitializer = false; | |
| 6186 try { | |
| 6187 List<Expression> elements = new List<Expression>(); | |
| 6188 elements.add(parseExpression2()); | |
| 6189 while (_optional(TokenType.COMMA)) { | |
| 6190 if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) { | |
| 6191 return new ListLiteral( | |
| 6192 modifier, typeArguments, leftBracket, elements, getAndAdvance()); | |
| 6193 } | |
| 6194 elements.add(parseExpression2()); | |
| 6195 } | |
| 6196 Token rightBracket = _expect(TokenType.CLOSE_SQUARE_BRACKET); | |
| 6197 return new ListLiteral( | |
| 6198 modifier, typeArguments, leftBracket, elements, rightBracket); | |
| 6199 } finally { | |
| 6200 _inInitializer = wasInInitializer; | |
| 6201 } | |
| 6202 } | |
| 6203 | |
| 6204 /** | |
| 6205 * Parse a list or map literal. The [modifier] is the 'const' modifier | |
| 6206 * appearing before the literal, or `null` if there is no modifier. Return the | |
| 6207 * list or map literal that was parsed. | |
| 6208 * | |
| 6209 * listOrMapLiteral ::= | |
| 6210 * listLiteral | |
| 6211 * | mapLiteral | |
| 6212 */ | |
| 6213 TypedLiteral _parseListOrMapLiteral(Token modifier) { | |
| 6214 TypeArgumentList typeArguments = null; | |
| 6215 if (_matches(TokenType.LT)) { | |
| 6216 typeArguments = parseTypeArgumentList(); | |
| 6217 } | |
| 6218 if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 6219 return _parseMapLiteral(modifier, typeArguments); | |
| 6220 } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) || | |
| 6221 _matches(TokenType.INDEX)) { | |
| 6222 return _parseListLiteral(modifier, typeArguments); | |
| 6223 } | |
| 6224 _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_LIST_OR_MAP_LITERAL); | |
| 6225 return new ListLiteral(modifier, typeArguments, | |
| 6226 _createSyntheticToken(TokenType.OPEN_SQUARE_BRACKET), null, | |
| 6227 _createSyntheticToken(TokenType.CLOSE_SQUARE_BRACKET)); | |
| 6228 } | |
| 6229 | |
| 6230 /** | |
| 6231 * Parse a logical and expression. Return the logical and expression that was | |
| 6232 * parsed. | |
| 6233 * | |
| 6234 * logicalAndExpression ::= | |
| 6235 * equalityExpression ('&&' equalityExpression)* | |
| 6236 */ | |
| 6237 Expression _parseLogicalAndExpression() { | |
| 6238 Expression expression = _parseEqualityExpression(); | |
| 6239 while (_matches(TokenType.AMPERSAND_AMPERSAND)) { | |
| 6240 Token operator = getAndAdvance(); | |
| 6241 expression = new BinaryExpression( | |
| 6242 expression, operator, _parseEqualityExpression()); | |
| 6243 } | |
| 6244 return expression; | |
| 6245 } | |
| 6246 | |
| 6247 /** | |
| 6248 * Parse a map literal. The [modifier] is the 'const' modifier appearing | |
| 6249 * before the literal, or `null` if there is no modifier. The [typeArguments] | |
| 6250 * is the type arguments that were declared, or `null` if there are no type | |
| 6251 * arguments. Return the map literal that was parsed. | |
| 6252 * | |
| 6253 * mapLiteral ::= | |
| 6254 * 'const'? typeArguments? '{' (mapLiteralEntry (',' mapLiteralEntry)*
','?)? '}' | |
| 6255 */ | |
| 6256 MapLiteral _parseMapLiteral(Token modifier, TypeArgumentList typeArguments) { | |
| 6257 Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | |
| 6258 List<MapLiteralEntry> entries = new List<MapLiteralEntry>(); | |
| 6259 if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 6260 return new MapLiteral( | |
| 6261 modifier, typeArguments, leftBracket, entries, getAndAdvance()); | |
| 6262 } | |
| 6263 bool wasInInitializer = _inInitializer; | |
| 6264 _inInitializer = false; | |
| 6265 try { | |
| 6266 entries.add(parseMapLiteralEntry()); | |
| 6267 while (_optional(TokenType.COMMA)) { | |
| 6268 if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 6269 return new MapLiteral( | |
| 6270 modifier, typeArguments, leftBracket, entries, getAndAdvance()); | |
| 6271 } | |
| 6272 entries.add(parseMapLiteralEntry()); | |
| 6273 } | |
| 6274 Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 6275 return new MapLiteral( | |
| 6276 modifier, typeArguments, leftBracket, entries, rightBracket); | |
| 6277 } finally { | |
| 6278 _inInitializer = wasInInitializer; | |
| 6279 } | |
| 6280 } | |
| 6281 | |
| 6282 /** | |
| 6283 * Parse a method declaration. The [commentAndMetadata] is the documentation | |
| 6284 * comment and metadata to be associated with the declaration. The | |
| 6285 * [externalKeyword] is the 'external' token. The [staticKeyword] is the | |
| 6286 * static keyword, or `null` if the getter is not static. The [returnType] is | |
| 6287 * the return type of the method. The [name] is the name of the method. The | |
| 6288 * [parameters] is the parameters to the method. Return the method declaration | |
| 6289 * that was parsed. | |
| 6290 * | |
| 6291 * functionDeclaration ::= | |
| 6292 * ('external' 'static'?)? functionSignature functionBody | |
| 6293 * | 'external'? functionSignature ';' | |
| 6294 */ | |
| 6295 MethodDeclaration _parseMethodDeclarationAfterParameters( | |
| 6296 CommentAndMetadata commentAndMetadata, Token externalKeyword, | |
| 6297 Token staticKeyword, TypeName returnType, SimpleIdentifier name, | |
| 6298 TypeParameterList typeParameters, FormalParameterList parameters) { | |
| 6299 FunctionBody body = _parseFunctionBody( | |
| 6300 externalKeyword != null || staticKeyword == null, | |
| 6301 ParserErrorCode.MISSING_FUNCTION_BODY, false); | |
| 6302 if (externalKeyword != null) { | |
| 6303 if (body is! EmptyFunctionBody) { | |
| 6304 _reportErrorForNode(ParserErrorCode.EXTERNAL_METHOD_WITH_BODY, body); | |
| 6305 } | |
| 6306 } else if (staticKeyword != null) { | |
| 6307 if (body is EmptyFunctionBody && _parseFunctionBodies) { | |
| 6308 _reportErrorForNode(ParserErrorCode.ABSTRACT_STATIC_METHOD, body); | |
| 6309 } | |
| 6310 } | |
| 6311 return new MethodDeclaration(commentAndMetadata.comment, | |
| 6312 commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, | |
| 6313 null, null, name, typeParameters, parameters, body); | |
| 6314 } | |
| 6315 | |
| 6316 /** | |
| 6317 * Parse a method declaration. The [commentAndMetadata] is the documentation | |
| 6318 * comment and metadata to be associated with the declaration. The | |
| 6319 * [externalKeyword] is the 'external' token. The [staticKeyword] is the | |
| 6320 * static keyword, or `null` if the getter is not static. The [returnType] is | |
| 6321 * the return type of the method. Return the method declaration that was | |
| 6322 * parsed. | |
| 6323 * | |
| 6324 * functionDeclaration ::= | |
| 6325 * 'external'? 'static'? functionSignature functionBody | |
| 6326 * | 'external'? functionSignature ';' | |
| 6327 */ | |
| 6328 MethodDeclaration _parseMethodDeclarationAfterReturnType( | |
| 6329 CommentAndMetadata commentAndMetadata, Token externalKeyword, | |
| 6330 Token staticKeyword, TypeName returnType) { | |
| 6331 SimpleIdentifier methodName = parseSimpleIdentifier(); | |
| 6332 TypeParameterList typeParameters = null; | |
| 6333 if (parseGenericMethods && _matches(TokenType.LT)) { | |
| 6334 typeParameters = parseTypeParameterList(); | |
| 6335 } | |
| 6336 FormalParameterList parameters; | |
| 6337 if (!_matches(TokenType.OPEN_PAREN) && | |
| 6338 (_matches(TokenType.OPEN_CURLY_BRACKET) || | |
| 6339 _matches(TokenType.FUNCTION))) { | |
| 6340 _reportErrorForToken( | |
| 6341 ParserErrorCode.MISSING_METHOD_PARAMETERS, _currentToken.previous); | |
| 6342 parameters = new FormalParameterList( | |
| 6343 _createSyntheticToken(TokenType.OPEN_PAREN), null, null, null, | |
| 6344 _createSyntheticToken(TokenType.CLOSE_PAREN)); | |
| 6345 } else { | |
| 6346 parameters = parseFormalParameterList(); | |
| 6347 } | |
| 6348 _validateFormalParameterList(parameters); | |
| 6349 return _parseMethodDeclarationAfterParameters(commentAndMetadata, | |
| 6350 externalKeyword, staticKeyword, returnType, methodName, typeParameters, | |
| 6351 parameters); | |
| 6352 } | |
| 6353 | |
| 6354 /** | |
| 6355 * Parse the modifiers preceding a declaration. This method allows the | |
| 6356 * modifiers to appear in any order but does generate errors for duplicated | |
| 6357 * modifiers. Checks for other problems, such as having the modifiers appear | |
| 6358 * in the wrong order or specifying both 'const' and 'final', are reported in | |
| 6359 * one of the methods whose name is prefixed with `validateModifiersFor`. | |
| 6360 * Return the modifiers that were parsed. | |
| 6361 * | |
| 6362 * modifiers ::= | |
| 6363 * ('abstract' | 'const' | 'external' | 'factory' | 'final' | 'static'
| 'var')* | |
| 6364 */ | |
| 6365 Modifiers _parseModifiers() { | |
| 6366 Modifiers modifiers = new Modifiers(); | |
| 6367 bool progress = true; | |
| 6368 while (progress) { | |
| 6369 if (_tokenMatches(_peek(), TokenType.PERIOD) || | |
| 6370 _tokenMatches(_peek(), TokenType.LT) || | |
| 6371 _tokenMatches(_peek(), TokenType.OPEN_PAREN)) { | |
| 6372 return modifiers; | |
| 6373 } | |
| 6374 if (_matchesKeyword(Keyword.ABSTRACT)) { | |
| 6375 if (modifiers.abstractKeyword != null) { | |
| 6376 _reportErrorForCurrentToken( | |
| 6377 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6378 _advance(); | |
| 6379 } else { | |
| 6380 modifiers.abstractKeyword = getAndAdvance(); | |
| 6381 } | |
| 6382 } else if (_matchesKeyword(Keyword.CONST)) { | |
| 6383 if (modifiers.constKeyword != null) { | |
| 6384 _reportErrorForCurrentToken( | |
| 6385 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6386 _advance(); | |
| 6387 } else { | |
| 6388 modifiers.constKeyword = getAndAdvance(); | |
| 6389 } | |
| 6390 } else if (_matchesKeyword(Keyword.EXTERNAL) && | |
| 6391 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 6392 !_tokenMatches(_peek(), TokenType.LT)) { | |
| 6393 if (modifiers.externalKeyword != null) { | |
| 6394 _reportErrorForCurrentToken( | |
| 6395 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6396 _advance(); | |
| 6397 } else { | |
| 6398 modifiers.externalKeyword = getAndAdvance(); | |
| 6399 } | |
| 6400 } else if (_matchesKeyword(Keyword.FACTORY) && | |
| 6401 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 6402 !_tokenMatches(_peek(), TokenType.LT)) { | |
| 6403 if (modifiers.factoryKeyword != null) { | |
| 6404 _reportErrorForCurrentToken( | |
| 6405 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6406 _advance(); | |
| 6407 } else { | |
| 6408 modifiers.factoryKeyword = getAndAdvance(); | |
| 6409 } | |
| 6410 } else if (_matchesKeyword(Keyword.FINAL)) { | |
| 6411 if (modifiers.finalKeyword != null) { | |
| 6412 _reportErrorForCurrentToken( | |
| 6413 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6414 _advance(); | |
| 6415 } else { | |
| 6416 modifiers.finalKeyword = getAndAdvance(); | |
| 6417 } | |
| 6418 } else if (_matchesKeyword(Keyword.STATIC) && | |
| 6419 !_tokenMatches(_peek(), TokenType.PERIOD) && | |
| 6420 !_tokenMatches(_peek(), TokenType.LT)) { | |
| 6421 if (modifiers.staticKeyword != null) { | |
| 6422 _reportErrorForCurrentToken( | |
| 6423 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6424 _advance(); | |
| 6425 } else { | |
| 6426 modifiers.staticKeyword = getAndAdvance(); | |
| 6427 } | |
| 6428 } else if (_matchesKeyword(Keyword.VAR)) { | |
| 6429 if (modifiers.varKeyword != null) { | |
| 6430 _reportErrorForCurrentToken( | |
| 6431 ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]); | |
| 6432 _advance(); | |
| 6433 } else { | |
| 6434 modifiers.varKeyword = getAndAdvance(); | |
| 6435 } | |
| 6436 } else { | |
| 6437 progress = false; | |
| 6438 } | |
| 6439 } | |
| 6440 return modifiers; | |
| 6441 } | |
| 6442 | |
| 6443 /** | |
| 6444 * Parse a multiplicative expression. Return the multiplicative expression | |
| 6445 * that was parsed. | |
| 6446 * | |
| 6447 * multiplicativeExpression ::= | |
| 6448 * unaryExpression (multiplicativeOperator unaryExpression)* | |
| 6449 * | 'super' (multiplicativeOperator unaryExpression)+ | |
| 6450 */ | |
| 6451 Expression _parseMultiplicativeExpression() { | |
| 6452 Expression expression; | |
| 6453 if (_matchesKeyword(Keyword.SUPER) && | |
| 6454 _currentToken.next.type.isMultiplicativeOperator) { | |
| 6455 expression = new SuperExpression(getAndAdvance()); | |
| 6456 } else { | |
| 6457 expression = _parseUnaryExpression(); | |
| 6458 } | |
| 6459 while (_currentToken.type.isMultiplicativeOperator) { | |
| 6460 Token operator = getAndAdvance(); | |
| 6461 expression = | |
| 6462 new BinaryExpression(expression, operator, _parseUnaryExpression()); | |
| 6463 } | |
| 6464 return expression; | |
| 6465 } | |
| 6466 | |
| 6467 /** | |
| 6468 * Parse a class native clause. Return the native clause that was parsed. | |
| 6469 * | |
| 6470 * classNativeClause ::= | |
| 6471 * 'native' name | |
| 6472 */ | |
| 6473 NativeClause _parseNativeClause() { | |
| 6474 Token keyword = getAndAdvance(); | |
| 6475 StringLiteral name = parseStringLiteral(); | |
| 6476 return new NativeClause(keyword, name); | |
| 6477 } | |
| 6478 | |
| 6479 /** | |
| 6480 * Parse a new expression. Return the new expression that was parsed. | |
| 6481 * | |
| 6482 * newExpression ::= | |
| 6483 * instanceCreationExpression | |
| 6484 */ | |
| 6485 InstanceCreationExpression _parseNewExpression() => | |
| 6486 _parseInstanceCreationExpression(_expectKeyword(Keyword.NEW)); | |
| 6487 | |
| 6488 /** | |
| 6489 * Parse a non-labeled statement. Return the non-labeled statement that was | |
| 6490 * parsed. | |
| 6491 * | |
| 6492 * nonLabeledStatement ::= | |
| 6493 * block | |
| 6494 * | assertStatement | |
| 6495 * | breakStatement | |
| 6496 * | continueStatement | |
| 6497 * | doStatement | |
| 6498 * | forStatement | |
| 6499 * | ifStatement | |
| 6500 * | returnStatement | |
| 6501 * | switchStatement | |
| 6502 * | tryStatement | |
| 6503 * | whileStatement | |
| 6504 * | variableDeclarationList ';' | |
| 6505 * | expressionStatement | |
| 6506 * | functionSignature functionBody | |
| 6507 */ | |
| 6508 Statement _parseNonLabeledStatement() { | |
| 6509 // TODO(brianwilkerson) Pass the comment and metadata on where appropriate. | |
| 6510 CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata(); | |
| 6511 if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 6512 if (_tokenMatches(_peek(), TokenType.STRING)) { | |
| 6513 Token afterString = _skipStringLiteral(_currentToken.next); | |
| 6514 if (afterString != null && afterString.type == TokenType.COLON) { | |
| 6515 return new ExpressionStatement( | |
| 6516 parseExpression2(), _expect(TokenType.SEMICOLON)); | |
| 6517 } | |
| 6518 } | |
| 6519 return parseBlock(); | |
| 6520 } else if (_matches(TokenType.KEYWORD) && | |
| 6521 !(_currentToken as KeywordToken).keyword.isPseudoKeyword) { | |
| 6522 Keyword keyword = (_currentToken as KeywordToken).keyword; | |
| 6523 // TODO(jwren) compute some metrics to figure out a better order for this | |
| 6524 // if-then sequence to optimize performance | |
| 6525 if (keyword == Keyword.ASSERT) { | |
| 6526 return _parseAssertStatement(); | |
| 6527 } else if (keyword == Keyword.BREAK) { | |
| 6528 return _parseBreakStatement(); | |
| 6529 } else if (keyword == Keyword.CONTINUE) { | |
| 6530 return _parseContinueStatement(); | |
| 6531 } else if (keyword == Keyword.DO) { | |
| 6532 return _parseDoStatement(); | |
| 6533 } else if (keyword == Keyword.FOR) { | |
| 6534 return _parseForStatement(); | |
| 6535 } else if (keyword == Keyword.IF) { | |
| 6536 return _parseIfStatement(); | |
| 6537 } else if (keyword == Keyword.RETHROW) { | |
| 6538 return new ExpressionStatement( | |
| 6539 _parseRethrowExpression(), _expect(TokenType.SEMICOLON)); | |
| 6540 } else if (keyword == Keyword.RETURN) { | |
| 6541 return _parseReturnStatement(); | |
| 6542 } else if (keyword == Keyword.SWITCH) { | |
| 6543 return _parseSwitchStatement(); | |
| 6544 } else if (keyword == Keyword.THROW) { | |
| 6545 return new ExpressionStatement( | |
| 6546 _parseThrowExpression(), _expect(TokenType.SEMICOLON)); | |
| 6547 } else if (keyword == Keyword.TRY) { | |
| 6548 return _parseTryStatement(); | |
| 6549 } else if (keyword == Keyword.WHILE) { | |
| 6550 return _parseWhileStatement(); | |
| 6551 } else if (keyword == Keyword.VAR || keyword == Keyword.FINAL) { | |
| 6552 return _parseVariableDeclarationStatementAfterMetadata( | |
| 6553 commentAndMetadata); | |
| 6554 } else if (keyword == Keyword.VOID) { | |
| 6555 TypeName returnType = parseReturnType(); | |
| 6556 if (_matchesIdentifier() && | |
| 6557 _peek().matchesAny([ | |
| 6558 TokenType.OPEN_PAREN, | |
| 6559 TokenType.OPEN_CURLY_BRACKET, | |
| 6560 TokenType.FUNCTION | |
| 6561 ])) { | |
| 6562 return _parseFunctionDeclarationStatementAfterReturnType( | |
| 6563 commentAndMetadata, returnType); | |
| 6564 } else { | |
| 6565 // | |
| 6566 // We have found an error of some kind. Try to recover. | |
| 6567 // | |
| 6568 if (_matchesIdentifier()) { | |
| 6569 if (_peek().matchesAny( | |
| 6570 [TokenType.EQ, TokenType.COMMA, TokenType.SEMICOLON])) { | |
| 6571 // | |
| 6572 // We appear to have a variable declaration with a type of "void". | |
| 6573 // | |
| 6574 _reportErrorForNode(ParserErrorCode.VOID_VARIABLE, returnType); | |
| 6575 return _parseVariableDeclarationStatementAfterMetadata( | |
| 6576 commentAndMetadata); | |
| 6577 } | |
| 6578 } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 6579 // | |
| 6580 // We appear to have found an incomplete statement at the end of a | |
| 6581 // block. Parse it as a variable declaration. | |
| 6582 // | |
| 6583 return _parseVariableDeclarationStatementAfterType( | |
| 6584 commentAndMetadata, null, returnType); | |
| 6585 } | |
| 6586 _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT); | |
| 6587 // TODO(brianwilkerson) Recover from this error. | |
| 6588 return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON)); | |
| 6589 } | |
| 6590 } else if (keyword == Keyword.CONST) { | |
| 6591 if (_peek().matchesAny([ | |
| 6592 TokenType.LT, | |
| 6593 TokenType.OPEN_CURLY_BRACKET, | |
| 6594 TokenType.OPEN_SQUARE_BRACKET, | |
| 6595 TokenType.INDEX | |
| 6596 ])) { | |
| 6597 return new ExpressionStatement( | |
| 6598 parseExpression2(), _expect(TokenType.SEMICOLON)); | |
| 6599 } else if (_tokenMatches(_peek(), TokenType.IDENTIFIER)) { | |
| 6600 Token afterType = _skipTypeName(_peek()); | |
| 6601 if (afterType != null) { | |
| 6602 if (_tokenMatches(afterType, TokenType.OPEN_PAREN) || | |
| 6603 (_tokenMatches(afterType, TokenType.PERIOD) && | |
| 6604 _tokenMatches(afterType.next, TokenType.IDENTIFIER) && | |
| 6605 _tokenMatches(afterType.next.next, TokenType.OPEN_PAREN))) { | |
| 6606 return new ExpressionStatement( | |
| 6607 parseExpression2(), _expect(TokenType.SEMICOLON)); | |
| 6608 } | |
| 6609 } | |
| 6610 } | |
| 6611 return _parseVariableDeclarationStatementAfterMetadata( | |
| 6612 commentAndMetadata); | |
| 6613 } else if (keyword == Keyword.NEW || | |
| 6614 keyword == Keyword.TRUE || | |
| 6615 keyword == Keyword.FALSE || | |
| 6616 keyword == Keyword.NULL || | |
| 6617 keyword == Keyword.SUPER || | |
| 6618 keyword == Keyword.THIS) { | |
| 6619 return new ExpressionStatement( | |
| 6620 parseExpression2(), _expect(TokenType.SEMICOLON)); | |
| 6621 } else { | |
| 6622 // | |
| 6623 // We have found an error of some kind. Try to recover. | |
| 6624 // | |
| 6625 _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT); | |
| 6626 return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON)); | |
| 6627 } | |
| 6628 } else if (_inGenerator && _matchesString(_YIELD)) { | |
| 6629 return _parseYieldStatement(); | |
| 6630 } else if (_inAsync && _matchesString(_AWAIT)) { | |
| 6631 if (_tokenMatchesKeyword(_peek(), Keyword.FOR)) { | |
| 6632 return _parseForStatement(); | |
| 6633 } | |
| 6634 return new ExpressionStatement( | |
| 6635 parseExpression2(), _expect(TokenType.SEMICOLON)); | |
| 6636 } else if (_matchesString(_AWAIT) && | |
| 6637 _tokenMatchesKeyword(_peek(), Keyword.FOR)) { | |
| 6638 Token awaitToken = _currentToken; | |
| 6639 Statement statement = _parseForStatement(); | |
| 6640 if (statement is! ForStatement) { | |
| 6641 _reportErrorForToken( | |
| 6642 CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken); | |
| 6643 } | |
| 6644 return statement; | |
| 6645 } else if (_matches(TokenType.SEMICOLON)) { | |
| 6646 return _parseEmptyStatement(); | |
| 6647 } else if (_isInitializedVariableDeclaration()) { | |
| 6648 return _parseVariableDeclarationStatementAfterMetadata( | |
| 6649 commentAndMetadata); | |
| 6650 } else if (_isFunctionDeclaration()) { | |
| 6651 return _parseFunctionDeclarationStatement(); | |
| 6652 } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 6653 _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT); | |
| 6654 return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON)); | |
| 6655 } else { | |
| 6656 return new ExpressionStatement(parseExpression2(), _expectSemicolon()); | |
| 6657 } | |
| 6658 } | |
| 6659 | |
| 6660 /** | |
| 6661 * Parse an operator declaration. The [commentAndMetadata] is the | |
| 6662 * documentation comment and metadata to be associated with the declaration. | |
| 6663 * The [externalKeyword] is the 'external' token. The [returnType] is the | |
| 6664 * return type that has already been parsed, or `null` if there was no return | |
| 6665 * type. Return the operator declaration that was parsed. | |
| 6666 * | |
| 6667 * operatorDeclaration ::= | |
| 6668 * operatorSignature (';' | functionBody) | |
| 6669 * | |
| 6670 * operatorSignature ::= | |
| 6671 * 'external'? returnType? 'operator' operator formalParameterList | |
| 6672 */ | |
| 6673 MethodDeclaration _parseOperator(CommentAndMetadata commentAndMetadata, | |
| 6674 Token externalKeyword, TypeName returnType) { | |
| 6675 Token operatorKeyword; | |
| 6676 if (_matchesKeyword(Keyword.OPERATOR)) { | |
| 6677 operatorKeyword = getAndAdvance(); | |
| 6678 } else { | |
| 6679 _reportErrorForToken( | |
| 6680 ParserErrorCode.MISSING_KEYWORD_OPERATOR, _currentToken); | |
| 6681 operatorKeyword = _createSyntheticKeyword(Keyword.OPERATOR); | |
| 6682 } | |
| 6683 if (!_currentToken.isUserDefinableOperator) { | |
| 6684 _reportErrorForCurrentToken( | |
| 6685 ParserErrorCode.NON_USER_DEFINABLE_OPERATOR, [_currentToken.lexeme]); | |
| 6686 } | |
| 6687 SimpleIdentifier name = new SimpleIdentifier(getAndAdvance()); | |
| 6688 if (_matches(TokenType.EQ)) { | |
| 6689 Token previous = _currentToken.previous; | |
| 6690 if ((_tokenMatches(previous, TokenType.EQ_EQ) || | |
| 6691 _tokenMatches(previous, TokenType.BANG_EQ)) && | |
| 6692 _currentToken.offset == previous.offset + 2) { | |
| 6693 _reportErrorForCurrentToken(ParserErrorCode.INVALID_OPERATOR, | |
| 6694 ["${previous.lexeme}${_currentToken.lexeme}"]); | |
| 6695 _advance(); | |
| 6696 } | |
| 6697 } | |
| 6698 FormalParameterList parameters = parseFormalParameterList(); | |
| 6699 _validateFormalParameterList(parameters); | |
| 6700 FunctionBody body = | |
| 6701 _parseFunctionBody(true, ParserErrorCode.MISSING_FUNCTION_BODY, false); | |
| 6702 if (externalKeyword != null && body is! EmptyFunctionBody) { | |
| 6703 _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_OPERATOR_WITH_BODY); | |
| 6704 } | |
| 6705 return new MethodDeclaration(commentAndMetadata.comment, | |
| 6706 commentAndMetadata.metadata, externalKeyword, null, returnType, null, | |
| 6707 operatorKeyword, name, null, parameters, body); | |
| 6708 } | |
| 6709 | |
| 6710 /** | |
| 6711 * Parse a return type if one is given, otherwise return `null` without | |
| 6712 * advancing. Return the return type that was parsed. | |
| 6713 */ | |
| 6714 TypeName _parseOptionalReturnType() { | |
| 6715 if (_matchesKeyword(Keyword.VOID)) { | |
| 6716 return parseReturnType(); | |
| 6717 } else if (_matchesIdentifier() && | |
| 6718 !_matchesKeyword(Keyword.GET) && | |
| 6719 !_matchesKeyword(Keyword.SET) && | |
| 6720 !_matchesKeyword(Keyword.OPERATOR) && | |
| 6721 (_tokenMatchesIdentifier(_peek()) || | |
| 6722 _tokenMatches(_peek(), TokenType.LT))) { | |
| 6723 return parseReturnType(); | |
| 6724 } else if (_matchesIdentifier() && | |
| 6725 _tokenMatches(_peek(), TokenType.PERIOD) && | |
| 6726 _tokenMatchesIdentifier(_peekAt(2)) && | |
| 6727 (_tokenMatchesIdentifier(_peekAt(3)) || | |
| 6728 _tokenMatches(_peekAt(3), TokenType.LT))) { | |
| 6729 return parseReturnType(); | |
| 6730 } | |
| 6731 return null; | |
| 6732 } | |
| 6733 | |
| 6734 /** | |
| 6735 * Parse a part or part-of directive. The [commentAndMetadata] is the metadata | |
| 6736 * to be associated with the directive. Return the part or part-of directive | |
| 6737 * that was parsed. | |
| 6738 * | |
| 6739 * partDirective ::= | |
| 6740 * metadata 'part' stringLiteral ';' | |
| 6741 * | |
| 6742 * partOfDirective ::= | |
| 6743 * metadata 'part' 'of' identifier ';' | |
| 6744 */ | |
| 6745 Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) { | |
| 6746 Token partKeyword = _expectKeyword(Keyword.PART); | |
| 6747 if (_matchesString(_OF)) { | |
| 6748 Token ofKeyword = getAndAdvance(); | |
| 6749 LibraryIdentifier libraryName = _parseLibraryName( | |
| 6750 ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword); | |
| 6751 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 6752 return new PartOfDirective(commentAndMetadata.comment, | |
| 6753 commentAndMetadata.metadata, partKeyword, ofKeyword, libraryName, | |
| 6754 semicolon); | |
| 6755 } | |
| 6756 StringLiteral partUri = _parseUri(); | |
| 6757 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 6758 return new PartDirective(commentAndMetadata.comment, | |
| 6759 commentAndMetadata.metadata, partKeyword, partUri, semicolon); | |
| 6760 } | |
| 6761 | |
| 6762 /** | |
| 6763 * Parse a postfix expression. Return the postfix expression that was parsed. | |
| 6764 * | |
| 6765 * postfixExpression ::= | |
| 6766 * assignableExpression postfixOperator | |
| 6767 * | primary selector* | |
| 6768 * | |
| 6769 * selector ::= | |
| 6770 * assignableSelector | |
| 6771 * | argumentList | |
| 6772 */ | |
| 6773 Expression _parsePostfixExpression() { | |
| 6774 Expression operand = _parseAssignableExpression(true); | |
| 6775 if (_matches(TokenType.OPEN_SQUARE_BRACKET) || | |
| 6776 _matches(TokenType.PERIOD) || | |
| 6777 _matches(TokenType.QUESTION_PERIOD) || | |
| 6778 _matches(TokenType.OPEN_PAREN) || | |
| 6779 (parseGenericMethods && _matches(TokenType.LT))) { | |
| 6780 do { | |
| 6781 if (_isLikelyParameterList()) { | |
| 6782 TypeArgumentList typeArguments = null; | |
| 6783 if (_matches(TokenType.LT)) { | |
| 6784 typeArguments = parseTypeArgumentList(); | |
| 6785 } | |
| 6786 ArgumentList argumentList = parseArgumentList(); | |
| 6787 if (operand is PropertyAccess) { | |
| 6788 PropertyAccess access = operand as PropertyAccess; | |
| 6789 operand = new MethodInvocation(access.target, access.operator, | |
| 6790 access.propertyName, typeArguments, argumentList); | |
| 6791 } else { | |
| 6792 operand = new FunctionExpressionInvocation( | |
| 6793 operand, typeArguments, argumentList); | |
| 6794 } | |
| 6795 } else { | |
| 6796 operand = _parseAssignableSelector(operand, true); | |
| 6797 } | |
| 6798 } while (_matches(TokenType.OPEN_SQUARE_BRACKET) || | |
| 6799 _matches(TokenType.PERIOD) || | |
| 6800 _matches(TokenType.QUESTION_PERIOD) || | |
| 6801 _matches(TokenType.OPEN_PAREN)); | |
| 6802 return operand; | |
| 6803 } | |
| 6804 if (!_currentToken.type.isIncrementOperator) { | |
| 6805 return operand; | |
| 6806 } | |
| 6807 _ensureAssignable(operand); | |
| 6808 Token operator = getAndAdvance(); | |
| 6809 return new PostfixExpression(operand, operator); | |
| 6810 } | |
| 6811 | |
| 6812 /** | |
| 6813 * Parse a primary expression. Return the primary expression that was parsed. | |
| 6814 * | |
| 6815 * primary ::= | |
| 6816 * thisExpression | |
| 6817 * | 'super' unconditionalAssignableSelector | |
| 6818 * | functionExpression | |
| 6819 * | literal | |
| 6820 * | identifier | |
| 6821 * | newExpression | |
| 6822 * | constObjectExpression | |
| 6823 * | '(' expression ')' | |
| 6824 * | argumentDefinitionTest | |
| 6825 * | |
| 6826 * literal ::= | |
| 6827 * nullLiteral | |
| 6828 * | booleanLiteral | |
| 6829 * | numericLiteral | |
| 6830 * | stringLiteral | |
| 6831 * | symbolLiteral | |
| 6832 * | mapLiteral | |
| 6833 * | listLiteral | |
| 6834 */ | |
| 6835 Expression _parsePrimaryExpression() { | |
| 6836 if (_matchesKeyword(Keyword.THIS)) { | |
| 6837 return new ThisExpression(getAndAdvance()); | |
| 6838 } else if (_matchesKeyword(Keyword.SUPER)) { | |
| 6839 // TODO(paulberry): verify with Gilad that "super" must be followed by | |
| 6840 // unconditionalAssignableSelector in this case. | |
| 6841 return _parseAssignableSelector( | |
| 6842 new SuperExpression(getAndAdvance()), false, allowConditional: false); | |
| 6843 } else if (_matchesKeyword(Keyword.NULL)) { | |
| 6844 return new NullLiteral(getAndAdvance()); | |
| 6845 } else if (_matchesKeyword(Keyword.FALSE)) { | |
| 6846 return new BooleanLiteral(getAndAdvance(), false); | |
| 6847 } else if (_matchesKeyword(Keyword.TRUE)) { | |
| 6848 return new BooleanLiteral(getAndAdvance(), true); | |
| 6849 } else if (_matches(TokenType.DOUBLE)) { | |
| 6850 Token token = getAndAdvance(); | |
| 6851 double value = 0.0; | |
| 6852 try { | |
| 6853 value = double.parse(token.lexeme); | |
| 6854 } on FormatException { | |
| 6855 // The invalid format should have been reported by the scanner. | |
| 6856 } | |
| 6857 return new DoubleLiteral(token, value); | |
| 6858 } else if (_matches(TokenType.HEXADECIMAL)) { | |
| 6859 Token token = getAndAdvance(); | |
| 6860 int value = null; | |
| 6861 try { | |
| 6862 value = int.parse(token.lexeme.substring(2), radix: 16); | |
| 6863 } on FormatException { | |
| 6864 // The invalid format should have been reported by the scanner. | |
| 6865 } | |
| 6866 return new IntegerLiteral(token, value); | |
| 6867 } else if (_matches(TokenType.INT)) { | |
| 6868 Token token = getAndAdvance(); | |
| 6869 int value = null; | |
| 6870 try { | |
| 6871 value = int.parse(token.lexeme); | |
| 6872 } on FormatException { | |
| 6873 // The invalid format should have been reported by the scanner. | |
| 6874 } | |
| 6875 return new IntegerLiteral(token, value); | |
| 6876 } else if (_matches(TokenType.STRING)) { | |
| 6877 return parseStringLiteral(); | |
| 6878 } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) { | |
| 6879 return _parseMapLiteral(null, null); | |
| 6880 } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) || | |
| 6881 _matches(TokenType.INDEX)) { | |
| 6882 return _parseListLiteral(null, null); | |
| 6883 } else if (_matchesIdentifier()) { | |
| 6884 // TODO(brianwilkerson) The code below was an attempt to recover from an | |
| 6885 // error case, but it needs to be applied as a recovery only after we | |
| 6886 // know that parsing it as an identifier doesn't work. Leaving the code as | |
| 6887 // a reminder of how to recover. | |
| 6888 // if (isFunctionExpression(peek())) { | |
| 6889 // // | |
| 6890 // // Function expressions were allowed to have names at one point,
but this is now illegal. | |
| 6891 // // | |
| 6892 // reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdv
ance()); | |
| 6893 // return parseFunctionExpression(); | |
| 6894 // } | |
| 6895 return parsePrefixedIdentifier(); | |
| 6896 } else if (_matchesKeyword(Keyword.NEW)) { | |
| 6897 return _parseNewExpression(); | |
| 6898 } else if (_matchesKeyword(Keyword.CONST)) { | |
| 6899 return _parseConstExpression(); | |
| 6900 } else if (_matches(TokenType.OPEN_PAREN)) { | |
| 6901 if (_isFunctionExpression(_currentToken)) { | |
| 6902 return parseFunctionExpression(); | |
| 6903 } | |
| 6904 Token leftParenthesis = getAndAdvance(); | |
| 6905 bool wasInInitializer = _inInitializer; | |
| 6906 _inInitializer = false; | |
| 6907 try { | |
| 6908 Expression expression = parseExpression2(); | |
| 6909 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 6910 return new ParenthesizedExpression( | |
| 6911 leftParenthesis, expression, rightParenthesis); | |
| 6912 } finally { | |
| 6913 _inInitializer = wasInInitializer; | |
| 6914 } | |
| 6915 } else if (_matches(TokenType.LT)) { | |
| 6916 return _parseListOrMapLiteral(null); | |
| 6917 } else if (_matches(TokenType.QUESTION) && | |
| 6918 _tokenMatches(_peek(), TokenType.IDENTIFIER)) { | |
| 6919 _reportErrorForCurrentToken( | |
| 6920 ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]); | |
| 6921 _advance(); | |
| 6922 return _parsePrimaryExpression(); | |
| 6923 } else if (_matchesKeyword(Keyword.VOID)) { | |
| 6924 // | |
| 6925 // Recover from having a return type of "void" where a return type is not | |
| 6926 // expected. | |
| 6927 // | |
| 6928 // TODO(brianwilkerson) Improve this error message. | |
| 6929 _reportErrorForCurrentToken( | |
| 6930 ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]); | |
| 6931 _advance(); | |
| 6932 return _parsePrimaryExpression(); | |
| 6933 } else if (_matches(TokenType.HASH)) { | |
| 6934 return _parseSymbolLiteral(); | |
| 6935 } else { | |
| 6936 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 6937 return _createSyntheticIdentifier(); | |
| 6938 } | |
| 6939 } | |
| 6940 | |
| 6941 /** | |
| 6942 * Parse a redirecting constructor invocation. Return the redirecting | |
| 6943 * constructor invocation that was parsed. | |
| 6944 * | |
| 6945 * redirectingConstructorInvocation ::= | |
| 6946 * 'this' ('.' identifier)? arguments | |
| 6947 */ | |
| 6948 RedirectingConstructorInvocation _parseRedirectingConstructorInvocation() { | |
| 6949 Token keyword = _expectKeyword(Keyword.THIS); | |
| 6950 Token period = null; | |
| 6951 SimpleIdentifier constructorName = null; | |
| 6952 if (_matches(TokenType.PERIOD)) { | |
| 6953 period = getAndAdvance(); | |
| 6954 constructorName = parseSimpleIdentifier(); | |
| 6955 } | |
| 6956 ArgumentList argumentList = parseArgumentList(); | |
| 6957 return new RedirectingConstructorInvocation( | |
| 6958 keyword, period, constructorName, argumentList); | |
| 6959 } | |
| 6960 | |
| 6961 /** | |
| 6962 * Parse a relational expression. Return the relational expression that was | |
| 6963 * parsed. | |
| 6964 * | |
| 6965 * relationalExpression ::= | |
| 6966 * bitwiseOrExpression ('is' '!'? type | 'as' type | relationalOperato
r bitwiseOrExpression)? | |
| 6967 * | 'super' relationalOperator bitwiseOrExpression | |
| 6968 */ | |
| 6969 Expression _parseRelationalExpression() { | |
| 6970 if (_matchesKeyword(Keyword.SUPER) && | |
| 6971 _currentToken.next.type.isRelationalOperator) { | |
| 6972 Expression expression = new SuperExpression(getAndAdvance()); | |
| 6973 Token operator = getAndAdvance(); | |
| 6974 expression = new BinaryExpression( | |
| 6975 expression, operator, parseBitwiseOrExpression()); | |
| 6976 return expression; | |
| 6977 } | |
| 6978 Expression expression = parseBitwiseOrExpression(); | |
| 6979 if (_matchesKeyword(Keyword.AS)) { | |
| 6980 Token asOperator = getAndAdvance(); | |
| 6981 expression = new AsExpression(expression, asOperator, parseTypeName()); | |
| 6982 } else if (_matchesKeyword(Keyword.IS)) { | |
| 6983 Token isOperator = getAndAdvance(); | |
| 6984 Token notOperator = null; | |
| 6985 if (_matches(TokenType.BANG)) { | |
| 6986 notOperator = getAndAdvance(); | |
| 6987 } | |
| 6988 expression = new IsExpression( | |
| 6989 expression, isOperator, notOperator, parseTypeName()); | |
| 6990 } else if (_currentToken.type.isRelationalOperator) { | |
| 6991 Token operator = getAndAdvance(); | |
| 6992 expression = new BinaryExpression( | |
| 6993 expression, operator, parseBitwiseOrExpression()); | |
| 6994 } | |
| 6995 return expression; | |
| 6996 } | |
| 6997 | |
| 6998 /** | |
| 6999 * Parse a rethrow expression. Return the rethrow expression that was parsed. | |
| 7000 * | |
| 7001 * rethrowExpression ::= | |
| 7002 * 'rethrow' | |
| 7003 */ | |
| 7004 Expression _parseRethrowExpression() => | |
| 7005 new RethrowExpression(_expectKeyword(Keyword.RETHROW)); | |
| 7006 | |
| 7007 /** | |
| 7008 * Parse a return statement. Return the return statement that was parsed. | |
| 7009 * | |
| 7010 * returnStatement ::= | |
| 7011 * 'return' expression? ';' | |
| 7012 */ | |
| 7013 Statement _parseReturnStatement() { | |
| 7014 Token returnKeyword = _expectKeyword(Keyword.RETURN); | |
| 7015 if (_matches(TokenType.SEMICOLON)) { | |
| 7016 return new ReturnStatement(returnKeyword, null, getAndAdvance()); | |
| 7017 } | |
| 7018 Expression expression = parseExpression2(); | |
| 7019 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 7020 return new ReturnStatement(returnKeyword, expression, semicolon); | |
| 7021 } | |
| 7022 | |
| 7023 /** | |
| 7024 * Parse a setter. The [commentAndMetadata] is the documentation comment and | |
| 7025 * metadata to be associated with the declaration. The [externalKeyword] is | |
| 7026 * the 'external' token. The [staticKeyword] is the static keyword, or `null` | |
| 7027 * if the setter is not static. The [returnType] is the return type that has | |
| 7028 * already been parsed, or `null` if there was no return type. Return the | |
| 7029 * setter that was parsed. | |
| 7030 * | |
| 7031 * setter ::= | |
| 7032 * setterSignature functionBody? | |
| 7033 * | |
| 7034 * setterSignature ::= | |
| 7035 * 'external'? 'static'? returnType? 'set' identifier formalParameterL
ist | |
| 7036 */ | |
| 7037 MethodDeclaration _parseSetter(CommentAndMetadata commentAndMetadata, | |
| 7038 Token externalKeyword, Token staticKeyword, TypeName returnType) { | |
| 7039 Token propertyKeyword = _expectKeyword(Keyword.SET); | |
| 7040 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 7041 FormalParameterList parameters = parseFormalParameterList(); | |
| 7042 _validateFormalParameterList(parameters); | |
| 7043 FunctionBody body = _parseFunctionBody( | |
| 7044 externalKeyword != null || staticKeyword == null, | |
| 7045 ParserErrorCode.STATIC_SETTER_WITHOUT_BODY, false); | |
| 7046 if (externalKeyword != null && body is! EmptyFunctionBody) { | |
| 7047 _reportErrorForCurrentToken(ParserErrorCode.EXTERNAL_SETTER_WITH_BODY); | |
| 7048 } | |
| 7049 return new MethodDeclaration(commentAndMetadata.comment, | |
| 7050 commentAndMetadata.metadata, externalKeyword, staticKeyword, returnType, | |
| 7051 propertyKeyword, null, name, null, parameters, body); | |
| 7052 } | |
| 7053 | |
| 7054 /** | |
| 7055 * Parse a shift expression. Return the shift expression that was parsed. | |
| 7056 * | |
| 7057 * shiftExpression ::= | |
| 7058 * additiveExpression (shiftOperator additiveExpression)* | |
| 7059 * | 'super' (shiftOperator additiveExpression)+ | |
| 7060 */ | |
| 7061 Expression _parseShiftExpression() { | |
| 7062 Expression expression; | |
| 7063 if (_matchesKeyword(Keyword.SUPER) && | |
| 7064 _currentToken.next.type.isShiftOperator) { | |
| 7065 expression = new SuperExpression(getAndAdvance()); | |
| 7066 } else { | |
| 7067 expression = _parseAdditiveExpression(); | |
| 7068 } | |
| 7069 while (_currentToken.type.isShiftOperator) { | |
| 7070 Token operator = getAndAdvance(); | |
| 7071 expression = new BinaryExpression( | |
| 7072 expression, operator, _parseAdditiveExpression()); | |
| 7073 } | |
| 7074 return expression; | |
| 7075 } | |
| 7076 | |
| 7077 /** | |
| 7078 * Parse a list of statements within a switch statement. Return the statements | |
| 7079 * that were parsed. | |
| 7080 * | |
| 7081 * statements ::= | |
| 7082 * statement* | |
| 7083 */ | |
| 7084 List<Statement> _parseStatementList() { | |
| 7085 List<Statement> statements = new List<Statement>(); | |
| 7086 Token statementStart = _currentToken; | |
| 7087 while (!_matches(TokenType.EOF) && | |
| 7088 !_matches(TokenType.CLOSE_CURLY_BRACKET) && | |
| 7089 !_isSwitchMember()) { | |
| 7090 statements.add(parseStatement2()); | |
| 7091 if (identical(_currentToken, statementStart)) { | |
| 7092 _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken, | |
| 7093 [_currentToken.lexeme]); | |
| 7094 _advance(); | |
| 7095 } | |
| 7096 statementStart = _currentToken; | |
| 7097 } | |
| 7098 return statements; | |
| 7099 } | |
| 7100 | |
| 7101 /** | |
| 7102 * Parse a string literal that contains interpolations. Return the string | |
| 7103 * literal that was parsed. | |
| 7104 */ | |
| 7105 StringInterpolation _parseStringInterpolation(Token string) { | |
| 7106 List<InterpolationElement> elements = new List<InterpolationElement>(); | |
| 7107 bool hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | |
| 7108 _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER); | |
| 7109 elements.add(new InterpolationString( | |
| 7110 string, _computeStringValue(string.lexeme, true, !hasMore))); | |
| 7111 while (hasMore) { | |
| 7112 if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION)) { | |
| 7113 Token openToken = getAndAdvance(); | |
| 7114 bool wasInInitializer = _inInitializer; | |
| 7115 _inInitializer = false; | |
| 7116 try { | |
| 7117 Expression expression = parseExpression2(); | |
| 7118 Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 7119 elements.add( | |
| 7120 new InterpolationExpression(openToken, expression, rightBracket)); | |
| 7121 } finally { | |
| 7122 _inInitializer = wasInInitializer; | |
| 7123 } | |
| 7124 } else { | |
| 7125 Token openToken = getAndAdvance(); | |
| 7126 Expression expression = null; | |
| 7127 if (_matchesKeyword(Keyword.THIS)) { | |
| 7128 expression = new ThisExpression(getAndAdvance()); | |
| 7129 } else { | |
| 7130 expression = parseSimpleIdentifier(); | |
| 7131 } | |
| 7132 elements.add(new InterpolationExpression(openToken, expression, null)); | |
| 7133 } | |
| 7134 if (_matches(TokenType.STRING)) { | |
| 7135 string = getAndAdvance(); | |
| 7136 hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) || | |
| 7137 _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER); | |
| 7138 elements.add(new InterpolationString( | |
| 7139 string, _computeStringValue(string.lexeme, false, !hasMore))); | |
| 7140 } else { | |
| 7141 hasMore = false; | |
| 7142 } | |
| 7143 } | |
| 7144 return new StringInterpolation(elements); | |
| 7145 } | |
| 7146 | |
| 7147 /** | |
| 7148 * Parse a super constructor invocation. Return the super constructor | |
| 7149 * invocation that was parsed. | |
| 7150 * | |
| 7151 * superConstructorInvocation ::= | |
| 7152 * 'super' ('.' identifier)? arguments | |
| 7153 */ | |
| 7154 SuperConstructorInvocation _parseSuperConstructorInvocation() { | |
| 7155 Token keyword = _expectKeyword(Keyword.SUPER); | |
| 7156 Token period = null; | |
| 7157 SimpleIdentifier constructorName = null; | |
| 7158 if (_matches(TokenType.PERIOD)) { | |
| 7159 period = getAndAdvance(); | |
| 7160 constructorName = parseSimpleIdentifier(); | |
| 7161 } | |
| 7162 ArgumentList argumentList = parseArgumentList(); | |
| 7163 return new SuperConstructorInvocation( | |
| 7164 keyword, period, constructorName, argumentList); | |
| 7165 } | |
| 7166 | |
| 7167 /** | |
| 7168 * Parse a switch statement. Return the switch statement that was parsed. | |
| 7169 * | |
| 7170 * switchStatement ::= | |
| 7171 * 'switch' '(' expression ')' '{' switchCase* defaultCase? '}' | |
| 7172 * | |
| 7173 * switchCase ::= | |
| 7174 * label* ('case' expression ':') statements | |
| 7175 * | |
| 7176 * defaultCase ::= | |
| 7177 * label* 'default' ':' statements | |
| 7178 */ | |
| 7179 SwitchStatement _parseSwitchStatement() { | |
| 7180 bool wasInSwitch = _inSwitch; | |
| 7181 _inSwitch = true; | |
| 7182 try { | |
| 7183 HashSet<String> definedLabels = new HashSet<String>(); | |
| 7184 Token keyword = _expectKeyword(Keyword.SWITCH); | |
| 7185 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 7186 Expression expression = parseExpression2(); | |
| 7187 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 7188 Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET); | |
| 7189 Token defaultKeyword = null; | |
| 7190 List<SwitchMember> members = new List<SwitchMember>(); | |
| 7191 while (!_matches(TokenType.EOF) && | |
| 7192 !_matches(TokenType.CLOSE_CURLY_BRACKET)) { | |
| 7193 List<Label> labels = new List<Label>(); | |
| 7194 while ( | |
| 7195 _matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) { | |
| 7196 SimpleIdentifier identifier = parseSimpleIdentifier(); | |
| 7197 String label = identifier.token.lexeme; | |
| 7198 if (definedLabels.contains(label)) { | |
| 7199 _reportErrorForToken( | |
| 7200 ParserErrorCode.DUPLICATE_LABEL_IN_SWITCH_STATEMENT, | |
| 7201 identifier.token, [label]); | |
| 7202 } else { | |
| 7203 definedLabels.add(label); | |
| 7204 } | |
| 7205 Token colon = _expect(TokenType.COLON); | |
| 7206 labels.add(new Label(identifier, colon)); | |
| 7207 } | |
| 7208 if (_matchesKeyword(Keyword.CASE)) { | |
| 7209 Token caseKeyword = getAndAdvance(); | |
| 7210 Expression caseExpression = parseExpression2(); | |
| 7211 Token colon = _expect(TokenType.COLON); | |
| 7212 members.add(new SwitchCase(labels, caseKeyword, caseExpression, colon, | |
| 7213 _parseStatementList())); | |
| 7214 if (defaultKeyword != null) { | |
| 7215 _reportErrorForToken( | |
| 7216 ParserErrorCode.SWITCH_HAS_CASE_AFTER_DEFAULT_CASE, | |
| 7217 caseKeyword); | |
| 7218 } | |
| 7219 } else if (_matchesKeyword(Keyword.DEFAULT)) { | |
| 7220 if (defaultKeyword != null) { | |
| 7221 _reportErrorForToken( | |
| 7222 ParserErrorCode.SWITCH_HAS_MULTIPLE_DEFAULT_CASES, _peek()); | |
| 7223 } | |
| 7224 defaultKeyword = getAndAdvance(); | |
| 7225 Token colon = _expect(TokenType.COLON); | |
| 7226 members.add(new SwitchDefault( | |
| 7227 labels, defaultKeyword, colon, _parseStatementList())); | |
| 7228 } else { | |
| 7229 // We need to advance, otherwise we could end up in an infinite loop, | |
| 7230 // but this could be a lot smarter about recovering from the error. | |
| 7231 _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_CASE_OR_DEFAULT); | |
| 7232 while (!_matches(TokenType.EOF) && | |
| 7233 !_matches(TokenType.CLOSE_CURLY_BRACKET) && | |
| 7234 !_matchesKeyword(Keyword.CASE) && | |
| 7235 !_matchesKeyword(Keyword.DEFAULT)) { | |
| 7236 _advance(); | |
| 7237 } | |
| 7238 } | |
| 7239 } | |
| 7240 Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET); | |
| 7241 return new SwitchStatement(keyword, leftParenthesis, expression, | |
| 7242 rightParenthesis, leftBracket, members, rightBracket); | |
| 7243 } finally { | |
| 7244 _inSwitch = wasInSwitch; | |
| 7245 } | |
| 7246 } | |
| 7247 | |
| 7248 /** | |
| 7249 * Parse a symbol literal. Return the symbol literal that was parsed. | |
| 7250 * | |
| 7251 * symbolLiteral ::= | |
| 7252 * '#' identifier ('.' identifier)* | |
| 7253 */ | |
| 7254 SymbolLiteral _parseSymbolLiteral() { | |
| 7255 Token poundSign = getAndAdvance(); | |
| 7256 List<Token> components = new List<Token>(); | |
| 7257 if (_matchesIdentifier()) { | |
| 7258 components.add(getAndAdvance()); | |
| 7259 while (_matches(TokenType.PERIOD)) { | |
| 7260 _advance(); | |
| 7261 if (_matchesIdentifier()) { | |
| 7262 components.add(getAndAdvance()); | |
| 7263 } else { | |
| 7264 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 7265 components.add(_createSyntheticToken(TokenType.IDENTIFIER)); | |
| 7266 break; | |
| 7267 } | |
| 7268 } | |
| 7269 } else if (_currentToken.isOperator) { | |
| 7270 components.add(getAndAdvance()); | |
| 7271 } else if (_tokenMatchesKeyword(_currentToken, Keyword.VOID)) { | |
| 7272 components.add(getAndAdvance()); | |
| 7273 } else { | |
| 7274 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 7275 components.add(_createSyntheticToken(TokenType.IDENTIFIER)); | |
| 7276 } | |
| 7277 return new SymbolLiteral(poundSign, components); | |
| 7278 } | |
| 7279 | |
| 7280 /** | |
| 7281 * Parse a throw expression. Return the throw expression that was parsed. | |
| 7282 * | |
| 7283 * throwExpression ::= | |
| 7284 * 'throw' expression | |
| 7285 */ | |
| 7286 Expression _parseThrowExpression() { | |
| 7287 Token keyword = _expectKeyword(Keyword.THROW); | |
| 7288 if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) { | |
| 7289 _reportErrorForToken( | |
| 7290 ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken); | |
| 7291 return new ThrowExpression(keyword, _createSyntheticIdentifier()); | |
| 7292 } | |
| 7293 Expression expression = parseExpression2(); | |
| 7294 return new ThrowExpression(keyword, expression); | |
| 7295 } | |
| 7296 | |
| 7297 /** | |
| 7298 * Parse a throw expression. Return the throw expression that was parsed. | |
| 7299 * | |
| 7300 * throwExpressionWithoutCascade ::= | |
| 7301 * 'throw' expressionWithoutCascade | |
| 7302 */ | |
| 7303 Expression _parseThrowExpressionWithoutCascade() { | |
| 7304 Token keyword = _expectKeyword(Keyword.THROW); | |
| 7305 if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) { | |
| 7306 _reportErrorForToken( | |
| 7307 ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken); | |
| 7308 return new ThrowExpression(keyword, _createSyntheticIdentifier()); | |
| 7309 } | |
| 7310 Expression expression = parseExpressionWithoutCascade(); | |
| 7311 return new ThrowExpression(keyword, expression); | |
| 7312 } | |
| 7313 | |
| 7314 /** | |
| 7315 * Parse a try statement. Return the try statement that was parsed. | |
| 7316 * | |
| 7317 * tryStatement ::= | |
| 7318 * 'try' block (onPart+ finallyPart? | finallyPart) | |
| 7319 * | |
| 7320 * onPart ::= | |
| 7321 * catchPart block | |
| 7322 * | 'on' type catchPart? block | |
| 7323 * | |
| 7324 * catchPart ::= | |
| 7325 * 'catch' '(' identifier (',' identifier)? ')' | |
| 7326 * | |
| 7327 * finallyPart ::= | |
| 7328 * 'finally' block | |
| 7329 */ | |
| 7330 Statement _parseTryStatement() { | |
| 7331 Token tryKeyword = _expectKeyword(Keyword.TRY); | |
| 7332 Block body = parseBlock(); | |
| 7333 List<CatchClause> catchClauses = new List<CatchClause>(); | |
| 7334 Block finallyClause = null; | |
| 7335 while (_matchesString(_ON) || _matchesKeyword(Keyword.CATCH)) { | |
| 7336 Token onKeyword = null; | |
| 7337 TypeName exceptionType = null; | |
| 7338 if (_matchesString(_ON)) { | |
| 7339 onKeyword = getAndAdvance(); | |
| 7340 exceptionType = parseTypeName(); | |
| 7341 } | |
| 7342 Token catchKeyword = null; | |
| 7343 Token leftParenthesis = null; | |
| 7344 SimpleIdentifier exceptionParameter = null; | |
| 7345 Token comma = null; | |
| 7346 SimpleIdentifier stackTraceParameter = null; | |
| 7347 Token rightParenthesis = null; | |
| 7348 if (_matchesKeyword(Keyword.CATCH)) { | |
| 7349 catchKeyword = getAndAdvance(); | |
| 7350 leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 7351 exceptionParameter = parseSimpleIdentifier(); | |
| 7352 if (_matches(TokenType.COMMA)) { | |
| 7353 comma = getAndAdvance(); | |
| 7354 stackTraceParameter = parseSimpleIdentifier(); | |
| 7355 } | |
| 7356 rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 7357 } | |
| 7358 Block catchBody = parseBlock(); | |
| 7359 catchClauses.add(new CatchClause(onKeyword, exceptionType, catchKeyword, | |
| 7360 leftParenthesis, exceptionParameter, comma, stackTraceParameter, | |
| 7361 rightParenthesis, catchBody)); | |
| 7362 } | |
| 7363 Token finallyKeyword = null; | |
| 7364 if (_matchesKeyword(Keyword.FINALLY)) { | |
| 7365 finallyKeyword = getAndAdvance(); | |
| 7366 finallyClause = parseBlock(); | |
| 7367 } else { | |
| 7368 if (catchClauses.isEmpty) { | |
| 7369 _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY); | |
| 7370 } | |
| 7371 } | |
| 7372 return new TryStatement( | |
| 7373 tryKeyword, body, catchClauses, finallyKeyword, finallyClause); | |
| 7374 } | |
| 7375 | |
| 7376 /** | |
| 7377 * Parse a type alias. The [commentAndMetadata] is the metadata to be | |
| 7378 * associated with the member. Return the type alias that was parsed. | |
| 7379 * | |
| 7380 * typeAlias ::= | |
| 7381 * 'typedef' typeAliasBody | |
| 7382 * | |
| 7383 * typeAliasBody ::= | |
| 7384 * classTypeAlias | |
| 7385 * | functionTypeAlias | |
| 7386 * | |
| 7387 * classTypeAlias ::= | |
| 7388 * identifier typeParameters? '=' 'abstract'? mixinApplication | |
| 7389 * | |
| 7390 * mixinApplication ::= | |
| 7391 * qualified withClause implementsClause? ';' | |
| 7392 * | |
| 7393 * functionTypeAlias ::= | |
| 7394 * functionPrefix typeParameterList? formalParameterList ';' | |
| 7395 * | |
| 7396 * functionPrefix ::= | |
| 7397 * returnType? name | |
| 7398 */ | |
| 7399 TypeAlias _parseTypeAlias(CommentAndMetadata commentAndMetadata) { | |
| 7400 Token keyword = _expectKeyword(Keyword.TYPEDEF); | |
| 7401 if (_matchesIdentifier()) { | |
| 7402 Token next = _peek(); | |
| 7403 if (_tokenMatches(next, TokenType.LT)) { | |
| 7404 next = _skipTypeParameterList(next); | |
| 7405 if (next != null && _tokenMatches(next, TokenType.EQ)) { | |
| 7406 TypeAlias typeAlias = | |
| 7407 _parseClassTypeAlias(commentAndMetadata, null, keyword); | |
| 7408 _reportErrorForToken( | |
| 7409 ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword); | |
| 7410 return typeAlias; | |
| 7411 } | |
| 7412 } else if (_tokenMatches(next, TokenType.EQ)) { | |
| 7413 TypeAlias typeAlias = | |
| 7414 _parseClassTypeAlias(commentAndMetadata, null, keyword); | |
| 7415 _reportErrorForToken( | |
| 7416 ParserErrorCode.DEPRECATED_CLASS_TYPE_ALIAS, keyword); | |
| 7417 return typeAlias; | |
| 7418 } | |
| 7419 } | |
| 7420 return _parseFunctionTypeAlias(commentAndMetadata, keyword); | |
| 7421 } | |
| 7422 | |
| 7423 /** | |
| 7424 * Parse a unary expression. Return the unary expression that was parsed. | |
| 7425 * | |
| 7426 * unaryExpression ::= | |
| 7427 * prefixOperator unaryExpression | |
| 7428 * | awaitExpression | |
| 7429 * | postfixExpression | |
| 7430 * | unaryOperator 'super' | |
| 7431 * | '-' 'super' | |
| 7432 * | incrementOperator assignableExpression | |
| 7433 */ | |
| 7434 Expression _parseUnaryExpression() { | |
| 7435 if (_matches(TokenType.MINUS) || | |
| 7436 _matches(TokenType.BANG) || | |
| 7437 _matches(TokenType.TILDE)) { | |
| 7438 Token operator = getAndAdvance(); | |
| 7439 if (_matchesKeyword(Keyword.SUPER)) { | |
| 7440 if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) || | |
| 7441 _tokenMatches(_peek(), TokenType.PERIOD)) { | |
| 7442 // "prefixOperator unaryExpression" | |
| 7443 // --> "prefixOperator postfixExpression" | |
| 7444 // --> "prefixOperator primary selector*" | |
| 7445 // --> "prefixOperator 'super' assignableSelector selector*" | |
| 7446 return new PrefixExpression(operator, _parseUnaryExpression()); | |
| 7447 } | |
| 7448 return new PrefixExpression( | |
| 7449 operator, new SuperExpression(getAndAdvance())); | |
| 7450 } | |
| 7451 return new PrefixExpression(operator, _parseUnaryExpression()); | |
| 7452 } else if (_currentToken.type.isIncrementOperator) { | |
| 7453 Token operator = getAndAdvance(); | |
| 7454 if (_matchesKeyword(Keyword.SUPER)) { | |
| 7455 if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) || | |
| 7456 _tokenMatches(_peek(), TokenType.PERIOD)) { | |
| 7457 // --> "prefixOperator 'super' assignableSelector selector*" | |
| 7458 return new PrefixExpression(operator, _parseUnaryExpression()); | |
| 7459 } | |
| 7460 // | |
| 7461 // Even though it is not valid to use an incrementing operator | |
| 7462 // ('++' or '--') before 'super', we can (and therefore must) interpret | |
| 7463 // "--super" as semantically equivalent to "-(-super)". Unfortunately, | |
| 7464 // we cannot do the same for "++super" because "+super" is also not | |
| 7465 // valid. | |
| 7466 // | |
| 7467 if (operator.type == TokenType.MINUS_MINUS) { | |
| 7468 Token firstOperator = _createToken(operator, TokenType.MINUS); | |
| 7469 Token secondOperator = | |
| 7470 new Token(TokenType.MINUS, operator.offset + 1); | |
| 7471 secondOperator.setNext(_currentToken); | |
| 7472 firstOperator.setNext(secondOperator); | |
| 7473 operator.previous.setNext(firstOperator); | |
| 7474 return new PrefixExpression(firstOperator, new PrefixExpression( | |
| 7475 secondOperator, new SuperExpression(getAndAdvance()))); | |
| 7476 } else { | |
| 7477 // Invalid operator before 'super' | |
| 7478 _reportErrorForCurrentToken( | |
| 7479 ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]); | |
| 7480 return new PrefixExpression( | |
| 7481 operator, new SuperExpression(getAndAdvance())); | |
| 7482 } | |
| 7483 } | |
| 7484 return new PrefixExpression(operator, _parseAssignableExpression(false)); | |
| 7485 } else if (_matches(TokenType.PLUS)) { | |
| 7486 _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER); | |
| 7487 return _createSyntheticIdentifier(); | |
| 7488 } else if (_inAsync && _matchesString(_AWAIT)) { | |
| 7489 return _parseAwaitExpression(); | |
| 7490 } | |
| 7491 return _parsePostfixExpression(); | |
| 7492 } | |
| 7493 | |
| 7494 /** | |
| 7495 * Parse a string literal representing a URI. Return the string literal that | |
| 7496 * was parsed. | |
| 7497 */ | |
| 7498 StringLiteral _parseUri() { | |
| 7499 bool iskeywordAfterUri(Token token) => token.lexeme == Keyword.AS.syntax || | |
| 7500 token.lexeme == _HIDE || | |
| 7501 token.lexeme == _SHOW; | |
| 7502 if (!_matches(TokenType.STRING) && | |
| 7503 !_matches(TokenType.SEMICOLON) && | |
| 7504 !iskeywordAfterUri(_currentToken)) { | |
| 7505 // Attempt to recover in the case where the URI was not enclosed in | |
| 7506 // quotes. | |
| 7507 Token token = _currentToken; | |
| 7508 while ((_tokenMatchesIdentifier(token) && !iskeywordAfterUri(token)) || | |
| 7509 _tokenMatches(token, TokenType.COLON) || | |
| 7510 _tokenMatches(token, TokenType.SLASH) || | |
| 7511 _tokenMatches(token, TokenType.PERIOD) || | |
| 7512 _tokenMatches(token, TokenType.PERIOD_PERIOD) || | |
| 7513 _tokenMatches(token, TokenType.PERIOD_PERIOD_PERIOD) || | |
| 7514 _tokenMatches(token, TokenType.INT) || | |
| 7515 _tokenMatches(token, TokenType.DOUBLE)) { | |
| 7516 token = token.next; | |
| 7517 } | |
| 7518 if (_tokenMatches(token, TokenType.SEMICOLON) || | |
| 7519 iskeywordAfterUri(token)) { | |
| 7520 Token endToken = token.previous; | |
| 7521 token = _currentToken; | |
| 7522 int endOffset = token.end; | |
| 7523 StringBuffer buffer = new StringBuffer(); | |
| 7524 buffer.write(token.lexeme); | |
| 7525 while (token != endToken) { | |
| 7526 token = token.next; | |
| 7527 if (token.offset != endOffset || token.precedingComments != null) { | |
| 7528 return parseStringLiteral(); | |
| 7529 } | |
| 7530 buffer.write(token.lexeme); | |
| 7531 endOffset = token.end; | |
| 7532 } | |
| 7533 String value = buffer.toString(); | |
| 7534 Token newToken = | |
| 7535 new StringToken(TokenType.STRING, "'$value'", _currentToken.offset); | |
| 7536 _reportErrorForToken( | |
| 7537 ParserErrorCode.NON_STRING_LITERAL_AS_URI, newToken); | |
| 7538 _currentToken = endToken.next; | |
| 7539 return new SimpleStringLiteral(newToken, value); | |
| 7540 } | |
| 7541 } | |
| 7542 return parseStringLiteral(); | |
| 7543 } | |
| 7544 | |
| 7545 /** | |
| 7546 * Parse a variable declaration. Return the variable declaration that was | |
| 7547 * parsed. | |
| 7548 * | |
| 7549 * variableDeclaration ::= | |
| 7550 * identifier ('=' expression)? | |
| 7551 */ | |
| 7552 VariableDeclaration _parseVariableDeclaration() { | |
| 7553 // TODO(paulberry): prior to the fix for bug 23204, we permitted | |
| 7554 // annotations before variable declarations (e.g. "String @deprecated s;"). | |
| 7555 // Although such constructions are prohibited by the spec, we may want to | |
| 7556 // consider handling them anyway to allow for better parser recovery in the | |
| 7557 // event that the user erroneously tries to use them. However, as a | |
| 7558 // counterargument, this would likely degrade parser recovery in the event | |
| 7559 // of a construct like "class C { int @deprecated foo() {} }" (i.e. the | |
| 7560 // user is in the middle of inserting "int bar;" prior to | |
| 7561 // "@deprecated foo() {}"). | |
| 7562 SimpleIdentifier name = parseSimpleIdentifier(); | |
| 7563 Token equals = null; | |
| 7564 Expression initializer = null; | |
| 7565 if (_matches(TokenType.EQ)) { | |
| 7566 equals = getAndAdvance(); | |
| 7567 initializer = parseExpression2(); | |
| 7568 } | |
| 7569 return new VariableDeclaration(name, equals, initializer); | |
| 7570 } | |
| 7571 | |
| 7572 /** | |
| 7573 * Parse a variable declaration list. The [commentAndMetadata] is the metadata | |
| 7574 * to be associated with the variable declaration list. Return the variable | |
| 7575 * declaration list that was parsed. | |
| 7576 * | |
| 7577 * variableDeclarationList ::= | |
| 7578 * finalConstVarOrType variableDeclaration (',' variableDeclaration)* | |
| 7579 */ | |
| 7580 VariableDeclarationList _parseVariableDeclarationListAfterMetadata( | |
| 7581 CommentAndMetadata commentAndMetadata) { | |
| 7582 FinalConstVarOrType holder = _parseFinalConstVarOrType(false); | |
| 7583 return _parseVariableDeclarationListAfterType( | |
| 7584 commentAndMetadata, holder.keyword, holder.type); | |
| 7585 } | |
| 7586 | |
| 7587 /** | |
| 7588 * Parse a variable declaration list. The [commentAndMetadata] is the metadata | |
| 7589 * to be associated with the variable declaration list, or `null` if there is | |
| 7590 * no attempt at parsing the comment and metadata. The [keyword] is the token | |
| 7591 * representing the 'final', 'const' or 'var' keyword, or `null` if there is | |
| 7592 * no keyword. The [type] is the type of the variables in the list. Return the | |
| 7593 * variable declaration list that was parsed. | |
| 7594 * | |
| 7595 * variableDeclarationList ::= | |
| 7596 * finalConstVarOrType variableDeclaration (',' variableDeclaration)* | |
| 7597 */ | |
| 7598 VariableDeclarationList _parseVariableDeclarationListAfterType( | |
| 7599 CommentAndMetadata commentAndMetadata, Token keyword, TypeName type) { | |
| 7600 if (type != null && | |
| 7601 keyword != null && | |
| 7602 _tokenMatchesKeyword(keyword, Keyword.VAR)) { | |
| 7603 _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, keyword); | |
| 7604 } | |
| 7605 List<VariableDeclaration> variables = new List<VariableDeclaration>(); | |
| 7606 variables.add(_parseVariableDeclaration()); | |
| 7607 while (_matches(TokenType.COMMA)) { | |
| 7608 _advance(); | |
| 7609 variables.add(_parseVariableDeclaration()); | |
| 7610 } | |
| 7611 return new VariableDeclarationList( | |
| 7612 commentAndMetadata != null ? commentAndMetadata.comment : null, | |
| 7613 commentAndMetadata != null ? commentAndMetadata.metadata : null, | |
| 7614 keyword, type, variables); | |
| 7615 } | |
| 7616 | |
| 7617 /** | |
| 7618 * Parse a variable declaration statement. The [commentAndMetadata] is the | |
| 7619 * metadata to be associated with the variable declaration statement, or | |
| 7620 * `null` if there is no attempt at parsing the comment and metadata. Return | |
| 7621 * the variable declaration statement that was parsed. | |
| 7622 * | |
| 7623 * variableDeclarationStatement ::= | |
| 7624 * variableDeclarationList ';' | |
| 7625 */ | |
| 7626 VariableDeclarationStatement _parseVariableDeclarationStatementAfterMetadata( | |
| 7627 CommentAndMetadata commentAndMetadata) { | |
| 7628 // Token startToken = currentToken; | |
| 7629 VariableDeclarationList variableList = | |
| 7630 _parseVariableDeclarationListAfterMetadata(commentAndMetadata); | |
| 7631 // if (!matches(TokenType.SEMICOLON)) { | |
| 7632 // if (matches(startToken, Keyword.VAR) && isTypedIdentifier(startToken
.getNext())) { | |
| 7633 // // TODO(brianwilkerson) This appears to be of the form "var type v
ariable". We should do | |
| 7634 // // a better job of recovering in this case. | |
| 7635 // } | |
| 7636 // } | |
| 7637 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 7638 return new VariableDeclarationStatement(variableList, semicolon); | |
| 7639 } | |
| 7640 | |
| 7641 /** | |
| 7642 * Parse a variable declaration statement. The [commentAndMetadata] is the | |
| 7643 * metadata to be associated with the variable declaration statement, or | |
| 7644 * `null` if there is no attempt at parsing the comment and metadata. The | |
| 7645 * [keyword] is the token representing the 'final', 'const' or 'var' keyword, | |
| 7646 * or `null` if there is no keyword. The [type] is the type of the variables | |
| 7647 * in the list. Return the variable declaration statement that was parsed. | |
| 7648 * | |
| 7649 * variableDeclarationStatement ::= | |
| 7650 * variableDeclarationList ';' | |
| 7651 */ | |
| 7652 VariableDeclarationStatement _parseVariableDeclarationStatementAfterType( | |
| 7653 CommentAndMetadata commentAndMetadata, Token keyword, TypeName type) { | |
| 7654 VariableDeclarationList variableList = | |
| 7655 _parseVariableDeclarationListAfterType( | |
| 7656 commentAndMetadata, keyword, type); | |
| 7657 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 7658 return new VariableDeclarationStatement(variableList, semicolon); | |
| 7659 } | |
| 7660 | |
| 7661 /** | |
| 7662 * Parse a while statement. Return the while statement that was parsed. | |
| 7663 * | |
| 7664 * whileStatement ::= | |
| 7665 * 'while' '(' expression ')' statement | |
| 7666 */ | |
| 7667 Statement _parseWhileStatement() { | |
| 7668 bool wasInLoop = _inLoop; | |
| 7669 _inLoop = true; | |
| 7670 try { | |
| 7671 Token keyword = _expectKeyword(Keyword.WHILE); | |
| 7672 Token leftParenthesis = _expect(TokenType.OPEN_PAREN); | |
| 7673 Expression condition = parseExpression2(); | |
| 7674 Token rightParenthesis = _expect(TokenType.CLOSE_PAREN); | |
| 7675 Statement body = parseStatement2(); | |
| 7676 return new WhileStatement( | |
| 7677 keyword, leftParenthesis, condition, rightParenthesis, body); | |
| 7678 } finally { | |
| 7679 _inLoop = wasInLoop; | |
| 7680 } | |
| 7681 } | |
| 7682 | |
| 7683 /** | |
| 7684 * Parse a yield statement. Return the yield statement that was parsed. | |
| 7685 * | |
| 7686 * yieldStatement ::= | |
| 7687 * 'yield' '*'? expression ';' | |
| 7688 */ | |
| 7689 YieldStatement _parseYieldStatement() { | |
| 7690 Token yieldToken = getAndAdvance(); | |
| 7691 Token star = null; | |
| 7692 if (_matches(TokenType.STAR)) { | |
| 7693 star = getAndAdvance(); | |
| 7694 } | |
| 7695 Expression expression = parseExpression2(); | |
| 7696 Token semicolon = _expect(TokenType.SEMICOLON); | |
| 7697 return new YieldStatement(yieldToken, star, expression, semicolon); | |
| 7698 } | |
| 7699 | |
| 7700 /** | |
| 7701 * Return the token that is immediately after the current token. This is | |
| 7702 * equivalent to [_peekAt](1). | |
| 7703 */ | |
| 7704 Token _peek() => _currentToken.next; | |
| 7705 | |
| 7706 /** | |
| 7707 * Return the token that is the given [distance] after the current token, | |
| 7708 * where the distance is the number of tokens to look ahead. A distance of `0` | |
| 7709 * is the current token, `1` is the next token, etc. | |
| 7710 */ | |
| 7711 Token _peekAt(int distance) { | |
| 7712 Token token = _currentToken; | |
| 7713 for (int i = 0; i < distance; i++) { | |
| 7714 token = token.next; | |
| 7715 } | |
| 7716 return token; | |
| 7717 } | |
| 7718 | |
| 7719 /** | |
| 7720 * Report the given [error]. | |
| 7721 */ | |
| 7722 void _reportError(AnalysisError error) { | |
| 7723 if (_errorListenerLock != 0) { | |
| 7724 return; | |
| 7725 } | |
| 7726 _errorListener.onError(error); | |
| 7727 } | |
| 7728 | |
| 7729 /** | |
| 7730 * Report an error with the given [errorCode] and [arguments] associated with | |
| 7731 * the current token. | |
| 7732 */ | |
| 7733 void _reportErrorForCurrentToken(ParserErrorCode errorCode, | |
| 7734 [List<Object> arguments]) { | |
| 7735 _reportErrorForToken(errorCode, _currentToken, arguments); | |
| 7736 } | |
| 7737 | |
| 7738 /** | |
| 7739 * Report an error with the given [errorCode] and [arguments] associated with | |
| 7740 * the given [node]. | |
| 7741 */ | |
| 7742 void _reportErrorForNode(ParserErrorCode errorCode, AstNode node, | |
| 7743 [List<Object> arguments]) { | |
| 7744 _reportError(new AnalysisError( | |
| 7745 _source, node.offset, node.length, errorCode, arguments)); | |
| 7746 } | |
| 7747 | |
| 7748 /** | |
| 7749 * Report an error with the given [errorCode] and [arguments] associated with | |
| 7750 * the given [token]. | |
| 7751 */ | |
| 7752 void _reportErrorForToken(ErrorCode errorCode, Token token, | |
| 7753 [List<Object> arguments]) { | |
| 7754 if (token.type == TokenType.EOF) { | |
| 7755 token = token.previous; | |
| 7756 } | |
| 7757 _reportError(new AnalysisError(_source, token.offset, | |
| 7758 math.max(token.length, 1), errorCode, arguments)); | |
| 7759 } | |
| 7760 | |
| 7761 /** | |
| 7762 * Skips a block with all containing blocks. | |
| 7763 */ | |
| 7764 void _skipBlock() { | |
| 7765 Token endToken = (_currentToken as BeginToken).endToken; | |
| 7766 if (endToken == null) { | |
| 7767 endToken = _currentToken.next; | |
| 7768 while (!identical(endToken, _currentToken)) { | |
| 7769 _currentToken = endToken; | |
| 7770 endToken = _currentToken.next; | |
| 7771 } | |
| 7772 _reportErrorForToken( | |
| 7773 ParserErrorCode.EXPECTED_TOKEN, _currentToken.previous, ["}"]); | |
| 7774 } else { | |
| 7775 _currentToken = endToken.next; | |
| 7776 } | |
| 7777 } | |
| 7778 | |
| 7779 /** | |
| 7780 * Parse the 'final', 'const', 'var' or type preceding a variable declaration, | |
| 7781 * starting at the given token, without actually creating a type or changing | |
| 7782 * the current token. Return the token following the type that was parsed, or | |
| 7783 * `null` if the given token is not the first token in a valid type. The | |
| 7784 * [startToken] is the token at which parsing is to begin. Return the token | |
| 7785 * following the type that was parsed. | |
| 7786 * | |
| 7787 * finalConstVarOrType ::= | |
| 7788 * | 'final' type? | |
| 7789 * | 'const' type? | |
| 7790 * | 'var' | |
| 7791 * | type | |
| 7792 */ | |
| 7793 Token _skipFinalConstVarOrType(Token startToken) { | |
| 7794 if (_tokenMatchesKeyword(startToken, Keyword.FINAL) || | |
| 7795 _tokenMatchesKeyword(startToken, Keyword.CONST)) { | |
| 7796 Token next = startToken.next; | |
| 7797 if (_tokenMatchesIdentifier(next)) { | |
| 7798 Token next2 = next.next; | |
| 7799 // "Type parameter" or "Type<" or "prefix.Type" | |
| 7800 if (_tokenMatchesIdentifier(next2) || | |
| 7801 _tokenMatches(next2, TokenType.LT) || | |
| 7802 _tokenMatches(next2, TokenType.PERIOD)) { | |
| 7803 return _skipTypeName(next); | |
| 7804 } | |
| 7805 // "parameter" | |
| 7806 return next; | |
| 7807 } | |
| 7808 } else if (_tokenMatchesKeyword(startToken, Keyword.VAR)) { | |
| 7809 return startToken.next; | |
| 7810 } else if (_tokenMatchesIdentifier(startToken)) { | |
| 7811 Token next = startToken.next; | |
| 7812 if (_tokenMatchesIdentifier(next) || | |
| 7813 _tokenMatches(next, TokenType.LT) || | |
| 7814 _tokenMatchesKeyword(next, Keyword.THIS) || | |
| 7815 (_tokenMatches(next, TokenType.PERIOD) && | |
| 7816 _tokenMatchesIdentifier(next.next) && | |
| 7817 (_tokenMatchesIdentifier(next.next.next) || | |
| 7818 _tokenMatches(next.next.next, TokenType.LT) || | |
| 7819 _tokenMatchesKeyword(next.next.next, Keyword.THIS)))) { | |
| 7820 return _skipReturnType(startToken); | |
| 7821 } | |
| 7822 } | |
| 7823 return null; | |
| 7824 } | |
| 7825 | |
| 7826 /** | |
| 7827 * Parse a list of formal parameters, starting at the [startToken], without | |
| 7828 * actually creating a formal parameter list or changing the current token. | |
| 7829 * Return the token following the formal parameter list that was parsed, or | |
| 7830 * `null` if the given token is not the first token in a valid list of formal | |
| 7831 * parameter. | |
| 7832 * | |
| 7833 * Note that unlike other skip methods, this method uses a heuristic. In the | |
| 7834 * worst case, the parameters could be prefixed by metadata, which would | |
| 7835 * require us to be able to skip arbitrary expressions. Rather than duplicate | |
| 7836 * the logic of most of the parse methods we simply look for something that is | |
| 7837 * likely to be a list of parameters and then skip to returning the token | |
| 7838 * after the closing parenthesis. | |
| 7839 * | |
| 7840 * This method must be kept in sync with [parseFormalParameterList]. | |
| 7841 * | |
| 7842 * formalParameterList ::= | |
| 7843 * '(' ')' | |
| 7844 * | '(' normalFormalParameters (',' optionalFormalParameters)? ')' | |
| 7845 * | '(' optionalFormalParameters ')' | |
| 7846 * | |
| 7847 * normalFormalParameters ::= | |
| 7848 * normalFormalParameter (',' normalFormalParameter)* | |
| 7849 * | |
| 7850 * optionalFormalParameters ::= | |
| 7851 * optionalPositionalFormalParameters | |
| 7852 * | namedFormalParameters | |
| 7853 * | |
| 7854 * optionalPositionalFormalParameters ::= | |
| 7855 * '[' defaultFormalParameter (',' defaultFormalParameter)* ']' | |
| 7856 * | |
| 7857 * namedFormalParameters ::= | |
| 7858 * '{' defaultNamedParameter (',' defaultNamedParameter)* '}' | |
| 7859 */ | |
| 7860 Token _skipFormalParameterList(Token startToken) { | |
| 7861 if (!_tokenMatches(startToken, TokenType.OPEN_PAREN)) { | |
| 7862 return null; | |
| 7863 } | |
| 7864 Token next = startToken.next; | |
| 7865 if (_tokenMatches(next, TokenType.CLOSE_PAREN)) { | |
| 7866 return next.next; | |
| 7867 } | |
| 7868 // | |
| 7869 // Look to see whether the token after the open parenthesis is something | |
| 7870 // that should only occur at the beginning of a parameter list. | |
| 7871 // | |
| 7872 if (next.matchesAny([ | |
| 7873 TokenType.AT, | |
| 7874 TokenType.OPEN_SQUARE_BRACKET, | |
| 7875 TokenType.OPEN_CURLY_BRACKET | |
| 7876 ]) || | |
| 7877 _tokenMatchesKeyword(next, Keyword.VOID) || | |
| 7878 (_tokenMatchesIdentifier(next) && | |
| 7879 (next.next.matchesAny([TokenType.COMMA, TokenType.CLOSE_PAREN])))) { | |
| 7880 return _skipPastMatchingToken(startToken); | |
| 7881 } | |
| 7882 // | |
| 7883 // Look to see whether the first parameter is a function typed parameter | |
| 7884 // without a return type. | |
| 7885 // | |
| 7886 if (_tokenMatchesIdentifier(next) && | |
| 7887 _tokenMatches(next.next, TokenType.OPEN_PAREN)) { | |
| 7888 Token afterParameters = _skipFormalParameterList(next.next); | |
| 7889 if (afterParameters != null && | |
| 7890 (afterParameters | |
| 7891 .matchesAny([TokenType.COMMA, TokenType.CLOSE_PAREN]))) { | |
| 7892 return _skipPastMatchingToken(startToken); | |
| 7893 } | |
| 7894 } | |
| 7895 // | |
| 7896 // Look to see whether the first parameter has a type or is a function typed | |
| 7897 // parameter with a return type. | |
| 7898 // | |
| 7899 Token afterType = _skipFinalConstVarOrType(next); | |
| 7900 if (afterType == null) { | |
| 7901 return null; | |
| 7902 } | |
| 7903 if (_skipSimpleIdentifier(afterType) == null) { | |
| 7904 return null; | |
| 7905 } | |
| 7906 return _skipPastMatchingToken(startToken); | |
| 7907 } | |
| 7908 | |
| 7909 /** | |
| 7910 * If the [startToken] is a begin token with an associated end token, then | |
| 7911 * return the token following the end token. Otherwise, return `null`. | |
| 7912 */ | |
| 7913 Token _skipPastMatchingToken(Token startToken) { | |
| 7914 if (startToken is! BeginToken) { | |
| 7915 return null; | |
| 7916 } | |
| 7917 Token closeParen = (startToken as BeginToken).endToken; | |
| 7918 if (closeParen == null) { | |
| 7919 return null; | |
| 7920 } | |
| 7921 return closeParen.next; | |
| 7922 } | |
| 7923 | |
| 7924 /** | |
| 7925 * Parse a prefixed identifier, starting at the [startToken], without actually | |
| 7926 * creating a prefixed identifier or changing the current token. Return the | |
| 7927 * token following the prefixed identifier that was parsed, or `null` if the | |
| 7928 * given token is not the first token in a valid prefixed identifier. | |
| 7929 * | |
| 7930 * This method must be kept in sync with [parsePrefixedIdentifier]. | |
| 7931 * | |
| 7932 * prefixedIdentifier ::= | |
| 7933 * identifier ('.' identifier)? | |
| 7934 */ | |
| 7935 Token _skipPrefixedIdentifier(Token startToken) { | |
| 7936 Token token = _skipSimpleIdentifier(startToken); | |
| 7937 if (token == null) { | |
| 7938 return null; | |
| 7939 } else if (!_tokenMatches(token, TokenType.PERIOD)) { | |
| 7940 return token; | |
| 7941 } | |
| 7942 token = token.next; | |
| 7943 Token nextToken = _skipSimpleIdentifier(token); | |
| 7944 if (nextToken != null) { | |
| 7945 return nextToken; | |
| 7946 } else if (_tokenMatches(token, TokenType.CLOSE_PAREN) || | |
| 7947 _tokenMatches(token, TokenType.COMMA)) { | |
| 7948 // If the `id.` is followed by something that cannot produce a valid | |
| 7949 // structure then assume this is a prefixed identifier but missing the | |
| 7950 // trailing identifier | |
| 7951 return token; | |
| 7952 } | |
| 7953 return null; | |
| 7954 } | |
| 7955 | |
| 7956 /** | |
| 7957 * Parse a return type, starting at the [startToken], without actually | |
| 7958 * creating a return type or changing the current token. Return the token | |
| 7959 * following the return type that was parsed, or `null` if the given token is | |
| 7960 * not the first token in a valid return type. | |
| 7961 * | |
| 7962 * This method must be kept in sync with [parseReturnType]. | |
| 7963 * | |
| 7964 * returnType ::= | |
| 7965 * 'void' | |
| 7966 * | type | |
| 7967 */ | |
| 7968 Token _skipReturnType(Token startToken) { | |
| 7969 if (_tokenMatchesKeyword(startToken, Keyword.VOID)) { | |
| 7970 return startToken.next; | |
| 7971 } else { | |
| 7972 return _skipTypeName(startToken); | |
| 7973 } | |
| 7974 } | |
| 7975 | |
| 7976 /** | |
| 7977 * Parse a simple identifier, starting at the [startToken], without actually | |
| 7978 * creating a simple identifier or changing the current token. Return the | |
| 7979 * token following the simple identifier that was parsed, or `null` if the | |
| 7980 * given token is not the first token in a valid simple identifier. | |
| 7981 * | |
| 7982 * This method must be kept in sync with [parseSimpleIdentifier]. | |
| 7983 * | |
| 7984 * identifier ::= | |
| 7985 * IDENTIFIER | |
| 7986 */ | |
| 7987 Token _skipSimpleIdentifier(Token startToken) { | |
| 7988 if (_tokenMatches(startToken, TokenType.IDENTIFIER) || | |
| 7989 (_tokenMatches(startToken, TokenType.KEYWORD) && | |
| 7990 (startToken as KeywordToken).keyword.isPseudoKeyword)) { | |
| 7991 return startToken.next; | |
| 7992 } | |
| 7993 return null; | |
| 7994 } | |
| 7995 | |
| 7996 /** | |
| 7997 * Parse a string literal that contains interpolations, starting at the | |
| 7998 * [startToken], without actually creating a string literal or changing the | |
| 7999 * current token. Return the token following the string literal that was | |
| 8000 * parsed, or `null` if the given token is not the first token in a valid | |
| 8001 * string literal. | |
| 8002 * | |
| 8003 * This method must be kept in sync with [parseStringInterpolation]. | |
| 8004 */ | |
| 8005 Token _skipStringInterpolation(Token startToken) { | |
| 8006 Token token = startToken; | |
| 8007 TokenType type = token.type; | |
| 8008 while (type == TokenType.STRING_INTERPOLATION_EXPRESSION || | |
| 8009 type == TokenType.STRING_INTERPOLATION_IDENTIFIER) { | |
| 8010 if (type == TokenType.STRING_INTERPOLATION_EXPRESSION) { | |
| 8011 token = token.next; | |
| 8012 type = token.type; | |
| 8013 // | |
| 8014 // Rather than verify that the following tokens represent a valid | |
| 8015 // expression, we simply skip tokens until we reach the end of the | |
| 8016 // interpolation, being careful to handle nested string literals. | |
| 8017 // | |
| 8018 int bracketNestingLevel = 1; | |
| 8019 while (bracketNestingLevel > 0) { | |
| 8020 if (type == TokenType.EOF) { | |
| 8021 return null; | |
| 8022 } else if (type == TokenType.OPEN_CURLY_BRACKET) { | |
| 8023 bracketNestingLevel++; | |
| 8024 } else if (type == TokenType.CLOSE_CURLY_BRACKET) { | |
| 8025 bracketNestingLevel--; | |
| 8026 } else if (type == TokenType.STRING) { | |
| 8027 token = _skipStringLiteral(token); | |
| 8028 if (token == null) { | |
| 8029 return null; | |
| 8030 } | |
| 8031 } else { | |
| 8032 token = token.next; | |
| 8033 } | |
| 8034 type = token.type; | |
| 8035 } | |
| 8036 token = token.next; | |
| 8037 type = token.type; | |
| 8038 } else { | |
| 8039 token = token.next; | |
| 8040 if (token.type != TokenType.IDENTIFIER) { | |
| 8041 return null; | |
| 8042 } | |
| 8043 token = token.next; | |
| 8044 } | |
| 8045 type = token.type; | |
| 8046 if (type == TokenType.STRING) { | |
| 8047 token = token.next; | |
| 8048 type = token.type; | |
| 8049 } | |
| 8050 } | |
| 8051 return token; | |
| 8052 } | |
| 8053 | |
| 8054 /** | |
| 8055 * Parse a string literal, starting at the [startToken], without actually | |
| 8056 * creating a string literal or changing the current token. Return the token | |
| 8057 * following the string literal that was parsed, or `null` if the given token | |
| 8058 * is not the first token in a valid string literal. | |
| 8059 * | |
| 8060 * This method must be kept in sync with [parseStringLiteral]. | |
| 8061 * | |
| 8062 * stringLiteral ::= | |
| 8063 * MULTI_LINE_STRING+ | |
| 8064 * | SINGLE_LINE_STRING+ | |
| 8065 */ | |
| 8066 Token _skipStringLiteral(Token startToken) { | |
| 8067 Token token = startToken; | |
| 8068 while (token != null && _tokenMatches(token, TokenType.STRING)) { | |
| 8069 token = token.next; | |
| 8070 TokenType type = token.type; | |
| 8071 if (type == TokenType.STRING_INTERPOLATION_EXPRESSION || | |
| 8072 type == TokenType.STRING_INTERPOLATION_IDENTIFIER) { | |
| 8073 token = _skipStringInterpolation(token); | |
| 8074 } | |
| 8075 } | |
| 8076 if (identical(token, startToken)) { | |
| 8077 return null; | |
| 8078 } | |
| 8079 return token; | |
| 8080 } | |
| 8081 | |
| 8082 /** | |
| 8083 * Parse a list of type arguments, starting at the [startToken], without | |
| 8084 * actually creating a type argument list or changing the current token. | |
| 8085 * Return the token following the type argument list that was parsed, or | |
| 8086 * `null` if the given token is not the first token in a valid type argument | |
| 8087 * list. | |
| 8088 * | |
| 8089 * This method must be kept in sync with [parseTypeArgumentList]. | |
| 8090 * | |
| 8091 * typeArguments ::= | |
| 8092 * '<' typeList '>' | |
| 8093 * | |
| 8094 * typeList ::= | |
| 8095 * type (',' type)* | |
| 8096 */ | |
| 8097 Token _skipTypeArgumentList(Token startToken) { | |
| 8098 Token token = startToken; | |
| 8099 if (!_tokenMatches(token, TokenType.LT)) { | |
| 8100 return null; | |
| 8101 } | |
| 8102 token = _skipTypeName(token.next); | |
| 8103 if (token == null) { | |
| 8104 // If the start token '<' is followed by '>' | |
| 8105 // then assume this should be type argument list but is missing a type | |
| 8106 token = startToken.next; | |
| 8107 if (_tokenMatches(token, TokenType.GT)) { | |
| 8108 return token.next; | |
| 8109 } | |
| 8110 return null; | |
| 8111 } | |
| 8112 while (_tokenMatches(token, TokenType.COMMA)) { | |
| 8113 token = _skipTypeName(token.next); | |
| 8114 if (token == null) { | |
| 8115 return null; | |
| 8116 } | |
| 8117 } | |
| 8118 if (token.type == TokenType.GT) { | |
| 8119 return token.next; | |
| 8120 } else if (token.type == TokenType.GT_GT) { | |
| 8121 Token second = new Token(TokenType.GT, token.offset + 1); | |
| 8122 second.setNextWithoutSettingPrevious(token.next); | |
| 8123 return second; | |
| 8124 } | |
| 8125 return null; | |
| 8126 } | |
| 8127 | |
| 8128 /** | |
| 8129 * Parse a type name, starting at the [startToken], without actually creating | |
| 8130 * a type name or changing the current token. Return the token following the | |
| 8131 * type name that was parsed, or `null` if the given token is not the first | |
| 8132 * token in a valid type name. | |
| 8133 * | |
| 8134 * This method must be kept in sync with [parseTypeName]. | |
| 8135 * | |
| 8136 * type ::= | |
| 8137 * qualified typeArguments? | |
| 8138 */ | |
| 8139 Token _skipTypeName(Token startToken) { | |
| 8140 Token token = _skipPrefixedIdentifier(startToken); | |
| 8141 if (token == null) { | |
| 8142 return null; | |
| 8143 } | |
| 8144 if (_tokenMatches(token, TokenType.LT)) { | |
| 8145 token = _skipTypeArgumentList(token); | |
| 8146 } | |
| 8147 return token; | |
| 8148 } | |
| 8149 | |
| 8150 /** | |
| 8151 * Parse a list of type parameters, starting at the [startToken], without | |
| 8152 * actually creating a type parameter list or changing the current token. | |
| 8153 * Return the token following the type parameter list that was parsed, or | |
| 8154 * `null` if the given token is not the first token in a valid type parameter | |
| 8155 * list. | |
| 8156 * | |
| 8157 * This method must be kept in sync with [parseTypeParameterList]. | |
| 8158 * | |
| 8159 * typeParameterList ::= | |
| 8160 * '<' typeParameter (',' typeParameter)* '>' | |
| 8161 */ | |
| 8162 Token _skipTypeParameterList(Token startToken) { | |
| 8163 if (!_tokenMatches(startToken, TokenType.LT)) { | |
| 8164 return null; | |
| 8165 } | |
| 8166 // | |
| 8167 // We can't skip a type parameter because it can be preceeded by metadata, | |
| 8168 // so we just assume that everything before the matching end token is valid. | |
| 8169 // | |
| 8170 int depth = 1; | |
| 8171 Token next = startToken.next; | |
| 8172 while (depth > 0) { | |
| 8173 if (_tokenMatches(next, TokenType.EOF)) { | |
| 8174 return null; | |
| 8175 } else if (_tokenMatches(next, TokenType.LT)) { | |
| 8176 depth++; | |
| 8177 } else if (_tokenMatches(next, TokenType.GT)) { | |
| 8178 depth--; | |
| 8179 } else if (_tokenMatches(next, TokenType.GT_EQ)) { | |
| 8180 if (depth == 1) { | |
| 8181 Token fakeEquals = new Token(TokenType.EQ, next.offset + 2); | |
| 8182 fakeEquals.setNextWithoutSettingPrevious(next.next); | |
| 8183 return fakeEquals; | |
| 8184 } | |
| 8185 depth--; | |
| 8186 } else if (_tokenMatches(next, TokenType.GT_GT)) { | |
| 8187 depth -= 2; | |
| 8188 } else if (_tokenMatches(next, TokenType.GT_GT_EQ)) { | |
| 8189 if (depth < 2) { | |
| 8190 return null; | |
| 8191 } else if (depth == 2) { | |
| 8192 Token fakeEquals = new Token(TokenType.EQ, next.offset + 2); | |
| 8193 fakeEquals.setNextWithoutSettingPrevious(next.next); | |
| 8194 return fakeEquals; | |
| 8195 } | |
| 8196 depth -= 2; | |
| 8197 } | |
| 8198 next = next.next; | |
| 8199 } | |
| 8200 return next; | |
| 8201 } | |
| 8202 | |
| 8203 /** | |
| 8204 * Return `true` if the given [token] has the given [type]. | |
| 8205 */ | |
| 8206 bool _tokenMatches(Token token, TokenType type) => token.type == type; | |
| 8207 | |
| 8208 /** | |
| 8209 * Return `true` if the given [token] is a valid identifier. Valid identifiers | |
| 8210 * include built-in identifiers (pseudo-keywords). | |
| 8211 */ | |
| 8212 bool _tokenMatchesIdentifier(Token token) => | |
| 8213 _tokenMatches(token, TokenType.IDENTIFIER) || | |
| 8214 (_tokenMatches(token, TokenType.KEYWORD) && | |
| 8215 (token as KeywordToken).keyword.isPseudoKeyword); | |
| 8216 | |
| 8217 /** | |
| 8218 * Return `true` if the given [token] matches the given [keyword]. | |
| 8219 */ | |
| 8220 bool _tokenMatchesKeyword(Token token, Keyword keyword) => | |
| 8221 token.type == TokenType.KEYWORD && | |
| 8222 (token as KeywordToken).keyword == keyword; | |
| 8223 | |
| 8224 /** | |
| 8225 * Return `true` if the given [token] matches the given [identifier]. | |
| 8226 */ | |
| 8227 bool _tokenMatchesString(Token token, String identifier) => | |
| 8228 token.type == TokenType.IDENTIFIER && token.lexeme == identifier; | |
| 8229 | |
| 8230 /** | |
| 8231 * Translate the characters at the given [index] in the given [lexeme], | |
| 8232 * appending the translated character to the given [buffer]. The index is | |
| 8233 * assumed to be valid. | |
| 8234 */ | |
| 8235 int _translateCharacter(StringBuffer buffer, String lexeme, int index) { | |
| 8236 int currentChar = lexeme.codeUnitAt(index); | |
| 8237 if (currentChar != 0x5C) { | |
| 8238 buffer.writeCharCode(currentChar); | |
| 8239 return index + 1; | |
| 8240 } | |
| 8241 // | |
| 8242 // We have found an escape sequence, so we parse the string to determine | |
| 8243 // what kind of escape sequence and what character to add to the builder. | |
| 8244 // | |
| 8245 int length = lexeme.length; | |
| 8246 int currentIndex = index + 1; | |
| 8247 if (currentIndex >= length) { | |
| 8248 // Illegal escape sequence: no char after escape. | |
| 8249 // This cannot actually happen because it would require the escape | |
| 8250 // character to be the last character in the string, but if it were it | |
| 8251 // would escape the closing quote, leaving the string unclosed. | |
| 8252 // reportError(ParserErrorCode.MISSING_CHAR_IN_ESCAPE_SEQUENCE); | |
| 8253 return length; | |
| 8254 } | |
| 8255 currentChar = lexeme.codeUnitAt(currentIndex); | |
| 8256 if (currentChar == 0x6E) { | |
| 8257 buffer.writeCharCode(0xA); | |
| 8258 // newline | |
| 8259 } else if (currentChar == 0x72) { | |
| 8260 buffer.writeCharCode(0xD); | |
| 8261 // carriage return | |
| 8262 } else if (currentChar == 0x66) { | |
| 8263 buffer.writeCharCode(0xC); | |
| 8264 // form feed | |
| 8265 } else if (currentChar == 0x62) { | |
| 8266 buffer.writeCharCode(0x8); | |
| 8267 // backspace | |
| 8268 } else if (currentChar == 0x74) { | |
| 8269 buffer.writeCharCode(0x9); | |
| 8270 // tab | |
| 8271 } else if (currentChar == 0x76) { | |
| 8272 buffer.writeCharCode(0xB); | |
| 8273 // vertical tab | |
| 8274 } else if (currentChar == 0x78) { | |
| 8275 if (currentIndex + 2 >= length) { | |
| 8276 // Illegal escape sequence: not enough hex digits | |
| 8277 _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE); | |
| 8278 return length; | |
| 8279 } | |
| 8280 int firstDigit = lexeme.codeUnitAt(currentIndex + 1); | |
| 8281 int secondDigit = lexeme.codeUnitAt(currentIndex + 2); | |
| 8282 if (!_isHexDigit(firstDigit) || !_isHexDigit(secondDigit)) { | |
| 8283 // Illegal escape sequence: invalid hex digit | |
| 8284 _reportErrorForCurrentToken(ParserErrorCode.INVALID_HEX_ESCAPE); | |
| 8285 } else { | |
| 8286 int charCode = (Character.digit(firstDigit, 16) << 4) + | |
| 8287 Character.digit(secondDigit, 16); | |
| 8288 buffer.writeCharCode(charCode); | |
| 8289 } | |
| 8290 return currentIndex + 3; | |
| 8291 } else if (currentChar == 0x75) { | |
| 8292 currentIndex++; | |
| 8293 if (currentIndex >= length) { | |
| 8294 // Illegal escape sequence: not enough hex digits | |
| 8295 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8296 return length; | |
| 8297 } | |
| 8298 currentChar = lexeme.codeUnitAt(currentIndex); | |
| 8299 if (currentChar == 0x7B) { | |
| 8300 currentIndex++; | |
| 8301 if (currentIndex >= length) { | |
| 8302 // Illegal escape sequence: incomplete escape | |
| 8303 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8304 return length; | |
| 8305 } | |
| 8306 currentChar = lexeme.codeUnitAt(currentIndex); | |
| 8307 int digitCount = 0; | |
| 8308 int value = 0; | |
| 8309 while (currentChar != 0x7D) { | |
| 8310 if (!_isHexDigit(currentChar)) { | |
| 8311 // Illegal escape sequence: invalid hex digit | |
| 8312 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8313 currentIndex++; | |
| 8314 while (currentIndex < length && | |
| 8315 lexeme.codeUnitAt(currentIndex) != 0x7D) { | |
| 8316 currentIndex++; | |
| 8317 } | |
| 8318 return currentIndex + 1; | |
| 8319 } | |
| 8320 digitCount++; | |
| 8321 value = (value << 4) + Character.digit(currentChar, 16); | |
| 8322 currentIndex++; | |
| 8323 if (currentIndex >= length) { | |
| 8324 // Illegal escape sequence: incomplete escape | |
| 8325 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8326 return length; | |
| 8327 } | |
| 8328 currentChar = lexeme.codeUnitAt(currentIndex); | |
| 8329 } | |
| 8330 if (digitCount < 1 || digitCount > 6) { | |
| 8331 // Illegal escape sequence: not enough or too many hex digits | |
| 8332 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8333 } | |
| 8334 _appendScalarValue(buffer, lexeme.substring(index, currentIndex + 1), | |
| 8335 value, index, currentIndex); | |
| 8336 return currentIndex + 1; | |
| 8337 } else { | |
| 8338 if (currentIndex + 3 >= length) { | |
| 8339 // Illegal escape sequence: not enough hex digits | |
| 8340 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8341 return length; | |
| 8342 } | |
| 8343 int firstDigit = currentChar; | |
| 8344 int secondDigit = lexeme.codeUnitAt(currentIndex + 1); | |
| 8345 int thirdDigit = lexeme.codeUnitAt(currentIndex + 2); | |
| 8346 int fourthDigit = lexeme.codeUnitAt(currentIndex + 3); | |
| 8347 if (!_isHexDigit(firstDigit) || | |
| 8348 !_isHexDigit(secondDigit) || | |
| 8349 !_isHexDigit(thirdDigit) || | |
| 8350 !_isHexDigit(fourthDigit)) { | |
| 8351 // Illegal escape sequence: invalid hex digits | |
| 8352 _reportErrorForCurrentToken(ParserErrorCode.INVALID_UNICODE_ESCAPE); | |
| 8353 } else { | |
| 8354 _appendScalarValue( | |
| 8355 buffer, | |
| 8356 lexeme | |
| 8357 .substring( | |
| 8358 index, | |
| 8359 currentIndex + 1), | |
| 8360 (((((Character.digit(firstDigit, 16) << 4) + | |
| 8361 Character.digit(secondDigit, 16)) << | |
| 8362 4) + | |
| 8363 Character.digit(thirdDigit, 16)) << | |
| 8364 4) + | |
| 8365 Character | |
| 8366 .digit(fourthDigit, 16), | |
| 8367 index, | |
| 8368 currentIndex + | |
| 8369 3); | |
| 8370 } | |
| 8371 return currentIndex + 4; | |
| 8372 } | |
| 8373 } else { | |
| 8374 buffer.writeCharCode(currentChar); | |
| 8375 } | |
| 8376 return currentIndex + 1; | |
| 8377 } | |
| 8378 | |
| 8379 /** | |
| 8380 * Decrements the error reporting lock level. If level is more than `0`, then | |
| 8381 * [reportError] wont report any error. | |
| 8382 */ | |
| 8383 void _unlockErrorListener() { | |
| 8384 if (_errorListenerLock == 0) { | |
| 8385 throw new IllegalStateException( | |
| 8386 "Attempt to unlock not locked error listener."); | |
| 8387 } | |
| 8388 _errorListenerLock--; | |
| 8389 } | |
| 8390 | |
| 8391 /** | |
| 8392 * Validate that the given [parameterList] does not contain any field | |
| 8393 * initializers. | |
| 8394 */ | |
| 8395 void _validateFormalParameterList(FormalParameterList parameterList) { | |
| 8396 for (FormalParameter parameter in parameterList.parameters) { | |
| 8397 if (parameter is FieldFormalParameter) { | |
| 8398 _reportErrorForNode( | |
| 8399 ParserErrorCode.FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR, | |
| 8400 parameter.identifier); | |
| 8401 } | |
| 8402 } | |
| 8403 } | |
| 8404 | |
| 8405 /** | |
| 8406 * Validate that the given set of [modifiers] is appropriate for a class and | |
| 8407 * return the 'abstract' keyword if there is one. | |
| 8408 */ | |
| 8409 Token _validateModifiersForClass(Modifiers modifiers) { | |
| 8410 _validateModifiersForTopLevelDeclaration(modifiers); | |
| 8411 if (modifiers.constKeyword != null) { | |
| 8412 _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword); | |
| 8413 } | |
| 8414 if (modifiers.externalKeyword != null) { | |
| 8415 _reportErrorForToken( | |
| 8416 ParserErrorCode.EXTERNAL_CLASS, modifiers.externalKeyword); | |
| 8417 } | |
| 8418 if (modifiers.finalKeyword != null) { | |
| 8419 _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword); | |
| 8420 } | |
| 8421 if (modifiers.varKeyword != null) { | |
| 8422 _reportErrorForToken(ParserErrorCode.VAR_CLASS, modifiers.varKeyword); | |
| 8423 } | |
| 8424 return modifiers.abstractKeyword; | |
| 8425 } | |
| 8426 | |
| 8427 /** | |
| 8428 * Validate that the given set of [modifiers] is appropriate for a constructor | |
| 8429 * and return the 'const' keyword if there is one. | |
| 8430 */ | |
| 8431 Token _validateModifiersForConstructor(Modifiers modifiers) { | |
| 8432 if (modifiers.abstractKeyword != null) { | |
| 8433 _reportErrorForToken( | |
| 8434 ParserErrorCode.ABSTRACT_CLASS_MEMBER, modifiers.abstractKeyword); | |
| 8435 } | |
| 8436 if (modifiers.finalKeyword != null) { | |
| 8437 _reportErrorForToken( | |
| 8438 ParserErrorCode.FINAL_CONSTRUCTOR, modifiers.finalKeyword); | |
| 8439 } | |
| 8440 if (modifiers.staticKeyword != null) { | |
| 8441 _reportErrorForToken( | |
| 8442 ParserErrorCode.STATIC_CONSTRUCTOR, modifiers.staticKeyword); | |
| 8443 } | |
| 8444 if (modifiers.varKeyword != null) { | |
| 8445 _reportErrorForToken( | |
| 8446 ParserErrorCode.CONSTRUCTOR_WITH_RETURN_TYPE, modifiers.varKeyword); | |
| 8447 } | |
| 8448 Token externalKeyword = modifiers.externalKeyword; | |
| 8449 Token constKeyword = modifiers.constKeyword; | |
| 8450 Token factoryKeyword = modifiers.factoryKeyword; | |
| 8451 if (externalKeyword != null && | |
| 8452 constKeyword != null && | |
| 8453 constKeyword.offset < externalKeyword.offset) { | |
| 8454 _reportErrorForToken( | |
| 8455 ParserErrorCode.EXTERNAL_AFTER_CONST, externalKeyword); | |
| 8456 } | |
| 8457 if (externalKeyword != null && | |
| 8458 factoryKeyword != null && | |
| 8459 factoryKeyword.offset < externalKeyword.offset) { | |
| 8460 _reportErrorForToken( | |
| 8461 ParserErrorCode.EXTERNAL_AFTER_FACTORY, externalKeyword); | |
| 8462 } | |
| 8463 return constKeyword; | |
| 8464 } | |
| 8465 | |
| 8466 /** | |
| 8467 * Validate that the given set of [modifiers] is appropriate for a class and | |
| 8468 * return the 'abstract' keyword if there is one. | |
| 8469 */ | |
| 8470 void _validateModifiersForEnum(Modifiers modifiers) { | |
| 8471 _validateModifiersForTopLevelDeclaration(modifiers); | |
| 8472 if (modifiers.abstractKeyword != null) { | |
| 8473 _reportErrorForToken( | |
| 8474 ParserErrorCode.ABSTRACT_ENUM, modifiers.abstractKeyword); | |
| 8475 } | |
| 8476 if (modifiers.constKeyword != null) { | |
| 8477 _reportErrorForToken(ParserErrorCode.CONST_ENUM, modifiers.constKeyword); | |
| 8478 } | |
| 8479 if (modifiers.externalKeyword != null) { | |
| 8480 _reportErrorForToken( | |
| 8481 ParserErrorCode.EXTERNAL_ENUM, modifiers.externalKeyword); | |
| 8482 } | |
| 8483 if (modifiers.finalKeyword != null) { | |
| 8484 _reportErrorForToken(ParserErrorCode.FINAL_ENUM, modifiers.finalKeyword); | |
| 8485 } | |
| 8486 if (modifiers.varKeyword != null) { | |
| 8487 _reportErrorForToken(ParserErrorCode.VAR_ENUM, modifiers.varKeyword); | |
| 8488 } | |
| 8489 } | |
| 8490 | |
| 8491 /** | |
| 8492 * Validate that the given set of [modifiers] is appropriate for a field and | |
| 8493 * return the 'final', 'const' or 'var' keyword if there is one. | |
| 8494 */ | |
| 8495 Token _validateModifiersForField(Modifiers modifiers) { | |
| 8496 if (modifiers.abstractKeyword != null) { | |
| 8497 _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER); | |
| 8498 } | |
| 8499 if (modifiers.externalKeyword != null) { | |
| 8500 _reportErrorForToken( | |
| 8501 ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword); | |
| 8502 } | |
| 8503 if (modifiers.factoryKeyword != null) { | |
| 8504 _reportErrorForToken( | |
| 8505 ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword); | |
| 8506 } | |
| 8507 Token staticKeyword = modifiers.staticKeyword; | |
| 8508 Token constKeyword = modifiers.constKeyword; | |
| 8509 Token finalKeyword = modifiers.finalKeyword; | |
| 8510 Token varKeyword = modifiers.varKeyword; | |
| 8511 if (constKeyword != null) { | |
| 8512 if (finalKeyword != null) { | |
| 8513 _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword); | |
| 8514 } | |
| 8515 if (varKeyword != null) { | |
| 8516 _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword); | |
| 8517 } | |
| 8518 if (staticKeyword != null && constKeyword.offset < staticKeyword.offset) { | |
| 8519 _reportErrorForToken(ParserErrorCode.STATIC_AFTER_CONST, staticKeyword); | |
| 8520 } | |
| 8521 } else if (finalKeyword != null) { | |
| 8522 if (varKeyword != null) { | |
| 8523 _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword); | |
| 8524 } | |
| 8525 if (staticKeyword != null && finalKeyword.offset < staticKeyword.offset) { | |
| 8526 _reportErrorForToken(ParserErrorCode.STATIC_AFTER_FINAL, staticKeyword); | |
| 8527 } | |
| 8528 } else if (varKeyword != null && | |
| 8529 staticKeyword != null && | |
| 8530 varKeyword.offset < staticKeyword.offset) { | |
| 8531 _reportErrorForToken(ParserErrorCode.STATIC_AFTER_VAR, staticKeyword); | |
| 8532 } | |
| 8533 return Token.lexicallyFirst([constKeyword, finalKeyword, varKeyword]); | |
| 8534 } | |
| 8535 | |
| 8536 /** | |
| 8537 * Validate that the given set of [modifiers] is appropriate for a local | |
| 8538 * function. | |
| 8539 */ | |
| 8540 void _validateModifiersForFunctionDeclarationStatement(Modifiers modifiers) { | |
| 8541 if (modifiers.abstractKeyword != null || | |
| 8542 modifiers.constKeyword != null || | |
| 8543 modifiers.externalKeyword != null || | |
| 8544 modifiers.factoryKeyword != null || | |
| 8545 modifiers.finalKeyword != null || | |
| 8546 modifiers.staticKeyword != null || | |
| 8547 modifiers.varKeyword != null) { | |
| 8548 _reportErrorForCurrentToken( | |
| 8549 ParserErrorCode.LOCAL_FUNCTION_DECLARATION_MODIFIER); | |
| 8550 } | |
| 8551 } | |
| 8552 | |
| 8553 /** | |
| 8554 * Validate that the given set of [modifiers] is appropriate for a getter, | |
| 8555 * setter, or method. | |
| 8556 */ | |
| 8557 void _validateModifiersForGetterOrSetterOrMethod(Modifiers modifiers) { | |
| 8558 if (modifiers.abstractKeyword != null) { | |
| 8559 _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER); | |
| 8560 } | |
| 8561 if (modifiers.constKeyword != null) { | |
| 8562 _reportErrorForToken( | |
| 8563 ParserErrorCode.CONST_METHOD, modifiers.constKeyword); | |
| 8564 } | |
| 8565 if (modifiers.factoryKeyword != null) { | |
| 8566 _reportErrorForToken( | |
| 8567 ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword); | |
| 8568 } | |
| 8569 if (modifiers.finalKeyword != null) { | |
| 8570 _reportErrorForToken( | |
| 8571 ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword); | |
| 8572 } | |
| 8573 if (modifiers.varKeyword != null) { | |
| 8574 _reportErrorForToken( | |
| 8575 ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword); | |
| 8576 } | |
| 8577 Token externalKeyword = modifiers.externalKeyword; | |
| 8578 Token staticKeyword = modifiers.staticKeyword; | |
| 8579 if (externalKeyword != null && | |
| 8580 staticKeyword != null && | |
| 8581 staticKeyword.offset < externalKeyword.offset) { | |
| 8582 _reportErrorForToken( | |
| 8583 ParserErrorCode.EXTERNAL_AFTER_STATIC, externalKeyword); | |
| 8584 } | |
| 8585 } | |
| 8586 | |
| 8587 /** | |
| 8588 * Validate that the given set of [modifiers] is appropriate for a getter, | |
| 8589 * setter, or method. | |
| 8590 */ | |
| 8591 void _validateModifiersForOperator(Modifiers modifiers) { | |
| 8592 if (modifiers.abstractKeyword != null) { | |
| 8593 _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_CLASS_MEMBER); | |
| 8594 } | |
| 8595 if (modifiers.constKeyword != null) { | |
| 8596 _reportErrorForToken( | |
| 8597 ParserErrorCode.CONST_METHOD, modifiers.constKeyword); | |
| 8598 } | |
| 8599 if (modifiers.factoryKeyword != null) { | |
| 8600 _reportErrorForToken( | |
| 8601 ParserErrorCode.NON_CONSTRUCTOR_FACTORY, modifiers.factoryKeyword); | |
| 8602 } | |
| 8603 if (modifiers.finalKeyword != null) { | |
| 8604 _reportErrorForToken( | |
| 8605 ParserErrorCode.FINAL_METHOD, modifiers.finalKeyword); | |
| 8606 } | |
| 8607 if (modifiers.staticKeyword != null) { | |
| 8608 _reportErrorForToken( | |
| 8609 ParserErrorCode.STATIC_OPERATOR, modifiers.staticKeyword); | |
| 8610 } | |
| 8611 if (modifiers.varKeyword != null) { | |
| 8612 _reportErrorForToken( | |
| 8613 ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword); | |
| 8614 } | |
| 8615 } | |
| 8616 | |
| 8617 /** | |
| 8618 * Validate that the given set of [modifiers] is appropriate for a top-level | |
| 8619 * declaration. | |
| 8620 */ | |
| 8621 void _validateModifiersForTopLevelDeclaration(Modifiers modifiers) { | |
| 8622 if (modifiers.factoryKeyword != null) { | |
| 8623 _reportErrorForToken(ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION, | |
| 8624 modifiers.factoryKeyword); | |
| 8625 } | |
| 8626 if (modifiers.staticKeyword != null) { | |
| 8627 _reportErrorForToken(ParserErrorCode.STATIC_TOP_LEVEL_DECLARATION, | |
| 8628 modifiers.staticKeyword); | |
| 8629 } | |
| 8630 } | |
| 8631 | |
| 8632 /** | |
| 8633 * Validate that the given set of [modifiers] is appropriate for a top-level | |
| 8634 * function. | |
| 8635 */ | |
| 8636 void _validateModifiersForTopLevelFunction(Modifiers modifiers) { | |
| 8637 _validateModifiersForTopLevelDeclaration(modifiers); | |
| 8638 if (modifiers.abstractKeyword != null) { | |
| 8639 _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_FUNCTION); | |
| 8640 } | |
| 8641 if (modifiers.constKeyword != null) { | |
| 8642 _reportErrorForToken(ParserErrorCode.CONST_CLASS, modifiers.constKeyword); | |
| 8643 } | |
| 8644 if (modifiers.finalKeyword != null) { | |
| 8645 _reportErrorForToken(ParserErrorCode.FINAL_CLASS, modifiers.finalKeyword); | |
| 8646 } | |
| 8647 if (modifiers.varKeyword != null) { | |
| 8648 _reportErrorForToken( | |
| 8649 ParserErrorCode.VAR_RETURN_TYPE, modifiers.varKeyword); | |
| 8650 } | |
| 8651 } | |
| 8652 | |
| 8653 /** | |
| 8654 * Validate that the given set of [modifiers] is appropriate for a field and | |
| 8655 * return the 'final', 'const' or 'var' keyword if there is one. | |
| 8656 */ | |
| 8657 Token _validateModifiersForTopLevelVariable(Modifiers modifiers) { | |
| 8658 _validateModifiersForTopLevelDeclaration(modifiers); | |
| 8659 if (modifiers.abstractKeyword != null) { | |
| 8660 _reportErrorForCurrentToken(ParserErrorCode.ABSTRACT_TOP_LEVEL_VARIABLE); | |
| 8661 } | |
| 8662 if (modifiers.externalKeyword != null) { | |
| 8663 _reportErrorForToken( | |
| 8664 ParserErrorCode.EXTERNAL_FIELD, modifiers.externalKeyword); | |
| 8665 } | |
| 8666 Token constKeyword = modifiers.constKeyword; | |
| 8667 Token finalKeyword = modifiers.finalKeyword; | |
| 8668 Token varKeyword = modifiers.varKeyword; | |
| 8669 if (constKeyword != null) { | |
| 8670 if (finalKeyword != null) { | |
| 8671 _reportErrorForToken(ParserErrorCode.CONST_AND_FINAL, finalKeyword); | |
| 8672 } | |
| 8673 if (varKeyword != null) { | |
| 8674 _reportErrorForToken(ParserErrorCode.CONST_AND_VAR, varKeyword); | |
| 8675 } | |
| 8676 } else if (finalKeyword != null) { | |
| 8677 if (varKeyword != null) { | |
| 8678 _reportErrorForToken(ParserErrorCode.FINAL_AND_VAR, varKeyword); | |
| 8679 } | |
| 8680 } | |
| 8681 return Token.lexicallyFirst([constKeyword, finalKeyword, varKeyword]); | |
| 8682 } | |
| 8683 | |
| 8684 /** | |
| 8685 * Validate that the given set of [modifiers] is appropriate for a class and | |
| 8686 * return the 'abstract' keyword if there is one. | |
| 8687 */ | |
| 8688 void _validateModifiersForTypedef(Modifiers modifiers) { | |
| 8689 _validateModifiersForTopLevelDeclaration(modifiers); | |
| 8690 if (modifiers.abstractKeyword != null) { | |
| 8691 _reportErrorForToken( | |
| 8692 ParserErrorCode.ABSTRACT_TYPEDEF, modifiers.abstractKeyword); | |
| 8693 } | |
| 8694 if (modifiers.constKeyword != null) { | |
| 8695 _reportErrorForToken( | |
| 8696 ParserErrorCode.CONST_TYPEDEF, modifiers.constKeyword); | |
| 8697 } | |
| 8698 if (modifiers.externalKeyword != null) { | |
| 8699 _reportErrorForToken( | |
| 8700 ParserErrorCode.EXTERNAL_TYPEDEF, modifiers.externalKeyword); | |
| 8701 } | |
| 8702 if (modifiers.finalKeyword != null) { | |
| 8703 _reportErrorForToken( | |
| 8704 ParserErrorCode.FINAL_TYPEDEF, modifiers.finalKeyword); | |
| 8705 } | |
| 8706 if (modifiers.varKeyword != null) { | |
| 8707 _reportErrorForToken(ParserErrorCode.VAR_TYPEDEF, modifiers.varKeyword); | |
| 8708 } | |
| 8709 } | |
| 8710 } | |
| 8711 /** | |
| 8712 * A synthetic keyword token. | |
| 8713 */ | |
| 8714 class Parser_SyntheticKeywordToken extends KeywordToken { | |
| 8715 /** | |
| 8716 * Initialize a newly created token to represent the given [keyword] at the | |
| 8717 * given [offset]. | |
| 8718 */ | |
| 8719 Parser_SyntheticKeywordToken(Keyword keyword, int offset) | |
| 8720 : super(keyword, offset); | |
| 8721 | |
| 8722 @override | |
| 8723 int get length => 0; | |
| 8724 | |
| 8725 @override | |
| 8726 Token copy() => new Parser_SyntheticKeywordToken(keyword, offset); | |
| 8727 } | |
| 8728 | |
| 8729 /** | |
| 8730 * The error codes used for errors detected by the parser. The convention for | |
| 8731 * this class is for the name of the error code to indicate the problem that | |
| 8732 * caused the error to be generated and for the error message to explain what | |
| 8733 * is wrong and, when appropriate, how the problem can be corrected. | |
| 8734 */ | |
| 8735 class ParserErrorCode extends ErrorCode { | |
| 8736 static const ParserErrorCode ABSTRACT_CLASS_MEMBER = const ParserErrorCode( | |
| 8737 'ABSTRACT_CLASS_MEMBER', | |
| 8738 "Members of classes cannot be declared to be 'abstract'"); | |
| 8739 | |
| 8740 static const ParserErrorCode ABSTRACT_ENUM = const ParserErrorCode( | |
| 8741 'ABSTRACT_ENUM', "Enums cannot be declared to be 'abstract'"); | |
| 8742 | |
| 8743 static const ParserErrorCode ABSTRACT_STATIC_METHOD = const ParserErrorCode( | |
| 8744 'ABSTRACT_STATIC_METHOD', | |
| 8745 "Static methods cannot be declared to be 'abstract'"); | |
| 8746 | |
| 8747 static const ParserErrorCode ABSTRACT_TOP_LEVEL_FUNCTION = | |
| 8748 const ParserErrorCode('ABSTRACT_TOP_LEVEL_FUNCTION', | |
| 8749 "Top-level functions cannot be declared to be 'abstract'"); | |
| 8750 | |
| 8751 static const ParserErrorCode ABSTRACT_TOP_LEVEL_VARIABLE = | |
| 8752 const ParserErrorCode('ABSTRACT_TOP_LEVEL_VARIABLE', | |
| 8753 "Top-level variables cannot be declared to be 'abstract'"); | |
| 8754 | |
| 8755 static const ParserErrorCode ABSTRACT_TYPEDEF = const ParserErrorCode( | |
| 8756 'ABSTRACT_TYPEDEF', "Type aliases cannot be declared to be 'abstract'"); | |
| 8757 | |
| 8758 static const ParserErrorCode ANNOTATION_ON_ENUM_CONSTANT = | |
| 8759 const ParserErrorCode('ANNOTATION_ON_ENUM_CONSTANT', | |
| 8760 "Enum constants cannot have annotations"); | |
| 8761 | |
| 8762 static const ParserErrorCode ASSERT_DOES_NOT_TAKE_ASSIGNMENT = | |
| 8763 const ParserErrorCode('ASSERT_DOES_NOT_TAKE_ASSIGNMENT', | |
| 8764 "Assert cannot be called on an assignment"); | |
| 8765 | |
| 8766 static const ParserErrorCode ASSERT_DOES_NOT_TAKE_CASCADE = | |
| 8767 const ParserErrorCode( | |
| 8768 'ASSERT_DOES_NOT_TAKE_CASCADE', "Assert cannot be called on cascade"); | |
| 8769 | |
| 8770 static const ParserErrorCode ASSERT_DOES_NOT_TAKE_THROW = | |
| 8771 const ParserErrorCode( | |
| 8772 'ASSERT_DOES_NOT_TAKE_THROW', "Assert cannot be called on throws"); | |
| 8773 | |
| 8774 static const ParserErrorCode ASSERT_DOES_NOT_TAKE_RETHROW = | |
| 8775 const ParserErrorCode('ASSERT_DOES_NOT_TAKE_RETHROW', | |
| 8776 "Assert cannot be called on rethrows"); | |
| 8777 | |
| 8778 /** | |
| 8779 * 16.32 Identifier Reference: It is a compile-time error if any of the | |
| 8780 * identifiers async, await, or yield is used as an identifier in a function | |
| 8781 * body marked with either async, async*, or sync*. | |
| 8782 */ | |
| 8783 static const ParserErrorCode ASYNC_KEYWORD_USED_AS_IDENTIFIER = | |
| 8784 const ParserErrorCode('ASYNC_KEYWORD_USED_AS_IDENTIFIER', | |
| 8785 "The keywords 'async', 'await', and 'yield' may not be used as identif
iers in an asynchronous or generator function."); | |
| 8786 | |
| 8787 static const ParserErrorCode BREAK_OUTSIDE_OF_LOOP = const ParserErrorCode( | |
| 8788 'BREAK_OUTSIDE_OF_LOOP', | |
| 8789 "A break statement cannot be used outside of a loop or switch statement"); | |
| 8790 | |
| 8791 static const ParserErrorCode CLASS_IN_CLASS = const ParserErrorCode( | |
| 8792 'CLASS_IN_CLASS', "Classes cannot be declared inside other classes"); | |
| 8793 | |
| 8794 static const ParserErrorCode COLON_IN_PLACE_OF_IN = const ParserErrorCode( | |
| 8795 'COLON_IN_PLACE_OF_IN', "For-in loops use 'in' rather than a colon"); | |
| 8796 | |
| 8797 static const ParserErrorCode CONST_AND_FINAL = const ParserErrorCode( | |
| 8798 'CONST_AND_FINAL', | |
| 8799 "Members cannot be declared to be both 'const' and 'final'"); | |
| 8800 | |
| 8801 static const ParserErrorCode CONST_AND_VAR = const ParserErrorCode( | |
| 8802 'CONST_AND_VAR', | |
| 8803 "Members cannot be declared to be both 'const' and 'var'"); | |
| 8804 | |
| 8805 static const ParserErrorCode CONST_CLASS = const ParserErrorCode( | |
| 8806 'CONST_CLASS', "Classes cannot be declared to be 'const'"); | |
| 8807 | |
| 8808 static const ParserErrorCode CONST_CONSTRUCTOR_WITH_BODY = | |
| 8809 const ParserErrorCode('CONST_CONSTRUCTOR_WITH_BODY', | |
| 8810 "'const' constructors cannot have a body"); | |
| 8811 | |
| 8812 static const ParserErrorCode CONST_ENUM = const ParserErrorCode( | |
| 8813 'CONST_ENUM', "Enums cannot be declared to be 'const'"); | |
| 8814 | |
| 8815 static const ParserErrorCode CONST_FACTORY = const ParserErrorCode( | |
| 8816 'CONST_FACTORY', | |
| 8817 "Only redirecting factory constructors can be declared to be 'const'"); | |
| 8818 | |
| 8819 static const ParserErrorCode CONST_METHOD = const ParserErrorCode( | |
| 8820 'CONST_METHOD', | |
| 8821 "Getters, setters and methods cannot be declared to be 'const'"); | |
| 8822 | |
| 8823 static const ParserErrorCode CONST_TYPEDEF = const ParserErrorCode( | |
| 8824 'CONST_TYPEDEF', "Type aliases cannot be declared to be 'const'"); | |
| 8825 | |
| 8826 static const ParserErrorCode CONSTRUCTOR_WITH_RETURN_TYPE = | |
| 8827 const ParserErrorCode('CONSTRUCTOR_WITH_RETURN_TYPE', | |
| 8828 "Constructors cannot have a return type"); | |
| 8829 | |
| 8830 static const ParserErrorCode CONTINUE_OUTSIDE_OF_LOOP = const ParserErrorCode( | |
| 8831 'CONTINUE_OUTSIDE_OF_LOOP', | |
| 8832 "A continue statement cannot be used outside of a loop or switch statement
"); | |
| 8833 | |
| 8834 static const ParserErrorCode CONTINUE_WITHOUT_LABEL_IN_CASE = | |
| 8835 const ParserErrorCode('CONTINUE_WITHOUT_LABEL_IN_CASE', | |
| 8836 "A continue statement in a switch statement must have a label as a tar
get"); | |
| 8837 | |
| 8838 static const ParserErrorCode DEPRECATED_CLASS_TYPE_ALIAS = | |
| 8839 const ParserErrorCode('DEPRECATED_CLASS_TYPE_ALIAS', | |
| 8840 "The 'typedef' mixin application was replaced with 'class'"); | |
| 8841 | |
| 8842 static const ParserErrorCode DIRECTIVE_AFTER_DECLARATION = | |
| 8843 const ParserErrorCode('DIRECTIVE_AFTER_DECLARATION', | |
| 8844 "Directives must appear before any declarations"); | |
| 8845 | |
| 8846 static const ParserErrorCode DUPLICATE_LABEL_IN_SWITCH_STATEMENT = | |
| 8847 const ParserErrorCode('DUPLICATE_LABEL_IN_SWITCH_STATEMENT', | |
| 8848 "The label {0} was already used in this switch statement"); | |
| 8849 | |
| 8850 static const ParserErrorCode DUPLICATED_MODIFIER = const ParserErrorCode( | |
| 8851 'DUPLICATED_MODIFIER', "The modifier '{0}' was already specified."); | |
| 8852 | |
| 8853 static const ParserErrorCode EMPTY_ENUM_BODY = const ParserErrorCode( | |
| 8854 'EMPTY_ENUM_BODY', "An enum must declare at least one constant name"); | |
| 8855 | |
| 8856 static const ParserErrorCode ENUM_IN_CLASS = const ParserErrorCode( | |
| 8857 'ENUM_IN_CLASS', "Enums cannot be declared inside classes"); | |
| 8858 | |
| 8859 static const ParserErrorCode EQUALITY_CANNOT_BE_EQUALITY_OPERAND = | |
| 8860 const ParserErrorCode('EQUALITY_CANNOT_BE_EQUALITY_OPERAND', | |
| 8861 "Equality expression cannot be operand of another equality expression.
"); | |
| 8862 | |
| 8863 static const ParserErrorCode EXPECTED_CASE_OR_DEFAULT = const ParserErrorCode( | |
| 8864 'EXPECTED_CASE_OR_DEFAULT', "Expected 'case' or 'default'"); | |
| 8865 | |
| 8866 static const ParserErrorCode EXPECTED_CLASS_MEMBER = | |
| 8867 const ParserErrorCode('EXPECTED_CLASS_MEMBER', "Expected a class member"); | |
| 8868 | |
| 8869 static const ParserErrorCode EXPECTED_EXECUTABLE = const ParserErrorCode( | |
| 8870 'EXPECTED_EXECUTABLE', | |
| 8871 "Expected a method, getter, setter or operator declaration"); | |
| 8872 | |
| 8873 static const ParserErrorCode EXPECTED_LIST_OR_MAP_LITERAL = | |
| 8874 const ParserErrorCode( | |
| 8875 'EXPECTED_LIST_OR_MAP_LITERAL', "Expected a list or map literal"); | |
| 8876 | |
| 8877 static const ParserErrorCode EXPECTED_STRING_LITERAL = const ParserErrorCode( | |
| 8878 'EXPECTED_STRING_LITERAL', "Expected a string literal"); | |
| 8879 | |
| 8880 static const ParserErrorCode EXPECTED_TOKEN = | |
| 8881 const ParserErrorCode('EXPECTED_TOKEN', "Expected to find '{0}'"); | |
| 8882 | |
| 8883 static const ParserErrorCode EXPECTED_TYPE_NAME = | |
| 8884 const ParserErrorCode('EXPECTED_TYPE_NAME', "Expected a type name"); | |
| 8885 | |
| 8886 static const ParserErrorCode EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE = | |
| 8887 const ParserErrorCode('EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE', | |
| 8888 "Export directives must preceed part directives"); | |
| 8889 | |
| 8890 static const ParserErrorCode EXTERNAL_AFTER_CONST = const ParserErrorCode( | |
| 8891 'EXTERNAL_AFTER_CONST', | |
| 8892 "The modifier 'external' should be before the modifier 'const'"); | |
| 8893 | |
| 8894 static const ParserErrorCode EXTERNAL_AFTER_FACTORY = const ParserErrorCode( | |
| 8895 'EXTERNAL_AFTER_FACTORY', | |
| 8896 "The modifier 'external' should be before the modifier 'factory'"); | |
| 8897 | |
| 8898 static const ParserErrorCode EXTERNAL_AFTER_STATIC = const ParserErrorCode( | |
| 8899 'EXTERNAL_AFTER_STATIC', | |
| 8900 "The modifier 'external' should be before the modifier 'static'"); | |
| 8901 | |
| 8902 static const ParserErrorCode EXTERNAL_CLASS = const ParserErrorCode( | |
| 8903 'EXTERNAL_CLASS', "Classes cannot be declared to be 'external'"); | |
| 8904 | |
| 8905 static const ParserErrorCode EXTERNAL_CONSTRUCTOR_WITH_BODY = | |
| 8906 const ParserErrorCode('EXTERNAL_CONSTRUCTOR_WITH_BODY', | |
| 8907 "External constructors cannot have a body"); | |
| 8908 | |
| 8909 static const ParserErrorCode EXTERNAL_ENUM = const ParserErrorCode( | |
| 8910 'EXTERNAL_ENUM', "Enums cannot be declared to be 'external'"); | |
| 8911 | |
| 8912 static const ParserErrorCode EXTERNAL_FIELD = const ParserErrorCode( | |
| 8913 'EXTERNAL_FIELD', "Fields cannot be declared to be 'external'"); | |
| 8914 | |
| 8915 static const ParserErrorCode EXTERNAL_GETTER_WITH_BODY = | |
| 8916 const ParserErrorCode( | |
| 8917 'EXTERNAL_GETTER_WITH_BODY', "External getters cannot have a body"); | |
| 8918 | |
| 8919 static const ParserErrorCode EXTERNAL_METHOD_WITH_BODY = | |
| 8920 const ParserErrorCode( | |
| 8921 'EXTERNAL_METHOD_WITH_BODY', "External methods cannot have a body"); | |
| 8922 | |
| 8923 static const ParserErrorCode EXTERNAL_OPERATOR_WITH_BODY = | |
| 8924 const ParserErrorCode('EXTERNAL_OPERATOR_WITH_BODY', | |
| 8925 "External operators cannot have a body"); | |
| 8926 | |
| 8927 static const ParserErrorCode EXTERNAL_SETTER_WITH_BODY = | |
| 8928 const ParserErrorCode( | |
| 8929 'EXTERNAL_SETTER_WITH_BODY', "External setters cannot have a body"); | |
| 8930 | |
| 8931 static const ParserErrorCode EXTERNAL_TYPEDEF = const ParserErrorCode( | |
| 8932 'EXTERNAL_TYPEDEF', "Type aliases cannot be declared to be 'external'"); | |
| 8933 | |
| 8934 static const ParserErrorCode FACTORY_TOP_LEVEL_DECLARATION = | |
| 8935 const ParserErrorCode('FACTORY_TOP_LEVEL_DECLARATION', | |
| 8936 "Top-level declarations cannot be declared to be 'factory'"); | |
| 8937 | |
| 8938 static const ParserErrorCode FACTORY_WITH_INITIALIZERS = | |
| 8939 const ParserErrorCode('FACTORY_WITH_INITIALIZERS', | |
| 8940 "A 'factory' constructor cannot have initializers", | |
| 8941 "Either remove the 'factory' keyword to make this a generative " | |
| 8942 "constructor or remove the initializers."); | |
| 8943 | |
| 8944 static const ParserErrorCode FACTORY_WITHOUT_BODY = const ParserErrorCode( | |
| 8945 'FACTORY_WITHOUT_BODY', | |
| 8946 "A non-redirecting 'factory' constructor must have a body"); | |
| 8947 | |
| 8948 static const ParserErrorCode FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR = | |
| 8949 const ParserErrorCode('FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR', | |
| 8950 "Field initializers can only be used in a constructor"); | |
| 8951 | |
| 8952 static const ParserErrorCode FINAL_AND_VAR = const ParserErrorCode( | |
| 8953 'FINAL_AND_VAR', | |
| 8954 "Members cannot be declared to be both 'final' and 'var'"); | |
| 8955 | |
| 8956 static const ParserErrorCode FINAL_CLASS = const ParserErrorCode( | |
| 8957 'FINAL_CLASS', "Classes cannot be declared to be 'final'"); | |
| 8958 | |
| 8959 static const ParserErrorCode FINAL_CONSTRUCTOR = const ParserErrorCode( | |
| 8960 'FINAL_CONSTRUCTOR', "A constructor cannot be declared to be 'final'"); | |
| 8961 | |
| 8962 static const ParserErrorCode FINAL_ENUM = const ParserErrorCode( | |
| 8963 'FINAL_ENUM', "Enums cannot be declared to be 'final'"); | |
| 8964 | |
| 8965 static const ParserErrorCode FINAL_METHOD = const ParserErrorCode( | |
| 8966 'FINAL_METHOD', | |
| 8967 "Getters, setters and methods cannot be declared to be 'final'"); | |
| 8968 | |
| 8969 static const ParserErrorCode FINAL_TYPEDEF = const ParserErrorCode( | |
| 8970 'FINAL_TYPEDEF', "Type aliases cannot be declared to be 'final'"); | |
| 8971 | |
| 8972 static const ParserErrorCode FUNCTION_TYPED_PARAMETER_VAR = const ParserErrorC
ode( | |
| 8973 'FUNCTION_TYPED_PARAMETER_VAR', | |
| 8974 "Function typed parameters cannot specify 'const', 'final' or 'var' instea
d of return type"); | |
| 8975 | |
| 8976 static const ParserErrorCode GETTER_IN_FUNCTION = const ParserErrorCode( | |
| 8977 'GETTER_IN_FUNCTION', | |
| 8978 "Getters cannot be defined within methods or functions"); | |
| 8979 | |
| 8980 static const ParserErrorCode GETTER_WITH_PARAMETERS = const ParserErrorCode( | |
| 8981 'GETTER_WITH_PARAMETERS', | |
| 8982 "Getter should be declared without a parameter list"); | |
| 8983 | |
| 8984 static const ParserErrorCode ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE = | |
| 8985 const ParserErrorCode('ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE', | |
| 8986 "Illegal assignment to non-assignable expression"); | |
| 8987 | |
| 8988 static const ParserErrorCode IMPLEMENTS_BEFORE_EXTENDS = | |
| 8989 const ParserErrorCode('IMPLEMENTS_BEFORE_EXTENDS', | |
| 8990 "The extends clause must be before the implements clause"); | |
| 8991 | |
| 8992 static const ParserErrorCode IMPLEMENTS_BEFORE_WITH = const ParserErrorCode( | |
| 8993 'IMPLEMENTS_BEFORE_WITH', | |
| 8994 "The with clause must be before the implements clause"); | |
| 8995 | |
| 8996 static const ParserErrorCode IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE = | |
| 8997 const ParserErrorCode('IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE', | |
| 8998 "Import directives must preceed part directives"); | |
| 8999 | |
| 9000 static const ParserErrorCode INITIALIZED_VARIABLE_IN_FOR_EACH = | |
| 9001 const ParserErrorCode('INITIALIZED_VARIABLE_IN_FOR_EACH', | |
| 9002 "The loop variable in a for-each loop cannot be initialized"); | |
| 9003 | |
| 9004 static const ParserErrorCode INVALID_AWAIT_IN_FOR = const ParserErrorCode( | |
| 9005 'INVALID_AWAIT_IN_FOR', | |
| 9006 "The modifier 'await' is not allowed for a normal 'for' statement", | |
| 9007 "Remove the keyword or use a for-each statement."); | |
| 9008 | |
| 9009 static const ParserErrorCode INVALID_CODE_POINT = const ParserErrorCode( | |
| 9010 'INVALID_CODE_POINT', | |
| 9011 "The escape sequence '{0}' is not a valid code point"); | |
| 9012 | |
| 9013 static const ParserErrorCode INVALID_COMMENT_REFERENCE = const ParserErrorCode
( | |
| 9014 'INVALID_COMMENT_REFERENCE', | |
| 9015 "Comment references should contain a possibly prefixed identifier and can
start with 'new', but should not contain anything else"); | |
| 9016 | |
| 9017 static const ParserErrorCode INVALID_HEX_ESCAPE = const ParserErrorCode( | |
| 9018 'INVALID_HEX_ESCAPE', | |
| 9019 "An escape sequence starting with '\\x' must be followed by 2 hexidecimal
digits"); | |
| 9020 | |
| 9021 static const ParserErrorCode INVALID_OPERATOR = const ParserErrorCode( | |
| 9022 'INVALID_OPERATOR', "The string '{0}' is not a valid operator"); | |
| 9023 | |
| 9024 static const ParserErrorCode INVALID_OPERATOR_FOR_SUPER = | |
| 9025 const ParserErrorCode('INVALID_OPERATOR_FOR_SUPER', | |
| 9026 "The operator '{0}' cannot be used with 'super'"); | |
| 9027 | |
| 9028 static const ParserErrorCode INVALID_STAR_AFTER_ASYNC = const ParserErrorCode( | |
| 9029 'INVALID_STAR_AFTER_ASYNC', | |
| 9030 "The modifier 'async*' is not allowed for an expression function body", | |
| 9031 "Convert the body to a block."); | |
| 9032 | |
| 9033 static const ParserErrorCode INVALID_SYNC = const ParserErrorCode( | |
| 9034 'INVALID_SYNC', | |
| 9035 "The modifier 'sync' is not allowed for an exrpression function body", | |
| 9036 "Convert the body to a block."); | |
| 9037 | |
| 9038 static const ParserErrorCode INVALID_UNICODE_ESCAPE = const ParserErrorCode( | |
| 9039 'INVALID_UNICODE_ESCAPE', | |
| 9040 "An escape sequence starting with '\\u' must be followed by 4 hexidecimal
digits or from 1 to 6 digits between '{' and '}'"); | |
| 9041 | |
| 9042 static const ParserErrorCode LIBRARY_DIRECTIVE_NOT_FIRST = | |
| 9043 const ParserErrorCode('LIBRARY_DIRECTIVE_NOT_FIRST', | |
| 9044 "The library directive must appear before all other directives"); | |
| 9045 | |
| 9046 static const ParserErrorCode LOCAL_FUNCTION_DECLARATION_MODIFIER = | |
| 9047 const ParserErrorCode('LOCAL_FUNCTION_DECLARATION_MODIFIER', | |
| 9048 "Local function declarations cannot specify any modifier"); | |
| 9049 | |
| 9050 static const ParserErrorCode MISSING_ASSIGNABLE_SELECTOR = | |
| 9051 const ParserErrorCode('MISSING_ASSIGNABLE_SELECTOR', | |
| 9052 "Missing selector such as \".<identifier>\" or \"[0]\""); | |
| 9053 | |
| 9054 static const ParserErrorCode MISSING_ASSIGNMENT_IN_INITIALIZER = | |
| 9055 const ParserErrorCode('MISSING_ASSIGNMENT_IN_INITIALIZER', | |
| 9056 "Expected an assignment after the field name"); | |
| 9057 | |
| 9058 static const ParserErrorCode MISSING_CATCH_OR_FINALLY = const ParserErrorCode( | |
| 9059 'MISSING_CATCH_OR_FINALLY', | |
| 9060 "A try statement must have either a catch or finally clause"); | |
| 9061 | |
| 9062 static const ParserErrorCode MISSING_CLASS_BODY = const ParserErrorCode( | |
| 9063 'MISSING_CLASS_BODY', | |
| 9064 "A class definition must have a body, even if it is empty"); | |
| 9065 | |
| 9066 static const ParserErrorCode MISSING_CLOSING_PARENTHESIS = | |
| 9067 const ParserErrorCode( | |
| 9068 'MISSING_CLOSING_PARENTHESIS', "The closing parenthesis is missing"); | |
| 9069 | |
| 9070 static const ParserErrorCode MISSING_CONST_FINAL_VAR_OR_TYPE = | |
| 9071 const ParserErrorCode('MISSING_CONST_FINAL_VAR_OR_TYPE', | |
| 9072 "Variables must be declared using the keywords 'const', 'final', 'var'
or a type name"); | |
| 9073 | |
| 9074 static const ParserErrorCode MISSING_ENUM_BODY = const ParserErrorCode( | |
| 9075 'MISSING_ENUM_BODY', | |
| 9076 "An enum definition must have a body with at least one constant name"); | |
| 9077 | |
| 9078 static const ParserErrorCode MISSING_EXPRESSION_IN_INITIALIZER = | |
| 9079 const ParserErrorCode('MISSING_EXPRESSION_IN_INITIALIZER', | |
| 9080 "Expected an expression after the assignment operator"); | |
| 9081 | |
| 9082 static const ParserErrorCode MISSING_EXPRESSION_IN_THROW = | |
| 9083 const ParserErrorCode('MISSING_EXPRESSION_IN_THROW', | |
| 9084 "Throw expressions must compute the object to be thrown"); | |
| 9085 | |
| 9086 static const ParserErrorCode MISSING_FUNCTION_BODY = const ParserErrorCode( | |
| 9087 'MISSING_FUNCTION_BODY', "A function body must be provided"); | |
| 9088 | |
| 9089 static const ParserErrorCode MISSING_FUNCTION_PARAMETERS = | |
| 9090 const ParserErrorCode('MISSING_FUNCTION_PARAMETERS', | |
| 9091 "Functions must have an explicit list of parameters"); | |
| 9092 | |
| 9093 static const ParserErrorCode MISSING_METHOD_PARAMETERS = | |
| 9094 const ParserErrorCode('MISSING_METHOD_PARAMETERS', | |
| 9095 "Methods must have an explicit list of parameters"); | |
| 9096 | |
| 9097 static const ParserErrorCode MISSING_GET = const ParserErrorCode( | |
| 9098 'MISSING_GET', | |
| 9099 "Getters must have the keyword 'get' before the getter name"); | |
| 9100 | |
| 9101 static const ParserErrorCode MISSING_IDENTIFIER = | |
| 9102 const ParserErrorCode('MISSING_IDENTIFIER', "Expected an identifier"); | |
| 9103 | |
| 9104 static const ParserErrorCode MISSING_INITIALIZER = | |
| 9105 const ParserErrorCode('MISSING_INITIALIZER', "Expected an initializer"); | |
| 9106 | |
| 9107 static const ParserErrorCode MISSING_KEYWORD_OPERATOR = const ParserErrorCode( | |
| 9108 'MISSING_KEYWORD_OPERATOR', | |
| 9109 "Operator declarations must be preceeded by the keyword 'operator'"); | |
| 9110 | |
| 9111 static const ParserErrorCode MISSING_NAME_IN_LIBRARY_DIRECTIVE = | |
| 9112 const ParserErrorCode('MISSING_NAME_IN_LIBRARY_DIRECTIVE', | |
| 9113 "Library directives must include a library name"); | |
| 9114 | |
| 9115 static const ParserErrorCode MISSING_NAME_IN_PART_OF_DIRECTIVE = | |
| 9116 const ParserErrorCode('MISSING_NAME_IN_PART_OF_DIRECTIVE', | |
| 9117 "Library directives must include a library name"); | |
| 9118 | |
| 9119 static const ParserErrorCode MISSING_PREFIX_IN_DEFERRED_IMPORT = | |
| 9120 const ParserErrorCode('MISSING_PREFIX_IN_DEFERRED_IMPORT', | |
| 9121 "Deferred imports must have a prefix"); | |
| 9122 | |
| 9123 static const ParserErrorCode MISSING_STAR_AFTER_SYNC = const ParserErrorCode( | |
| 9124 'MISSING_STAR_AFTER_SYNC', | |
| 9125 "The modifier 'sync' must be followed by a star ('*')", | |
| 9126 "Remove the modifier or add a star."); | |
| 9127 | |
| 9128 static const ParserErrorCode MISSING_STATEMENT = | |
| 9129 const ParserErrorCode('MISSING_STATEMENT', "Expected a statement"); | |
| 9130 | |
| 9131 static const ParserErrorCode MISSING_TERMINATOR_FOR_PARAMETER_GROUP = | |
| 9132 const ParserErrorCode('MISSING_TERMINATOR_FOR_PARAMETER_GROUP', | |
| 9133 "There is no '{0}' to close the parameter group"); | |
| 9134 | |
| 9135 static const ParserErrorCode MISSING_TYPEDEF_PARAMETERS = | |
| 9136 const ParserErrorCode('MISSING_TYPEDEF_PARAMETERS', | |
| 9137 "Type aliases for functions must have an explicit list of parameters")
; | |
| 9138 | |
| 9139 static const ParserErrorCode MISSING_VARIABLE_IN_FOR_EACH = const ParserErrorC
ode( | |
| 9140 'MISSING_VARIABLE_IN_FOR_EACH', | |
| 9141 "A loop variable must be declared in a for-each loop before the 'in', but
none were found"); | |
| 9142 | |
| 9143 static const ParserErrorCode MIXED_PARAMETER_GROUPS = const ParserErrorCode( | |
| 9144 'MIXED_PARAMETER_GROUPS', | |
| 9145 "Cannot have both positional and named parameters in a single parameter li
st"); | |
| 9146 | |
| 9147 static const ParserErrorCode MULTIPLE_EXTENDS_CLAUSES = const ParserErrorCode( | |
| 9148 'MULTIPLE_EXTENDS_CLAUSES', | |
| 9149 "Each class definition can have at most one extends clause"); | |
| 9150 | |
| 9151 static const ParserErrorCode MULTIPLE_IMPLEMENTS_CLAUSES = | |
| 9152 const ParserErrorCode('MULTIPLE_IMPLEMENTS_CLAUSES', | |
| 9153 "Each class definition can have at most one implements clause"); | |
| 9154 | |
| 9155 static const ParserErrorCode MULTIPLE_LIBRARY_DIRECTIVES = | |
| 9156 const ParserErrorCode('MULTIPLE_LIBRARY_DIRECTIVES', | |
| 9157 "Only one library directive may be declared in a file"); | |
| 9158 | |
| 9159 static const ParserErrorCode MULTIPLE_NAMED_PARAMETER_GROUPS = | |
| 9160 const ParserErrorCode('MULTIPLE_NAMED_PARAMETER_GROUPS', | |
| 9161 "Cannot have multiple groups of named parameters in a single parameter
list"); | |
| 9162 | |
| 9163 static const ParserErrorCode MULTIPLE_PART_OF_DIRECTIVES = | |
| 9164 const ParserErrorCode('MULTIPLE_PART_OF_DIRECTIVES', | |
| 9165 "Only one part-of directive may be declared in a file"); | |
| 9166 | |
| 9167 static const ParserErrorCode MULTIPLE_POSITIONAL_PARAMETER_GROUPS = | |
| 9168 const ParserErrorCode('MULTIPLE_POSITIONAL_PARAMETER_GROUPS', | |
| 9169 "Cannot have multiple groups of positional parameters in a single para
meter list"); | |
| 9170 | |
| 9171 static const ParserErrorCode MULTIPLE_VARIABLES_IN_FOR_EACH = | |
| 9172 const ParserErrorCode('MULTIPLE_VARIABLES_IN_FOR_EACH', | |
| 9173 "A single loop variable must be declared in a for-each loop before the
'in', but {0} were found"); | |
| 9174 | |
| 9175 static const ParserErrorCode MULTIPLE_WITH_CLAUSES = const ParserErrorCode( | |
| 9176 'MULTIPLE_WITH_CLAUSES', | |
| 9177 "Each class definition can have at most one with clause"); | |
| 9178 | |
| 9179 static const ParserErrorCode NAMED_FUNCTION_EXPRESSION = | |
| 9180 const ParserErrorCode( | |
| 9181 'NAMED_FUNCTION_EXPRESSION', "Function expressions cannot be named"); | |
| 9182 | |
| 9183 static const ParserErrorCode NAMED_PARAMETER_OUTSIDE_GROUP = | |
| 9184 const ParserErrorCode('NAMED_PARAMETER_OUTSIDE_GROUP', | |
| 9185 "Named parameters must be enclosed in curly braces ('{' and '}')"); | |
| 9186 | |
| 9187 static const ParserErrorCode NATIVE_CLAUSE_IN_NON_SDK_CODE = | |
| 9188 const ParserErrorCode('NATIVE_CLAUSE_IN_NON_SDK_CODE', | |
| 9189 "Native clause can only be used in the SDK and code that is loaded thr
ough native extensions"); | |
| 9190 | |
| 9191 static const ParserErrorCode NATIVE_FUNCTION_BODY_IN_NON_SDK_CODE = | |
| 9192 const ParserErrorCode('NATIVE_FUNCTION_BODY_IN_NON_SDK_CODE', | |
| 9193 "Native functions can only be declared in the SDK and code that is loa
ded through native extensions"); | |
| 9194 | |
| 9195 static const ParserErrorCode NON_CONSTRUCTOR_FACTORY = const ParserErrorCode( | |
| 9196 'NON_CONSTRUCTOR_FACTORY', | |
| 9197 "Only constructors can be declared to be a 'factory'"); | |
| 9198 | |
| 9199 static const ParserErrorCode NON_IDENTIFIER_LIBRARY_NAME = | |
| 9200 const ParserErrorCode('NON_IDENTIFIER_LIBRARY_NAME', | |
| 9201 "The name of a library must be an identifier"); | |
| 9202 | |
| 9203 static const ParserErrorCode NON_PART_OF_DIRECTIVE_IN_PART = | |
| 9204 const ParserErrorCode('NON_PART_OF_DIRECTIVE_IN_PART', | |
| 9205 "The part-of directive must be the only directive in a part"); | |
| 9206 | |
| 9207 static const ParserErrorCode NON_STRING_LITERAL_AS_URI = | |
| 9208 const ParserErrorCode('NON_STRING_LITERAL_AS_URI', | |
| 9209 "The URI must be a string literal", | |
| 9210 "Enclose the URI in either single or double quotes."); | |
| 9211 | |
| 9212 static const ParserErrorCode NON_USER_DEFINABLE_OPERATOR = | |
| 9213 const ParserErrorCode('NON_USER_DEFINABLE_OPERATOR', | |
| 9214 "The operator '{0}' is not user definable"); | |
| 9215 | |
| 9216 static const ParserErrorCode NORMAL_BEFORE_OPTIONAL_PARAMETERS = | |
| 9217 const ParserErrorCode('NORMAL_BEFORE_OPTIONAL_PARAMETERS', | |
| 9218 "Normal parameters must occur before optional parameters"); | |
| 9219 | |
| 9220 static const ParserErrorCode POSITIONAL_AFTER_NAMED_ARGUMENT = | |
| 9221 const ParserErrorCode('POSITIONAL_AFTER_NAMED_ARGUMENT', | |
| 9222 "Positional arguments must occur before named arguments"); | |
| 9223 | |
| 9224 static const ParserErrorCode POSITIONAL_PARAMETER_OUTSIDE_GROUP = | |
| 9225 const ParserErrorCode('POSITIONAL_PARAMETER_OUTSIDE_GROUP', | |
| 9226 "Positional parameters must be enclosed in square brackets ('[' and ']
')"); | |
| 9227 | |
| 9228 static const ParserErrorCode REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR = | |
| 9229 const ParserErrorCode('REDIRECTION_IN_NON_FACTORY_CONSTRUCTOR', | |
| 9230 "Only factory constructor can specify '=' redirection."); | |
| 9231 | |
| 9232 static const ParserErrorCode SETTER_IN_FUNCTION = const ParserErrorCode( | |
| 9233 'SETTER_IN_FUNCTION', | |
| 9234 "Setters cannot be defined within methods or functions"); | |
| 9235 | |
| 9236 static const ParserErrorCode STATIC_AFTER_CONST = const ParserErrorCode( | |
| 9237 'STATIC_AFTER_CONST', | |
| 9238 "The modifier 'static' should be before the modifier 'const'"); | |
| 9239 | |
| 9240 static const ParserErrorCode STATIC_AFTER_FINAL = const ParserErrorCode( | |
| 9241 'STATIC_AFTER_FINAL', | |
| 9242 "The modifier 'static' should be before the modifier 'final'"); | |
| 9243 | |
| 9244 static const ParserErrorCode STATIC_AFTER_VAR = const ParserErrorCode( | |
| 9245 'STATIC_AFTER_VAR', | |
| 9246 "The modifier 'static' should be before the modifier 'var'"); | |
| 9247 | |
| 9248 static const ParserErrorCode STATIC_CONSTRUCTOR = const ParserErrorCode( | |
| 9249 'STATIC_CONSTRUCTOR', "Constructors cannot be static"); | |
| 9250 | |
| 9251 static const ParserErrorCode STATIC_GETTER_WITHOUT_BODY = | |
| 9252 const ParserErrorCode( | |
| 9253 'STATIC_GETTER_WITHOUT_BODY', "A 'static' getter must have a body"); | |
| 9254 | |
| 9255 static const ParserErrorCode STATIC_OPERATOR = | |
| 9256 const ParserErrorCode('STATIC_OPERATOR', "Operators cannot be static"); | |
| 9257 | |
| 9258 static const ParserErrorCode STATIC_SETTER_WITHOUT_BODY = | |
| 9259 const ParserErrorCode( | |
| 9260 'STATIC_SETTER_WITHOUT_BODY', "A 'static' setter must have a body"); | |
| 9261 | |
| 9262 static const ParserErrorCode STATIC_TOP_LEVEL_DECLARATION = | |
| 9263 const ParserErrorCode('STATIC_TOP_LEVEL_DECLARATION', | |
| 9264 "Top-level declarations cannot be declared to be 'static'"); | |
| 9265 | |
| 9266 static const ParserErrorCode SWITCH_HAS_CASE_AFTER_DEFAULT_CASE = | |
| 9267 const ParserErrorCode('SWITCH_HAS_CASE_AFTER_DEFAULT_CASE', | |
| 9268 "The 'default' case should be the last case in a switch statement"); | |
| 9269 | |
| 9270 static const ParserErrorCode SWITCH_HAS_MULTIPLE_DEFAULT_CASES = | |
| 9271 const ParserErrorCode('SWITCH_HAS_MULTIPLE_DEFAULT_CASES', | |
| 9272 "The 'default' case can only be declared once"); | |
| 9273 | |
| 9274 static const ParserErrorCode TOP_LEVEL_OPERATOR = const ParserErrorCode( | |
| 9275 'TOP_LEVEL_OPERATOR', "Operators must be declared within a class"); | |
| 9276 | |
| 9277 static const ParserErrorCode TYPEDEF_IN_CLASS = const ParserErrorCode( | |
| 9278 'TYPEDEF_IN_CLASS', | |
| 9279 "Function type aliases cannot be declared inside classes"); | |
| 9280 | |
| 9281 static const ParserErrorCode UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP = | |
| 9282 const ParserErrorCode('UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP', | |
| 9283 "There is no '{0}' to open a parameter group"); | |
| 9284 | |
| 9285 static const ParserErrorCode UNEXPECTED_TOKEN = | |
| 9286 const ParserErrorCode('UNEXPECTED_TOKEN', "Unexpected token '{0}'"); | |
| 9287 | |
| 9288 static const ParserErrorCode WITH_BEFORE_EXTENDS = const ParserErrorCode( | |
| 9289 'WITH_BEFORE_EXTENDS', | |
| 9290 "The extends clause must be before the with clause"); | |
| 9291 | |
| 9292 static const ParserErrorCode WITH_WITHOUT_EXTENDS = const ParserErrorCode( | |
| 9293 'WITH_WITHOUT_EXTENDS', | |
| 9294 "The with clause cannot be used without an extends clause"); | |
| 9295 | |
| 9296 static const ParserErrorCode WRONG_SEPARATOR_FOR_NAMED_PARAMETER = | |
| 9297 const ParserErrorCode('WRONG_SEPARATOR_FOR_NAMED_PARAMETER', | |
| 9298 "The default value of a named parameter should be preceeded by ':'"); | |
| 9299 | |
| 9300 static const ParserErrorCode WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER = | |
| 9301 const ParserErrorCode('WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER', | |
| 9302 "The default value of a positional parameter should be preceeded by '=
'"); | |
| 9303 | |
| 9304 static const ParserErrorCode WRONG_TERMINATOR_FOR_PARAMETER_GROUP = | |
| 9305 const ParserErrorCode('WRONG_TERMINATOR_FOR_PARAMETER_GROUP', | |
| 9306 "Expected '{0}' to close parameter group"); | |
| 9307 | |
| 9308 static const ParserErrorCode VAR_AND_TYPE = const ParserErrorCode( | |
| 9309 'VAR_AND_TYPE', | |
| 9310 "Variables cannot be declared using both 'var' and a type name; remove the
'var'"); | |
| 9311 | |
| 9312 static const ParserErrorCode VAR_AS_TYPE_NAME = const ParserErrorCode( | |
| 9313 'VAR_AS_TYPE_NAME', "The keyword 'var' cannot be used as a type name"); | |
| 9314 | |
| 9315 static const ParserErrorCode VAR_CLASS = const ParserErrorCode( | |
| 9316 'VAR_CLASS', "Classes cannot be declared to be 'var'"); | |
| 9317 | |
| 9318 static const ParserErrorCode VAR_ENUM = | |
| 9319 const ParserErrorCode('VAR_ENUM', "Enums cannot be declared to be 'var'"); | |
| 9320 | |
| 9321 static const ParserErrorCode VAR_RETURN_TYPE = const ParserErrorCode( | |
| 9322 'VAR_RETURN_TYPE', "The return type cannot be 'var'"); | |
| 9323 | |
| 9324 static const ParserErrorCode VAR_TYPEDEF = const ParserErrorCode( | |
| 9325 'VAR_TYPEDEF', "Type aliases cannot be declared to be 'var'"); | |
| 9326 | |
| 9327 static const ParserErrorCode VOID_PARAMETER = const ParserErrorCode( | |
| 9328 'VOID_PARAMETER', "Parameters cannot have a type of 'void'"); | |
| 9329 | |
| 9330 static const ParserErrorCode VOID_VARIABLE = const ParserErrorCode( | |
| 9331 'VOID_VARIABLE', "Variables cannot have a type of 'void'"); | |
| 9332 | |
| 9333 /** | |
| 9334 * Initialize a newly created error code to have the given [name]. The message | |
| 9335 * associated with the error will be created from the given [message] | |
| 9336 * template. The correction associated with the error will be created from the | |
| 9337 * given [correction] template. | |
| 9338 */ | |
| 9339 const ParserErrorCode(String name, String message, [String correction]) | |
| 9340 : super(name, message, correction); | |
| 9341 | |
| 9342 @override | |
| 9343 ErrorSeverity get errorSeverity => ErrorSeverity.ERROR; | |
| 9344 | |
| 9345 @override | |
| 9346 ErrorType get type => ErrorType.SYNTACTIC_ERROR; | |
| 9347 } | |
| 9348 | |
| 9349 /** | |
| 9350 * An object that copies resolution information from one AST structure to | |
| 9351 * another as long as the structures of the corresponding children of a pair of | |
| 9352 * nodes are the same. | |
| 9353 */ | |
| 9354 class ResolutionCopier implements AstVisitor<bool> { | |
| 9355 /** | |
| 9356 * The AST node with which the node being visited is to be compared. This is | |
| 9357 * only valid at the beginning of each visit method (until [isEqualNodes] is | |
| 9358 * invoked). | |
| 9359 */ | |
| 9360 AstNode _toNode; | |
| 9361 | |
| 9362 @override | |
| 9363 bool visitAdjacentStrings(AdjacentStrings node) { | |
| 9364 AdjacentStrings toNode = this._toNode as AdjacentStrings; | |
| 9365 return _isEqualNodeLists(node.strings, toNode.strings); | |
| 9366 } | |
| 9367 | |
| 9368 @override | |
| 9369 bool visitAnnotation(Annotation node) { | |
| 9370 Annotation toNode = this._toNode as Annotation; | |
| 9371 if (_and(_isEqualTokens(node.atSign, toNode.atSign), | |
| 9372 _isEqualNodes(node.name, toNode.name), | |
| 9373 _isEqualTokens(node.period, toNode.period), | |
| 9374 _isEqualNodes(node.constructorName, toNode.constructorName), | |
| 9375 _isEqualNodes(node.arguments, toNode.arguments))) { | |
| 9376 toNode.element = node.element; | |
| 9377 return true; | |
| 9378 } | |
| 9379 return false; | |
| 9380 } | |
| 9381 | |
| 9382 @override | |
| 9383 bool visitArgumentList(ArgumentList node) { | |
| 9384 ArgumentList toNode = this._toNode as ArgumentList; | |
| 9385 return _and(_isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9386 _isEqualNodeLists(node.arguments, toNode.arguments), | |
| 9387 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis)); | |
| 9388 } | |
| 9389 | |
| 9390 @override | |
| 9391 bool visitAsExpression(AsExpression node) { | |
| 9392 AsExpression toNode = this._toNode as AsExpression; | |
| 9393 if (_and(_isEqualNodes(node.expression, toNode.expression), | |
| 9394 _isEqualTokens(node.asOperator, toNode.asOperator), | |
| 9395 _isEqualNodes(node.type, toNode.type))) { | |
| 9396 toNode.propagatedType = node.propagatedType; | |
| 9397 toNode.staticType = node.staticType; | |
| 9398 return true; | |
| 9399 } | |
| 9400 return false; | |
| 9401 } | |
| 9402 | |
| 9403 @override | |
| 9404 bool visitAssertStatement(AssertStatement node) { | |
| 9405 AssertStatement toNode = this._toNode as AssertStatement; | |
| 9406 return _and(_isEqualTokens(node.assertKeyword, toNode.assertKeyword), | |
| 9407 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9408 _isEqualNodes(node.condition, toNode.condition), | |
| 9409 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 9410 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9411 } | |
| 9412 | |
| 9413 @override | |
| 9414 bool visitAssignmentExpression(AssignmentExpression node) { | |
| 9415 AssignmentExpression toNode = this._toNode as AssignmentExpression; | |
| 9416 if (_and(_isEqualNodes(node.leftHandSide, toNode.leftHandSide), | |
| 9417 _isEqualTokens(node.operator, toNode.operator), | |
| 9418 _isEqualNodes(node.rightHandSide, toNode.rightHandSide))) { | |
| 9419 toNode.propagatedElement = node.propagatedElement; | |
| 9420 toNode.propagatedType = node.propagatedType; | |
| 9421 toNode.staticElement = node.staticElement; | |
| 9422 toNode.staticType = node.staticType; | |
| 9423 return true; | |
| 9424 } | |
| 9425 return false; | |
| 9426 } | |
| 9427 | |
| 9428 @override | |
| 9429 bool visitAwaitExpression(AwaitExpression node) { | |
| 9430 AwaitExpression toNode = this._toNode as AwaitExpression; | |
| 9431 return _and(_isEqualTokens(node.awaitKeyword, toNode.awaitKeyword), | |
| 9432 _isEqualNodes(node.expression, toNode.expression)); | |
| 9433 } | |
| 9434 | |
| 9435 @override | |
| 9436 bool visitBinaryExpression(BinaryExpression node) { | |
| 9437 BinaryExpression toNode = this._toNode as BinaryExpression; | |
| 9438 if (_and(_isEqualNodes(node.leftOperand, toNode.leftOperand), | |
| 9439 _isEqualTokens(node.operator, toNode.operator), | |
| 9440 _isEqualNodes(node.rightOperand, toNode.rightOperand))) { | |
| 9441 toNode.propagatedElement = node.propagatedElement; | |
| 9442 toNode.propagatedType = node.propagatedType; | |
| 9443 toNode.staticElement = node.staticElement; | |
| 9444 toNode.staticType = node.staticType; | |
| 9445 return true; | |
| 9446 } | |
| 9447 return false; | |
| 9448 } | |
| 9449 | |
| 9450 @override | |
| 9451 bool visitBlock(Block node) { | |
| 9452 Block toNode = this._toNode as Block; | |
| 9453 return _and(_isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 9454 _isEqualNodeLists(node.statements, toNode.statements), | |
| 9455 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 9456 } | |
| 9457 | |
| 9458 @override | |
| 9459 bool visitBlockFunctionBody(BlockFunctionBody node) { | |
| 9460 BlockFunctionBody toNode = this._toNode as BlockFunctionBody; | |
| 9461 return _isEqualNodes(node.block, toNode.block); | |
| 9462 } | |
| 9463 | |
| 9464 @override | |
| 9465 bool visitBooleanLiteral(BooleanLiteral node) { | |
| 9466 BooleanLiteral toNode = this._toNode as BooleanLiteral; | |
| 9467 if (_and(_isEqualTokens(node.literal, toNode.literal), | |
| 9468 node.value == toNode.value)) { | |
| 9469 toNode.propagatedType = node.propagatedType; | |
| 9470 toNode.staticType = node.staticType; | |
| 9471 return true; | |
| 9472 } | |
| 9473 return false; | |
| 9474 } | |
| 9475 | |
| 9476 @override | |
| 9477 bool visitBreakStatement(BreakStatement node) { | |
| 9478 BreakStatement toNode = this._toNode as BreakStatement; | |
| 9479 if (_and(_isEqualTokens(node.breakKeyword, toNode.breakKeyword), | |
| 9480 _isEqualNodes(node.label, toNode.label), | |
| 9481 _isEqualTokens(node.semicolon, toNode.semicolon))) { | |
| 9482 // TODO(paulberry): map node.target to toNode.target. | |
| 9483 return true; | |
| 9484 } | |
| 9485 return false; | |
| 9486 } | |
| 9487 | |
| 9488 @override | |
| 9489 bool visitCascadeExpression(CascadeExpression node) { | |
| 9490 CascadeExpression toNode = this._toNode as CascadeExpression; | |
| 9491 if (_and(_isEqualNodes(node.target, toNode.target), | |
| 9492 _isEqualNodeLists(node.cascadeSections, toNode.cascadeSections))) { | |
| 9493 toNode.propagatedType = node.propagatedType; | |
| 9494 toNode.staticType = node.staticType; | |
| 9495 return true; | |
| 9496 } | |
| 9497 return false; | |
| 9498 } | |
| 9499 | |
| 9500 @override | |
| 9501 bool visitCatchClause(CatchClause node) { | |
| 9502 CatchClause toNode = this._toNode as CatchClause; | |
| 9503 return _and(_isEqualTokens(node.onKeyword, toNode.onKeyword), | |
| 9504 _isEqualNodes(node.exceptionType, toNode.exceptionType), | |
| 9505 _isEqualTokens(node.catchKeyword, toNode.catchKeyword), | |
| 9506 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9507 _isEqualNodes(node.exceptionParameter, toNode.exceptionParameter), | |
| 9508 _isEqualTokens(node.comma, toNode.comma), | |
| 9509 _isEqualNodes(node.stackTraceParameter, toNode.stackTraceParameter), | |
| 9510 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 9511 _isEqualNodes(node.body, toNode.body)); | |
| 9512 } | |
| 9513 | |
| 9514 @override | |
| 9515 bool visitClassDeclaration(ClassDeclaration node) { | |
| 9516 ClassDeclaration toNode = this._toNode as ClassDeclaration; | |
| 9517 return _and( | |
| 9518 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9519 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9520 _isEqualTokens(node.abstractKeyword, toNode.abstractKeyword), | |
| 9521 _isEqualTokens(node.classKeyword, toNode.classKeyword), | |
| 9522 _isEqualNodes(node.name, toNode.name), | |
| 9523 _isEqualNodes(node.typeParameters, toNode.typeParameters), | |
| 9524 _isEqualNodes(node.extendsClause, toNode.extendsClause), | |
| 9525 _isEqualNodes(node.withClause, toNode.withClause), | |
| 9526 _isEqualNodes(node.implementsClause, toNode.implementsClause), | |
| 9527 _isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 9528 _isEqualNodeLists(node.members, toNode.members), | |
| 9529 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 9530 } | |
| 9531 | |
| 9532 @override | |
| 9533 bool visitClassTypeAlias(ClassTypeAlias node) { | |
| 9534 ClassTypeAlias toNode = this._toNode as ClassTypeAlias; | |
| 9535 return _and( | |
| 9536 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9537 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9538 _isEqualTokens(node.typedefKeyword, toNode.typedefKeyword), | |
| 9539 _isEqualNodes(node.name, toNode.name), | |
| 9540 _isEqualNodes(node.typeParameters, toNode.typeParameters), | |
| 9541 _isEqualTokens(node.equals, toNode.equals), | |
| 9542 _isEqualTokens(node.abstractKeyword, toNode.abstractKeyword), | |
| 9543 _isEqualNodes(node.superclass, toNode.superclass), | |
| 9544 _isEqualNodes(node.withClause, toNode.withClause), | |
| 9545 _isEqualNodes(node.implementsClause, toNode.implementsClause), | |
| 9546 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9547 } | |
| 9548 | |
| 9549 @override | |
| 9550 bool visitComment(Comment node) { | |
| 9551 Comment toNode = this._toNode as Comment; | |
| 9552 return _isEqualNodeLists(node.references, toNode.references); | |
| 9553 } | |
| 9554 | |
| 9555 @override | |
| 9556 bool visitCommentReference(CommentReference node) { | |
| 9557 CommentReference toNode = this._toNode as CommentReference; | |
| 9558 return _and(_isEqualTokens(node.newKeyword, toNode.newKeyword), | |
| 9559 _isEqualNodes(node.identifier, toNode.identifier)); | |
| 9560 } | |
| 9561 | |
| 9562 @override | |
| 9563 bool visitCompilationUnit(CompilationUnit node) { | |
| 9564 CompilationUnit toNode = this._toNode as CompilationUnit; | |
| 9565 if (_and(_isEqualTokens(node.beginToken, toNode.beginToken), | |
| 9566 _isEqualNodes(node.scriptTag, toNode.scriptTag), | |
| 9567 _isEqualNodeLists(node.directives, toNode.directives), | |
| 9568 _isEqualNodeLists(node.declarations, toNode.declarations), | |
| 9569 _isEqualTokens(node.endToken, toNode.endToken))) { | |
| 9570 toNode.element = node.element; | |
| 9571 return true; | |
| 9572 } | |
| 9573 return false; | |
| 9574 } | |
| 9575 | |
| 9576 @override | |
| 9577 bool visitConditionalExpression(ConditionalExpression node) { | |
| 9578 ConditionalExpression toNode = this._toNode as ConditionalExpression; | |
| 9579 if (_and(_isEqualNodes(node.condition, toNode.condition), | |
| 9580 _isEqualTokens(node.question, toNode.question), | |
| 9581 _isEqualNodes(node.thenExpression, toNode.thenExpression), | |
| 9582 _isEqualTokens(node.colon, toNode.colon), | |
| 9583 _isEqualNodes(node.elseExpression, toNode.elseExpression))) { | |
| 9584 toNode.propagatedType = node.propagatedType; | |
| 9585 toNode.staticType = node.staticType; | |
| 9586 return true; | |
| 9587 } | |
| 9588 return false; | |
| 9589 } | |
| 9590 | |
| 9591 @override | |
| 9592 bool visitConstructorDeclaration(ConstructorDeclaration node) { | |
| 9593 ConstructorDeclaration toNode = this._toNode as ConstructorDeclaration; | |
| 9594 if (_and( | |
| 9595 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9596 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9597 _isEqualTokens(node.externalKeyword, toNode.externalKeyword), | |
| 9598 _isEqualTokens(node.constKeyword, toNode.constKeyword), | |
| 9599 _isEqualTokens(node.factoryKeyword, toNode.factoryKeyword), | |
| 9600 _isEqualNodes(node.returnType, toNode.returnType), | |
| 9601 _isEqualTokens(node.period, toNode.period), | |
| 9602 _isEqualNodes(node.name, toNode.name), | |
| 9603 _isEqualNodes(node.parameters, toNode.parameters), | |
| 9604 _isEqualTokens(node.separator, toNode.separator), | |
| 9605 _isEqualNodeLists(node.initializers, toNode.initializers), | |
| 9606 _isEqualNodes(node.redirectedConstructor, toNode.redirectedConstructor), | |
| 9607 _isEqualNodes(node.body, toNode.body))) { | |
| 9608 toNode.element = node.element; | |
| 9609 return true; | |
| 9610 } | |
| 9611 return false; | |
| 9612 } | |
| 9613 | |
| 9614 @override | |
| 9615 bool visitConstructorFieldInitializer(ConstructorFieldInitializer node) { | |
| 9616 ConstructorFieldInitializer toNode = | |
| 9617 this._toNode as ConstructorFieldInitializer; | |
| 9618 return _and(_isEqualTokens(node.thisKeyword, toNode.thisKeyword), | |
| 9619 _isEqualTokens(node.period, toNode.period), | |
| 9620 _isEqualNodes(node.fieldName, toNode.fieldName), | |
| 9621 _isEqualTokens(node.equals, toNode.equals), | |
| 9622 _isEqualNodes(node.expression, toNode.expression)); | |
| 9623 } | |
| 9624 | |
| 9625 @override | |
| 9626 bool visitConstructorName(ConstructorName node) { | |
| 9627 ConstructorName toNode = this._toNode as ConstructorName; | |
| 9628 if (_and(_isEqualNodes(node.type, toNode.type), | |
| 9629 _isEqualTokens(node.period, toNode.period), | |
| 9630 _isEqualNodes(node.name, toNode.name))) { | |
| 9631 toNode.staticElement = node.staticElement; | |
| 9632 return true; | |
| 9633 } | |
| 9634 return false; | |
| 9635 } | |
| 9636 | |
| 9637 @override | |
| 9638 bool visitContinueStatement(ContinueStatement node) { | |
| 9639 ContinueStatement toNode = this._toNode as ContinueStatement; | |
| 9640 if (_and(_isEqualTokens(node.continueKeyword, toNode.continueKeyword), | |
| 9641 _isEqualNodes(node.label, toNode.label), | |
| 9642 _isEqualTokens(node.semicolon, toNode.semicolon))) { | |
| 9643 // TODO(paulberry): map node.target to toNode.target. | |
| 9644 return true; | |
| 9645 } | |
| 9646 return false; | |
| 9647 } | |
| 9648 | |
| 9649 @override | |
| 9650 bool visitDeclaredIdentifier(DeclaredIdentifier node) { | |
| 9651 DeclaredIdentifier toNode = this._toNode as DeclaredIdentifier; | |
| 9652 return _and( | |
| 9653 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9654 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9655 _isEqualTokens(node.keyword, toNode.keyword), | |
| 9656 _isEqualNodes(node.type, toNode.type), | |
| 9657 _isEqualNodes(node.identifier, toNode.identifier)); | |
| 9658 } | |
| 9659 | |
| 9660 @override | |
| 9661 bool visitDefaultFormalParameter(DefaultFormalParameter node) { | |
| 9662 DefaultFormalParameter toNode = this._toNode as DefaultFormalParameter; | |
| 9663 return _and(_isEqualNodes(node.parameter, toNode.parameter), | |
| 9664 node.kind == toNode.kind, | |
| 9665 _isEqualTokens(node.separator, toNode.separator), | |
| 9666 _isEqualNodes(node.defaultValue, toNode.defaultValue)); | |
| 9667 } | |
| 9668 | |
| 9669 @override | |
| 9670 bool visitDoStatement(DoStatement node) { | |
| 9671 DoStatement toNode = this._toNode as DoStatement; | |
| 9672 return _and(_isEqualTokens(node.doKeyword, toNode.doKeyword), | |
| 9673 _isEqualNodes(node.body, toNode.body), | |
| 9674 _isEqualTokens(node.whileKeyword, toNode.whileKeyword), | |
| 9675 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9676 _isEqualNodes(node.condition, toNode.condition), | |
| 9677 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 9678 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9679 } | |
| 9680 | |
| 9681 @override | |
| 9682 bool visitDoubleLiteral(DoubleLiteral node) { | |
| 9683 DoubleLiteral toNode = this._toNode as DoubleLiteral; | |
| 9684 if (_and(_isEqualTokens(node.literal, toNode.literal), | |
| 9685 node.value == toNode.value)) { | |
| 9686 toNode.propagatedType = node.propagatedType; | |
| 9687 toNode.staticType = node.staticType; | |
| 9688 return true; | |
| 9689 } | |
| 9690 return false; | |
| 9691 } | |
| 9692 | |
| 9693 @override | |
| 9694 bool visitEmptyFunctionBody(EmptyFunctionBody node) { | |
| 9695 EmptyFunctionBody toNode = this._toNode as EmptyFunctionBody; | |
| 9696 return _isEqualTokens(node.semicolon, toNode.semicolon); | |
| 9697 } | |
| 9698 | |
| 9699 @override | |
| 9700 bool visitEmptyStatement(EmptyStatement node) { | |
| 9701 EmptyStatement toNode = this._toNode as EmptyStatement; | |
| 9702 return _isEqualTokens(node.semicolon, toNode.semicolon); | |
| 9703 } | |
| 9704 | |
| 9705 @override | |
| 9706 bool visitEnumConstantDeclaration(EnumConstantDeclaration node) { | |
| 9707 EnumConstantDeclaration toNode = this._toNode as EnumConstantDeclaration; | |
| 9708 return _and( | |
| 9709 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9710 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9711 _isEqualNodes(node.name, toNode.name)); | |
| 9712 } | |
| 9713 | |
| 9714 @override | |
| 9715 bool visitEnumDeclaration(EnumDeclaration node) { | |
| 9716 EnumDeclaration toNode = this._toNode as EnumDeclaration; | |
| 9717 return _and( | |
| 9718 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9719 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9720 _isEqualTokens(node.enumKeyword, toNode.enumKeyword), | |
| 9721 _isEqualNodes(node.name, toNode.name), | |
| 9722 _isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 9723 _isEqualNodeLists(node.constants, toNode.constants), | |
| 9724 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 9725 } | |
| 9726 | |
| 9727 @override | |
| 9728 bool visitExportDirective(ExportDirective node) { | |
| 9729 ExportDirective toNode = this._toNode as ExportDirective; | |
| 9730 if (_and( | |
| 9731 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9732 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9733 _isEqualTokens(node.keyword, toNode.keyword), | |
| 9734 _isEqualNodes(node.uri, toNode.uri), | |
| 9735 _isEqualNodeLists(node.combinators, toNode.combinators), | |
| 9736 _isEqualTokens(node.semicolon, toNode.semicolon))) { | |
| 9737 toNode.element = node.element; | |
| 9738 return true; | |
| 9739 } | |
| 9740 return false; | |
| 9741 } | |
| 9742 | |
| 9743 @override | |
| 9744 bool visitExpressionFunctionBody(ExpressionFunctionBody node) { | |
| 9745 ExpressionFunctionBody toNode = this._toNode as ExpressionFunctionBody; | |
| 9746 return _and( | |
| 9747 _isEqualTokens(node.functionDefinition, toNode.functionDefinition), | |
| 9748 _isEqualNodes(node.expression, toNode.expression), | |
| 9749 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9750 } | |
| 9751 | |
| 9752 @override | |
| 9753 bool visitExpressionStatement(ExpressionStatement node) { | |
| 9754 ExpressionStatement toNode = this._toNode as ExpressionStatement; | |
| 9755 return _and(_isEqualNodes(node.expression, toNode.expression), | |
| 9756 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9757 } | |
| 9758 | |
| 9759 @override | |
| 9760 bool visitExtendsClause(ExtendsClause node) { | |
| 9761 ExtendsClause toNode = this._toNode as ExtendsClause; | |
| 9762 return _and(_isEqualTokens(node.extendsKeyword, toNode.extendsKeyword), | |
| 9763 _isEqualNodes(node.superclass, toNode.superclass)); | |
| 9764 } | |
| 9765 | |
| 9766 @override | |
| 9767 bool visitFieldDeclaration(FieldDeclaration node) { | |
| 9768 FieldDeclaration toNode = this._toNode as FieldDeclaration; | |
| 9769 return _and( | |
| 9770 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9771 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9772 _isEqualTokens(node.staticKeyword, toNode.staticKeyword), | |
| 9773 _isEqualNodes(node.fields, toNode.fields), | |
| 9774 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9775 } | |
| 9776 | |
| 9777 @override | |
| 9778 bool visitFieldFormalParameter(FieldFormalParameter node) { | |
| 9779 FieldFormalParameter toNode = this._toNode as FieldFormalParameter; | |
| 9780 return _and( | |
| 9781 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9782 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9783 _isEqualTokens(node.keyword, toNode.keyword), | |
| 9784 _isEqualNodes(node.type, toNode.type), | |
| 9785 _isEqualTokens(node.thisKeyword, toNode.thisKeyword), | |
| 9786 _isEqualTokens(node.period, toNode.period), | |
| 9787 _isEqualNodes(node.identifier, toNode.identifier)); | |
| 9788 } | |
| 9789 | |
| 9790 @override | |
| 9791 bool visitForEachStatement(ForEachStatement node) { | |
| 9792 ForEachStatement toNode = this._toNode as ForEachStatement; | |
| 9793 return _and(_isEqualTokens(node.forKeyword, toNode.forKeyword), | |
| 9794 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9795 _isEqualNodes(node.loopVariable, toNode.loopVariable), | |
| 9796 _isEqualTokens(node.inKeyword, toNode.inKeyword), | |
| 9797 _isEqualNodes(node.iterable, toNode.iterable), | |
| 9798 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 9799 _isEqualNodes(node.body, toNode.body)); | |
| 9800 } | |
| 9801 | |
| 9802 @override | |
| 9803 bool visitFormalParameterList(FormalParameterList node) { | |
| 9804 FormalParameterList toNode = this._toNode as FormalParameterList; | |
| 9805 return _and(_isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9806 _isEqualNodeLists(node.parameters, toNode.parameters), | |
| 9807 _isEqualTokens(node.leftDelimiter, toNode.leftDelimiter), | |
| 9808 _isEqualTokens(node.rightDelimiter, toNode.rightDelimiter), | |
| 9809 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis)); | |
| 9810 } | |
| 9811 | |
| 9812 @override | |
| 9813 bool visitForStatement(ForStatement node) { | |
| 9814 ForStatement toNode = this._toNode as ForStatement; | |
| 9815 return _and(_isEqualTokens(node.forKeyword, toNode.forKeyword), | |
| 9816 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9817 _isEqualNodes(node.variables, toNode.variables), | |
| 9818 _isEqualNodes(node.initialization, toNode.initialization), | |
| 9819 _isEqualTokens(node.leftSeparator, toNode.leftSeparator), | |
| 9820 _isEqualNodes(node.condition, toNode.condition), | |
| 9821 _isEqualTokens(node.rightSeparator, toNode.rightSeparator), | |
| 9822 _isEqualNodeLists(node.updaters, toNode.updaters), | |
| 9823 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 9824 _isEqualNodes(node.body, toNode.body)); | |
| 9825 } | |
| 9826 | |
| 9827 @override | |
| 9828 bool visitFunctionDeclaration(FunctionDeclaration node) { | |
| 9829 FunctionDeclaration toNode = this._toNode as FunctionDeclaration; | |
| 9830 return _and( | |
| 9831 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9832 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9833 _isEqualTokens(node.externalKeyword, toNode.externalKeyword), | |
| 9834 _isEqualNodes(node.returnType, toNode.returnType), | |
| 9835 _isEqualTokens(node.propertyKeyword, toNode.propertyKeyword), | |
| 9836 _isEqualNodes(node.name, toNode.name), | |
| 9837 _isEqualNodes(node.functionExpression, toNode.functionExpression)); | |
| 9838 } | |
| 9839 | |
| 9840 @override | |
| 9841 bool visitFunctionDeclarationStatement(FunctionDeclarationStatement node) { | |
| 9842 FunctionDeclarationStatement toNode = | |
| 9843 this._toNode as FunctionDeclarationStatement; | |
| 9844 return _isEqualNodes(node.functionDeclaration, toNode.functionDeclaration); | |
| 9845 } | |
| 9846 | |
| 9847 @override | |
| 9848 bool visitFunctionExpression(FunctionExpression node) { | |
| 9849 FunctionExpression toNode = this._toNode as FunctionExpression; | |
| 9850 if (_and(_isEqualNodes(node.parameters, toNode.parameters), | |
| 9851 _isEqualNodes(node.body, toNode.body))) { | |
| 9852 toNode.element = node.element; | |
| 9853 toNode.propagatedType = node.propagatedType; | |
| 9854 toNode.staticType = node.staticType; | |
| 9855 return true; | |
| 9856 } | |
| 9857 return false; | |
| 9858 } | |
| 9859 | |
| 9860 @override | |
| 9861 bool visitFunctionExpressionInvocation(FunctionExpressionInvocation node) { | |
| 9862 FunctionExpressionInvocation toNode = | |
| 9863 this._toNode as FunctionExpressionInvocation; | |
| 9864 if (_and(_isEqualNodes(node.function, toNode.function), | |
| 9865 _isEqualNodes(node.argumentList, toNode.argumentList))) { | |
| 9866 toNode.propagatedElement = node.propagatedElement; | |
| 9867 toNode.propagatedType = node.propagatedType; | |
| 9868 toNode.staticElement = node.staticElement; | |
| 9869 toNode.staticType = node.staticType; | |
| 9870 return true; | |
| 9871 } | |
| 9872 return false; | |
| 9873 } | |
| 9874 | |
| 9875 @override | |
| 9876 bool visitFunctionTypeAlias(FunctionTypeAlias node) { | |
| 9877 FunctionTypeAlias toNode = this._toNode as FunctionTypeAlias; | |
| 9878 return _and( | |
| 9879 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9880 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9881 _isEqualTokens(node.typedefKeyword, toNode.typedefKeyword), | |
| 9882 _isEqualNodes(node.returnType, toNode.returnType), | |
| 9883 _isEqualNodes(node.name, toNode.name), | |
| 9884 _isEqualNodes(node.typeParameters, toNode.typeParameters), | |
| 9885 _isEqualNodes(node.parameters, toNode.parameters), | |
| 9886 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 9887 } | |
| 9888 | |
| 9889 @override | |
| 9890 bool visitFunctionTypedFormalParameter(FunctionTypedFormalParameter node) { | |
| 9891 FunctionTypedFormalParameter toNode = | |
| 9892 this._toNode as FunctionTypedFormalParameter; | |
| 9893 return _and( | |
| 9894 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9895 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9896 _isEqualNodes(node.returnType, toNode.returnType), | |
| 9897 _isEqualNodes(node.identifier, toNode.identifier), | |
| 9898 _isEqualNodes(node.parameters, toNode.parameters)); | |
| 9899 } | |
| 9900 | |
| 9901 @override | |
| 9902 bool visitHideCombinator(HideCombinator node) { | |
| 9903 HideCombinator toNode = this._toNode as HideCombinator; | |
| 9904 return _and(_isEqualTokens(node.keyword, toNode.keyword), | |
| 9905 _isEqualNodeLists(node.hiddenNames, toNode.hiddenNames)); | |
| 9906 } | |
| 9907 | |
| 9908 @override | |
| 9909 bool visitIfStatement(IfStatement node) { | |
| 9910 IfStatement toNode = this._toNode as IfStatement; | |
| 9911 return _and(_isEqualTokens(node.ifKeyword, toNode.ifKeyword), | |
| 9912 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 9913 _isEqualNodes(node.condition, toNode.condition), | |
| 9914 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 9915 _isEqualNodes(node.thenStatement, toNode.thenStatement), | |
| 9916 _isEqualTokens(node.elseKeyword, toNode.elseKeyword), | |
| 9917 _isEqualNodes(node.elseStatement, toNode.elseStatement)); | |
| 9918 } | |
| 9919 | |
| 9920 @override | |
| 9921 bool visitImplementsClause(ImplementsClause node) { | |
| 9922 ImplementsClause toNode = this._toNode as ImplementsClause; | |
| 9923 return _and( | |
| 9924 _isEqualTokens(node.implementsKeyword, toNode.implementsKeyword), | |
| 9925 _isEqualNodeLists(node.interfaces, toNode.interfaces)); | |
| 9926 } | |
| 9927 | |
| 9928 @override | |
| 9929 bool visitImportDirective(ImportDirective node) { | |
| 9930 ImportDirective toNode = this._toNode as ImportDirective; | |
| 9931 if (_and( | |
| 9932 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 9933 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 9934 _isEqualTokens(node.keyword, toNode.keyword), | |
| 9935 _isEqualNodes(node.uri, toNode.uri), | |
| 9936 _isEqualTokens(node.asKeyword, toNode.asKeyword), | |
| 9937 _isEqualNodes(node.prefix, toNode.prefix), | |
| 9938 _isEqualNodeLists(node.combinators, toNode.combinators), | |
| 9939 _isEqualTokens(node.semicolon, toNode.semicolon))) { | |
| 9940 toNode.element = node.element; | |
| 9941 return true; | |
| 9942 } | |
| 9943 return false; | |
| 9944 } | |
| 9945 | |
| 9946 @override | |
| 9947 bool visitIndexExpression(IndexExpression node) { | |
| 9948 IndexExpression toNode = this._toNode as IndexExpression; | |
| 9949 if (_and(_isEqualNodes(node.target, toNode.target), | |
| 9950 _isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 9951 _isEqualNodes(node.index, toNode.index), | |
| 9952 _isEqualTokens(node.rightBracket, toNode.rightBracket))) { | |
| 9953 toNode.auxiliaryElements = node.auxiliaryElements; | |
| 9954 toNode.propagatedElement = node.propagatedElement; | |
| 9955 toNode.propagatedType = node.propagatedType; | |
| 9956 toNode.staticElement = node.staticElement; | |
| 9957 toNode.staticType = node.staticType; | |
| 9958 return true; | |
| 9959 } | |
| 9960 return false; | |
| 9961 } | |
| 9962 | |
| 9963 @override | |
| 9964 bool visitInstanceCreationExpression(InstanceCreationExpression node) { | |
| 9965 InstanceCreationExpression toNode = | |
| 9966 this._toNode as InstanceCreationExpression; | |
| 9967 if (_and(_isEqualTokens(node.keyword, toNode.keyword), | |
| 9968 _isEqualNodes(node.constructorName, toNode.constructorName), | |
| 9969 _isEqualNodes(node.argumentList, toNode.argumentList))) { | |
| 9970 toNode.propagatedType = node.propagatedType; | |
| 9971 toNode.staticElement = node.staticElement; | |
| 9972 toNode.staticType = node.staticType; | |
| 9973 return true; | |
| 9974 } | |
| 9975 return false; | |
| 9976 } | |
| 9977 | |
| 9978 @override | |
| 9979 bool visitIntegerLiteral(IntegerLiteral node) { | |
| 9980 IntegerLiteral toNode = this._toNode as IntegerLiteral; | |
| 9981 if (_and(_isEqualTokens(node.literal, toNode.literal), | |
| 9982 node.value == toNode.value)) { | |
| 9983 toNode.propagatedType = node.propagatedType; | |
| 9984 toNode.staticType = node.staticType; | |
| 9985 return true; | |
| 9986 } | |
| 9987 return false; | |
| 9988 } | |
| 9989 | |
| 9990 @override | |
| 9991 bool visitInterpolationExpression(InterpolationExpression node) { | |
| 9992 InterpolationExpression toNode = this._toNode as InterpolationExpression; | |
| 9993 return _and(_isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 9994 _isEqualNodes(node.expression, toNode.expression), | |
| 9995 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 9996 } | |
| 9997 | |
| 9998 @override | |
| 9999 bool visitInterpolationString(InterpolationString node) { | |
| 10000 InterpolationString toNode = this._toNode as InterpolationString; | |
| 10001 return _and(_isEqualTokens(node.contents, toNode.contents), | |
| 10002 node.value == toNode.value); | |
| 10003 } | |
| 10004 | |
| 10005 @override | |
| 10006 bool visitIsExpression(IsExpression node) { | |
| 10007 IsExpression toNode = this._toNode as IsExpression; | |
| 10008 if (_and(_isEqualNodes(node.expression, toNode.expression), | |
| 10009 _isEqualTokens(node.isOperator, toNode.isOperator), | |
| 10010 _isEqualTokens(node.notOperator, toNode.notOperator), | |
| 10011 _isEqualNodes(node.type, toNode.type))) { | |
| 10012 toNode.propagatedType = node.propagatedType; | |
| 10013 toNode.staticType = node.staticType; | |
| 10014 return true; | |
| 10015 } | |
| 10016 return false; | |
| 10017 } | |
| 10018 | |
| 10019 @override | |
| 10020 bool visitLabel(Label node) { | |
| 10021 Label toNode = this._toNode as Label; | |
| 10022 return _and(_isEqualNodes(node.label, toNode.label), | |
| 10023 _isEqualTokens(node.colon, toNode.colon)); | |
| 10024 } | |
| 10025 | |
| 10026 @override | |
| 10027 bool visitLabeledStatement(LabeledStatement node) { | |
| 10028 LabeledStatement toNode = this._toNode as LabeledStatement; | |
| 10029 return _and(_isEqualNodeLists(node.labels, toNode.labels), | |
| 10030 _isEqualNodes(node.statement, toNode.statement)); | |
| 10031 } | |
| 10032 | |
| 10033 @override | |
| 10034 bool visitLibraryDirective(LibraryDirective node) { | |
| 10035 LibraryDirective toNode = this._toNode as LibraryDirective; | |
| 10036 return _and( | |
| 10037 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10038 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10039 _isEqualTokens(node.libraryKeyword, toNode.libraryKeyword), | |
| 10040 _isEqualNodes(node.name, toNode.name), | |
| 10041 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 10042 } | |
| 10043 | |
| 10044 @override | |
| 10045 bool visitLibraryIdentifier(LibraryIdentifier node) { | |
| 10046 LibraryIdentifier toNode = this._toNode as LibraryIdentifier; | |
| 10047 if (_isEqualNodeLists(node.components, toNode.components)) { | |
| 10048 toNode.propagatedType = node.propagatedType; | |
| 10049 toNode.staticType = node.staticType; | |
| 10050 return true; | |
| 10051 } | |
| 10052 return false; | |
| 10053 } | |
| 10054 | |
| 10055 @override | |
| 10056 bool visitListLiteral(ListLiteral node) { | |
| 10057 ListLiteral toNode = this._toNode as ListLiteral; | |
| 10058 if (_and(_isEqualTokens(node.constKeyword, toNode.constKeyword), | |
| 10059 _isEqualNodes(node.typeArguments, toNode.typeArguments), | |
| 10060 _isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 10061 _isEqualNodeLists(node.elements, toNode.elements), | |
| 10062 _isEqualTokens(node.rightBracket, toNode.rightBracket))) { | |
| 10063 toNode.propagatedType = node.propagatedType; | |
| 10064 toNode.staticType = node.staticType; | |
| 10065 return true; | |
| 10066 } | |
| 10067 return false; | |
| 10068 } | |
| 10069 | |
| 10070 @override | |
| 10071 bool visitMapLiteral(MapLiteral node) { | |
| 10072 MapLiteral toNode = this._toNode as MapLiteral; | |
| 10073 if (_and(_isEqualTokens(node.constKeyword, toNode.constKeyword), | |
| 10074 _isEqualNodes(node.typeArguments, toNode.typeArguments), | |
| 10075 _isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 10076 _isEqualNodeLists(node.entries, toNode.entries), | |
| 10077 _isEqualTokens(node.rightBracket, toNode.rightBracket))) { | |
| 10078 toNode.propagatedType = node.propagatedType; | |
| 10079 toNode.staticType = node.staticType; | |
| 10080 return true; | |
| 10081 } | |
| 10082 return false; | |
| 10083 } | |
| 10084 | |
| 10085 @override | |
| 10086 bool visitMapLiteralEntry(MapLiteralEntry node) { | |
| 10087 MapLiteralEntry toNode = this._toNode as MapLiteralEntry; | |
| 10088 return _and(_isEqualNodes(node.key, toNode.key), | |
| 10089 _isEqualTokens(node.separator, toNode.separator), | |
| 10090 _isEqualNodes(node.value, toNode.value)); | |
| 10091 } | |
| 10092 | |
| 10093 @override | |
| 10094 bool visitMethodDeclaration(MethodDeclaration node) { | |
| 10095 MethodDeclaration toNode = this._toNode as MethodDeclaration; | |
| 10096 return _and( | |
| 10097 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10098 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10099 _isEqualTokens(node.externalKeyword, toNode.externalKeyword), | |
| 10100 _isEqualTokens(node.modifierKeyword, toNode.modifierKeyword), | |
| 10101 _isEqualNodes(node.returnType, toNode.returnType), | |
| 10102 _isEqualTokens(node.propertyKeyword, toNode.propertyKeyword), | |
| 10103 _isEqualTokens(node.propertyKeyword, toNode.propertyKeyword), | |
| 10104 _isEqualNodes(node.name, toNode.name), | |
| 10105 _isEqualNodes(node.parameters, toNode.parameters), | |
| 10106 _isEqualNodes(node.body, toNode.body)); | |
| 10107 } | |
| 10108 | |
| 10109 @override | |
| 10110 bool visitMethodInvocation(MethodInvocation node) { | |
| 10111 MethodInvocation toNode = this._toNode as MethodInvocation; | |
| 10112 if (_and(_isEqualNodes(node.target, toNode.target), | |
| 10113 _isEqualTokens(node.operator, toNode.operator), | |
| 10114 _isEqualNodes(node.methodName, toNode.methodName), | |
| 10115 _isEqualNodes(node.argumentList, toNode.argumentList))) { | |
| 10116 toNode.propagatedType = node.propagatedType; | |
| 10117 toNode.staticType = node.staticType; | |
| 10118 return true; | |
| 10119 } | |
| 10120 return false; | |
| 10121 } | |
| 10122 | |
| 10123 @override | |
| 10124 bool visitNamedExpression(NamedExpression node) { | |
| 10125 NamedExpression toNode = this._toNode as NamedExpression; | |
| 10126 if (_and(_isEqualNodes(node.name, toNode.name), | |
| 10127 _isEqualNodes(node.expression, toNode.expression))) { | |
| 10128 toNode.propagatedType = node.propagatedType; | |
| 10129 toNode.staticType = node.staticType; | |
| 10130 return true; | |
| 10131 } | |
| 10132 return false; | |
| 10133 } | |
| 10134 | |
| 10135 @override | |
| 10136 bool visitNativeClause(NativeClause node) { | |
| 10137 NativeClause toNode = this._toNode as NativeClause; | |
| 10138 return _and(_isEqualTokens(node.nativeKeyword, toNode.nativeKeyword), | |
| 10139 _isEqualNodes(node.name, toNode.name)); | |
| 10140 } | |
| 10141 | |
| 10142 @override | |
| 10143 bool visitNativeFunctionBody(NativeFunctionBody node) { | |
| 10144 NativeFunctionBody toNode = this._toNode as NativeFunctionBody; | |
| 10145 return _and(_isEqualTokens(node.nativeKeyword, toNode.nativeKeyword), | |
| 10146 _isEqualNodes(node.stringLiteral, toNode.stringLiteral), | |
| 10147 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 10148 } | |
| 10149 | |
| 10150 @override | |
| 10151 bool visitNullLiteral(NullLiteral node) { | |
| 10152 NullLiteral toNode = this._toNode as NullLiteral; | |
| 10153 if (_isEqualTokens(node.literal, toNode.literal)) { | |
| 10154 toNode.propagatedType = node.propagatedType; | |
| 10155 toNode.staticType = node.staticType; | |
| 10156 return true; | |
| 10157 } | |
| 10158 return false; | |
| 10159 } | |
| 10160 | |
| 10161 @override | |
| 10162 bool visitParenthesizedExpression(ParenthesizedExpression node) { | |
| 10163 ParenthesizedExpression toNode = this._toNode as ParenthesizedExpression; | |
| 10164 if (_and(_isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 10165 _isEqualNodes(node.expression, toNode.expression), | |
| 10166 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis))) { | |
| 10167 toNode.propagatedType = node.propagatedType; | |
| 10168 toNode.staticType = node.staticType; | |
| 10169 return true; | |
| 10170 } | |
| 10171 return false; | |
| 10172 } | |
| 10173 | |
| 10174 @override | |
| 10175 bool visitPartDirective(PartDirective node) { | |
| 10176 PartDirective toNode = this._toNode as PartDirective; | |
| 10177 if (_and( | |
| 10178 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10179 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10180 _isEqualTokens(node.partKeyword, toNode.partKeyword), | |
| 10181 _isEqualNodes(node.uri, toNode.uri), | |
| 10182 _isEqualTokens(node.semicolon, toNode.semicolon))) { | |
| 10183 toNode.element = node.element; | |
| 10184 return true; | |
| 10185 } | |
| 10186 return false; | |
| 10187 } | |
| 10188 | |
| 10189 @override | |
| 10190 bool visitPartOfDirective(PartOfDirective node) { | |
| 10191 PartOfDirective toNode = this._toNode as PartOfDirective; | |
| 10192 if (_and( | |
| 10193 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10194 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10195 _isEqualTokens(node.partKeyword, toNode.partKeyword), | |
| 10196 _isEqualTokens(node.ofKeyword, toNode.ofKeyword), | |
| 10197 _isEqualNodes(node.libraryName, toNode.libraryName), | |
| 10198 _isEqualTokens(node.semicolon, toNode.semicolon))) { | |
| 10199 toNode.element = node.element; | |
| 10200 return true; | |
| 10201 } | |
| 10202 return false; | |
| 10203 } | |
| 10204 | |
| 10205 @override | |
| 10206 bool visitPostfixExpression(PostfixExpression node) { | |
| 10207 PostfixExpression toNode = this._toNode as PostfixExpression; | |
| 10208 if (_and(_isEqualNodes(node.operand, toNode.operand), | |
| 10209 _isEqualTokens(node.operator, toNode.operator))) { | |
| 10210 toNode.propagatedElement = node.propagatedElement; | |
| 10211 toNode.propagatedType = node.propagatedType; | |
| 10212 toNode.staticElement = node.staticElement; | |
| 10213 toNode.staticType = node.staticType; | |
| 10214 return true; | |
| 10215 } | |
| 10216 return false; | |
| 10217 } | |
| 10218 | |
| 10219 @override | |
| 10220 bool visitPrefixedIdentifier(PrefixedIdentifier node) { | |
| 10221 PrefixedIdentifier toNode = this._toNode as PrefixedIdentifier; | |
| 10222 if (_and(_isEqualNodes(node.prefix, toNode.prefix), | |
| 10223 _isEqualTokens(node.period, toNode.period), | |
| 10224 _isEqualNodes(node.identifier, toNode.identifier))) { | |
| 10225 toNode.propagatedType = node.propagatedType; | |
| 10226 toNode.staticType = node.staticType; | |
| 10227 return true; | |
| 10228 } | |
| 10229 return false; | |
| 10230 } | |
| 10231 | |
| 10232 @override | |
| 10233 bool visitPrefixExpression(PrefixExpression node) { | |
| 10234 PrefixExpression toNode = this._toNode as PrefixExpression; | |
| 10235 if (_and(_isEqualTokens(node.operator, toNode.operator), | |
| 10236 _isEqualNodes(node.operand, toNode.operand))) { | |
| 10237 toNode.propagatedElement = node.propagatedElement; | |
| 10238 toNode.propagatedType = node.propagatedType; | |
| 10239 toNode.staticElement = node.staticElement; | |
| 10240 toNode.staticType = node.staticType; | |
| 10241 return true; | |
| 10242 } | |
| 10243 return false; | |
| 10244 } | |
| 10245 | |
| 10246 @override | |
| 10247 bool visitPropertyAccess(PropertyAccess node) { | |
| 10248 PropertyAccess toNode = this._toNode as PropertyAccess; | |
| 10249 if (_and(_isEqualNodes(node.target, toNode.target), | |
| 10250 _isEqualTokens(node.operator, toNode.operator), | |
| 10251 _isEqualNodes(node.propertyName, toNode.propertyName))) { | |
| 10252 toNode.propagatedType = node.propagatedType; | |
| 10253 toNode.staticType = node.staticType; | |
| 10254 return true; | |
| 10255 } | |
| 10256 return false; | |
| 10257 } | |
| 10258 | |
| 10259 @override | |
| 10260 bool visitRedirectingConstructorInvocation( | |
| 10261 RedirectingConstructorInvocation node) { | |
| 10262 RedirectingConstructorInvocation toNode = | |
| 10263 this._toNode as RedirectingConstructorInvocation; | |
| 10264 if (_and(_isEqualTokens(node.thisKeyword, toNode.thisKeyword), | |
| 10265 _isEqualTokens(node.period, toNode.period), | |
| 10266 _isEqualNodes(node.constructorName, toNode.constructorName), | |
| 10267 _isEqualNodes(node.argumentList, toNode.argumentList))) { | |
| 10268 toNode.staticElement = node.staticElement; | |
| 10269 return true; | |
| 10270 } | |
| 10271 return false; | |
| 10272 } | |
| 10273 | |
| 10274 @override | |
| 10275 bool visitRethrowExpression(RethrowExpression node) { | |
| 10276 RethrowExpression toNode = this._toNode as RethrowExpression; | |
| 10277 if (_isEqualTokens(node.rethrowKeyword, toNode.rethrowKeyword)) { | |
| 10278 toNode.propagatedType = node.propagatedType; | |
| 10279 toNode.staticType = node.staticType; | |
| 10280 return true; | |
| 10281 } | |
| 10282 return false; | |
| 10283 } | |
| 10284 | |
| 10285 @override | |
| 10286 bool visitReturnStatement(ReturnStatement node) { | |
| 10287 ReturnStatement toNode = this._toNode as ReturnStatement; | |
| 10288 return _and(_isEqualTokens(node.returnKeyword, toNode.returnKeyword), | |
| 10289 _isEqualNodes(node.expression, toNode.expression), | |
| 10290 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 10291 } | |
| 10292 | |
| 10293 @override | |
| 10294 bool visitScriptTag(ScriptTag node) { | |
| 10295 ScriptTag toNode = this._toNode as ScriptTag; | |
| 10296 return _isEqualTokens(node.scriptTag, toNode.scriptTag); | |
| 10297 } | |
| 10298 | |
| 10299 @override | |
| 10300 bool visitShowCombinator(ShowCombinator node) { | |
| 10301 ShowCombinator toNode = this._toNode as ShowCombinator; | |
| 10302 return _and(_isEqualTokens(node.keyword, toNode.keyword), | |
| 10303 _isEqualNodeLists(node.shownNames, toNode.shownNames)); | |
| 10304 } | |
| 10305 | |
| 10306 @override | |
| 10307 bool visitSimpleFormalParameter(SimpleFormalParameter node) { | |
| 10308 SimpleFormalParameter toNode = this._toNode as SimpleFormalParameter; | |
| 10309 return _and( | |
| 10310 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10311 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10312 _isEqualTokens(node.keyword, toNode.keyword), | |
| 10313 _isEqualNodes(node.type, toNode.type), | |
| 10314 _isEqualNodes(node.identifier, toNode.identifier)); | |
| 10315 } | |
| 10316 | |
| 10317 @override | |
| 10318 bool visitSimpleIdentifier(SimpleIdentifier node) { | |
| 10319 SimpleIdentifier toNode = this._toNode as SimpleIdentifier; | |
| 10320 if (_isEqualTokens(node.token, toNode.token)) { | |
| 10321 toNode.staticElement = node.staticElement; | |
| 10322 toNode.staticType = node.staticType; | |
| 10323 toNode.propagatedElement = node.propagatedElement; | |
| 10324 toNode.propagatedType = node.propagatedType; | |
| 10325 toNode.auxiliaryElements = node.auxiliaryElements; | |
| 10326 return true; | |
| 10327 } | |
| 10328 return false; | |
| 10329 } | |
| 10330 | |
| 10331 @override | |
| 10332 bool visitSimpleStringLiteral(SimpleStringLiteral node) { | |
| 10333 SimpleStringLiteral toNode = this._toNode as SimpleStringLiteral; | |
| 10334 if (_and(_isEqualTokens(node.literal, toNode.literal), | |
| 10335 node.value == toNode.value)) { | |
| 10336 toNode.propagatedType = node.propagatedType; | |
| 10337 toNode.staticType = node.staticType; | |
| 10338 return true; | |
| 10339 } | |
| 10340 return false; | |
| 10341 } | |
| 10342 | |
| 10343 @override | |
| 10344 bool visitStringInterpolation(StringInterpolation node) { | |
| 10345 StringInterpolation toNode = this._toNode as StringInterpolation; | |
| 10346 if (_isEqualNodeLists(node.elements, toNode.elements)) { | |
| 10347 toNode.propagatedType = node.propagatedType; | |
| 10348 toNode.staticType = node.staticType; | |
| 10349 return true; | |
| 10350 } | |
| 10351 return false; | |
| 10352 } | |
| 10353 | |
| 10354 @override | |
| 10355 bool visitSuperConstructorInvocation(SuperConstructorInvocation node) { | |
| 10356 SuperConstructorInvocation toNode = | |
| 10357 this._toNode as SuperConstructorInvocation; | |
| 10358 if (_and(_isEqualTokens(node.superKeyword, toNode.superKeyword), | |
| 10359 _isEqualTokens(node.period, toNode.period), | |
| 10360 _isEqualNodes(node.constructorName, toNode.constructorName), | |
| 10361 _isEqualNodes(node.argumentList, toNode.argumentList))) { | |
| 10362 toNode.staticElement = node.staticElement; | |
| 10363 return true; | |
| 10364 } | |
| 10365 return false; | |
| 10366 } | |
| 10367 | |
| 10368 @override | |
| 10369 bool visitSuperExpression(SuperExpression node) { | |
| 10370 SuperExpression toNode = this._toNode as SuperExpression; | |
| 10371 if (_isEqualTokens(node.superKeyword, toNode.superKeyword)) { | |
| 10372 toNode.propagatedType = node.propagatedType; | |
| 10373 toNode.staticType = node.staticType; | |
| 10374 return true; | |
| 10375 } | |
| 10376 return false; | |
| 10377 } | |
| 10378 | |
| 10379 @override | |
| 10380 bool visitSwitchCase(SwitchCase node) { | |
| 10381 SwitchCase toNode = this._toNode as SwitchCase; | |
| 10382 return _and(_isEqualNodeLists(node.labels, toNode.labels), | |
| 10383 _isEqualTokens(node.keyword, toNode.keyword), | |
| 10384 _isEqualNodes(node.expression, toNode.expression), | |
| 10385 _isEqualTokens(node.colon, toNode.colon), | |
| 10386 _isEqualNodeLists(node.statements, toNode.statements)); | |
| 10387 } | |
| 10388 | |
| 10389 @override | |
| 10390 bool visitSwitchDefault(SwitchDefault node) { | |
| 10391 SwitchDefault toNode = this._toNode as SwitchDefault; | |
| 10392 return _and(_isEqualNodeLists(node.labels, toNode.labels), | |
| 10393 _isEqualTokens(node.keyword, toNode.keyword), | |
| 10394 _isEqualTokens(node.colon, toNode.colon), | |
| 10395 _isEqualNodeLists(node.statements, toNode.statements)); | |
| 10396 } | |
| 10397 | |
| 10398 @override | |
| 10399 bool visitSwitchStatement(SwitchStatement node) { | |
| 10400 SwitchStatement toNode = this._toNode as SwitchStatement; | |
| 10401 return _and(_isEqualTokens(node.switchKeyword, toNode.switchKeyword), | |
| 10402 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 10403 _isEqualNodes(node.expression, toNode.expression), | |
| 10404 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 10405 _isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 10406 _isEqualNodeLists(node.members, toNode.members), | |
| 10407 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 10408 } | |
| 10409 | |
| 10410 @override | |
| 10411 bool visitSymbolLiteral(SymbolLiteral node) { | |
| 10412 SymbolLiteral toNode = this._toNode as SymbolLiteral; | |
| 10413 if (_and(_isEqualTokens(node.poundSign, toNode.poundSign), | |
| 10414 _isEqualTokenLists(node.components, toNode.components))) { | |
| 10415 toNode.propagatedType = node.propagatedType; | |
| 10416 toNode.staticType = node.staticType; | |
| 10417 return true; | |
| 10418 } | |
| 10419 return false; | |
| 10420 } | |
| 10421 | |
| 10422 @override | |
| 10423 bool visitThisExpression(ThisExpression node) { | |
| 10424 ThisExpression toNode = this._toNode as ThisExpression; | |
| 10425 if (_isEqualTokens(node.thisKeyword, toNode.thisKeyword)) { | |
| 10426 toNode.propagatedType = node.propagatedType; | |
| 10427 toNode.staticType = node.staticType; | |
| 10428 return true; | |
| 10429 } | |
| 10430 return false; | |
| 10431 } | |
| 10432 | |
| 10433 @override | |
| 10434 bool visitThrowExpression(ThrowExpression node) { | |
| 10435 ThrowExpression toNode = this._toNode as ThrowExpression; | |
| 10436 if (_and(_isEqualTokens(node.throwKeyword, toNode.throwKeyword), | |
| 10437 _isEqualNodes(node.expression, toNode.expression))) { | |
| 10438 toNode.propagatedType = node.propagatedType; | |
| 10439 toNode.staticType = node.staticType; | |
| 10440 return true; | |
| 10441 } | |
| 10442 return false; | |
| 10443 } | |
| 10444 | |
| 10445 @override | |
| 10446 bool visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) { | |
| 10447 TopLevelVariableDeclaration toNode = | |
| 10448 this._toNode as TopLevelVariableDeclaration; | |
| 10449 return _and( | |
| 10450 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10451 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10452 _isEqualNodes(node.variables, toNode.variables), | |
| 10453 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 10454 } | |
| 10455 | |
| 10456 @override | |
| 10457 bool visitTryStatement(TryStatement node) { | |
| 10458 TryStatement toNode = this._toNode as TryStatement; | |
| 10459 return _and(_isEqualTokens(node.tryKeyword, toNode.tryKeyword), | |
| 10460 _isEqualNodes(node.body, toNode.body), | |
| 10461 _isEqualNodeLists(node.catchClauses, toNode.catchClauses), | |
| 10462 _isEqualTokens(node.finallyKeyword, toNode.finallyKeyword), | |
| 10463 _isEqualNodes(node.finallyBlock, toNode.finallyBlock)); | |
| 10464 } | |
| 10465 | |
| 10466 @override | |
| 10467 bool visitTypeArgumentList(TypeArgumentList node) { | |
| 10468 TypeArgumentList toNode = this._toNode as TypeArgumentList; | |
| 10469 return _and(_isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 10470 _isEqualNodeLists(node.arguments, toNode.arguments), | |
| 10471 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 10472 } | |
| 10473 | |
| 10474 @override | |
| 10475 bool visitTypeName(TypeName node) { | |
| 10476 TypeName toNode = this._toNode as TypeName; | |
| 10477 if (_and(_isEqualNodes(node.name, toNode.name), | |
| 10478 _isEqualNodes(node.typeArguments, toNode.typeArguments))) { | |
| 10479 toNode.type = node.type; | |
| 10480 return true; | |
| 10481 } | |
| 10482 return false; | |
| 10483 } | |
| 10484 | |
| 10485 @override | |
| 10486 bool visitTypeParameter(TypeParameter node) { | |
| 10487 TypeParameter toNode = this._toNode as TypeParameter; | |
| 10488 return _and( | |
| 10489 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10490 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10491 _isEqualNodes(node.name, toNode.name), | |
| 10492 _isEqualTokens(node.extendsKeyword, toNode.extendsKeyword), | |
| 10493 _isEqualNodes(node.bound, toNode.bound)); | |
| 10494 } | |
| 10495 | |
| 10496 @override | |
| 10497 bool visitTypeParameterList(TypeParameterList node) { | |
| 10498 TypeParameterList toNode = this._toNode as TypeParameterList; | |
| 10499 return _and(_isEqualTokens(node.leftBracket, toNode.leftBracket), | |
| 10500 _isEqualNodeLists(node.typeParameters, toNode.typeParameters), | |
| 10501 _isEqualTokens(node.rightBracket, toNode.rightBracket)); | |
| 10502 } | |
| 10503 | |
| 10504 @override | |
| 10505 bool visitVariableDeclaration(VariableDeclaration node) { | |
| 10506 VariableDeclaration toNode = this._toNode as VariableDeclaration; | |
| 10507 return _and( | |
| 10508 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10509 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10510 _isEqualNodes(node.name, toNode.name), | |
| 10511 _isEqualTokens(node.equals, toNode.equals), | |
| 10512 _isEqualNodes(node.initializer, toNode.initializer)); | |
| 10513 } | |
| 10514 | |
| 10515 @override | |
| 10516 bool visitVariableDeclarationList(VariableDeclarationList node) { | |
| 10517 VariableDeclarationList toNode = this._toNode as VariableDeclarationList; | |
| 10518 return _and( | |
| 10519 _isEqualNodes(node.documentationComment, toNode.documentationComment), | |
| 10520 _isEqualNodeLists(node.metadata, toNode.metadata), | |
| 10521 _isEqualTokens(node.keyword, toNode.keyword), | |
| 10522 _isEqualNodes(node.type, toNode.type), | |
| 10523 _isEqualNodeLists(node.variables, toNode.variables)); | |
| 10524 } | |
| 10525 | |
| 10526 @override | |
| 10527 bool visitVariableDeclarationStatement(VariableDeclarationStatement node) { | |
| 10528 VariableDeclarationStatement toNode = | |
| 10529 this._toNode as VariableDeclarationStatement; | |
| 10530 return _and(_isEqualNodes(node.variables, toNode.variables), | |
| 10531 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 10532 } | |
| 10533 | |
| 10534 @override | |
| 10535 bool visitWhileStatement(WhileStatement node) { | |
| 10536 WhileStatement toNode = this._toNode as WhileStatement; | |
| 10537 return _and(_isEqualTokens(node.whileKeyword, toNode.whileKeyword), | |
| 10538 _isEqualTokens(node.leftParenthesis, toNode.leftParenthesis), | |
| 10539 _isEqualNodes(node.condition, toNode.condition), | |
| 10540 _isEqualTokens(node.rightParenthesis, toNode.rightParenthesis), | |
| 10541 _isEqualNodes(node.body, toNode.body)); | |
| 10542 } | |
| 10543 | |
| 10544 @override | |
| 10545 bool visitWithClause(WithClause node) { | |
| 10546 WithClause toNode = this._toNode as WithClause; | |
| 10547 return _and(_isEqualTokens(node.withKeyword, toNode.withKeyword), | |
| 10548 _isEqualNodeLists(node.mixinTypes, toNode.mixinTypes)); | |
| 10549 } | |
| 10550 | |
| 10551 @override | |
| 10552 bool visitYieldStatement(YieldStatement node) { | |
| 10553 YieldStatement toNode = this._toNode as YieldStatement; | |
| 10554 return _and(_isEqualTokens(node.yieldKeyword, toNode.yieldKeyword), | |
| 10555 _isEqualNodes(node.expression, toNode.expression), | |
| 10556 _isEqualTokens(node.semicolon, toNode.semicolon)); | |
| 10557 } | |
| 10558 | |
| 10559 /** | |
| 10560 * Return `true` if all of the parameters are `true`. | |
| 10561 */ | |
| 10562 bool _and(bool b1, bool b2, [bool b3 = true, bool b4 = true, bool b5 = true, | |
| 10563 bool b6 = true, bool b7 = true, bool b8 = true, bool b9 = true, | |
| 10564 bool b10 = true, bool b11 = true, bool b12 = true, bool b13 = true]) { | |
| 10565 // TODO(brianwilkerson) Inline this method. | |
| 10566 return b1 && | |
| 10567 b2 && | |
| 10568 b3 && | |
| 10569 b4 && | |
| 10570 b5 && | |
| 10571 b6 && | |
| 10572 b7 && | |
| 10573 b8 && | |
| 10574 b9 && | |
| 10575 b10 && | |
| 10576 b11 && | |
| 10577 b12 && | |
| 10578 b13; | |
| 10579 } | |
| 10580 | |
| 10581 /** | |
| 10582 * Return `true` if the [first] and [second] lists of AST nodes have the same | |
| 10583 * size and corresponding elements are equal. | |
| 10584 */ | |
| 10585 bool _isEqualNodeLists(NodeList first, NodeList second) { | |
| 10586 if (first == null) { | |
| 10587 return second == null; | |
| 10588 } else if (second == null) { | |
| 10589 return false; | |
| 10590 } | |
| 10591 int size = first.length; | |
| 10592 if (second.length != size) { | |
| 10593 return false; | |
| 10594 } | |
| 10595 bool equal = true; | |
| 10596 for (int i = 0; i < size; i++) { | |
| 10597 if (!_isEqualNodes(first[i], second[i])) { | |
| 10598 equal = false; | |
| 10599 } | |
| 10600 } | |
| 10601 return equal; | |
| 10602 } | |
| 10603 | |
| 10604 /** | |
| 10605 * Return `true` if the [fromNode] and [toNode] have the same structure. As a | |
| 10606 * side-effect, if the nodes do have the same structure, any resolution data | |
| 10607 * from the first node will be copied to the second node. | |
| 10608 */ | |
| 10609 bool _isEqualNodes(AstNode fromNode, AstNode toNode) { | |
| 10610 if (fromNode == null) { | |
| 10611 return toNode == null; | |
| 10612 } else if (toNode == null) { | |
| 10613 return false; | |
| 10614 } else if (fromNode.runtimeType == toNode.runtimeType) { | |
| 10615 this._toNode = toNode; | |
| 10616 return fromNode.accept(this); | |
| 10617 } | |
| 10618 // | |
| 10619 // Check for a simple transformation caused by entering a period. | |
| 10620 // | |
| 10621 if (toNode is PrefixedIdentifier) { | |
| 10622 SimpleIdentifier prefix = toNode.prefix; | |
| 10623 if (fromNode.runtimeType == prefix.runtimeType) { | |
| 10624 this._toNode = prefix; | |
| 10625 return fromNode.accept(this); | |
| 10626 } | |
| 10627 } else if (toNode is PropertyAccess) { | |
| 10628 Expression target = toNode.target; | |
| 10629 if (fromNode.runtimeType == target.runtimeType) { | |
| 10630 this._toNode = target; | |
| 10631 return fromNode.accept(this); | |
| 10632 } | |
| 10633 } | |
| 10634 return false; | |
| 10635 } | |
| 10636 | |
| 10637 /** | |
| 10638 * Return `true` if the [first] and [second] arrays of tokens have the same | |
| 10639 * length and corresponding elements are equal. | |
| 10640 */ | |
| 10641 bool _isEqualTokenLists(List<Token> first, List<Token> second) { | |
| 10642 int length = first.length; | |
| 10643 if (second.length != length) { | |
| 10644 return false; | |
| 10645 } | |
| 10646 for (int i = 0; i < length; i++) { | |
| 10647 if (!_isEqualTokens(first[i], second[i])) { | |
| 10648 return false; | |
| 10649 } | |
| 10650 } | |
| 10651 return true; | |
| 10652 } | |
| 10653 | |
| 10654 /** | |
| 10655 * Return `true` if the [first] and [second] tokens have the same structure. | |
| 10656 */ | |
| 10657 bool _isEqualTokens(Token first, Token second) { | |
| 10658 if (first == null) { | |
| 10659 return second == null; | |
| 10660 } else if (second == null) { | |
| 10661 return false; | |
| 10662 } | |
| 10663 return first.lexeme == second.lexeme; | |
| 10664 } | |
| 10665 | |
| 10666 /** | |
| 10667 * Copy resolution data from the [fromNode] to the [toNode]. | |
| 10668 */ | |
| 10669 static void copyResolutionData(AstNode fromNode, AstNode toNode) { | |
| 10670 ResolutionCopier copier = new ResolutionCopier(); | |
| 10671 copier._isEqualNodes(fromNode, toNode); | |
| 10672 } | |
| 10673 } | |
| OLD | NEW |