OLD | NEW |
(Empty) | |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. |
| 4 |
| 5 library engine.scanner_test; |
| 6 |
| 7 import 'package:analyzer/src/generated/error.dart'; |
| 8 import 'package:analyzer/src/generated/scanner.dart'; |
| 9 import 'package:analyzer/src/generated/source.dart'; |
| 10 import 'package:unittest/unittest.dart'; |
| 11 |
| 12 import '../reflective_tests.dart'; |
| 13 import '../utils.dart'; |
| 14 import 'test_support.dart'; |
| 15 |
| 16 main() { |
| 17 initializeTestEnvironment(); |
| 18 runReflectiveTests(CharSequenceReaderTest); |
| 19 runReflectiveTests(KeywordStateTest); |
| 20 runReflectiveTests(ScannerTest); |
| 21 runReflectiveTests(TokenTypeTest); |
| 22 } |
| 23 |
| 24 class CharacterRangeReaderTest extends EngineTestCase { |
| 25 void test_advance() { |
| 26 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
| 27 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); |
| 28 expect(reader.advance(), 0x79); |
| 29 expect(reader.advance(), 0x7A); |
| 30 expect(reader.advance(), 0x7A); |
| 31 expect(reader.advance(), -1); |
| 32 expect(reader.advance(), -1); |
| 33 } |
| 34 |
| 35 void test_creation() { |
| 36 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
| 37 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); |
| 38 expect(reader, isNotNull); |
| 39 } |
| 40 |
| 41 void test_getOffset() { |
| 42 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
| 43 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 2); |
| 44 expect(reader.offset, 1); |
| 45 reader.advance(); |
| 46 expect(reader.offset, 2); |
| 47 reader.advance(); |
| 48 expect(reader.offset, 2); |
| 49 } |
| 50 |
| 51 void test_getString() { |
| 52 CharSequenceReader baseReader = new CharSequenceReader("__xyzzy__"); |
| 53 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 2, 7); |
| 54 reader.offset = 5; |
| 55 expect(reader.getString(3, 0), "yzz"); |
| 56 expect(reader.getString(4, 1), "zzy"); |
| 57 } |
| 58 |
| 59 void test_peek() { |
| 60 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
| 61 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 3); |
| 62 expect(reader.peek(), 0x79); |
| 63 expect(reader.peek(), 0x79); |
| 64 reader.advance(); |
| 65 expect(reader.peek(), 0x7A); |
| 66 expect(reader.peek(), 0x7A); |
| 67 reader.advance(); |
| 68 expect(reader.peek(), -1); |
| 69 expect(reader.peek(), -1); |
| 70 } |
| 71 |
| 72 void test_setOffset() { |
| 73 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
| 74 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); |
| 75 reader.offset = 2; |
| 76 expect(reader.offset, 2); |
| 77 } |
| 78 } |
| 79 |
| 80 @reflectiveTest |
| 81 class CharSequenceReaderTest { |
| 82 void test_advance() { |
| 83 CharSequenceReader reader = new CharSequenceReader("x"); |
| 84 expect(reader.advance(), 0x78); |
| 85 expect(reader.advance(), -1); |
| 86 expect(reader.advance(), -1); |
| 87 } |
| 88 |
| 89 void test_creation() { |
| 90 expect(new CharSequenceReader("x"), isNotNull); |
| 91 } |
| 92 |
| 93 void test_getOffset() { |
| 94 CharSequenceReader reader = new CharSequenceReader("x"); |
| 95 expect(reader.offset, -1); |
| 96 reader.advance(); |
| 97 expect(reader.offset, 0); |
| 98 reader.advance(); |
| 99 expect(reader.offset, 0); |
| 100 } |
| 101 |
| 102 void test_getString() { |
| 103 CharSequenceReader reader = new CharSequenceReader("xyzzy"); |
| 104 reader.offset = 3; |
| 105 expect(reader.getString(1, 0), "yzz"); |
| 106 expect(reader.getString(2, 1), "zzy"); |
| 107 } |
| 108 |
| 109 void test_peek() { |
| 110 CharSequenceReader reader = new CharSequenceReader("xy"); |
| 111 expect(reader.peek(), 0x78); |
| 112 expect(reader.peek(), 0x78); |
| 113 reader.advance(); |
| 114 expect(reader.peek(), 0x79); |
| 115 expect(reader.peek(), 0x79); |
| 116 reader.advance(); |
| 117 expect(reader.peek(), -1); |
| 118 expect(reader.peek(), -1); |
| 119 } |
| 120 |
| 121 void test_setOffset() { |
| 122 CharSequenceReader reader = new CharSequenceReader("xyz"); |
| 123 reader.offset = 2; |
| 124 expect(reader.offset, 2); |
| 125 } |
| 126 } |
| 127 |
| 128 @reflectiveTest |
| 129 class KeywordStateTest { |
| 130 void test_KeywordState() { |
| 131 // |
| 132 // Generate the test data to be scanned. |
| 133 // |
| 134 List<Keyword> keywords = Keyword.values; |
| 135 int keywordCount = keywords.length; |
| 136 List<String> textToTest = new List<String>(keywordCount * 3); |
| 137 for (int i = 0; i < keywordCount; i++) { |
| 138 String syntax = keywords[i].syntax; |
| 139 textToTest[i] = syntax; |
| 140 textToTest[i + keywordCount] = "${syntax}x"; |
| 141 textToTest[i + keywordCount * 2] = syntax.substring(0, syntax.length - 1); |
| 142 } |
| 143 // |
| 144 // Scan each of the identifiers. |
| 145 // |
| 146 KeywordState firstState = KeywordState.KEYWORD_STATE; |
| 147 for (int i = 0; i < textToTest.length; i++) { |
| 148 String text = textToTest[i]; |
| 149 int index = 0; |
| 150 int length = text.length; |
| 151 KeywordState state = firstState; |
| 152 while (index < length && state != null) { |
| 153 state = state.next(text.codeUnitAt(index)); |
| 154 index++; |
| 155 } |
| 156 if (i < keywordCount) { |
| 157 // keyword |
| 158 expect(state, isNotNull); |
| 159 expect(state.keyword(), isNotNull); |
| 160 expect(state.keyword(), keywords[i]); |
| 161 } else if (i < keywordCount * 2) { |
| 162 // keyword + "x" |
| 163 expect(state, isNull); |
| 164 } else { |
| 165 // keyword.substring(0, keyword.length() - 1) |
| 166 expect(state, isNotNull); |
| 167 } |
| 168 } |
| 169 } |
| 170 } |
| 171 |
| 172 @reflectiveTest |
| 173 class ScannerTest { |
| 174 void fail_incomplete_string_interpolation() { |
| 175 // https://code.google.com/p/dart/issues/detail?id=18073 |
| 176 _assertErrorAndTokens( |
| 177 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 9, "\"foo \${bar", [ |
| 178 new StringToken(TokenType.STRING, "\"foo ", 0), |
| 179 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 5), |
| 180 new StringToken(TokenType.IDENTIFIER, "bar", 7) |
| 181 ]); |
| 182 } |
| 183 |
| 184 void test_ampersand() { |
| 185 _assertToken(TokenType.AMPERSAND, "&"); |
| 186 } |
| 187 |
| 188 void test_ampersand_ampersand() { |
| 189 _assertToken(TokenType.AMPERSAND_AMPERSAND, "&&"); |
| 190 } |
| 191 |
| 192 void test_ampersand_eq() { |
| 193 _assertToken(TokenType.AMPERSAND_EQ, "&="); |
| 194 } |
| 195 |
| 196 void test_at() { |
| 197 _assertToken(TokenType.AT, "@"); |
| 198 } |
| 199 |
| 200 void test_backping() { |
| 201 _assertToken(TokenType.BACKPING, "`"); |
| 202 } |
| 203 |
| 204 void test_backslash() { |
| 205 _assertToken(TokenType.BACKSLASH, "\\"); |
| 206 } |
| 207 |
| 208 void test_bang() { |
| 209 _assertToken(TokenType.BANG, "!"); |
| 210 } |
| 211 |
| 212 void test_bang_eq() { |
| 213 _assertToken(TokenType.BANG_EQ, "!="); |
| 214 } |
| 215 |
| 216 void test_bar() { |
| 217 _assertToken(TokenType.BAR, "|"); |
| 218 } |
| 219 |
| 220 void test_bar_bar() { |
| 221 _assertToken(TokenType.BAR_BAR, "||"); |
| 222 } |
| 223 |
| 224 void test_bar_eq() { |
| 225 _assertToken(TokenType.BAR_EQ, "|="); |
| 226 } |
| 227 |
| 228 void test_caret() { |
| 229 _assertToken(TokenType.CARET, "^"); |
| 230 } |
| 231 |
| 232 void test_caret_eq() { |
| 233 _assertToken(TokenType.CARET_EQ, "^="); |
| 234 } |
| 235 |
| 236 void test_close_curly_bracket() { |
| 237 _assertToken(TokenType.CLOSE_CURLY_BRACKET, "}"); |
| 238 } |
| 239 |
| 240 void test_close_paren() { |
| 241 _assertToken(TokenType.CLOSE_PAREN, ")"); |
| 242 } |
| 243 |
| 244 void test_close_square_bracket() { |
| 245 _assertToken(TokenType.CLOSE_SQUARE_BRACKET, "]"); |
| 246 } |
| 247 |
| 248 void test_colon() { |
| 249 _assertToken(TokenType.COLON, ":"); |
| 250 } |
| 251 |
| 252 void test_comma() { |
| 253 _assertToken(TokenType.COMMA, ","); |
| 254 } |
| 255 |
| 256 void test_comment_disabled_multi() { |
| 257 Scanner scanner = new Scanner( |
| 258 null, |
| 259 new CharSequenceReader("/* comment */ "), |
| 260 AnalysisErrorListener.NULL_LISTENER); |
| 261 scanner.preserveComments = false; |
| 262 Token token = scanner.tokenize(); |
| 263 expect(token, isNotNull); |
| 264 expect(token.precedingComments, isNull); |
| 265 } |
| 266 |
| 267 void test_comment_multi() { |
| 268 _assertComment(TokenType.MULTI_LINE_COMMENT, "/* comment */"); |
| 269 } |
| 270 |
| 271 void test_comment_multi_lineEnds() { |
| 272 String code = r''' |
| 273 /** |
| 274 * aa |
| 275 * bbb |
| 276 * c |
| 277 */'''; |
| 278 GatheringErrorListener listener = new GatheringErrorListener(); |
| 279 Scanner scanner = new Scanner(null, new CharSequenceReader(code), listener); |
| 280 scanner.tokenize(); |
| 281 expect( |
| 282 scanner.lineStarts, |
| 283 equals(<int>[ |
| 284 code.indexOf('/**'), |
| 285 code.indexOf(' * aa'), |
| 286 code.indexOf(' * bbb'), |
| 287 code.indexOf(' * c'), |
| 288 code.indexOf(' */') |
| 289 ])); |
| 290 } |
| 291 |
| 292 void test_comment_multi_unterminated() { |
| 293 _assertError(ScannerErrorCode.UNTERMINATED_MULTI_LINE_COMMENT, 3, "/* x"); |
| 294 } |
| 295 |
| 296 void test_comment_nested() { |
| 297 _assertComment( |
| 298 TokenType.MULTI_LINE_COMMENT, "/* comment /* within a */ comment */"); |
| 299 } |
| 300 |
| 301 void test_comment_single() { |
| 302 _assertComment(TokenType.SINGLE_LINE_COMMENT, "// comment"); |
| 303 } |
| 304 |
| 305 void test_double_both_E() { |
| 306 _assertToken(TokenType.DOUBLE, "0.123E4"); |
| 307 } |
| 308 |
| 309 void test_double_both_e() { |
| 310 _assertToken(TokenType.DOUBLE, "0.123e4"); |
| 311 } |
| 312 |
| 313 void test_double_fraction() { |
| 314 _assertToken(TokenType.DOUBLE, ".123"); |
| 315 } |
| 316 |
| 317 void test_double_fraction_E() { |
| 318 _assertToken(TokenType.DOUBLE, ".123E4"); |
| 319 } |
| 320 |
| 321 void test_double_fraction_e() { |
| 322 _assertToken(TokenType.DOUBLE, ".123e4"); |
| 323 } |
| 324 |
| 325 void test_double_missingDigitInExponent() { |
| 326 _assertError(ScannerErrorCode.MISSING_DIGIT, 1, "1e"); |
| 327 } |
| 328 |
| 329 void test_double_whole_E() { |
| 330 _assertToken(TokenType.DOUBLE, "12E4"); |
| 331 } |
| 332 |
| 333 void test_double_whole_e() { |
| 334 _assertToken(TokenType.DOUBLE, "12e4"); |
| 335 } |
| 336 |
| 337 void test_eq() { |
| 338 _assertToken(TokenType.EQ, "="); |
| 339 } |
| 340 |
| 341 void test_eq_eq() { |
| 342 _assertToken(TokenType.EQ_EQ, "=="); |
| 343 } |
| 344 |
| 345 void test_gt() { |
| 346 _assertToken(TokenType.GT, ">"); |
| 347 } |
| 348 |
| 349 void test_gt_eq() { |
| 350 _assertToken(TokenType.GT_EQ, ">="); |
| 351 } |
| 352 |
| 353 void test_gt_gt() { |
| 354 _assertToken(TokenType.GT_GT, ">>"); |
| 355 } |
| 356 |
| 357 void test_gt_gt_eq() { |
| 358 _assertToken(TokenType.GT_GT_EQ, ">>="); |
| 359 } |
| 360 |
| 361 void test_hash() { |
| 362 _assertToken(TokenType.HASH, "#"); |
| 363 } |
| 364 |
| 365 void test_hexadecimal() { |
| 366 _assertToken(TokenType.HEXADECIMAL, "0x1A2B3C"); |
| 367 } |
| 368 |
| 369 void test_hexadecimal_missingDigit() { |
| 370 _assertError(ScannerErrorCode.MISSING_HEX_DIGIT, 1, "0x"); |
| 371 } |
| 372 |
| 373 void test_identifier() { |
| 374 _assertToken(TokenType.IDENTIFIER, "result"); |
| 375 } |
| 376 |
| 377 void test_illegalChar_cyrillicLetter_middle() { |
| 378 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 5, "Shche\u0433lov"); |
| 379 } |
| 380 |
| 381 void test_illegalChar_cyrillicLetter_start() { |
| 382 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 0, "\u0429"); |
| 383 } |
| 384 |
| 385 void test_illegalChar_nbsp() { |
| 386 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 0, "\u00A0"); |
| 387 } |
| 388 |
| 389 void test_illegalChar_notLetter() { |
| 390 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 0, "\u0312"); |
| 391 } |
| 392 |
| 393 void test_index() { |
| 394 _assertToken(TokenType.INDEX, "[]"); |
| 395 } |
| 396 |
| 397 void test_index_eq() { |
| 398 _assertToken(TokenType.INDEX_EQ, "[]="); |
| 399 } |
| 400 |
| 401 void test_int() { |
| 402 _assertToken(TokenType.INT, "123"); |
| 403 } |
| 404 |
| 405 void test_int_initialZero() { |
| 406 _assertToken(TokenType.INT, "0123"); |
| 407 } |
| 408 |
| 409 void test_keyword_abstract() { |
| 410 _assertKeywordToken("abstract"); |
| 411 } |
| 412 |
| 413 void test_keyword_as() { |
| 414 _assertKeywordToken("as"); |
| 415 } |
| 416 |
| 417 void test_keyword_assert() { |
| 418 _assertKeywordToken("assert"); |
| 419 } |
| 420 |
| 421 void test_keyword_break() { |
| 422 _assertKeywordToken("break"); |
| 423 } |
| 424 |
| 425 void test_keyword_case() { |
| 426 _assertKeywordToken("case"); |
| 427 } |
| 428 |
| 429 void test_keyword_catch() { |
| 430 _assertKeywordToken("catch"); |
| 431 } |
| 432 |
| 433 void test_keyword_class() { |
| 434 _assertKeywordToken("class"); |
| 435 } |
| 436 |
| 437 void test_keyword_const() { |
| 438 _assertKeywordToken("const"); |
| 439 } |
| 440 |
| 441 void test_keyword_continue() { |
| 442 _assertKeywordToken("continue"); |
| 443 } |
| 444 |
| 445 void test_keyword_default() { |
| 446 _assertKeywordToken("default"); |
| 447 } |
| 448 |
| 449 void test_keyword_deferred() { |
| 450 _assertKeywordToken("deferred"); |
| 451 } |
| 452 |
| 453 void test_keyword_do() { |
| 454 _assertKeywordToken("do"); |
| 455 } |
| 456 |
| 457 void test_keyword_dynamic() { |
| 458 _assertKeywordToken("dynamic"); |
| 459 } |
| 460 |
| 461 void test_keyword_else() { |
| 462 _assertKeywordToken("else"); |
| 463 } |
| 464 |
| 465 void test_keyword_enum() { |
| 466 _assertKeywordToken("enum"); |
| 467 } |
| 468 |
| 469 void test_keyword_export() { |
| 470 _assertKeywordToken("export"); |
| 471 } |
| 472 |
| 473 void test_keyword_extends() { |
| 474 _assertKeywordToken("extends"); |
| 475 } |
| 476 |
| 477 void test_keyword_factory() { |
| 478 _assertKeywordToken("factory"); |
| 479 } |
| 480 |
| 481 void test_keyword_false() { |
| 482 _assertKeywordToken("false"); |
| 483 } |
| 484 |
| 485 void test_keyword_final() { |
| 486 _assertKeywordToken("final"); |
| 487 } |
| 488 |
| 489 void test_keyword_finally() { |
| 490 _assertKeywordToken("finally"); |
| 491 } |
| 492 |
| 493 void test_keyword_for() { |
| 494 _assertKeywordToken("for"); |
| 495 } |
| 496 |
| 497 void test_keyword_get() { |
| 498 _assertKeywordToken("get"); |
| 499 } |
| 500 |
| 501 void test_keyword_if() { |
| 502 _assertKeywordToken("if"); |
| 503 } |
| 504 |
| 505 void test_keyword_implements() { |
| 506 _assertKeywordToken("implements"); |
| 507 } |
| 508 |
| 509 void test_keyword_import() { |
| 510 _assertKeywordToken("import"); |
| 511 } |
| 512 |
| 513 void test_keyword_in() { |
| 514 _assertKeywordToken("in"); |
| 515 } |
| 516 |
| 517 void test_keyword_is() { |
| 518 _assertKeywordToken("is"); |
| 519 } |
| 520 |
| 521 void test_keyword_library() { |
| 522 _assertKeywordToken("library"); |
| 523 } |
| 524 |
| 525 void test_keyword_new() { |
| 526 _assertKeywordToken("new"); |
| 527 } |
| 528 |
| 529 void test_keyword_null() { |
| 530 _assertKeywordToken("null"); |
| 531 } |
| 532 |
| 533 void test_keyword_operator() { |
| 534 _assertKeywordToken("operator"); |
| 535 } |
| 536 |
| 537 void test_keyword_part() { |
| 538 _assertKeywordToken("part"); |
| 539 } |
| 540 |
| 541 void test_keyword_rethrow() { |
| 542 _assertKeywordToken("rethrow"); |
| 543 } |
| 544 |
| 545 void test_keyword_return() { |
| 546 _assertKeywordToken("return"); |
| 547 } |
| 548 |
| 549 void test_keyword_set() { |
| 550 _assertKeywordToken("set"); |
| 551 } |
| 552 |
| 553 void test_keyword_static() { |
| 554 _assertKeywordToken("static"); |
| 555 } |
| 556 |
| 557 void test_keyword_super() { |
| 558 _assertKeywordToken("super"); |
| 559 } |
| 560 |
| 561 void test_keyword_switch() { |
| 562 _assertKeywordToken("switch"); |
| 563 } |
| 564 |
| 565 void test_keyword_this() { |
| 566 _assertKeywordToken("this"); |
| 567 } |
| 568 |
| 569 void test_keyword_throw() { |
| 570 _assertKeywordToken("throw"); |
| 571 } |
| 572 |
| 573 void test_keyword_true() { |
| 574 _assertKeywordToken("true"); |
| 575 } |
| 576 |
| 577 void test_keyword_try() { |
| 578 _assertKeywordToken("try"); |
| 579 } |
| 580 |
| 581 void test_keyword_typedef() { |
| 582 _assertKeywordToken("typedef"); |
| 583 } |
| 584 |
| 585 void test_keyword_var() { |
| 586 _assertKeywordToken("var"); |
| 587 } |
| 588 |
| 589 void test_keyword_void() { |
| 590 _assertKeywordToken("void"); |
| 591 } |
| 592 |
| 593 void test_keyword_while() { |
| 594 _assertKeywordToken("while"); |
| 595 } |
| 596 |
| 597 void test_keyword_with() { |
| 598 _assertKeywordToken("with"); |
| 599 } |
| 600 |
| 601 void test_lineInfo_multilineComment() { |
| 602 String source = "/*\r *\r */"; |
| 603 _assertLineInfo(source, [ |
| 604 new ScannerTest_ExpectedLocation(0, 1, 1), |
| 605 new ScannerTest_ExpectedLocation(4, 2, 2), |
| 606 new ScannerTest_ExpectedLocation(source.length - 1, 3, 3) |
| 607 ]); |
| 608 } |
| 609 |
| 610 void test_lineInfo_multilineString() { |
| 611 String source = "'''a\r\nbc\r\nd'''"; |
| 612 _assertLineInfo(source, [ |
| 613 new ScannerTest_ExpectedLocation(0, 1, 1), |
| 614 new ScannerTest_ExpectedLocation(7, 2, 2), |
| 615 new ScannerTest_ExpectedLocation(source.length - 1, 3, 4) |
| 616 ]); |
| 617 } |
| 618 |
| 619 void test_lineInfo_multilineString_raw() { |
| 620 String source = "var a = r'''\nblah\n''';\n\nfoo"; |
| 621 _assertLineInfo(source, [ |
| 622 new ScannerTest_ExpectedLocation(0, 1, 1), |
| 623 new ScannerTest_ExpectedLocation(14, 2, 2), |
| 624 new ScannerTest_ExpectedLocation(source.length - 2, 5, 2) |
| 625 ]); |
| 626 } |
| 627 |
| 628 void test_lineInfo_simpleClass() { |
| 629 String source = |
| 630       "class Test {\r\n String s = '...';\r\n int get x => s.MISSING_GETTER;\r\n}"; |
| 631 _assertLineInfo(source, [ |
| 632 new ScannerTest_ExpectedLocation(0, 1, 1), |
| 633 new ScannerTest_ExpectedLocation(source.indexOf("MISSING_GETTER"), 3, 20), |
| 634 new ScannerTest_ExpectedLocation(source.length - 1, 4, 1) |
| 635 ]); |
| 636 } |
| 637 |
| 638 void test_lineInfo_slashN() { |
| 639 String source = "class Test {\n}"; |
| 640 _assertLineInfo(source, [ |
| 641 new ScannerTest_ExpectedLocation(0, 1, 1), |
| 642 new ScannerTest_ExpectedLocation(source.indexOf("}"), 2, 1) |
| 643 ]); |
| 644 } |
| 645 |
| 646 void test_lt() { |
| 647 _assertToken(TokenType.LT, "<"); |
| 648 } |
| 649 |
| 650 void test_lt_eq() { |
| 651 _assertToken(TokenType.LT_EQ, "<="); |
| 652 } |
| 653 |
| 654 void test_lt_lt() { |
| 655 _assertToken(TokenType.LT_LT, "<<"); |
| 656 } |
| 657 |
| 658 void test_lt_lt_eq() { |
| 659 _assertToken(TokenType.LT_LT_EQ, "<<="); |
| 660 } |
| 661 |
| 662 void test_minus() { |
| 663 _assertToken(TokenType.MINUS, "-"); |
| 664 } |
| 665 |
| 666 void test_minus_eq() { |
| 667 _assertToken(TokenType.MINUS_EQ, "-="); |
| 668 } |
| 669 |
| 670 void test_minus_minus() { |
| 671 _assertToken(TokenType.MINUS_MINUS, "--"); |
| 672 } |
| 673 |
| 674 void test_open_curly_bracket() { |
| 675 _assertToken(TokenType.OPEN_CURLY_BRACKET, "{"); |
| 676 } |
| 677 |
| 678 void test_open_paren() { |
| 679 _assertToken(TokenType.OPEN_PAREN, "("); |
| 680 } |
| 681 |
| 682 void test_open_square_bracket() { |
| 683 _assertToken(TokenType.OPEN_SQUARE_BRACKET, "["); |
| 684 } |
| 685 |
| 686 void test_openSquareBracket() { |
| 687 _assertToken(TokenType.OPEN_SQUARE_BRACKET, "["); |
| 688 } |
| 689 |
| 690 void test_percent() { |
| 691 _assertToken(TokenType.PERCENT, "%"); |
| 692 } |
| 693 |
| 694 void test_percent_eq() { |
| 695 _assertToken(TokenType.PERCENT_EQ, "%="); |
| 696 } |
| 697 |
| 698 void test_period() { |
| 699 _assertToken(TokenType.PERIOD, "."); |
| 700 } |
| 701 |
| 702 void test_period_period() { |
| 703 _assertToken(TokenType.PERIOD_PERIOD, ".."); |
| 704 } |
| 705 |
| 706 void test_period_period_period() { |
| 707 _assertToken(TokenType.PERIOD_PERIOD_PERIOD, "..."); |
| 708 } |
| 709 |
| 710 void test_periodAfterNumberNotIncluded_identifier() { |
| 711 _assertTokens("42.isEven()", [ |
| 712 new StringToken(TokenType.INT, "42", 0), |
| 713 new Token(TokenType.PERIOD, 2), |
| 714 new StringToken(TokenType.IDENTIFIER, "isEven", 3), |
| 715 new Token(TokenType.OPEN_PAREN, 9), |
| 716 new Token(TokenType.CLOSE_PAREN, 10) |
| 717 ]); |
| 718 } |
| 719 |
| 720 void test_periodAfterNumberNotIncluded_period() { |
| 721 _assertTokens("42..isEven()", [ |
| 722 new StringToken(TokenType.INT, "42", 0), |
| 723 new Token(TokenType.PERIOD_PERIOD, 2), |
| 724 new StringToken(TokenType.IDENTIFIER, "isEven", 4), |
| 725 new Token(TokenType.OPEN_PAREN, 10), |
| 726 new Token(TokenType.CLOSE_PAREN, 11) |
| 727 ]); |
| 728 } |
| 729 |
| 730 void test_plus() { |
| 731 _assertToken(TokenType.PLUS, "+"); |
| 732 } |
| 733 |
| 734 void test_plus_eq() { |
| 735 _assertToken(TokenType.PLUS_EQ, "+="); |
| 736 } |
| 737 |
| 738 void test_plus_plus() { |
| 739 _assertToken(TokenType.PLUS_PLUS, "++"); |
| 740 } |
| 741 |
| 742 void test_question() { |
| 743 _assertToken(TokenType.QUESTION, "?"); |
| 744 } |
| 745 |
| 746 void test_question_dot() { |
| 747 _assertToken(TokenType.QUESTION_PERIOD, "?."); |
| 748 } |
| 749 |
| 750 void test_question_question() { |
| 751 _assertToken(TokenType.QUESTION_QUESTION, "??"); |
| 752 } |
| 753 |
| 754 void test_question_question_eq() { |
| 755 _assertToken(TokenType.QUESTION_QUESTION_EQ, "??="); |
| 756 } |
| 757 |
| 758 void test_scriptTag_withArgs() { |
| 759 _assertToken(TokenType.SCRIPT_TAG, "#!/bin/dart -debug"); |
| 760 } |
| 761 |
| 762 void test_scriptTag_withoutSpace() { |
| 763 _assertToken(TokenType.SCRIPT_TAG, "#!/bin/dart"); |
| 764 } |
| 765 |
| 766 void test_scriptTag_withSpace() { |
| 767 _assertToken(TokenType.SCRIPT_TAG, "#! /bin/dart"); |
| 768 } |
| 769 |
| 770 void test_semicolon() { |
| 771 _assertToken(TokenType.SEMICOLON, ";"); |
| 772 } |
| 773 |
| 774 void test_setSourceStart() { |
| 775 int offsetDelta = 42; |
| 776 GatheringErrorListener listener = new GatheringErrorListener(); |
| 777 Scanner scanner = |
| 778 new Scanner(null, new SubSequenceReader("a", offsetDelta), listener); |
| 779 scanner.setSourceStart(3, 9); |
| 780 scanner.tokenize(); |
| 781 List<int> lineStarts = scanner.lineStarts; |
| 782 expect(lineStarts, isNotNull); |
| 783 expect(lineStarts.length, 3); |
| 784 expect(lineStarts[2], 33); |
| 785 } |
| 786 |
| 787 void test_slash() { |
| 788 _assertToken(TokenType.SLASH, "/"); |
| 789 } |
| 790 |
| 791 void test_slash_eq() { |
| 792 _assertToken(TokenType.SLASH_EQ, "/="); |
| 793 } |
| 794 |
| 795 void test_star() { |
| 796 _assertToken(TokenType.STAR, "*"); |
| 797 } |
| 798 |
| 799 void test_star_eq() { |
| 800 _assertToken(TokenType.STAR_EQ, "*="); |
| 801 } |
| 802 |
| 803 void test_startAndEnd() { |
| 804 Token token = _scan("a"); |
| 805 Token previous = token.previous; |
| 806 expect(previous.next, token); |
| 807 expect(previous.previous, previous); |
| 808 Token next = token.next; |
| 809 expect(next.next, next); |
| 810 expect(next.previous, token); |
| 811 } |
| 812 |
| 813 void test_string_multi_double() { |
| 814 _assertToken(TokenType.STRING, "\"\"\"line1\nline2\"\"\""); |
| 815 } |
| 816 |
| 817 void test_string_multi_embeddedQuotes() { |
| 818 _assertToken(TokenType.STRING, "\"\"\"line1\n\"\"\nline2\"\"\""); |
| 819 } |
| 820 |
| 821 void test_string_multi_embeddedQuotes_escapedChar() { |
| 822 _assertToken(TokenType.STRING, "\"\"\"a\"\"\\tb\"\"\""); |
| 823 } |
| 824 |
| 825 void test_string_multi_interpolation_block() { |
| 826 _assertTokens("\"Hello \${name}!\"", [ |
| 827 new StringToken(TokenType.STRING, "\"Hello ", 0), |
| 828 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 7), |
| 829 new StringToken(TokenType.IDENTIFIER, "name", 9), |
| 830 new Token(TokenType.CLOSE_CURLY_BRACKET, 13), |
| 831 new StringToken(TokenType.STRING, "!\"", 14) |
| 832 ]); |
| 833 } |
| 834 |
| 835 void test_string_multi_interpolation_identifier() { |
| 836 _assertTokens("\"Hello \$name!\"", [ |
| 837 new StringToken(TokenType.STRING, "\"Hello ", 0), |
| 838 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 7), |
| 839 new StringToken(TokenType.IDENTIFIER, "name", 8), |
| 840 new StringToken(TokenType.STRING, "!\"", 12) |
| 841 ]); |
| 842 } |
| 843 |
| 844 void test_string_multi_single() { |
| 845 _assertToken(TokenType.STRING, "'''string'''"); |
| 846 } |
| 847 |
| 848 void test_string_multi_slashEnter() { |
| 849 _assertToken(TokenType.STRING, "'''\\\n'''"); |
| 850 } |
| 851 |
| 852 void test_string_multi_unterminated() { |
| 853 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 8, |
| 854 "'''string", [new StringToken(TokenType.STRING, "'''string", 0)]); |
| 855 } |
| 856 |
| 857 void test_string_multi_unterminated_interpolation_block() { |
| 858 _assertErrorAndTokens( |
| 859 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 8, "'''\${name", [ |
| 860 new StringToken(TokenType.STRING, "'''", 0), |
| 861 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 3), |
| 862 new StringToken(TokenType.IDENTIFIER, "name", 5), |
| 863 new StringToken(TokenType.STRING, "", 9) |
| 864 ]); |
| 865 } |
| 866 |
| 867 void test_string_multi_unterminated_interpolation_identifier() { |
| 868 _assertErrorAndTokens( |
| 869 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 7, "'''\$name", [ |
| 870 new StringToken(TokenType.STRING, "'''", 0), |
| 871 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 3), |
| 872 new StringToken(TokenType.IDENTIFIER, "name", 4), |
| 873 new StringToken(TokenType.STRING, "", 8) |
| 874 ]); |
| 875 } |
| 876 |
| 877 void test_string_raw_multi_double() { |
| 878 _assertToken(TokenType.STRING, "r\"\"\"line1\nline2\"\"\""); |
| 879 } |
| 880 |
| 881 void test_string_raw_multi_single() { |
| 882 _assertToken(TokenType.STRING, "r'''string'''"); |
| 883 } |
| 884 |
| 885 void test_string_raw_multi_unterminated() { |
| 886 String source = "r'''string"; |
| 887 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 9, |
| 888 source, [new StringToken(TokenType.STRING, source, 0)]); |
| 889 } |
| 890 |
| 891 void test_string_raw_simple_double() { |
| 892 _assertToken(TokenType.STRING, "r\"string\""); |
| 893 } |
| 894 |
| 895 void test_string_raw_simple_single() { |
| 896 _assertToken(TokenType.STRING, "r'string'"); |
| 897 } |
| 898 |
| 899 void test_string_raw_simple_unterminated_eof() { |
| 900 String source = "r'string"; |
| 901 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 7, |
| 902 source, [new StringToken(TokenType.STRING, source, 0)]); |
| 903 } |
| 904 |
| 905 void test_string_raw_simple_unterminated_eol() { |
| 906 String source = "r'string"; |
| 907 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 8, |
| 908 "$source\n", [new StringToken(TokenType.STRING, source, 0)]); |
| 909 } |
| 910 |
| 911 void test_string_simple_double() { |
| 912 _assertToken(TokenType.STRING, "\"string\""); |
| 913 } |
| 914 |
| 915 void test_string_simple_escapedDollar() { |
| 916 _assertToken(TokenType.STRING, "'a\\\$b'"); |
| 917 } |
| 918 |
| 919 void test_string_simple_interpolation_adjacentIdentifiers() { |
| 920 _assertTokens("'\$a\$b'", [ |
| 921 new StringToken(TokenType.STRING, "'", 0), |
| 922 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), |
| 923 new StringToken(TokenType.IDENTIFIER, "a", 2), |
| 924 new StringToken(TokenType.STRING, "", 3), |
| 925 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 3), |
| 926 new StringToken(TokenType.IDENTIFIER, "b", 4), |
| 927 new StringToken(TokenType.STRING, "'", 5) |
| 928 ]); |
| 929 } |
| 930 |
| 931 void test_string_simple_interpolation_block() { |
| 932 _assertTokens("'Hello \${name}!'", [ |
| 933 new StringToken(TokenType.STRING, "'Hello ", 0), |
| 934 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 7), |
| 935 new StringToken(TokenType.IDENTIFIER, "name", 9), |
| 936 new Token(TokenType.CLOSE_CURLY_BRACKET, 13), |
| 937 new StringToken(TokenType.STRING, "!'", 14) |
| 938 ]); |
| 939 } |
| 940 |
| 941 void test_string_simple_interpolation_blockWithNestedMap() { |
| 942 _assertTokens("'a \${f({'b' : 'c'})} d'", [ |
| 943 new StringToken(TokenType.STRING, "'a ", 0), |
| 944 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 3), |
| 945 new StringToken(TokenType.IDENTIFIER, "f", 5), |
| 946 new Token(TokenType.OPEN_PAREN, 6), |
| 947 new Token(TokenType.OPEN_CURLY_BRACKET, 7), |
| 948 new StringToken(TokenType.STRING, "'b'", 8), |
| 949 new Token(TokenType.COLON, 12), |
| 950 new StringToken(TokenType.STRING, "'c'", 14), |
| 951 new Token(TokenType.CLOSE_CURLY_BRACKET, 17), |
| 952 new Token(TokenType.CLOSE_PAREN, 18), |
| 953 new Token(TokenType.CLOSE_CURLY_BRACKET, 19), |
| 954 new StringToken(TokenType.STRING, " d'", 20) |
| 955 ]); |
| 956 } |
| 957 |
| 958 void test_string_simple_interpolation_firstAndLast() { |
| 959 _assertTokens("'\$greeting \$name'", [ |
| 960 new StringToken(TokenType.STRING, "'", 0), |
| 961 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), |
| 962 new StringToken(TokenType.IDENTIFIER, "greeting", 2), |
| 963 new StringToken(TokenType.STRING, " ", 10), |
| 964 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 11), |
| 965 new StringToken(TokenType.IDENTIFIER, "name", 12), |
| 966 new StringToken(TokenType.STRING, "'", 16) |
| 967 ]); |
| 968 } |
| 969 |
| 970 void test_string_simple_interpolation_identifier() { |
| 971 _assertTokens("'Hello \$name!'", [ |
| 972 new StringToken(TokenType.STRING, "'Hello ", 0), |
| 973 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 7), |
| 974 new StringToken(TokenType.IDENTIFIER, "name", 8), |
| 975 new StringToken(TokenType.STRING, "!'", 12) |
| 976 ]); |
| 977 } |
| 978 |
| 979 void test_string_simple_interpolation_missingIdentifier() { |
| 980 _assertTokens("'\$x\$'", [ |
| 981 new StringToken(TokenType.STRING, "'", 0), |
| 982 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), |
| 983 new StringToken(TokenType.IDENTIFIER, "x", 2), |
| 984 new StringToken(TokenType.STRING, "", 3), |
| 985 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 3), |
| 986 new StringToken(TokenType.STRING, "'", 4) |
| 987 ]); |
| 988 } |
| 989 |
| 990 void test_string_simple_interpolation_nonIdentifier() { |
| 991 _assertTokens("'\$1'", [ |
| 992 new StringToken(TokenType.STRING, "'", 0), |
| 993 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), |
| 994 new StringToken(TokenType.STRING, "1'", 2) |
| 995 ]); |
| 996 } |
| 997 |
| 998 void test_string_simple_single() { |
| 999 _assertToken(TokenType.STRING, "'string'"); |
| 1000 } |
| 1001 |
| 1002 void test_string_simple_unterminated_eof() { |
| 1003 String source = "'string"; |
| 1004 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 6, |
| 1005 source, [new StringToken(TokenType.STRING, source, 0)]); |
| 1006 } |
| 1007 |
| 1008 void test_string_simple_unterminated_eol() { |
| 1009 String source = "'string"; |
| 1010 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 7, |
| 1011 "$source\r", [new StringToken(TokenType.STRING, source, 0)]); |
| 1012 } |
| 1013 |
| 1014 void test_string_simple_unterminated_interpolation_block() { |
| 1015 _assertErrorAndTokens( |
| 1016 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 6, "'\${name", [ |
| 1017 new StringToken(TokenType.STRING, "'", 0), |
| 1018 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 1), |
| 1019 new StringToken(TokenType.IDENTIFIER, "name", 3), |
| 1020 new StringToken(TokenType.STRING, "", 7) |
| 1021 ]); |
| 1022 } |
| 1023 |
| 1024 void test_string_simple_unterminated_interpolation_identifier() { |
| 1025 _assertErrorAndTokens( |
| 1026 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 5, "'\$name", [ |
| 1027 new StringToken(TokenType.STRING, "'", 0), |
| 1028 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), |
| 1029 new StringToken(TokenType.IDENTIFIER, "name", 2), |
| 1030 new StringToken(TokenType.STRING, "", 6) |
| 1031 ]); |
| 1032 } |
| 1033 |
| 1034 void test_tilde() { |
| 1035 _assertToken(TokenType.TILDE, "~"); |
| 1036 } |
| 1037 |
| 1038 void test_tilde_slash() { |
| 1039 _assertToken(TokenType.TILDE_SLASH, "~/"); |
| 1040 } |
| 1041 |
| 1042 void test_tilde_slash_eq() { |
| 1043 _assertToken(TokenType.TILDE_SLASH_EQ, "~/="); |
| 1044 } |
| 1045 |
| 1046 void test_unclosedPairInInterpolation() { |
| 1047 GatheringErrorListener listener = new GatheringErrorListener(); |
| 1048 _scanWithListener("'\${(}'", listener); |
| 1049 } |
| 1050 |
| 1051 void _assertComment(TokenType commentType, String source) { |
| 1052 // |
| 1053 // Test without a trailing end-of-line marker |
| 1054 // |
| 1055 Token token = _scan(source); |
| 1056 expect(token, isNotNull); |
| 1057 expect(token.type, TokenType.EOF); |
| 1058 Token comment = token.precedingComments; |
| 1059 expect(comment, isNotNull); |
| 1060 expect(comment.type, commentType); |
| 1061 expect(comment.offset, 0); |
| 1062 expect(comment.length, source.length); |
| 1063 expect(comment.lexeme, source); |
| 1064 // |
| 1065 // Test with a trailing end-of-line marker |
| 1066 // |
| 1067 token = _scan("$source\n"); |
| 1068 expect(token, isNotNull); |
| 1069 expect(token.type, TokenType.EOF); |
| 1070 comment = token.precedingComments; |
| 1071 expect(comment, isNotNull); |
| 1072 expect(comment.type, commentType); |
| 1073 expect(comment.offset, 0); |
| 1074 expect(comment.length, source.length); |
| 1075 expect(comment.lexeme, source); |
| 1076 } |
| 1077 |
| 1078 /** |
| 1079 * Assert that scanning the given [source] produces an error with the given |
| 1080 * code. |
| 1081 * |
| 1082 * [expectedError] the error that should be produced |
| 1083 * [expectedOffset] the string offset that should be associated with the error |
| 1084 * [source] the source to be scanned to produce the error |
| 1085 */ |
| 1086 void _assertError( |
| 1087 ScannerErrorCode expectedError, int expectedOffset, String source) { |
| 1088 GatheringErrorListener listener = new GatheringErrorListener(); |
| 1089 _scanWithListener(source, listener); |
| 1090 listener.assertErrors([ |
| 1091 new AnalysisError(null, expectedOffset, 1, expectedError, |
| 1092 [source.codeUnitAt(expectedOffset)]) |
| 1093 ]); |
| 1094 } |
| 1095 |
| 1096 /** |
| 1097 * Assert that scanning the given [source] produces an error with the given |
| 1098 * code, and also produces the given tokens. |
| 1099 * |
| 1100 * [expectedError] the error that should be produced |
| 1101 * [expectedOffset] the string offset that should be associated with the error |
| 1102 * [source] the source to be scanned to produce the error |
| 1103 * [expectedTokens] the tokens that are expected to be in the source |
| 1104 */ |
| 1105 void _assertErrorAndTokens(ScannerErrorCode expectedError, int expectedOffset, |
| 1106 String source, List<Token> expectedTokens) { |
| 1107 GatheringErrorListener listener = new GatheringErrorListener(); |
| 1108 Token token = _scanWithListener(source, listener); |
| 1109 listener.assertErrors([ |
| 1110 new AnalysisError(null, expectedOffset, 1, expectedError, |
| 1111 [source.codeUnitAt(expectedOffset)]) |
| 1112 ]); |
| 1113 _checkTokens(token, expectedTokens); |
| 1114 } |
| 1115 |
| 1116 /** |
| 1117 * Assert that, when scanned, the given [source] contains a single keyword token |
| 1118 * with the same lexeme as the original source. |
| 1119 */ |
| 1120 void _assertKeywordToken(String source) { |
| 1121 Token token = _scan(source); |
| 1122 expect(token, isNotNull); |
| 1123 expect(token.type, TokenType.KEYWORD); |
| 1124 expect(token.offset, 0); |
| 1125 expect(token.length, source.length); |
| 1126 expect(token.lexeme, source); |
| 1127 Object value = token.value(); |
| 1128 expect(value is Keyword, isTrue); |
| 1129 expect((value as Keyword).syntax, source); |
| 1130 token = _scan(" $source "); |
| 1131 expect(token, isNotNull); |
| 1132 expect(token.type, TokenType.KEYWORD); |
| 1133 expect(token.offset, 1); |
| 1134 expect(token.length, source.length); |
| 1135 expect(token.lexeme, source); |
| 1136 value = token.value(); |
| 1137 expect(value is Keyword, isTrue); |
| 1138 expect((value as Keyword).syntax, source); |
| 1139 expect(token.next.type, TokenType.EOF); |
| 1140 } |
| 1141 |
| 1142 void _assertLineInfo( |
| 1143 String source, List<ScannerTest_ExpectedLocation> expectedLocations) { |
| 1144 GatheringErrorListener listener = new GatheringErrorListener(); |
| 1145 _scanWithListener(source, listener); |
| 1146 listener.assertNoErrors(); |
| 1147 LineInfo info = listener.getLineInfo(new TestSource()); |
| 1148 expect(info, isNotNull); |
| 1149 int count = expectedLocations.length; |
| 1150 for (int i = 0; i < count; i++) { |
| 1151 ScannerTest_ExpectedLocation expectedLocation = expectedLocations[i]; |
| 1152 LineInfo_Location location = info.getLocation(expectedLocation._offset); |
| 1153 expect(location.lineNumber, expectedLocation._lineNumber, |
| 1154 reason: 'Line number in location $i'); |
| 1155 expect(location.columnNumber, expectedLocation._columnNumber, |
| 1156 reason: 'Column number in location $i'); |
| 1157 } |
| 1158 } |
| 1159 |
| 1160 /** |
| 1161 * Assert that the token scanned from the given [source] has the |
| 1162 * [expectedType]. |
| 1163 */ |
| 1164 Token _assertToken(TokenType expectedType, String source) { |
| 1165 Token originalToken = _scan(source); |
| 1166 expect(originalToken, isNotNull); |
| 1167 expect(originalToken.type, expectedType); |
| 1168 expect(originalToken.offset, 0); |
| 1169 expect(originalToken.length, source.length); |
| 1170 expect(originalToken.lexeme, source); |
| 1171 if (expectedType == TokenType.SCRIPT_TAG) { |
| 1172 // Adding space before the script tag is not allowed, and adding text at |
| 1173 // the end changes nothing. |
| 1174 return originalToken; |
| 1175 } else if (expectedType == TokenType.SINGLE_LINE_COMMENT) { |
| 1176 // Adding space to an end-of-line comment changes the comment. |
| 1177 Token tokenWithSpaces = _scan(" $source"); |
| 1178 expect(tokenWithSpaces, isNotNull); |
| 1179 expect(tokenWithSpaces.type, expectedType); |
| 1180 expect(tokenWithSpaces.offset, 1); |
| 1181 expect(tokenWithSpaces.length, source.length); |
| 1182 expect(tokenWithSpaces.lexeme, source); |
| 1183 return originalToken; |
| 1184 } else if (expectedType == TokenType.INT || |
| 1185 expectedType == TokenType.DOUBLE) { |
| 1186 Token tokenWithLowerD = _scan("${source}d"); |
| 1187 expect(tokenWithLowerD, isNotNull); |
| 1188 expect(tokenWithLowerD.type, expectedType); |
| 1189 expect(tokenWithLowerD.offset, 0); |
| 1190 expect(tokenWithLowerD.length, source.length); |
| 1191 expect(tokenWithLowerD.lexeme, source); |
| 1192 Token tokenWithUpperD = _scan("${source}D"); |
| 1193 expect(tokenWithUpperD, isNotNull); |
| 1194 expect(tokenWithUpperD.type, expectedType); |
| 1195 expect(tokenWithUpperD.offset, 0); |
| 1196 expect(tokenWithUpperD.length, source.length); |
| 1197 expect(tokenWithUpperD.lexeme, source); |
| 1198 } |
| 1199 Token tokenWithSpaces = _scan(" $source "); |
| 1200 expect(tokenWithSpaces, isNotNull); |
| 1201 expect(tokenWithSpaces.type, expectedType); |
| 1202 expect(tokenWithSpaces.offset, 1); |
| 1203 expect(tokenWithSpaces.length, source.length); |
| 1204 expect(tokenWithSpaces.lexeme, source); |
| 1205 expect(originalToken.next.type, TokenType.EOF); |
| 1206 return originalToken; |
| 1207 } |
| 1208 |
| 1209 /** |
| 1210 * Assert that, when scanned, the given [source] contains a sequence of tokens |
| 1211 * identical to the given list of [expectedTokens]. |
| 1212 */ |
| 1213 void _assertTokens(String source, List<Token> expectedTokens) { |
| 1214 Token token = _scan(source); |
| 1215 _checkTokens(token, expectedTokens); |
| 1216 } |
| 1217 |
| 1218 void _checkTokens(Token firstToken, List<Token> expectedTokens) { |
| 1219 expect(firstToken, isNotNull); |
| 1220 Token token = firstToken; |
| 1221 for (int i = 0; i < expectedTokens.length; i++) { |
| 1222 Token expectedToken = expectedTokens[i]; |
| 1223 expect(token.type, expectedToken.type, reason: "Wrong type for token $i"); |
| 1224 expect(token.offset, expectedToken.offset, |
| 1225 reason: "Wrong offset for token $i"); |
| 1226 expect(token.length, expectedToken.length, |
| 1227 reason: "Wrong length for token $i"); |
| 1228 expect(token.lexeme, expectedToken.lexeme, |
| 1229 reason: "Wrong lexeme for token $i"); |
| 1230 token = token.next; |
| 1231 expect(token, isNotNull); |
| 1232 } |
| 1233 expect(token.type, TokenType.EOF); |
| 1234 } |
| 1235 |
| 1236 Token _scan(String source) { |
| 1237 GatheringErrorListener listener = new GatheringErrorListener(); |
| 1238 Token token = _scanWithListener(source, listener); |
| 1239 listener.assertNoErrors(); |
| 1240 return token; |
| 1241 } |
| 1242 |
| 1243 Token _scanWithListener(String source, GatheringErrorListener listener) { |
| 1244 Scanner scanner = |
| 1245 new Scanner(null, new CharSequenceReader(source), listener); |
| 1246 Token result = scanner.tokenize(); |
| 1247 listener.setLineInfo(new TestSource(), scanner.lineStarts); |
| 1248 return result; |
| 1249 } |
| 1250 } |
| 1251 |
| 1252 /** |
| 1253 * An `ExpectedLocation` encodes information about the expected location of a |
| 1254 * given offset in source code. |
| 1255 */ |
| 1256 class ScannerTest_ExpectedLocation { |
| 1257 final int _offset; |
| 1258 |
| 1259 final int _lineNumber; |
| 1260 |
| 1261 final int _columnNumber; |
| 1262 |
| 1263 ScannerTest_ExpectedLocation( |
| 1264 this._offset, this._lineNumber, this._columnNumber); |
| 1265 } |
| 1266 |
| 1267 /** |
| 1268 * A `TokenStreamValidator` is used to validate the correct construction of a |
| 1269 * stream of tokens. |
| 1270 */ |
| 1271 class TokenStreamValidator { |
| 1272 /** |
| 1273 * Validate that the stream of tokens that starts with the given [token] is |
| 1274 * correct. |
| 1275 */ |
| 1276 void validate(Token token) { |
| 1277 StringBuffer buffer = new StringBuffer(); |
| 1278 _validateStream(buffer, token); |
| 1279 if (buffer.length > 0) { |
| 1280 fail(buffer.toString()); |
| 1281 } |
| 1282 } |
| 1283 |
| 1284 void _validateStream(StringBuffer buffer, Token token) { |
| 1285 if (token == null) { |
| 1286 return; |
| 1287 } |
| 1288 Token previousToken = null; |
| 1289 int previousEnd = -1; |
| 1290 Token currentToken = token; |
| 1291 while (currentToken != null && currentToken.type != TokenType.EOF) { |
| 1292 _validateStream(buffer, currentToken.precedingComments); |
| 1293 TokenType type = currentToken.type; |
| 1294 if (type == TokenType.OPEN_CURLY_BRACKET || |
| 1295 type == TokenType.OPEN_PAREN || |
| 1296 type == TokenType.OPEN_SQUARE_BRACKET || |
| 1297 type == TokenType.STRING_INTERPOLATION_EXPRESSION) { |
| 1298 if (currentToken is! BeginToken) { |
| 1299 buffer.write("\r\nExpected BeginToken, found "); |
| 1300 buffer.write(currentToken.runtimeType.toString()); |
| 1301 buffer.write(" "); |
| 1302 _writeToken(buffer, currentToken); |
| 1303 } |
| 1304 } |
| 1305 int currentStart = currentToken.offset; |
| 1306 int currentLength = currentToken.length; |
| 1307 int currentEnd = currentStart + currentLength - 1; |
| 1308 if (currentStart <= previousEnd) { |
| 1309 buffer.write("\r\nInvalid token sequence: "); |
| 1310 _writeToken(buffer, previousToken); |
| 1311 buffer.write(" followed by "); |
| 1312 _writeToken(buffer, currentToken); |
| 1313 } |
| 1314 previousEnd = currentEnd; |
| 1315 previousToken = currentToken; |
| 1316 currentToken = currentToken.next; |
| 1317 } |
| 1318 } |
| 1319 |
| 1320 void _writeToken(StringBuffer buffer, Token token) { |
| 1321 buffer.write("["); |
| 1322 buffer.write(token.type); |
| 1323 buffer.write(", '"); |
| 1324 buffer.write(token.lexeme); |
| 1325 buffer.write("', "); |
| 1326 buffer.write(token.offset); |
| 1327 buffer.write(", "); |
| 1328 buffer.write(token.length); |
| 1329 buffer.write("]"); |
| 1330 } |
| 1331 } |
| 1332 |
| 1333 @reflectiveTest |
| 1334 class TokenTypeTest extends EngineTestCase { |
| 1335 void test_isOperator() { |
| 1336 expect(TokenType.AMPERSAND.isOperator, isTrue); |
| 1337 expect(TokenType.AMPERSAND_AMPERSAND.isOperator, isTrue); |
| 1338 expect(TokenType.AMPERSAND_EQ.isOperator, isTrue); |
| 1339 expect(TokenType.BANG.isOperator, isTrue); |
| 1340 expect(TokenType.BANG_EQ.isOperator, isTrue); |
| 1341 expect(TokenType.BAR.isOperator, isTrue); |
| 1342 expect(TokenType.BAR_BAR.isOperator, isTrue); |
| 1343 expect(TokenType.BAR_EQ.isOperator, isTrue); |
| 1344 expect(TokenType.CARET.isOperator, isTrue); |
| 1345 expect(TokenType.CARET_EQ.isOperator, isTrue); |
| 1346 expect(TokenType.EQ.isOperator, isTrue); |
| 1347 expect(TokenType.EQ_EQ.isOperator, isTrue); |
| 1348 expect(TokenType.GT.isOperator, isTrue); |
| 1349 expect(TokenType.GT_EQ.isOperator, isTrue); |
| 1350 expect(TokenType.GT_GT.isOperator, isTrue); |
| 1351 expect(TokenType.GT_GT_EQ.isOperator, isTrue); |
| 1352 expect(TokenType.INDEX.isOperator, isTrue); |
| 1353 expect(TokenType.INDEX_EQ.isOperator, isTrue); |
| 1354 expect(TokenType.IS.isOperator, isTrue); |
| 1355 expect(TokenType.LT.isOperator, isTrue); |
| 1356 expect(TokenType.LT_EQ.isOperator, isTrue); |
| 1357 expect(TokenType.LT_LT.isOperator, isTrue); |
| 1358 expect(TokenType.LT_LT_EQ.isOperator, isTrue); |
| 1359 expect(TokenType.MINUS.isOperator, isTrue); |
| 1360 expect(TokenType.MINUS_EQ.isOperator, isTrue); |
| 1361 expect(TokenType.MINUS_MINUS.isOperator, isTrue); |
| 1362 expect(TokenType.PERCENT.isOperator, isTrue); |
| 1363 expect(TokenType.PERCENT_EQ.isOperator, isTrue); |
| 1364 expect(TokenType.PERIOD_PERIOD.isOperator, isTrue); |
| 1365 expect(TokenType.PLUS.isOperator, isTrue); |
| 1366 expect(TokenType.PLUS_EQ.isOperator, isTrue); |
| 1367 expect(TokenType.PLUS_PLUS.isOperator, isTrue); |
| 1368 expect(TokenType.QUESTION.isOperator, isTrue); |
| 1369 expect(TokenType.SLASH.isOperator, isTrue); |
| 1370 expect(TokenType.SLASH_EQ.isOperator, isTrue); |
| 1371 expect(TokenType.STAR.isOperator, isTrue); |
| 1372 expect(TokenType.STAR_EQ.isOperator, isTrue); |
| 1373 expect(TokenType.TILDE.isOperator, isTrue); |
| 1374 expect(TokenType.TILDE_SLASH.isOperator, isTrue); |
| 1375 expect(TokenType.TILDE_SLASH_EQ.isOperator, isTrue); |
| 1376 } |
| 1377 |
| 1378 void test_isUserDefinableOperator() { |
| 1379 expect(TokenType.AMPERSAND.isUserDefinableOperator, isTrue); |
| 1380 expect(TokenType.BAR.isUserDefinableOperator, isTrue); |
| 1381 expect(TokenType.CARET.isUserDefinableOperator, isTrue); |
| 1382 expect(TokenType.EQ_EQ.isUserDefinableOperator, isTrue); |
| 1383 expect(TokenType.GT.isUserDefinableOperator, isTrue); |
| 1384 expect(TokenType.GT_EQ.isUserDefinableOperator, isTrue); |
| 1385 expect(TokenType.GT_GT.isUserDefinableOperator, isTrue); |
| 1386 expect(TokenType.INDEX.isUserDefinableOperator, isTrue); |
| 1387 expect(TokenType.INDEX_EQ.isUserDefinableOperator, isTrue); |
| 1388 expect(TokenType.LT.isUserDefinableOperator, isTrue); |
| 1389 expect(TokenType.LT_EQ.isUserDefinableOperator, isTrue); |
| 1390 expect(TokenType.LT_LT.isUserDefinableOperator, isTrue); |
| 1391 expect(TokenType.MINUS.isUserDefinableOperator, isTrue); |
| 1392 expect(TokenType.PERCENT.isUserDefinableOperator, isTrue); |
| 1393 expect(TokenType.PLUS.isUserDefinableOperator, isTrue); |
| 1394 expect(TokenType.SLASH.isUserDefinableOperator, isTrue); |
| 1395 expect(TokenType.STAR.isUserDefinableOperator, isTrue); |
| 1396 expect(TokenType.TILDE.isUserDefinableOperator, isTrue); |
| 1397 expect(TokenType.TILDE_SLASH.isUserDefinableOperator, isTrue); |
| 1398 } |
| 1399 } |