OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 library analyzer.test.generated.scanner_test; | 5 library analyzer.test.generated.scanner_test; |
6 | 6 |
7 import 'package:analyzer/dart/ast/token.dart'; | 7 import 'package:analyzer/dart/ast/token.dart'; |
8 import 'package:analyzer/error/error.dart'; | |
9 import 'package:analyzer/error/listener.dart'; | |
10 import 'package:analyzer/src/dart/ast/token.dart'; | 8 import 'package:analyzer/src/dart/ast/token.dart'; |
11 import 'package:analyzer/src/dart/scanner/reader.dart'; | 9 import 'package:analyzer/src/dart/scanner/reader.dart'; |
12 import 'package:analyzer/src/dart/scanner/scanner.dart'; | 10 import 'package:analyzer/src/dart/scanner/scanner.dart'; |
13 import 'package:analyzer/src/generated/source.dart'; | 11 import 'package:analyzer/src/generated/source.dart'; |
14 import 'package:test/test.dart'; | 12 import 'package:test/test.dart'; |
15 import 'package:test_reflective_loader/test_reflective_loader.dart'; | 13 import 'package:test_reflective_loader/test_reflective_loader.dart'; |
16 | 14 |
17 import 'test_support.dart'; | 15 import 'test_support.dart'; |
18 | 16 |
19 main() { | 17 main() { |
20 defineReflectiveSuite(() { | 18 defineReflectiveSuite(() { |
21 defineReflectiveTests(CharSequenceReaderTest); | 19 defineReflectiveTests(LineInfoTest); |
22 defineReflectiveTests(KeywordStateTest); | |
23 defineReflectiveTests(ScannerTest); | |
24 defineReflectiveTests(TokenTypeTest); | |
25 }); | 20 }); |
26 } | 21 } |
27 | 22 |
28 class CharacterRangeReaderTest extends EngineTestCase { | 23 class CharacterRangeReaderTest extends EngineTestCase { |
29 void test_advance() { | 24 void test_advance() { |
30 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); | 25 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
31 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); | 26 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); |
32 expect(reader.advance(), 0x79); | 27 expect(reader.advance(), 0x79); |
33 expect(reader.advance(), 0x7A); | 28 expect(reader.advance(), 0x7A); |
34 expect(reader.advance(), 0x7A); | 29 expect(reader.advance(), 0x7A); |
(...skipping 40 matching lines...)
75 | 70 |
76 void test_setOffset() { | 71 void test_setOffset() { |
77 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); | 72 CharSequenceReader baseReader = new CharSequenceReader("xyzzy"); |
78 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); | 73 CharacterRangeReader reader = new CharacterRangeReader(baseReader, 1, 4); |
79 reader.offset = 2; | 74 reader.offset = 2; |
80 expect(reader.offset, 2); | 75 expect(reader.offset, 2); |
81 } | 76 } |
82 } | 77 } |
83 | 78 |
84 @reflectiveTest | 79 @reflectiveTest |
85 class CharSequenceReaderTest { | 80 class LineInfoTest extends EngineTestCase { |
86 void test_advance() { | |
87 CharSequenceReader reader = new CharSequenceReader("x"); | |
88 expect(reader.advance(), 0x78); | |
89 expect(reader.advance(), -1); | |
90 expect(reader.advance(), -1); | |
91 } | |
92 | |
93 void test_creation() { | |
94 expect(new CharSequenceReader("x"), isNotNull); | |
95 } | |
96 | |
97 void test_getOffset() { | |
98 CharSequenceReader reader = new CharSequenceReader("x"); | |
99 expect(reader.offset, -1); | |
100 reader.advance(); | |
101 expect(reader.offset, 0); | |
102 reader.advance(); | |
103 expect(reader.offset, 0); | |
104 } | |
105 | |
106 void test_getString() { | |
107 CharSequenceReader reader = new CharSequenceReader("xyzzy"); | |
108 reader.offset = 3; | |
109 expect(reader.getString(1, 0), "yzz"); | |
110 expect(reader.getString(2, 1), "zzy"); | |
111 } | |
112 | |
113 void test_peek() { | |
114 CharSequenceReader reader = new CharSequenceReader("xy"); | |
115 expect(reader.peek(), 0x78); | |
116 expect(reader.peek(), 0x78); | |
117 reader.advance(); | |
118 expect(reader.peek(), 0x79); | |
119 expect(reader.peek(), 0x79); | |
120 reader.advance(); | |
121 expect(reader.peek(), -1); | |
122 expect(reader.peek(), -1); | |
123 } | |
124 | |
125 void test_setOffset() { | |
126 CharSequenceReader reader = new CharSequenceReader("xyz"); | |
127 reader.offset = 2; | |
128 expect(reader.offset, 2); | |
129 } | |
130 } | |
131 | |
132 @reflectiveTest | |
133 class KeywordStateTest { | |
134 void test_KeywordState() { | |
135 // | |
136 // Generate the test data to be scanned. | |
137 // | |
138 List<Keyword> keywords = Keyword.values; | |
139 int keywordCount = keywords.length; | |
140 List<String> textToTest = new List<String>(keywordCount * 3); | |
141 for (int i = 0; i < keywordCount; i++) { | |
142 String syntax = keywords[i].syntax; | |
143 textToTest[i] = syntax; | |
144 textToTest[i + keywordCount] = "${syntax}x"; | |
145 textToTest[i + keywordCount * 2] = syntax.substring(0, syntax.length - 1); | |
146 } | |
147 // | |
148 // Scan each of the identifiers. | |
149 // | |
150 KeywordState firstState = KeywordState.KEYWORD_STATE; | |
151 for (int i = 0; i < textToTest.length; i++) { | |
152 String text = textToTest[i]; | |
153 int index = 0; | |
154 int length = text.length; | |
155 KeywordState state = firstState; | |
156 while (index < length && state != null) { | |
157 state = state.next(text.codeUnitAt(index)); | |
158 index++; | |
159 } | |
160 if (i < keywordCount) { | |
161 // keyword | |
162 expect(state, isNotNull); | |
163 expect(state.keyword(), isNotNull); | |
164 expect(state.keyword(), keywords[i]); | |
165 } else if (i < keywordCount * 2) { | |
166 // keyword + "x" | |
167 expect(state, isNull); | |
168 } else { | |
169 // keyword.substring(0, keyword.length() - 1) | |
170 expect(state, isNotNull); | |
171 } | |
172 } | |
173 } | |
174 } | |
175 | |
176 @reflectiveTest | |
177 class ScannerTest { | |
178 void fail_incomplete_string_interpolation() { | |
179 // https://code.google.com/p/dart/issues/detail?id=18073 | |
180 _assertErrorAndTokens( | |
181 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 9, "\"foo \${bar", [ | |
182 new StringToken(TokenType.STRING, "\"foo ", 0), | |
183 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 5), | |
184 new StringToken(TokenType.IDENTIFIER, "bar", 7) | |
185 ]); | |
186 } | |
187 | |
188 void test_ampersand() { | |
189 _assertToken(TokenType.AMPERSAND, "&"); | |
190 } | |
191 | |
192 void test_ampersand_ampersand() { | |
193 _assertToken(TokenType.AMPERSAND_AMPERSAND, "&&"); | |
194 } | |
195 | |
196 void test_ampersand_ampersand_eq() { | |
197 _assertToken(TokenType.AMPERSAND_AMPERSAND_EQ, "&&=", | |
198 lazyAssignmentOperators: true); | |
199 } | |
200 | |
201 void test_ampersand_eq() { | |
202 _assertToken(TokenType.AMPERSAND_EQ, "&="); | |
203 } | |
204 | |
205 void test_at() { | |
206 _assertToken(TokenType.AT, "@"); | |
207 } | |
208 | |
209 void test_backping() { | |
210 _assertToken(TokenType.BACKPING, "`"); | |
211 } | |
212 | |
213 void test_backslash() { | |
214 _assertToken(TokenType.BACKSLASH, "\\"); | |
215 } | |
216 | |
217 void test_bang() { | |
218 _assertToken(TokenType.BANG, "!"); | |
219 } | |
220 | |
221 void test_bang_eq() { | |
222 _assertToken(TokenType.BANG_EQ, "!="); | |
223 } | |
224 | |
225 void test_bar() { | |
226 _assertToken(TokenType.BAR, "|"); | |
227 } | |
228 | |
229 void test_bar_bar() { | |
230 _assertToken(TokenType.BAR_BAR, "||"); | |
231 } | |
232 | |
233 void test_bar_bar_eq() { | |
234 _assertToken(TokenType.BAR_BAR_EQ, "||=", lazyAssignmentOperators: true); | |
235 } | |
236 | |
237 void test_bar_eq() { | |
238 _assertToken(TokenType.BAR_EQ, "|="); | |
239 } | |
240 | |
241 void test_caret() { | |
242 _assertToken(TokenType.CARET, "^"); | |
243 } | |
244 | |
245 void test_caret_eq() { | |
246 _assertToken(TokenType.CARET_EQ, "^="); | |
247 } | |
248 | |
249 void test_close_curly_bracket() { | |
250 _assertToken(TokenType.CLOSE_CURLY_BRACKET, "}"); | |
251 } | |
252 | |
253 void test_close_paren() { | |
254 _assertToken(TokenType.CLOSE_PAREN, ")"); | |
255 } | |
256 | |
257 void test_close_square_bracket() { | |
258 _assertToken(TokenType.CLOSE_SQUARE_BRACKET, "]"); | |
259 } | |
260 | |
261 void test_colon() { | |
262 _assertToken(TokenType.COLON, ":"); | |
263 } | |
264 | |
265 void test_comma() { | |
266 _assertToken(TokenType.COMMA, ","); | |
267 } | |
268 | |
269 void test_comment_disabled_multi() { | |
270 Scanner scanner = new Scanner( | |
271 null, | |
272 new CharSequenceReader("/* comment */ "), | |
273 AnalysisErrorListener.NULL_LISTENER); | |
274 scanner.preserveComments = false; | |
275 Token token = scanner.tokenize(); | |
276 expect(token, isNotNull); | |
277 expect(token.precedingComments, isNull); | |
278 } | |
279 | |
280 void test_comment_generic_method_type_assign() { | |
281 _assertComment(TokenType.MULTI_LINE_COMMENT, "/*=comment*/"); | |
282 _assertComment(TokenType.GENERIC_METHOD_TYPE_ASSIGN, "/*=comment*/", | |
283 genericMethodComments: true); | |
284 } | |
285 | |
286 void test_comment_generic_method_type_list() { | |
287 _assertComment(TokenType.MULTI_LINE_COMMENT, "/*<comment>*/"); | |
288 _assertComment(TokenType.GENERIC_METHOD_TYPE_LIST, "/*<comment>*/", | |
289 genericMethodComments: true); | |
290 } | |
291 | |
292 void test_comment_multi() { | |
293 _assertComment(TokenType.MULTI_LINE_COMMENT, "/* comment */"); | |
294 } | |
295 | |
296 void test_comment_multi_lineEnds() { | |
297 String code = r''' | |
298 /** | |
299 * aa | |
300 * bbb | |
301 * c | |
302 */'''; | |
303 GatheringErrorListener listener = new GatheringErrorListener(); | |
304 Scanner scanner = new Scanner(null, new CharSequenceReader(code), listener); | |
305 scanner.tokenize(); | |
306 expect( | |
307 scanner.lineStarts, | |
308 equals(<int>[ | |
309 code.indexOf('/**'), | |
310 code.indexOf(' * aa'), | |
311 code.indexOf(' * bbb'), | |
312 code.indexOf(' * c'), | |
313 code.indexOf(' */') | |
314 ])); | |
315 } | |
316 | |
317 void test_comment_multi_unterminated() { | |
318 _assertError(ScannerErrorCode.UNTERMINATED_MULTI_LINE_COMMENT, 3, "/* x"); | |
319 } | |
320 | |
321 void test_comment_nested() { | |
322 _assertComment( | |
323 TokenType.MULTI_LINE_COMMENT, "/* comment /* within a */ comment */"); | |
324 } | |
325 | |
326 void test_comment_single() { | |
327 _assertComment(TokenType.SINGLE_LINE_COMMENT, "// comment"); | |
328 } | |
329 | |
330 void test_double_both_E() { | |
331 _assertToken(TokenType.DOUBLE, "0.123E4"); | |
332 } | |
333 | |
334 void test_double_both_e() { | |
335 _assertToken(TokenType.DOUBLE, "0.123e4"); | |
336 } | |
337 | |
338 void test_double_fraction() { | |
339 _assertToken(TokenType.DOUBLE, ".123"); | |
340 } | |
341 | |
342 void test_double_fraction_E() { | |
343 _assertToken(TokenType.DOUBLE, ".123E4"); | |
344 } | |
345 | |
346 void test_double_fraction_e() { | |
347 _assertToken(TokenType.DOUBLE, ".123e4"); | |
348 } | |
349 | |
350 void test_double_missingDigitInExponent() { | |
351 _assertError(ScannerErrorCode.MISSING_DIGIT, 1, "1e"); | |
352 } | |
353 | |
354 void test_double_whole_E() { | |
355 _assertToken(TokenType.DOUBLE, "12E4"); | |
356 } | |
357 | |
358 void test_double_whole_e() { | |
359 _assertToken(TokenType.DOUBLE, "12e4"); | |
360 } | |
361 | |
362 void test_eq() { | |
363 _assertToken(TokenType.EQ, "="); | |
364 } | |
365 | |
366 void test_eq_eq() { | |
367 _assertToken(TokenType.EQ_EQ, "=="); | |
368 } | |
369 | |
370 void test_gt() { | |
371 _assertToken(TokenType.GT, ">"); | |
372 } | |
373 | |
374 void test_gt_eq() { | |
375 _assertToken(TokenType.GT_EQ, ">="); | |
376 } | |
377 | |
378 void test_gt_gt() { | |
379 _assertToken(TokenType.GT_GT, ">>"); | |
380 } | |
381 | |
382 void test_gt_gt_eq() { | |
383 _assertToken(TokenType.GT_GT_EQ, ">>="); | |
384 } | |
385 | |
386 void test_hash() { | |
387 _assertToken(TokenType.HASH, "#"); | |
388 } | |
389 | |
390 void test_hexadecimal() { | |
391 _assertToken(TokenType.HEXADECIMAL, "0x1A2B3C"); | |
392 } | |
393 | |
394 void test_hexadecimal_missingDigit() { | |
395 _assertError(ScannerErrorCode.MISSING_HEX_DIGIT, 1, "0x"); | |
396 } | |
397 | |
398 void test_identifier() { | |
399 _assertToken(TokenType.IDENTIFIER, "result"); | |
400 } | |
401 | |
402 void test_illegalChar_cyrillicLetter_middle() { | |
403 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 5, "Shche\u0433lov"); | |
404 } | |
405 | |
406 void test_illegalChar_cyrillicLetter_start() { | |
407 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 0, "\u0429"); | |
408 } | |
409 | |
410 void test_illegalChar_nbsp() { | |
411 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 0, "\u00A0"); | |
412 } | |
413 | |
414 void test_illegalChar_notLetter() { | |
415 _assertError(ScannerErrorCode.ILLEGAL_CHARACTER, 0, "\u0312"); | |
416 } | |
417 | |
418 void test_index() { | |
419 _assertToken(TokenType.INDEX, "[]"); | |
420 } | |
421 | |
422 void test_index_eq() { | |
423 _assertToken(TokenType.INDEX_EQ, "[]="); | |
424 } | |
425 | |
426 void test_int() { | |
427 _assertToken(TokenType.INT, "123"); | |
428 } | |
429 | |
430 void test_int_initialZero() { | |
431 _assertToken(TokenType.INT, "0123"); | |
432 } | |
433 | |
434 void test_keyword_abstract() { | |
435 _assertKeywordToken("abstract"); | |
436 } | |
437 | |
438 void test_keyword_as() { | |
439 _assertKeywordToken("as"); | |
440 } | |
441 | |
442 void test_keyword_assert() { | |
443 _assertKeywordToken("assert"); | |
444 } | |
445 | |
446 void test_keyword_break() { | |
447 _assertKeywordToken("break"); | |
448 } | |
449 | |
450 void test_keyword_case() { | |
451 _assertKeywordToken("case"); | |
452 } | |
453 | |
454 void test_keyword_catch() { | |
455 _assertKeywordToken("catch"); | |
456 } | |
457 | |
458 void test_keyword_class() { | |
459 _assertKeywordToken("class"); | |
460 } | |
461 | |
462 void test_keyword_const() { | |
463 _assertKeywordToken("const"); | |
464 } | |
465 | |
466 void test_keyword_continue() { | |
467 _assertKeywordToken("continue"); | |
468 } | |
469 | |
470 void test_keyword_default() { | |
471 _assertKeywordToken("default"); | |
472 } | |
473 | |
474 void test_keyword_deferred() { | |
475 _assertKeywordToken("deferred"); | |
476 } | |
477 | |
478 void test_keyword_do() { | |
479 _assertKeywordToken("do"); | |
480 } | |
481 | |
482 void test_keyword_dynamic() { | |
483 _assertKeywordToken("dynamic"); | |
484 } | |
485 | |
486 void test_keyword_else() { | |
487 _assertKeywordToken("else"); | |
488 } | |
489 | |
490 void test_keyword_enum() { | |
491 _assertKeywordToken("enum"); | |
492 } | |
493 | |
494 void test_keyword_export() { | |
495 _assertKeywordToken("export"); | |
496 } | |
497 | |
498 void test_keyword_extends() { | |
499 _assertKeywordToken("extends"); | |
500 } | |
501 | |
502 void test_keyword_factory() { | |
503 _assertKeywordToken("factory"); | |
504 } | |
505 | |
506 void test_keyword_false() { | |
507 _assertKeywordToken("false"); | |
508 } | |
509 | |
510 void test_keyword_final() { | |
511 _assertKeywordToken("final"); | |
512 } | |
513 | |
514 void test_keyword_finally() { | |
515 _assertKeywordToken("finally"); | |
516 } | |
517 | |
518 void test_keyword_for() { | |
519 _assertKeywordToken("for"); | |
520 } | |
521 | |
522 void test_keyword_get() { | |
523 _assertKeywordToken("get"); | |
524 } | |
525 | |
526 void test_keyword_if() { | |
527 _assertKeywordToken("if"); | |
528 } | |
529 | |
530 void test_keyword_implements() { | |
531 _assertKeywordToken("implements"); | |
532 } | |
533 | |
534 void test_keyword_import() { | |
535 _assertKeywordToken("import"); | |
536 } | |
537 | |
538 void test_keyword_in() { | |
539 _assertKeywordToken("in"); | |
540 } | |
541 | |
542 void test_keyword_is() { | |
543 _assertKeywordToken("is"); | |
544 } | |
545 | |
546 void test_keyword_library() { | |
547 _assertKeywordToken("library"); | |
548 } | |
549 | |
550 void test_keyword_new() { | |
551 _assertKeywordToken("new"); | |
552 } | |
553 | |
554 void test_keyword_null() { | |
555 _assertKeywordToken("null"); | |
556 } | |
557 | |
558 void test_keyword_operator() { | |
559 _assertKeywordToken("operator"); | |
560 } | |
561 | |
562 void test_keyword_part() { | |
563 _assertKeywordToken("part"); | |
564 } | |
565 | |
566 void test_keyword_rethrow() { | |
567 _assertKeywordToken("rethrow"); | |
568 } | |
569 | |
570 void test_keyword_return() { | |
571 _assertKeywordToken("return"); | |
572 } | |
573 | |
574 void test_keyword_set() { | |
575 _assertKeywordToken("set"); | |
576 } | |
577 | |
578 void test_keyword_static() { | |
579 _assertKeywordToken("static"); | |
580 } | |
581 | |
582 void test_keyword_super() { | |
583 _assertKeywordToken("super"); | |
584 } | |
585 | |
586 void test_keyword_switch() { | |
587 _assertKeywordToken("switch"); | |
588 } | |
589 | |
590 void test_keyword_this() { | |
591 _assertKeywordToken("this"); | |
592 } | |
593 | |
594 void test_keyword_throw() { | |
595 _assertKeywordToken("throw"); | |
596 } | |
597 | |
598 void test_keyword_true() { | |
599 _assertKeywordToken("true"); | |
600 } | |
601 | |
602 void test_keyword_try() { | |
603 _assertKeywordToken("try"); | |
604 } | |
605 | |
606 void test_keyword_typedef() { | |
607 _assertKeywordToken("typedef"); | |
608 } | |
609 | |
610 void test_keyword_var() { | |
611 _assertKeywordToken("var"); | |
612 } | |
613 | |
614 void test_keyword_void() { | |
615 _assertKeywordToken("void"); | |
616 } | |
617 | |
618 void test_keyword_while() { | |
619 _assertKeywordToken("while"); | |
620 } | |
621 | |
622 void test_keyword_with() { | |
623 _assertKeywordToken("with"); | |
624 } | |
625 | |
626 void test_lineInfo_multilineComment() { | 81 void test_lineInfo_multilineComment() { |
627 String source = "/*\r *\r */"; | 82 String source = "/*\r *\r */"; |
628 _assertLineInfo(source, [ | 83 _assertLineInfo(source, [ |
629 new ScannerTest_ExpectedLocation(0, 1, 1), | 84 new ScannerTest_ExpectedLocation(0, 1, 1), |
630 new ScannerTest_ExpectedLocation(4, 2, 2), | 85 new ScannerTest_ExpectedLocation(4, 2, 2), |
631 new ScannerTest_ExpectedLocation(source.length - 1, 3, 3) | 86 new ScannerTest_ExpectedLocation(source.length - 1, 3, 3) |
632 ]); | 87 ]); |
633 } | 88 } |
634 | 89 |
635 void test_lineInfo_multilineString() { | 90 void test_lineInfo_multilineString() { |
(...skipping 25 matching lines...)
661 } | 116 } |
662 | 117 |
663 void test_lineInfo_slashN() { | 118 void test_lineInfo_slashN() { |
664 String source = "class Test {\n}"; | 119 String source = "class Test {\n}"; |
665 _assertLineInfo(source, [ | 120 _assertLineInfo(source, [ |
666 new ScannerTest_ExpectedLocation(0, 1, 1), | 121 new ScannerTest_ExpectedLocation(0, 1, 1), |
667 new ScannerTest_ExpectedLocation(source.indexOf("}"), 2, 1) | 122 new ScannerTest_ExpectedLocation(source.indexOf("}"), 2, 1) |
668 ]); | 123 ]); |
669 } | 124 } |
670 | 125 |
671 void test_lt() { | |
672 _assertToken(TokenType.LT, "<"); | |
673 } | |
674 | |
675 void test_lt_eq() { | |
676 _assertToken(TokenType.LT_EQ, "<="); | |
677 } | |
678 | |
679 void test_lt_lt() { | |
680 _assertToken(TokenType.LT_LT, "<<"); | |
681 } | |
682 | |
683 void test_lt_lt_eq() { | |
684 _assertToken(TokenType.LT_LT_EQ, "<<="); | |
685 } | |
686 | |
687 void test_minus() { | |
688 _assertToken(TokenType.MINUS, "-"); | |
689 } | |
690 | |
691 void test_minus_eq() { | |
692 _assertToken(TokenType.MINUS_EQ, "-="); | |
693 } | |
694 | |
695 void test_minus_minus() { | |
696 _assertToken(TokenType.MINUS_MINUS, "--"); | |
697 } | |
698 | |
699 void test_open_curly_bracket() { | |
700 _assertToken(TokenType.OPEN_CURLY_BRACKET, "{"); | |
701 } | |
702 | |
703 void test_open_paren() { | |
704 _assertToken(TokenType.OPEN_PAREN, "("); | |
705 } | |
706 | |
707 void test_open_square_bracket() { | |
708 _assertToken(TokenType.OPEN_SQUARE_BRACKET, "["); | |
709 } | |
710 | |
711 void test_openSquareBracket() { | |
712 _assertToken(TokenType.OPEN_SQUARE_BRACKET, "["); | |
713 } | |
714 | |
715 void test_percent() { | |
716 _assertToken(TokenType.PERCENT, "%"); | |
717 } | |
718 | |
719 void test_percent_eq() { | |
720 _assertToken(TokenType.PERCENT_EQ, "%="); | |
721 } | |
722 | |
723 void test_period() { | |
724 _assertToken(TokenType.PERIOD, "."); | |
725 } | |
726 | |
727 void test_period_period() { | |
728 _assertToken(TokenType.PERIOD_PERIOD, ".."); | |
729 } | |
730 | |
731 void test_period_period_period() { | |
732 _assertToken(TokenType.PERIOD_PERIOD_PERIOD, "..."); | |
733 } | |
734 | |
735 void test_periodAfterNumberNotIncluded_identifier() { | |
736 _assertTokens("42.isEven()", [ | |
737 new StringToken(TokenType.INT, "42", 0), | |
738 new Token(TokenType.PERIOD, 2), | |
739 new StringToken(TokenType.IDENTIFIER, "isEven", 3), | |
740 new Token(TokenType.OPEN_PAREN, 9), | |
741 new Token(TokenType.CLOSE_PAREN, 10) | |
742 ]); | |
743 } | |
744 | |
745 void test_periodAfterNumberNotIncluded_period() { | |
746 _assertTokens("42..isEven()", [ | |
747 new StringToken(TokenType.INT, "42", 0), | |
748 new Token(TokenType.PERIOD_PERIOD, 2), | |
749 new StringToken(TokenType.IDENTIFIER, "isEven", 4), | |
750 new Token(TokenType.OPEN_PAREN, 10), | |
751 new Token(TokenType.CLOSE_PAREN, 11) | |
752 ]); | |
753 } | |
754 | |
755 void test_plus() { | |
756 _assertToken(TokenType.PLUS, "+"); | |
757 } | |
758 | |
759 void test_plus_eq() { | |
760 _assertToken(TokenType.PLUS_EQ, "+="); | |
761 } | |
762 | |
763 void test_plus_plus() { | |
764 _assertToken(TokenType.PLUS_PLUS, "++"); | |
765 } | |
766 | |
767 void test_question() { | |
768 _assertToken(TokenType.QUESTION, "?"); | |
769 } | |
770 | |
771 void test_question_dot() { | |
772 _assertToken(TokenType.QUESTION_PERIOD, "?."); | |
773 } | |
774 | |
775 void test_question_question() { | |
776 _assertToken(TokenType.QUESTION_QUESTION, "??"); | |
777 } | |
778 | |
779 void test_question_question_eq() { | |
780 _assertToken(TokenType.QUESTION_QUESTION_EQ, "??="); | |
781 } | |
782 | |
783 void test_scriptTag_withArgs() { | |
784 _assertToken(TokenType.SCRIPT_TAG, "#!/bin/dart -debug"); | |
785 } | |
786 | |
787 void test_scriptTag_withoutSpace() { | |
788 _assertToken(TokenType.SCRIPT_TAG, "#!/bin/dart"); | |
789 } | |
790 | |
791 void test_scriptTag_withSpace() { | |
792 _assertToken(TokenType.SCRIPT_TAG, "#! /bin/dart"); | |
793 } | |
794 | |
795 void test_semicolon() { | |
796 _assertToken(TokenType.SEMICOLON, ";"); | |
797 } | |
798 | |
799 void test_setSourceStart() { | |
800 int offsetDelta = 42; | |
801 GatheringErrorListener listener = new GatheringErrorListener(); | |
802 Scanner scanner = | |
803 new Scanner(null, new SubSequenceReader("a", offsetDelta), listener); | |
804 scanner.setSourceStart(3, 9); | |
805 scanner.tokenize(); | |
806 List<int> lineStarts = scanner.lineStarts; | |
807 expect(lineStarts, isNotNull); | |
808 expect(lineStarts.length, 3); | |
809 expect(lineStarts[2], 33); | |
810 } | |
811 | |
812 void test_slash() { | |
813 _assertToken(TokenType.SLASH, "/"); | |
814 } | |
815 | |
816 void test_slash_eq() { | |
817 _assertToken(TokenType.SLASH_EQ, "/="); | |
818 } | |
819 | |
820 void test_star() { | |
821 _assertToken(TokenType.STAR, "*"); | |
822 } | |
823 | |
824 void test_star_eq() { | |
825 _assertToken(TokenType.STAR_EQ, "*="); | |
826 } | |
827 | |
828 void test_startAndEnd() { | |
829 Token token = _scan("a"); | |
830 Token previous = token.previous; | |
831 expect(previous.next, token); | |
832 expect(previous.previous, previous); | |
833 Token next = token.next; | |
834 expect(next.next, next); | |
835 expect(next.previous, token); | |
836 } | |
837 | |
838 void test_string_multi_double() { | |
839 _assertToken(TokenType.STRING, "\"\"\"line1\nline2\"\"\""); | |
840 } | |
841 | |
842 void test_string_multi_embeddedQuotes() { | |
843 _assertToken(TokenType.STRING, "\"\"\"line1\n\"\"\nline2\"\"\""); | |
844 } | |
845 | |
846 void test_string_multi_embeddedQuotes_escapedChar() { | |
847 _assertToken(TokenType.STRING, "\"\"\"a\"\"\\tb\"\"\""); | |
848 } | |
849 | |
850 void test_string_multi_interpolation_block() { | |
851 _assertTokens("\"Hello \${name}!\"", [ | |
852 new StringToken(TokenType.STRING, "\"Hello ", 0), | |
853 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 7), | |
854 new StringToken(TokenType.IDENTIFIER, "name", 9), | |
855 new Token(TokenType.CLOSE_CURLY_BRACKET, 13), | |
856 new StringToken(TokenType.STRING, "!\"", 14) | |
857 ]); | |
858 } | |
859 | |
860 void test_string_multi_interpolation_identifier() { | |
861 _assertTokens("\"Hello \$name!\"", [ | |
862 new StringToken(TokenType.STRING, "\"Hello ", 0), | |
863 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 7), | |
864 new StringToken(TokenType.IDENTIFIER, "name", 8), | |
865 new StringToken(TokenType.STRING, "!\"", 12) | |
866 ]); | |
867 } | |
868 | |
869 void test_string_multi_single() { | |
870 _assertToken(TokenType.STRING, "'''string'''"); | |
871 } | |
872 | |
873 void test_string_multi_slashEnter() { | |
874 _assertToken(TokenType.STRING, "'''\\\n'''"); | |
875 } | |
876 | |
877 void test_string_multi_unterminated() { | |
878 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 8, | |
879 "'''string", [new StringToken(TokenType.STRING, "'''string", 0)]); | |
880 } | |
881 | |
882 void test_string_multi_unterminated_interpolation_block() { | |
883 _assertErrorAndTokens( | |
884 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 8, "'''\${name", [ | |
885 new StringToken(TokenType.STRING, "'''", 0), | |
886 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 3), | |
887 new StringToken(TokenType.IDENTIFIER, "name", 5), | |
888 new StringToken(TokenType.STRING, "", 9) | |
889 ]); | |
890 } | |
891 | |
892 void test_string_multi_unterminated_interpolation_identifier() { | |
893 _assertErrorAndTokens( | |
894 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 7, "'''\$name", [ | |
895 new StringToken(TokenType.STRING, "'''", 0), | |
896 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 3), | |
897 new StringToken(TokenType.IDENTIFIER, "name", 4), | |
898 new StringToken(TokenType.STRING, "", 8) | |
899 ]); | |
900 } | |
901 | |
902 void test_string_raw_multi_double() { | |
903 _assertToken(TokenType.STRING, "r\"\"\"line1\nline2\"\"\""); | |
904 } | |
905 | |
906 void test_string_raw_multi_single() { | |
907 _assertToken(TokenType.STRING, "r'''string'''"); | |
908 } | |
909 | |
910 void test_string_raw_multi_unterminated() { | |
911 String source = "r'''string"; | |
912 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 9, | |
913 source, [new StringToken(TokenType.STRING, source, 0)]); | |
914 } | |
915 | |
916 void test_string_raw_simple_double() { | |
917 _assertToken(TokenType.STRING, "r\"string\""); | |
918 } | |
919 | |
920 void test_string_raw_simple_single() { | |
921 _assertToken(TokenType.STRING, "r'string'"); | |
922 } | |
923 | |
924 void test_string_raw_simple_unterminated_eof() { | |
925 String source = "r'string"; | |
926 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 7, | |
927 source, [new StringToken(TokenType.STRING, source, 0)]); | |
928 } | |
929 | |
930 void test_string_raw_simple_unterminated_eol() { | |
931 String source = "r'string"; | |
932 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 8, | |
933 "$source\n", [new StringToken(TokenType.STRING, source, 0)]); | |
934 } | |
935 | |
936 void test_string_simple_double() { | |
937 _assertToken(TokenType.STRING, "\"string\""); | |
938 } | |
939 | |
940 void test_string_simple_escapedDollar() { | |
941 _assertToken(TokenType.STRING, "'a\\\$b'"); | |
942 } | |
943 | |
944 void test_string_simple_interpolation_adjacentIdentifiers() { | |
945 _assertTokens("'\$a\$b'", [ | |
946 new StringToken(TokenType.STRING, "'", 0), | |
947 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), | |
948 new StringToken(TokenType.IDENTIFIER, "a", 2), | |
949 new StringToken(TokenType.STRING, "", 3), | |
950 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 3), | |
951 new StringToken(TokenType.IDENTIFIER, "b", 4), | |
952 new StringToken(TokenType.STRING, "'", 5) | |
953 ]); | |
954 } | |
955 | |
956 void test_string_simple_interpolation_block() { | |
957 _assertTokens("'Hello \${name}!'", [ | |
958 new StringToken(TokenType.STRING, "'Hello ", 0), | |
959 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 7), | |
960 new StringToken(TokenType.IDENTIFIER, "name", 9), | |
961 new Token(TokenType.CLOSE_CURLY_BRACKET, 13), | |
962 new StringToken(TokenType.STRING, "!'", 14) | |
963 ]); | |
964 } | |
965 | |
966 void test_string_simple_interpolation_blockWithNestedMap() { | |
967 _assertTokens("'a \${f({'b' : 'c'})} d'", [ | |
968 new StringToken(TokenType.STRING, "'a ", 0), | |
969 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 3), | |
970 new StringToken(TokenType.IDENTIFIER, "f", 5), | |
971 new Token(TokenType.OPEN_PAREN, 6), | |
972 new Token(TokenType.OPEN_CURLY_BRACKET, 7), | |
973 new StringToken(TokenType.STRING, "'b'", 8), | |
974 new Token(TokenType.COLON, 12), | |
975 new StringToken(TokenType.STRING, "'c'", 14), | |
976 new Token(TokenType.CLOSE_CURLY_BRACKET, 17), | |
977 new Token(TokenType.CLOSE_PAREN, 18), | |
978 new Token(TokenType.CLOSE_CURLY_BRACKET, 19), | |
979 new StringToken(TokenType.STRING, " d'", 20) | |
980 ]); | |
981 } | |
982 | |
983 void test_string_simple_interpolation_firstAndLast() { | |
984 _assertTokens("'\$greeting \$name'", [ | |
985 new StringToken(TokenType.STRING, "'", 0), | |
986 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), | |
987 new StringToken(TokenType.IDENTIFIER, "greeting", 2), | |
988 new StringToken(TokenType.STRING, " ", 10), | |
989 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 11), | |
990 new StringToken(TokenType.IDENTIFIER, "name", 12), | |
991 new StringToken(TokenType.STRING, "'", 16) | |
992 ]); | |
993 } | |
994 | |
995 void test_string_simple_interpolation_identifier() { | |
996 _assertTokens("'Hello \$name!'", [ | |
997 new StringToken(TokenType.STRING, "'Hello ", 0), | |
998 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 7), | |
999 new StringToken(TokenType.IDENTIFIER, "name", 8), | |
1000 new StringToken(TokenType.STRING, "!'", 12) | |
1001 ]); | |
1002 } | |
1003 | |
1004 void test_string_simple_interpolation_missingIdentifier() { | |
1005 _assertTokens("'\$x\$'", [ | |
1006 new StringToken(TokenType.STRING, "'", 0), | |
1007 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), | |
1008 new StringToken(TokenType.IDENTIFIER, "x", 2), | |
1009 new StringToken(TokenType.STRING, "", 3), | |
1010 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 3), | |
1011 new StringToken(TokenType.STRING, "'", 4) | |
1012 ]); | |
1013 } | |
1014 | |
1015 void test_string_simple_interpolation_nonIdentifier() { | |
1016 _assertTokens("'\$1'", [ | |
1017 new StringToken(TokenType.STRING, "'", 0), | |
1018 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), | |
1019 new StringToken(TokenType.STRING, "1'", 2) | |
1020 ]); | |
1021 } | |
1022 | |
1023 void test_string_simple_single() { | |
1024 _assertToken(TokenType.STRING, "'string'"); | |
1025 } | |
1026 | |
1027 void test_string_simple_unterminated_eof() { | |
1028 String source = "'string"; | |
1029 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 6, | |
1030 source, [new StringToken(TokenType.STRING, source, 0)]); | |
1031 } | |
1032 | |
1033 void test_string_simple_unterminated_eol() { | |
1034 String source = "'string"; | |
1035 _assertErrorAndTokens(ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 7, | |
1036 "$source\r", [new StringToken(TokenType.STRING, source, 0)]); | |
1037 } | |
1038 | |
1039 void test_string_simple_unterminated_interpolation_block() { | |
1040 _assertErrorAndTokens( | |
1041 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 6, "'\${name", [ | |
1042 new StringToken(TokenType.STRING, "'", 0), | |
1043 new StringToken(TokenType.STRING_INTERPOLATION_EXPRESSION, "\${", 1), | |
1044 new StringToken(TokenType.IDENTIFIER, "name", 3), | |
1045 new StringToken(TokenType.STRING, "", 7) | |
1046 ]); | |
1047 } | |
1048 | |
1049 void test_string_simple_unterminated_interpolation_identifier() { | |
1050 _assertErrorAndTokens( | |
1051 ScannerErrorCode.UNTERMINATED_STRING_LITERAL, 5, "'\$name", [ | |
1052 new StringToken(TokenType.STRING, "'", 0), | |
1053 new StringToken(TokenType.STRING_INTERPOLATION_IDENTIFIER, "\$", 1), | |
1054 new StringToken(TokenType.IDENTIFIER, "name", 2), | |
1055 new StringToken(TokenType.STRING, "", 6) | |
1056 ]); | |
1057 } | |
1058 | |
1059 void test_tilde() { | |
1060 _assertToken(TokenType.TILDE, "~"); | |
1061 } | |
1062 | |
1063 void test_tilde_slash() { | |
1064 _assertToken(TokenType.TILDE_SLASH, "~/"); | |
1065 } | |
1066 | |
1067 void test_tilde_slash_eq() { | |
1068 _assertToken(TokenType.TILDE_SLASH_EQ, "~/="); | |
1069 } | |
1070 | |
1071 void test_unclosedPairInInterpolation() { | |
1072 GatheringErrorListener listener = new GatheringErrorListener(); | |
1073 _scanWithListener("'\${(}'", listener); | |
1074 } | |
1075 | |
1076 void _assertComment(TokenType commentType, String source, | |
1077 {bool genericMethodComments: false}) { | |
1078 // | |
1079 // Test without a trailing end-of-line marker | |
1080 // | |
1081 Token token = _scan(source, genericMethodComments: genericMethodComments); | |
1082 expect(token, isNotNull); | |
1083 expect(token.type, TokenType.EOF); | |
1084 Token comment = token.precedingComments; | |
1085 expect(comment, isNotNull); | |
1086 expect(comment.type, commentType); | |
1087 expect(comment.offset, 0); | |
1088 expect(comment.length, source.length); | |
1089 expect(comment.lexeme, source); | |
1090 // | |
1091 // Test with a trailing end-of-line marker | |
1092 // | |
1093 token = _scan("$source\n", genericMethodComments: genericMethodComments); | |
1094 expect(token, isNotNull); | |
1095 expect(token.type, TokenType.EOF); | |
1096 comment = token.precedingComments; | |
1097 expect(comment, isNotNull); | |
1098 expect(comment.type, commentType); | |
1099 expect(comment.offset, 0); | |
1100 expect(comment.length, source.length); | |
1101 expect(comment.lexeme, source); | |
1102 } | |
1103 | |
1104 /** | |
1105 * Assert that scanning the given [source] produces an error with the given | |
1106 * code. | |
1107 * | |
1108 * [expectedError] the error that should be produced | |
1109 * [expectedOffset] the string offset that should be associated with the error | |
1110 * [source] the source to be scanned to produce the error | |
1111 */ | |
1112 void _assertError( | |
1113 ScannerErrorCode expectedError, int expectedOffset, String source) { | |
1114 GatheringErrorListener listener = new GatheringErrorListener(); | |
1115 _scanWithListener(source, listener); | |
1116 listener.assertErrors([ | |
1117 new AnalysisError(null, expectedOffset, 1, expectedError, | |
1118 [source.codeUnitAt(expectedOffset)]) | |
1119 ]); | |
1120 } | |
1121 | |
1122 /** | |
1123 * Assert that scanning the given [source] produces an error with the given | |
1124 * code, and also produces the given tokens. | |
1125 * | |
1126 * [expectedError] the error that should be produced | |
1127 * [expectedOffset] the string offset that should be associated with the error | |
1128 * [source] the source to be scanned to produce the error | |
1129 * [expectedTokens] the tokens that are expected to be in the source | |
1130 */ | |
1131 void _assertErrorAndTokens(ScannerErrorCode expectedError, int expectedOffset, | |
1132 String source, List<Token> expectedTokens) { | |
1133 GatheringErrorListener listener = new GatheringErrorListener(); | |
1134 Token token = _scanWithListener(source, listener); | |
1135 listener.assertErrors([ | |
1136 new AnalysisError(null, expectedOffset, 1, expectedError, | |
1137 [source.codeUnitAt(expectedOffset)]) | |
1138 ]); | |
1139 _checkTokens(token, expectedTokens); | |
1140 } | |
1141 | |
1142 /** | |
1143 * Assert that, when scanned, the given [source] contains a single keyword token | |
1144 * with the same lexeme as the original source. | |
1145 */ | |
1146 void _assertKeywordToken(String source) { | |
1147 Token token = _scan(source); | |
1148 expect(token, isNotNull); | |
1149 expect(token.type, TokenType.KEYWORD); | |
1150 expect(token.offset, 0); | |
1151 expect(token.length, source.length); | |
1152 expect(token.lexeme, source); | |
1153 Object value = token.value(); | |
1154 expect(value is Keyword, isTrue); | |
1155 expect((value as Keyword).syntax, source); | |
1156 token = _scan(" $source "); | |
1157 expect(token, isNotNull); | |
1158 expect(token.type, TokenType.KEYWORD); | |
1159 expect(token.offset, 1); | |
1160 expect(token.length, source.length); | |
1161 expect(token.lexeme, source); | |
1162 value = token.value(); | |
1163 expect(value is Keyword, isTrue); | |
1164 expect((value as Keyword).syntax, source); | |
1165 expect(token.next.type, TokenType.EOF); | |
1166 } | |
1167 | |
1168 void _assertLineInfo( | 126 void _assertLineInfo( |
1169 String source, List<ScannerTest_ExpectedLocation> expectedLocations) { | 127 String source, List<ScannerTest_ExpectedLocation> expectedLocations) { |
1170 GatheringErrorListener listener = new GatheringErrorListener(); | 128 GatheringErrorListener listener = new GatheringErrorListener(); |
1171 _scanWithListener(source, listener); | 129 _scanWithListener(source, listener); |
1172 listener.assertNoErrors(); | 130 listener.assertNoErrors(); |
1173 LineInfo info = listener.getLineInfo(new TestSource()); | 131 LineInfo info = listener.getLineInfo(new TestSource()); |
1174 expect(info, isNotNull); | 132 expect(info, isNotNull); |
1175 int count = expectedLocations.length; | 133 int count = expectedLocations.length; |
1176 for (int i = 0; i < count; i++) { | 134 for (int i = 0; i < count; i++) { |
1177 ScannerTest_ExpectedLocation expectedLocation = expectedLocations[i]; | 135 ScannerTest_ExpectedLocation expectedLocation = expectedLocations[i]; |
1178 LineInfo_Location location = info.getLocation(expectedLocation._offset); | 136 LineInfo_Location location = info.getLocation(expectedLocation._offset); |
1179 expect(location.lineNumber, expectedLocation._lineNumber, | 137 expect(location.lineNumber, expectedLocation._lineNumber, |
1180 reason: 'Line number in location $i'); | 138 reason: 'Line number in location $i'); |
1181 expect(location.columnNumber, expectedLocation._columnNumber, | 139 expect(location.columnNumber, expectedLocation._columnNumber, |
1182 reason: 'Column number in location $i'); | 140 reason: 'Column number in location $i'); |
1183 } | 141 } |
1184 } | 142 } |
1185 | 143 |
1186 /** | |
1187 * Assert that the token scanned from the given [source] has the | |
1188 * [expectedType]. | |
1189 */ | |
1190 Token _assertToken(TokenType expectedType, String source, | |
1191 {bool lazyAssignmentOperators: false}) { | |
1192 Token originalToken = | |
1193 _scan(source, lazyAssignmentOperators: lazyAssignmentOperators); | |
1194 expect(originalToken, isNotNull); | |
1195 expect(originalToken.type, expectedType); | |
1196 expect(originalToken.offset, 0); | |
1197 expect(originalToken.length, source.length); | |
1198 expect(originalToken.lexeme, source); | |
1199 if (expectedType == TokenType.SCRIPT_TAG) { | |
1200 // Adding space before the script tag is not allowed, and adding text at | |
1201 // the end changes nothing. | |
1202 return originalToken; | |
1203 } else if (expectedType == TokenType.SINGLE_LINE_COMMENT) { | |
1204 // Adding space to an end-of-line comment changes the comment. | |
1205 Token tokenWithSpaces = | |
1206 _scan(" $source", lazyAssignmentOperators: lazyAssignmentOperators); | |
1207 expect(tokenWithSpaces, isNotNull); | |
1208 expect(tokenWithSpaces.type, expectedType); | |
1209 expect(tokenWithSpaces.offset, 1); | |
1210 expect(tokenWithSpaces.length, source.length); | |
1211 expect(tokenWithSpaces.lexeme, source); | |
1212 return originalToken; | |
1213 } else if (expectedType == TokenType.INT || | |
1214 expectedType == TokenType.DOUBLE) { | |
1215 Token tokenWithLowerD = | |
1216 _scan("${source}d", lazyAssignmentOperators: lazyAssignmentOperators); | |
1217 expect(tokenWithLowerD, isNotNull); | |
1218 expect(tokenWithLowerD.type, expectedType); | |
1219 expect(tokenWithLowerD.offset, 0); | |
1220 expect(tokenWithLowerD.length, source.length); | |
1221 expect(tokenWithLowerD.lexeme, source); | |
1222 Token tokenWithUpperD = | |
1223 _scan("${source}D", lazyAssignmentOperators: lazyAssignmentOperators); | |
1224 expect(tokenWithUpperD, isNotNull); | |
1225 expect(tokenWithUpperD.type, expectedType); | |
1226 expect(tokenWithUpperD.offset, 0); | |
1227 expect(tokenWithUpperD.length, source.length); | |
1228 expect(tokenWithUpperD.lexeme, source); | |
1229 } | |
1230 Token tokenWithSpaces = | |
1231 _scan(" $source ", lazyAssignmentOperators: lazyAssignmentOperators); | |
1232 expect(tokenWithSpaces, isNotNull); | |
1233 expect(tokenWithSpaces.type, expectedType); | |
1234 expect(tokenWithSpaces.offset, 1); | |
1235 expect(tokenWithSpaces.length, source.length); | |
1236 expect(tokenWithSpaces.lexeme, source); | |
1237 expect(originalToken.next.type, TokenType.EOF); | |
1238 return originalToken; | |
1239 } | |
1240 | |
1241 /** | |
1242 * Assert that, when scanned, the given [source] contains a sequence of tokens | |
1243 * identical to the given list of [expectedTokens]. | |
1244 */ | |
1245 void _assertTokens(String source, List<Token> expectedTokens) { | |
1246 Token token = _scan(source); | |
1247 _checkTokens(token, expectedTokens); | |
1248 } | |
1249 | |
1250 void _checkTokens(Token firstToken, List<Token> expectedTokens) { | |
1251 expect(firstToken, isNotNull); | |
1252 Token token = firstToken; | |
1253 for (int i = 0; i < expectedTokens.length; i++) { | |
1254 Token expectedToken = expectedTokens[i]; | |
1255 expect(token.type, expectedToken.type, reason: "Wrong type for token $i"); | |
1256 expect(token.offset, expectedToken.offset, | |
1257 reason: "Wrong offset for token $i"); | |
1258 expect(token.length, expectedToken.length, | |
1259 reason: "Wrong length for token $i"); | |
1260 expect(token.lexeme, expectedToken.lexeme, | |
1261 reason: "Wrong lexeme for token $i"); | |
1262 token = token.next; | |
1263 expect(token, isNotNull); | |
1264 } | |
1265 expect(token.type, TokenType.EOF); | |
1266 } | |
1267 | |
1268 Token _scan(String source, | |
1269 {bool genericMethodComments: false, | |
1270 bool lazyAssignmentOperators: false}) { | |
1271 GatheringErrorListener listener = new GatheringErrorListener(); | |
1272 Token token = _scanWithListener(source, listener, | |
1273 genericMethodComments: genericMethodComments, | |
1274 lazyAssignmentOperators: lazyAssignmentOperators); | |
1275 listener.assertNoErrors(); | |
1276 return token; | |
1277 } | |
1278 | |
1279 Token _scanWithListener(String source, GatheringErrorListener listener, | 144 Token _scanWithListener(String source, GatheringErrorListener listener, |
1280 {bool genericMethodComments: false, | 145 {bool genericMethodComments: false, |
1281 bool lazyAssignmentOperators: false}) { | 146 bool lazyAssignmentOperators: false}) { |
1282 Scanner scanner = | 147 Scanner scanner = |
1283 new Scanner(null, new CharSequenceReader(source), listener); | 148 new Scanner(null, new CharSequenceReader(source), listener); |
1284 scanner.scanGenericMethodComments = genericMethodComments; | 149 scanner.scanGenericMethodComments = genericMethodComments; |
1285 scanner.scanLazyAssignmentOperators = lazyAssignmentOperators; | 150 scanner.scanLazyAssignmentOperators = lazyAssignmentOperators; |
1286 Token result = scanner.tokenize(); | 151 Token result = scanner.tokenize(); |
1287 listener.setLineInfo(new TestSource(), scanner.lineStarts); | 152 listener.setLineInfo(new TestSource(), scanner.lineStarts); |
1288 return result; | 153 return result; |
(...skipping 73 matching lines...)
1362 buffer.write(token.type); | 227 buffer.write(token.type); |
1363 buffer.write(", '"); | 228 buffer.write(", '"); |
1364 buffer.write(token.lexeme); | 229 buffer.write(token.lexeme); |
1365 buffer.write("', "); | 230 buffer.write("', "); |
1366 buffer.write(token.offset); | 231 buffer.write(token.offset); |
1367 buffer.write(", "); | 232 buffer.write(", "); |
1368 buffer.write(token.length); | 233 buffer.write(token.length); |
1369 buffer.write("]"); | 234 buffer.write("]"); |
1370 } | 235 } |
1371 } | 236 } |
1372 | |
1373 @reflectiveTest | |
1374 class TokenTypeTest extends EngineTestCase { | |
1375 void test_isOperator() { | |
1376 expect(TokenType.AMPERSAND.isOperator, isTrue); | |
1377 expect(TokenType.AMPERSAND_AMPERSAND.isOperator, isTrue); | |
1378 expect(TokenType.AMPERSAND_EQ.isOperator, isTrue); | |
1379 expect(TokenType.BANG.isOperator, isTrue); | |
1380 expect(TokenType.BANG_EQ.isOperator, isTrue); | |
1381 expect(TokenType.BAR.isOperator, isTrue); | |
1382 expect(TokenType.BAR_BAR.isOperator, isTrue); | |
1383 expect(TokenType.BAR_EQ.isOperator, isTrue); | |
1384 expect(TokenType.CARET.isOperator, isTrue); | |
1385 expect(TokenType.CARET_EQ.isOperator, isTrue); | |
1386 expect(TokenType.EQ.isOperator, isTrue); | |
1387 expect(TokenType.EQ_EQ.isOperator, isTrue); | |
1388 expect(TokenType.GT.isOperator, isTrue); | |
1389 expect(TokenType.GT_EQ.isOperator, isTrue); | |
1390 expect(TokenType.GT_GT.isOperator, isTrue); | |
1391 expect(TokenType.GT_GT_EQ.isOperator, isTrue); | |
1392 expect(TokenType.INDEX.isOperator, isTrue); | |
1393 expect(TokenType.INDEX_EQ.isOperator, isTrue); | |
1394 expect(TokenType.IS.isOperator, isTrue); | |
1395 expect(TokenType.LT.isOperator, isTrue); | |
1396 expect(TokenType.LT_EQ.isOperator, isTrue); | |
1397 expect(TokenType.LT_LT.isOperator, isTrue); | |
1398 expect(TokenType.LT_LT_EQ.isOperator, isTrue); | |
1399 expect(TokenType.MINUS.isOperator, isTrue); | |
1400 expect(TokenType.MINUS_EQ.isOperator, isTrue); | |
1401 expect(TokenType.MINUS_MINUS.isOperator, isTrue); | |
1402 expect(TokenType.PERCENT.isOperator, isTrue); | |
1403 expect(TokenType.PERCENT_EQ.isOperator, isTrue); | |
1404 expect(TokenType.PERIOD_PERIOD.isOperator, isTrue); | |
1405 expect(TokenType.PLUS.isOperator, isTrue); | |
1406 expect(TokenType.PLUS_EQ.isOperator, isTrue); | |
1407 expect(TokenType.PLUS_PLUS.isOperator, isTrue); | |
1408 expect(TokenType.QUESTION.isOperator, isTrue); | |
1409 expect(TokenType.SLASH.isOperator, isTrue); | |
1410 expect(TokenType.SLASH_EQ.isOperator, isTrue); | |
1411 expect(TokenType.STAR.isOperator, isTrue); | |
1412 expect(TokenType.STAR_EQ.isOperator, isTrue); | |
1413 expect(TokenType.TILDE.isOperator, isTrue); | |
1414 expect(TokenType.TILDE_SLASH.isOperator, isTrue); | |
1415 expect(TokenType.TILDE_SLASH_EQ.isOperator, isTrue); | |
1416 } | |
1417 | |
1418 void test_isUserDefinableOperator() { | |
1419 expect(TokenType.AMPERSAND.isUserDefinableOperator, isTrue); | |
1420 expect(TokenType.BAR.isUserDefinableOperator, isTrue); | |
1421 expect(TokenType.CARET.isUserDefinableOperator, isTrue); | |
1422 expect(TokenType.EQ_EQ.isUserDefinableOperator, isTrue); | |
1423 expect(TokenType.GT.isUserDefinableOperator, isTrue); | |
1424 expect(TokenType.GT_EQ.isUserDefinableOperator, isTrue); | |
1425 expect(TokenType.GT_GT.isUserDefinableOperator, isTrue); | |
1426 expect(TokenType.INDEX.isUserDefinableOperator, isTrue); | |
1427 expect(TokenType.INDEX_EQ.isUserDefinableOperator, isTrue); | |
1428 expect(TokenType.LT.isUserDefinableOperator, isTrue); | |
1429 expect(TokenType.LT_EQ.isUserDefinableOperator, isTrue); | |
1430 expect(TokenType.LT_LT.isUserDefinableOperator, isTrue); | |
1431 expect(TokenType.MINUS.isUserDefinableOperator, isTrue); | |
1432 expect(TokenType.PERCENT.isUserDefinableOperator, isTrue); | |
1433 expect(TokenType.PLUS.isUserDefinableOperator, isTrue); | |
1434 expect(TokenType.SLASH.isUserDefinableOperator, isTrue); | |
1435 expect(TokenType.STAR.isUserDefinableOperator, isTrue); | |
1436 expect(TokenType.TILDE.isUserDefinableOperator, isTrue); | |
1437 expect(TokenType.TILDE_SLASH.isUserDefinableOperator, isTrue); | |
1438 } | |
1439 } | |