| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library dart2js.scanner; | 5 library dart2js.scanner; |
| 6 | 6 |
| 7 import '../io/source_file.dart' show SourceFile, Utf8BytesSourceFile; | 7 import '../io/source_file.dart' show SourceFile, Utf8BytesSourceFile; |
| 8 import '../tokens/keyword.dart' show Keyword, KeywordState; | 8 import '../tokens/keyword.dart' show Keyword, KeywordState; |
| 9 import '../tokens/precedence.dart'; | 9 import '../tokens/precedence.dart'; |
| 10 import '../tokens/precedence_constants.dart'; | 10 import '../tokens/precedence_constants.dart'; |
| (...skipping 617 matching lines...) |
| 628 if (identical(next, $PERIOD)) { | 628 if (identical(next, $PERIOD)) { |
| 629 int nextnext = peek(); | 629 int nextnext = peek(); |
| 630 if ($0 <= nextnext && nextnext <= $9) { | 630 if ($0 <= nextnext && nextnext <= $9) { |
| 631 return tokenizeFractionPart(advance(), start); | 631 return tokenizeFractionPart(advance(), start); |
| 632 } | 632 } |
| 633 } | 633 } |
| 634 appendSubstringToken(INT_INFO, start, true); | 634 appendSubstringToken(INT_INFO, start, true); |
| 635 return next; | 635 return next; |
| 636 } | 636 } |
| 637 } | 637 } |
| 638 return null; | |
| 639 } | 638 } |
| 640 | 639 |
| 641 int tokenizeHexOrNumber(int next) { | 640 int tokenizeHexOrNumber(int next) { |
| 642 int x = peek(); | 641 int x = peek(); |
| 643 if (identical(x, $x) || identical(x, $X)) { | 642 if (identical(x, $x) || identical(x, $X)) { |
| 644 return tokenizeHex(next); | 643 return tokenizeHex(next); |
| 645 } | 644 } |
| 646 return tokenizeNumber(next); | 645 return tokenizeNumber(next); |
| 647 } | 646 } |
| 648 | 647 |
| 649 int tokenizeHex(int next) { | 648 int tokenizeHex(int next) { |
| 650 int start = scanOffset; | 649 int start = scanOffset; |
| 651 next = advance(); // Advance past the $x or $X. | 650 next = advance(); // Advance past the $x or $X. |
| 652 bool hasDigits = false; | 651 bool hasDigits = false; |
| 653 while (true) { | 652 while (true) { |
| 654 next = advance(); | 653 next = advance(); |
| 655 if (($0 <= next && next <= $9) || | 654 if (($0 <= next && next <= $9) || |
| 656 ($A <= next && next <= $F) || | 655 ($A <= next && next <= $F) || |
| 657 ($a <= next && next <= $f)) { | 656 ($a <= next && next <= $f)) { |
| 658 hasDigits = true; | 657 hasDigits = true; |
| 659 } else { | 658 } else { |
| 660 if (!hasDigits) { | 659 if (!hasDigits) { |
| 661 unterminated('0x', shouldAdvance: false); | 660 unterminated('0x', shouldAdvance: false); |
| 662 return next; | 661 return next; |
| 663 } | 662 } |
| 664 appendSubstringToken(HEXADECIMAL_INFO, start, true); | 663 appendSubstringToken(HEXADECIMAL_INFO, start, true); |
| 665 return next; | 664 return next; |
| 666 } | 665 } |
| 667 } | 666 } |
| 668 return null; | |
| 669 } | 667 } |
| 670 | 668 |
| 671 int tokenizeDotsOrNumber(int next) { | 669 int tokenizeDotsOrNumber(int next) { |
| 672 int start = scanOffset; | 670 int start = scanOffset; |
| 673 next = advance(); | 671 next = advance(); |
| 674 if (($0 <= next && next <= $9)) { | 672 if (($0 <= next && next <= $9)) { |
| 675 return tokenizeFractionPart(next, start); | 673 return tokenizeFractionPart(next, start); |
| 676 } else if (identical($PERIOD, next)) { | 674 } else if (identical($PERIOD, next)) { |
| 677 return select($PERIOD, PERIOD_PERIOD_PERIOD_INFO, PERIOD_PERIOD_INFO); | 675 return select($PERIOD, PERIOD_PERIOD_PERIOD_INFO, PERIOD_PERIOD_INFO); |
| 678 } else { | 676 } else { |
| (...skipping 75 matching lines...) |
| 754 next = advance(); | 752 next = advance(); |
| 755 if (next > 127) asciiOnly = false; | 753 if (next > 127) asciiOnly = false; |
| 756 if (identical($LF, next) || | 754 if (identical($LF, next) || |
| 757 identical($CR, next) || | 755 identical($CR, next) || |
| 758 identical($EOF, next)) { | 756 identical($EOF, next)) { |
| 759 if (!asciiOnly) handleUnicode(start); | 757 if (!asciiOnly) handleUnicode(start); |
| 760 appendComment(start, asciiOnly); | 758 appendComment(start, asciiOnly); |
| 761 return next; | 759 return next; |
| 762 } | 760 } |
| 763 } | 761 } |
| 764 return null; | |
| 765 } | 762 } |
| 766 | 763 |
| 767 int tokenizeMultiLineComment(int next, int start) { | 764 int tokenizeMultiLineComment(int next, int start) { |
| 768 bool asciiOnlyComment = true; // Track if the entire comment is ASCII. | 765 bool asciiOnlyComment = true; // Track if the entire comment is ASCII. |
| 769 bool asciiOnlyLines = true; // Track ASCII since the last handleUnicode. | 766 bool asciiOnlyLines = true; // Track ASCII since the last handleUnicode. |
| 770 int unicodeStart = start; | 767 int unicodeStart = start; |
| 771 int nesting = 1; | 768 int nesting = 1; |
| 772 next = advance(); | 769 next = advance(); |
| 773 while (true) { | 770 while (true) { |
| 774 if (identical($EOF, next)) { | 771 if (identical($EOF, next)) { |
| (...skipping 403 matching lines...) |
| 1178 | 1175 |
| 1179 PrecedenceInfo closeBraceInfoFor(BeginGroupToken begin) { | 1176 PrecedenceInfo closeBraceInfoFor(BeginGroupToken begin) { |
| 1180 return const { | 1177 return const { |
| 1181 '(': CLOSE_PAREN_INFO, | 1178 '(': CLOSE_PAREN_INFO, |
| 1182 '[': CLOSE_SQUARE_BRACKET_INFO, | 1179 '[': CLOSE_SQUARE_BRACKET_INFO, |
| 1183 '{': CLOSE_CURLY_BRACKET_INFO, | 1180 '{': CLOSE_CURLY_BRACKET_INFO, |
| 1184 '<': GT_INFO, | 1181 '<': GT_INFO, |
| 1185 r'${': CLOSE_CURLY_BRACKET_INFO, | 1182 r'${': CLOSE_CURLY_BRACKET_INFO, |
| 1186 }[begin.value]; | 1183 }[begin.value]; |
| 1187 } | 1184 } |
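
The deleted `return null;` statements all follow `while (true)` loops in which every exit path already returns, so they are unreachable. A minimal standalone Dart sketch of that pattern, using hypothetical names rather than the scanner's real helpers:

```dart
// Hypothetical example (not from scanner.dart): every way out of the
// infinite loop is an explicit `return`, so nothing after the loop can run.
int scanDecimalDigits(String source, int offset) {
  while (true) {
    if (offset >= source.length) return offset; // end of input
    int unit = source.codeUnitAt(offset);
    if (unit < 0x30 || unit > 0x39) return offset; // first non-digit
    offset++;
  }
  // A trailing `return` here would be dead code: the loop never
  // completes normally.
}
```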