Index: pkg/front_end/lib/src/scanner/scanner.dart
diff --git a/pkg/front_end/lib/src/scanner/scanner.dart b/pkg/front_end/lib/src/scanner/scanner.dart
index 48021b7a5756b8b4e1e17f16808d2690b8250694..ff67663ca9d33b8f4351bf168f92db8c67be3389 100644
--- a/pkg/front_end/lib/src/scanner/scanner.dart
+++ b/pkg/front_end/lib/src/scanner/scanner.dart
@@ -15,7 +15,7 @@ class KeywordState {
   /**
    * An empty transition table used by leaf states.
    */
-  static List<KeywordState> _EMPTY_TABLE = new List<KeywordState>(26);
+  static List<KeywordState> _EMPTY_TABLE = new List<KeywordState>($z - $A + 1);
 
   /**
    * The initial state in the state machine.
@@ -54,7 +54,7 @@ class KeywordState {
    * [character], or `null` if there is no valid state reachable from this state
    * with such a transition.
    */
-  KeywordState next(int character) => _table[character - $a];
+  KeywordState next(int character) => _table[character - $A];
 
   /**
    * Create the next state in the state machine where we have already recognized
@@ -64,7 +64,7 @@ class KeywordState {
    */
   static KeywordState _computeKeywordStateTable(
       int start, List<String> strings, int offset, int length) {
-    List<KeywordState> result = new List<KeywordState>(26);
+    List<KeywordState> result = new List<KeywordState>($z - $A + 1);
     assert(length != 0);
     int chunk = $nul;
     int chunkStart = -1;
@@ -77,7 +77,7 @@ class KeywordState {
       int c = strings[i].codeUnitAt(start);
       if (chunk != c) {
         if (chunkStart != -1) {
-          result[chunk - $a] = _computeKeywordStateTable(
+          result[chunk - $A] = _computeKeywordStateTable(
               start + 1, strings, chunkStart, i - chunkStart);
         }
         chunkStart = i;
@@ -86,8 +86,8 @@ class KeywordState {
       }
     }
     if (chunkStart != -1) {
-      assert(result[chunk - $a] == null);
-      result[chunk - $a] = _computeKeywordStateTable(
+      assert(result[chunk - $A] == null);
+      result[chunk - $A] = _computeKeywordStateTable(
           start + 1, strings, chunkStart, offset + length - chunkStart);
     } else {
       assert(length == 1);
@@ -283,12 +283,12 @@ abstract class Scanner {
         return _tokenizeString(_reader.advance(), start, true);
       }
     }
-    if ($a <= next && next <= $z) {
-      // 'a'-'z'
+    if (($A <= next && next <= $Z) || ($a <= next && next <= $z)) {
+      // 'A'-'Z' || 'a'-'z'
       return _tokenizeKeywordOrIdentifier(next, true);
     }
-    if (($A <= next && next <= $Z) || next == $_ || next == $$) {
-      // 'A'-'Z' || '_' || '$'
+    if (next == $_ || next == $$) {
+      // '_' || '$'
       return _tokenizeIdentifier(next, _reader.offset, true);
     }
     if (next == $lt) {
@@ -941,17 +941,15 @@ abstract class Scanner {
   int _tokenizeKeywordOrIdentifier(int next, bool allowDollar) {
     KeywordState state = KeywordState.KEYWORD_STATE;
     int start = _reader.offset;
-    while (state != null && $a <= next && next <= $z) {
+    while (state != null &&
+        (($A <= next && next <= $Z) || $a <= next && next <= $z)) {
       state = state.next(next);
       next = _reader.advance();
     }
     if (state == null || state.keyword() == null) {
       return _tokenizeIdentifier(next, start, allowDollar);
     }
-    if (($A <= next && next <= $Z) ||
-        ($0 <= next && next <= $9) ||
-        next == $_ ||
-        next == $$) {
+    if (($0 <= next && next <= $9) || next == $_ || next == $$) {
       return _tokenizeIdentifier(next, start, allowDollar);
     } else if (next < 128) {
       _appendKeywordToken(state.keyword());
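Note (reviewer sketch, not part of the patch): the diff widens the keyword transition table so it can be indexed by `character - $A` instead of `character - $a`, which lets keywords containing uppercase letters reach a valid slot. The standalone Dart below only illustrates that arithmetic; the $A/$Z/$a/$z constants are defined locally here to mirror the code-unit constants scanner.dart uses, so nothing in it depends on the library itself.

// Local stand-ins for the ASCII code-unit constants used by scanner.dart.
const int $A = 0x41; // 'A'
const int $Z = 0x5A; // 'Z'
const int $a = 0x61; // 'a'
const int $z = 0x7A; // 'z'

void main() {
  // Old table: 26 slots indexed by `character - $a`, so only 'a'-'z' fit.
  // New table: $z - $A + 1 = 58 slots indexed by `character - $A`, so both
  // 'A'-'Z' and 'a'-'z' land on valid slots; the code units between 'Z' and
  // 'a' become unused slots, the cost of keeping the index a single
  // subtraction.
  print($z - $A + 1); // 58
  print($A - $A);     // 0   'A' -> first slot
  print($Z - $A);     // 25  'Z' -> slot 25
  print($a - $A);     // 32  'a' -> slot 32
  print($z - $A);     // 57  'z' -> last slot
}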