| OLD | NEW |
| 1 // Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 import 'package:front_end/src/fasta/scanner/string_scanner.dart'; | 5 import 'package:front_end/src/fasta/scanner/string_scanner.dart'; |
| 6 import 'package:front_end/src/fasta/scanner/token.dart' as fasta; | 6 import 'package:front_end/src/fasta/scanner/token.dart' as fasta; |
| 7 import 'package:front_end/src/scanner/token.dart'; | 7 import 'package:front_end/src/scanner/token.dart'; |
| 8 import 'package:test/test.dart'; | 8 import 'package:test/test.dart'; |
| 9 import 'package:test_reflective_loader/test_reflective_loader.dart'; | 9 import 'package:test_reflective_loader/test_reflective_loader.dart'; |
| 10 | 10 |
| 11 main() { | 11 main() { |
| 12 defineReflectiveSuite(() { | 12 defineReflectiveSuite(() { |
| 13 defineReflectiveTests(PrecedenceInfoTest); | 13 defineReflectiveTests(PrecedenceInfoTest); |
| 14 }); | 14 }); |
| 15 } | 15 } |
| 16 | 16 |
| 17 /// Assert that fasta PrecedenceInfo implements analyzer TokenType. | 17 /// Assert that fasta PrecedenceInfo implements analyzer TokenType. |
| 18 @reflectiveTest | 18 @reflectiveTest |
| 19 class PrecedenceInfoTest { | 19 class PrecedenceInfoTest { |
| 20 void assertInfo(check(String source, Token token), | 20 void assertInfo(check(String source, Token token)) { |
| 21 {bool includeLazyAssignmentOperators: true}) { | |
| 22 void assertLexeme(String source) { | 21 void assertLexeme(String source) { |
| 23 if (source == null || source.isEmpty) return; | 22 if (source == null || source.isEmpty) return; |
| 24 var scanner = new StringScanner(source, includeComments: true); | 23 var scanner = new StringScanner(source, includeComments: true); |
| 25 var token = scanner.tokenize(); | 24 var token = scanner.tokenize(); |
| 26 check(source, token); | 25 check(source, token); |
| 27 } | 26 } |
| 28 | 27 |
| 29 for (TokenType type in TokenType.all) { | 28 for (TokenType type in TokenType.all) { |
| 30 assertLexeme(type.value); | 29 assertLexeme(type.value); |
| 31 } | 30 } |
| 32 assertLexeme('1.0'); // DOUBLE | 31 assertLexeme('1.0'); // DOUBLE |
| 33 assertLexeme('0xA'); // HEXADECIMAL | 32 assertLexeme('0xA'); // HEXADECIMAL |
| 34 assertLexeme('1'); // INT | 33 assertLexeme('1'); // INT |
| 35 assertLexeme('var'); // KEYWORD | 34 assertLexeme('var'); // KEYWORD |
| 36 assertLexeme('#!/'); // SCRIPT_TAG | 35 assertLexeme('#!/'); // SCRIPT_TAG |
| 37 assertLexeme('"foo"'); // STRING | 36 assertLexeme('"foo"'); // STRING |
| 38 assertLexeme('bar'); // IDENTIFIER | 37 assertLexeme('bar'); // IDENTIFIER |
| 39 if (includeLazyAssignmentOperators) { | 38 assertLexeme('&&='); |
| 40 assertLexeme('&&='); | 39 assertLexeme('||='); |
| 41 assertLexeme('||='); | |
| 42 } | |
| 43 } | 40 } |
| 44 | 41 |
| 45 void test_isOperator() { | 42 void test_isOperator() { |
| 46 var operatorLexemes = new Set<String>.from(const [ | 43 var operatorLexemes = new Set<String>.from(const [ |
| 47 '&', | 44 '&', |
| 48 '&&', | 45 '&&', |
| 49 '&&=', | 46 '&&=', |
| 50 '&=', | 47 '&=', |
| 51 '!', | 48 '!', |
| 52 '!=', | 49 '!=', |
| (...skipping 52 matching lines...) | |
| 105 ]; | 102 ]; |
| 106 assertInfo((String source, Token token) { | 103 assertInfo((String source, Token token) { |
| 107 expect(token.type.isAdditiveOperator, additiveLexemes.contains(source), | 104 expect(token.type.isAdditiveOperator, additiveLexemes.contains(source), |
| 108 reason: source); | 105 reason: source); |
| 109 }); | 106 }); |
| 110 } | 107 } |
| 111 | 108 |
| 112 void test_isAssignmentOperator() { | 109 void test_isAssignmentOperator() { |
| 113 const assignmentLexemes = const [ | 110 const assignmentLexemes = const [ |
| 114 '&=', | 111 '&=', |
| | 112 '&&=', |
| 115 '|=', | 113 '|=', |
| | 114 '||=', |
| 116 '^=', | 115 '^=', |
| 117 '=', | 116 '=', |
| 118 '>>=', | 117 '>>=', |
| 119 '<<=', | 118 '<<=', |
| 120 '-=', | 119 '-=', |
| 121 '%=', | 120 '%=', |
| 122 '+=', | 121 '+=', |
| 123 '??=', | 122 '??=', |
| 124 '/=', | 123 '/=', |
| 125 '*=', | 124 '*=', |
| (...skipping 13 matching lines...) | |
| 139 '|', | 138 '|', |
| 140 '||', | 139 '||', |
| 141 '^', | 140 '^', |
| 142 '+', | 141 '+', |
| 143 '*', | 142 '*', |
| 144 ]; | 143 ]; |
| 145 assertInfo((String source, Token token) { | 144 assertInfo((String source, Token token) { |
| 146 expect( | 145 expect( |
| 147 token.type.isAssociativeOperator, associativeLexemes.contains(source), | 146 token.type.isAssociativeOperator, associativeLexemes.contains(source), |
| 148 reason: source); | 147 reason: source); |
| 149 }, includeLazyAssignmentOperators: false); | 148 }); |
| 150 } | 149 } |
| 151 | 150 |
| 152 void test_isEqualityOperator() { | 151 void test_isEqualityOperator() { |
| 153 const equalityLexemes = const [ | 152 const equalityLexemes = const [ |
| 154 '!=', | 153 '!=', |
| 155 '==', | 154 '==', |
| 156 ]; | 155 ]; |
| 157 assertInfo((String source, Token token) { | 156 assertInfo((String source, Token token) { |
| 158 expect(token.type.isEqualityOperator, equalityLexemes.contains(source), | 157 expect(token.type.isEqualityOperator, equalityLexemes.contains(source), |
| 159 reason: source); | 158 reason: source); |
| (...skipping 228 matching lines...) | |
| 388 | 387 |
| 389 assertLexeme('1.0', TokenType.DOUBLE); | 388 assertLexeme('1.0', TokenType.DOUBLE); |
| 390 assertLexeme('0xA', TokenType.HEXADECIMAL); | 389 assertLexeme('0xA', TokenType.HEXADECIMAL); |
| 391 assertLexeme('1', TokenType.INT); | 390 assertLexeme('1', TokenType.INT); |
| 392 assertLexeme('var', Keyword.VAR); | 391 assertLexeme('var', Keyword.VAR); |
| 393 assertLexeme('#!/', TokenType.SCRIPT_TAG); | 392 assertLexeme('#!/', TokenType.SCRIPT_TAG); |
| 394 assertLexeme('foo', TokenType.IDENTIFIER); | 393 assertLexeme('foo', TokenType.IDENTIFIER); |
| 395 assertLexeme('"foo"', TokenType.STRING); | 394 assertLexeme('"foo"', TokenType.STRING); |
| 396 } | 395 } |
| 397 } | 396 } |
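
For context on the diff above: the `includeLazyAssignmentOperators` flag is removed from `assertInfo`, so the lazy assignment lexemes `&&=` and `||=` are always scanned, and they are added to the expected `assignmentLexemes` set in `test_isAssignmentOperator`. Below is a minimal standalone sketch of that check, not part of the reviewed change, using only the scanner API already visible in this diff (`StringScanner`, `tokenize()`, `TokenType.isAssignmentOperator`):

```dart
// Standalone sketch, not part of the reviewed CL: scan a few assignment
// lexemes, including the lazy forms '&&=' and '||=', and report whether
// each resulting token's type is classified as an assignment operator.
import 'package:front_end/src/fasta/scanner/string_scanner.dart';

main() {
  const lexemes = const ['=', '+=', '??=', '&&=', '||='];
  for (var source in lexemes) {
    var scanner = new StringScanner(source, includeComments: true);
    var token = scanner.tokenize();
    // With the lazy-assignment flag gone, '&&=' and '||=' are expected to
    // report isAssignmentOperator just like the other assignment operators.
    print('$source isAssignmentOperator: ${token.type.isAssignmentOperator}');
  }
}
```

This mirrors the `assertLexeme` helper inside `assertInfo`; the actual test asserts membership in the expected lexeme set via `expect(..., reason: source)` rather than printing.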