Index: pkg/third_party/html5lib/test/tokenizer_test.dart
diff --git a/pkg/third_party/html5lib/test/tokenizer_test.dart b/pkg/third_party/html5lib/test/tokenizer_test.dart
index fc98012ef4f4e45c1e199c5c4c3d0b43d08e7c48..e3d08e6d3d15507cbfb44ee643009594d887e254 100644
--- a/pkg/third_party/html5lib/test/tokenizer_test.dart
+++ b/pkg/third_party/html5lib/test/tokenizer_test.dart
@@ -2,8 +2,8 @@ library tokenizer_test;
 
 // Note: mirrors used to match the getattr usage in the original test
 import 'dart:async';
+import 'dart:convert';
 import 'dart:io';
-import 'dart:json' as json;
 import 'dart:mirrors';
 import 'dart:utf';
 import 'package:path/path.dart' as pathos;
@@ -207,10 +207,10 @@ void runTokenizerTest(Map testInfo) {
 }
 
 Map unescape(Map testInfo) {
-  // TODO(sigmundch,jmesserly): we currently use json.parse to unescape the
+  // TODO(sigmundch,jmesserly): we currently use JSON.decode to unescape the
   // unicode characters in the string, we should use a decoding that works with
   // any control characters.
-  decode(inp) => inp == '\u0000' ? inp : json.parse('"$inp"');
+  decode(inp) => inp == '\u0000' ? inp : JSON.decode('"$inp"');
 
   testInfo["input"] = decode(testInfo["input"]);
   for (var token in testInfo["output"]) {
@@ -248,7 +248,7 @@ void main() {
     if (!path.endsWith('.test')) continue;
 
     var text = new File(path).readAsStringSync();
-    var tests = json.parse(text);
+    var tests = JSON.decode(text);
     var testName = pathos.basenameWithoutExtension(path);
     var testList = tests['tests'];
    if (testList == null) continue;