Index: pkg/analyzer/test/generated/all_the_rest_test.dart
diff --git a/pkg/analyzer/test/generated/all_the_rest_test.dart b/pkg/analyzer/test/generated/all_the_rest_test.dart
index 70ec391108b333bbda927eafd7ddce9bc20db739..7e205ea526b22e6e35a16faa12d5f155a0917259 100644
--- a/pkg/analyzer/test/generated/all_the_rest_test.dart
+++ b/pkg/analyzer/test/generated/all_the_rest_test.dart
@@ -10,9 +10,7 @@ import 'package:analyzer/src/generated/constant.dart';
 import 'package:analyzer/src/generated/element.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/error.dart';
-import 'package:analyzer/src/generated/html.dart' as ht;
 import 'package:analyzer/src/generated/java_core.dart';
-import 'package:analyzer/src/generated/java_engine.dart';
 import 'package:analyzer/src/generated/java_engine_io.dart';
 import 'package:analyzer/src/generated/java_io.dart';
 import 'package:analyzer/src/generated/resolver.dart';
@@ -23,7 +21,6 @@ import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/source_io.dart';
 import 'package:analyzer/src/generated/testing/ast_factory.dart';
 import 'package:analyzer/src/generated/testing/element_factory.dart';
-import 'package:analyzer/src/generated/testing/html_factory.dart';
 import 'package:analyzer/src/generated/testing/test_type_provider.dart';
 import 'package:analyzer/src/generated/testing/token_factory.dart';
 import 'package:analyzer/src/generated/utilities_collection.dart';
@@ -64,380 +61,7 @@ main() {
   runReflectiveTests(FileUriResolverTest);
   runReflectiveTests(ReferenceFinderTest);
   runReflectiveTests(SDKLibrariesReaderTest);
-  runReflectiveTests(ToSourceVisitorTest);
   runReflectiveTests(UriKindTest);
-  runReflectiveTests(StringScannerTest);
-}
-
-abstract class AbstractScannerTest {
-  ht.AbstractScanner newScanner(String input);
-
-  void test_tokenize_attribute() {
-    _tokenize("<html bob=\"one two\">", <Object>[
-      ht.TokenType.LT,
-      "html",
-      "bob",
-      ht.TokenType.EQ,
-      "\"one two\"",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_comment() {
-    _tokenize("<!-- foo -->", <Object>["<!-- foo -->"]);
-  }
-
-  void test_tokenize_comment_incomplete() {
-    _tokenize("<!-- foo", <Object>["<!-- foo"]);
-  }
-
-  void test_tokenize_comment_with_gt() {
-    _tokenize("<!-- foo > -> -->", <Object>["<!-- foo > -> -->"]);
-  }
-
-  void test_tokenize_declaration() {
-    _tokenize("<! foo ><html>",
-        <Object>["<! foo >", ht.TokenType.LT, "html", ht.TokenType.GT]);
-  }
-
-  void test_tokenize_declaration_malformed() {
-    _tokenize("<! foo /><html>",
-        <Object>["<! foo />", ht.TokenType.LT, "html", ht.TokenType.GT]);
-  }
-
-  void test_tokenize_directive_incomplete() {
-    _tokenize2("<? \nfoo", <Object>["<? \nfoo"], <int>[0, 4]);
-  }
-
-  void test_tokenize_directive_xml() {
-    _tokenize("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>",
-        <Object>["<?xml version=\"1.0\" encoding=\"UTF-8\" ?>"]);
-  }
-
-  void test_tokenize_directives_incomplete_with_newline() {
-    _tokenize2("<! \nfoo", <Object>["<! \nfoo"], <int>[0, 4]);
-  }
-
-  void test_tokenize_empty() {
-    _tokenize("", <Object>[]);
-  }
-
-  void test_tokenize_lt() {
-    _tokenize("<", <Object>[ht.TokenType.LT]);
-  }
-
-  void test_tokenize_script_embedded_tags() {
-    _tokenize("<script> <p></p></script>", <Object>[
-      ht.TokenType.LT,
-      "script",
-      ht.TokenType.GT,
-      " <p></p>",
-      ht.TokenType.LT_SLASH,
-      "script",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_script_embedded_tags2() {
-    _tokenize("<script> <p></p><</script>", <Object>[
-      ht.TokenType.LT,
-      "script",
-      ht.TokenType.GT,
-      " <p></p><",
-      ht.TokenType.LT_SLASH,
-      "script",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_script_embedded_tags3() {
-    _tokenize("<script> <p></p></</script>", <Object>[
-      ht.TokenType.LT,
-      "script",
-      ht.TokenType.GT,
-      " <p></p></",
-      ht.TokenType.LT_SLASH,
-      "script",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_script_partial() {
-    _tokenize("<script> <p> ",
-        <Object>[ht.TokenType.LT, "script", ht.TokenType.GT, " <p> "]);
-  }
-
-  void test_tokenize_script_partial2() {
-    _tokenize("<script> <p> <",
-        <Object>[ht.TokenType.LT, "script", ht.TokenType.GT, " <p> <"]);
-  }
-
-  void test_tokenize_script_partial3() {
-    _tokenize("<script> <p> </",
-        <Object>[ht.TokenType.LT, "script", ht.TokenType.GT, " <p> </"]);
-  }
-
-  void test_tokenize_script_ref() {
-    _tokenize("<script source='some.dart'/> <p>", <Object>[
-      ht.TokenType.LT,
-      "script",
-      "source",
-      ht.TokenType.EQ,
-      "'some.dart'",
-      ht.TokenType.SLASH_GT,
-      " ",
-      ht.TokenType.LT,
-      "p",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_script_with_newline() {
-    _tokenize2("<script> <p>\n </script>", <Object>[
-      ht.TokenType.LT,
-      "script",
-      ht.TokenType.GT,
-      " <p>\n ",
-      ht.TokenType.LT_SLASH,
-      "script",
-      ht.TokenType.GT
-    ], <int>[
-      0,
-      13
-    ]);
-  }
-
-  void test_tokenize_spaces_and_newlines() {
-    ht.Token token = _tokenize2(
-        " < html \n bob = 'joe\n' >\n <\np > one \r\n two <!-- \rfoo --> </ p > </ html > ",
-        <Object>[
-          " ",
-          ht.TokenType.LT,
-          "html",
-          "bob",
-          ht.TokenType.EQ,
-          "'joe\n'",
-          ht.TokenType.GT,
-          "\n ",
-          ht.TokenType.LT,
-          "p",
-          ht.TokenType.GT,
-          " one \r\n two ",
-          "<!-- \rfoo -->",
-          " ",
-          ht.TokenType.LT_SLASH,
-          "p",
-          ht.TokenType.GT,
-          " ",
-          ht.TokenType.LT_SLASH,
-          "html",
-          ht.TokenType.GT,
-          " "
-        ],
-        <int>[
-          0,
-          9,
-          21,
-          25,
-          28,
-          38,
-          49
-        ]);
-    token = token.next;
-    expect(token.offset, 1);
-    token = token.next;
-    expect(token.offset, 3);
-    token = token.next;
-    expect(token.offset, 10);
-  }
-
-  void test_tokenize_string() {
-    _tokenize("<p bob=\"foo\">", <Object>[
-      ht.TokenType.LT,
-      "p",
-      "bob",
-      ht.TokenType.EQ,
-      "\"foo\"",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_string_partial() {
-    _tokenize("<p bob=\"foo",
-        <Object>[ht.TokenType.LT, "p", "bob", ht.TokenType.EQ, "\"foo"]);
-  }
-
-  void test_tokenize_string_single_quote() {
-    _tokenize("<p bob='foo'>", <Object>[
-      ht.TokenType.LT,
-      "p",
-      "bob",
-      ht.TokenType.EQ,
-      "'foo'",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_string_single_quote_partial() {
-    _tokenize("<p bob='foo",
-        <Object>[ht.TokenType.LT, "p", "bob", ht.TokenType.EQ, "'foo"]);
-  }
-
-  void test_tokenize_tag_begin_end() {
-    _tokenize("<html></html>", <Object>[
-      ht.TokenType.LT,
-      "html",
-      ht.TokenType.GT,
-      ht.TokenType.LT_SLASH,
-      "html",
-      ht.TokenType.GT
-    ]);
-  }
-
-  void test_tokenize_tag_begin_only() {
-    ht.Token token =
-        _tokenize("<html>", <Object>[ht.TokenType.LT, "html", ht.TokenType.GT]);
-    token = token.next;
-    expect(token.offset, 1);
-  }
-
-  void test_tokenize_tag_incomplete_with_special_characters() {
-    _tokenize("<br-a_b", <Object>[ht.TokenType.LT, "br-a_b"]);
-  }
-
-  void test_tokenize_tag_self_contained() {
-    _tokenize("<br/>", <Object>[ht.TokenType.LT, "br", ht.TokenType.SLASH_GT]);
-  }
-
-  void test_tokenize_tags_wellformed() {
-    _tokenize("<html><p>one two</p></html>", <Object>[
-      ht.TokenType.LT,
-      "html",
-      ht.TokenType.GT,
-      ht.TokenType.LT,
-      "p",
-      ht.TokenType.GT,
-      "one two",
-      ht.TokenType.LT_SLASH,
-      "p",
-      ht.TokenType.GT,
-      ht.TokenType.LT_SLASH,
-      "html",
-      ht.TokenType.GT
-    ]);
-  }
-
-  /**
-   * Given an object representing an expected token, answer the expected token type.
-   *
-   * @param count the token count for error reporting
-   * @param expected the object representing an expected token
-   * @return the expected token type
-   */
-  ht.TokenType _getExpectedTokenType(int count, Object expected) {
-    if (expected is ht.TokenType) {
-      return expected;
-    }
-    if (expected is String) {
-      String lexeme = expected;
-      if (lexeme.startsWith("\"") || lexeme.startsWith("'")) {
-        return ht.TokenType.STRING;
-      }
-      if (lexeme.startsWith("<!--")) {
-        return ht.TokenType.COMMENT;
-      }
-      if (lexeme.startsWith("<!")) {
-        return ht.TokenType.DECLARATION;
-      }
-      if (lexeme.startsWith("<?")) {
-        return ht.TokenType.DIRECTIVE;
-      }
-      if (_isTag(lexeme)) {
-        return ht.TokenType.TAG;
-      }
-      return ht.TokenType.TEXT;
-    }
-    fail(
-        "Unknown expected token $count: ${expected != null ? expected.runtimeType : "null"}");
-    return null;
-  }
-
-  bool _isTag(String lexeme) {
-    if (lexeme.length == 0 || !Character.isLetter(lexeme.codeUnitAt(0))) {
-      return false;
-    }
-    for (int index = 1; index < lexeme.length; index++) {
-      int ch = lexeme.codeUnitAt(index);
-      if (!Character.isLetterOrDigit(ch) && ch != 0x2D && ch != 0x5F) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  ht.Token _tokenize(String input, List<Object> expectedTokens) =>
-      _tokenize2(input, expectedTokens, <int>[0]);
-  ht.Token _tokenize2(
-      String input, List<Object> expectedTokens, List<int> expectedLineStarts) {
-    ht.AbstractScanner scanner = newScanner(input);
-    scanner.passThroughElements = <String>["script"];
-    int count = 0;
-    ht.Token firstToken = scanner.tokenize();
-    ht.Token token = firstToken;
-    ht.Token previousToken = token.previous;
-    expect(previousToken.type == ht.TokenType.EOF, isTrue);
-    expect(previousToken.previous, same(previousToken));
-    expect(previousToken.offset, -1);
-    expect(previousToken.next, same(token));
-    expect(token.offset, 0);
-    while (token.type != ht.TokenType.EOF) {
-      if (count == expectedTokens.length) {
-        fail("too many parsed tokens");
-      }
-      Object expected = expectedTokens[count];
-      ht.TokenType expectedTokenType = _getExpectedTokenType(count, expected);
-      expect(token.type, same(expectedTokenType), reason: "token $count");
-      if (expectedTokenType.lexeme != null) {
-        expect(token.lexeme, expectedTokenType.lexeme, reason: "token $count");
-      } else {
-        expect(token.lexeme, expected, reason: "token $count");
-      }
-      count++;
-      previousToken = token;
-      token = token.next;
-      expect(token.previous, same(previousToken));
-    }
-    expect(token.next, same(token));
-    expect(token.offset, input.length);
-    if (count != expectedTokens.length) {
-      expect(false, isTrue, reason: "not enough parsed tokens");
-    }
-    List<int> lineStarts = scanner.lineStarts;
-    bool success = expectedLineStarts.length == lineStarts.length;
-    if (success) {
-      for (int i = 0; i < lineStarts.length; i++) {
-        if (expectedLineStarts[i] != lineStarts[i]) {
-          success = false;
-          break;
-        }
-      }
-    }
-    if (!success) {
-      StringBuffer buffer = new StringBuffer();
-      buffer.write("Expected line starts ");
-      for (int start in expectedLineStarts) {
-        buffer.write(start);
-        buffer.write(", ");
-      }
-      buffer.write(" but found ");
-      for (int start in lineStarts) {
-        buffer.write(start);
-        buffer.write(", ");
-      }
-      fail(buffer.toString());
-    }
-    return firstToken;
-  }
 }
 
 /**
@@ -8587,14 +8211,6 @@ final Map<String, LibraryInfo> LIBRARIES = const <String, LibraryInfo> {
   }
 }
 
-@reflectiveTest
-class StringScannerTest extends AbstractScannerTest {
-  @override
-  ht.AbstractScanner newScanner(String input) {
-    return new ht.StringScanner(null, input);
-  }
-}
-
 class TestAnalysisContext_ConstantFinderTest extends TestAnalysisContext {
   bool invoked = false;
   TestAnalysisContext_ConstantFinderTest();
@@ -8605,61 +8221,6 @@ class TestAnalysisContext_ConstantFinderTest extends TestAnalysisContext {
   }
 }
 
-/**
- * Instances of the class `ToSourceVisitorTest`
- */
-@reflectiveTest
-class ToSourceVisitorTest extends EngineTestCase {
-  void fail_visitHtmlScriptTagNode_attributes_content() {
-    _assertSource(
-        "<script type='application/dart'>f() {}</script>",
-        HtmlFactory.scriptTagWithContent(
-            "f() {}", [HtmlFactory.attribute("type", "'application/dart'")]));
-  }
-
-  void fail_visitHtmlScriptTagNode_noAttributes_content() {
-    _assertSource(
-        "<script>f() {}</script>", HtmlFactory.scriptTagWithContent("f() {}"));
-  }
-
-  void test_visitHtmlScriptTagNode_attributes_noContent() {
-    _assertSource(
-        "<script type='application/dart'/>",
-        HtmlFactory
-            .scriptTag([HtmlFactory.attribute("type", "'application/dart'")]));
-  }
-
-  void test_visitHtmlScriptTagNode_noAttributes_noContent() {
-    _assertSource("<script/>", HtmlFactory.scriptTag());
-  }
-
-  void test_visitHtmlUnit_empty() {
-    _assertSource("", new ht.HtmlUnit(null, new List<ht.XmlTagNode>(), null));
-  }
-
-  void test_visitHtmlUnit_nonEmpty() {
-    _assertSource(
-        "<html/>", new ht.HtmlUnit(null, [HtmlFactory.tagNode("html")], null));
-  }
-
-  void test_visitXmlAttributeNode() {
-    _assertSource("x=y", HtmlFactory.attribute("x", "y"));
-  }
-
-  /**
-   * Assert that a `ToSourceVisitor` will produce the expected source when visiting the given
-   * node.
-   *
-   * @param expectedSource the source string that the visitor is expected to produce
-   * @param node the AST node being visited to produce the actual source
-   */
-  void _assertSource(String expectedSource, ht.XmlNode node) {
-    PrintStringWriter writer = new PrintStringWriter();
-    node.accept(new ht.ToSourceVisitor(writer));
-    expect(writer.toString(), expectedSource);
-  }
-}
-
 @reflectiveTest
 class UriKindTest {
   void test_fromEncoding() {
@@ -8675,237 +8236,3 @@ class UriKindTest {
     expect(UriKind.PACKAGE_URI.encoding, 0x70);
   }
 }
-
-/**
- * Instances of `XmlValidator` traverse an [XmlNode] structure and validate the node
- * hierarchy.
- */
-class XmlValidator extends ht.RecursiveXmlVisitor<Object> {
-  /**
-   * A list containing the errors found while traversing the AST structure.
-   */
-  List<String> _errors = new List<String>();
-  /**
-   * The tags to expect when visiting or `null` if tags should not be checked.
-   */
-  List<XmlValidator_Tag> _expectedTagsInOrderVisited;
-  /**
-   * The current index into the [expectedTagsInOrderVisited] array.
-   */
-  int _expectedTagsIndex = 0;
-  /**
-   * The key/value pairs to expect when visiting or `null` if attributes should not be
-   * checked.
-   */
-  List<String> _expectedAttributeKeyValuePairs;
-  /**
-   * The current index into the [expectedAttributeKeyValuePairs].
-   */
-  int _expectedAttributeIndex = 0;
-  /**
-   * Assert that no errors were found while traversing any of the AST structures that have been
-   * visited.
-   */
-  void assertValid() {
-    while (_expectedTagsIndex < _expectedTagsInOrderVisited.length) {
-      String expectedTag =
-          _expectedTagsInOrderVisited[_expectedTagsIndex++]._tag;
-      _errors.add("Expected to visit node with tag: $expectedTag");
-    }
-    if (!_errors.isEmpty) {
-      StringBuffer buffer = new StringBuffer();
-      buffer.write("Invalid XML structure:");
-      for (String message in _errors) {
-        buffer.writeln();
-        buffer.write(" ");
-        buffer.write(message);
-      }
-      fail(buffer.toString());
-    }
-  }
-
-  /**
-   * Set the tags to be expected when visiting
-   *
-   * @param expectedTags the expected tags
-   */
-  void expectTags(List<XmlValidator_Tag> expectedTags) {
-    // Flatten the hierarchy into expected order in which the tags are visited
-    List<XmlValidator_Tag> expected = new List<XmlValidator_Tag>();
-    _expectTags(expected, expectedTags);
-    this._expectedTagsInOrderVisited = expected;
-  }
-
-  @override
-  Object visitHtmlUnit(ht.HtmlUnit node) {
-    if (node.parent != null) {
-      _errors.add("HtmlUnit should not have a parent");
-    }
-    if (node.endToken.type != ht.TokenType.EOF) {
-      _errors.add("HtmlUnit end token should be of type EOF");
-    }
-    _validateNode(node);
-    return super.visitHtmlUnit(node);
-  }
-
-  @override
-  Object visitXmlAttributeNode(ht.XmlAttributeNode actual) {
-    if (actual.parent is! ht.XmlTagNode) {
-      _errors.add(
-          "Expected ${actual.runtimeType} to have parent of type XmlTagNode");
-    }
-    String actualName = actual.name;
-    String actualValue = actual.valueToken.lexeme;
-    if (_expectedAttributeIndex < _expectedAttributeKeyValuePairs.length) {
-      String expectedName =
-          _expectedAttributeKeyValuePairs[_expectedAttributeIndex];
-      if (expectedName != actualName) {
-        _errors.add(
-            "Expected ${_expectedTagsIndex - 1} tag: ${_expectedTagsInOrderVisited[_expectedTagsIndex - 1]._tag} attribute ${_expectedAttributeIndex ~/ 2} to have name: $expectedName but found: $actualName");
-      }
-      String expectedValue =
-          _expectedAttributeKeyValuePairs[_expectedAttributeIndex + 1];
-      if (expectedValue != actualValue) {
-        _errors.add(
-            "Expected ${_expectedTagsIndex - 1} tag: ${_expectedTagsInOrderVisited[_expectedTagsIndex - 1]._tag} attribute ${_expectedAttributeIndex ~/ 2} to have value: $expectedValue but found: $actualValue");
-      }
-    } else {
-      _errors.add(
-          "Unexpected ${_expectedTagsIndex - 1} tag: ${_expectedTagsInOrderVisited[_expectedTagsIndex - 1]._tag} attribute ${_expectedAttributeIndex ~/ 2} name: $actualName value: $actualValue");
-    }
-    _expectedAttributeIndex += 2;
-    _validateNode(actual);
-    return super.visitXmlAttributeNode(actual);
-  }
-
-  @override
-  Object visitXmlTagNode(ht.XmlTagNode actual) {
-    if (!(actual.parent is ht.HtmlUnit || actual.parent is ht.XmlTagNode)) {
-      _errors.add(
-          "Expected ${actual.runtimeType} to have parent of type HtmlUnit or XmlTagNode");
-    }
-    if (_expectedTagsInOrderVisited != null) {
-      String actualTag = actual.tag;
-      if (_expectedTagsIndex < _expectedTagsInOrderVisited.length) {
-        XmlValidator_Tag expected =
-            _expectedTagsInOrderVisited[_expectedTagsIndex];
-        if (expected._tag != actualTag) {
-          _errors.add(
-              "Expected $_expectedTagsIndex tag: ${expected._tag} but found: $actualTag");
-        }
-        _expectedAttributeKeyValuePairs = expected._attributes._keyValuePairs;
-        int expectedAttributeCount =
-            _expectedAttributeKeyValuePairs.length ~/ 2;
-        int actualAttributeCount = actual.attributes.length;
-        if (expectedAttributeCount != actualAttributeCount) {
-          _errors.add(
-              "Expected $_expectedTagsIndex tag: ${expected._tag} to have $expectedAttributeCount attributes but found $actualAttributeCount");
-        }
-        _expectedAttributeIndex = 0;
-        _expectedTagsIndex++;
-        expect(actual.attributeEnd, isNotNull);
-        expect(actual.contentEnd, isNotNull);
-        int count = 0;
-        ht.Token token = actual.attributeEnd.next;
-        ht.Token lastToken = actual.contentEnd;
-        while (!identical(token, lastToken)) {
-          token = token.next;
-          if (++count > 1000) {
-            fail(
-                "Expected $_expectedTagsIndex tag: ${expected._tag} to have a sequence of tokens from getAttributeEnd() to getContentEnd()");
-            break;
-          }
-        }
-        if (actual.attributeEnd.type == ht.TokenType.GT) {
-          if (ht.HtmlParser.SELF_CLOSING.contains(actual.tag)) {
-            expect(actual.closingTag, isNull);
-          } else {
-            expect(actual.closingTag, isNotNull);
-          }
-        } else if (actual.attributeEnd.type == ht.TokenType.SLASH_GT) {
-          expect(actual.closingTag, isNull);
-        } else {
-          fail("Unexpected attribute end token: ${actual.attributeEnd.lexeme}");
-        }
-        if (expected._content != null && expected._content != actual.content) {
-          _errors.add(
-              "Expected $_expectedTagsIndex tag: ${expected._tag} to have content '${expected._content}' but found '${actual.content}'");
-        }
-        if (expected._children.length != actual.tagNodes.length) {
-          _errors.add(
-              "Expected $_expectedTagsIndex tag: ${expected._tag} to have ${expected._children.length} children but found ${actual.tagNodes.length}");
-        } else {
-          for (int index = 0; index < expected._children.length; index++) {
-            String expectedChildTag = expected._children[index]._tag;
-            String actualChildTag = actual.tagNodes[index].tag;
-            if (expectedChildTag != actualChildTag) {
-              _errors.add(
-                  "Expected $_expectedTagsIndex tag: ${expected._tag} child $index to have tag: $expectedChildTag but found: $actualChildTag");
-            }
-          }
-        }
-      } else {
-        _errors.add("Visited unexpected tag: $actualTag");
-      }
-    }
-    _validateNode(actual);
-    return super.visitXmlTagNode(actual);
-  }
-
-  /**
-   * Append the specified tags to the array in depth first order
-   *
-   * @param expected the array to which the tags are added (not `null`)
-   * @param expectedTags the expected tags to be added (not `null`, contains no `null`s)
-   */
-  void _expectTags(
-      List<XmlValidator_Tag> expected, List<XmlValidator_Tag> expectedTags) {
-    for (XmlValidator_Tag tag in expectedTags) {
-      expected.add(tag);
-      _expectTags(expected, tag._children);
-    }
-  }
-
-  void _validateNode(ht.XmlNode node) {
-    if (node.beginToken == null) {
-      _errors.add("No begin token for ${node.runtimeType}");
-    }
-    if (node.endToken == null) {
-      _errors.add("No end token for ${node.runtimeType}");
-    }
-    int nodeStart = node.offset;
-    int nodeLength = node.length;
-    if (nodeStart < 0 || nodeLength < 0) {
-      _errors.add("No source info for ${node.runtimeType}");
-    }
-    ht.XmlNode parent = node.parent;
-    if (parent != null) {
-      int nodeEnd = nodeStart + nodeLength;
-      int parentStart = parent.offset;
-      int parentEnd = parentStart + parent.length;
-      if (nodeStart < parentStart) {
-        _errors.add(
-            "Invalid source start ($nodeStart) for ${node.runtimeType} inside ${parent.runtimeType} ($parentStart)");
-      }
-      if (nodeEnd > parentEnd) {
-        _errors.add(
-            "Invalid source end ($nodeEnd) for ${node.runtimeType} inside ${parent.runtimeType} ($parentStart)");
-      }
-    }
-  }
-}
-
-class XmlValidator_Attributes {
-  final List<String> _keyValuePairs;
-  XmlValidator_Attributes([this._keyValuePairs = StringUtilities.EMPTY_ARRAY]);
-}
-
-class XmlValidator_Tag {
-  static const List<XmlValidator_Tag> EMPTY_LIST = const <XmlValidator_Tag>[];
-  final String _tag;
-  final XmlValidator_Attributes _attributes;
-  final String _content;
-  final List<XmlValidator_Tag> _children;
-  XmlValidator_Tag(this._tag, this._attributes, this._content,
-      [this._children = EMPTY_LIST]);
-}