| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library yaml.parser; | 5 library yaml.parser; |
| 6 | 6 |
| 7 import 'dart:collection'; | |
| 8 | |
| 9 import 'package:source_span/source_span.dart'; | 7 import 'package:source_span/source_span.dart'; |
| 10 import 'package:string_scanner/string_scanner.dart'; | 8 import 'package:string_scanner/string_scanner.dart'; |
| 11 | 9 |
| 12 import 'equality.dart'; | 10 import 'event.dart'; |
| 13 import 'model.dart'; | 11 import 'scanner.dart'; |
| | 12 import 'style.dart'; |
| | 13 import 'token.dart'; |
| 14 import 'utils.dart'; | 14 import 'utils.dart'; |
| 15 | 15 import 'yaml_document.dart'; |
| 16 /// Translates a string of characters into a YAML serialization tree. | 16 import 'yaml_exception.dart'; |
| | 17 |
| | 18 /// A parser that reads [Token]s emitted by a [Scanner] and emits [Event]s. |
| 17 /// | 19 /// |
| 18 /// This parser is designed to closely follow the spec. All productions in the | 20 /// This is based on the libyaml parser, available at |
| 19 /// spec are numbered, and the corresponding methods in the parser have the same | 21 /// https://github.com/yaml/libyaml/blob/master/src/parser.c. The license for |
| 20 /// numbers. This is certainly not the most efficient way of parsing YAML, but | 22 /// that is available in ../../libyaml-license.txt. |
| 21 /// it is the easiest to write and read in the context of the spec. | |
| 22 /// | |
| 23 /// Methods corresponding to productions are also named as in the spec, | |
| 24 /// translating the name of the method (although not the annotation characters) | |
| 25 into camel-case for Dart style. For example, the spec has a production | |
| 26 /// named `nb-ns-plain-in-line`, and the method implementing it is named | |
| 27 /// `nb_ns_plainInLine`. The exception to that rule is methods that just | |
| 28 /// recognize character classes; these are named `is*`. | |
| 29 class Parser { | 23 class Parser { |
| 30 static const TAB = 0x9; | 24 /// The underlying [Scanner] that generates [Token]s. |
| 31 static const LF = 0xA; | 25 final Scanner _scanner; |
| 32 static const CR = 0xD; | 26 |
| 33 static const SP = 0x20; | 27 /// The stack of parse states for nested contexts. |
| 34 static const TILDE = 0x7E; | 28 final _states = new List<_State>(); |
| 35 static const NEL = 0x85; | 29 |
| 36 static const PLUS = 0x2B; | 30 /// The current parse state. |
| 37 static const HYPHEN = 0x2D; | 31 var _state = _State.STREAM_START; |
| 38 static const QUESTION_MARK = 0x3F; | 32 |
| 39 static const COLON = 0x3A; | 33 /// The custom tag directives, by tag handle. |
| 40 static const COMMA = 0x2C; | 34 final _tagDirectives = new Map<String, TagDirective>(); |
| 41 static const LEFT_BRACKET = 0x5B; | 35 |
| 42 static const RIGHT_BRACKET = 0x5D; | 36 /// Whether the parser has finished parsing. |
| 43 static const LEFT_BRACE = 0x7B; | 37 bool get isDone => _state == _State.END; |
| 44 static const RIGHT_BRACE = 0x7D; | 38 |
| 45 static const HASH = 0x23; | 39 /// Creates a parser that parses [source]. |
| 46 static const AMPERSAND = 0x26; | 40 /// |
| 47 static const ASTERISK = 0x2A; | 41 /// [sourceUrl] can be a String or a [Uri]. |
| 48 static const EXCLAMATION = 0x21; | 42 Parser(String source, {sourceUrl}) |
| 49 static const VERTICAL_BAR = 0x7C; | 43 : _scanner = new Scanner(source, sourceUrl: sourceUrl); |
| 50 static const GREATER_THAN = 0x3E; | 44 |
| 51 static const SINGLE_QUOTE = 0x27; | 45 /// Consumes and returns the next event. |
| 52 static const DOUBLE_QUOTE = 0x22; | 46 Event parse() { |
| 53 static const PERCENT = 0x25; | |
| 54 static const AT = 0x40; | |
| 55 static const GRAVE_ACCENT = 0x60; | |
| 56 | |
| 57 static const NULL = 0x0; | |
| 58 static const BELL = 0x7; | |
| 59 static const BACKSPACE = 0x8; | |
| 60 static const VERTICAL_TAB = 0xB; | |
| 61 static const FORM_FEED = 0xC; | |
| 62 static const ESCAPE = 0x1B; | |
| 63 static const SLASH = 0x2F; | |
| 64 static const BACKSLASH = 0x5C; | |
| 65 static const UNDERSCORE = 0x5F; | |
| 66 static const NBSP = 0xA0; | |
| 67 static const LINE_SEPARATOR = 0x2028; | |
| 68 static const PARAGRAPH_SEPARATOR = 0x2029; | |
| 69 | |
| 70 static const NUMBER_0 = 0x30; | |
| 71 static const NUMBER_9 = 0x39; | |
| 72 | |
| 73 static const LETTER_A = 0x61; | |
| 74 static const LETTER_B = 0x62; | |
| 75 static const LETTER_E = 0x65; | |
| 76 static const LETTER_F = 0x66; | |
| 77 static const LETTER_N = 0x6E; | |
| 78 static const LETTER_R = 0x72; | |
| 79 static const LETTER_T = 0x74; | |
| 80 static const LETTER_U = 0x75; | |
| 81 static const LETTER_V = 0x76; | |
| 82 static const LETTER_X = 0x78; | |
| 83 | |
| 84 static const LETTER_CAP_A = 0x41; | |
| 85 static const LETTER_CAP_F = 0x46; | |
| 86 static const LETTER_CAP_L = 0x4C; | |
| 87 static const LETTER_CAP_N = 0x4E; | |
| 88 static const LETTER_CAP_P = 0x50; | |
| 89 static const LETTER_CAP_U = 0x55; | |
| 90 static const LETTER_CAP_X = 0x58; | |
| 91 | |
| 92 static const C_SEQUENCE_ENTRY = 4; | |
| 93 static const C_MAPPING_KEY = 5; | |
| 94 static const C_MAPPING_VALUE = 6; | |
| 95 static const C_COLLECT_ENTRY = 7; | |
| 96 static const C_SEQUENCE_START = 8; | |
| 97 static const C_SEQUENCE_END = 9; | |
| 98 static const C_MAPPING_START = 10; | |
| 99 static const C_MAPPING_END = 11; | |
| 100 static const C_COMMENT = 12; | |
| 101 static const C_ANCHOR = 13; | |
| 102 static const C_ALIAS = 14; | |
| 103 static const C_TAG = 15; | |
| 104 static const C_LITERAL = 16; | |
| 105 static const C_FOLDED = 17; | |
| 106 static const C_SINGLE_QUOTE = 18; | |
| 107 static const C_DOUBLE_QUOTE = 19; | |
| 108 static const C_DIRECTIVE = 20; | |
| 109 static const C_RESERVED = 21; | |
| 110 | |
| 111 static const BLOCK_OUT = 0; | |
| 112 static const BLOCK_IN = 1; | |
| 113 static const FLOW_OUT = 2; | |
| 114 static const FLOW_IN = 3; | |
| 115 static const BLOCK_KEY = 4; | |
| 116 static const FLOW_KEY = 5; | |
| 117 | |
| 118 static const CHOMPING_STRIP = 0; | |
| 119 static const CHOMPING_KEEP = 1; | |
| 120 static const CHOMPING_CLIP = 2; | |
| 121 | |
| 122 /// The scanner that's used to scan through the document. | |
| 123 final SpanScanner _scanner; | |
| 124 | |
| 125 /// Whether we're parsing a bare document (that is, one that doesn't begin | |
| 126 /// with `---`). Bare documents don't allow `%` immediately following | |
| 127 /// newlines. | |
| 128 bool _inBareDocument = false; | |
| 129 | |
| 130 /// The state of the scanner when it was the farthest in the document it's | |
| 131 /// been. | |
| 132 LineScannerState _farthestState; | |
| 133 | |
| 134 /// The name of the context of the farthest position that has been parsed | |
| 135 /// successfully before backtracking. Used for error reporting. | |
| 136 String _farthestContext = "document"; | |
| 137 | |
| 138 /// A stack of the names of parse contexts. Used for error reporting. | |
| 139 final _contextStack = <String>["document"]; | |
| 140 | |
| 141 /// Annotations attached to ranges of the source string that add extra | |
| 142 /// information to any errors that occur in the annotated range. | |
| 143 final _errorAnnotations = new _RangeMap<String>(); | |
| 144 | |
| 145 /// The buffer containing the string currently being captured. | |
| 146 StringBuffer _capturedString; | |
| 147 | |
| 148 /// The beginning of the current section of the captured string. | |
| 149 int _captureStart; | |
| 150 | |
| 151 /// Whether the current string capture is being overridden. | |
| 152 bool _capturingAs = false; | |
| 153 | |
| 154 Parser(String yaml, sourceUrl) | |
| 155 : _scanner = new SpanScanner(yaml, sourceUrl: sourceUrl) { | |
| 156 _farthestState = _scanner.state; | |
| 157 } | |
| 158 | |
| 159 /// Returns the character at the current position, then moves that position | |
| 160 /// forward one character. | |
| 161 int next() => _scanner.readChar(); | |
| 162 | |
| 163 /// Returns the code unit at the current position, or the character [i] | |
| 164 /// characters after the current position. | |
| 165 int peek([int i = 0]) => _scanner.peekChar(i); | |
| 166 | |
| 167 /// The truthiness operator. Returns `false` if [obj] is `null` or `false`, | |
| 168 /// `true` otherwise. | |
| 169 bool truth(obj) => obj != null && obj != false; | |
| 170 | |
| 171 /// Consumes the current character if it matches [matcher]. Returns the result | |
| 172 /// of [matcher]. | |
| 173 bool consume(bool matcher(int)) { | |
| 174 if (matcher(peek())) { | |
| 175 next(); | |
| 176 return true; | |
| 177 } | |
| 178 return false; | |
| 179 } | |
| 180 | |
| 181 /// Consumes the current character if it equals [char]. | |
| 182 bool consumeChar(int char) => consume((c) => c == char); | |
| 183 | |
| 184 /// Calls [consumer] until it returns a falsey value. Returns a list of all | |
| 185 /// truthy return values of [consumer], or null if it didn't consume anything. | |
| 186 /// | |
| 187 /// Conceptually, repeats a production one or more times. | |
| 188 List oneOrMore(consumer()) { | |
| 189 var first = consumer(); | |
| 190 if (!truth(first)) return null; | |
| 191 var out = [first]; | |
| 192 while (true) { | |
| 193 var el = consumer(); | |
| 194 if (!truth(el)) return out; | |
| 195 out.add(el); | |
| 196 } | |
| 197 return null; // Unreachable. | |
| 198 } | |
| 199 | |
| 200 /// Calls [consumer] until it returns a falsey value. Returns a list of all | |
| 201 /// truthy return values of [consumer], or the empty list if it didn't consume | |
| 202 /// anything. | |
| 203 /// | |
| 204 /// Conceptually, repeats a production any number of times. | |
| 205 List zeroOrMore(consumer()) { | |
| 206 var out = []; | |
| 207 var oldPos = _scanner.position; | |
| 208 while (true) { | |
| 209 var el = consumer(); | |
| 210 if (!truth(el) || oldPos == _scanner.position) return out; | |
| 211 oldPos = _scanner.position; | |
| 212 out.add(el); | |
| 213 } | |
| 214 return null; // Unreachable. | |
| 215 } | |
| 216 | |
| 217 /// Just calls [consumer] and returns its result. Used to make it explicit | |
| 218 /// that a production is intended to be optional. | |
| 219 zeroOrOne(consumer()) => consumer(); | |
| 220 | |
| 221 /// Calls each function in [consumers] until one returns a truthy value, then | |
| 222 /// returns that. | |
| 223 or(List<Function> consumers) { | |
| 224 for (var c in consumers) { | |
| 225 var res = c(); | |
| 226 if (truth(res)) return res; | |
| 227 } | |
| 228 return null; | |
| 229 } | |
| 230 | |
| 231 /// Calls [consumer] and returns its result, but rolls back the parser state | |
| 232 /// if [consumer] returns a falsey value. | |
| 233 transaction(consumer()) { | |
| 234 var oldState = _scanner.state; | |
| 235 var oldCaptureStart = _captureStart; | |
| 236 String capturedSoFar = _capturedString == null ? null : | |
| 237 _capturedString.toString(); | |
| 238 var res = consumer(); | |
| 239 _refreshFarthestState(); | |
| 240 if (truth(res)) return res; | |
| 241 | |
| 242 _scanner.state = oldState; | |
| 243 _captureStart = oldCaptureStart; | |
| 244 _capturedString = capturedSoFar == null ? null : | |
| 245 new StringBuffer(capturedSoFar); | |
| 246 return res; | |
| 247 } | |
| 248 | |
| 249 /// Consumes [n] characters matching [matcher], or none if there isn't a | |
| 250 /// complete match. The first argument to [matcher] is the character code, the | |
| 251 /// second is the index (from 0 to [n] - 1). | |
| 252 /// | |
| 253 /// Returns whether or not the characters were consumed. | |
| 254 bool nAtOnce(int n, bool matcher(int c, int i)) => transaction(() { | |
| 255 for (int i = 0; i < n; i++) { | |
| 256 if (!consume((c) => matcher(c, i))) return false; | |
| 257 } | |
| 258 return true; | |
| 259 }); | |
| 260 | |
| 261 /// Consumes the exact characters in [str], or nothing. | |
| 262 /// | |
| 263 /// Returns whether or not the string was consumed. | |
| 264 bool rawString(String str) => | |
| 265 nAtOnce(str.length, (c, i) => str.codeUnitAt(i) == c); | |
| 266 | |
| 267 /// Consumes and returns a string of characters matching [matcher], or null if | |
| 268 /// there are no such characters. | |
| 269 String stringOf(bool matcher(int)) => | |
| 270 captureString(() => oneOrMore(() => consume(matcher))); | |
| 271 | |
| 272 /// Calls [consumer] and returns the string that was consumed while doing so, | |
| 273 /// or null if [consumer] returned a falsey value. Automatically wraps | |
| 274 /// [consumer] in `transaction`. | |
| 275 String captureString(consumer()) { | |
| 276 // captureString calls may not be nested | |
| 277 assert(_capturedString == null); | |
| 278 | |
| 279 _captureStart = _scanner.position; | |
| 280 _capturedString = new StringBuffer(); | |
| 281 var res = transaction(consumer); | |
| 282 if (!truth(res)) { | |
| 283 _captureStart = null; | |
| 284 _capturedString = null; | |
| 285 return null; | |
| 286 } | |
| 287 | |
| 288 flushCapture(); | |
| 289 var result = _capturedString.toString(); | |
| 290 _captureStart = null; | |
| 291 _capturedString = null; | |
| 292 return result; | |
| 293 } | |
| 294 | |
| 295 captureAs(String replacement, consumer()) => | |
| 296 captureAndTransform(consumer, (_) => replacement); | |
| 297 | |
| 298 captureAndTransform(consumer(), String transformation(String captured)) { | |
| 299 if (_capturedString == null) return consumer(); | |
| 300 if (_capturingAs) return consumer(); | |
| 301 | |
| 302 flushCapture(); | |
| 303 _capturingAs = true; | |
| 304 var res = consumer(); | |
| 305 _capturingAs = false; | |
| 306 if (!truth(res)) return res; | |
| 307 | |
| 308 _capturedString.write(transformation( | |
| 309 _scanner.string.substring(_captureStart, _scanner.position))); | |
| 310 _captureStart = _scanner.position; | |
| 311 return res; | |
| 312 } | |
| 313 | |
| 314 void flushCapture() { | |
| 315 _capturedString.write(_scanner.string.substring( | |
| 316 _captureStart, _scanner.position)); | |
| 317 _captureStart = _scanner.position; | |
| 318 } | |
| 319 | |
| 320 /// Adds a tag and an anchor to [node], if they're defined. | |
| 321 Node addProps(Node node, Pair<Tag, String> props) { | |
| 322 if (props == null || node == null) return node; | |
| 323 if (truth(props.first)) node.tag = props.first; | |
| 324 if (truth(props.last)) node.anchor = props.last; | |
| 325 return node; | |
| 326 } | |
| 327 | |
| 328 /// Creates a MappingNode from [pairs]. | |
| 329 MappingNode map(List<Pair<Node, Node>> pairs, SourceSpan span) { | |
| 330 var content = new Map<Node, Node>(); | |
| 331 pairs.forEach((pair) => content[pair.first] = pair.last); | |
| 332 return new MappingNode("?", content, span); | |
| 333 } | |
| 334 | |
| 335 /// Runs [fn] in a context named [name]. Used for error reporting. | |
| 336 context(String name, fn()) { | |
| 337 try { | 47 try { |
| 338 _contextStack.add(name); | 48 if (isDone) throw new StateError("No more events."); |
| 339 return fn(); | 49 var event = _stateMachine(); |
| 340 } finally { | 50 return event; |
| 341 var popped = _contextStack.removeLast(); | 51 } on StringScannerException catch (error) { |
| 342 assert(popped == name); | 52 throw new YamlException(error.message, error.span); |
| 343 } | 53 } |
| 344 } | 54 } |
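
Not part of the diff: a minimal sketch of how a caller might drive the new pull-style parser, assuming only the members shown above (`Parser`, `parse()`, `isDone`) and that `Event` has a usable `toString()`; the import path and `dumpEvents` name are illustrative assumptions.

```dart
import 'package:yaml/src/parser.dart';

// Pull events one at a time until the parser reports the end of the stream.
void dumpEvents(String yaml) {
  var parser = new Parser(yaml, sourceUrl: 'example.yaml');
  while (!parser.isDone) {
    // Each call consumes scanner tokens and returns the next event; the final
    // event is STREAM_END, after which _state is _State.END and isDone is true.
    var event = parser.parse();
    print(event);
  }
}
```
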
| 345 | 55 |
| 346 /// Adds [message] as extra information to any errors that occur between the | 56 /// Dispatches parsing based on the current state. |
| 347 /// current position and the position of the cursor after running [fn]. The | 57 Event _stateMachine() { |
| 348 /// cursor is reset after [fn] is run. | 58 switch (_state) { |
| 349 annotateError(String message, fn()) { | 59 case _State.STREAM_START: |
| 350 var start = _scanner.position; | 60 return _parseStreamStart(); |
| 351 var end; | 61 case _State.DOCUMENT_START: |
| 352 transaction(() { | 62 return _parseDocumentStart(); |
| 353 fn(); | 63 case _State.DOCUMENT_CONTENT: |
| 354 end = _scanner.position; | 64 return _parseDocumentContent(); |
| 355 return false; | 65 case _State.DOCUMENT_END: |
| 356 }); | 66 return _parseDocumentEnd(); |
| 357 _errorAnnotations[new _Range(start, end)] = message; | 67 case _State.BLOCK_NODE: |
| 358 } | 68 return _parseNode(block: true); |
| 359 | 69 case _State.BLOCK_NODE_OR_INDENTLESS_SEQUENCE: |
| 360 /// Throws an error with additional context information. | 70 return _parseNode(block: true, indentlessSequence: true); |
| 361 void error(String message) => | 71 case _State.FLOW_NODE: |
| 362 _scanner.error("$message (in $_farthestContext)."); | 72 return _parseNode(); |
| 363 | 73 case _State.BLOCK_SEQUENCE_FIRST_ENTRY: |
| 364 /// If [result] is falsey, throws an error saying that [expected] was | 74 // Scan past the `BLOCK-SEQUENCE-FIRST-ENTRY` token to the |
| 365 /// expected. | 75 // `BLOCK-SEQUENCE-ENTRY` token. |
| 366 expect(result, String expected) { | 76 _scanner.scan(); |
| 367 if (truth(result)) return result; | 77 return _parseBlockSequenceEntry(); |
| 368 error("Expected $expected"); | 78 case _State.BLOCK_SEQUENCE_ENTRY: |
| 369 } | 79 return _parseBlockSequenceEntry(); |
| 370 | 80 case _State.INDENTLESS_SEQUENCE_ENTRY: |
| 371 /// Throws an error saying that the parse failed. | 81 return _parseIndentlessSequenceEntry(); |
| 372 /// | 82 case _State.BLOCK_MAPPING_FIRST_KEY: |
| 373 /// Uses [_farthestState] and [_farthestContext] to provide additional | 83 // Scan past the `BLOCK-MAPPING-FIRST-KEY` token to the |
| 374 /// information. | 84 // `BLOCK-MAPPING-KEY` token. |
| 375 parseFailed() { | 85 _scanner.scan(); |
| 376 var message = "Invalid YAML in $_farthestContext"; | 86 return _parseBlockMappingKey(); |
| 377 _refreshFarthestState(); | 87 case _State.BLOCK_MAPPING_KEY: |
| 378 _scanner.state = _farthestState; | 88 return _parseBlockMappingKey(); |
| 379 | 89 case _State.BLOCK_MAPPING_VALUE: |
| 380 var extraError = _errorAnnotations[_scanner.position]; | 90 return _parseBlockMappingValue(); |
| 381 if (extraError != null) message = "$message ($extraError)"; | 91 case _State.FLOW_SEQUENCE_FIRST_ENTRY: |
| 382 _scanner.error("$message."); | 92 return _parseFlowSequenceEntry(first: true); |
| 383 } | 93 case _State.FLOW_SEQUENCE_ENTRY: |
| 384 | 94 return _parseFlowSequenceEntry(); |
| 385 /// Update [_farthestState] if the scanner is farther than it's been before. | 95 case _State.FLOW_SEQUENCE_ENTRY_MAPPING_KEY: |
| 386 void _refreshFarthestState() { | 96 return _parseFlowSequenceEntryMappingKey(); |
| 387 if (_scanner.position <= _farthestState.position) return; | 97 case _State.FLOW_SEQUENCE_ENTRY_MAPPING_VALUE: |
| 388 _farthestState = _scanner.state; | 98 return _parseFlowSequenceEntryMappingValue(); |
| 389 } | 99 case _State.FLOW_SEQUENCE_ENTRY_MAPPING_END: |
| 390 | 100 return _parseFlowSequenceEntryMappingEnd(); |
| 391 /// Returns the number of spaces after the current position. | 101 case _State.FLOW_MAPPING_FIRST_KEY: |
| 392 int countIndentation() { | 102 return _parseFlowMappingKey(first: true); |
| 393 var i = 0; | 103 case _State.FLOW_MAPPING_KEY: |
| 394 while (peek(i) == SP) i++; | 104 return _parseFlowMappingKey(); |
| 395 return i; | 105 case _State.FLOW_MAPPING_VALUE: |
| 396 } | 106 return _parseFlowMappingValue(); |
| 397 | 107 case _State.FLOW_MAPPING_EMPTY_VALUE: |
| 398 /// Returns the indentation for a block scalar. | 108 return _parseFlowMappingValue(empty: true); |
| 399 int blockScalarAdditionalIndentation(_BlockHeader header, int indent) { | 109 default: |
| 400 if (!header.autoDetectIndent) return header.additionalIndent; | 110 throw "Unreachable"; |
| 401 | 111 } |
| 402 var maxSpaces = 0; | 112 } |
| 403 var spaces = 0; | 113 |
| 404 transaction(() { | 114 /// Parses the production: |
| 405 do { | 115 /// |
| 406 spaces = captureString(() => zeroOrMore(() => consumeChar(SP))).length; | 116 /// stream ::= |
| 407 if (spaces > maxSpaces) maxSpaces = spaces; | 117 /// STREAM-START implicit_document? explicit_document* STREAM-END |
| 408 } while (b_break()); | 118 /// ************ |
| 409 return false; | 119 Event _parseStreamStart() { |
| 410 }); | 120 var token = _scanner.scan(); |
| 411 | 121 assert(token.type == TokenType.STREAM_START); |
| 412 // If the next non-empty line isn't indented further than the start of the | 122 |
| 413 // block scalar, that means the scalar is going to be empty. Returning any | 123 _state = _State.DOCUMENT_START; |
| 414 // value > 0 will cause the parser not to consume any text. | 124 return new Event(EventType.STREAM_START, token.span); |
| 415 if (spaces <= indent) return 1; | 125 } |
| 416 | 126 |
| 417 // It's an error for a leading empty line to be indented more than the first | 127 /// Parses the productions: |
| 418 // non-empty line. | 128 /// |
| 419 if (maxSpaces > spaces) { | 129 /// implicit_document ::= block_node DOCUMENT-END* |
| 420 _scanner.error("Leading empty lines may not be indented more than the " | 130 /// * |
| 421 "first non-empty line."); | 131 /// explicit_document ::= |
| 422 } | 132 /// DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* |
| 423 | 133 /// ************************* |
| 424 return spaces - indent; | 134 Event _parseDocumentStart() { |
| 425 } | 135 var token = _scanner.peek(); |
| 426 | 136 |
| 427 /// Returns whether the current position is at the beginning of a line. | 137 // libyaml requires any document beyond the first in the stream to have an |
| 428 bool get atStartOfLine => _scanner.column == 0; | 138 // explicit document start indicator, but the spec allows it to be omitted |
| 429 | 139 // as long as there was an end indicator. |
| 430 /// Given an indicator character, returns the type of that indicator (or null | 140 |
| 431 /// if the indicator isn't found). | 141 // Parse extra document end indicators. |
| 432 int indicatorType(int char) { | 142 while (token.type == TokenType.DOCUMENT_END) { |
| 433 switch (char) { | 143 token = _scanner.advance(); |
| 434 case HYPHEN: return C_SEQUENCE_ENTRY; | 144 } |
| 435 case QUESTION_MARK: return C_MAPPING_KEY; | 145 |
| 436 case COLON: return C_MAPPING_VALUE; | 146 if (token.type != TokenType.VERSION_DIRECTIVE && |
| 437 case COMMA: return C_COLLECT_ENTRY; | 147 token.type != TokenType.TAG_DIRECTIVE && |
| 438 case LEFT_BRACKET: return C_SEQUENCE_START; | 148 token.type != TokenType.DOCUMENT_START && |
| 439 case RIGHT_BRACKET: return C_SEQUENCE_END; | 149 token.type != TokenType.STREAM_END) { |
| 440 case LEFT_BRACE: return C_MAPPING_START; | 150 // Parse an implicit document. |
| 441 case RIGHT_BRACE: return C_MAPPING_END; | 151 _processDirectives(); |
| 442 case HASH: return C_COMMENT; | 152 _states.add(_State.DOCUMENT_END); |
| 443 case AMPERSAND: return C_ANCHOR; | 153 _state = _State.BLOCK_NODE; |
| 444 case ASTERISK: return C_ALIAS; | 154 return new DocumentStartEvent(token.span.start.pointSpan()); |
| 445 case EXCLAMATION: return C_TAG; | 155 } |
| 446 case VERTICAL_BAR: return C_LITERAL; | 156 |
| 447 case GREATER_THAN: return C_FOLDED; | 157 if (token.type == TokenType.STREAM_END) { |
| 448 case SINGLE_QUOTE: return C_SINGLE_QUOTE; | 158 _state = _State.END; |
| 449 case DOUBLE_QUOTE: return C_DOUBLE_QUOTE; | 159 _scanner.scan(); |
| 450 case PERCENT: return C_DIRECTIVE; | 160 return new Event(EventType.STREAM_END, token.span); |
| 451 case AT: | 161 } |
| 452 case GRAVE_ACCENT: | 162 |
| 453 return C_RESERVED; | 163 // Parse an explicit document. |
| 454 default: return null; | 164 var start = token.span; |
| 455 } | 165 var pair = _processDirectives(); |
| 456 } | 166 var versionDirective = pair.first; |
| 457 | 167 var tagDirectives = pair.last; |
| 458 // 1 | 168 token = _scanner.peek(); |
| 459 bool isPrintable(int char) { | 169 if (token.type != TokenType.DOCUMENT_START) { |
| 460 if (char == null) return false; | 170 throw new YamlException("Expected document start.", token.span); |
| 461 return char == TAB || | 171 } |
| 462 char == LF || | 172 |
| 463 char == CR || | 173 _states.add(_State.DOCUMENT_END); |
| 464 (char >= SP && char <= TILDE) || | 174 _state = _State.DOCUMENT_CONTENT; |
| 465 char == NEL || | 175 _scanner.scan(); |
| 466 (char >= 0xA0 && char <= 0xD7FF) || | 176 return new DocumentStartEvent(start.expand(token.span), |
| 467 (char >= 0xE000 && char <= 0xFFFD) || | 177 versionDirective: versionDirective, |
| 468 (char >= 0x10000 && char <= 0x10FFFF); | 178 tagDirectives: tagDirectives, |
| 469 } | 179 isImplicit: false); |
| 470 | 180 } |
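
To illustrate the comment above about document start indicators (not part of the diff): the stream below is an assumed example in which the second document omits `---` but is still accepted, because the first document was closed by an explicit `...` end indicator and so the second is handled by the implicit-document branch of `_parseDocumentStart`.

```dart
// Assumed example input. The first document ends with an explicit `...`
// indicator, so the second may begin without `---`.
var stream = '''
first: doc
...
second: doc
''';
// Roughly: STREAM_START, DocumentStart, ...events for the first mapping...,
// DocumentEnd (isImplicit: false, from the `...` line), DocumentStart,
// ...events for the second mapping..., DocumentEnd (isImplicit: true),
// STREAM_END.
```
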
| 471 // 2 | 181 |
| 472 bool isJson(int char) => char != null && | 182 /// Parses the productions: |
| 473 (char == TAB || (char >= SP && char <= 0x10FFFF)); | 183 /// |
| 474 | 184 /// explicit_document ::= |
| 475 // 22 | 185 /// DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* |
| 476 bool c_indicator(int type) => consume((c) => indicatorType(c) == type); | 186 /// *********** |
| 477 | 187 Event _parseDocumentContent() { |
| 478 // 23 | 188 var token = _scanner.peek(); |
| 479 bool isFlowIndicator(int char) { | 189 |
| 480 var indicator = indicatorType(char); | 190 switch (token.type) { |
| 481 return indicator == C_COLLECT_ENTRY || | 191 case TokenType.VERSION_DIRECTIVE: |
| 482 indicator == C_SEQUENCE_START || | 192 case TokenType.TAG_DIRECTIVE: |
| 483 indicator == C_SEQUENCE_END || | 193 case TokenType.DOCUMENT_START: |
| 484 indicator == C_MAPPING_START || | 194 case TokenType.DOCUMENT_END: |
| 485 indicator == C_MAPPING_END; | 195 case TokenType.STREAM_END: |
| 486 } | 196 _state = _states.removeLast(); |
| 487 | 197 return _processEmptyScalar(token.span.start); |
| 488 // 26 | 198 default: |
| 489 bool isBreak(int char) => char == LF || char == CR; | 199 return _parseNode(block: true); |
| 490 | 200 } |
| 491 // 27 | 201 } |
| 492 bool isNonBreak(int char) => isPrintable(char) && !isBreak(char); | 202 |
| 493 | 203 /// Parses the productions: |
| 494 // 28 | 204 /// |
| 495 bool b_break() { | 205 /// implicit_document ::= block_node DOCUMENT-END* |
| 496 if (consumeChar(CR)) { | 206 /// ************* |
| 497 zeroOrOne(() => consumeChar(LF)); | 207 /// explicit_document ::= |
| 498 return true; | 208 /// DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* |
| 499 } | 209 /// ************* |
| 500 return consumeChar(LF); | 210 Event _parseDocumentEnd() { |
| 501 } | 211 _tagDirectives.clear(); |
| 502 | 212 _state = _State.DOCUMENT_START; |
| 503 // 29 | 213 |
| 504 bool b_asLineFeed() => captureAs("\n", () => b_break()); | 214 var token = _scanner.peek(); |
| 505 | 215 if (token.type == TokenType.DOCUMENT_END) { |
| 506 // 30 | 216 _scanner.scan(); |
| 507 bool b_nonContent() => captureAs("", () => b_break()); | 217 return new DocumentEndEvent(token.span, isImplicit: false); |
| 508 | 218 } else { |
| 509 // 33 | 219 return new DocumentEndEvent( |
| 510 bool isSpace(int char) => char == SP || char == TAB; | 220 token.span.start.pointSpan(), isImplicit: true); |
| 511 | 221 } |
| 512 // 34 | 222 } |
| 513 bool isNonSpace(int char) => isNonBreak(char) && !isSpace(char); | 223 |
| 514 | 224 /// Parses the productions: |
| 515 // 35 | 225 /// |
| 516 bool isDecDigit(int char) => char != null && char >= NUMBER_0 && | 226 /// block_node_or_indentless_sequence ::= |
| 517 char <= NUMBER_9; | 227 /// ALIAS |
| 518 | 228 /// ***** |
| 519 // 36 | 229 /// | properties (block_content | indentless_block_sequence)? |
| 520 bool isHexDigit(int char) { | 230 /// ********** * |
| 521 if (char == null) return false; | 231 /// | block_content | indentless_block_sequence |
| 522 return isDecDigit(char) || | 232 /// * |
| 523 (char >= LETTER_A && char <= LETTER_F) || | 233 /// block_node ::= ALIAS |
| 524 (char >= LETTER_CAP_A && char <= LETTER_CAP_F); | 234 /// ***** |
| 525 } | 235 /// | properties block_content? |
| 526 | 236 /// ********** * |
| 527 // 41 | 237 /// | block_content |
| 528 bool c_escape() => captureAs("", () => consumeChar(BACKSLASH)); | 238 /// * |
| 529 | 239 /// flow_node ::= ALIAS |
| 530 // 42 | 240 /// ***** |
| 531 bool ns_escNull() => captureAs("\x00", () => consumeChar(NUMBER_0)); | 241 /// | properties flow_content? |
| 532 | 242 /// ********** * |
| 533 // 43 | 243 /// | flow_content |
| 534 bool ns_escBell() => captureAs("\x07", () => consumeChar(LETTER_A)); | 244 /// * |
| 535 | 245 /// properties ::= TAG ANCHOR? | ANCHOR TAG? |
| 536 // 44 | 246 /// ************************* |
| 537 bool ns_escBackspace() => captureAs("\b", () => consumeChar(LETTER_B)); | 247 /// block_content ::= block_collection | flow_collection | SCALAR |
| 538 | 248 /// ****** |
| 539 // 45 | 249 /// flow_content ::= flow_collection | SCALAR |
| 540 bool ns_escHorizontalTab() => captureAs("\t", () { | 250 /// ****** |
| 541 return consume((c) => c == LETTER_T || c == TAB); | 251 Event _parseNode({bool block: false, bool indentlessSequence: false}) { |
| 542 }); | 252 var token = _scanner.peek(); |
| 543 | 253 |
| 544 // 46 | 254 if (token is AliasToken) { |
| 545 bool ns_escLineFeed() => captureAs("\n", () => consumeChar(LETTER_N)); | 255 _scanner.scan(); |
| 546 | 256 _state = _states.removeLast(); |
| 547 // 47 | 257 return new AliasEvent(token.span, token.name); |
| 548 bool ns_escVerticalTab() => captureAs("\v", () => consumeChar(LETTER_V)); | 258 } |
| 549 | 259 |
| 550 // 48 | 260 var anchor; |
| 551 bool ns_escFormFeed() => captureAs("\f", () => consumeChar(LETTER_F)); | 261 var tagToken; |
| 552 | 262 var span = token.span.start.pointSpan(); |
| 553 // 49 | 263 parseAnchor() { |
| 554 bool ns_escCarriageReturn() => captureAs("\r", () => consumeChar(LETTER_R)); | 264 anchor = token.name; |
| 555 | 265 span = span.expand(token.span); |
| 556 // 50 | 266 token = _scanner.advance(); |
| 557 bool ns_escEscape() => captureAs("\x1B", () => consumeChar(LETTER_E)); | 267 } |
| 558 | 268 |
| 559 // 51 | 269 parseTag() { |
| 560 bool ns_escSpace() => consumeChar(SP); | 270 tagToken = token; |
| 561 | 271 span = span.expand(token.span); |
| 562 // 52 | 272 token = _scanner.advance(); |
| 563 bool ns_escDoubleQuote() => consumeChar(DOUBLE_QUOTE); | 273 } |
| 564 | 274 |
| 565 // 53 | 275 if (token is AnchorToken) { |
| 566 bool ns_escSlash() => consumeChar(SLASH); | 276 parseAnchor(); |
| 567 | 277 if (token is TagToken) parseTag(); |
| 568 // 54 | 278 } else if (token is TagToken) { |
| 569 bool ns_escBackslash() => consumeChar(BACKSLASH); | 279 parseTag(); |
| 570 | 280 if (token is AnchorToken) parseAnchor(); |
| 571 // 55 | 281 } |
| 572 bool ns_escNextLine() => captureAs("\x85", () => consumeChar(LETTER_CAP_N)); | 282 |
| 573 | 283 var tag; |
| 574 // 56 | 284 if (tagToken != null) { |
| 575 bool ns_escNonBreakingSpace() => | 285 if (tagToken.handle == null) { |
| 576 captureAs("\xA0", () => consumeChar(UNDERSCORE)); | 286 tag = tagToken.suffix; |
| 577 | 287 } else { |
| 578 // 57 | 288 var tagDirective = _tagDirectives[tagToken.handle]; |
| 579 bool ns_escLineSeparator() => | 289 if (tagDirective == null) { |
| 580 captureAs("\u2028", () => consumeChar(LETTER_CAP_L)); | 290 throw new YamlException("Undefined tag handle.", tagToken.span); |
| 581 | |
| 582 // 58 | |
| 583 bool ns_escParagraphSeparator() => | |
| 584 captureAs("\u2029", () => consumeChar(LETTER_CAP_P)); | |
| 585 | |
| 586 // 59 | |
| 587 bool ns_esc8Bit() => ns_escNBit(LETTER_X, 2); | |
| 588 | |
| 589 // 60 | |
| 590 bool ns_esc16Bit() => ns_escNBit(LETTER_U, 4); | |
| 591 | |
| 592 // 61 | |
| 593 bool ns_esc32Bit() => ns_escNBit(LETTER_CAP_U, 8); | |
| 594 | |
| 595 // Helper method for 59 - 61 | |
| 596 bool ns_escNBit(int char, int digits) { | |
| 597 if (!captureAs('', () => consumeChar(char))) return false; | |
| 598 var captured = captureAndTransform( | |
| 599 () => nAtOnce(digits, (c, _) => isHexDigit(c)), | |
| 600 (hex) => new String.fromCharCodes([int.parse("0x$hex")])); | |
| 601 return expect(captured, "$digits hexadecimal digits"); | |
| 602 } | |
| 603 | |
| 604 // 62 | |
| 605 bool c_ns_escChar() => context('escape sequence', () => transaction(() { | |
| 606 if (!truth(c_escape())) return false; | |
| 607 return truth(or([ | |
| 608 ns_escNull, ns_escBell, ns_escBackspace, ns_escHorizontalTab, | |
| 609 ns_escLineFeed, ns_escVerticalTab, ns_escFormFeed, ns_escCarriageReturn, | |
| 610 ns_escEscape, ns_escSpace, ns_escDoubleQuote, ns_escSlash, | |
| 611 ns_escBackslash, ns_escNextLine, ns_escNonBreakingSpace, | |
| 612 ns_escLineSeparator, ns_escParagraphSeparator, ns_esc8Bit, ns_esc16Bit, | |
| 613 ns_esc32Bit | |
| 614 ])); | |
| 615 })); | |
| 616 | |
| 617 // 63 | |
| 618 bool s_indent(int indent) { | |
| 619 var result = nAtOnce(indent, (c, i) => c == SP); | |
| 620 if (peek() == TAB) { | |
| 621 annotateError("tab characters are not allowed as indentation in YAML", | |
| 622 () => zeroOrMore(() => consume(isSpace))); | |
| 623 } | |
| 624 return result; | |
| 625 } | |
| 626 | |
| 627 // 64 | |
| 628 bool s_indentLessThan(int indent) { | |
| 629 for (int i = 0; i < indent - 1; i++) { | |
| 630 if (!consumeChar(SP)) { | |
| 631 if (peek() == TAB) { | |
| 632 annotateError("tab characters are not allowed as indentation in YAML", | |
| 633 () { | |
| 634 for (; i < indent - 1; i++) { | |
| 635 if (!consume(isSpace)) break; | |
| 636 } | |
| 637 }); | |
| 638 } | 291 } |
| 639 break; | 292 |
| 640 } | 293 tag = tagDirective.prefix + tagToken.suffix; |
| 641 } | 294 } |
| 642 return true; | 295 } |
| 643 } | 296 |
| 644 | 297 if (indentlessSequence && token.type == TokenType.BLOCK_ENTRY) { |
| 645 // 65 | 298 _state = _State.INDENTLESS_SEQUENCE_ENTRY; |
| 646 bool s_indentLessThanOrEqualTo(int indent) => s_indentLessThan(indent + 1); | 299 return new SequenceStartEvent( |
| 647 | 300 span.expand(token.span), CollectionStyle.BLOCK, |
| 648 // 66 | 301 anchor: anchor, tag: tag); |
| 649 bool s_separateInLine() => transaction(() { | 302 } |
| 650 return captureAs('', () => | 303 |
| 651 truth(oneOrMore(() => consume(isSpace))) || atStartOfLine); | 304 if (token is ScalarToken) { |
| 652 }); | 305 // All non-plain scalars have the "!" tag by default. |
| 653 | 306 if (tag == null && token.style != ScalarStyle.PLAIN) tag = "!"; |
| 654 // 67 | 307 |
| 655 bool s_linePrefix(int indent, int ctx) => captureAs("", () { | 308 _state = _states.removeLast(); |
| 656 switch (ctx) { | 309 _scanner.scan(); |
| 657 case BLOCK_OUT: | 310 return new ScalarEvent( |
| 658 case BLOCK_IN: | 311 span.expand(token.span), token.value, token.style, |
| 659 return s_blockLinePrefix(indent); | 312 anchor: anchor, tag: tag); |
| 660 case FLOW_OUT: | 313 } |
| 661 case FLOW_IN: | 314 |
| 662 return s_flowLinePrefix(indent); | 315 if (token.type == TokenType.FLOW_SEQUENCE_START) { |
| 663 } | 316 _state = _State.FLOW_SEQUENCE_FIRST_ENTRY; |
| 664 }); | 317 return new SequenceStartEvent( |
| 665 | 318 span.expand(token.span), CollectionStyle.FLOW, |
| 666 // 68 | 319 anchor: anchor, tag: tag); |
| 667 bool s_blockLinePrefix(int indent) => s_indent(indent); | 320 } |
| 668 | 321 |
| 669 // 69 | 322 if (token.type == TokenType.FLOW_MAPPING_START) { |
| 670 bool s_flowLinePrefix(int indent) => captureAs('', () { | 323 _state = _State.FLOW_MAPPING_FIRST_KEY; |
| 671 if (!truth(s_indent(indent))) return false; | 324 return new MappingStartEvent( |
| 672 zeroOrOne(s_separateInLine); | 325 span.expand(token.span), CollectionStyle.FLOW, |
| 673 return true; | 326 anchor: anchor, tag: tag); |
| 674 }); | 327 } |
| 675 | 328 |
| 676 // 70 | 329 if (block && token.type == TokenType.BLOCK_SEQUENCE_START) { |
| 677 bool l_empty(int indent, int ctx) => transaction(() { | 330 _state = _State.BLOCK_SEQUENCE_FIRST_ENTRY; |
| 678 var start = or([ | 331 return new SequenceStartEvent( |
| 679 () => s_linePrefix(indent, ctx), | 332 span.expand(token.span), CollectionStyle.BLOCK, |
| 680 () => s_indentLessThan(indent) | 333 anchor: anchor, tag: tag); |
| 681 ]); | 334 } |
| 682 if (!truth(start)) return false; | 335 |
| 683 return b_asLineFeed(); | 336 |
| 684 }); | 337 if (block && token.type == TokenType.BLOCK_MAPPING_START) { |
| 685 | 338 _state = _State.BLOCK_MAPPING_FIRST_KEY; |
| 686 // 71 | 339 return new MappingStartEvent( |
| 687 bool b_asSpace() => captureAs(" ", () => consume(isBreak)); | 340 span.expand(token.span), CollectionStyle.BLOCK, |
| 688 | 341 anchor: anchor, tag: tag); |
| 689 // 72 | 342 } |
| 690 bool b_l_trimmed(int indent, int ctx) => transaction(() { | 343 |
| 691 if (!truth(b_nonContent())) return false; | 344 if (anchor != null || tag != null) { |
| 692 return truth(oneOrMore(() => captureAs("\n", () => l_empty(indent, ctx)))); | 345 _state = _states.removeLast(); |
| 693 }); | 346 return new ScalarEvent( |
| 694 | 347 span, '', ScalarStyle.PLAIN, |
| 695 // 73 | 348 anchor: anchor, tag: tag); |
| 696 bool b_l_folded(int indent, int ctx) => | 349 } |
| 697 or([() => b_l_trimmed(indent, ctx), b_asSpace]); | 350 |
| 698 | 351 throw new YamlException("Expected node content.", span); |
| 699 // 74 | 352 } |
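
Not part of the diff: a small sketch of the tag-handle resolution performed in `_parseNode` above, where a handle registered by a `%TAG` directive is expanded by concatenating the directive's prefix with the tag token's suffix, and an undefined handle produces an "Undefined tag handle." error. The directive value is an assumed example, and `_tagDirectives` is simplified here to a map of prefix strings.

```dart
// Assumed example: after `%TAG !e! tag:example.com,2014:` is processed, the
// handle "!e!" maps to that prefix, so a node written as `!e!point` gets the
// tag below (an unknown handle would instead be an error).
var tagDirectives = {'!e!': 'tag:example.com,2014:'};
var tag = tagDirectives['!e!'] + 'point'; // => "tag:example.com,2014:point"
```
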
| 700 bool s_flowFolded(int indent) => transaction(() { | 353 |
| 701 zeroOrOne(s_separateInLine); | 354 /// Parses the productions: |
| 702 if (!truth(b_l_folded(indent, FLOW_IN))) return false; | 355 /// |
| 703 return s_flowLinePrefix(indent); | 356 /// block_sequence ::= |
| 704 }); | 357 /// BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END |
| 705 | 358 /// ******************** *********** * ********* |
| 706 // 75 | 359 Event _parseBlockSequenceEntry() { |
| 707 bool c_nb_commentText() { | 360 var token = _scanner.peek(); |
| 708 if (!truth(c_indicator(C_COMMENT))) return false; | 361 |
| 709 zeroOrMore(() => consume(isNonBreak)); | 362 if (token.type == TokenType.BLOCK_ENTRY) { |
| 710 return true; | 363 token = _scanner.advance(); |
| 711 } | 364 |
| 712 | 365 if (token.type == TokenType.BLOCK_ENTRY || |
| 713 // 76 | 366 token.type == TokenType.BLOCK_END) { |
| 714 bool b_comment() => _scanner.isDone || b_nonContent(); | 367 _state = _State.BLOCK_SEQUENCE_ENTRY; |
| 715 | 368 return _processEmptyScalar(token.span.end); |
| 716 // 77 | |
| 717 bool s_b_comment() { | |
| 718 if (truth(s_separateInLine())) { | |
| 719 zeroOrOne(c_nb_commentText); | |
| 720 } | |
| 721 return b_comment(); | |
| 722 } | |
| 723 | |
| 724 // 78 | |
| 725 bool l_comment() => transaction(() { | |
| 726 if (!truth(s_separateInLine())) return false; | |
| 727 zeroOrOne(c_nb_commentText); | |
| 728 return b_comment(); | |
| 729 }); | |
| 730 | |
| 731 // 79 | |
| 732 bool s_l_comments() { | |
| 733 if (!truth(s_b_comment()) && !atStartOfLine) return false; | |
| 734 zeroOrMore(l_comment); | |
| 735 return true; | |
| 736 } | |
| 737 | |
| 738 // 80 | |
| 739 bool s_separate(int indent, int ctx) { | |
| 740 switch (ctx) { | |
| 741 case BLOCK_OUT: | |
| 742 case BLOCK_IN: | |
| 743 case FLOW_OUT: | |
| 744 case FLOW_IN: | |
| 745 return s_separateLines(indent); | |
| 746 case BLOCK_KEY: | |
| 747 case FLOW_KEY: | |
| 748 return s_separateInLine(); | |
| 749 default: throw 'Invalid context "$ctx".'; | |
| 750 } | |
| 751 } | |
| 752 | |
| 753 // 81 | |
| 754 bool s_separateLines(int indent) { | |
| 755 return transaction(() => s_l_comments() && s_flowLinePrefix(indent)) || | |
| 756 s_separateInLine(); | |
| 757 } | |
| 758 | |
| 759 // 82 | |
| 760 bool l_directive() => false; // TODO(nweiz): implement | |
| 761 | |
| 762 // 96 | |
| 763 Pair<Tag, String> c_ns_properties(int indent, int ctx) { | |
| 764 var tag, anchor; | |
| 765 tag = c_ns_tagProperty(); | |
| 766 if (truth(tag)) { | |
| 767 anchor = transaction(() { | |
| 768 if (!truth(s_separate(indent, ctx))) return null; | |
| 769 return c_ns_anchorProperty(); | |
| 770 }); | |
| 771 return new Pair<Tag, String>(tag, anchor); | |
| 772 } | |
| 773 | |
| 774 anchor = c_ns_anchorProperty(); | |
| 775 if (truth(anchor)) { | |
| 776 tag = transaction(() { | |
| 777 if (!truth(s_separate(indent, ctx))) return null; | |
| 778 return c_ns_tagProperty(); | |
| 779 }); | |
| 780 return new Pair<Tag, String>(tag, anchor); | |
| 781 } | |
| 782 | |
| 783 return null; | |
| 784 } | |
| 785 | |
| 786 // 97 | |
| 787 Tag c_ns_tagProperty() => null; // TODO(nweiz): implement | |
| 788 | |
| 789 // 101 | |
| 790 String c_ns_anchorProperty() => null; // TODO(nweiz): implement | |
| 791 | |
| 792 // 102 | |
| 793 bool isAnchorChar(int char) => isNonSpace(char) && !isFlowIndicator(char); | |
| 794 | |
| 795 // 103 | |
| 796 String ns_anchorName() => | |
| 797 captureString(() => oneOrMore(() => consume(isAnchorChar))); | |
| 798 | |
| 799 // 104 | |
| 800 Node c_ns_aliasNode() { | |
| 801 var start = _scanner.state; | |
| 802 if (!truth(c_indicator(C_ALIAS))) return null; | |
| 803 var name = expect(ns_anchorName(), 'anchor name'); | |
| 804 return new AliasNode(name, _scanner.spanFrom(start)); | |
| 805 } | |
| 806 | |
| 807 // 105 | |
| 808 ScalarNode e_scalar() => new ScalarNode("?", _scanner.emptySpan, content: ""); | |
| 809 | |
| 810 // 106 | |
| 811 ScalarNode e_node() => e_scalar(); | |
| 812 | |
| 813 // 107 | |
| 814 bool nb_doubleChar() => or([ | |
| 815 c_ns_escChar, | |
| 816 () => consume((c) => isJson(c) && c != BACKSLASH && c != DOUBLE_QUOTE) | |
| 817 ]); | |
| 818 | |
| 819 // 108 | |
| 820 bool ns_doubleChar() => !isSpace(peek()) && truth(nb_doubleChar()); | |
| 821 | |
| 822 // 109 | |
| 823 Node c_doubleQuoted(int indent, int ctx) => context('string', () { | |
| 824 return transaction(() { | |
| 825 var start = _scanner.state; | |
| 826 if (!truth(c_indicator(C_DOUBLE_QUOTE))) return null; | |
| 827 var contents = nb_doubleText(indent, ctx); | |
| 828 if (!truth(c_indicator(C_DOUBLE_QUOTE))) return null; | |
| 829 return new ScalarNode("!", _scanner.spanFrom(start), content: contents); | |
| 830 }); | |
| 831 }); | |
| 832 | |
| 833 // 110 | |
| 834 String nb_doubleText(int indent, int ctx) => captureString(() { | |
| 835 switch (ctx) { | |
| 836 case FLOW_OUT: | |
| 837 case FLOW_IN: | |
| 838 nb_doubleMultiLine(indent); | |
| 839 break; | |
| 840 case BLOCK_KEY: | |
| 841 case FLOW_KEY: | |
| 842 nb_doubleOneLine(); | |
| 843 break; | |
| 844 } | |
| 845 return true; | |
| 846 }); | |
| 847 | |
| 848 // 111 | |
| 849 void nb_doubleOneLine() { | |
| 850 zeroOrMore(nb_doubleChar); | |
| 851 } | |
| 852 | |
| 853 // 112 | |
| 854 bool s_doubleEscaped(int indent) => transaction(() { | |
| 855 zeroOrMore(() => consume(isSpace)); | |
| 856 if (!captureAs("", () => consumeChar(BACKSLASH))) return false; | |
| 857 if (!truth(b_nonContent())) return false; | |
| 858 zeroOrMore(() => captureAs("\n", () => l_empty(indent, FLOW_IN))); | |
| 859 return s_flowLinePrefix(indent); | |
| 860 }); | |
| 861 | |
| 862 // 113 | |
| 863 bool s_doubleBreak(int indent) => or([ | |
| 864 () => s_doubleEscaped(indent), | |
| 865 () => s_flowFolded(indent) | |
| 866 ]); | |
| 867 | |
| 868 // 114 | |
| 869 void nb_ns_doubleInLine() { | |
| 870 zeroOrMore(() => transaction(() { | |
| 871 zeroOrMore(() => consume(isSpace)); | |
| 872 return ns_doubleChar(); | |
| 873 })); | |
| 874 } | |
| 875 | |
| 876 // 115 | |
| 877 bool s_doubleNextLine(int indent) { | |
| 878 if (!truth(s_doubleBreak(indent))) return false; | |
| 879 zeroOrOne(() { | |
| 880 if (!truth(ns_doubleChar())) return; | |
| 881 nb_ns_doubleInLine(); | |
| 882 or([ | |
| 883 () => s_doubleNextLine(indent), | |
| 884 () => zeroOrMore(() => consume(isSpace)) | |
| 885 ]); | |
| 886 }); | |
| 887 return true; | |
| 888 } | |
| 889 | |
| 890 // 116 | |
| 891 void nb_doubleMultiLine(int indent) { | |
| 892 nb_ns_doubleInLine(); | |
| 893 or([ | |
| 894 () => s_doubleNextLine(indent), | |
| 895 () => zeroOrMore(() => consume(isSpace)) | |
| 896 ]); | |
| 897 } | |
| 898 | |
| 899 // 117 | |
| 900 bool c_quotedQuote() => captureAs("'", () => rawString("''")); | |
| 901 | |
| 902 // 118 | |
| 903 bool nb_singleChar() => or([ | |
| 904 c_quotedQuote, | |
| 905 () => consume((c) => isJson(c) && c != SINGLE_QUOTE) | |
| 906 ]); | |
| 907 | |
| 908 // 119 | |
| 909 bool ns_singleChar() => !isSpace(peek()) && truth(nb_singleChar()); | |
| 910 | |
| 911 // 120 | |
| 912 Node c_singleQuoted(int indent, int ctx) => context('string', () { | |
| 913 return transaction(() { | |
| 914 var start = _scanner.state; | |
| 915 if (!truth(c_indicator(C_SINGLE_QUOTE))) return null; | |
| 916 var contents = nb_singleText(indent, ctx); | |
| 917 if (!truth(c_indicator(C_SINGLE_QUOTE))) return null; | |
| 918 return new ScalarNode("!", _scanner.spanFrom(start), content: contents); | |
| 919 }); | |
| 920 }); | |
| 921 | |
| 922 // 121 | |
| 923 String nb_singleText(int indent, int ctx) => captureString(() { | |
| 924 switch (ctx) { | |
| 925 case FLOW_OUT: | |
| 926 case FLOW_IN: | |
| 927 nb_singleMultiLine(indent); | |
| 928 break; | |
| 929 case BLOCK_KEY: | |
| 930 case FLOW_KEY: | |
| 931 nb_singleOneLine(indent); | |
| 932 break; | |
| 933 } | |
| 934 return true; | |
| 935 }); | |
| 936 | |
| 937 // 122 | |
| 938 void nb_singleOneLine(int indent) { | |
| 939 zeroOrMore(nb_singleChar); | |
| 940 } | |
| 941 | |
| 942 // 123 | |
| 943 void nb_ns_singleInLine() { | |
| 944 zeroOrMore(() => transaction(() { | |
| 945 zeroOrMore(() => consume(isSpace)); | |
| 946 return ns_singleChar(); | |
| 947 })); | |
| 948 } | |
| 949 | |
| 950 // 124 | |
| 951 bool s_singleNextLine(int indent) { | |
| 952 if (!truth(s_flowFolded(indent))) return false; | |
| 953 zeroOrOne(() { | |
| 954 if (!truth(ns_singleChar())) return; | |
| 955 nb_ns_singleInLine(); | |
| 956 or([ | |
| 957 () => s_singleNextLine(indent), | |
| 958 () => zeroOrMore(() => consume(isSpace)) | |
| 959 ]); | |
| 960 }); | |
| 961 return true; | |
| 962 } | |
| 963 | |
| 964 // 125 | |
| 965 void nb_singleMultiLine(int indent) { | |
| 966 nb_ns_singleInLine(); | |
| 967 or([ | |
| 968 () => s_singleNextLine(indent), | |
| 969 () => zeroOrMore(() => consume(isSpace)) | |
| 970 ]); | |
| 971 } | |
| 972 | |
| 973 // 126 | |
| 974 bool ns_plainFirst(int ctx) { | |
| 975 var char = peek(); | |
| 976 var indicator = indicatorType(char); | |
| 977 if (indicator == C_RESERVED) { | |
| 978 error("Reserved indicators can't start a plain scalar"); | |
| 979 } | |
| 980 var match = (isNonSpace(char) && indicator == null) || | |
| 981 ((indicator == C_MAPPING_KEY || | |
| 982 indicator == C_MAPPING_VALUE || | |
| 983 indicator == C_SEQUENCE_ENTRY) && | |
| 984 isPlainSafe(ctx, peek(1))); | |
| 985 | |
| 986 if (match) next(); | |
| 987 return match; | |
| 988 } | |
| 989 | |
| 990 // 127 | |
| 991 bool isPlainSafe(int ctx, int char) { | |
| 992 switch (ctx) { | |
| 993 case FLOW_OUT: | |
| 994 case BLOCK_KEY: | |
| 995 // 128 | |
| 996 return isNonSpace(char); | |
| 997 case FLOW_IN: | |
| 998 case FLOW_KEY: | |
| 999 // 129 | |
| 1000 return isNonSpace(char) && !isFlowIndicator(char); | |
| 1001 default: throw 'Invalid context "$ctx".'; | |
| 1002 } | |
| 1003 } | |
| 1004 | |
| 1005 // 130 | |
| 1006 bool ns_plainChar(int ctx) { | |
| 1007 var char = peek(); | |
| 1008 var indicator = indicatorType(char); | |
| 1009 var safeChar = isPlainSafe(ctx, char) && indicator != C_MAPPING_VALUE && | |
| 1010 indicator != C_COMMENT; | |
| 1011 var nonCommentHash = isNonSpace(peek(-1)) && indicator == C_COMMENT; | |
| 1012 var nonMappingColon = indicator == C_MAPPING_VALUE && | |
| 1013 isPlainSafe(ctx, peek(1)); | |
| 1014 var match = safeChar || nonCommentHash || nonMappingColon; | |
| 1015 | |
| 1016 if (match) next(); | |
| 1017 return match; | |
| 1018 } | |
| 1019 | |
| 1020 // 131 | |
| 1021 String ns_plain(int indent, int ctx) => context('plain scalar', () { | |
| 1022 return captureString(() { | |
| 1023 switch (ctx) { | |
| 1024 case FLOW_OUT: | |
| 1025 case FLOW_IN: | |
| 1026 return ns_plainMultiLine(indent, ctx); | |
| 1027 case BLOCK_KEY: | |
| 1028 case FLOW_KEY: | |
| 1029 return ns_plainOneLine(ctx); | |
| 1030 default: throw 'Invalid context "$ctx".'; | |
| 1031 } | |
| 1032 }); | |
| 1033 }); | |
| 1034 | |
| 1035 // 132 | |
| 1036 void nb_ns_plainInLine(int ctx) { | |
| 1037 zeroOrMore(() => transaction(() { | |
| 1038 zeroOrMore(() => consume(isSpace)); | |
| 1039 return ns_plainChar(ctx); | |
| 1040 })); | |
| 1041 } | |
| 1042 | |
| 1043 // 133 | |
| 1044 bool ns_plainOneLine(int ctx) { | |
| 1045 if (truth(c_forbidden())) return false; | |
| 1046 if (!truth(ns_plainFirst(ctx))) return false; | |
| 1047 nb_ns_plainInLine(ctx); | |
| 1048 return true; | |
| 1049 } | |
| 1050 | |
| 1051 // 134 | |
| 1052 bool s_ns_plainNextLine(int indent, int ctx) => transaction(() { | |
| 1053 if (!truth(s_flowFolded(indent))) return false; | |
| 1054 if (truth(c_forbidden())) return false; | |
| 1055 if (!truth(ns_plainChar(ctx))) return false; | |
| 1056 nb_ns_plainInLine(ctx); | |
| 1057 return true; | |
| 1058 }); | |
| 1059 | |
| 1060 // 135 | |
| 1061 bool ns_plainMultiLine(int indent, int ctx) { | |
| 1062 if (!truth(ns_plainOneLine(ctx))) return false; | |
| 1063 zeroOrMore(() => s_ns_plainNextLine(indent, ctx)); | |
| 1064 return true; | |
| 1065 } | |
| 1066 | |
| 1067 // 136 | |
| 1068 int inFlow(int ctx) { | |
| 1069 switch (ctx) { | |
| 1070 case FLOW_OUT: | |
| 1071 case FLOW_IN: | |
| 1072 return FLOW_IN; | |
| 1073 case BLOCK_KEY: | |
| 1074 case FLOW_KEY: | |
| 1075 return FLOW_KEY; | |
| 1076 } | |
| 1077 throw "unreachable"; | |
| 1078 } | |
| 1079 | |
| 1080 // 137 | |
| 1081 SequenceNode c_flowSequence(int indent, int ctx) => transaction(() { | |
| 1082 var start = _scanner.state; | |
| 1083 if (!truth(c_indicator(C_SEQUENCE_START))) return null; | |
| 1084 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1085 var content = zeroOrOne(() => ns_s_flowSeqEntries(indent, inFlow(ctx))); | |
| 1086 if (!truth(c_indicator(C_SEQUENCE_END))) return null; | |
| 1087 return new SequenceNode("?", new List<Node>.from(content), | |
| 1088 _scanner.spanFrom(start)); | |
| 1089 }); | |
| 1090 | |
| 1091 // 138 | |
| 1092 Iterable<Node> ns_s_flowSeqEntries(int indent, int ctx) { | |
| 1093 var first = ns_flowSeqEntry(indent, ctx); | |
| 1094 if (!truth(first)) return new Queue<Node>(); | |
| 1095 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1096 | |
| 1097 var rest; | |
| 1098 if (truth(c_indicator(C_COLLECT_ENTRY))) { | |
| 1099 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1100 rest = zeroOrOne(() => ns_s_flowSeqEntries(indent, ctx)); | |
| 1101 } | |
| 1102 | |
| 1103 if (rest == null) rest = new Queue<Node>(); | |
| 1104 rest.addFirst(first); | |
| 1105 | |
| 1106 return rest; | |
| 1107 } | |
| 1108 | |
| 1109 // 139 | |
| 1110 Node ns_flowSeqEntry(int indent, int ctx) => or([ | |
| 1111 () => ns_flowPair(indent, ctx), | |
| 1112 () => ns_flowNode(indent, ctx) | |
| 1113 ]); | |
| 1114 | |
| 1115 // 140 | |
| 1116 Node c_flowMapping(int indent, int ctx) { | |
| 1117 var start = _scanner.state; | |
| 1118 if (!truth(c_indicator(C_MAPPING_START))) return null; | |
| 1119 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1120 var content = zeroOrOne(() => ns_s_flowMapEntries(indent, inFlow(ctx))); | |
| 1121 if (!truth(c_indicator(C_MAPPING_END))) return null; | |
| 1122 return new MappingNode("?", content, _scanner.spanFrom(start)); | |
| 1123 } | |
| 1124 | |
| 1125 // 141 | |
| 1126 Map ns_s_flowMapEntries(int indent, int ctx) { | |
| 1127 var first = ns_flowMapEntry(indent, ctx); | |
| 1128 if (!truth(first)) return deepEqualsMap(); | |
| 1129 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1130 | |
| 1131 var rest; | |
| 1132 if (truth(c_indicator(C_COLLECT_ENTRY))) { | |
| 1133 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1134 rest = ns_s_flowMapEntries(indent, ctx); | |
| 1135 } | |
| 1136 | |
| 1137 if (rest == null) rest = deepEqualsMap(); | |
| 1138 | |
| 1139 // TODO(nweiz): Duplicate keys should be an error. This includes keys with | |
| 1140 // different representations but the same value (e.g. 10 vs 0xa). To make | |
| 1141 // this user-friendly we'll probably also want to associate nodes with a | |
| 1142 // source range. | |
| 1143 if (!rest.containsKey(first.first)) rest[first.first] = first.last; | |
| 1144 | |
| 1145 return rest; | |
| 1146 } | |
| 1147 | |
| 1148 // 142 | |
| 1149 Pair<Node, Node> ns_flowMapEntry(int indent, int ctx) => or([ | |
| 1150 () => transaction(() { | |
| 1151 if (!truth(c_indicator(C_MAPPING_KEY))) return false; | |
| 1152 if (!truth(s_separate(indent, ctx))) return false; | |
| 1153 return ns_flowMapExplicitEntry(indent, ctx); | |
| 1154 }), | |
| 1155 () => ns_flowMapImplicitEntry(indent, ctx) | |
| 1156 ]); | |
| 1157 | |
| 1158 // 143 | |
| 1159 Pair<Node, Node> ns_flowMapExplicitEntry(int indent, int ctx) => or([ | |
| 1160 () => ns_flowMapImplicitEntry(indent, ctx), | |
| 1161 () => new Pair<Node, Node>(e_node(), e_node()) | |
| 1162 ]); | |
| 1163 | |
| 1164 // 144 | |
| 1165 Pair<Node, Node> ns_flowMapImplicitEntry(int indent, int ctx) => or([ | |
| 1166 () => ns_flowMapYamlKeyEntry(indent, ctx), | |
| 1167 () => c_ns_flowMapEmptyKeyEntry(indent, ctx), | |
| 1168 () => c_ns_flowMapJsonKeyEntry(indent, ctx) | |
| 1169 ]); | |
| 1170 | |
| 1171 // 145 | |
| 1172 Pair<Node, Node> ns_flowMapYamlKeyEntry(int indent, int ctx) { | |
| 1173 var key = ns_flowYamlNode(indent, ctx); | |
| 1174 if (!truth(key)) return null; | |
| 1175 var value = or([ | |
| 1176 () => transaction(() { | |
| 1177 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1178 return c_ns_flowMapSeparateValue(indent, ctx); | |
| 1179 }), | |
| 1180 e_node | |
| 1181 ]); | |
| 1182 return new Pair<Node, Node>(key, value); | |
| 1183 } | |
| 1184 | |
| 1185 // 146 | |
| 1186 Pair<Node, Node> c_ns_flowMapEmptyKeyEntry(int indent, int ctx) { | |
| 1187 var value = c_ns_flowMapSeparateValue(indent, ctx); | |
| 1188 if (!truth(value)) return null; | |
| 1189 return new Pair<Node, Node>(e_node(), value); | |
| 1190 } | |
| 1191 | |
| 1192 // 147 | |
| 1193 Node c_ns_flowMapSeparateValue(int indent, int ctx) => transaction(() { | |
| 1194 if (!truth(c_indicator(C_MAPPING_VALUE))) return null; | |
| 1195 if (isPlainSafe(ctx, peek())) return null; | |
| 1196 | |
| 1197 return or([ | |
| 1198 () => transaction(() { | |
| 1199 if (!s_separate(indent, ctx)) return null; | |
| 1200 return ns_flowNode(indent, ctx); | |
| 1201 }), | |
| 1202 e_node | |
| 1203 ]); | |
| 1204 }); | |
| 1205 | |
| 1206 // 148 | |
| 1207 Pair<Node, Node> c_ns_flowMapJsonKeyEntry(int indent, int ctx) { | |
| 1208 var key = c_flowJsonNode(indent, ctx); | |
| 1209 if (!truth(key)) return null; | |
| 1210 var value = or([ | |
| 1211 () => transaction(() { | |
| 1212 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1213 return c_ns_flowMapAdjacentValue(indent, ctx); | |
| 1214 }), | |
| 1215 e_node | |
| 1216 ]); | |
| 1217 return new Pair<Node, Node>(key, value); | |
| 1218 } | |
| 1219 | |
| 1220 // 149 | |
| 1221 Node c_ns_flowMapAdjacentValue(int indent, int ctx) { | |
| 1222 if (!truth(c_indicator(C_MAPPING_VALUE))) return null; | |
| 1223 return or([ | |
| 1224 () => transaction(() { | |
| 1225 zeroOrOne(() => s_separate(indent, ctx)); | |
| 1226 return ns_flowNode(indent, ctx); | |
| 1227 }), | |
| 1228 e_node | |
| 1229 ]); | |
| 1230 } | |
| 1231 | |
| 1232 // 150 | |
| 1233 Node ns_flowPair(int indent, int ctx) { | |
| 1234 var start = _scanner.state; | |
| 1235 var pair = or([ | |
| 1236 () => transaction(() { | |
| 1237 if (!truth(c_indicator(C_MAPPING_KEY))) return null; | |
| 1238 if (!truth(s_separate(indent, ctx))) return null; | |
| 1239 return ns_flowMapExplicitEntry(indent, ctx); | |
| 1240 }), | |
| 1241 () => ns_flowPairEntry(indent, ctx) | |
| 1242 ]); | |
| 1243 if (!truth(pair)) return null; | |
| 1244 | |
| 1245 return map([pair], _scanner.spanFrom(start)); | |
| 1246 } | |
| 1247 | |
| 1248 // 151 | |
| 1249 Pair<Node, Node> ns_flowPairEntry(int indent, int ctx) => or([ | |
| 1250 () => ns_flowPairYamlKeyEntry(indent, ctx), | |
| 1251 () => c_ns_flowMapEmptyKeyEntry(indent, ctx), | |
| 1252 () => c_ns_flowPairJsonKeyEntry(indent, ctx) | |
| 1253 ]); | |
| 1254 | |
| 1255 // 152 | |
| 1256 Pair<Node, Node> ns_flowPairYamlKeyEntry(int indent, int ctx) => | |
| 1257 transaction(() { | |
| 1258 var key = ns_s_implicitYamlKey(FLOW_KEY); | |
| 1259 if (!truth(key)) return null; | |
| 1260 var value = c_ns_flowMapSeparateValue(indent, ctx); | |
| 1261 if (!truth(value)) return null; | |
| 1262 return new Pair<Node, Node>(key, value); | |
| 1263 }); | |
| 1264 | |
| 1265 // 153 | |
| 1266 Pair<Node, Node> c_ns_flowPairJsonKeyEntry(int indent, int ctx) => | |
| 1267 transaction(() { | |
| 1268 var key = c_s_implicitJsonKey(FLOW_KEY); | |
| 1269 if (!truth(key)) return null; | |
| 1270 var value = c_ns_flowMapAdjacentValue(indent, ctx); | |
| 1271 if (!truth(value)) return null; | |
| 1272 return new Pair<Node, Node>(key, value); | |
| 1273 }); | |
| 1274 | |
| 1275 // 154 | |
| 1276 Node ns_s_implicitYamlKey(int ctx) => transaction(() { | |
| 1277 // TODO(nweiz): this is supposed to be limited to 1024 characters. | |
| 1278 | |
| 1279 // The indentation parameter is "null" since it's unused in this path | |
| 1280 var node = ns_flowYamlNode(null, ctx); | |
| 1281 if (!truth(node)) return null; | |
| 1282 zeroOrOne(s_separateInLine); | |
| 1283 return node; | |
| 1284 }); | |
| 1285 | |
| 1286 // 155 | |
| 1287 Node c_s_implicitJsonKey(int ctx) => transaction(() { | |
| 1288 // TODO(nweiz): this is supposed to be limited to 1024 characters. | |
| 1289 | |
| 1290 // The indentation parameter is "null" since it's unused in this path | |
| 1291 var node = c_flowJsonNode(null, ctx); | |
| 1292 if (!truth(node)) return null; | |
| 1293 zeroOrOne(s_separateInLine); | |
| 1294 return node; | |
| 1295 }); | |
| 1296 | |
| 1297 // 156 | |
| 1298 Node ns_flowYamlContent(int indent, int ctx) { | |
| 1299 var start = _scanner.state; | |
| 1300 var str = ns_plain(indent, ctx); | |
| 1301 if (!truth(str)) return null; | |
| 1302 return new ScalarNode("?", _scanner.spanFrom(start), content: str); | |
| 1303 } | |
| 1304 | |
| 1305 // 157 | |
| 1306 Node c_flowJsonContent(int indent, int ctx) => or([ | |
| 1307 () => c_flowSequence(indent, ctx), | |
| 1308 () => c_flowMapping(indent, ctx), | |
| 1309 () => c_singleQuoted(indent, ctx), | |
| 1310 () => c_doubleQuoted(indent, ctx) | |
| 1311 ]); | |
| 1312 | |
| 1313 // 158 | |
| 1314 Node ns_flowContent(int indent, int ctx) => or([ | |
| 1315 () => ns_flowYamlContent(indent, ctx), | |
| 1316 () => c_flowJsonContent(indent, ctx) | |
| 1317 ]); | |
| 1318 | |
| 1319 // 159 | |
| 1320 Node ns_flowYamlNode(int indent, int ctx) => or([ | |
| 1321 c_ns_aliasNode, | |
| 1322 () => ns_flowYamlContent(indent, ctx), | |
| 1323 () { | |
| 1324 var props = c_ns_properties(indent, ctx); | |
| 1325 if (!truth(props)) return null; | |
| 1326 var node = or([ | |
| 1327 () => transaction(() { | |
| 1328 if (!truth(s_separate(indent, ctx))) return null; | |
| 1329 return ns_flowYamlContent(indent, ctx); | |
| 1330 }), | |
| 1331 e_scalar | |
| 1332 ]); | |
| 1333 return addProps(node, props); | |
| 1334 } | |
| 1335 ]); | |
| 1336 | |
| 1337 // 160 | |
| 1338 Node c_flowJsonNode(int indent, int ctx) => transaction(() { | |
| 1339 var props; | |
| 1340 zeroOrOne(() => transaction(() { | |
| 1341 props = c_ns_properties(indent, ctx); | |
| 1342 if (!truth(props)) return null; | |
| 1343 return s_separate(indent, ctx); | |
| 1344 })); | |
| 1345 | |
| 1346 return addProps(c_flowJsonContent(indent, ctx), props); | |
| 1347 }); | |
| 1348 | |
| 1349 // 161 | |
| 1350 Node ns_flowNode(int indent, int ctx) => or([ | |
| 1351 c_ns_aliasNode, | |
| 1352 () => ns_flowContent(indent, ctx), | |
| 1353 () => transaction(() { | |
| 1354 var props = c_ns_properties(indent, ctx); | |
| 1355 if (!truth(props)) return null; | |
| 1356 var node = or([ | |
| 1357 () => transaction(() => s_separate(indent, ctx) ? | |
| 1358 ns_flowContent(indent, ctx) : null), | |
| 1359 e_scalar]); | |
| 1360 return addProps(node, props); | |
| 1361 }) | |
| 1362 ]); | |
| 1363 | |
| 1364 // 162 | |
| 1365 _BlockHeader c_b_blockHeader() => transaction(() { | |
| 1366 var indentation = c_indentationIndicator(); | |
| 1367 var chomping = c_chompingIndicator(); | |
| 1368 if (!truth(indentation)) indentation = c_indentationIndicator(); | |
| 1369 if (!truth(s_b_comment())) return null; | |
| 1370 | |
| 1371 return new _BlockHeader(indentation, chomping); | |
| 1372 }); | |
| 1373 | |
| 1374 // 163 | |
| 1375 int c_indentationIndicator() { | |
| 1376 if (!isDecDigit(peek())) return null; | |
| 1377 return next() - NUMBER_0; | |
| 1378 } | |
| 1379 | |
| 1380 // 164 | |
| 1381 int c_chompingIndicator() { | |
| 1382 switch (peek()) { | |
| 1383 case HYPHEN: | |
| 1384 next(); | |
| 1385 return CHOMPING_STRIP; | |
| 1386 case PLUS: | |
| 1387 next(); | |
| 1388 return CHOMPING_KEEP; | |
| 1389 default: | |
| 1390 return CHOMPING_CLIP; | |
| 1391 } | |
| 1392 } | |
| 1393 | |
| 1394 // 165 | |
| 1395 bool b_chompedLast(int chomping) { | |
| 1396 if (_scanner.isDone) return true; | |
| 1397 switch (chomping) { | |
| 1398 case CHOMPING_STRIP: | |
| 1399 return b_nonContent(); | |
| 1400 case CHOMPING_CLIP: | |
| 1401 case CHOMPING_KEEP: | |
| 1402 return b_asLineFeed(); | |
| 1403 } | |
| 1404 throw "unreachable"; | |
| 1405 } | |
| 1406 | |
| 1407 // 166 | |
| 1408 void l_chompedEmpty(int indent, int chomping) { | |
| 1409 switch (chomping) { | |
| 1410 case CHOMPING_STRIP: | |
| 1411 case CHOMPING_CLIP: | |
| 1412 l_stripEmpty(indent); | |
| 1413 break; | |
| 1414 case CHOMPING_KEEP: | |
| 1415 l_keepEmpty(indent); | |
| 1416 break; | |
| 1417 } | |
| 1418 } | |
| 1419 | |
| 1420 // 167 | |
| 1421 void l_stripEmpty(int indent) { | |
| 1422 captureAs('', () { | |
| 1423 zeroOrMore(() => transaction(() { | |
| 1424 if (!truth(s_indentLessThanOrEqualTo(indent))) return false; | |
| 1425 return b_nonContent(); | |
| 1426 })); | |
| 1427 zeroOrOne(() => l_trailComments(indent)); | |
| 1428 return true; | |
| 1429 }); | |
| 1430 } | |
| 1431 | |
| 1432 // 168 | |
| 1433 void l_keepEmpty(int indent) { | |
| 1434 zeroOrMore(() => captureAs('\n', () => l_empty(indent, BLOCK_IN))); | |
| 1435 zeroOrOne(() => captureAs('', () => l_trailComments(indent))); | |
| 1436 } | |
| 1437 | |
| 1438 // 169 | |
| 1439 bool l_trailComments(int indent) => transaction(() { | |
| 1440 if (!truth(s_indentLessThanOrEqualTo(indent))) return false; | |
| 1441 if (!truth(c_nb_commentText())) return false; | |
| 1442 if (!truth(b_comment())) return false; | |
| 1443 zeroOrMore(l_comment); | |
| 1444 return true; | |
| 1445 }); | |
| 1446 | |
| 1447 // 170 | |
| 1448 Node c_l_literal(int indent) => transaction(() { | |
| 1449 var start = _scanner.state; | |
| 1450 if (!truth(c_indicator(C_LITERAL))) return null; | |
| 1451 var header = c_b_blockHeader(); | |
| 1452 if (!truth(header)) return null; | |
| 1453 | |
| 1454 var additionalIndent = blockScalarAdditionalIndentation(header, indent); | |
| 1455 var content = l_literalContent(indent + additionalIndent, header.chomping); | |
| 1456 if (!truth(content)) return null; | |
| 1457 | |
| 1458 return new ScalarNode("!", _scanner.spanFrom(start), content: content); | |
| 1459 }); | |
| 1460 | |
| 1461 // 171 | |
| 1462 bool l_nb_literalText(int indent) => transaction(() { | |
| 1463 zeroOrMore(() => captureAs("\n", () => l_empty(indent, BLOCK_IN))); | |
| 1464 if (!truth(captureAs("", () => s_indent(indent)))) return false; | |
| 1465 return truth(oneOrMore(() => consume(isNonBreak))); | |
| 1466 }); | |
| 1467 | |
| 1468 // 172 | |
| 1469 bool b_nb_literalNext(int indent) => transaction(() { | |
| 1470 if (!truth(b_asLineFeed())) return false; | |
| 1471 return l_nb_literalText(indent); | |
| 1472 }); | |
| 1473 | |
| 1474 // 173 | |
| 1475 String l_literalContent(int indent, int chomping) => captureString(() { | |
| 1476 transaction(() { | |
| 1477 if (!truth(l_nb_literalText(indent))) return false; | |
| 1478 zeroOrMore(() => b_nb_literalNext(indent)); | |
| 1479 return b_chompedLast(chomping); | |
| 1480 }); | |
| 1481 l_chompedEmpty(indent, chomping); | |
| 1482 return true; | |
| 1483 }); | |
| 1484 | |
| 1485 // 174 | |
| 1486 Node c_l_folded(int indent) => transaction(() { | |
| 1487 var start = _scanner.state; | |
| 1488 if (!truth(c_indicator(C_FOLDED))) return null; | |
| 1489 var header = c_b_blockHeader(); | |
| 1490 if (!truth(header)) return null; | |
| 1491 | |
| 1492 var additionalIndent = blockScalarAdditionalIndentation(header, indent); | |
| 1493 var content = l_foldedContent(indent + additionalIndent, header.chomping); | |
| 1494 if (!truth(content)) return null; | |
| 1495 | |
| 1496 return new ScalarNode("!", _scanner.spanFrom(start), content: content); | |
| 1497 }); | |
| 1498 | |
| 1499 // 175 | |
| 1500 bool s_nb_foldedText(int indent) => transaction(() { | |
| 1501 if (!truth(captureAs('', () => s_indent(indent)))) return false; | |
| 1502 if (!truth(consume(isNonSpace))) return false; | |
| 1503 zeroOrMore(() => consume(isNonBreak)); | |
| 1504 return true; | |
| 1505 }); | |
| 1506 | |
| 1507 // 176 | |
| 1508 bool l_nb_foldedLines(int indent) { | |
| 1509 if (!truth(s_nb_foldedText(indent))) return false; | |
| 1510 zeroOrMore(() => transaction(() { | |
| 1511 if (!truth(b_l_folded(indent, BLOCK_IN))) return false; | |
| 1512 return s_nb_foldedText(indent); | |
| 1513 })); | |
| 1514 return true; | |
| 1515 } | |
| 1516 | |
| 1517 // 177 | |
| 1518 bool s_nb_spacedText(int indent) => transaction(() { | |
| 1519 if (!truth(captureAs('', () => s_indent(indent)))) return false; | |
| 1520 if (!truth(consume(isSpace))) return false; | |
| 1521 zeroOrMore(() => consume(isNonBreak)); | |
| 1522 return true; | |
| 1523 }); | |
| 1524 | |
| 1525 // 178 | |
| 1526 bool b_l_spaced(int indent) { | |
| 1527 if (!truth(b_asLineFeed())) return false; | |
| 1528 zeroOrMore(() => captureAs("\n", () => l_empty(indent, BLOCK_IN))); | |
| 1529 return true; | |
| 1530 } | |
| 1531 | |
| 1532 // 179 | |
| 1533 bool l_nb_spacedLines(int indent) { | |
| 1534 if (!truth(s_nb_spacedText(indent))) return false; | |
| 1535 zeroOrMore(() => transaction(() { | |
| 1536 if (!truth(b_l_spaced(indent))) return false; | |
| 1537 return s_nb_spacedText(indent); | |
| 1538 })); | |
| 1539 return true; | |
| 1540 } | |
| 1541 | |
| 1542 // 180 | |
| 1543 bool l_nb_sameLines(int indent) => transaction(() { | |
| 1544 zeroOrMore(() => captureAs('\n', () => l_empty(indent, BLOCK_IN))); | |
| 1545 return or([ | |
| 1546 () => l_nb_foldedLines(indent), | |
| 1547 () => l_nb_spacedLines(indent) | |
| 1548 ]); | |
| 1549 }); | |
| 1550 | |
| 1551 // 181 | |
| 1552 bool l_nb_diffLines(int indent) { | |
| 1553 if (!truth(l_nb_sameLines(indent))) return false; | |
| 1554 zeroOrMore(() => transaction(() { | |
| 1555 if (!truth(b_asLineFeed())) return false; | |
| 1556 return l_nb_sameLines(indent); | |
| 1557 })); | |
| 1558 return true; | |
| 1559 } | |
| 1560 | |
| 1561 // 182 | |
| 1562 String l_foldedContent(int indent, int chomping) => captureString(() { | |
| 1563 transaction(() { | |
| 1564 if (!truth(l_nb_diffLines(indent))) return false; | |
| 1565 return b_chompedLast(chomping); | |
| 1566 }); | |
| 1567 l_chompedEmpty(indent, chomping); | |
| 1568 return true; | |
| 1569 }); | |
| 1570 | |
| 1571 // 183 | |
| 1572 SequenceNode l_blockSequence(int indent) => context('sequence', () { | |
| 1573 var additionalIndent = countIndentation() - indent; | |
| 1574 if (additionalIndent <= 0) return null; | |
| 1575 | |
| 1576 var start = _scanner.state; | |
| 1577 var content = oneOrMore(() => transaction(() { | |
| 1578 if (!truth(s_indent(indent + additionalIndent))) return null; | |
| 1579 return c_l_blockSeqEntry(indent + additionalIndent); | |
| 1580 })); | |
| 1581 if (!truth(content)) return null; | |
| 1582 | |
| 1583 return new SequenceNode("?", content, _scanner.spanFrom(start)); | |
| 1584 }); | |
| 1585 | |
| 1586 // 184 | |
| 1587 Node c_l_blockSeqEntry(int indent) => transaction(() { | |
| 1588 if (!truth(c_indicator(C_SEQUENCE_ENTRY))) return null; | |
| 1589 if (isNonSpace(peek())) return null; | |
| 1590 | |
| 1591 return s_l_blockIndented(indent, BLOCK_IN); | |
| 1592 }); | |
| 1593 | |
| 1594 // 185 | |
| 1595 Node s_l_blockIndented(int indent, int ctx) { | |
| 1596 var additionalIndent = countIndentation(); | |
| 1597 return or([ | |
| 1598 () => transaction(() { | |
| 1599 if (!truth(s_indent(additionalIndent))) return null; | |
| 1600 return or([ | |
| 1601 () => ns_l_compactSequence(indent + 1 + additionalIndent), | |
| 1602 () => ns_l_compactMapping(indent + 1 + additionalIndent)]); | |
| 1603 }), | |
| 1604 () => s_l_blockNode(indent, ctx), | |
| 1605 () => s_l_comments() ? e_node() : null]); | |
| 1606 } | |
| 1607 | |
| 1608 // 186 | |
| 1609 Node ns_l_compactSequence(int indent) => context('sequence', () { | |
| 1610 var start = _scanner.state; | |
| 1611 var first = c_l_blockSeqEntry(indent); | |
| 1612 if (!truth(first)) return null; | |
| 1613 | |
| 1614 var content = zeroOrMore(() => transaction(() { | |
| 1615 if (!truth(s_indent(indent))) return null; | |
| 1616 return c_l_blockSeqEntry(indent); | |
| 1617 })); | |
| 1618 content.insert(0, first); | |
| 1619 | |
| 1620 return new SequenceNode("?", content, _scanner.spanFrom(start)); | |
| 1621 }); | |
| 1622 | |
| 1623 // 187 | |
| 1624 Node l_blockMapping(int indent) => context('mapping', () { | |
| 1625 var additionalIndent = countIndentation() - indent; | |
| 1626 if (additionalIndent <= 0) return null; | |
| 1627 | |
| 1628 var start = _scanner.state; | |
| 1629 var pairs = oneOrMore(() => transaction(() { | |
| 1630 if (!truth(s_indent(indent + additionalIndent))) return null; | |
| 1631 return ns_l_blockMapEntry(indent + additionalIndent); | |
| 1632 })); | |
| 1633 if (!truth(pairs)) return null; | |
| 1634 | |
| 1635 return map(pairs, _scanner.spanFrom(start)); | |
| 1636 }); | |
| 1637 | |
| 1638 // 188 | |
| 1639 Pair<Node, Node> ns_l_blockMapEntry(int indent) => or([ | |
| 1640 () => c_l_blockMapExplicitEntry(indent), | |
| 1641 () => ns_l_blockMapImplicitEntry(indent) | |
| 1642 ]); | |
| 1643 | |
| 1644 // 189 | |
| 1645 Pair<Node, Node> c_l_blockMapExplicitEntry(int indent) { | |
| 1646 var key = c_l_blockMapExplicitKey(indent); | |
| 1647 if (!truth(key)) return null; | |
| 1648 | |
| 1649 var value = or([ | |
| 1650 () => l_blockMapExplicitValue(indent), | |
| 1651 e_node | |
| 1652 ]); | |
| 1653 | |
| 1654 return new Pair<Node, Node>(key, value); | |
| 1655 } | |
| 1656 | |
| 1657 // 190 | |
| 1658 Node c_l_blockMapExplicitKey(int indent) => transaction(() { | |
| 1659 if (!truth(c_indicator(C_MAPPING_KEY))) return null; | |
| 1660 return s_l_blockIndented(indent, BLOCK_OUT); | |
| 1661 }); | |
| 1662 | |
| 1663 // 191 | |
| 1664 Node l_blockMapExplicitValue(int indent) => transaction(() { | |
| 1665 if (!truth(s_indent(indent))) return null; | |
| 1666 if (!truth(c_indicator(C_MAPPING_VALUE))) return null; | |
| 1667 return s_l_blockIndented(indent, BLOCK_OUT); | |
| 1668 }); | |
| 1669 | |
| 1670 // 192 | |
| 1671 Pair<Node, Node> ns_l_blockMapImplicitEntry(int indent) => transaction(() { | |
| 1672 var key = or([ns_s_blockMapImplicitKey, e_node]); | |
| 1673 var value = c_l_blockMapImplicitValue(indent); | |
| 1674 return truth(value) ? new Pair<Node, Node>(key, value) : null; | |
| 1675 }); | |
| 1676 | |
| 1677 // 193 | |
| 1678 Node ns_s_blockMapImplicitKey() => context('mapping key', () => or([ | |
| 1679 () => c_s_implicitJsonKey(BLOCK_KEY), | |
| 1680 () => ns_s_implicitYamlKey(BLOCK_KEY) | |
| 1681 ])); | |
| 1682 | |
| 1683 // 194 | |
| 1684 Node c_l_blockMapImplicitValue(int indent) => context('mapping value', () => | |
| 1685 transaction(() { | |
| 1686 if (!truth(c_indicator(C_MAPPING_VALUE))) return null; | |
| 1687 return or([ | |
| 1688 () => s_l_blockNode(indent, BLOCK_OUT), | |
| 1689 () => s_l_comments() ? e_node() : null | |
| 1690 ]); | |
| 1691 })); | |
| 1692 | |
| 1693 // 195 | |
| 1694 Node ns_l_compactMapping(int indent) => context('mapping', () { | |
| 1695 var start = _scanner.state; | |
| 1696 var first = ns_l_blockMapEntry(indent); | |
| 1697 if (!truth(first)) return null; | |
| 1698 | |
| 1699 var pairs = zeroOrMore(() => transaction(() { | |
| 1700 if (!truth(s_indent(indent))) return null; | |
| 1701 return ns_l_blockMapEntry(indent); | |
| 1702 })); | |
| 1703 pairs.insert(0, first); | |
| 1704 | |
| 1705 return map(pairs, _scanner.spanFrom(start)); | |
| 1706 }); | |
| 1707 | |
| 1708 // 196 | |
| 1709 Node s_l_blockNode(int indent, int ctx) => or([ | |
| 1710 () => s_l_blockInBlock(indent, ctx), | |
| 1711 () => s_l_flowInBlock(indent) | |
| 1712 ]); | |
| 1713 | |
| 1714 // 197 | |
| 1715 Node s_l_flowInBlock(int indent) => transaction(() { | |
| 1716 if (!truth(s_separate(indent + 1, FLOW_OUT))) return null; | |
| 1717 var node = ns_flowNode(indent + 1, FLOW_OUT); | |
| 1718 if (!truth(node)) return null; | |
| 1719 if (!truth(s_l_comments())) return null; | |
| 1720 return node; | |
| 1721 }); | |
| 1722 | |
| 1723 // 198 | |
| 1724 Node s_l_blockInBlock(int indent, int ctx) => or([ | |
| 1725 () => s_l_blockScalar(indent, ctx), | |
| 1726 () => s_l_blockCollection(indent, ctx) | |
| 1727 ]); | |
| 1728 | |
| 1729 // 199 | |
| 1730 Node s_l_blockScalar(int indent, int ctx) => transaction(() { | |
| 1731 if (!truth(s_separate(indent + 1, ctx))) return null; | |
| 1732 var props = transaction(() { | |
| 1733 var innerProps = c_ns_properties(indent + 1, ctx); | |
| 1734 if (!truth(innerProps)) return null; | |
| 1735 if (!truth(s_separate(indent + 1, ctx))) return null; | |
| 1736 return innerProps; | |
| 1737 }); | |
| 1738 | |
| 1739 var node = or([() => c_l_literal(indent), () => c_l_folded(indent)]); | |
| 1740 if (!truth(node)) return null; | |
| 1741 return addProps(node, props); | |
| 1742 }); | |
| 1743 | |
| 1744 // 200 | |
| 1745 Node s_l_blockCollection(int indent, int ctx) => transaction(() { | |
| 1746 var props = transaction(() { | |
| 1747 if (!truth(s_separate(indent + 1, ctx))) return null; | |
| 1748 return c_ns_properties(indent + 1, ctx); | |
| 1749 }); | |
| 1750 | |
| 1751 if (!truth(s_l_comments())) return null; | |
| 1752 return or([ | |
| 1753 () => l_blockSequence(seqSpaces(indent, ctx)), | |
| 1754 () => l_blockMapping(indent)]); | |
| 1755 }); | |
| 1756 | |
| 1757 // 201 | |
| 1758 int seqSpaces(int indent, int ctx) => ctx == BLOCK_OUT ? indent - 1 : indent; | |
| 1759 | |
| 1760 // 202 | |
| 1761 void l_documentPrefix() { | |
| 1762 zeroOrMore(l_comment); | |
| 1763 } | |
| 1764 | |
| 1765 // 203 | |
| 1766 bool c_directivesEnd() => rawString("---"); | |
| 1767 | |
| 1768 // 204 | |
| 1769 bool c_documentEnd() => rawString("..."); | |
| 1770 | |
| 1771 // 205 | |
| 1772 bool l_documentSuffix() => transaction(() { | |
| 1773 if (!truth(c_documentEnd())) return false; | |
| 1774 return s_l_comments(); | |
| 1775 }); | |
| 1776 | |
| 1777 // 206 | |
| 1778 bool c_forbidden() { | |
| 1779 if (!_inBareDocument || !atStartOfLine) return false; | |
| 1780 var forbidden = false; | |
| 1781 transaction(() { | |
| 1782 if (!truth(or([c_directivesEnd, c_documentEnd]))) return; | |
| 1783 var char = peek(); | |
| 1784 forbidden = isBreak(char) || isSpace(char) || _scanner.isDone; | |
| 1785 return; | |
| 1786 }); | |
| 1787 return forbidden; | |
| 1788 } | |
| 1789 | |
| 1790 // 207 | |
| 1791 Node l_bareDocument() { | |
| 1792 try { | |
| 1793 _inBareDocument = true; | |
| 1794 return s_l_blockNode(-1, BLOCK_IN); | |
| 1795 } finally { | |
| 1796 _inBareDocument = false; | |
| 1797 } | |
| 1798 } | |
| 1799 | |
| 1800 // 208 | |
| 1801 Node l_explicitDocument() { | |
| 1802 if (!truth(c_directivesEnd())) return null; | |
| 1803 var doc = l_bareDocument(); | |
| 1804 if (truth(doc)) return doc; | |
| 1805 | |
| 1806 doc = e_node(); | |
| 1807 s_l_comments(); | |
| 1808 return doc; | |
| 1809 } | |
| 1810 | |
| 1811 // 209 | |
| 1812 Node l_directiveDocument() { | |
| 1813 if (!truth(oneOrMore(l_directive))) return null; | |
| 1814 var doc = l_explicitDocument(); | |
| 1815 if (doc != null) return doc; | |
| 1816 parseFailed(); | |
| 1817 return null; // Unreachable. | |
| 1818 } | |
| 1819 | |
| 1820 // 210 | |
| 1821 Node l_anyDocument() => | |
| 1822 or([l_directiveDocument, l_explicitDocument, l_bareDocument]); | |
| 1823 | |
| 1824 // 211 | |
| 1825 Pair<List<Node>, SourceSpan> l_yamlStream() { | |
| 1826 var start = _scanner.state; | |
| 1827 var docs = []; | |
| 1828 zeroOrMore(l_documentPrefix); | |
| 1829 var first = zeroOrOne(l_anyDocument); | |
| 1830 if (!truth(first)) first = e_node(); | |
| 1831 docs.add(first); | |
| 1832 | |
| 1833 zeroOrMore(() { | |
| 1834 var doc; | |
| 1835 if (truth(oneOrMore(l_documentSuffix))) { | |
| 1836 zeroOrMore(l_documentPrefix); | |
| 1837 doc = zeroOrOne(l_anyDocument); | |
| 1838 } else { | 369 } else { |
| 1839 zeroOrMore(l_documentPrefix); | 370 _states.add(_State.BLOCK_SEQUENCE_ENTRY); |
| 1840 doc = zeroOrOne(l_explicitDocument); | 371 return _parseNode(block: true); |
| 1841 } | 372 } |
| 1842 if (truth(doc)) docs.add(doc); | 373 } |
| 1843 return doc; | 374 |
| 1844 }); | 375 if (token.type == TokenType.BLOCK_END) { |
| 1845 | 376 _scanner.scan(); |
| 1846 if (!_scanner.isDone) parseFailed(); | 377 _state = _states.removeLast(); |
| 1847 return new Pair(docs, _scanner.spanFrom(start)); | 378 return new Event(EventType.SEQUENCE_END, token.span); |
| 379 } |
| 380 |
| 381 throw new YamlException("While parsing a block collection, expected '-'.", |
| 382 token.span.start.pointSpan()); |
| 383 } |
| 384 |
| 385 /// Parses the productions: |
| 386 /// |
| 387 /// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ |
| 388 /// *********** * |
| 389 Event _parseIndentlessSequenceEntry() { |
| 390 var token = _scanner.peek(); |
| 391 |
| 392 if (token.type != TokenType.BLOCK_ENTRY) { |
| 393 _state = _states.removeLast(); |
| 394 return new Event(EventType.SEQUENCE_END, token.span.start.pointSpan()); |
| 395 } |
| 396 |
| 397 var start = token.span.start; |
| 398 token = _scanner.advance(); |
| 399 |
| 400 if (token.type == TokenType.BLOCK_ENTRY || |
| 401 token.type == TokenType.KEY || |
| 402 token.type == TokenType.VALUE || |
| 403 token.type == TokenType.BLOCK_END) { |
| 404 _state = _State.INDENTLESS_SEQUENCE_ENTRY; |
| 405 return _processEmptyScalar(start); |
| 406 } else { |
| 407 _states.add(_State.INDENTLESS_SEQUENCE_ENTRY); |
| 408 return _parseNode(block: true); |
| 409 } |
| 410 } |
| 411 |
| 412 /// Parses the productions: |
| 413 /// |
| 414 /// block_mapping ::= BLOCK-MAPPING_START |
| 415 /// ******************* |
| 416 /// ((KEY block_node_or_indentless_sequence?)? |
| 417 /// *** * |
| 418 /// (VALUE block_node_or_indentless_sequence?)?)* |
| 419 /// |
| 420 /// BLOCK-END |
| 421 /// ********* |
| 422 Event _parseBlockMappingKey() { |
| 423 var token = _scanner.peek(); |
| 424 if (token.type == TokenType.KEY) { |
| 425 var start = token.span.start; |
| 426 token = _scanner.advance(); |
| 427 |
| 428 if (token.type == TokenType.KEY || |
| 429 token.type == TokenType.VALUE || |
| 430 token.type == TokenType.BLOCK_END) { |
| 431 _state = _State.BLOCK_MAPPING_VALUE; |
| 432 return _processEmptyScalar(start); |
| 433 } else { |
| 434 _states.add(_State.BLOCK_MAPPING_VALUE); |
| 435 return _parseNode(block: true, indentlessSequence: true); |
| 436 } |
| 437 } |
| 438 |
| 439 // libyaml doesn't allow empty keys without an explicit key indicator, but |
| 440 // the spec does. See example 8.18: |
| 441 // http://yaml.org/spec/1.2/spec.html#id2798896. |
| 442 if (token.type == TokenType.VALUE) { |
| 443 _state = _State.BLOCK_MAPPING_VALUE; |
| 444 return _processEmptyScalar(token.span.start); |
| 445 } |
| 446 |
| 447 if (token.type == TokenType.BLOCK_END) { |
| 448 _scanner.scan(); |
| 449 _state = _states.removeLast(); |
| 450 return new Event(EventType.MAPPING_END, token.span); |
| 451 } |
| 452 |
| 453 throw new YamlException("Expected a key while parsing a block mapping.", |
| 454 token.span.start.pointSpan()); |
| 455 } |
| 456 |
| 457 /// Parses the productions: |
| 458 /// |
| 459 /// block_mapping ::= BLOCK-MAPPING_START |
| 460 /// |
| 461 /// ((KEY block_node_or_indentless_sequence?)? |
| 462 /// |
| 463 /// (VALUE block_node_or_indentless_sequence?)?)* |
| 464 /// ***** * |
| 465 /// BLOCK-END |
| 466 /// |
| 467 Event _parseBlockMappingValue() { |
| 468 var token = _scanner.peek(); |
| 469 |
| 470 if (token.type != TokenType.VALUE) { |
| 471 _state = _State.BLOCK_MAPPING_KEY; |
| 472 return _processEmptyScalar(token.span.start); |
| 473 } |
| 474 |
| 475 var start = token.span.start; |
| 476 token = _scanner.advance(); |
| 477 if (token.type == TokenType.KEY || |
| 478 token.type == TokenType.VALUE || |
| 479 token.type == TokenType.BLOCK_END) { |
| 480 _state = _State.BLOCK_MAPPING_KEY; |
| 481 return _processEmptyScalar(start); |
| 482 } else { |
| 483 _states.add(_State.BLOCK_MAPPING_KEY); |
| 484 return _parseNode(block: true, indentlessSequence: true); |
| 485 } |
| 486 } |
| 487 |
| 488 /// Parses the productions: |
| 489 /// |
| 490 /// flow_sequence ::= FLOW-SEQUENCE-START |
| 491 /// ******************* |
| 492 /// (flow_sequence_entry FLOW-ENTRY)* |
| 493 /// * ********** |
| 494 /// flow_sequence_entry? |
| 495 /// * |
| 496 /// FLOW-SEQUENCE-END |
| 497 /// ***************** |
| 498 /// flow_sequence_entry ::= |
| 499 /// flow_node | KEY flow_node? (VALUE flow_node?)? |
| 500 /// * |
| 501 Event _parseFlowSequenceEntry({bool first: false}) { |
| 502 if (first) _scanner.scan(); |
| 503 var token = _scanner.peek(); |
| 504 |
| 505 if (token.type != TokenType.FLOW_SEQUENCE_END) { |
| 506 if (!first) { |
| 507 if (token.type != TokenType.FLOW_ENTRY) { |
| 508 throw new YamlException( |
| 509 "While parsing a flow sequence, expected ',' or ']'.", |
| 510 token.span.start.pointSpan()); |
| 511 } |
| 512 |
| 513 token = _scanner.advance(); |
| 514 } |
| 515 |
| 516 if (token.type == TokenType.KEY) { |
| 517 _state = _State.FLOW_SEQUENCE_ENTRY_MAPPING_KEY; |
| 518 _scanner.scan(); |
| 519 return new MappingStartEvent( |
| 520 token.span, CollectionStyle.FLOW); |
| 521 } else if (token.type != TokenType.FLOW_SEQUENCE_END) { |
| 522 _states.add(_State.FLOW_SEQUENCE_ENTRY); |
| 523 return _parseNode(); |
| 524 } |
| 525 } |
| 526 |
| 527 _scanner.scan(); |
| 528 _state = _states.removeLast(); |
| 529 return new Event(EventType.SEQUENCE_END, token.span); |
| 530 } |
| 531 |
| 532 /// Parses the productions: |
| 533 /// |
| 534 /// flow_sequence_entry ::= |
| 535 /// flow_node | KEY flow_node? (VALUE flow_node?)? |
| 536 /// *** * |
| 537 Event _parseFlowSequenceEntryMappingKey() { |
| 538 var token = _scanner.peek(); |
| 539 |
| 540 if (token.type == TokenType.VALUE || |
| 541 token.type == TokenType.FLOW_ENTRY || |
| 542 token.type == TokenType.FLOW_SEQUENCE_END) { |
| 543 // libyaml consumes the token here, but that seems like a bug, since it |
| 544 // always causes [_parseFlowSequenceEntryMappingValue] to emit an empty |
| 545 // scalar. |
| 546 |
| 547 var start = token.span.start; |
| 548 _state = _State.FLOW_SEQUENCE_ENTRY_MAPPING_VALUE; |
| 549 return _processEmptyScalar(start); |
| 550 } else { |
| 551 _states.add(_State.FLOW_SEQUENCE_ENTRY_MAPPING_VALUE); |
| 552 return _parseNode(); |
| 553 } |
| 554 } |
| 555 |
| 556 /// Parses the productions: |
| 557 /// |
| 558 /// flow_sequence_entry ::= |
| 559 /// flow_node | KEY flow_node? (VALUE flow_node?)? |
| 560 /// ***** * |
| 561 Event _parseFlowSequenceEntryMappingValue() { |
| 562 var token = _scanner.peek(); |
| 563 |
| 564 if (token.type == TokenType.VALUE) { |
| 565 token = _scanner.advance(); |
| 566 if (token.type != TokenType.FLOW_ENTRY && |
| 567 token.type != TokenType.FLOW_SEQUENCE_END) { |
| 568 _states.add(_State.FLOW_SEQUENCE_ENTRY_MAPPING_END); |
| 569 return _parseNode(); |
| 570 } |
| 571 } |
| 572 |
| 573 _state = _State.FLOW_SEQUENCE_ENTRY_MAPPING_END; |
| 574 return _processEmptyScalar(token.span.start); |
| 575 } |
| 576 |
| 577 /// Parses the productions: |
| 578 /// |
| 579 /// flow_sequence_entry ::= |
| 580 /// flow_node | KEY flow_node? (VALUE flow_node?)? |
| 581 /// * |
| 582 Event _parseFlowSequenceEntryMappingEnd() { |
| 583 _state = _State.FLOW_SEQUENCE_ENTRY; |
| 584 return new Event(EventType.MAPPING_END, |
| 585 _scanner.peek().span.start.pointSpan()); |
| 586 } |
| 587 |
| 588 /// Parses the productions: |
| 589 /// |
| 590 /// flow_mapping ::= FLOW-MAPPING-START |
| 591 /// ****************** |
| 592 /// (flow_mapping_entry FLOW-ENTRY)* |
| 593 /// * ********** |
| 594 /// flow_mapping_entry? |
| 595 /// ****************** |
| 596 /// FLOW-MAPPING-END |
| 597 /// **************** |
| 598 /// flow_mapping_entry ::= |
| 599 /// flow_node | KEY flow_node? (VALUE flow_node?)? |
| 600 /// * *** * |
| 601 Event _parseFlowMappingKey({bool first: false}) { |
| 602 if (first) _scanner.scan(); |
| 603 var token = _scanner.peek(); |
| 604 |
| 605 if (token.type != TokenType.FLOW_MAPPING_END) { |
| 606 if (!first) { |
| 607 if (token.type != TokenType.FLOW_ENTRY) { |
| 608 throw new YamlException( |
| 609 "While parsing a flow mapping, expected ',' or '}'.", |
| 610 token.span.start.pointSpan()); |
| 611 } |
| 612 |
| 613 token = _scanner.advance(); |
| 614 } |
| 615 |
| 616 if (token.type == TokenType.KEY) { |
| 617 token = _scanner.advance(); |
| 618 if (token.type != TokenType.VALUE && |
| 619 token.type != TokenType.FLOW_ENTRY && |
| 620 token.type != TokenType.FLOW_MAPPING_END) { |
| 621 _states.add(_State.FLOW_MAPPING_VALUE); |
| 622 return _parseNode(); |
| 623 } else { |
| 624 _state = _State.FLOW_MAPPING_VALUE; |
| 625 return _processEmptyScalar(token.span.start); |
| 626 } |
| 627 } else if (token.type != TokenType.FLOW_MAPPING_END) { |
| 628 _states.add(_State.FLOW_MAPPING_EMPTY_VALUE); |
| 629 return _parseNode(); |
| 630 } |
| 631 } |
| 632 |
| 633 _scanner.scan(); |
| 634 _state = _states.removeLast(); |
| 635 return new Event(EventType.MAPPING_END, token.span); |
| 636 } |
| 637 |
| 638 /// Parses the productions: |
| 639 /// |
| 640 /// flow_mapping_entry ::= |
| 641 /// flow_node | KEY flow_node? (VALUE flow_node?)? |
| 642 /// * ***** * |
| 643 Event _parseFlowMappingValue({bool empty: false}) { |
| 644 var token = _scanner.peek(); |
| 645 |
| 646 if (empty) { |
| 647 _state = _State.FLOW_MAPPING_KEY; |
| 648 return _processEmptyScalar(token.span.start); |
| 649 } |
| 650 |
| 651 if (token.type == TokenType.VALUE) { |
| 652 token = _scanner.advance(); |
| 653 if (token.type != TokenType.FLOW_ENTRY && |
| 654 token.type != TokenType.FLOW_MAPPING_END) { |
| 655 _states.add(_State.FLOW_MAPPING_KEY); |
| 656 return _parseNode(); |
| 657 } |
| 658 } |
| 659 |
| 660 _state = _State.FLOW_MAPPING_KEY; |
| 661 return _processEmptyScalar(token.span.start); |
| 662 } |
| 663 |
| 664 /// Generates an empty scalar event.
| 665 Event _processEmptyScalar(SourceLocation location) => |
| 666 new ScalarEvent(location.pointSpan(), '', ScalarStyle.PLAIN); |
| 667 |
| 668 /// Parses directives. |
| 669 Pair<VersionDirective, List<TagDirective>> _processDirectives() { |
| 670 var token = _scanner.peek(); |
| 671 |
| 672 var versionDirective; |
| 673 var tagDirectives = []; |
| 674 var reservedDirectives = []; |
| 675 while (token.type == TokenType.VERSION_DIRECTIVE || |
| 676 token.type == TokenType.TAG_DIRECTIVE) { |
| 677 if (token is VersionDirectiveToken) { |
| 678 if (versionDirective != null) { |
| 679 throw new YamlException("Duplicate %YAML directive.", token.span); |
| 680 } |
| 681 |
| 682 if (token.major != 1 || token.minor == 0) { |
| 683 throw new YamlException( |
| 684 "Incompatible YAML document. This parser only supports YAML 1.1 " |
| 685 "and 1.2.", |
| 686 token.span); |
| 687 } else if (token.minor > 2) { |
| 688 // TODO(nweiz): Print to stderr when issue 6943 is fixed and dart:io |
| 689 // is available. |
| 690 warn("Warning: this parser only supports YAML 1.1 and 1.2.", |
| 691 token.span); |
| 692 } |
| 693 |
| 694 versionDirective = new VersionDirective(token.major, token.minor); |
| 695 } else if (token is TagDirectiveToken) { |
| 696 var tagDirective = new TagDirective(token.handle, token.prefix); |
| 697 _appendTagDirective(tagDirective, token.span); |
| 698 tagDirectives.add(tagDirective); |
| 699 } |
| 700 |
| 701 token = _scanner.advance(); |
| 702 } |
| 703 |
| 704 _appendTagDirective( |
| 705 new TagDirective("!", "!"), |
| 706 token.span.start.pointSpan(), |
| 707 allowDuplicates: true); |
| 708 _appendTagDirective( |
| 709 new TagDirective("!!", "tag:yaml.org,2002:"), |
| 710 token.span.start.pointSpan(), |
| 711 allowDuplicates: true); |
| 712 |
| 713 return new Pair(versionDirective, tagDirectives); |
| 714 } |
| 715 |
| 716 /// Adds a tag directive to the set of custom tag directives.
| 717 void _appendTagDirective(TagDirective newDirective, FileSpan span, |
| 718 {bool allowDuplicates: false}) { |
| 719 if (_tagDirectives.containsKey(newDirective.handle)) { |
| 720 if (allowDuplicates) return; |
| 721 throw new YamlException("Duplicate %TAG directive.", span); |
| 722 } |
| 723 |
| 724 _tagDirectives[newDirective.handle] = newDirective; |
| 1848 } | 725 } |
| 1849 } | 726 } |
| 1850 | 727 |
| 1851 /// The information in the header for a block scalar. | 728 /// The possible states for the parser. |
| 1852 class _BlockHeader { | 729 class _State { |
| 1853 final int additionalIndent; | 730 /// Expect [TokenType.STREAM_START]. |
| 1854 final int chomping; | 731 static const STREAM_START = const _State("STREAM_START"); |
| 1855 | 732 |
| 1856 _BlockHeader(this.additionalIndent, this.chomping); | 733 /// Expect the beginning of an implicit document. |
| 1857 | 734 static const IMPLICIT_DOCUMENT_START = |
| 1858 bool get autoDetectIndent => additionalIndent == null; | 735 const _State("IMPLICIT_DOCUMENT_START"); |
| 736 |
| 737 /// Expect [TokenType.DOCUMENT_START]. |
| 738 static const DOCUMENT_START = const _State("DOCUMENT_START"); |
| 739 |
| 740 /// Expect the content of a document. |
| 741 static const DOCUMENT_CONTENT = const _State("DOCUMENT_CONTENT"); |
| 742 |
| 743 /// Expect [TokenType.DOCUMENT_END]. |
| 744 static const DOCUMENT_END = const _State("DOCUMENT_END"); |
| 745 |
| 746 /// Expect a block node. |
| 747 static const BLOCK_NODE = const _State("BLOCK_NODE"); |
| 748 |
| 749 /// Expect a block node or indentless sequence. |
| 750 static const BLOCK_NODE_OR_INDENTLESS_SEQUENCE = |
| 751 const _State("BLOCK_NODE_OR_INDENTLESS_SEQUENCE"); |
| 752 |
| 753 /// Expect a flow node. |
| 754 static const FLOW_NODE = const _State("FLOW_NODE"); |
| 755 |
| 756 /// Expect the first entry of a block sequence. |
| 757 static const BLOCK_SEQUENCE_FIRST_ENTRY = |
| 758 const _State("BLOCK_SEQUENCE_FIRST_ENTRY"); |
| 759 |
| 760 /// Expect an entry of a block sequence. |
| 761 static const BLOCK_SEQUENCE_ENTRY = const _State("BLOCK_SEQUENCE_ENTRY"); |
| 762 |
| 763 /// Expect an entry of an indentless sequence. |
| 764 static const INDENTLESS_SEQUENCE_ENTRY = |
| 765 const _State("INDENTLESS_SEQUENCE_ENTRY"); |
| 766 |
| 767 /// Expect the first key of a block mapping. |
| 768 static const BLOCK_MAPPING_FIRST_KEY = |
| 769 const _State("BLOCK_MAPPING_FIRST_KEY"); |
| 770 |
| 771 /// Expect a block mapping key. |
| 772 static const BLOCK_MAPPING_KEY = const _State("BLOCK_MAPPING_KEY"); |
| 773 |
| 774 /// Expect a block mapping value. |
| 775 static const BLOCK_MAPPING_VALUE = const _State("BLOCK_MAPPING_VALUE"); |
| 776 |
| 777 /// Expect the first entry of a flow sequence. |
| 778 static const FLOW_SEQUENCE_FIRST_ENTRY = |
| 779 const _State("FLOW_SEQUENCE_FIRST_ENTRY"); |
| 780 |
| 781 /// Expect an entry of a flow sequence. |
| 782 static const FLOW_SEQUENCE_ENTRY = const _State("FLOW_SEQUENCE_ENTRY"); |
| 783 |
| 784 /// Expect a key of an ordered mapping. |
| 785 static const FLOW_SEQUENCE_ENTRY_MAPPING_KEY = |
| 786 const _State("FLOW_SEQUENCE_ENTRY_MAPPING_KEY"); |
| 787 |
| 788 /// Expect a value of an ordered mapping. |
| 789 static const FLOW_SEQUENCE_ENTRY_MAPPING_VALUE = |
| 790 const _State("FLOW_SEQUENCE_ENTRY_MAPPING_VALUE"); |
| 791 |
| 792 /// Expect the end of an ordered mapping entry.
| 793 static const FLOW_SEQUENCE_ENTRY_MAPPING_END = |
| 794 const _State("FLOW_SEQUENCE_ENTRY_MAPPING_END"); |
| 795 |
| 796 /// Expect the first key of a flow mapping. |
| 797 static const FLOW_MAPPING_FIRST_KEY = const _State("FLOW_MAPPING_FIRST_KEY"); |
| 798 |
| 799 /// Expect a key of a flow mapping. |
| 800 static const FLOW_MAPPING_KEY = const _State("FLOW_MAPPING_KEY"); |
| 801 |
| 802 /// Expect a value of a flow mapping. |
| 803 static const FLOW_MAPPING_VALUE = const _State("FLOW_MAPPING_VALUE"); |
| 804 |
| 805 /// Expect an empty value of a flow mapping. |
| 806 static const FLOW_MAPPING_EMPTY_VALUE = |
| 807 const _State("FLOW_MAPPING_EMPTY_VALUE"); |
| 808 |
| 809 /// Expect nothing. |
| 810 static const END = const _State("END"); |
| 811 |
| 812 final String name; |
| 813 |
| 814 const _State(this.name); |
| 815 |
| 816 String toString() => name; |
| 1859 } | 817 } |
| 1860 | |
| 1861 /// A range of characters in the YAML document, from [start] to [end] | |
| 1862 /// (inclusive). | |
| 1863 class _Range { | |
| 1864 /// The first character in the range. | |
| 1865 final int start; | |
| 1866 | |
| 1867 /// The last character in the range. | |
| 1868 final int end; | |
| 1869 | |
| 1870 _Range(this.start, this.end); | |
| 1871 | |
| 1872 /// Returns whether or not [pos] lies within this range. | |
| 1873 bool contains(int pos) => pos >= start && pos <= end; | |
| 1874 } | |
| 1875 | |
| 1876 /// A map that associates [E] values with [_Range]s. It's efficient to create | |
| 1877 /// new associations, but finding the value associated with a position is more | |
| 1878 /// expensive. | |
| 1879 class _RangeMap<E> { | |
| 1880 /// The ranges and their associated elements. | |
| 1881 final List<Pair<_Range, E>> _contents = <Pair<_Range, E>>[]; | |
| 1882 | |
| 1883 _RangeMap(); | |
| 1884 | |
| 1885 /// Returns the value associated with the range in which [pos] lies, or null | |
| 1886 /// if there is no such range. If there's more than one such range, the most | |
| 1887 /// recently set one is used. | |
| 1888 E operator[](int pos) { | |
| 1889 // Iterate backwards through contents so the more recent range takes | |
| 1890 // precedence. | |
| 1891 for (var pair in _contents.reversed) { | |
| 1892 if (pair.first.contains(pos)) return pair.last; | |
| 1893 } | |
| 1894 return null; | |
| 1895 } | |
| 1896 | |
| 1897 /// Associates [value] with [range]. | |
| 1898 operator[]=(_Range range, E value) => | |
| 1899 _contents.add(new Pair<_Range, E>(range, value)); | |
| 1900 } | |
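
For reviewers who want to exercise the new pull-based parser locally, here is a minimal sketch of a driver loop. It is not part of this change: it assumes a constructor of the form `new Parser(source, sourceUrl)` and an `Event parse()` method that returns the next event, both of which are assumptions about the surrounding library rather than something this diff shows.

```dart
// Minimal sketch, assuming `new Parser(source, sourceUrl)` and an
// `Event parse()` method that returns the next event; both names are
// assumptions, not confirmed by this diff.
import 'package:yaml/src/event.dart';
import 'package:yaml/src/parser.dart';

void dumpEvents(String source) {
  var parser = new Parser(source, null);
  // Pull events until the parser reaches its end state (surfaced as isDone).
  while (!parser.isDone) {
    print(parser.parse());
  }
}
```

Each call pulls one `Event` from the state machine described by `_State`, so the loop terminates once the stream-end event has been emitted and the parser reaches `_State.END`.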