// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

/// Contains the top-level function to parse source maps version 3.
library source_maps.parser;

import 'dart:collection';
import 'dart:convert';

import 'package:source_span/source_span.dart';

import 'builder.dart' as builder;
import 'src/source_map_span.dart';
import 'src/utils.dart';
import 'src/vlq.dart';

/// Parses a source map directly from a json string.
///
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
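///
/// For example (an illustrative sketch, not from the original docs; the file
/// name is hypothetical and reading it requires `dart:io`):
///
///     var mapping = parse(new File('out.js.map').readAsStringSync(),
///         mapUrl: 'out.js.map');
///     // Look up the original location for line 2, column 15 (0-based) of
///     // the generated file.
///     var span = mapping.spanFor(2, 15);
///     if (span != null) print('${span.start.sourceUrl}: ${span.start.line}');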
// TODO(sigmund): evaluate whether other maps should have the json parsed, or
// the string representation.
// TODO(tjblasi): Ignore the first line of [jsonMap] if the JSON safety string
// `)]}'` begins the string representation of the map.
Mapping parse(String jsonMap, {Map<String, Map> otherMaps, mapUrl}) =>
    parseJson(JSON.decode(jsonMap), otherMaps: otherMaps, mapUrl: mapUrl);

/// Parses a source map directly from a json map object.
///
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
Mapping parseJson(Map map, {Map<String, Map> otherMaps, mapUrl}) {
  if (map['version'] != 3) {
    throw new ArgumentError(
        'unexpected source map version: ${map["version"]}. '
        'Only version 3 is supported.');
  }

  if (map.containsKey('sections')) {
    if (map.containsKey('mappings') || map.containsKey('sources') ||
        map.containsKey('names')) {
      throw new FormatException('map containing "sections" '
          'cannot contain "mappings", "sources", or "names".');
    }
    return new MultiSectionMapping.fromJson(map['sections'], otherMaps,
        mapUrl: mapUrl);
  }
  return new SingleMapping.fromJson(map, mapUrl: mapUrl);
}

/// A mapping parsed out of a source map.
abstract class Mapping {
  /// Returns the span associated with [line] and [column].
  SourceMapSpan spanFor(int line, int column, {Map<String, SourceFile> files});

  /// Returns the span associated with [location].
  SourceMapSpan spanForLocation(SourceLocation location,
      {Map<String, SourceFile> files}) {
    return spanFor(location.line, location.column, files: files);
  }
}

/// A meta-level map containing sections.
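///
/// The expected JSON shape (inferred from the parsing code below; the nested
/// map contents are elided) is roughly:
///
///     {
///       "version": 3,
///       "sections": [
///         {"offset": {"line": 0, "column": 0}, "map": { ... }},
///         {"offset": {"line": 100, "column": 0}, "url": "part2.js.map"}
///       ]
///     }
///
/// A section that uses "url" is resolved against the `otherMaps` argument of
/// [parseJson] rather than being fetched.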
class MultiSectionMapping extends Mapping {
  /// For each section, the start line offset.
  final List<int> _lineStart = <int>[];

  /// For each section, the start column offset.
  final List<int> _columnStart = <int>[];

  /// For each section, the actual source map information, which is not
  /// adjusted for offsets.
  final List<Mapping> _maps = <Mapping>[];

  /// Creates a section mapping from json.
  MultiSectionMapping.fromJson(List sections, Map<String, Map> otherMaps,
      {mapUrl}) {
    for (var section in sections) {
      var offset = section['offset'];
      if (offset == null) throw new FormatException('section missing offset');

      var line = section['offset']['line'];
      if (line == null) throw new FormatException('offset missing line');

      var column = section['offset']['column'];
      if (column == null) throw new FormatException('offset missing column');

      _lineStart.add(line);
      _columnStart.add(column);

      var url = section['url'];
      var map = section['map'];

      if (url != null && map != null) {
        throw new FormatException("section can't use both url and map entries");
      } else if (url != null) {
        if (otherMaps == null || otherMaps[url] == null) {
          throw new FormatException(
              'section refers to $url, but no map was given for it. '
              'Make sure a map is passed in "otherMaps".');
        }
        _maps.add(parseJson(otherMaps[url], otherMaps: otherMaps, mapUrl: url));
      } else if (map != null) {
        _maps.add(parseJson(map, otherMaps: otherMaps, mapUrl: mapUrl));
      } else {
        throw new FormatException('section missing url or map');
      }
    }
    if (_lineStart.length == 0) {
      throw new FormatException('expected at least one section');
    }
  }

  int _indexFor(line, column) {
    for (int i = 0; i < _lineStart.length; i++) {
      if (line < _lineStart[i]) return i - 1;
      if (line == _lineStart[i] && column < _columnStart[i]) return i - 1;
    }
    return _lineStart.length - 1;
  }

  SourceMapSpan spanFor(int line, int column, {Map<String, SourceFile> files}) {
    int index = _indexFor(line, column);
    return _maps[index].spanFor(
        line - _lineStart[index], column - _columnStart[index], files: files);
  }

  String toString() {
    var buff = new StringBuffer("$runtimeType : [");
    for (int i = 0; i < _lineStart.length; i++) {
      buff..write('(')
          ..write(_lineStart[i])
          ..write(',')
          ..write(_columnStart[i])
          ..write(':')
          ..write(_maps[i])
          ..write(')');
    }
    buff.write(']');
    return buff.toString();
  }
}

/// A map containing direct source mappings.
class SingleMapping extends Mapping {
  /// Source urls used in the mapping, indexed by id.
  final List<String> urls;

  /// Source names used in the mapping, indexed by id.
  final List<String> names;

  /// Entries indicating the beginning of each span.
  final List<TargetLineEntry> lines;

  /// Url of the target file.
  String targetUrl;

  /// Source root prepended to all entries in [urls].
  String sourceRoot;

  final Uri _mapUrl;

  SingleMapping._(this.targetUrl, this.urls, this.names, this.lines)
      : _mapUrl = null;

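  /// Creates a mapping from a set of [builder.Entry]s; the entries are sorted
  /// by their target offsets and grouped into one [TargetLineEntry] per
  /// target line.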
  factory SingleMapping.fromEntries(
      Iterable<builder.Entry> entries, [String fileUrl]) {
    // The entries need to be sorted by the target offsets.
    var sourceEntries = new List.from(entries)..sort();
    var lines = <TargetLineEntry>[];

    // Indices associated with file urls that will be part of the source map.
    // We use a linked hash-map so that `urls.keys[urls[u]] == u`.
    var urls = new LinkedHashMap<String, int>();

    // Indices associated with identifiers that will be part of the source map.
    // We use a linked hash-map so that `names.keys[names[n]] == n`.
    var names = new LinkedHashMap<String, int>();

    var lineNum;
    var targetEntries;
    for (var sourceEntry in sourceEntries) {
      if (lineNum == null || sourceEntry.target.line > lineNum) {
        lineNum = sourceEntry.target.line;
        targetEntries = <TargetEntry>[];
        lines.add(new TargetLineEntry(lineNum, targetEntries));
      }

      if (sourceEntry.source == null) {
        targetEntries.add(new TargetEntry(sourceEntry.target.column));
      } else {
        var sourceUrl = sourceEntry.source.sourceUrl;
        var urlId = urls.putIfAbsent(
            sourceUrl == null ? '' : sourceUrl.toString(), () => urls.length);
        var srcNameId = sourceEntry.identifierName == null ? null :
            names.putIfAbsent(sourceEntry.identifierName, () => names.length);
        targetEntries.add(new TargetEntry(
            sourceEntry.target.column,
            urlId,
            sourceEntry.source.line,
            sourceEntry.source.column,
            srcNameId));
      }
    }
    return new SingleMapping._(
        fileUrl, urls.keys.toList(), names.keys.toList(), lines);
  }

  SingleMapping.fromJson(Map map, {mapUrl})
      : targetUrl = map['file'],
        urls = map['sources'],
        names = map['names'],
        sourceRoot = map['sourceRoot'],
        lines = <TargetLineEntry>[],
        _mapUrl = mapUrl is String ? Uri.parse(mapUrl) : mapUrl {
    int line = 0;
    int column = 0;
    int srcUrlId = 0;
    int srcLine = 0;
    int srcColumn = 0;
    int srcNameId = 0;
    var tokenizer = new _MappingTokenizer(map['mappings']);
    var entries = <TargetEntry>[];

    while (tokenizer.hasTokens) {
      if (tokenizer.nextKind.isNewLine) {
        if (!entries.isEmpty) {
          lines.add(new TargetLineEntry(line, entries));
          entries = <TargetEntry>[];
        }
        line++;
        column = 0;
        tokenizer._consumeNewLine();
        continue;
      }

      // Decode the next entry, using the previously encountered values to
      // decode the relative values.
      //
      // We expect 1, 4, or 5 values. If present, values are expected in the
      // following order:
      //   0: the starting column in the current line of the generated file
      //   1: the id of the original source file
      //   2: the starting line in the original source
      //   3: the starting column in the original source
      //   4: the id of the original symbol name
      // The values are relative to the previously encountered values.
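      //
      // For example (illustrative, assuming the standard base64 VLQ encoding
      // implemented in `src/vlq.dart`): the segment "IAEM" decodes to the
      // relative values [4, 0, 2, 6], i.e. column += 4, srcUrlId += 0,
      // srcLine += 2, srcColumn += 6, while a bare "E" decodes to [2] and
      // only advances column by 2.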
      if (tokenizer.nextKind.isNewSegment) throw _segmentError(0, line);
      column += tokenizer._consumeValue();
      if (!tokenizer.nextKind.isValue) {
        entries.add(new TargetEntry(column));
      } else {
        srcUrlId += tokenizer._consumeValue();
        if (srcUrlId >= urls.length) {
          throw new StateError(
              'Invalid source url id: $targetUrl, $line, $srcUrlId');
        }
        if (!tokenizer.nextKind.isValue) throw _segmentError(2, line);
        srcLine += tokenizer._consumeValue();
        if (!tokenizer.nextKind.isValue) throw _segmentError(3, line);
        srcColumn += tokenizer._consumeValue();
        if (!tokenizer.nextKind.isValue) {
          entries.add(new TargetEntry(column, srcUrlId, srcLine, srcColumn));
        } else {
          srcNameId += tokenizer._consumeValue();
          if (srcNameId >= names.length) {
            throw new StateError(
                'Invalid name id: $targetUrl, $line, $srcNameId');
          }
          entries.add(new TargetEntry(column, srcUrlId, srcLine, srcColumn,
              srcNameId));
        }
      }
      if (tokenizer.nextKind.isNewSegment) tokenizer._consumeNewSegment();
    }
    if (!entries.isEmpty) {
      lines.add(new TargetLineEntry(line, entries));
    }
  }

  /// Encodes this mapping as a json map.
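  ///
  /// The result can be serialized with `JSON.encode` to produce the contents
  /// of a `.map` file, for example (a sketch; the output path is hypothetical
  /// and writing it requires `dart:io`):
  ///
  ///     new File('out.js.map').writeAsStringSync(JSON.encode(mapping.toJson()));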
  Map toJson() {
    var buff = new StringBuffer();
    var line = 0;
    var column = 0;
    var srcLine = 0;
    var srcColumn = 0;
    var srcUrlId = 0;
    var srcNameId = 0;
    var first = true;

    for (var entry in lines) {
      int nextLine = entry.line;
      if (nextLine > line) {
        for (int i = line; i < nextLine; ++i) {
          buff.write(';');
        }
        line = nextLine;
        column = 0;
        first = true;
      }

      for (var segment in entry.entries) {
        if (!first) buff.write(',');
        first = false;
        column = _append(buff, column, segment.column);

        // Encoding can be just the column offset if there is no source
        // information.
        var newUrlId = segment.sourceUrlId;
        if (newUrlId == null) continue;
        srcUrlId = _append(buff, srcUrlId, newUrlId);
        srcLine = _append(buff, srcLine, segment.sourceLine);
        srcColumn = _append(buff, srcColumn, segment.sourceColumn);

        if (segment.sourceNameId == null) continue;
        srcNameId = _append(buff, srcNameId, segment.sourceNameId);
      }
    }

    var result = {
      'version': 3,
      'sourceRoot': sourceRoot == null ? '' : sourceRoot,
      'sources': urls,
      'names': names,
      'mappings': buff.toString()
    };
    if (targetUrl != null) {
      result['file'] = targetUrl;
    }
    return result;
  }

  /// Appends to [buff] a VLQ encoding of [newValue] using the difference
  /// between [oldValue] and [newValue].
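  ///
  /// For example, `_append(buff, 3, 5)` writes the VLQ encoding of `2` (the
  /// single character "E" in the standard base64 VLQ alphabet) and returns
  /// `5`.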
  static int _append(StringBuffer buff, int oldValue, int newValue) {
    buff.writeAll(encodeVlq(newValue - oldValue));
    return newValue;
  }

  _segmentError(int seen, int line) => new StateError(
      'Invalid entry in sourcemap, expected 1, 4, or 5'
      ' values, but got $seen.\ntargetUrl: $targetUrl, line: $line');

  /// Returns the [TargetLineEntry] that contains the location of the target
  /// [line]. In particular, the result is the last entry whose line number is
  /// less than or equal to [line].
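  ///
  /// For example, if [lines] has entries for lines 1, 5, and 9, then
  /// `_findLine(7)` returns the entry for line 5, while `_findLine(0)`
  /// returns `null`.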
  TargetLineEntry _findLine(int line) {
    int index = binarySearch(lines, (e) => e.line > line);
    return (index <= 0) ? null : lines[index - 1];
  }

  /// Returns the [TargetEntry] that contains the location denoted by [line]
  /// and [column]. If [lineEntry] corresponds to [line], this is the last
  /// entry whose column is less than or equal to [column]. If [lineEntry]
  /// corresponds to a line prior to [line], the result is the very last entry
  /// on that line.
  TargetEntry _findColumn(int line, int column, TargetLineEntry lineEntry) {
    if (lineEntry == null || lineEntry.entries.length == 0) return null;
    if (lineEntry.line != line) return lineEntry.entries.last;
    var entries = lineEntry.entries;
    int index = binarySearch(entries, (e) => e.column > column);
    return (index <= 0) ? null : entries[index - 1];
  }

  SourceMapSpan spanFor(int line, int column, {Map<String, SourceFile> files}) {
    var entry = _findColumn(line, column, _findLine(line));
    if (entry == null || entry.sourceUrlId == null) return null;
    var url = urls[entry.sourceUrlId];
    if (sourceRoot != null) {
      url = '${sourceRoot}${url}';
    }
    if (files != null && files[url] != null) {
      var file = files[url];
      var start = file.getOffset(entry.sourceLine, entry.sourceColumn);
      if (entry.sourceNameId != null) {
        var text = names[entry.sourceNameId];
        return new SourceMapFileSpan(
            files[url].span(start, start + text.length),
            isIdentifier: true);
      } else {
        return new SourceMapFileSpan(files[url].location(start).pointSpan());
      }
    } else {
      var start = new SourceLocation(0,
          sourceUrl: _mapUrl == null ? url : _mapUrl.resolve(url),
          line: entry.sourceLine,
          column: entry.sourceColumn);

      // Offset and other context are not available.
      if (entry.sourceNameId != null) {
        return new SourceMapSpan.identifier(start, names[entry.sourceNameId]);
      } else {
        return new SourceMapSpan(start, start, '');
      }
    }
  }

  String toString() {
    return (new StringBuffer("$runtimeType : [")
        ..write('targetUrl: ')
        ..write(targetUrl)
        ..write(', sourceRoot: ')
        ..write(sourceRoot)
        ..write(', urls: ')
        ..write(urls)
        ..write(', names: ')
        ..write(names)
        ..write(', lines: ')
        ..write(lines)
        ..write(']')).toString();
  }

  String get debugString {
    var buff = new StringBuffer();
    for (var lineEntry in lines) {
      var line = lineEntry.line;
      for (var entry in lineEntry.entries) {
        buff..write(targetUrl)
            ..write(': ')
            ..write(line)
            ..write(':')
            ..write(entry.column);
        if (entry.sourceUrlId != null) {
          buff..write(' --> ')
              ..write(sourceRoot)
              ..write(urls[entry.sourceUrlId])
              ..write(': ')
              ..write(entry.sourceLine)
              ..write(':')
              ..write(entry.sourceColumn);
        }
        if (entry.sourceNameId != null) {
          buff..write(' (')
              ..write(names[entry.sourceNameId])
              ..write(')');
        }
        buff.write('\n');
      }
    }
    return buff.toString();
  }
}

/// A line entry read from a source map.
class TargetLineEntry {
  final int line;
  List<TargetEntry> entries;
  TargetLineEntry(this.line, this.entries);

  String toString() => '$runtimeType: $line $entries';
}

/// A target segment entry read from a source map.
class TargetEntry {
  final int column;
  final int sourceUrlId;
  final int sourceLine;
  final int sourceColumn;
  final int sourceNameId;

  TargetEntry(this.column, [this.sourceUrlId, this.sourceLine,
      this.sourceColumn, this.sourceNameId]);

  String toString() => '$runtimeType: '
      '($column, $sourceUrlId, $sourceLine, $sourceColumn, $sourceNameId)';
}

/// A character iterator over a string that can peek one character ahead.
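///
/// The parser peeks via [nextKind] and then consumes either a VLQ value, a
/// ',' (new segment), or a ';' (new line). For example, with the standard
/// VLQ alphabet the mappings string "A;;C" yields a value (0), two new-line
/// tokens, and a value (1).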
class _MappingTokenizer implements Iterator<String> {
  final String _internal;
  final int _length;
  int index = -1;
  _MappingTokenizer(String internal)
      : _internal = internal,
        _length = internal.length;

  // Iterator API is used by decodeVlq to consume VLQ entries.
  bool moveNext() => ++index < _length;
  String get current =>
      (index >= 0 && index < _length) ? _internal[index] : null;

  bool get hasTokens => index < _length - 1 && _length > 0;

  _TokenKind get nextKind {
    if (!hasTokens) return _TokenKind.EOF;
    var next = _internal[index + 1];
    if (next == ';') return _TokenKind.LINE;
    if (next == ',') return _TokenKind.SEGMENT;
    return _TokenKind.VALUE;
  }

  int _consumeValue() => decodeVlq(this);
  void _consumeNewLine() { ++index; }
  void _consumeNewSegment() { ++index; }

  // Print the state of the iterator, with ANSI colors indicating the current
  // position.
  String toString() {
    var buff = new StringBuffer();
    for (int i = 0; i < index; i++) {
      buff.write(_internal[i]);
    }
    buff.write('\u001b[31m');
    buff.write(current == null ? '' : current);
    buff.write('\u001b[0m');
    for (int i = index + 1; i < _internal.length; i++) {
      buff.write(_internal[i]);
    }
    buff.write(' ($index)');
    return buff.toString();
  }
}

class _TokenKind {
  static const _TokenKind LINE = const _TokenKind(isNewLine: true);
  static const _TokenKind SEGMENT = const _TokenKind(isNewSegment: true);
  static const _TokenKind EOF = const _TokenKind(isEof: true);
  static const _TokenKind VALUE = const _TokenKind();
  final bool isNewLine;
  final bool isNewSegment;
  final bool isEof;
  bool get isValue => !isNewLine && !isNewSegment && !isEof;

  const _TokenKind(
      {this.isNewLine: false, this.isNewSegment: false, this.isEof: false});
}