| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 /// Contains the top-level function to parse source maps version 3. | 5 /// Contains the top-level function to parse source maps version 3. |
| 6 library source_maps.parser; | 6 library source_maps.parser; |
| 7 | 7 |
| 8 import 'dart:collection'; | 8 import 'dart:collection'; |
| 9 import 'dart:convert'; | 9 import 'dart:convert'; |
| 10 | 10 |
| 11 import 'builder.dart' as builder; | 11 import 'builder.dart' as builder; |
| 12 import 'span.dart'; | 12 import 'span.dart'; |
| 13 import 'src/utils.dart'; | 13 import 'src/utils.dart'; |
| 14 import 'src/vlq.dart'; | 14 import 'src/vlq.dart'; |
| 15 | 15 |
| 16 /// Parses a source map directly from a json string. | 16 /// Parses a source map directly from a json string. |
| 17 // TODO(sigmund): evaluate whether other maps should have the json parsed, or | 17 // TODO(sigmund): evaluate whether other maps should have the json parsed, or |
| 18 // the string representation. | 18 // the string representation. |
| 19 // TODO(tjblasi): Ignore the first line of [jsonMap] if the JSON safety string |
| 20 // `)]}'` begins the string representation of the map. |
| 19 Mapping parse(String jsonMap, {Map<String, Map> otherMaps}) => | 21 Mapping parse(String jsonMap, {Map<String, Map> otherMaps}) => |
| 20 parseJson(JSON.decode(jsonMap), otherMaps: otherMaps); | 22 parseJson(JSON.decode(jsonMap), otherMaps: otherMaps); |
| 21 | 23 |
| 22 /// Parses a source map directly from a json map object. | 24 /// Parses a source map directly from a json map object. |
| 23 Mapping parseJson(Map map, {Map<String, Map> otherMaps}) { | 25 Mapping parseJson(Map map, {Map<String, Map> otherMaps}) { |
| 24 if (map['version'] != 3) { | 26 if (map['version'] != 3) { |
| 25 throw new ArgumentError( | 27 throw new ArgumentError( |
| 26 'unexpected source map version: ${map["version"]}. ' | 28 'unexpected source map version: ${map["version"]}. ' |
| 27 'Only version 3 is supported.'); | 29 'Only version 3 is supported.'); |
| 28 } | 30 } |
| (...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 139 | 141 |
| 140 /// Source urls used in the mapping, indexed by id. | 142 /// Source urls used in the mapping, indexed by id. |
| 141 final List<String> urls; | 143 final List<String> urls; |
| 142 | 144 |
| 143 /// Source names used in the mapping, indexed by id. | 145 /// Source names used in the mapping, indexed by id. |
| 144 final List<String> names; | 146 final List<String> names; |
| 145 | 147 |
| 146 /// Entries indicating the beginning of each span. | 148 /// Entries indicating the beginning of each span. |
| 147 final List<TargetLineEntry> lines; | 149 final List<TargetLineEntry> lines; |
| 148 | 150 |
| 151 /// Source root appended to the start of all entries in [urls]. |
| 152 String sourceRoot = null; |
| 153 |
| 149 SingleMapping._internal(this.targetUrl, this.urls, this.names, this.lines); | 154 SingleMapping._internal(this.targetUrl, this.urls, this.names, this.lines); |
| 150 | 155 |
| 151 factory SingleMapping.fromEntries( | 156 factory SingleMapping.fromEntries( |
| 152 Iterable<builder.Entry> entries, [String fileUrl]) { | 157 Iterable<builder.Entry> entries, [String fileUrl]) { |
| 153 // The entries needs to be sorted by the target offsets. | 158 // The entries needs to be sorted by the target offsets. |
| 154 var sourceEntries = new List.from(entries)..sort(); | 159 var sourceEntries = new List.from(entries)..sort(); |
| 155 var lines = <TargetLineEntry>[]; | 160 var lines = <TargetLineEntry>[]; |
| 156 | 161 |
| 157 // Indices associated with file urls that will be part of the source map. We | 162 // Indices associated with file urls that will be part of the source map. We |
| 158 // use a linked hash-map so that `_urls.keys[_urls[u]] == u` | 163 // use a linked hash-map so that `_urls.keys[_urls[u]] == u` |
| (...skipping 26 matching lines...) Expand all Loading... |
| 185 sourceEntry.source.column, | 190 sourceEntry.source.column, |
| 186 srcNameId)); | 191 srcNameId)); |
| 187 } | 192 } |
| 188 } | 193 } |
| 189 return new SingleMapping._internal( | 194 return new SingleMapping._internal( |
| 190 fileUrl, urls.keys.toList(), names.keys.toList(), lines); | 195 fileUrl, urls.keys.toList(), names.keys.toList(), lines); |
| 191 } | 196 } |
| 192 | 197 |
| 193 SingleMapping.fromJson(Map map) | 198 SingleMapping.fromJson(Map map) |
| 194 : targetUrl = map['file'], | 199 : targetUrl = map['file'], |
| 195 // TODO(sigmund): add support for 'sourceRoot' | |
| 196 urls = map['sources'], | 200 urls = map['sources'], |
| 197 names = map['names'], | 201 names = map['names'], |
| 202 sourceRoot = map['sourceRoot'], |
| 198 lines = <TargetLineEntry>[] { | 203 lines = <TargetLineEntry>[] { |
| 199 int line = 0; | 204 int line = 0; |
| 200 int column = 0; | 205 int column = 0; |
| 201 int srcUrlId = 0; | 206 int srcUrlId = 0; |
| 202 int srcLine = 0; | 207 int srcLine = 0; |
| 203 int srcColumn = 0; | 208 int srcColumn = 0; |
| 204 int srcNameId = 0; | 209 int srcNameId = 0; |
| 205 var tokenizer = new _MappingTokenizer(map['mappings']); | 210 var tokenizer = new _MappingTokenizer(map['mappings']); |
| 206 var entries = <TargetEntry>[]; | 211 var entries = <TargetEntry>[]; |
| 207 | 212 |
| (...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 296 srcLine = _append(buff, srcLine, segment.sourceLine); | 301 srcLine = _append(buff, srcLine, segment.sourceLine); |
| 297 srcColumn = _append(buff, srcColumn, segment.sourceColumn); | 302 srcColumn = _append(buff, srcColumn, segment.sourceColumn); |
| 298 | 303 |
| 299 if (segment.sourceNameId == null) continue; | 304 if (segment.sourceNameId == null) continue; |
| 300 srcNameId = _append(buff, srcNameId, segment.sourceNameId); | 305 srcNameId = _append(buff, srcNameId, segment.sourceNameId); |
| 301 } | 306 } |
| 302 } | 307 } |
| 303 | 308 |
| 304 var result = { | 309 var result = { |
| 305 'version': 3, | 310 'version': 3, |
| 306 'sourceRoot': '', | 311 'sourceRoot': sourceRoot == null ? '' : sourceRoot, |
| 307 'sources': urls, | 312 'sources': urls, |
| 308 'names' : names, | 313 'names' : names, |
| 309 'mappings' : buff.toString() | 314 'mappings' : buff.toString() |
| 310 }; | 315 }; |
| 311 if (targetUrl != null) { | 316 if (targetUrl != null) { |
| 312 result['file'] = targetUrl; | 317 result['file'] = targetUrl; |
| 313 } | 318 } |
| 314 return result; | 319 return result; |
| 315 } | 320 } |
| 316 | 321 |
| (...skipping 26 matching lines...) Expand all Loading... |
| 343 if (lineEntry.line != line) return lineEntry.entries.last; | 348 if (lineEntry.line != line) return lineEntry.entries.last; |
| 344 var entries = lineEntry.entries; | 349 var entries = lineEntry.entries; |
| 345 int index = binarySearch(entries, (e) => e.column > column); | 350 int index = binarySearch(entries, (e) => e.column > column); |
| 346 return (index <= 0) ? null : entries[index - 1]; | 351 return (index <= 0) ? null : entries[index - 1]; |
| 347 } | 352 } |
| 348 | 353 |
| 349 Span spanFor(int line, int column, {Map<String, SourceFile> files}) { | 354 Span spanFor(int line, int column, {Map<String, SourceFile> files}) { |
| 350 var entry = _findColumn(line, column, _findLine(line)); | 355 var entry = _findColumn(line, column, _findLine(line)); |
| 351 if (entry == null || entry.sourceUrlId == null) return null; | 356 if (entry == null || entry.sourceUrlId == null) return null; |
| 352 var url = urls[entry.sourceUrlId]; | 357 var url = urls[entry.sourceUrlId]; |
| 358 if (sourceRoot != null) { |
| 359 url = '${sourceRoot}${url}'; |
| 360 } |
| 353 if (files != null && files[url] != null) { | 361 if (files != null && files[url] != null) { |
| 354 var file = files[url]; | 362 var file = files[url]; |
| 355 var start = file.getOffset(entry.sourceLine, entry.sourceColumn); | 363 var start = file.getOffset(entry.sourceLine, entry.sourceColumn); |
| 356 if (entry.sourceNameId != null) { | 364 if (entry.sourceNameId != null) { |
| 357 var text = names[entry.sourceNameId]; | 365 var text = names[entry.sourceNameId]; |
| 358 return new FileSpan(files[url], start, start + text.length, true); | 366 return new FileSpan(files[url], start, start + text.length, true); |
| 359 } else { | 367 } else { |
| 360 return new FileSpan(files[url], start); | 368 return new FileSpan(files[url], start); |
| 361 } | 369 } |
| 362 } else { | 370 } else { |
| 363 // Offset and other context is not available. | 371 // Offset and other context is not available. |
| 364 if (entry.sourceNameId != null) { | 372 if (entry.sourceNameId != null) { |
| 365 return new FixedSpan(url, 0, entry.sourceLine, entry.sourceColumn, | 373 return new FixedSpan(url, 0, entry.sourceLine, entry.sourceColumn, |
| 366 text: names[entry.sourceNameId], isIdentifier: true); | 374 text: names[entry.sourceNameId], isIdentifier: true); |
| 367 } else { | 375 } else { |
| 368 return new FixedSpan(url, 0, entry.sourceLine, entry.sourceColumn); | 376 return new FixedSpan(url, 0, entry.sourceLine, entry.sourceColumn); |
| 369 } | 377 } |
| 370 } | 378 } |
| 371 } | 379 } |
| 372 | 380 |
| 373 String toString() { | 381 String toString() { |
| 374 return (new StringBuffer("$runtimeType : [") | 382 return (new StringBuffer("$runtimeType : [") |
| 375 ..write('targetUrl: ') | 383 ..write('targetUrl: ') |
| 376 ..write(targetUrl) | 384 ..write(targetUrl) |
| 385 ..write(', sourceRoot: ') |
| 386 ..write(sourceRoot) |
| 377 ..write(', urls: ') | 387 ..write(', urls: ') |
| 378 ..write(urls) | 388 ..write(urls) |
| 379 ..write(', names: ') | 389 ..write(', names: ') |
| 380 ..write(names) | 390 ..write(names) |
| 381 ..write(', lines: ') | 391 ..write(', lines: ') |
| 382 ..write(lines) | 392 ..write(lines) |
| 383 ..write(']')).toString(); | 393 ..write(']')).toString(); |
| 384 } | 394 } |
| 385 | 395 |
| 386 String get debugString { | 396 String get debugString { |
| 387 var buff = new StringBuffer(); | 397 var buff = new StringBuffer(); |
| 388 for (var lineEntry in lines) { | 398 for (var lineEntry in lines) { |
| 389 var line = lineEntry.line; | 399 var line = lineEntry.line; |
| 390 for (var entry in lineEntry.entries) { | 400 for (var entry in lineEntry.entries) { |
| 391 buff..write(targetUrl) | 401 buff..write(targetUrl) |
| 392 ..write(': ') | 402 ..write(': ') |
| 393 ..write(line) | 403 ..write(line) |
| 394 ..write(':') | 404 ..write(':') |
| 395 ..write(entry.column) | 405 ..write(entry.column); |
| 396 ..write(' --> ') | 406 if (entry.sourceUrlId != null) { |
| 397 ..write(urls[entry.sourceUrlId]) | 407 buff..write(' --> ') |
| 398 ..write(': ') | 408 ..write(sourceRoot) |
| 399 ..write(entry.sourceLine) | 409 ..write(urls[entry.sourceUrlId]) |
| 400 ..write(':') | 410 ..write(': ') |
| 401 ..write(entry.sourceColumn); | 411 ..write(entry.sourceLine) |
| 412 ..write(':') |
| 413 ..write(entry.sourceColumn); |
| 414 } |
| 402 if (entry.sourceNameId != null) { | 415 if (entry.sourceNameId != null) { |
| 403 buff..write(' (') | 416 buff..write(' (') |
| 404 ..write(names[entry.sourceNameId]) | 417 ..write(names[entry.sourceNameId]) |
| 405 ..write(')'); | 418 ..write(')'); |
| 406 } | 419 } |
| 407 buff.write('\n'); | 420 buff.write('\n'); |
| 408 } | 421 } |
| 409 } | 422 } |
| 410 return buff.toString(); | 423 return buff.toString(); |
| 411 } | 424 } |
| (...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 487 static const _TokenKind EOF = const _TokenKind(isEof: true); | 500 static const _TokenKind EOF = const _TokenKind(isEof: true); |
| 488 static const _TokenKind VALUE = const _TokenKind(); | 501 static const _TokenKind VALUE = const _TokenKind(); |
| 489 final bool isNewLine; | 502 final bool isNewLine; |
| 490 final bool isNewSegment; | 503 final bool isNewSegment; |
| 491 final bool isEof; | 504 final bool isEof; |
| 492 bool get isValue => !isNewLine && !isNewSegment && !isEof; | 505 bool get isValue => !isNewLine && !isNewSegment && !isEof; |
| 493 | 506 |
| 494 const _TokenKind( | 507 const _TokenKind( |
| 495 {this.isNewLine: false, this.isNewSegment: false, this.isEof: false}); | 508 {this.isNewLine: false, this.isNewSegment: false, this.isEof: false}); |
| 496 } | 509 } |
| OLD | NEW |