| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 /// Contains the top-level function to parse source maps version 3. | 5 /// Contains the top-level function to parse source maps version 3. |
| 6 library source_maps.parser; | 6 library source_maps.parser; |
| 7 | 7 |
| 8 import 'dart:collection'; | 8 import 'dart:collection'; |
| 9 import 'dart:convert'; | 9 import 'dart:convert'; |
| 10 | 10 |
| 11 import 'package:source_span/source_span.dart'; |
| 12 |
| 11 import 'builder.dart' as builder; | 13 import 'builder.dart' as builder; |
| 12 import 'span.dart'; | 14 import 'src/source_map_span.dart'; |
| 13 import 'src/span_wrapper.dart'; | |
| 14 import 'src/utils.dart'; | 15 import 'src/utils.dart'; |
| 15 import 'src/vlq.dart'; | 16 import 'src/vlq.dart'; |
| 16 | 17 |
| 17 /// Parses a source map directly from a json string. | 18 /// Parses a source map directly from a json string. |
| 18 // TODO(sigmund): evaluate whether other maps should have the json parsed, or | 19 // TODO(sigmund): evaluate whether other maps should have the json parsed, or |
| 19 // the string representation. | 20 // the string representation. |
| 20 // TODO(tjblasi): Ignore the first line of [jsonMap] if the JSON safety string | 21 // TODO(tjblasi): Ignore the first line of [jsonMap] if the JSON safety string |
| 21 // `)]}'` begins the string representation of the map. | 22 // `)]}'` begins the string representation of the map. |
| 22 Mapping parse(String jsonMap, {Map<String, Map> otherMaps}) => | 23 Mapping parse(String jsonMap, {Map<String, Map> otherMaps}) => |
| 23 parseJson(JSON.decode(jsonMap), otherMaps: otherMaps); | 24 parseJson(JSON.decode(jsonMap), otherMaps: otherMaps); |
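For context, a minimal usage sketch of the `parse` entry point as it stands after this change: it decodes the JSON string and hands the resulting map to `parseJson`. The map contents, file names, and looked-up position below are made up for illustration.

```dart
import 'package:source_maps/source_maps.dart';

void main() {
  // A hypothetical version-3 map for a compiled file; the "mappings"
  // string is a tiny placeholder, not an encoding of real code.
  var jsonMap = '''
  {
    "version": 3,
    "file": "out.js",
    "sourceRoot": "",
    "sources": ["input.dart"],
    "names": [],
    "mappings": "A;;AAAA"
  }''';

  // No "sections" key, so parseJson returns a SingleMapping.
  var mapping = parse(jsonMap);

  // Ask which original location produced line 2, column 0 of out.js.
  print(mapping.spanFor(2, 0));
}
```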
| (...skipping 18 matching lines...) |
| 42 } | 43 } |
| 43 return new MultiSectionMapping.fromJson(map['sections'], otherMaps); | 44 return new MultiSectionMapping.fromJson(map['sections'], otherMaps); |
| 44 } | 45 } |
| 45 return new SingleMapping.fromJson(map); | 46 return new SingleMapping.fromJson(map); |
| 46 } | 47 } |
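The `sections` branch above corresponds to the "index map" flavor of the version-3 format, where each section offsets a sub-map into the target file. A sketch under that assumption (the URLs and the two part maps are hypothetical; a section given by `"url"` has to be resolvable through `otherMaps`):

```dart
import 'dart:convert';
import 'package:source_maps/source_maps.dart';

// part1MapJson / part2MapJson are the (hypothetical) contents of the two
// referenced map files, read or fetched elsewhere.
Mapping parseSectioned(String part1MapJson, String part2MapJson) {
  var sectionedMap = '''
  {
    "version": 3,
    "sections": [
      {"offset": {"line": 0, "column": 0}, "url": "part1.js.map"},
      {"offset": {"line": 100, "column": 0}, "url": "part2.js.map"}
    ]
  }''';

  // Each "url" section is looked up in [otherMaps], which maps the URL to
  // that map's already-decoded JSON.
  return parse(sectionedMap, otherMaps: {
    'part1.js.map': JSON.decode(part1MapJson),
    'part2.js.map': JSON.decode(part2MapJson)
  });
}
```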
| 47 | 48 |
| 48 | 49 |
| 49 /// A mapping parsed out of a source map. | 50 /// A mapping parsed out of a source map. |
| 50 abstract class Mapping { | 51 abstract class Mapping { |
| 51 /// Returns the span associated with [line] and [column]. | 52 /// Returns the span associated with [line] and [column]. |
| 52 /// | 53 SourceMapSpan spanFor(int line, int column, {Map<String, SourceFile> files}); |
| 53 /// The values of [files] can be either `source_map` [SourceFile]s or | |
| 54 /// `source_span` `SourceFile`s. Using `source_map` [SourceFile]s is | |
| 55 /// deprecated and will be unsupported in version 0.10.0. | |
| 56 Span spanFor(int line, int column, {Map<String, dynamic> files}); | |
| 57 | 54 |
| 58 /// Returns the span associated with [location]. | 55 /// Returns the span associated with [location]. |
| 59 /// | 56 SourceMapSpan spanForLocation(SourceLocation location, |
| 60 /// The values of [files] may be either `source_map` [SourceFile]s or | 57 {Map<String, SourceFile> files}) { |
| 61 /// `source_span` `SourceFile`s. Using `source_map` [SourceFile]s is | |
| 62 /// deprecated and will be unsupported in version 0.10.0. | |
| 63 Span spanForLocation(location, {Map<String, dynamic> files}) { | |
| 64 location = LocationWrapper.wrap(location); | |
| 65 return spanFor(location.line, location.column, files: files); | 58 return spanFor(location.line, location.column, files: files); |
| 66 } | 59 } |
| 67 } | 60 } |
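After this change `spanForLocation` takes a `source_span` `SourceLocation` directly instead of wrapping whatever it is given. A brief sketch, assuming `mapping` came from `parse()`; the offset passed to `SourceLocation` is a dummy value, only the line and column matter for the lookup:

```dart
import 'package:source_span/source_span.dart';
import 'package:source_maps/source_maps.dart';

void printOrigin(Mapping mapping) {
  // A position in the generated output (hypothetical URL).
  var loc = new SourceLocation(0,
      sourceUrl: 'out.js', line: 2, column: 0);
  print(mapping.spanForLocation(loc));
}
```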
| 68 | 61 |
| 69 /// A meta-level map containing sections. | 62 /// A meta-level map containing sections. |
| 70 class MultiSectionMapping extends Mapping { | 63 class MultiSectionMapping extends Mapping { |
| 71 /// For each section, the start line offset. | 64 /// For each section, the start line offset. |
| 72 final List<int> _lineStart = <int>[]; | 65 final List<int> _lineStart = <int>[]; |
| 73 | 66 |
| 74 /// For each section, the start column offset. | 67 /// For each section, the start column offset. |
| (...skipping 42 matching lines...) |
| 117 } | 110 } |
| 118 | 111 |
| 119 int _indexFor(line, column) { | 112 int _indexFor(line, column) { |
| 120 for (int i = 0; i < _lineStart.length; i++) { | 113 for (int i = 0; i < _lineStart.length; i++) { |
| 121 if (line < _lineStart[i]) return i - 1; | 114 if (line < _lineStart[i]) return i - 1; |
| 122 if (line == _lineStart[i] && column < _columnStart[i]) return i - 1; | 115 if (line == _lineStart[i] && column < _columnStart[i]) return i - 1; |
| 123 } | 116 } |
| 124 return _lineStart.length - 1; | 117 return _lineStart.length - 1; |
| 125 } | 118 } |
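`_indexFor` walks the per-section start offsets and returns the last section that begins at or before the requested position. A worked trace, assuming a hypothetical two-section map:

```dart
// Hypothetical section offsets:
//   _lineStart   = [0, 5]    // section 0 starts at line 0, section 1 at line 5
//   _columnStart = [0, 10]   // section 1 only takes over from column 10 of line 5
//
// _indexFor(3, 0)  == 0   // line 3 is before section 1 starts
// _indexFor(5, 4)  == 0   // same start line, but still left of column 10
// _indexFor(5, 12) == 1   // at or past section 1's start column
// _indexFor(9, 0)  == 1   // past the last start; falls out of the loop
```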
| 126 | 119 |
| 127 Span spanFor(int line, int column, {Map<String, dynamic> files}) { | 120 SourceMapSpan spanFor(int line, int column, {Map<String, SourceFile> files}) { |
| 128 int index = _indexFor(line, column); | 121 int index = _indexFor(line, column); |
| 129 return _maps[index].spanFor( | 122 return _maps[index].spanFor( |
| 130 line - _lineStart[index], column - _columnStart[index], files: files); | 123 line - _lineStart[index], column - _columnStart[index], files: files); |
| 131 } | 124 } |
| 132 | 125 |
| 133 String toString() { | 126 String toString() { |
| 134 var buff = new StringBuffer("$runtimeType : ["); | 127 var buff = new StringBuffer("$runtimeType : ["); |
| 135 for (int i = 0; i < _lineStart.length; i++) { | 128 for (int i = 0; i < _lineStart.length; i++) { |
| 136 buff..write('(') | 129 buff..write('(') |
| 137 ..write(_lineStart[i]) | 130 ..write(_lineStart[i]) |
| (...skipping 46 matching lines...) |
| 184 for (var sourceEntry in sourceEntries) { | 177 for (var sourceEntry in sourceEntries) { |
| 185 if (lineNum == null || sourceEntry.target.line > lineNum) { | 178 if (lineNum == null || sourceEntry.target.line > lineNum) { |
| 186 lineNum = sourceEntry.target.line; | 179 lineNum = sourceEntry.target.line; |
| 187 targetEntries = <TargetEntry>[]; | 180 targetEntries = <TargetEntry>[]; |
| 188 lines.add(new TargetLineEntry(lineNum, targetEntries)); | 181 lines.add(new TargetLineEntry(lineNum, targetEntries)); |
| 189 } | 182 } |
| 190 | 183 |
| 191 if (sourceEntry.source == null) { | 184 if (sourceEntry.source == null) { |
| 192 targetEntries.add(new TargetEntry(sourceEntry.target.column)); | 185 targetEntries.add(new TargetEntry(sourceEntry.target.column)); |
| 193 } else { | 186 } else { |
| 187 var sourceUrl = sourceEntry.source.sourceUrl; |
| 194 var urlId = urls.putIfAbsent( | 188 var urlId = urls.putIfAbsent( |
| 195 sourceEntry.source.sourceUrl, () => urls.length); | 189 sourceUrl == null ? '' : sourceUrl.toString(), () => urls.length); |
| 196 var srcNameId = sourceEntry.identifierName == null ? null : | 190 var srcNameId = sourceEntry.identifierName == null ? null : |
| 197 names.putIfAbsent(sourceEntry.identifierName, () => names.length); | 191 names.putIfAbsent(sourceEntry.identifierName, () => names.length); |
| 198 targetEntries.add(new TargetEntry( | 192 targetEntries.add(new TargetEntry( |
| 199 sourceEntry.target.column, | 193 sourceEntry.target.column, |
| 200 urlId, | 194 urlId, |
| 201 sourceEntry.source.line, | 195 sourceEntry.source.line, |
| 202 sourceEntry.source.column, | 196 sourceEntry.source.column, |
| 203 srcNameId)); | 197 srcNameId)); |
| 204 } | 198 } |
| 205 } | 199 } |
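The `putIfAbsent` calls above deduplicate source URLs and identifier names while assigning each distinct one the next index, and the NEW column additionally folds a `null` source URL into the empty string. The idiom in isolation (file names are arbitrary):

```dart
var urls = <String, int>{};
var a = urls.putIfAbsent('lib/a.dart', () => urls.length);     // 0
var b = urls.putIfAbsent('lib/b.dart', () => urls.length);     // 1
var again = urls.putIfAbsent('lib/a.dart', () => urls.length); // still 0

// Dart map literals preserve insertion order, so the key order stays
// consistent with the indices handed to each TargetEntry when the final
// url/name lists are materialized later (not shown in this hunk).
```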
| (...skipping 150 matching lines...) |
| 356 /// [lineEntry] corresponds to a line prior to [line], then the result will be | 350 /// [lineEntry] corresponds to a line prior to [line], then the result will be |
| 357 /// the very last entry on that line. | 351 /// the very last entry on that line. |
| 358 TargetEntry _findColumn(int line, int column, TargetLineEntry lineEntry) { | 352 TargetEntry _findColumn(int line, int column, TargetLineEntry lineEntry) { |
| 359 if (lineEntry == null || lineEntry.entries.length == 0) return null; | 353 if (lineEntry == null || lineEntry.entries.length == 0) return null; |
| 360 if (lineEntry.line != line) return lineEntry.entries.last; | 354 if (lineEntry.line != line) return lineEntry.entries.last; |
| 361 var entries = lineEntry.entries; | 355 var entries = lineEntry.entries; |
| 362 int index = binarySearch(entries, (e) => e.column > column); | 356 int index = binarySearch(entries, (e) => e.column > column); |
| 363 return (index <= 0) ? null : entries[index - 1]; | 357 return (index <= 0) ? null : entries[index - 1]; |
| 364 } | 358 } |
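`_findColumn` relies on `binarySearch` from `src/utils.dart`, which, judging by its use here, returns the first index whose element satisfies the predicate, or the list length when none does. A worked trace for entries at columns 0, 8, and 20 on the matching line:

```dart
// _findColumn(line, 5, lineEntry)  -> first entry with column > 5 is at
//                                     index 1, so the result is entries[0]
// _findColumn(line, 8, lineEntry)  -> index 2, result entries[1] (column 8)
// _findColumn(line, 30, lineEntry) -> nothing is > 30, index == 3,
//                                     result entries[2] (last on the line)
// _findColumn(line, -1, lineEntry) -> index 0, so the method returns null:
//                                     every entry starts past the request
```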
| 365 | 359 |
| 366 Span spanFor(int line, int column, {Map<String, dynamic> files}) { | 360 SourceMapSpan spanFor(int line, int column, {Map<String, SourceFile> files}) { |
| 367 var entry = _findColumn(line, column, _findLine(line)); | 361 var entry = _findColumn(line, column, _findLine(line)); |
| 368 if (entry == null || entry.sourceUrlId == null) return null; | 362 if (entry == null || entry.sourceUrlId == null) return null; |
| 369 var url = urls[entry.sourceUrlId]; | 363 var url = urls[entry.sourceUrlId]; |
| 370 if (sourceRoot != null) { | 364 if (sourceRoot != null) { |
| 371 url = '${sourceRoot}${url}'; | 365 url = '${sourceRoot}${url}'; |
| 372 } | 366 } |
| 373 if (files != null && files[url] != null) { | 367 if (files != null && files[url] != null) { |
| 374 var file = SourceFileWrapper.wrap(files[url]); | 368 var file = files[url]; |
| 375 var start = file.getOffset(entry.sourceLine, entry.sourceColumn); | 369 var start = file.getOffset(entry.sourceLine, entry.sourceColumn); |
| 376 if (entry.sourceNameId != null) { | 370 if (entry.sourceNameId != null) { |
| 377 var text = names[entry.sourceNameId]; | 371 var text = names[entry.sourceNameId]; |
| 378 return new FileSpan(files[url], start, start + text.length, true); | 372 return new SourceMapFileSpan( |
| 373 files[url].span(start, start + text.length), |
| 374 isIdentifier: true); |
| 379 } else { | 375 } else { |
| 380 return new FileSpan(files[url], start); | 376 return new SourceMapFileSpan(files[url].location(start).pointSpan()); |
| 381 } | 377 } |
| 382 } else { | 378 } else { |
| 379 var start = new SourceLocation(0, |
| 380 sourceUrl: url, line: entry.sourceLine, column: entry.sourceColumn); |
| 383 // Offset and other context is not available. | 381 // Offset and other context is not available. |
| 384 if (entry.sourceNameId != null) { | 382 if (entry.sourceNameId != null) { |
| 385 return new FixedSpan(url, 0, entry.sourceLine, entry.sourceColumn, | 383 return new SourceMapSpan.identifier(start, names[entry.sourceNameId]); |
| 386 text: names[entry.sourceNameId], isIdentifier: true); | |
| 387 } else { | 384 } else { |
| 388 return new FixedSpan(url, 0, entry.sourceLine, entry.sourceColumn); | 385 return new SourceMapSpan(start, start, ''); |
| 389 } | 386 } |
| 390 } | 387 } |
| 391 } | 388 } |
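In practice the two branches above mean: pass the original sources in `files` to get spans backed by real file contents, or omit it to get zero-length point spans carrying only the URL, line, and column. A sketch under those assumptions; the file name and contents are hypothetical, and the key must match the URL after `sourceRoot` is prepended:

```dart
import 'package:source_span/source_span.dart';
import 'package:source_maps/source_maps.dart';

void lookups(SingleMapping mapping, String inputDartContents) {
  // With the original file available, the result wraps a real FileSpan,
  // so the covered text and surrounding context are known.
  var withContext = mapping.spanFor(2, 0, files: {
    'input.dart': new SourceFile(inputDartContents, url: 'input.dart')
  });

  // Without it, the result is a zero-length span whose offset is 0; only
  // the URL, line, and column recovered from the map are meaningful.
  var withoutContext = mapping.spanFor(2, 0);

  print(withContext);
  print(withoutContext);
}
```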
| 392 | 389 |
| 393 String toString() { | 390 String toString() { |
| 394 return (new StringBuffer("$runtimeType : [") | 391 return (new StringBuffer("$runtimeType : [") |
| 395 ..write('targetUrl: ') | 392 ..write('targetUrl: ') |
| 396 ..write(targetUrl) | 393 ..write(targetUrl) |
| 397 ..write(', sourceRoot: ') | 394 ..write(', sourceRoot: ') |
| 398 ..write(sourceRoot) | 395 ..write(sourceRoot) |
| (...skipping 113 matching lines...) |
| 512 static const _TokenKind EOF = const _TokenKind(isEof: true); | 509 static const _TokenKind EOF = const _TokenKind(isEof: true); |
| 513 static const _TokenKind VALUE = const _TokenKind(); | 510 static const _TokenKind VALUE = const _TokenKind(); |
| 514 final bool isNewLine; | 511 final bool isNewLine; |
| 515 final bool isNewSegment; | 512 final bool isNewSegment; |
| 516 final bool isEof; | 513 final bool isEof; |
| 517 bool get isValue => !isNewLine && !isNewSegment && !isEof; | 514 bool get isValue => !isNewLine && !isNewSegment && !isEof; |
| 518 | 515 |
| 519 const _TokenKind( | 516 const _TokenKind( |
| 520 {this.isNewLine: false, this.isNewSegment: false, this.isEof: false}); | 517 {this.isNewLine: false, this.isNewSegment: false, this.isEof: false}); |
| 521 } | 518 } |
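`_TokenKind` is how the mappings decoder labels what it is currently looking at: `;` starts a new target line, `,` starts a new segment within the line, the end of the string is EOF, and anything else is part of a base64 VLQ value. A stand-alone sketch of that classification (this helper is illustrative, not the package's internal consumer):

```dart
String classify(String mappings, int index) {
  if (index >= mappings.length) return 'EOF';
  switch (mappings[index]) {
    case ';': return 'new line';     // next group of segments / target line
    case ',': return 'new segment';  // next segment on the same target line
    default:  return 'value';        // a base64 VLQ digit
  }
}

void main() {
  var mappings = 'AAAA,IAAM;;ACDC';
  print(classify(mappings, 0));                // value
  print(classify(mappings, 4));                // new segment
  print(classify(mappings, 9));                // new line
  print(classify(mappings, mappings.length));  // EOF
}
```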