OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 library analyzer.src.generated.incremental_resolver; | 5 library analyzer.src.generated.incremental_resolver; |
6 | 6 |
7 import 'dart:collection'; | 7 import 'dart:collection'; |
8 import 'dart:math' as math; | 8 import 'dart:math' as math; |
9 | 9 |
10 import 'package:analyzer/dart/ast/ast.dart'; | 10 import 'package:analyzer/dart/ast/ast.dart'; |
(...skipping 551 matching lines...)
562 _findLastDifferentToken(_oldUnit.endToken, newUnit.endToken); | 562 _findLastDifferentToken(_oldUnit.endToken, newUnit.endToken); |
563 if (firstPair != null && lastPair != null) { | 563 if (firstPair != null && lastPair != null) { |
564 int firstOffsetOld = firstPair.oldToken.offset; | 564 int firstOffsetOld = firstPair.oldToken.offset; |
565 int firstOffsetNew = firstPair.newToken.offset; | 565 int firstOffsetNew = firstPair.newToken.offset; |
566 int lastOffsetOld = lastPair.oldToken.end; | 566 int lastOffsetOld = lastPair.oldToken.end; |
567 int lastOffsetNew = lastPair.newToken.end; | 567 int lastOffsetNew = lastPair.newToken.end; |
568 int beginOffsetOld = math.min(firstOffsetOld, lastOffsetOld); | 568 int beginOffsetOld = math.min(firstOffsetOld, lastOffsetOld); |
569 int endOffsetOld = math.max(firstOffsetOld, lastOffsetOld); | 569 int endOffsetOld = math.max(firstOffsetOld, lastOffsetOld); |
570 int beginOffsetNew = math.min(firstOffsetNew, lastOffsetNew); | 570 int beginOffsetNew = math.min(firstOffsetNew, lastOffsetNew); |
571 int endOffsetNew = math.max(firstOffsetNew, lastOffsetNew); | 571 int endOffsetNew = math.max(firstOffsetNew, lastOffsetNew); |
572 // check for a whitespace only change | 572 // A pure whitespace change. |
573 if (identical(lastPair.oldToken, firstPair.oldToken) && | 573 if (identical(firstPair.oldToken, lastPair.oldToken) && |
574 identical(lastPair.newToken, firstPair.newToken)) { | 574 identical(firstPair.newToken, lastPair.newToken) && |
| 575 firstPair.kind == _TokenDifferenceKind.OFFSET) { |
575 _updateOffset = beginOffsetOld - 1; | 576 _updateOffset = beginOffsetOld - 1; |
576 _updateEndOld = endOffsetOld; | 577 _updateEndOld = endOffsetOld; |
577 _updateEndNew = endOffsetNew; | 578 _updateEndNew = endOffsetNew; |
578 _updateDelta = newUnit.length - _oldUnit.length; | 579 _updateDelta = newUnit.length - _oldUnit.length; |
579 // A Dart documentation comment change. | 580 logger.log('Whitespace change.'); |
580 if (firstPair.kind == _TokenDifferenceKind.COMMENT_DOC) { | 581 _shiftTokens(firstPair.oldToken, true); |
581 bool success = _resolveCommentDoc(newUnit, firstPair); | 582 IncrementalResolver incrementalResolver = new IncrementalResolver( |
| 583 _cache, |
| 584 _sourceEntry, |
| 585 _unitEntry, |
| 586 _unitElement, |
| 587 _updateOffset, |
| 588 _updateEndOld, |
| 589 _updateEndNew); |
| 590 incrementalResolver._updateCache(); |
| 591 incrementalResolver._updateElementNameOffsets(); |
| 592 incrementalResolver._shiftEntryErrors(); |
| 593 _updateEntry(); |
| 594 logger.log('Success.'); |
| 595 return true; |
| 596 } |
| 597 // A Dart documentation comment change. |
| 598 { |
| 599 Token firstOldToken = firstPair.oldToken; |
| 600 Token firstNewToken = firstPair.newToken; |
| 601 Token lastOldToken = lastPair.oldToken; |
| 602 Token lastNewToken = lastPair.newToken; |
| 603 if (firstOldToken is DocumentationCommentToken && |
| 604 firstNewToken is DocumentationCommentToken && |
| 605 lastOldToken is DocumentationCommentToken && |
| 606 lastNewToken is DocumentationCommentToken && |
| 607 identical(firstOldToken.parent, lastOldToken.parent) && |
| 608 identical(firstNewToken.parent, lastNewToken.parent)) { |
| 609 _updateOffset = beginOffsetOld; |
| 610 _updateEndOld = firstOldToken.parent.offset; |
| 611 _updateEndNew = firstNewToken.parent.offset; |
| 612 _updateDelta = newUnit.length - _oldUnit.length; |
| 613 bool success = |
| 614 _resolveCommentDoc(newUnit, firstOldToken, firstNewToken); |
582 logger.log('Documentation comment resolved: $success'); | 615 logger.log('Documentation comment resolved: $success'); |
583 return success; | 616 return success; |
584 } | 617 } |
585 // A pure whitespace change. | |
586 if (firstPair.kind == _TokenDifferenceKind.OFFSET) { | |
587 logger.log('Whitespace change.'); | |
588 _shiftTokens(firstPair.oldToken); | |
589 { | |
590 IncrementalResolver incrementalResolver = new IncrementalResolver( | |
591 _cache, | |
592 _sourceEntry, | |
593 _unitEntry, | |
594 _unitElement, | |
595 _updateOffset, | |
596 _updateEndOld, | |
597 _updateEndNew); | |
598 incrementalResolver._updateCache(); | |
599 incrementalResolver._updateElementNameOffsets(); | |
600 incrementalResolver._shiftEntryErrors(); | |
601 } | |
602 _updateEntry(); | |
603 logger.log('Success.'); | |
604 return true; | |
605 } | |
606 // fall-through, end-of-line comment | |
607 } | 618 } |
608 // Find nodes covering the "old" and "new" token ranges. | 619 // Find nodes covering the "old" and "new" token ranges. |
609 AstNode oldNode = | 620 AstNode oldNode = |
610 _findNodeCovering(_oldUnit, beginOffsetOld, endOffsetOld - 1); | 621 _findNodeCovering(_oldUnit, beginOffsetOld, endOffsetOld - 1); |
611 AstNode newNode = | 622 AstNode newNode = |
612 _findNodeCovering(newUnit, beginOffsetNew, endOffsetNew - 1); | 623 _findNodeCovering(newUnit, beginOffsetNew, endOffsetNew - 1); |
613 logger.log(() => 'oldNode: $oldNode'); | 624 logger.log(() => 'oldNode: $oldNode'); |
614 logger.log(() => 'newNode: $newNode'); | 625 logger.log(() => 'newNode: $newNode'); |
615 // Try to find the smallest common node, a FunctionBody currently. | 626 // Try to find the smallest common node, a FunctionBody currently. |
616 { | 627 { |
(...skipping 53 matching lines...)
670 } else { | 681 } else { |
671 logger.log('Failure: old and new parent mismatch' | 682 logger.log('Failure: old and new parent mismatch' |
672 ' ${oldParent.runtimeType} vs. ${newParent.runtimeType}'); | 683 ' ${oldParent.runtimeType} vs. ${newParent.runtimeType}'); |
673 return false; | 684 return false; |
674 } | 685 } |
675 } | 686 } |
676 if (!found) { | 687 if (!found) { |
677 logger.log('Failure: no enclosing function body or executable.'); | 688 logger.log('Failure: no enclosing function body or executable.'); |
678 return false; | 689 return false; |
679 } | 690 } |
680 // fail if a comment change outside the bodies | |
681 if (firstPair.kind == _TokenDifferenceKind.COMMENT) { | |
682 if (beginOffsetOld <= oldNode.offset || | |
683 beginOffsetNew <= newNode.offset) { | |
684 logger.log('Failure: comment outside a function body.'); | |
685 return false; | |
686 } | |
687 } | |
688 } | 691 } |
689 logger.log(() => 'oldNode: $oldNode'); | 692 logger.log(() => 'oldNode: $oldNode'); |
690 logger.log(() => 'newNode: $newNode'); | 693 logger.log(() => 'newNode: $newNode'); |
691 // prepare update range | 694 // prepare update range |
692 _updateOffset = oldNode.offset; | 695 _updateOffset = oldNode.offset; |
693 _updateEndOld = oldNode.end; | 696 _updateEndOld = oldNode.end; |
694 _updateEndNew = newNode.end; | 697 _updateEndNew = newNode.end; |
695 _updateDelta = _updateEndNew - _updateEndOld; | 698 _updateDelta = _updateEndNew - _updateEndOld; |
696 // replace node | 699 // replace node |
697 NodeReplacer.replace(oldNode, newNode); | 700 NodeReplacer.replace(oldNode, newNode); |
(...skipping 51 matching lines...)
749 return unit; | 752 return unit; |
750 } finally { | 753 } finally { |
751 timer.stop('parse'); | 754 timer.stop('parse'); |
752 } | 755 } |
753 } | 756 } |
754 | 757 |
755 /** | 758 /** |
756 * Attempts to resolve a documentation comment change. | 759 * Attempts to resolve a documentation comment change. |
757 * Returns `true` on success. | 760 * Returns `true` on success. |
758 */ | 761 */ |
759 bool _resolveCommentDoc(CompilationUnit newUnit, _TokenPair firstPair) { | 762 bool _resolveCommentDoc( |
760 Token oldToken = firstPair.oldToken; | 763 CompilationUnit newUnit, CommentToken oldToken, CommentToken newToken) { |
761 Token newToken = firstPair.newToken; | 764 if (oldToken == null || newToken == null) { |
762 CommentToken oldComments = oldToken.precedingComments; | |
763 CommentToken newComments = newToken.precedingComments; | |
764 if (oldComments == null || newComments == null) { | |
765 return false; | 765 return false; |
766 } | 766 } |
767 // find nodes | 767 // find nodes |
768 int offset = oldComments.offset; | 768 int offset = oldToken.offset; |
769 logger.log('offset: $offset'); | 769 logger.log('offset: $offset'); |
770 AstNode oldNode = _findNodeCovering(_oldUnit, offset, offset); | 770 AstNode oldNode = _findNodeCovering(_oldUnit, offset, offset); |
771 AstNode newNode = _findNodeCovering(newUnit, offset, offset); | 771 AstNode newNode = _findNodeCovering(newUnit, offset, offset); |
772 if (oldNode is! Comment || newNode is! Comment) { | 772 if (oldNode is! Comment || newNode is! Comment) { |
773 return false; | 773 return false; |
774 } | 774 } |
775 Comment oldComment = oldNode; | 775 Comment oldComment = oldNode; |
776 Comment newComment = newNode; | 776 Comment newComment = newNode; |
777 logger.log('oldComment.beginToken: ${oldComment.beginToken}'); | 777 logger.log('oldComment.beginToken: ${oldComment.beginToken}'); |
778 logger.log('newComment.beginToken: ${newComment.beginToken}'); | 778 logger.log('newComment.beginToken: ${newComment.beginToken}'); |
779 _updateOffset = oldToken.offset - 1; | |
780 // update token references | 779 // update token references |
781 _shiftTokens(firstPair.oldToken); | 780 _shiftTokens(oldToken.parent); |
782 _setPrecedingComments(oldToken, newComment.tokens.first); | 781 _setPrecedingComments(oldToken.parent, newComment.tokens.first); |
783 // replace node | 782 // replace node |
784 NodeReplacer.replace(oldComment, newComment); | 783 NodeReplacer.replace(oldComment, newComment); |
785 // update elements | 784 // update elements |
786 IncrementalResolver incrementalResolver = new IncrementalResolver( | 785 IncrementalResolver incrementalResolver = new IncrementalResolver( |
787 _cache, | 786 _cache, |
788 _sourceEntry, | 787 _sourceEntry, |
789 _unitEntry, | 788 _unitEntry, |
790 _unitElement, | 789 _unitElement, |
791 _updateOffset, | 790 _updateOffset, |
792 _updateEndOld, | 791 _updateEndOld, |
(...skipping 48 matching lines...)
841 } else if (token is StringTokenWithComment) { | 840 } else if (token is StringTokenWithComment) { |
842 token.precedingComments = comment; | 841 token.precedingComments = comment; |
843 } else if (token is TokenWithComment) { | 842 } else if (token is TokenWithComment) { |
844 token.precedingComments = comment; | 843 token.precedingComments = comment; |
845 } else { | 844 } else { |
846 Type parentType = token?.runtimeType; | 845 Type parentType = token?.runtimeType; |
847 throw new AnalysisException('Unknown parent token type: $parentType'); | 846 throw new AnalysisException('Unknown parent token type: $parentType'); |
848 } | 847 } |
849 } | 848 } |
850 | 849 |
851 void _shiftTokens(Token token) { | 850 void _shiftTokens(Token token, [bool goUpComment = false]) { |
852 while (token != null) { | 851 while (token != null) { |
| 852 if (goUpComment && token is CommentToken) { |
| 853 token = (token as CommentToken).parent; |
| 854 } |
853 if (token.offset > _updateOffset) { | 855 if (token.offset > _updateOffset) { |
854 token.offset += _updateDelta; | 856 token.offset += _updateDelta; |
855 } | 857 } |
856 // comments | 858 // comments |
857 _shiftTokens(token.precedingComments); | 859 _shiftTokens(token.precedingComments); |
858 if (token is DocumentationCommentToken) { | 860 if (token is DocumentationCommentToken) { |
859 for (Token reference in token.references) { | 861 for (Token reference in token.references) { |
860 _shiftTokens(reference); | 862 _shiftTokens(reference); |
861 } | 863 } |
862 } | 864 } |
(...skipping 23 matching lines...)
886 */ | 888 */ |
887 static bool _areCurlyBracketsBalanced(Token token) { | 889 static bool _areCurlyBracketsBalanced(Token token) { |
888 int numOpen = _getTokenCount(token, TokenType.OPEN_CURLY_BRACKET); | 890 int numOpen = _getTokenCount(token, TokenType.OPEN_CURLY_BRACKET); |
889 int numOpen2 = | 891 int numOpen2 = |
890 _getTokenCount(token, TokenType.STRING_INTERPOLATION_EXPRESSION); | 892 _getTokenCount(token, TokenType.STRING_INTERPOLATION_EXPRESSION); |
891 int numClosed = _getTokenCount(token, TokenType.CLOSE_CURLY_BRACKET); | 893 int numClosed = _getTokenCount(token, TokenType.CLOSE_CURLY_BRACKET); |
892 return numOpen + numOpen2 == numClosed; | 894 return numOpen + numOpen2 == numClosed; |
893 } | 895 } |
894 | 896 |
895 static _TokenDifferenceKind _compareToken( | 897 static _TokenDifferenceKind _compareToken( |
896 Token oldToken, Token newToken, int delta, bool forComment) { | 898 Token oldToken, Token newToken, int delta) { |
897 while (true) { | 899 if (oldToken == null && newToken == null) { |
898 if (oldToken == null && newToken == null) { | 900 return null; |
899 return null; | 901 } |
900 } | 902 if (oldToken == null || newToken == null) { |
901 if (oldToken == null || newToken == null) { | 903 return _TokenDifferenceKind.CONTENT; |
902 return _TokenDifferenceKind.CONTENT; | 904 } |
903 } | 905 if (oldToken.type != newToken.type) { |
904 if (oldToken.type != newToken.type) { | 906 return _TokenDifferenceKind.CONTENT; |
905 return _TokenDifferenceKind.CONTENT; | 907 } |
906 } | 908 if (oldToken.lexeme != newToken.lexeme) { |
907 if (oldToken.lexeme != newToken.lexeme) { | 909 return _TokenDifferenceKind.CONTENT; |
908 return _TokenDifferenceKind.CONTENT; | 910 } |
909 } | 911 if (newToken.offset - oldToken.offset != delta) { |
910 if (newToken.offset - oldToken.offset != delta) { | 912 return _TokenDifferenceKind.OFFSET; |
911 return _TokenDifferenceKind.OFFSET; | |
912 } | |
913 // continue if comment tokens are being checked | |
914 if (!forComment) { | |
915 break; | |
916 } | |
917 oldToken = oldToken.next; | |
918 newToken = newToken.next; | |
919 } | 913 } |
920 return null; | 914 return null; |
921 } | 915 } |
922 | 916 |
923 static _TokenPair _findFirstDifferentToken(Token oldToken, Token newToken) { | 917 static _TokenPair _findFirstDifferentToken(Token oldToken, Token newToken) { |
924 while (oldToken.type != TokenType.EOF || newToken.type != TokenType.EOF) { | 918 while (oldToken.type != TokenType.EOF || newToken.type != TokenType.EOF) { |
925 if (oldToken.type == TokenType.EOF || newToken.type == TokenType.EOF) { | 919 if (oldToken.type == TokenType.EOF || newToken.type == TokenType.EOF) { |
926 return new _TokenPair(_TokenDifferenceKind.CONTENT, oldToken, newToken); | 920 return new _TokenPair(_TokenDifferenceKind.CONTENT, oldToken, newToken); |
927 } | 921 } |
928 // compare comments | 922 // compare comments |
929 { | 923 { |
930 Token oldComment = oldToken.precedingComments; | 924 Token oldComment = oldToken.precedingComments; |
931 Token newComment = newToken.precedingComments; | 925 Token newComment = newToken.precedingComments; |
932 if (_compareToken(oldComment, newComment, 0, true) != null) { | 926 while (true) { |
933 _TokenDifferenceKind diffKind = _TokenDifferenceKind.COMMENT; | 927 _TokenDifferenceKind diffKind = |
934 if (oldComment is DocumentationCommentToken && | 928 _compareToken(oldComment, newComment, 0); |
935 newComment is DocumentationCommentToken) { | 929 if (diffKind != null) { |
936 diffKind = _TokenDifferenceKind.COMMENT_DOC; | 930 return new _TokenPair( |
| 931 diffKind, oldComment ?? oldToken, newComment ?? newToken); |
937 } | 932 } |
938 return new _TokenPair(diffKind, oldToken, newToken); | 933 if (oldComment == null && newComment == null) { |
| 934 break; |
| 935 } |
| 936 oldComment = oldComment.next; |
| 937 newComment = newComment.next; |
939 } | 938 } |
940 } | 939 } |
941 // compare tokens | 940 // compare tokens |
942 _TokenDifferenceKind diffKind = | 941 _TokenDifferenceKind diffKind = _compareToken(oldToken, newToken, 0); |
943 _compareToken(oldToken, newToken, 0, false); | |
944 if (diffKind != null) { | 942 if (diffKind != null) { |
945 return new _TokenPair(diffKind, oldToken, newToken); | 943 return new _TokenPair(diffKind, oldToken, newToken); |
946 } | 944 } |
947 // next tokens | 945 // next tokens |
948 oldToken = oldToken.next; | 946 oldToken = oldToken.next; |
949 newToken = newToken.next; | 947 newToken = newToken.next; |
950 } | 948 } |
951 // no difference | 949 // no difference |
952 return null; | 950 return null; |
953 } | 951 } |
954 | 952 |
955 static _TokenPair _findLastDifferentToken(Token oldToken, Token newToken) { | 953 static _TokenPair _findLastDifferentToken(Token oldToken, Token newToken) { |
956 int delta = newToken.offset - oldToken.offset; | 954 int delta = newToken.offset - oldToken.offset; |
| 955 Token prevOldToken; |
| 956 Token prevNewToken; |
957 while (oldToken.previous != oldToken && newToken.previous != newToken) { | 957 while (oldToken.previous != oldToken && newToken.previous != newToken) { |
958 // compare tokens | 958 // compare tokens |
959 _TokenDifferenceKind diffKind = | 959 _TokenDifferenceKind diffKind = _compareToken(oldToken, newToken, delta); |
960 _compareToken(oldToken, newToken, delta, false); | |
961 if (diffKind != null) { | 960 if (diffKind != null) { |
962 return new _TokenPair(diffKind, oldToken.next, newToken.next); | 961 return new _TokenPair(diffKind, prevOldToken, prevNewToken); |
963 } | 962 } |
| 963 prevOldToken = oldToken; |
| 964 prevNewToken = newToken; |
964 // compare comments | 965 // compare comments |
965 { | 966 { |
966 Token oldComment = oldToken.precedingComments; | 967 Token oldComment = oldToken.precedingComments; |
967 Token newComment = newToken.precedingComments; | 968 Token newComment = newToken.precedingComments; |
968 if (_compareToken(oldComment, newComment, delta, true) != null) { | 969 while (oldComment?.next != null) { |
969 _TokenDifferenceKind diffKind = _TokenDifferenceKind.COMMENT; | 970 oldComment = oldComment.next; |
970 if (oldComment is DocumentationCommentToken && | 971 } |
971 newComment is DocumentationCommentToken) { | 972 while (newComment?.next != null) { |
972 diffKind = _TokenDifferenceKind.COMMENT_DOC; | 973 newComment = newComment.next; |
| 974 } |
| 975 while (true) { |
| 976 _TokenDifferenceKind diffKind = |
| 977 _compareToken(oldComment, newComment, delta); |
| 978 if (diffKind != null) { |
| 979 return new _TokenPair( |
| 980 diffKind, oldComment ?? oldToken, newComment ?? newToken); |
973 } | 981 } |
974 return new _TokenPair(diffKind, oldToken, newToken); | 982 if (oldComment == null && newComment == null) { |
| 983 break; |
| 984 } |
| 985 prevOldToken = oldComment; |
| 986 prevNewToken = newComment; |
| 987 oldComment = oldComment.previous; |
| 988 newComment = newComment.previous; |
975 } | 989 } |
976 } | 990 } |
977 // next tokens | 991 // next tokens |
978 oldToken = oldToken.previous; | 992 oldToken = oldToken.previous; |
979 newToken = newToken.previous; | 993 newToken = newToken.previous; |
980 } | 994 } |
981 return null; | 995 return null; |
982 } | 996 } |
983 | 997 |
984 static AstNode _findNodeCovering(AstNode root, int offset, int end) { | 998 static AstNode _findNodeCovering(AstNode root, int offset, int end) { |
(...skipping 298 matching lines...)
1283 } | 1297 } |
1284 token = token.next; | 1298 token = token.next; |
1285 } | 1299 } |
1286 } | 1300 } |
1287 } | 1301 } |
1288 | 1302 |
1289 /** | 1303 /** |
1290 * Describes how two [Token]s are different. | 1304 * Describes how two [Token]s are different. |
1291 */ | 1305 */ |
1292 class _TokenDifferenceKind { | 1306 class _TokenDifferenceKind { |
1293 static const COMMENT = const _TokenDifferenceKind('COMMENT'); | |
1294 static const COMMENT_DOC = const _TokenDifferenceKind('COMMENT_DOC'); | |
1295 static const CONTENT = const _TokenDifferenceKind('CONTENT'); | 1307 static const CONTENT = const _TokenDifferenceKind('CONTENT'); |
1296 static const OFFSET = const _TokenDifferenceKind('OFFSET'); | 1308 static const OFFSET = const _TokenDifferenceKind('OFFSET'); |
1297 | 1309 |
1298 final String name; | 1310 final String name; |
1299 | 1311 |
1300 const _TokenDifferenceKind(this.name); | 1312 const _TokenDifferenceKind(this.name); |
1301 | 1313 |
1302 @override | 1314 @override |
1303 String toString() => name; | 1315 String toString() => name; |
1304 } | 1316 } |
1305 | 1317 |
1306 class _TokenPair { | 1318 class _TokenPair { |
1307 final _TokenDifferenceKind kind; | 1319 final _TokenDifferenceKind kind; |
1308 final Token oldToken; | 1320 final Token oldToken; |
1309 final Token newToken; | 1321 final Token newToken; |
1310 _TokenPair(this.kind, this.oldToken, this.newToken); | 1322 _TokenPair(this.kind, this.oldToken, this.newToken); |
1311 } | 1323 } |
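For reference, a minimal standalone sketch of the comparison that the reworked _compareToken performs in the NEW column: report a CONTENT difference when the token type or lexeme differs, an OFFSET difference when only the position shifts by something other than the expected delta, and no difference otherwise. SimpleToken is a hypothetical stand-in for the analyzer's Token class, and the kinds are returned as plain strings instead of _TokenDifferenceKind constants; this is an illustration of the approach, not the analyzer's real API.

// Simplified stand-in for the analyzer's Token: just enough state to compare.
class SimpleToken {
  final String type;
  final String lexeme;
  final int offset;
  SimpleToken(this.type, this.lexeme, this.offset);
}

// Compare one pair of tokens from the old and new streams.
// Returns 'CONTENT' when type or lexeme differ (or one stream ended early),
// 'OFFSET' when only the position shifted by something other than [delta],
// and null when the tokens match.
String? compareToken(SimpleToken? oldToken, SimpleToken? newToken, int delta) {
  if (oldToken == null && newToken == null) return null; // both streams ended
  if (oldToken == null || newToken == null) return 'CONTENT';
  if (oldToken.type != newToken.type) return 'CONTENT';
  if (oldToken.lexeme != newToken.lexeme) return 'CONTENT';
  if (newToken.offset - oldToken.offset != delta) return 'OFFSET';
  return null; // tokens match
}

void main() {
  // "x + 1" edited to "x  + 1": same tokens, the '+' only moved by one column.
  final oldTokens = [SimpleToken('id', 'x', 0), SimpleToken('op', '+', 2)];
  final newTokens = [SimpleToken('id', 'x', 0), SimpleToken('op', '+', 3)];
  for (var i = 0; i < oldTokens.length; i++) {
    final kind = compareToken(oldTokens[i], newTokens[i], 0);
    if (kind != null) {
      print('first difference at token $i: $kind'); // prints: ... OFFSET
      break;
    }
  }
}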