Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 //===- subzero/src/IceCfgNode.cpp - Basic block (node) implementation -----===// | 1 //===- subzero/src/IceCfgNode.cpp - Basic block (node) implementation -----===// |
| 2 // | 2 // |
| 3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
| 4 // | 4 // |
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
| 7 // | 7 // |
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
| 9 // | 9 // |
| 10 // This file implements the CfgNode class, including the complexities | 10 // This file implements the CfgNode class, including the complexities |
| (...skipping 816 matching lines...) | |
| 827 Str << ","; | 827 Str << ","; |
| 828 Var->emit(Func); | 828 Var->emit(Func); |
| 829 First = false; | 829 First = false; |
| 830 } | 830 } |
| 831 } | 831 } |
| 832 } | 832 } |
| 833 } | 833 } |
| 834 } | 834 } |
| 835 | 835 |
| 836 void updateStats(Cfg *Func, const Inst *I) { | 836 void updateStats(Cfg *Func, const Inst *I) { |
| 837 if (!ALLOW_DUMP) | |
| 838 return; | |
| 837 // Update emitted instruction count, plus fill/spill count for | 839 // Update emitted instruction count, plus fill/spill count for |
| 838 // Variable operands without a physical register. | 840 // Variable operands without a physical register. |
| 839 if (uint32_t Count = I->getEmitInstCount()) { | 841 if (uint32_t Count = I->getEmitInstCount()) { |
| 840 Func->getContext()->statsUpdateEmitted(Count); | 842 Func->getContext()->statsUpdateEmitted(Count); |
| 841 if (Variable *Dest = I->getDest()) { | 843 if (Variable *Dest = I->getDest()) { |
| 842 if (!Dest->hasReg()) | 844 if (!Dest->hasReg()) |
| 843 Func->getContext()->statsUpdateFills(); | 845 Func->getContext()->statsUpdateFills(); |
| 844 } | 846 } |
| 845 for (SizeT S = 0; S < I->getSrcSize(); ++S) { | 847 for (SizeT S = 0; S < I->getSrcSize(); ++S) { |
| 846 if (Variable *Src = llvm::dyn_cast<Variable>(I->getSrc(S))) { | 848 if (Variable *Src = llvm::dyn_cast<Variable>(I->getSrc(S))) { |
| (...skipping 36 matching lines...) | |
| 883 } | 885 } |
| 884 I.emit(Func); | 886 I.emit(Func); |
| 885 if (DecorateAsm) | 887 if (DecorateAsm) |
| 886 emitLiveRangesEnded(Str, Func, &I, LiveRegCount); | 888 emitLiveRangesEnded(Str, Func, &I, LiveRegCount); |
| 887 Str << "\n"; | 889 Str << "\n"; |
| 888 updateStats(Func, &I); | 890 updateStats(Func, &I); |
| 889 } | 891 } |
| 890 if (DecorateAsm) | 892 if (DecorateAsm) |
| 891 emitRegisterUsage(Str, Func, this, false, LiveRegCount); | 893 emitRegisterUsage(Str, Func, this, false, LiveRegCount); |
| 892 } | 894 } |
| 893 | 895 |
jvoung (off chromium), 2015/02/19 21:01:47:
could put in an anonymous namespace

Jim Stichnoth, 2015/02/19 23:17:39:
Done.
| 896 // Helper class for emitIAS(). | |
| 897 class EmitHelper { | |
jvoung (off chromium), 2015/02/19 21:01:47:
Perhaps have bundle/bundling in the name somewhere

Jim Stichnoth, 2015/02/19 23:17:39:
Done.
| 898 EmitHelper() = delete; | |
| 899 EmitHelper(const EmitHelper &) = delete; | |
| 900 EmitHelper &operator=(const EmitHelper &) = delete; | |
| 901 | |
| 902 public: | |
| 903 EmitHelper(Assembler *Asm, const InstList &Insts) | |
| 904 : Asm(Asm), End(Insts.end()), BundleLockStart(End), | |
| 905 BundleSize(1 << Asm->getBundleAlignLog2Bytes()), | |
| 906 BundleMaskLo(BundleSize - 1), BundleMaskHi(~BundleMaskLo), | |
| 907 SizeSnapshotPre(0), SizeSnapshotPost(0) {} | |
| 908 // Check whether we're currently within a bundle_lock region. | |
| 909 bool isInBundleLockRegion() const { return BundleLockStart != End; } | |
| 910 // Check whether the current bundle_lock region has the align_to_end | |
| 911 // option. | |
| 912 bool isAlignToEnd() const { | |
| 913 assert(isInBundleLockRegion()); | |
| 914 return llvm::cast<InstBundleLock>(getBundleLockStart())->getOption() == | |
| 915 InstBundleLock::Opt_AlignToEnd; | |
| 916 } | |
| 917 // Check whether the entire bundle_lock region falls within the same | |
| 918 // bundle. | |
| 919 bool isSameBundle() const { | |
| 920 assert(isInBundleLockRegion()); | |
| 921 return SizeSnapshotPre == SizeSnapshotPost || | |
| 922 (SizeSnapshotPre & BundleMaskHi) == | |
| 923 ((SizeSnapshotPost - 1) & BundleMaskHi); | |
| 924 } | |
| 925 // Get the bundle alignment of the first instruction of the | |
| 926 // bundle_lock region. | |
| 927 intptr_t getPreAlignment() const { | |
| 928 assert(isInBundleLockRegion()); | |
| 929 return SizeSnapshotPre & BundleMaskLo; | |
| 930 } | |
| 931 // Get the bundle alignment of the first instruction past the | |
| 932 // bundle_lock region. | |
| 933 intptr_t getPostAlignment() const { | |
| 934 assert(isInBundleLockRegion()); | |
| 935 return SizeSnapshotPost & BundleMaskLo; | |
| 936 } | |
| 937 // Get the iterator pointing to the bundle_lock instruction, e.g. to | |
| 938 // roll back the instruction iteration to that point. | |
| 939 InstList::const_iterator getBundleLockStart() const { | |
| 940 assert(isInBundleLockRegion()); | |
| 941 return BundleLockStart; | |
| 942 } | |
| 943 // Set up bookkeeping when the bundle_lock instruction is first | |
| 944 // processed. | |
| 945 void enterBundleLock(InstList::const_iterator I) { | |
| 946 assert(!isInBundleLockRegion()); | |
| 947 BundleLockStart = I; | |
| 948 SizeSnapshotPre = Asm->getBufferSize(); | |
| 949 Asm->setPreliminary(true); | |
| 950 assert(isInBundleLockRegion()); | |
| 951 } | |
| 952 // Update bookkeeping when the bundle_unlock instlist is processed. | |
jvoung (off chromium), 2015/02/19 21:01:47:
instlist -> instruction

Jim Stichnoth, 2015/02/19 23:17:39:
Done.
| 953 void enterBundleUnlock() { | |
| 954 assert(isInBundleLockRegion()); | |
| 955 SizeSnapshotPost = Asm->getBufferSize(); | |
| 956 } | |
| 957 // Update bookkeeping when we are completely finished with the | |
| 958 // bundle_lock region. | |
| 959 void leaveBundleLockRegion() { BundleLockStart = End; } | |
| 960 // Check whether the instruction sequence fits within the current | |
| 961 // bundle, and if not, add nop padding to the end of the current | |
| 962 // bundle. | |
| 963 void padToNextBundle() { | |
| 964 assert(isInBundleLockRegion()); | |
| 965 if (!isSameBundle()) { | |
| 966 intptr_t PadToNextBundle = BundleSize - getPreAlignment(); | |
| 967 Asm->padWithNop(PadToNextBundle); | |
| 968 SizeSnapshotPre += PadToNextBundle; | |
| 969 SizeSnapshotPost += PadToNextBundle; | |
| 970 assert((Asm->getBufferSize() & BundleMaskLo) == 0); | |
| 971 assert(Asm->getBufferSize() == SizeSnapshotPre); | |
| 972 } | |
| 973 } | |
| 974 // If align_to_end is specified, add padding such that the | |
| 975 // instruction sequence ends precisely at a bundle boundary. | |
| 976 void padForAlignToEnd() { | |
| 977 assert(isInBundleLockRegion()); | |
| 978 if (isAlignToEnd()) { | |
| 979 if (intptr_t Offset = getPostAlignment()) { | |
| 980 Asm->padWithNop(BundleSize - Offset); | |
| 981 SizeSnapshotPre = Asm->getBufferSize(); | |
| 982 } | |
| 983 } | |
| 984 } | |
| 985 // Update bookkeeping when rolling back for the second pass. | |
| 986 void rollback() { | |
| 987 assert(isInBundleLockRegion()); | |
| 988 Asm->setBufferSize(SizeSnapshotPre); | |
| 989 Asm->setPreliminary(false); | |
| 990 } | |
| 991 | |
| 992 private: | |
| 993 Assembler *const Asm; | |
| 994 // End is a sentinel value such that BundleLockStart==End implies | |
| 995 // that we are not in a bundle_lock region. | |
| 996 const InstList::const_iterator End; | |
| 997 InstList::const_iterator BundleLockStart; | |
| 998 const intptr_t BundleSize; | |
| 999 // Masking with BundleMaskLo identifies an address's bundle offset. | |
| 1000 const intptr_t BundleMaskLo; | |
| 1001 // Masking with BundleMaskHi identifies an address's bundle. | |
| 1002 const intptr_t BundleMaskHi; | |
| 1003 intptr_t SizeSnapshotPre; | |
| 1004 intptr_t SizeSnapshotPost; | |
| 1005 }; | |
| 1006 | |
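A note for readers skimming the new EmitHelper class: the mask arithmetic is easiest to see with concrete numbers. The sketch below is not part of the patch; it assumes NaCl's usual 32-byte bundles (getBundleAlignLog2Bytes() == 5) and uses made-up buffer positions (0x75 and 0x85) chosen so the locked sequence crosses a bundle boundary.

```cpp
// Standalone sketch (not Subzero code) of the bundle mask arithmetic above,
// assuming 32-byte bundles.
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  const intptr_t BundleAlignLog2Bytes = 5; // assumption: 32-byte bundles
  const intptr_t BundleSize = intptr_t(1) << BundleAlignLog2Bytes;
  const intptr_t BundleMaskLo = BundleSize - 1; // selects the offset within a bundle
  const intptr_t BundleMaskHi = ~BundleMaskLo;  // selects the bundle's base address

  // Hypothetical buffer position just before a bundle_lock region...
  intptr_t SizeSnapshotPre = 0x75;  // offset 0x15 into the bundle starting at 0x60
  // ...and the position after the locked sequence was emitted on the first pass.
  intptr_t SizeSnapshotPost = 0x85; // lands in the next bundle, which starts at 0x80

  // Same check as isSameBundle(): an empty region trivially fits; otherwise
  // compare the bundle of the first byte with the bundle of the last byte
  // (Post - 1, because Post points one past the end of the region).
  bool SameBundle = SizeSnapshotPre == SizeSnapshotPost ||
                    (SizeSnapshotPre & BundleMaskHi) ==
                        ((SizeSnapshotPost - 1) & BundleMaskHi);
  printf("offset in bundle = 0x%lx, bundle base = 0x%lx, same bundle = %d\n",
         long(SizeSnapshotPre & BundleMaskLo),
         long(SizeSnapshotPre & BundleMaskHi), int(SameBundle));

  // If the sequence crossed a boundary, padToNextBundle() would emit this many
  // nop bytes so the retry pass starts at the next bundle boundary.
  if (!SameBundle) {
    intptr_t Pad = BundleSize - (SizeSnapshotPre & BundleMaskLo); // 32 - 0x15 = 11
    assert(((SizeSnapshotPre + Pad) & BundleMaskLo) == 0);
    printf("pad with %ld nop bytes to reach 0x%lx\n", long(Pad),
           long(SizeSnapshotPre + Pad));
  }
  return 0;
}
```

With these values the region starts at offset 0x15 of the bundle at 0x60 but ends in the bundle at 0x80, so the helper would pad with 11 nop bytes before the retry pass.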
| 894 void CfgNode::emitIAS(Cfg *Func) const { | 1007 void CfgNode::emitIAS(Cfg *Func) const { |
| 895 Func->setCurrentNode(this); | 1008 Func->setCurrentNode(this); |
| 896 Assembler *Asm = Func->getAssembler<>(); | 1009 Assembler *Asm = Func->getAssembler<>(); |
| 1010 // TODO(stichnot): When sandboxing, defer binding the node label | |
| 1011 // until just before the first instruction is emitted, to reduce the | |
| 1012 // chance that a padding nop is a branch target. | |
| 897 Asm->BindCfgNodeLabel(getIndex()); | 1013 Asm->BindCfgNodeLabel(getIndex()); |
| 898 for (const Inst &I : Phis) { | 1014 for (const Inst &I : Phis) { |
| 899 if (I.isDeleted()) | 1015 if (I.isDeleted()) |
| 900 continue; | 1016 continue; |
| 901 // Emitting a Phi instruction should cause an error. | 1017 // Emitting a Phi instruction should cause an error. |
| 902 I.emitIAS(Func); | 1018 I.emitIAS(Func); |
| 903 } | 1019 } |
| 904 for (const Inst &I : Insts) { | 1020 |
| 905 if (I.isDeleted()) | 1021 // Do the simple emission if not sandboxed. |
| 1022 if (!Func->getContext()->getFlags().getUseSandboxing()) { | |
| 1023 for (const Inst &I : Insts) { | |
| 1024 if (!I.isDeleted() && !I.isRedundantAssign()) { | |
| 1025 I.emitIAS(Func); | |
| 1026 updateStats(Func, &I); | |
| 1027 } | |
| 1028 } | |
| 1029 return; | |
| 1030 } | |
| 1031 // The remainder of the function handles emission with sandboxing. | |
| 1032 | |
jvoung (off chromium), 2015/02/19 21:01:47:
Leave a brief high level comment somewhere about t

Jim Stichnoth, 2015/02/19 23:17:39:
Done.
| 1033 EmitHelper Helper(Asm, Insts); | |
| 1034 InstList::const_iterator End = Insts.end(); | |
| 1035 // Retrying indicates that we had to roll back to the bundle_lock | |
| 1036 // instruction to apply padding before the bundle_lock sequence. | |
| 1037 bool Retrying = false; | |
| 1038 for (InstList::const_iterator I = Insts.begin(); I != End; ++I) { | |
| 1039 if (I->isDeleted() || I->isRedundantAssign()) | |
| 906 continue; | 1040 continue; |
| 907 if (I.isRedundantAssign()) | 1041 |
| 1042 if (llvm::isa<InstBundleLock>(I)) { | |
| 1043 // Set up the initial bundle_lock state. This should not happen | |
| 1044 // while retrying, because the retry rolls back to the | |
| 1045 // instruction following the bundle_lock instruction. | |
| 1046 assert(!Retrying); | |
| 1047 Helper.enterBundleLock(I); | |
| 908 continue; | 1048 continue; |
| 909 I.emitIAS(Func); | 1049 } |
| 910 updateStats(Func, &I); | 1050 |
| 1051 if (llvm::isa<InstBundleUnlock>(I)) { | |
| 1052 Helper.enterBundleUnlock(); | |
| 1053 if (Retrying) { | |
| 1054 // Make sure all instructions are in the same bundle. | |
| 1055 assert(Helper.isSameBundle()); | |
| 1056 // If align_to_end is specified, make sure the next | |
| 1057 // instruction begins the bundle. | |
| 1058 assert(!Helper.isAlignToEnd() || Helper.getPostAlignment() == 0); | |
| 1059 Helper.leaveBundleLockRegion(); | |
| 1060 Retrying = false; | |
| 1061 } else { | |
| 1062 // This is the first pass, so roll back for the retry pass. | |
| 1063 Helper.rollback(); | |
| 1064 // Pad to the next bundle if the instruction sequence crossed | |
| 1065 // a bundle boundary. | |
| 1066 Helper.padToNextBundle(); | |
| 1067 // Insert additional padding to make AlignToEnd work. | |
| 1068 Helper.padForAlignToEnd(); | |
| 1069 // Prepare for the retry pass after padding is done. | |
| 1070 Retrying = true; | |
| 1071 I = Helper.getBundleLockStart(); | |
| 1072 } | |
| 1073 continue; | |
| 1074 } | |
| 1075 | |
| 1076 // I points to a non bundle_lock/bundle_unlock instruction. | |
| 1077 if (Helper.isInBundleLockRegion()) { | |
| 1078 I->emitIAS(Func); | |
| 1079 // Only update stats during the final pass. | |
| 1080 if (Retrying) | |
| 1081 updateStats(Func, I); | |
| 1082 } else { | |
| 1083 // Treat it as though there were an implicit bundle_lock and | |
| 1084 // bundle_unlock wrapping the instruction. | |
| 1085 Helper.enterBundleLock(I); | |
| 1086 I->emitIAS(Func); | |
| 1087 Helper.enterBundleUnlock(); | |
| 1088 Helper.rollback(); | |
| 1089 Helper.padToNextBundle(); | |
| 1090 I->emitIAS(Func); | |
| 1091 updateStats(Func, I); | |
| 1092 Helper.leaveBundleLockRegion(); | |
| 1093 } | |
| 911 } | 1094 } |
| 1095 | |
| 1096 // Don't allow bundle locking across basic blocks, to keep the | |
| 1097 // backtracking mechanism simple. | |
| 1098 assert(!Helper.isInBundleLockRegion()); | |
| 1099 assert(!Retrying); | |
| 912 } | 1100 } |
| 913 | 1101 |
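To summarize the sandboxed path at a high level: each bundle_lock region is emitted twice, first in the assembler's preliminary mode purely to measure its size, then again after the buffer has been rolled back and padded so the region satisfies the bundling constraint. The toy model below only sketches that control flow under those assumptions; FakeAsm and emitLockedRegion are hypothetical stand-ins, not Subzero's real Assembler API.

```cpp
// Simplified, self-contained model of the rollback-and-retry scheme in
// emitIAS() above. All types and helpers here are hypothetical.
#include <cstdint>
#include <vector>

struct FakeAsm {
  intptr_t BufferSize = 0;
  bool Preliminary = false; // mirrors Asm->setPreliminary(); unused in this toy model
  void emitBytes(intptr_t N) { BufferSize += N; }
  void padWithNop(intptr_t N) { BufferSize += N; }
};

// Emit one bundle_lock..bundle_unlock region whose instructions encode to
// Sizes[i] bytes each, keeping the whole region inside a single bundle.
void emitLockedRegion(FakeAsm &Asm, const std::vector<intptr_t> &Sizes,
                      intptr_t BundleSize, bool AlignToEnd) {
  const intptr_t MaskLo = BundleSize - 1;
  const intptr_t MaskHi = ~MaskLo;

  // Pass 1: measure the region in preliminary mode.
  const intptr_t Pre = Asm.BufferSize;
  Asm.Preliminary = true;
  for (intptr_t S : Sizes)
    Asm.emitBytes(S);
  const intptr_t Post = Asm.BufferSize;

  // Roll back, then pad to the next bundle if the region crossed a boundary.
  Asm.BufferSize = Pre;
  Asm.Preliminary = false;
  const bool SameBundle =
      Pre == Post || (Pre & MaskHi) == ((Post - 1) & MaskHi);
  if (!SameBundle)
    Asm.padWithNop(BundleSize - (Pre & MaskLo));
  // For align_to_end, add padding so the region ends exactly on a boundary.
  if (AlignToEnd) {
    const intptr_t EndOffset = (Asm.BufferSize + (Post - Pre)) & MaskLo;
    if (EndOffset)
      Asm.padWithNop(BundleSize - EndOffset);
  }

  // Pass 2: the real emission, now known to satisfy the constraints.
  for (intptr_t S : Sizes)
    Asm.emitBytes(S);
}

int main() {
  FakeAsm Asm;
  Asm.emitBytes(0x75); // pretend some code was already emitted
  emitLockedRegion(Asm, {10, 10}, 32, /*AlignToEnd=*/false);
  return 0;
}
```

In the patch itself the second pass is driven by resetting the iterator to Helper.getBundleLockStart() and setting Retrying, rather than by a separate helper function like this.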
| 914 void CfgNode::dump(Cfg *Func) const { | 1102 void CfgNode::dump(Cfg *Func) const { |
| 915 if (!ALLOW_DUMP) | 1103 if (!ALLOW_DUMP) |
| 916 return; | 1104 return; |
| 917 Func->setCurrentNode(this); | 1105 Func->setCurrentNode(this); |
| 918 Ostream &Str = Func->getContext()->getStrDump(); | 1106 Ostream &Str = Func->getContext()->getStrDump(); |
| 919 Liveness *Liveness = Func->getLiveness(); | 1107 Liveness *Liveness = Func->getLiveness(); |
| 920 if (Func->isVerbose(IceV_Instructions)) { | 1108 if (Func->isVerbose(IceV_Instructions)) { |
| 921 Str << getName() << ":\n"; | 1109 Str << getName() << ":\n"; |
| (...skipping 63 matching lines...) | |
| 985 if (!First) | 1173 if (!First) |
| 986 Str << ", "; | 1174 Str << ", "; |
| 987 First = false; | 1175 First = false; |
| 988 Str << "%" << I->getName(); | 1176 Str << "%" << I->getName(); |
| 989 } | 1177 } |
| 990 Str << "\n"; | 1178 Str << "\n"; |
| 991 } | 1179 } |
| 992 } | 1180 } |
| 993 | 1181 |
| 994 } // end of namespace Ice | 1182 } // end of namespace Ice |