Chromium Code Reviews

Unified diff: src/arm64/macro-assembler-arm64.cc

Issue 271543004: Reland - Arm64: Ensure that csp is always aligned to 16 byte values even if jssp is not. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: ip0 ip1 -> TmpList (created 6 years, 7 months ago)
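
What the patch enforces, in one sentence: every push bumps the system stack pointer (csp) down before anything is stored, and every pop lets csp catch up afterwards, so csp stays 16-byte aligned and never sits above memory the JavaScript stack (jssp) still owns. The following standalone model is plain C++, not V8 code; jssp and csp are modelled as integers, and the alignment policy is a simplification of what BumpSystemStackPointer / SyncSystemStackPointer do in the macro assembler:

    // Standalone illustration, not V8 code: jssp and csp are modelled as plain
    // addresses. The invariant: csp is always 16-byte aligned and never above a
    // byte that the JavaScript stack has claimed.
    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t jssp = 0x8000;  // JavaScript stack pointer, moves in 8-byte steps.
      uint64_t csp = 0x8000;   // C stack pointer, must stay 16-byte aligned.

      // Push three 8-byte registers onto the JS stack.
      for (int i = 0; i < 3; ++i) {
        jssp -= 8;
        // "PushPreamble": before storing below csp, move csp down to a 16-byte
        // aligned address at or below the new jssp.
        if (csp > jssp) csp = jssp & ~uint64_t{15};
        assert((csp % 16) == 0 && csp <= jssp);
      }

      // Pop them again.
      jssp += 3 * 8;
      // "PopPostamble" (debug-code only in the patch): let csp catch up with
      // jssp so a simulator can flag accesses to the now-free part of the stack.
      csp = jssp & ~uint64_t{15};
      assert((csp % 16) == 0 && csp <= jssp);

      std::printf("jssp=%#llx csp=%#llx\n", (unsigned long long)jssp,
                  (unsigned long long)csp);
      return 0;
    }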
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "v8.h"
 
 #if V8_TARGET_ARCH_ARM64
 
 #include "bootstrapper.h"
 #include "codegen.h"
(...skipping 106 matching lines...)
       LogicalImmediate(rd, rn, n, imm_s, imm_r, op);
     } else {
       // Immediate can't be encoded: synthesize using move immediate.
       Register temp = temps.AcquireSameSizeAs(rn);
       Mov(temp, immediate);
       if (rd.Is(csp)) {
         // If rd is the stack pointer we cannot use it as the destination
         // register so we use the temp register as an intermediate again.
         Logical(temp, rn, temp, op);
         Mov(csp, temp);
+        AssertStackConsistency();
       } else {
         Logical(rd, rn, temp, op);
       }
     }
 
   } else if (operand.IsExtendedRegister()) {
     ASSERT(operand.reg().SizeInBits() <= rd.SizeInBits());
     // Add/sub extended supports shift <= 4. We want to support exactly the
     // same modes here.
     ASSERT(operand.shift_amount() <= 4);
(...skipping 87 matching lines...)
           movk(temp, imm16, 16 * i);
         }
       }
     }
     ASSERT(first_mov_done);
 
     // Move the temporary if the original destination register was the stack
     // pointer.
     if (rd.IsSP()) {
       mov(rd, temp);
+      AssertStackConsistency();
     }
   }
 }
 
 
 void MacroAssembler::Mov(const Register& rd,
                          const Operand& operand,
                          DiscardMoveMode discard_mode) {
   ASSERT(allow_macro_instructions_);
   ASSERT(!rd.IsZero());
(...skipping 516 matching lines...)
 // Abstracted stack operations.
 
 
 void MacroAssembler::Push(const CPURegister& src0, const CPURegister& src1,
                           const CPURegister& src2, const CPURegister& src3) {
   ASSERT(AreSameSizeAndType(src0, src1, src2, src3));
 
   int count = 1 + src1.IsValid() + src2.IsValid() + src3.IsValid();
   int size = src0.SizeInBytes();
 
-  PrepareForPush(count, size);
+  PushPreamble(count, size);
   PushHelper(count, size, src0, src1, src2, src3);
 }
 
 
 void MacroAssembler::Push(const CPURegister& src0, const CPURegister& src1,
                           const CPURegister& src2, const CPURegister& src3,
                           const CPURegister& src4, const CPURegister& src5,
                           const CPURegister& src6, const CPURegister& src7) {
   ASSERT(AreSameSizeAndType(src0, src1, src2, src3, src4, src5, src6, src7));
 
   int count = 5 + src5.IsValid() + src6.IsValid() + src6.IsValid();
   int size = src0.SizeInBytes();
 
-  PrepareForPush(count, size);
+  PushPreamble(count, size);
   PushHelper(4, size, src0, src1, src2, src3);
   PushHelper(count - 4, size, src4, src5, src6, src7);
 }
 
 
 void MacroAssembler::Pop(const CPURegister& dst0, const CPURegister& dst1,
                          const CPURegister& dst2, const CPURegister& dst3) {
   // It is not valid to pop into the same register more than once in one
   // instruction, not even into the zero register.
   ASSERT(!AreAliased(dst0, dst1, dst2, dst3));
   ASSERT(AreSameSizeAndType(dst0, dst1, dst2, dst3));
   ASSERT(dst0.IsValid());
 
   int count = 1 + dst1.IsValid() + dst2.IsValid() + dst3.IsValid();
   int size = dst0.SizeInBytes();
 
-  PrepareForPop(count, size);
   PopHelper(count, size, dst0, dst1, dst2, dst3);
-
-  if (!csp.Is(StackPointer()) && emit_debug_code()) {
-    // It is safe to leave csp where it is when unwinding the JavaScript stack,
-    // but if we keep it matching StackPointer, the simulator can detect memory
-    // accesses in the now-free part of the stack.
-    Mov(csp, StackPointer());
-  }
+  PopPostamble(count, size);
 }
 
 
 void MacroAssembler::PushPopQueue::PushQueued() {
   if (queued_.empty()) return;
 
-  masm_->PrepareForPush(size_);
+  masm_->PushPreamble(size_);
 
   int count = queued_.size();
   int index = 0;
   while (index < count) {
     // PushHelper can only handle registers with the same size and type, and it
     // can handle only four at a time. Batch them up accordingly.
     CPURegister batch[4] = {NoReg, NoReg, NoReg, NoReg};
     int batch_index = 0;
     do {
       batch[batch_index++] = queued_[index++];
     } while ((batch_index < 4) && (index < count) &&
              batch[0].IsSameSizeAndType(queued_[index]));
 
     masm_->PushHelper(batch_index, batch[0].SizeInBytes(),
                       batch[0], batch[1], batch[2], batch[3]);
   }
 
   queued_.clear();
 }
 
 
 void MacroAssembler::PushPopQueue::PopQueued() {
   if (queued_.empty()) return;
 
-  masm_->PrepareForPop(size_);
-
   int count = queued_.size();
   int index = 0;
   while (index < count) {
     // PopHelper can only handle registers with the same size and type, and it
     // can handle only four at a time. Batch them up accordingly.
     CPURegister batch[4] = {NoReg, NoReg, NoReg, NoReg};
     int batch_index = 0;
     do {
       batch[batch_index++] = queued_[index++];
     } while ((batch_index < 4) && (index < count) &&
              batch[0].IsSameSizeAndType(queued_[index]));
 
     masm_->PopHelper(batch_index, batch[0].SizeInBytes(),
                      batch[0], batch[1], batch[2], batch[3]);
   }
 
+  masm_->PopPostamble(size_);
   queued_.clear();
 }
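
The batching loop in PushQueued and PopQueued above cuts a batch after at most four registers, or earlier as soon as the next queued register has a different size. A standalone model of that loop (plain C++, not V8 code; FakeReg stands in for CPURegister and the printf stands in for the PushHelper/PopHelper call):

    #include <cstdio>
    #include <vector>

    struct FakeReg { int size_in_bytes; };  // stand-in for CPURegister

    int main() {
      // Five X-sized (8-byte) registers followed by two W-sized (4-byte) ones.
      std::vector<FakeReg> queued = {{8}, {8}, {8}, {8}, {8}, {4}, {4}};
      int count = static_cast<int>(queued.size());
      int index = 0;
      while (index < count) {
        FakeReg batch[4] = {};
        int batch_index = 0;
        do {
          batch[batch_index++] = queued[index++];
        } while ((batch_index < 4) && (index < count) &&
                 (batch[0].size_in_bytes == queued[index].size_in_bytes));
        // The real code calls PushHelper/PopHelper here with up to four registers.
        std::printf("helper call: %d register(s) of %d bytes\n", batch_index,
                    batch[0].size_in_bytes);
      }
      return 0;  // prints batches of 4x8, 1x8, 2x4 bytes
    }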
 
 
 void MacroAssembler::PushCPURegList(CPURegList registers) {
   int size = registers.RegisterSizeInBytes();
 
-  PrepareForPush(registers.Count(), size);
+  PushPreamble(registers.Count(), size);
   // Push up to four registers at a time because if the current stack pointer is
   // csp and reg_size is 32, registers must be pushed in blocks of four in order
   // to maintain the 16-byte alignment for csp.
   while (!registers.IsEmpty()) {
     int count_before = registers.Count();
     const CPURegister& src0 = registers.PopHighestIndex();
     const CPURegister& src1 = registers.PopHighestIndex();
     const CPURegister& src2 = registers.PopHighestIndex();
     const CPURegister& src3 = registers.PopHighestIndex();
     int count = count_before - registers.Count();
     PushHelper(count, size, src0, src1, src2, src3);
   }
 }
 
 
 void MacroAssembler::PopCPURegList(CPURegList registers) {
   int size = registers.RegisterSizeInBytes();
 
-  PrepareForPop(registers.Count(), size);
   // Pop up to four registers at a time because if the current stack pointer is
   // csp and reg_size is 32, registers must be pushed in blocks of four in
   // order to maintain the 16-byte alignment for csp.
   while (!registers.IsEmpty()) {
     int count_before = registers.Count();
     const CPURegister& dst0 = registers.PopLowestIndex();
     const CPURegister& dst1 = registers.PopLowestIndex();
     const CPURegister& dst2 = registers.PopLowestIndex();
     const CPURegister& dst3 = registers.PopLowestIndex();
     int count = count_before - registers.Count();
     PopHelper(count, size, dst0, dst1, dst2, dst3);
   }
-
-  if (!csp.Is(StackPointer()) && emit_debug_code()) {
-    // It is safe to leave csp where it is when unwinding the JavaScript stack,
-    // but if we keep it matching StackPointer, the simulator can detect memory
-    // accesses in the now-free part of the stack.
-    Mov(csp, StackPointer());
-  }
+  PopPostamble(registers.Count(), size);
 }
 
 
 void MacroAssembler::PushMultipleTimes(CPURegister src, int count) {
   int size = src.SizeInBytes();
 
-  PrepareForPush(count, size);
+  PushPreamble(count, size);
 
   if (FLAG_optimize_for_size && count > 8) {
     UseScratchRegisterScope temps(this);
     Register temp = temps.AcquireX();
 
     Label loop;
     __ Mov(temp, count / 2);
     __ Bind(&loop);
     PushHelper(2, size, src, src, NoReg, NoReg);
     __ Subs(temp, temp, 1);
(...skipping 15 matching lines...)
   }
   if (count == 1) {
     PushHelper(1, size, src, NoReg, NoReg, NoReg);
     count -= 1;
   }
   ASSERT(count == 0);
 }
 
 
 void MacroAssembler::PushMultipleTimes(CPURegister src, Register count) {
-  PrepareForPush(Operand(count, UXTW, WhichPowerOf2(src.SizeInBytes())));
+  PushPreamble(Operand(count, UXTW, WhichPowerOf2(src.SizeInBytes())));
 
   UseScratchRegisterScope temps(this);
   Register temp = temps.AcquireSameSizeAs(count);
 
   if (FLAG_optimize_for_size) {
     Label loop, done;
 
     Subs(temp, count, 1);
     B(mi, &done);
 
(...skipping 105 matching lines...)
       // for csp at all times.
       ldp(dst2, dst3, MemOperand(StackPointer(), 2 * size));
       ldp(dst0, dst1, MemOperand(StackPointer(), 4 * size, PostIndex));
       break;
     default:
       UNREACHABLE();
   }
 }
 
 
-void MacroAssembler::PrepareForPush(Operand total_size) {
-  // TODO(jbramley): This assertion generates too much code in some debug tests.
-  // AssertStackConsistency();
+void MacroAssembler::PushPreamble(Operand total_size) {
   if (csp.Is(StackPointer())) {
     // If the current stack pointer is csp, then it must be aligned to 16 bytes
     // on entry and the total size of the specified registers must also be a
     // multiple of 16 bytes.
     if (total_size.IsImmediate()) {
       ASSERT((total_size.immediate() % 16) == 0);
     }
 
     // Don't check access size for non-immediate sizes. It's difficult to do
     // well, and it will be caught by hardware (or the simulator) anyway.
   } else {
     // Even if the current stack pointer is not the system stack pointer (csp),
     // the system stack pointer will still be modified in order to comply with
     // ABI rules about accessing memory below the system stack pointer.
     BumpSystemStackPointer(total_size);
   }
 }
 
 
-void MacroAssembler::PrepareForPop(Operand total_size) {
-  AssertStackConsistency();
+void MacroAssembler::PopPostamble(Operand total_size) {
   if (csp.Is(StackPointer())) {
     // If the current stack pointer is csp, then it must be aligned to 16 bytes
     // on entry and the total size of the specified registers must also be a
     // multiple of 16 bytes.
     if (total_size.IsImmediate()) {
       ASSERT((total_size.immediate() % 16) == 0);
     }
 
     // Don't check access size for non-immediate sizes. It's difficult to do
     // well, and it will be caught by hardware (or the simulator) anyway.
+  } else if (emit_debug_code()) {
+    // It is safe to leave csp where it is when unwinding the JavaScript stack,
+    // but if we keep it matching StackPointer, the simulator can detect memory
+    // accesses in the now-free part of the stack.
+    SyncSystemStackPointer();
   }
 }
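
When jssp is the stack pointer, PushPreamble defers to BumpSystemStackPointer(total_size), which claims at least total_size bytes below csp; to keep csp 16-byte aligned the claimed amount has to be rounded to a 16-byte boundary in some form. A standalone sketch of that rounding arithmetic (plain C++; this is not the real BumpSystemStackPointer, whose implementation is not part of this diff):

    #include <cassert>
    #include <cstdint>

    // Round a byte count up to the next multiple of 16 (sketch only; the real
    // BumpSystemStackPointer works on registers inside the macro assembler).
    uint64_t AlignUp16(uint64_t bytes) { return (bytes + 15) & ~uint64_t{15}; }

    int main() {
      assert(AlignUp16(0) == 0);
      assert(AlignUp16(8) == 16);   // a single X register still claims 16 bytes
      assert(AlignUp16(24) == 32);  // three X registers -> 32 bytes
      assert(AlignUp16(32) == 32);  // already-aligned sizes are unchanged

      uint64_t csp = 0x8000;        // assumed 16-byte aligned on entry
      csp -= AlignUp16(24);         // claim space below csp for the pushed values
      assert((csp % 16) == 0);      // alignment is preserved
      return 0;
    }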
 
 
 void MacroAssembler::Poke(const CPURegister& src, const Operand& offset) {
   if (offset.IsImmediate()) {
     ASSERT(offset.immediate() >= 0);
   } else if (emit_debug_code()) {
     Cmp(xzr, offset);
     Check(le, kStackAccessBelowStackPointer);
(...skipping 75 matching lines...)
   ldp(x29, x30, tos);
 
   ldp(d8, d9, tos);
   ldp(d10, d11, tos);
   ldp(d12, d13, tos);
   ldp(d14, d15, tos);
 }
 
 
 void MacroAssembler::AssertStackConsistency() {
-  if (emit_debug_code()) {
-    if (csp.Is(StackPointer())) {
-      // We can't check the alignment of csp without using a scratch register
-      // (or clobbering the flags), but the processor (or simulator) will abort
-      // if it is not properly aligned during a load.
+  // Avoid emitting code when !use_real_abort() since non-real aborts cause too
+  // much code to be generated.
+  if (emit_debug_code() && use_real_aborts()) {
+    if (csp.Is(StackPointer()) || CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
+      // Always check the alignment of csp if ALWAYS_ALIGN_CSP is true. We
+      // can't check the alignment of csp without using a scratch register (or
+      // clobbering the flags), but the processor (or simulator) will abort if
+      // it is not properly aligned during a load.
       ldr(xzr, MemOperand(csp, 0));
-    } else if (FLAG_enable_slow_asserts) {
+    }
+    if (FLAG_enable_slow_asserts && !csp.Is(StackPointer())) {
       Label ok;
       // Check that csp <= StackPointer(), preserving all registers and NZCV.
       sub(StackPointer(), csp, StackPointer());
       cbz(StackPointer(), &ok);  // Ok if csp == StackPointer().
       tbnz(StackPointer(), kXSignBit, &ok);  // Ok if csp < StackPointer().
 
-      Abort(kTheCurrentStackPointerIsBelowCsp);
+      // Avoid generating AssertStackConsistency checks for the Push in Abort.
+      { DontEmitDebugCodeScope dont_emit_debug_code_scope(this);
+        Abort(kTheCurrentStackPointerIsBelowCsp);
+      }
 
       bind(&ok);
       // Restore StackPointer().
       sub(StackPointer(), csp, StackPointer());
     }
   }
 }
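
The csp <= StackPointer() check above avoids needing a scratch register: it overwrites StackPointer() with csp - StackPointer(), branches on zero or on the sign bit, and then recovers the original value with a second, identical subtraction. The identity it relies on, checked in plain C++ (not V8 code; jssp plays the role of StackPointer()):

    #include <cassert>
    #include <cstdint>

    int main() {
      uint64_t csp = 0x7000;
      uint64_t jssp = 0x7480;     // plays the role of StackPointer()
      uint64_t original = jssp;

      jssp = csp - jssp;          // sub(StackPointer(), csp, StackPointer())
      bool ok = (jssp == 0) ||    // cbz: ok if csp == StackPointer()
                (jssp >> 63);     // tbnz on sign bit: ok if csp < StackPointer()
      assert(ok);

      jssp = csp - jssp;          // the second sub restores StackPointer()
      assert(jssp == original);   // csp - (csp - jssp) == jssp
      return 0;
    }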
 
 
 void MacroAssembler::AssertFPCRState(Register fpcr) {
(...skipping 4009 matching lines...)
   }
 }
 
 
 #undef __
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM64