Chromium Code Reviews

Unified Diff: src/a64/macro-assembler-a64.cc

Issue 170623002: A64: Tidy up Push and Pop TODOs. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 804 matching lines...)
 
   if (!csp.Is(StackPointer()) && emit_debug_code()) {
     // It is safe to leave csp where it is when unwinding the JavaScript stack,
     // but if we keep it matching StackPointer, the simulator can detect memory
     // accesses in the now-free part of the stack.
     Mov(csp, StackPointer());
   }
 }
 
 
+void MacroAssembler::PushPopQueue::PushQueued() {
+  if (queued_.empty()) return;
+
+  masm_->PrepareForPush(size_);
+
+  int count = queued_.size();
+  int index = 0;
+  while (index < count) {
+    // PushHelper can only handle registers with the same size and type, and it
+    // can handle only four at a time. Batch them up accordingly.
+    CPURegister batch[4] = {NoReg, NoReg, NoReg, NoReg};
+    int batch_index = 0;
+    do {
+      batch[batch_index++] = queued_[index++];
+    } while ((batch_index < 4) && (index < count) &&
+             batch[0].IsSameSizeAndType(queued_[index]));
+
+    masm_->PushHelper(batch_index, batch[0].SizeInBytes(),
+                      batch[0], batch[1], batch[2], batch[3]);
+  }
+
+  queued_.clear();
+}
+
+
+void MacroAssembler::PushPopQueue::PopQueued() {
+  if (queued_.empty()) return;
+
+  masm_->PrepareForPop(size_);
+
+  int count = queued_.size();
+  int index = 0;
+  while (index < count) {
+    // PopHelper can only handle registers with the same size and type, and it
+    // can handle only four at a time. Batch them up accordingly.
+    CPURegister batch[4] = {NoReg, NoReg, NoReg, NoReg};
+    int batch_index = 0;
+    do {
+      batch[batch_index++] = queued_[index++];
+    } while ((batch_index < 4) && (index < count) &&
+             batch[0].IsSameSizeAndType(queued_[index]));
+
+    masm_->PopHelper(batch_index, batch[0].SizeInBytes(),
+                     batch[0], batch[1], batch[2], batch[3]);
+  }
+
+  queued_.clear();
+}
+
+
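The new PushQueued and PopQueued bodies share one idea: drain the queue in batches of at most four registers that agree on size and type, because that is all PushHelper and PopHelper can accept. A standalone sketch of the same batching rule, using a simplified stand-in for CPURegister (the Reg struct and the printf output are illustrative, not V8 API):

#include <cstdio>
#include <vector>

// Illustrative stand-in for CPURegister: only size and type matter here.
struct Reg {
  int size_in_bytes;  // e.g. 4 (W register), 8 (X or D register)
  char type;          // 'r' for integer, 'v' for floating point
  bool IsSameSizeAndType(const Reg& other) const {
    return size_in_bytes == other.size_in_bytes && type == other.type;
  }
};

// Batch queued registers exactly as PushQueued does: up to four at a time,
// and only while they share the first register's size and type.
void FlushQueue(const std::vector<Reg>& queued) {
  int count = static_cast<int>(queued.size());
  int index = 0;
  while (index < count) {
    Reg batch[4] = {};
    int batch_index = 0;
    do {
      batch[batch_index++] = queued[index++];
    } while ((batch_index < 4) && (index < count) &&
             batch[0].IsSameSizeAndType(queued[index]));
    // In the real code this is masm_->PushHelper(batch_index, ...).
    std::printf("push %d register(s) of %d bytes\n",
                batch_index, batch[0].size_in_bytes);
  }
}

int main() {
  // x0, x1, d0, w0..w4 produce batches of 2, 1, 4 and 1.
  FlushQueue({{8, 'r'}, {8, 'r'}, {8, 'v'},
              {4, 'r'}, {4, 'r'}, {4, 'r'}, {4, 'r'}, {4, 'r'}});
}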
 void MacroAssembler::PushCPURegList(CPURegList registers) {
   int size = registers.RegisterSizeInBytes();
 
   PrepareForPush(registers.Count(), size);
   // Push up to four registers at a time because if the current stack pointer is
   // csp and reg_size is 32, registers must be pushed in blocks of four in order
   // to maintain the 16-byte alignment for csp.
   while (!registers.IsEmpty()) {
     int count_before = registers.Count();
     const CPURegister& src0 = registers.PopHighestIndex();
(...skipping 25 matching lines...)
 
   if (!csp.Is(StackPointer()) && emit_debug_code()) {
     // It is safe to leave csp where it is when unwinding the JavaScript stack,
     // but if we keep it matching StackPointer, the simulator can detect memory
     // accesses in the now-free part of the stack.
     Mov(csp, StackPointer());
   }
 }
 
 
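The blocks-of-four rule in the comment above is pure arithmetic: csp must remain 16-byte aligned after every store, and only groups whose total size is a multiple of 16 bytes keep it that way. A quick host-side check of that arithmetic (the register widths are the architectural W/X sizes, assumed here rather than taken from this file):

#include <cassert>

int main() {
  const int kWRegSizeInBytes = 4;  // 32-bit W register
  const int kXRegSizeInBytes = 8;  // 64-bit X register
  // Four W registers or two X registers move csp by exactly 16 bytes, so
  // alignment is preserved after every stp/str the helper emits.
  assert((4 * kWRegSizeInBytes) % 16 == 0);
  assert((2 * kXRegSizeInBytes) % 16 == 0);
  // A pair of W registers moves csp by only 8 bytes, which is why W registers
  // have to be grouped in fours whenever csp is the stack pointer.
  assert((2 * kWRegSizeInBytes) % 16 != 0);
}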
-void MacroAssembler::PushMultipleTimes(int count, Register src) {
+void MacroAssembler::PushMultipleTimes(CPURegister src, int count) {
   int size = src.SizeInBytes();
 
   PrepareForPush(count, size);
 
   if (FLAG_optimize_for_size && count > 8) {
     Label loop;
     __ Mov(Tmp0(), count / 2);
     __ Bind(&loop);
     PushHelper(2, size, src, src, NoReg, NoReg);
     __ Subs(Tmp0(), Tmp0(), 1);
(...skipping 14 matching lines...)
     count -= 2;
   }
   if (count == 1) {
     PushHelper(1, size, src, NoReg, NoReg, NoReg);
     count -= 1;
   }
   ASSERT(count == 0);
 }
 
 
+void MacroAssembler::PushMultipleTimes(CPURegister src, Register count) {
+  PrepareForPush(Operand(count, UXTW, WhichPowerOf2(src.SizeInBytes())));
+
+  Register temp = AppropriateTempFor(count);
+
+  if (FLAG_optimize_for_size) {
+    Label loop, done;
+
+    Subs(temp, count, 1);
+    B(mi, &done);
+
+    // Push all registers individually, to save code size.
+    Bind(&loop);
+    Subs(temp, temp, 1);
+    PushHelper(1, src.SizeInBytes(), src, NoReg, NoReg, NoReg);
+    B(pl, &loop);
+
+    Bind(&done);
+  } else {
+    Label loop, leftover2, leftover1, done;
+
+    Subs(temp, count, 4);
+    B(mi, &leftover2);
+
+    // Push groups of four first.
+    Bind(&loop);
+    Subs(temp, temp, 4);
+    PushHelper(4, src.SizeInBytes(), src, src, src, src);
+    B(pl, &loop);
+
+    // Push groups of two.
+    Bind(&leftover2);
+    Tbz(count, 1, &leftover1);
+    PushHelper(2, src.SizeInBytes(), src, src, NoReg, NoReg);
+
+    // Push the last one (if required).
+    Bind(&leftover1);
+    Tbz(count, 0, &done);
+    PushHelper(1, src.SizeInBytes(), src, NoReg, NoReg, NoReg);
+
+    Bind(&done);
+  }
+}
+
+
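The register-count variant peels the count into groups of four, then uses Tbz on bits 1 and 0 of count for the leftovers; Tbz is attractive here because it tests a single bit and branches without touching the flags. A host-side sketch of the same decomposition, with printf standing in for the PushHelper calls (hypothetical helper, for illustration only):

#include <cstdio>

// Decompose count the way the generated code does: a loop of four-register
// pushes, then bit 1 selects a pair and bit 0 a final single push.
void PushNTimes(unsigned count) {
  for (unsigned quads = count / 4; quads > 0; quads--) {
    std::printf("push 4\n");  // the Subs/B(pl) loop
  }
  if (count & 2) std::printf("push 2\n");  // Tbz(count, 1, ...) not taken
  if (count & 1) std::printf("push 1\n");  // Tbz(count, 0, ...) not taken
}

int main() {
  PushNTimes(11);  // 11 = 2 * 4 + 2 + 1: two quads, one pair, one single
}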
 void MacroAssembler::PushHelper(int count, int size,
                                 const CPURegister& src0,
                                 const CPURegister& src1,
                                 const CPURegister& src2,
                                 const CPURegister& src3) {
   // Ensure that we don't unintentionally modify scratch or debug registers.
   InstructionAccurateScope scope(this);
 
   ASSERT(AreSameSizeAndType(src0, src1, src2, src3));
   ASSERT(size == src0.SizeInBytes());
(...skipping 61 matching lines...)
       // for csp at all times.
       ldp(dst2, dst3, MemOperand(StackPointer(), 2 * size));
       ldp(dst0, dst1, MemOperand(StackPointer(), 4 * size, PostIndex));
       break;
     default:
       UNREACHABLE();
   }
 }
 
 
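The pop sequence above is ordered so the stack pointer only moves once, after all the data has been read: the upper pair is loaded at a plain offset, and the final ldp uses post-index addressing to pop all four slots at once. A small C++ model of the post-index semantics, assuming a toy stack (this is not the simulator's API):

#include <cstdint>
#include <cstdio>

// Model of a post-indexed ldp: read the pair at *sp first, then advance sp.
// The address moves only after the memory has been read, which is what keeps
// the stack pointer valid at every instruction boundary.
void LdpPostIndex(uint64_t& dst0, uint64_t& dst1,
                  const uint64_t*& sp, int post_words) {
  dst0 = sp[0];
  dst1 = sp[1];
  sp += post_words;  // updated after the loads, never before
}

int main() {
  uint64_t stack[4] = {10, 11, 12, 13};  // 10 and 11 are at the lowest address
  const uint64_t* sp = stack;
  uint64_t d0, d1, d2, d3;
  d2 = sp[2];  // ldp dst2, dst3, [sp, #2 * size]: plain offset, sp unchanged
  d3 = sp[3];
  LdpPostIndex(d0, d1, sp, 4);  // ldp dst0, dst1, [sp], #4 * size
  std::printf("%llu %llu %llu %llu (sp advanced %d slots)\n",
              (unsigned long long)d0, (unsigned long long)d1,
              (unsigned long long)d2, (unsigned long long)d3,
              (int)(sp - stack));
}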
-void MacroAssembler::PrepareForPush(int count, int size) {
+void MacroAssembler::PrepareForPush(Operand total_size) {
-  // TODO(jbramley): Use AssertStackConsistency here, if possible. See the
-  // AssertStackConsistency for details of why we can't at the moment.
-  if (csp.Is(StackPointer())) {
-    // If the current stack pointer is csp, then it must be aligned to 16 bytes
-    // on entry and the total size of the specified registers must also be a
-    // multiple of 16 bytes.
-    ASSERT((count * size) % 16 == 0);
-  } else {
-    // Even if the current stack pointer is not the system stack pointer (csp),
-    // the system stack pointer will still be modified in order to comply with
-    // ABI rules about accessing memory below the system stack pointer.
-    BumpSystemStackPointer(count * size);
-  }
-}
-
-
-void MacroAssembler::PrepareForPop(int count, int size) {
   AssertStackConsistency();
   if (csp.Is(StackPointer())) {
     // If the current stack pointer is csp, then it must be aligned to 16 bytes
     // on entry and the total size of the specified registers must also be a
     // multiple of 16 bytes.
-    ASSERT((count * size) % 16 == 0);
+    if (total_size.IsImmediate()) {
+      ASSERT((total_size.immediate() % 16) == 0);
+    }
+
+    // Don't check access size for non-immediate sizes. It's difficult to do
+    // well, and it will be caught by hardware (or the simulator) anyway.
+  } else {
+    // Even if the current stack pointer is not the system stack pointer (csp),
+    // the system stack pointer will still be modified in order to comply with
+    // ABI rules about accessing memory below the system stack pointer.
+    BumpSystemStackPointer(total_size);
   }
 }
 
+
+void MacroAssembler::PrepareForPop(Operand total_size) {
+  AssertStackConsistency();
+  if (csp.Is(StackPointer())) {
+    // If the current stack pointer is csp, then it must be aligned to 16 bytes
+    // on entry and the total size of the specified registers must also be a
+    // multiple of 16 bytes.
+    if (total_size.IsImmediate()) {
+      ASSERT((total_size.immediate() % 16) == 0);
+    }
+
+    // Don't check access size for non-immediate sizes. It's difficult to do
+    // well, and it will be caught by hardware (or the simulator) anyway.
+  }
+}
+
 
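Taking an Operand means PrepareForPush and PrepareForPop can now be reached with a total size that lives in a register, so the 16-byte assertion can only fire for immediates. A plain C++ sketch of that split, with a minimal Operand-like type (the names and the csp flag are assumptions, not V8 code):

#include <cassert>
#include <cstdint>

// Minimal Operand-like type: either an immediate known at code-generation
// time, or a value that will only exist at run time (a register).
struct SizeOperand {
  bool is_immediate;
  int64_t immediate;  // only meaningful when is_immediate is true
};

void CheckPushSize(const SizeOperand& total_size, bool stack_pointer_is_csp) {
  if (stack_pointer_is_csp && total_size.is_immediate) {
    // Immediates can be verified statically, when the code is generated.
    assert(total_size.immediate % 16 == 0);
  }
  // Register-held sizes are deliberately not checked: a misaligned csp
  // access will be caught by the hardware (or the simulator) instead.
}

int main() {
  CheckPushSize({true, 32}, true);   // OK: 32 is a multiple of 16
  CheckPushSize({false, 0}, true);   // register-sized: no static check
}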
 void MacroAssembler::Poke(const CPURegister& src, const Operand& offset) {
   if (offset.IsImmediate()) {
     ASSERT(offset.immediate() >= 0);
   } else if (emit_debug_code()) {
     Cmp(xzr, offset);
     Check(le, kStackAccessBelowStackPointer);
   }
 
   Str(src, MemOperand(StackPointer(), offset));
(...skipping 72 matching lines...)
   ldp(x29, x30, tos);
 
   ldp(d8, d9, tos);
   ldp(d10, d11, tos);
   ldp(d12, d13, tos);
   ldp(d14, d15, tos);
 }
 
 
 void MacroAssembler::AssertStackConsistency() {
-  if (emit_debug_code() && !csp.Is(StackPointer())) {
+  if (emit_debug_code()) {
     if (csp.Is(StackPointer())) {
-      // TODO(jbramley): Check for csp alignment if it is the stack pointer.
-    } else {
-      // TODO(jbramley): Currently we cannot use this assertion in Push because
-      // some calling code assumes that the flags are preserved. For an example,
-      // look at Builtins::Generate_ArgumentsAdaptorTrampoline.
-      Cmp(csp, StackPointer());
-      Check(ls, kTheCurrentStackPointerIsBelowCsp);
+      // We can't check the alignment of csp without using a scratch register
+      // (or clobbering the flags), but the processor (or simulator) will abort
+      // if it is not properly aligned during a load.
+      ldr(xzr, MemOperand(csp, 0));
+    } else if (FLAG_enable_slow_asserts) {
+      Label ok;
+      // Check that csp <= StackPointer(), preserving all registers and NZCV.
+      sub(StackPointer(), csp, StackPointer());
+      cbz(StackPointer(), &ok);  // Ok if csp == StackPointer().
+      tbnz(StackPointer(), kXSignBit, &ok);  // Ok if csp < StackPointer().
+
+      Abort(kTheCurrentStackPointerIsBelowCsp);
+
+      bind(&ok);
+      // Restore StackPointer().
+      sub(StackPointer(), csp, StackPointer());
     }
   }
 }
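The rewritten slow check is careful to use only instructions that leave NZCV and every other register intact: sub (unlike subs) does not set flags, cbz and tbnz branch without reading them, and the clobbered StackPointer() is recovered by a second identical sub, because x -> csp - x is its own inverse. The arithmetic, including the unsigned wrap-around that sets bit 63 when csp < StackPointer(), can be checked on the host (plain C++ standing in for the 64-bit registers):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t csp = 0x8000;
  const uint64_t cases[] = {0x7000, 0x8000, 0x9000};  // jssp below/equal/above
  for (uint64_t jssp : cases) {
    uint64_t sp = jssp;

    // sub(StackPointer(), csp, StackPointer()): sp now holds csp - jssp.
    sp = csp - sp;

    // cbz passes if csp == jssp; tbnz on the sign bit passes if csp < jssp,
    // because the subtraction wrapped around and set bit 63.
    bool ok = (sp == 0) || ((sp >> 63) & 1);
    assert(ok == (csp <= jssp));

    // Applying x -> csp - x a second time restores the original value.
    sp = csp - sp;
    assert(sp == jssp);
  }
}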
 
 
 void MacroAssembler::LoadRoot(Register destination,
                               Heap::RootListIndex index) {
   // TODO(jbramley): Most root values are constants, and can be synthesized
   // without a load. Refer to the ARM back end for details.
   Ldr(destination, MemOperand(root, index << kPointerSizeLog2));
(...skipping 3391 matching lines...)
 #endif
 
   // Abort is used in some contexts where csp is the stack pointer. In order to
   // simplify the CallRuntime code, make sure that jssp is the stack pointer.
   // There is no risk of register corruption here because Abort doesn't return.
   Register old_stack_pointer = StackPointer();
   SetStackPointer(jssp);
   Mov(jssp, old_stack_pointer);
 
   if (use_real_aborts()) {
+    // Avoid infinite recursion; Push contains some assertions that use Abort.
+    NoUseRealAbortsScope no_real_aborts(this);
+
     Mov(x0, Operand(Smi::FromInt(reason)));
     Push(x0);
 
     if (!has_frame_) {
       // We don't actually want to generate a pile of code for this, so just
       // claim there is a stack frame, without generating one.
       FrameScope scope(this, StackFrame::NONE);
       CallRuntime(Runtime::kAbort, 1);
     } else {
       CallRuntime(Runtime::kAbort, 1);
(...skipping 431 matching lines...)
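NoUseRealAbortsScope exists so that the assertions inside Push cannot re-enter the real-abort path and recurse forever. Guards like this are conventionally small RAII classes; a generic sketch of the idiom (an illustration of the pattern, not V8's actual class):

// Flip a flag on construction and restore the previous value on destruction,
// so the setting is scoped to a block even if the block exits early.
class ScopedFlagClear {
 public:
  explicit ScopedFlagClear(bool* flag) : flag_(flag), saved_(*flag) {
    *flag_ = false;
  }
  ~ScopedFlagClear() { *flag_ = saved_; }

  // Non-copyable: two copies would both try to restore the saved value.
  ScopedFlagClear(const ScopedFlagClear&) = delete;
  ScopedFlagClear& operator=(const ScopedFlagClear&) = delete;

 private:
  bool* flag_;
  bool saved_;
};

bool use_real_aborts = true;

void Abort() {
  if (use_real_aborts) {
    ScopedFlagClear no_real_aborts(&use_real_aborts);
    // Code here may assert and call Abort again, but the recursive call now
    // takes the simple path instead of re-entering this branch.
  }
}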
   }
 }
 
 
 #undef __
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_A64