Chromium Code Reviews

Unified Diff: src/a64/macro-assembler-a64.cc

Issue 169533002: A64: Tidy up Push and Pop TODOs. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 605 matching lines...)
 
   if (!csp.Is(StackPointer()) && emit_debug_code()) {
     // It is safe to leave csp where it is when unwinding the JavaScript stack,
     // but if we keep it matching StackPointer, the simulator can detect memory
     // accesses in the now-free part of the stack.
     Mov(csp, StackPointer());
   }
 }
 
 
+void MacroAssembler::PushPopQueue::PushQueued() {
+  if (queued_.empty()) return;
+
+  masm_->PrepareForPush(size_);
+
+  int count = queued_.size();
+  int index = 0;
+  while (index < count) {
+    // PushHelper can only handle registers with the same size and type, and it
+    // can handle only four at a time. Batch them up accordingly.
+    CPURegister batch[4] = {NoReg, NoReg, NoReg, NoReg};
+    int batch_index = 0;
+    do {
+      batch[batch_index++] = queued_[index++];
+    } while ((batch_index < 4) && (index < count) &&
+             batch[0].IsSameSizeAndType(queued_[index]));
+
+    masm_->PushHelper(batch_index, batch[0].SizeInBytes(),
+                      batch[0], batch[1], batch[2], batch[3]);
+  }
+
+  queued_.clear();
+}
+
+
+void MacroAssembler::PushPopQueue::PopQueued() {

rmcilroy 2014/02/17 12:31:06  If I read this right, doing the following would no…
jbramley 2014/02/17 12:43:21  That's correct. At the moment, PopQueued() isn't u…
+  if (queued_.empty()) return;
+
+  masm_->PrepareForPop(size_);
+
+  int count = queued_.size();
+  int index = 0;
+  while (index < count) {
+    // PopHelper can only handle registers with the same size and type, and it
+    // can handle only four at a time. Batch them up accordingly.
+    CPURegister batch[4] = {NoReg, NoReg, NoReg, NoReg};
+    int batch_index = 0;
+    do {
+      batch[batch_index++] = queued_[index++];
+    } while ((batch_index < 4) && (index < count) &&
+             batch[0].IsSameSizeAndType(queued_[index]));
+
+    masm_->PopHelper(batch_index, batch[0].SizeInBytes(),
+                     batch[0], batch[1], batch[2], batch[3]);
+  }
+
+  queued_.clear();
+}
+
+
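Note: the two queue-flush methods added above share the same batching idea: consecutive queued registers are grouped into runs of at most four that share a size and type, and each run becomes a single PushHelper or PopHelper call. As a rough illustration, here is that grouping logic extracted into standalone C++ (the Reg type and FlushQueue name are stand-ins invented for this sketch, not V8 API):

#include <cstdio>
#include <vector>

// Stand-in for CPURegister; only the properties the batching cares about.
struct Reg {
  int size_in_bytes;
  char type;  // e.g. 'r' = integer, 'v' = FP/SIMD
  bool IsSameSizeAndType(const Reg& other) const {
    return size_in_bytes == other.size_in_bytes && type == other.type;
  }
};

// Same grouping as PushQueued()/PopQueued(): runs of up to four
// same-size-and-type registers, one helper call per run.
void FlushQueue(const std::vector<Reg>& queued) {
  std::size_t index = 0;
  while (index < queued.size()) {
    Reg batch[4] = {};
    int batch_index = 0;
    do {
      batch[batch_index++] = queued[index++];
    } while (batch_index < 4 && index < queued.size() &&
             batch[0].IsSameSizeAndType(queued[index]));
    std::printf("Helper(count=%d, size=%d)\n", batch_index,
                batch[0].size_in_bytes);
  }
}

int main() {
  // Three integer registers followed by two FP registers produce
  // Helper(count=3, size=8) and then Helper(count=2, size=8).
  FlushQueue({{8, 'r'}, {8, 'r'}, {8, 'r'}, {8, 'v'}, {8, 'v'}});
}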
 void MacroAssembler::PushCPURegList(CPURegList registers) {
   int size = registers.RegisterSizeInBytes();
 
   PrepareForPush(registers.Count(), size);
   // Push up to four registers at a time because if the current stack pointer is
   // csp and reg_size is 32, registers must be pushed in blocks of four in order
   // to maintain the 16-byte alignment for csp.
   while (!registers.IsEmpty()) {
     int count_before = registers.Count();
     const CPURegister& src0 = registers.PopHighestIndex();
(...skipping 25 matching lines...)
 
   if (!csp.Is(StackPointer()) && emit_debug_code()) {
     // It is safe to leave csp where it is when unwinding the JavaScript stack,
     // but if we keep it matching StackPointer, the simulator can detect memory
     // accesses in the now-free part of the stack.
     Mov(csp, StackPointer());
   }
 }
 
 
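(Aside on the blocks-of-four comment in PushCPURegList above: csp must stay 16-byte aligned, and four 32-bit registers occupy exactly 4 × 4 = 16 bytes, so pushing W registers in groups of four is the smallest batch that preserves the alignment invariant.)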
-void MacroAssembler::PushMultipleTimes(int count, Register src) {
+void MacroAssembler::PushMultipleTimes(CPURegister src, int count) {
   int size = src.SizeInBytes();
 
   PrepareForPush(count, size);
 
   if (FLAG_optimize_for_size && count > 8) {
     Label loop;
     __ Mov(Tmp0(), count / 2);
     __ Bind(&loop);
     PushHelper(2, size, src, src, NoReg, NoReg);
     __ Subs(Tmp0(), Tmp0(), 1);
(...skipping 14 matching lines...)
     count -= 2;
   }
   if (count == 1) {
     PushHelper(1, size, src, NoReg, NoReg, NoReg);
     count -= 1;
   }
   ASSERT(count == 0);
 }
 
 
+void MacroAssembler::PushMultipleTimes(CPURegister src, Register count) {
+  PrepareForPush(Operand(count, UXTW, WhichPowerOf2(src.SizeInBytes())));
+
+  Register temp = AppropriateTempFor(count);
+
+  if (FLAG_optimize_for_size) {
+    Label loop, done;
+
+    Subs(temp, count, 1);
+    B(mi, &done);
+
+    // Push all registers individually, to save code size.
+    Bind(&loop);
+    Subs(temp, temp, 1);
+    PushHelper(1, src.SizeInBytes(), src, NoReg, NoReg, NoReg);
+    B(pl, &loop);
+
+    Bind(&done);
+  } else {
+    Label loop, leftover2, leftover1, done;
+
+    Subs(temp, count, 4);
+    B(mi, &leftover2);
+
+    // Push groups of four first.
+    Bind(&loop);
+    Subs(temp, temp, 4);
+    PushHelper(4, src.SizeInBytes(), src, src, src, src);
+    B(pl, &loop);
+
+    // Push groups of two.
+    Bind(&leftover2);
+    Tbz(count, 1, &leftover1);
+    PushHelper(2, src.SizeInBytes(), src, src, NoReg, NoReg);
+
+    // Push the last one (if required).
+    Bind(&leftover1);
+    Tbz(count, 0, &done);
+    PushHelper(1, src.SizeInBytes(), src, NoReg, NoReg, NoReg);
+
+    Bind(&done);
+  }
+}
+
+
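Note: in the new variable-count overload, PrepareForPush receives the total byte size as Operand(count, UXTW, WhichPowerOf2(src.SizeInBytes())), i.e. count << log2(size). The non-size-optimized path then covers every count: the Subs/B(pl) loop handles floor(count / 4) groups of four, Tbz on bit 1 pushes a pair, and Tbz on bit 0 pushes a single. A quick standalone check of that decomposition (plain C++ written for this note, not V8 code):

#include <cassert>

// Mirrors the structure of the variable-count path: floor(count / 4) calls of
// PushHelper(4, ...), one PushHelper(2, ...) if bit 1 of count is set, and one
// PushHelper(1, ...) if bit 0 is set.
int PushesEmitted(unsigned count) {
  int pushed = 0;
  for (unsigned groups = count / 4; groups > 0; --groups) pushed += 4;
  if (count & 2) pushed += 2;  // Tbz(count, 1, &leftover1) not taken
  if (count & 1) pushed += 1;  // Tbz(count, 0, &done) not taken
  return pushed;
}

int main() {
  for (unsigned c = 0; c < 100; ++c) assert(PushesEmitted(c) == int(c));
}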
 void MacroAssembler::PushHelper(int count, int size,
                                 const CPURegister& src0,
                                 const CPURegister& src1,
                                 const CPURegister& src2,
                                 const CPURegister& src3) {
   // Ensure that we don't unintentially modify scratch or debug registers.
   InstructionAccurateScope scope(this);
 
   ASSERT(AreSameSizeAndType(src0, src1, src2, src3));
   ASSERT(size == src0.SizeInBytes());
(...skipping 61 matching lines...)
       // for csp at all times.
       ldp(dst2, dst3, MemOperand(StackPointer(), 2 * size));
       ldp(dst0, dst1, MemOperand(StackPointer(), 4 * size, PostIndex));
       break;
     default:
       UNREACHABLE();
   }
 }
 
 
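(In the four-register pop visible above, taking X registers as an example with size == 8: the first ldp reads the upper pair at offset 2 × 8 = 16, and the second reads the lower pair at offset 0 with a post-index writeback of 4 × 8 = 32. The stack pointer only advances after the final access, so every load stays at or above it, which is required when the stack pointer is csp.)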
-void MacroAssembler::PrepareForPush(int count, int size) {
-  // TODO(jbramley): Use AssertStackConsistency here, if possible. See the
-  // AssertStackConsistency for details of why we can't at the moment.
-  if (csp.Is(StackPointer())) {
-    // If the current stack pointer is csp, then it must be aligned to 16 bytes
-    // on entry and the total size of the specified registers must also be a
-    // multiple of 16 bytes.
-    ASSERT((count * size) % 16 == 0);
-  } else {
-    // Even if the current stack pointer is not the system stack pointer (csp),
-    // the system stack pointer will still be modified in order to comply with
-    // ABI rules about accessing memory below the system stack pointer.
-    BumpSystemStackPointer(count * size);
-  }
-}
-
-
-void MacroAssembler::PrepareForPop(int count, int size) {
-  AssertStackConsistency();
-  if (csp.Is(StackPointer())) {
-    // If the current stack pointer is csp, then it must be aligned to 16 bytes
-    // on entry and the total size of the specified registers must also be a
-    // multiple of 16 bytes.
-    ASSERT((count * size) % 16 == 0);
-  }
-}
+void MacroAssembler::PrepareForPush(Operand total_size) {
+  AssertStackConsistency();
+  if (csp.Is(StackPointer())) {
+    // If the current stack pointer is csp, then it must be aligned to 16 bytes
+    // on entry and the total size of the specified registers must also be a
+    // multiple of 16 bytes.
+    if (total_size.IsImmediate()) {
+      ASSERT((total_size.immediate() % 16) == 0);
+    }
+
+    // Don't check access size for non-immediate sizes. It's difficult to do
+    // well, and it will be caught by hardware (or the simulator) anyway.
+  } else {
+    // Even if the current stack pointer is not the system stack pointer (csp),
+    // the system stack pointer will still be modified in order to comply with
+    // ABI rules about accessing memory below the system stack pointer.
+    BumpSystemStackPointer(total_size);
+  }
+}
+
+
+void MacroAssembler::PrepareForPop(Operand total_size) {
+  AssertStackConsistency();
+  if (csp.Is(StackPointer())) {
+    // If the current stack pointer is csp, then it must be aligned to 16 bytes
+    // on entry and the total size of the specified registers must also be a
+    // multiple of 16 bytes.
+    if (total_size.IsImmediate()) {
+      ASSERT((total_size.immediate() % 16) == 0);
+    }
+
+    // Don't check access size for non-immediate sizes. It's difficult to do
+    // well, and it will be caught by hardware (or the simulator) anyway.
+  }
+}
+
 
 void MacroAssembler::Poke(const CPURegister& src, const Operand& offset) {
   if (offset.IsImmediate()) {
     ASSERT(offset.immediate() >= 0);
   } else if (emit_debug_code()) {
     Cmp(xzr, offset);
     Check(le, kStackAccessBelowStackPointer);
   }
 
   Str(src, MemOperand(StackPointer(), offset));
(...skipping 72 matching lines...)
   ldp(x29, x30, tos);
 
   ldp(d8, d9, tos);
   ldp(d10, d11, tos);
   ldp(d12, d13, tos);
   ldp(d14, d15, tos);
 }
 
 
 void MacroAssembler::AssertStackConsistency() {
-  if (emit_debug_code() && !csp.Is(StackPointer())) {
-    if (csp.Is(StackPointer())) {
-      // TODO(jbramley): Check for csp alignment if it is the stack pointer.
-    } else {
-      // TODO(jbramley): Currently we cannot use this assertion in Push because
-      // some calling code assumes that the flags are preserved. For an example,
-      // look at Builtins::Generate_ArgumentsAdaptorTrampoline.
-      Cmp(csp, StackPointer());
-      Check(ls, kTheCurrentStackPointerIsBelowCsp);
-    }
-  }
-}
+  if (emit_debug_code()) {
+    if (csp.Is(StackPointer())) {
+      // We can't check the alignment of csp without using a scratch register
+      // (or clobbering the flags), but the processor (or simulator) will abort
+      // if it is not properly aligned during a load.
+      ldr(xzr, MemOperand(csp, 0));
+    } else if (FLAG_enable_slow_asserts) {
+      Label ok;
+      // Check that csp <= StackPointer(), preserving all registers and NZCV.
+      sub(StackPointer(), csp, StackPointer());
+      cbz(StackPointer(), &ok);  // Ok if csp == StackPointer().
+      tbnz(StackPointer(), kXSignBit, &ok);  // Ok if csp < StackPointer().
+
+      Abort(kTheCurrentStackPointerIsBelowCsp);
+
+      bind(&ok);
+      // Restore StackPointer().
+      sub(StackPointer(), csp, StackPointer());
+    }
+  }
+}
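Note: the slow-assert branch added above compares csp with StackPointer() using no scratch register and without touching NZCV. It temporarily overwrites StackPointer() with the difference and recovers the original afterwards, because x ↦ csp − x is an involution: if t = csp − sp, then csp − t = sp. A small C++ model of that round trip (ordinary integers standing in for registers; not generated code):

#include <cassert>
#include <cstdint>

int main() {
  const int64_t csp = 0x7fff0000;  // system stack pointer (example value)
  int64_t sp = 0x7fff4000;         // StackPointer(); csp < sp is acceptable

  sp = csp - sp;                   // sub(StackPointer(), csp, StackPointer())
  bool ok = (sp == 0) ||           // cbz: csp == StackPointer()
            (sp < 0);              // tbnz on the sign bit: csp < StackPointer()
  assert(ok);                      // else Abort(kTheCurrentStackPointerIsBelowCsp)

  sp = csp - sp;                   // the same instruction restores the value
  assert(sp == 0x7fff4000);
}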
 
 
 void MacroAssembler::LoadRoot(Register destination,
                               Heap::RootListIndex index) {
   // TODO(jbramley): Most root values are constants, and can be synthesized
   // without a load. Refer to the ARM back end for details.
   Ldr(destination, MemOperand(root, index << kPointerSizeLog2));
(...skipping 3412 matching lines...)
   if (FLAG_trap_on_abort) {
     Brk(0);
     return;
   }
 #endif
 
   Label msg_address;
   Adr(x0, &msg_address);
 
   if (use_real_aborts()) {
+    // Avoid infinite recursion; Push contains some assertions that use Abort.
+    NoUseRealAbortsScope no_real_aborts(this);
+
     // Split the message pointer into two SMI to avoid the GC
     // trying to scan the string.
     STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0));
     SmiTag(x1, x0);
     Bic(x0, x0, kSmiShiftMask);
 
     Push(x0, x1);
 
     if (!has_frame_) {
       // We don't actually want to generate a pile of code for this, so just
(...skipping 440 matching lines...)
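Note: the SMI split in the Abort hunk above keeps the GC from scanning the message pointer. With kSmiShift == 32 and a zero tag, any 64-bit value whose low 32 bits are clear decodes as a small integer, so the pointer travels as two such values: the low half shifted up by SmiTag, the high half isolated by Bic. A plain C++ model of the split and reassembly (the mask definition, example address, and reassembly step are illustrative assumptions, not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t kSmiShiftMask = (uint64_t{1} << 32) - 1;  // low 32 bits
  const uint64_t msg = 0x0000123456789abcULL;  // hypothetical address in x0

  uint64_t x1 = msg << 32;             // SmiTag(x1, x0): low half as a SMI
  uint64_t x0 = msg & ~kSmiShiftMask;  // Bic(x0, x0, kSmiShiftMask): high half

  // Both halves have zero low bits, so the GC sees two SMIs, not a pointer.
  assert((x0 & kSmiShiftMask) == 0);
  assert((x1 & kSmiShiftMask) == 0);

  // The abort handler can reassemble the original pointer:
  assert(((x1 >> 32) | x0) == msg);
}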
   }
 }
 
 
 #undef __
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_A64
