OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 678 matching lines...)
689 ASSERT(!dst.is(src2)); | 689 ASSERT(!dst.is(src2)); |
690 if (on_not_smi_result == NULL) { | 690 if (on_not_smi_result == NULL) { |
691 // No overflow checking. Use only when it's known that | 691 // No overflow checking. Use only when it's known that |
692 // overflowing is impossible. | 692 // overflowing is impossible. |
693 if (dst.is(src1)) { | 693 if (dst.is(src1)) { |
694 addq(dst, src2); | 694 addq(dst, src2); |
695 } else { | 695 } else { |
696 movq(dst, src1); | 696 movq(dst, src1); |
697 addq(dst, src2); | 697 addq(dst, src2); |
698 } | 698 } |
699 Assert(no_overflow, "Smi addition onverflow"); | 699 Assert(no_overflow, "Smi addition overflow"); |
700 } else if (dst.is(src1)) { | 700 } else if (dst.is(src1)) { |
701 addq(dst, src2); | 701 movq(kScratchRegister, src1); |
| 702 addq(kScratchRegister, src2); |
702 Label smi_result; | 703 Label smi_result; |
703 j(no_overflow, &smi_result); | 704 j(overflow, on_not_smi_result); |
704 // Restore src1. | 705 movq(dst, kScratchRegister); |
705 subq(src1, src2); | |
706 jmp(on_not_smi_result); | |
707 bind(&smi_result); | |
708 } else { | 706 } else { |
709 movq(dst, src1); | 707 movq(dst, src1); |
710 addq(dst, src2); | 708 addq(dst, src2); |
711 j(overflow, on_not_smi_result); | 709 j(overflow, on_not_smi_result); |
712 } | 710 } |
713 } | 711 } |
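
Note on the SmiAdd rewrite above: the old dst.is(src1) path added in place and then, on overflow, subtracted src2 back to restore src1 before jumping; the new path adds into kScratchRegister, so src1 is never clobbered and no undo step is needed. A minimal C++ sketch of the intended semantics (not V8 code; __builtin_add_overflow is assumed available, as in GCC/Clang):

    #include <cstdint>
    #include <optional>

    // Checked addition in the style the macro assembler emits: overflow of
    // the raw 64-bit add means the result is not a valid smi, mirroring
    // j(overflow, on_not_smi_result).
    std::optional<int64_t> CheckedSmiAdd(int64_t a, int64_t b) {
      int64_t sum;
      if (__builtin_add_overflow(a, b, &sum)) {
        return std::nullopt;  // bail out; a and b are left untouched
      }
      return sum;  // only now is the destination written
    }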
714 | 712 |
715 | 713 |
716 void MacroAssembler::SmiSub(Register dst, | 714 void MacroAssembler::SmiSub(Register dst, |
717 Register src1, | 715 Register src1, |
718 Register src2, | 716 Register src2, |
719 Label* on_not_smi_result) { | 717 Label* on_not_smi_result) { |
720 ASSERT(!dst.is(src2)); | 718 ASSERT(!dst.is(src2)); |
721 if (on_not_smi_result == NULL) { | 719 if (on_not_smi_result == NULL) { |
722 // No overflow checking. Use only when it's known that | 720 // No overflow checking. Use only when it's known that |
723 // overflowing is impossible (e.g., subtracting two positive smis). | 721 // overflowing is impossible (e.g., subtracting two positive smis). |
724 if (dst.is(src1)) { | 722 if (dst.is(src1)) { |
725 subq(dst, src2); | 723 subq(dst, src2); |
726 } else { | 724 } else { |
727 movq(dst, src1); | 725 movq(dst, src1); |
728 subq(dst, src2); | 726 subq(dst, src2); |
729 } | 727 } |
730 Assert(no_overflow, "Smi substraction onverflow"); | 728 Assert(no_overflow, "Smi subtraction overflow"); |
731 } else if (dst.is(src1)) { | 729 } else if (dst.is(src1)) { |
| 730 cmpq(dst, src2); |
| 731 j(overflow, on_not_smi_result); |
732 subq(dst, src2); | 732 subq(dst, src2); |
733 Label smi_result; | |
734 j(no_overflow, &smi_result); | |
735 // Restore src1. | |
736 addq(src1, src2); | |
737 jmp(on_not_smi_result); | |
738 bind(&smi_result); | |
739 } else { | 733 } else { |
740 movq(dst, src1); | 734 movq(dst, src1); |
741 subq(dst, src2); | 735 subq(dst, src2); |
742 j(overflow, on_not_smi_result); | 736 j(overflow, on_not_smi_result); |
743 } | 737 } |
744 } | 738 } |
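
Note on the register-register SmiSub: cmpq performs the same subtraction as subq but only updates the flags, so the overflow check can run before dst is modified, and the old restore-and-jump dance goes away. The equivalent logic, sketched in C++ (again assuming a GCC/Clang-style toolchain):

    #include <cstdint>

    // "cmpq then subq": decide whether the subtraction would overflow
    // without touching the destination, then perform it for real.
    bool SmiSubWouldOverflow(int64_t a, int64_t b) {
      int64_t discard;
      return __builtin_sub_overflow(a, b, &discard);  // flags-only compare
    }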
745 | 739 |
746 | 740 |
747 void MacroAssembler::SmiSub(Register dst, | 741 void MacroAssembler::SmiSub(Register dst, |
748 Register src1, | 742 Register src1, |
749 const Operand& src2, | 743 const Operand& src2, |
750 Label* on_not_smi_result) { | 744 Label* on_not_smi_result) { |
751 if (on_not_smi_result == NULL) { | 745 if (on_not_smi_result == NULL) { |
752 // No overflow checking. Use only when it's known that | 746 // No overflow checking. Use only when it's known that |
753 // overflowing is impossible (e.g., subtracting two positive smis). | 747 // overflowing is impossible (e.g., subtracting two positive smis). |
754 if (dst.is(src1)) { | 748 if (dst.is(src1)) { |
755 subq(dst, src2); | 749 subq(dst, src2); |
756 } else { | 750 } else { |
757 movq(dst, src1); | 751 movq(dst, src1); |
758 subq(dst, src2); | 752 subq(dst, src2); |
759 } | 753 } |
760 Assert(no_overflow, "Smi substraction onverflow"); | 754 Assert(no_overflow, "Smi subtraction overflow"); |
761 } else if (dst.is(src1)) { | 755 } else if (dst.is(src1)) { |
762 subq(dst, src2); | 756 movq(kScratchRegister, src1); |
763 Label smi_result; | 757 subq(kScratchRegister, src2); |
764 j(no_overflow, &smi_result); | 758 j(overflow, on_not_smi_result); |
765 // Restore src1. | 759 movq(src1, kScratchRegister); |
766 addq(src1, src2); | |
767 jmp(on_not_smi_result); | |
768 bind(&smi_result); | |
769 } else { | 760 } else { |
770 movq(dst, src1); | 761 movq(dst, src1); |
771 subq(dst, src2); | 762 subq(dst, src2); |
772 j(overflow, on_not_smi_result); | 763 j(overflow, on_not_smi_result); |
773 } | 764 } |
774 } | 765 } |
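
Side note on the dst.is(src1) branch above: dst and src1 alias on that path, so movq(src1, kScratchRegister) does store the result into dst; spelling it movq(dst, kScratchRegister), as the register-register SmiAdd now does, would make the intent clearer.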
775 | 766 |
776 void MacroAssembler::SmiMul(Register dst, | 767 void MacroAssembler::SmiMul(Register dst, |
777 Register src1, | 768 Register src1, |
778 Register src2, | 769 Register src2, |
(...skipping 97 matching lines...)
876 Smi* constant, | 867 Smi* constant, |
877 Label* on_not_smi_result) { | 868 Label* on_not_smi_result) { |
878 if (constant->value() == 0) { | 869 if (constant->value() == 0) { |
879 if (!dst.is(src)) { | 870 if (!dst.is(src)) { |
880 movq(dst, src); | 871 movq(dst, src); |
881 } | 872 } |
882 } else if (dst.is(src)) { | 873 } else if (dst.is(src)) { |
883 ASSERT(!dst.is(kScratchRegister)); | 874 ASSERT(!dst.is(kScratchRegister)); |
884 | 875 |
885 Move(kScratchRegister, constant); | 876 Move(kScratchRegister, constant); |
886 addq(dst, kScratchRegister); | 877 addq(kScratchRegister, dst); |
887 Label result_ok; | 878 j(overflow, on_not_smi_result); |
888 j(no_overflow, &result_ok); | 879 movq(dst, kScratchRegister); |
889 subq(dst, kScratchRegister); | |
890 jmp(on_not_smi_result); | |
891 bind(&result_ok); | |
892 } else { | 880 } else { |
893 Move(dst, constant); | 881 Move(dst, constant); |
894 addq(dst, src); | 882 addq(dst, src); |
895 j(overflow, on_not_smi_result); | 883 j(overflow, on_not_smi_result); |
896 } | 884 } |
897 } | 885 } |
898 | 886 |
899 | 887 |
900 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { | 888 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { |
901 if (constant->value() == 0) { | 889 if (constant->value() == 0) { |
902 if (!dst.is(src)) { | 890 if (!dst.is(src)) { |
903 movq(dst, src); | 891 movq(dst, src); |
904 } | 892 } |
905 } else if (dst.is(src)) { | 893 } else if (dst.is(src)) { |
906 ASSERT(!dst.is(kScratchRegister)); | 894 ASSERT(!dst.is(kScratchRegister)); |
907 | 895 |
908 Move(kScratchRegister, constant); | 896 Move(kScratchRegister, constant); |
909 subq(dst, kScratchRegister); | 897 subq(dst, kScratchRegister); |
910 } else { | 898 } else { |
911 // Subtract by adding the negative, to do it in two operations. | 899 // Subtract by adding the negative, to do it in two operations. |
912 if (constant->value() == Smi::kMinValue) { | 900 if (constant->value() == Smi::kMinValue) { |
913 Move(kScratchRegister, constant); | 901 Move(dst, constant); |
914 movq(dst, src); | 902 // Adding and subtracting the min-value gives the same result; it only |
915 subq(dst, kScratchRegister); | 903 // differs on the overflow bit, which we don't check here. |
| 904 addq(dst, src); |
916 } else { | 905 } else { |
| 906 // Subtract by adding the negation. |
917 Move(dst, Smi::FromInt(-constant->value())); | 907 Move(dst, Smi::FromInt(-constant->value())); |
918 addq(dst, src); | 908 addq(dst, src); |
919 } | 909 } |
920 } | 910 } |
921 } | 911 } |
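
The kMinValue case above leans on a two's-complement identity: the most negative value is its own negation, so x + kMinValue and x - kMinValue produce identical bit patterns and differ only in the overflow flag, which this unchecked variant deliberately ignores. A small standalone illustration (plain C++, unsigned arithmetic to keep the wraparound well-defined):

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t min = UINT64_C(1) << 63;  // bit pattern of the most negative word
      uint64_t x = 42;
      // Both lines print the same value: 800000000000002a.
      std::printf("%016llx\n", (unsigned long long)(x - min));
      std::printf("%016llx\n", (unsigned long long)(x + min));
      return 0;
    }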
922 | 912 |
923 | 913 |
924 void MacroAssembler::SmiSubConstant(Register dst, | 914 void MacroAssembler::SmiSubConstant(Register dst, |
925 Register src, | 915 Register src, |
926 Smi* constant, | 916 Smi* constant, |
927 Label* on_not_smi_result) { | 917 Label* on_not_smi_result) { |
928 if (constant->value() == 0) { | 918 if (constant->value() == 0) { |
929 if (!dst.is(src)) { | 919 if (!dst.is(src)) { |
930 movq(dst, src); | 920 movq(dst, src); |
931 } | 921 } |
932 } else if (dst.is(src)) { | 922 } else if (dst.is(src)) { |
933 ASSERT(!dst.is(kScratchRegister)); | 923 ASSERT(!dst.is(kScratchRegister)); |
934 | 924 if (constant->value() == Smi::kMinValue) { |
935 Move(kScratchRegister, constant); | 925 // Subtracting min-value from any non-negative value will overflow. |
936 subq(dst, kScratchRegister); | 926 // We test for non-negativity before doing the subtraction. |
937 Label sub_success; | 927 testq(src, src); |
938 j(no_overflow, &sub_success); | 928 j(not_sign, on_not_smi_result); |
939 addq(src, kScratchRegister); | 929 Move(kScratchRegister, constant); |
940 jmp(on_not_smi_result); | 930 subq(dst, kScratchRegister); |
941 bind(&sub_success); | 931 } else { |
| 932 // Subtract by adding the negation. |
| 933 Move(kScratchRegister, Smi::FromInt(-constant->value())); |
| 934 addq(kScratchRegister, dst); |
| 935 j(overflow, on_not_smi_result); |
| 936 movq(dst, kScratchRegister); |
| 937 } |
942 } else { | 938 } else { |
943 if (constant->value() == Smi::kMinValue) { | 939 if (constant->value() == Smi::kMinValue) { |
944 Move(kScratchRegister, constant); | 940 // Subtracting min-value from any non-negative value will overflow. |
945 movq(dst, src); | 941 // We test for non-negativity before doing the subtraction. |
946 subq(dst, kScratchRegister); | 942 testq(src, src); |
947 j(overflow, on_not_smi_result); | 943 j(not_sign, on_not_smi_result); |
| 944 Move(dst, constant); |
| 945 // Adding and subtracting the min-value gives the same result; it only |
| 946 // differs on the overflow bit, which we don't check here. |
| 947 addq(dst, src); |
948 } else { | 948 } else { |
| 949 // Subtract by adding the negation. |
949 Move(dst, Smi::FromInt(-(constant->value()))); | 950 Move(dst, Smi::FromInt(-(constant->value()))); |
950 addq(dst, src); | 951 addq(dst, src); |
951 j(overflow, on_not_smi_result); | 952 j(overflow, on_not_smi_result); |
952 } | 953 } |
953 } | 954 } |
954 } | 955 } |
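
The testq/not_sign pre-test in the checked kMinValue paths works because subtracting the most negative value overflows exactly when the other operand is non-negative: for x >= 0 the true result x + 2^63 is above the representable range, while for x < 0 it lands in [0, 2^63). A sketch of that equivalence (once more assuming __builtin_sub_overflow):

    #include <cstdint>

    // Returns true iff x >= 0, which is precisely the condition
    // testq(src, src); j(not_sign, on_not_smi_result) branches on.
    bool SubMinValueOverflows(int64_t x) {
      int64_t discard;
      return __builtin_sub_overflow(x, INT64_MIN, &discard);
    }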
955 | 956 |
956 | 957 |
957 void MacroAssembler::SmiDiv(Register dst, | 958 void MacroAssembler::SmiDiv(Register dst, |
958 Register src1, | 959 Register src1, |
(...skipping 1754 matching lines...)
2713 CPU::FlushICache(address_, size_); | 2714 CPU::FlushICache(address_, size_); |
2714 | 2715 |
2715 // Check that the code was patched as expected. | 2716 // Check that the code was patched as expected. |
2716 ASSERT(masm_.pc_ == address_ + size_); | 2717 ASSERT(masm_.pc_ == address_ + size_); |
2717 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2718 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2718 } | 2719 } |
2719 | 2720 |
2720 } } // namespace v8::internal | 2721 } } // namespace v8::internal |
2721 | 2722 |
2722 #endif // V8_TARGET_ARCH_X64 | 2723 #endif // V8_TARGET_ARCH_X64 |