Chromium Code Reviews

Diff: runtime/vm/intrinsifier_ia32.cc

Issue 1005353004: ia32: Remove redundant CTX preservation from intrinsics; tweak definition of CTX. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 5 years, 9 months ago
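The pattern in every hunk below is the same: these intrinsics used to push CTX (ESI) to free it as a scratch register, which put one extra word between ESP and the incoming arguments. With that save gone, each Address(ESP, n * kWordSize) argument load drops by exactly one word. Bigint_absAdd's first load, quoted from the diff:

    // Before: CTX is saved above the arguments.
    __ pushl(CTX);
    __ movl(EDI, Address(ESP, 6 * kWordSize));  // digits

    // After: only the return address sits above the arguments.
    __ movl(EDI, Address(ESP, 5 * kWordSize));  // digits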
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 //
 // The intrinsic code below is executed before a method has built its frame.
 // The return address is on the stack and the arguments below it.
 // Registers EDX (arguments descriptor) and ECX (function) must be preserved.
 // Each intrinsification method returns true if the corresponding
 // Dart method was intrinsified.

(...skipping 792 matching lines...)
   __ SmiTag(EAX);
   __ ret();
 }


 void Intrinsifier::Bigint_absAdd(Assembler* assembler) {
   // static void _absAdd(Uint32List digits, int used,
   //                     Uint32List a_digits, int a_used,
   //                     Uint32List r_digits)

-  // Preserve CTX to free ESI.
-  __ pushl(CTX);
-  ASSERT(CTX == ESI);
-
-  __ movl(EDI, Address(ESP, 6 * kWordSize));  // digits
-  __ movl(EAX, Address(ESP, 5 * kWordSize));  // used is Smi
+  __ movl(EDI, Address(ESP, 5 * kWordSize));  // digits
+  __ movl(EAX, Address(ESP, 4 * kWordSize));  // used is Smi
   __ SmiUntag(EAX);  // used > 0.
-  __ movl(ESI, Address(ESP, 4 * kWordSize));  // a_digits
-  __ movl(ECX, Address(ESP, 3 * kWordSize));  // a_used is Smi
+  __ movl(ESI, Address(ESP, 3 * kWordSize));  // a_digits
+  __ movl(ECX, Address(ESP, 2 * kWordSize));  // a_used is Smi
   __ SmiUntag(ECX);  // a_used > 0.
-  __ movl(EBX, Address(ESP, 2 * kWordSize));  // r_digits
+  __ movl(EBX, Address(ESP, 1 * kWordSize));  // r_digits

   // Precompute 'used - a_used' now so that carry flag is not lost later.
   __ subl(EAX, ECX);
   __ incl(EAX);  // To account for the extra test between loops.
   __ pushl(EAX);

   __ xorl(EDX, EDX);  // EDX = 0, carry flag = 0.
   Label add_loop;
   __ Bind(&add_loop);
   // Loop a_used times, ECX = a_used, ECX > 0.
(...skipping 17 matching lines...)
   __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX);
   __ incl(EDX);  // Does not affect carry flag.
   __ decl(ECX);  // Does not affect carry flag.
   __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);

   __ Bind(&last_carry);
   __ movl(EAX, Immediate(0));
   __ adcl(EAX, Immediate(0));
   __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX);

-  // Restore CTX and return.
-  __ popl(CTX);
   // Returning Object::null() is not required, since this method is private.
   __ ret();
 }

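For reference, a minimal C++ sketch of what the intrinsified _absAdd computes; this is an illustration only (not VM source, names are hypothetical), assuming 32-bit digits and used >= a_used > 0 as the comments above state:

    #include <cstdint>

    void AbsAdd(const uint32_t* digits, int used,
                const uint32_t* a_digits, int a_used,
                uint32_t* r_digits) {
      uint64_t carry = 0;
      int i = 0;
      for (; i < a_used; ++i) {    // Loop a_used times (the adcl chain).
        carry += static_cast<uint64_t>(digits[i]) + a_digits[i];
        r_digits[i] = static_cast<uint32_t>(carry);
        carry >>= 32;
      }
      for (; i < used; ++i) {      // Loop used - a_used times (carry_loop).
        carry += digits[i];
        r_digits[i] = static_cast<uint32_t>(carry);
        carry >>= 32;
      }
      r_digits[i] = static_cast<uint32_t>(carry);  // last_carry: final digit.
    }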
 void Intrinsifier::Bigint_absSub(Assembler* assembler) {
   // static void _absSub(Uint32List digits, int used,
   //                     Uint32List a_digits, int a_used,
   //                     Uint32List r_digits)

-  // Preserve CTX to free ESI.
-  __ pushl(CTX);
-  ASSERT(CTX == ESI);
-
-  __ movl(EDI, Address(ESP, 6 * kWordSize));  // digits
-  __ movl(EAX, Address(ESP, 5 * kWordSize));  // used is Smi
+  __ movl(EDI, Address(ESP, 5 * kWordSize));  // digits
+  __ movl(EAX, Address(ESP, 4 * kWordSize));  // used is Smi
   __ SmiUntag(EAX);  // used > 0.
-  __ movl(ESI, Address(ESP, 4 * kWordSize));  // a_digits
-  __ movl(ECX, Address(ESP, 3 * kWordSize));  // a_used is Smi
+  __ movl(ESI, Address(ESP, 3 * kWordSize));  // a_digits
+  __ movl(ECX, Address(ESP, 2 * kWordSize));  // a_used is Smi
   __ SmiUntag(ECX);  // a_used > 0.
-  __ movl(EBX, Address(ESP, 2 * kWordSize));  // r_digits
+  __ movl(EBX, Address(ESP, 1 * kWordSize));  // r_digits

   // Precompute 'used - a_used' now so that carry flag is not lost later.
   __ subl(EAX, ECX);
   __ incl(EAX);  // To account for the extra test between loops.
   __ pushl(EAX);

   __ xorl(EDX, EDX);  // EDX = 0, carry flag = 0.
   Label sub_loop;
   __ Bind(&sub_loop);
   // Loop a_used times, ECX = a_used, ECX > 0.
(...skipping 13 matching lines...)
   __ Bind(&carry_loop);
   // Loop used - a_used times, ECX = used - a_used, ECX > 0.
   __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset()));
   __ sbbl(EAX, Immediate(0));
   __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX);
   __ incl(EDX);  // Does not affect carry flag.
   __ decl(ECX);  // Does not affect carry flag.
   __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);

   __ Bind(&done);
-  // Restore CTX and return.
-  __ popl(CTX);
   // Returning Object::null() is not required, since this method is private.
   __ ret();
 }

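The same shape with borrows instead of carries; a hypothetical C++ sketch (not VM source) of the sbbl chain, assuming the minuend digits are at least as large as a_digits so no borrow is left at the end:

    #include <cstdint>

    void AbsSub(const uint32_t* digits, int used,
                const uint32_t* a_digits, int a_used,
                uint32_t* r_digits) {
      uint32_t borrow = 0;
      int i = 0;
      for (; i < a_used; ++i) {    // Loop a_used times (the sbbl chain).
        uint64_t d = static_cast<uint64_t>(digits[i]) - a_digits[i] - borrow;
        r_digits[i] = static_cast<uint32_t>(d);
        borrow = static_cast<uint32_t>(d >> 32) & 1;  // 1 if it wrapped.
      }
      for (; i < used; ++i) {      // Loop used - a_used times (carry_loop).
        uint64_t d = static_cast<uint64_t>(digits[i]) - borrow;
        r_digits[i] = static_cast<uint32_t>(d);
        borrow = static_cast<uint32_t>(d >> 32) & 1;
      }
    }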
 void Intrinsifier::Bigint_mulAdd(Assembler* assembler) {
   // Pseudo code:
   // static int _mulAdd(Uint32List x_digits, int xi,
   //                    Uint32List m_digits, int i,
   //                    Uint32List a_digits, int j, int n) {
(...skipping 26 matching lines...)
   __ movl(EAX, Address(ESP, 6 * kWordSize));  // xi is Smi
   __ movl(EBX, FieldAddress(ECX, EAX, TIMES_2, TypedData::data_offset()));
   __ testl(EBX, EBX);
   __ j(ZERO, &no_op, Assembler::kNearJump);

   // EDX = SmiUntag(n), no_op if n == 0
   __ movl(EDX, Address(ESP, 1 * kWordSize));
   __ SmiUntag(EDX);
   __ j(ZERO, &no_op, Assembler::kNearJump);

-  // Preserve CTX to free ESI.
-  __ pushl(CTX);
-  ASSERT(CTX == ESI);
-
   // EDI = mip = &m_digits[i >> 1]
-  __ movl(EDI, Address(ESP, 6 * kWordSize));  // m_digits
-  __ movl(EAX, Address(ESP, 5 * kWordSize));  // i is Smi
+  __ movl(EDI, Address(ESP, 5 * kWordSize));  // m_digits
+  __ movl(EAX, Address(ESP, 4 * kWordSize));  // i is Smi
   __ leal(EDI, FieldAddress(EDI, EAX, TIMES_2, TypedData::data_offset()));

   // ESI = ajp = &a_digits[j >> 1]
-  __ movl(ESI, Address(ESP, 4 * kWordSize));  // a_digits
-  __ movl(EAX, Address(ESP, 3 * kWordSize));  // j is Smi
+  __ movl(ESI, Address(ESP, 3 * kWordSize));  // a_digits
+  __ movl(EAX, Address(ESP, 2 * kWordSize));  // j is Smi
   __ leal(ESI, FieldAddress(ESI, EAX, TIMES_2, TypedData::data_offset()));

   // Save n
   __ pushl(EDX);
   Address n_addr = Address(ESP, 0 * kWordSize);

   // ECX = c = 0
   __ xorl(ECX, ECX);

   Label muladd_loop;
(...skipping 38 matching lines...)
   __ j(NOT_CARRY, &done, Assembler::kNearJump);

   Label propagate_carry_loop;
   __ Bind(&propagate_carry_loop);
   __ addl(ESI, Immediate(Bigint::kBytesPerDigit));
   __ incl(Address(ESI, 0));  // c == 0 or 1
   __ j(CARRY, &propagate_carry_loop, Assembler::kNearJump);

   __ Bind(&done);
   __ Drop(1);  // n
-  // Restore CTX and return.
-  __ popl(CTX);

   __ Bind(&no_op);
   __ movl(EAX, Immediate(Smi::RawValue(1)));  // One digit processed.
   __ ret();
 }

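The pseudo code above is partially elided, so the following C++ sketch is a hedged reconstruction of the multiply-accumulate step, not the VM's own code. It takes the arguments the way the intrinsic sees them: xi, i, j and n are Smi-tagged (value << 1), which is why the register comments above untag with >> 1:

    #include <cstdint>

    int MulAdd(const uint32_t* x_digits, int xi,
               const uint32_t* m_digits, int i,
               uint32_t* a_digits, int j, int n) {
      uint32_t x = x_digits[xi >> 1];
      if (x == 0) return 1;                     // First no_op exit above.
      n >>= 1;                                  // SmiUntag(n).
      if (n == 0) return 1;                     // Second no_op exit above.
      const uint32_t* mip = &m_digits[i >> 1];  // EDI
      uint32_t* ajp = &a_digits[j >> 1];        // ESI
      uint32_t c = 0;                           // ECX
      do {                                      // muladd_loop
        uint64_t t = static_cast<uint64_t>(x) * *mip++ + *ajp + c;
        *ajp++ = static_cast<uint32_t>(t);      // low32(t)
        c = static_cast<uint32_t>(t >> 32);     // high32(t)
      } while (--n > 0);
      while (c != 0) {                          // propagate_carry_loop
        uint64_t t = static_cast<uint64_t>(*ajp) + c;
        *ajp++ = static_cast<uint32_t>(t);
        c = static_cast<uint32_t>(t >> 32);     // c == 0 or 1
      }
      return 1;                                 // One digit of x processed.
    }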
 void Intrinsifier::Bigint_sqrAdd(Assembler* assembler) {
   // Pseudo code:
   // static int _sqrAdd(Uint32List x_digits, int i,
(...skipping 26 matching lines...)
   __ movl(EAX, Address(ESP, 3 * kWordSize));  // i is Smi
   __ leal(EDI, FieldAddress(EDI, EAX, TIMES_2, TypedData::data_offset()));

   // EBX = x = *xip++, return if x == 0
   Label x_zero;
   __ movl(EBX, Address(EDI, 0));
   __ cmpl(EBX, Immediate(0));
   __ j(EQUAL, &x_zero, Assembler::kNearJump);
   __ addl(EDI, Immediate(Bigint::kBytesPerDigit));

-  // Preserve CTX to free ESI.
-  __ pushl(CTX);
-  ASSERT(CTX == ESI);
-
   // ESI = ajp = &a_digits[i]
-  __ movl(ESI, Address(ESP, 3 * kWordSize));  // a_digits
+  __ movl(ESI, Address(ESP, 2 * kWordSize));  // a_digits
   __ leal(ESI, FieldAddress(ESI, EAX, TIMES_4, TypedData::data_offset()));

   // EDX:EAX = t = x*x + *ajp
   __ movl(EAX, EBX);
   __ mull(EBX);
   __ addl(EAX, Address(ESI, 0));
   __ adcl(EDX, Immediate(0));

   // *ajp++ = low32(t)
   __ movl(Address(ESI, 0), EAX);
   __ addl(ESI, Immediate(Bigint::kBytesPerDigit));

   // int n = used - i - 1
-  __ movl(EAX, Address(ESP, 2 * kWordSize));  // used is Smi
-  __ subl(EAX, Address(ESP, 4 * kWordSize));  // i is Smi
+  __ movl(EAX, Address(ESP, 1 * kWordSize));  // used is Smi
+  __ subl(EAX, Address(ESP, 3 * kWordSize));  // i is Smi
   __ SmiUntag(EAX);
   __ decl(EAX);
   __ pushl(EAX);  // Save n on stack.

   // uint64_t c = high32(t)
   __ pushl(Immediate(0));  // push high32(c) == 0
   __ pushl(EDX);  // push low32(c) == high32(t)

   Address n_addr = Address(ESP, 2 * kWordSize);
   Address ch_addr = Address(ESP, 1 * kWordSize);
(...skipping 44 matching lines...)
   __ movl(EAX, cl_addr);  // t = c
   __ movl(EDX, ch_addr);
   __ addl(EAX, Address(ESI, 0));  // t += *ajp
   __ adcl(EDX, Immediate(0));

   // *ajp++ = low32(t)
   // *ajp = high32(t)
   __ movl(Address(ESI, 0), EAX);
   __ movl(Address(ESI, Bigint::kBytesPerDigit), EDX);

-  // Restore CTX and return.
   __ Drop(3);
-  __ popl(CTX);
   __ Bind(&x_zero);
   __ movl(EAX, Immediate(Smi::RawValue(1)));  // One digit processed.
   __ ret();
 }

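Per the "// uint64_t c = high32(t)" comment above, the running carry in this intrinsic is a full 64-bit value, so on ia32 it occupies two stack words (the pushes of high32(c) and low32(c)). A hypothetical C++ rendering of just the visible first-term step, with the pointer setup and main loop elided as they are above:

    #include <cstdint>

    // Accumulates x*x into the digit at *ajp and returns the 64-bit carry.
    uint64_t SqrAddFirstTerm(uint32_t x, uint32_t* ajp) {
      uint64_t t = static_cast<uint64_t>(x) * x + *ajp;  // EDX:EAX = x*x + *ajp
      *ajp = static_cast<uint32_t>(t);                   // *ajp++ = low32(t)
      return t >> 32;                                    // c = high32(t)
    }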
 void Intrinsifier::Bigint_estQuotientDigit(Assembler* assembler) {
   // Pseudo code:
   // static int _estQuotientDigit(Uint32List args, Uint32List digits, int i) {
   //   uint32_t yt = args[_YT];  // _YT == 1.
(...skipping 849 matching lines...)
                                        Isolate::current_tag_offset());
   // Set return value to Isolate::current_tag_.
   __ movl(EAX, current_tag_addr);
   __ ret();
 }

 #undef __
 }  // namespace dart

 #endif  // defined TARGET_ARCH_IA32