| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 979 matching lines...) |
| 990 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | 990 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
| 991 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | 991 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; |
| 992 } | 992 } |
| 993 stream->Add("UnaryOpStub_%s_%s_%s", | 993 stream->Add("UnaryOpStub_%s_%s_%s", |
| 994 op_name, | 994 op_name, |
| 995 overwrite_name, | 995 overwrite_name, |
| 996 UnaryOpIC::GetName(operand_type_)); | 996 UnaryOpIC::GetName(operand_type_)); |
| 997 } | 997 } |
| 998 | 998 |
| 999 | 999 |
| 1000 void BinaryOpStub::Initialize() {} |
| 1001 |
| 1002 |
| 1000 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 1003 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 1001 __ pop(rcx); // Save return address. | 1004 __ pop(rcx); // Save return address. |
| 1002 __ push(rdx); | 1005 __ push(rdx); |
| 1003 __ push(rax); | 1006 __ push(rax); |
| 1004 // Left and right arguments are now on top. | 1007 // Left and right arguments are now on top. |
| 1005 // Push this stub's key. Although the operation and the type info are | |
| 1006 // encoded into the key, the encoding is opaque, so push them too. | |
| 1007 __ Push(Smi::FromInt(MinorKey())); | 1008 __ Push(Smi::FromInt(MinorKey())); |
| 1008 __ Push(Smi::FromInt(op_)); | |
| 1009 __ Push(Smi::FromInt(operands_type_)); | |
| 1010 | 1009 |
| 1011 __ push(rcx); // Push return address. | 1010 __ push(rcx); // Push return address. |
| 1012 | 1011 |
| 1013 // Patch the caller to an appropriate specialized stub and return the | 1012 // Patch the caller to an appropriate specialized stub and return the |
| 1014 // operation result to the caller of the stub. | 1013 // operation result to the caller of the stub. |
| 1015 __ TailCallExternalReference( | 1014 __ TailCallExternalReference( |
| 1016 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), | 1015 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
| 1017 masm->isolate()), | 1016 masm->isolate()), |
| 1018 5, | 1017 3, |
| 1019 1); | 1018 1); |
| 1020 } | 1019 } |
| 1021 | 1020 |
| 1022 | 1021 |
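Review note: the hunk above is why the runtime call's argument count drops from 5 to 3. The rewritten stub encodes the operation and the recorded type feedback inside MinorKey(), so pushing op_ and operands_type_ separately became redundant; the patch routine can decode everything from the single key Smi. A minimal sketch of the idea with an assumed, illustrative field layout (the real BinaryOpStub packing is defined elsewhere and differs):

```cpp
#include <cstdint>

// Hypothetical key packing -- field widths are assumptions for illustration.
struct StubKey {
  static uint32_t Encode(uint32_t op, uint32_t left_type, uint32_t right_type) {
    return (op & 0x7F) | ((left_type & 0xF) << 7) | ((right_type & 0xF) << 11);
  }
  static uint32_t Op(uint32_t key)        { return key & 0x7F; }
  static uint32_t LeftType(uint32_t key)  { return (key >> 7) & 0xF; }
  static uint32_t RightType(uint32_t key) { return (key >> 11) & 0xF; }
};
```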
| 1023 void BinaryOpStub::Generate(MacroAssembler* masm) { | 1022 static void BinaryOpStub_GenerateSmiCode( |
| 1024 // Explicitly allow generation of nested stubs. It is safe here because | |
| 1025 // generation code does not use any raw pointers. | |
| 1026 AllowStubCallsScope allow_stub_calls(masm, true); | |
| 1027 | |
| 1028 switch (operands_type_) { | |
| 1029 case BinaryOpIC::UNINITIALIZED: | |
| 1030 GenerateTypeTransition(masm); | |
| 1031 break; | |
| 1032 case BinaryOpIC::SMI: | |
| 1033 GenerateSmiStub(masm); | |
| 1034 break; | |
| 1035 case BinaryOpIC::INT32: | |
| 1036 UNREACHABLE(); | |
| 1037 // The int32 case is identical to the Smi case. We avoid creating this | |
| 1038 // ic state on x64. | |
| 1039 break; | |
| 1040 case BinaryOpIC::HEAP_NUMBER: | |
| 1041 GenerateHeapNumberStub(masm); | |
| 1042 break; | |
| 1043 case BinaryOpIC::ODDBALL: | |
| 1044 GenerateOddballStub(masm); | |
| 1045 break; | |
| 1046 case BinaryOpIC::BOTH_STRING: | |
| 1047 GenerateBothStringStub(masm); | |
| 1048 break; | |
| 1049 case BinaryOpIC::STRING: | |
| 1050 GenerateStringStub(masm); | |
| 1051 break; | |
| 1052 case BinaryOpIC::GENERIC: | |
| 1053 GenerateGeneric(masm); | |
| 1054 break; | |
| 1055 default: | |
| 1056 UNREACHABLE(); | |
| 1057 } | |
| 1058 } | |
| 1059 | |
| 1060 | |
| 1061 void BinaryOpStub::PrintName(StringStream* stream) { | |
| 1062 const char* op_name = Token::Name(op_); | |
| 1063 const char* overwrite_name; | |
| 1064 switch (mode_) { | |
| 1065 case NO_OVERWRITE: overwrite_name = "Alloc"; break; | |
| 1066 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; | |
| 1067 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; | |
| 1068 default: overwrite_name = "UnknownOverwrite"; break; | |
| 1069 } | |
| 1070 stream->Add("BinaryOpStub_%s_%s_%s", | |
| 1071 op_name, | |
| 1072 overwrite_name, | |
| 1073 BinaryOpIC::GetName(operands_type_)); | |
| 1074 } | |
| 1075 | |
| 1076 | |
| 1077 void BinaryOpStub::GenerateSmiCode( | |
| 1078 MacroAssembler* masm, | 1023 MacroAssembler* masm, |
| 1079 Label* slow, | 1024 Label* slow, |
| 1080 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { | 1025 BinaryOpStub::SmiCodeGenerateHeapNumberResults allow_heapnumber_results, |
| 1026 Token::Value op) { |
| 1081 | 1027 |
| 1082 // Arguments to BinaryOpStub are in rdx and rax. | 1028 // Arguments to BinaryOpStub are in rdx and rax. |
| 1083 const Register left = rdx; | 1029 const Register left = rdx; |
| 1084 const Register right = rax; | 1030 const Register right = rax; |
| 1085 | 1031 |
| 1086 // We only generate heapnumber answers for overflowing calculations | 1032 // We only generate heapnumber answers for overflowing calculations |
| 1087 // for the four basic arithmetic operations and logical right shift by 0. | 1033 // for the four basic arithmetic operations and logical right shift by 0. |
| 1088 bool generate_inline_heapnumber_results = | 1034 bool generate_inline_heapnumber_results = |
| 1089 (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) && | 1035 (allow_heapnumber_results == BinaryOpStub::ALLOW_HEAPNUMBER_RESULTS) && |
| 1090 (op_ == Token::ADD || op_ == Token::SUB || | 1036 (op == Token::ADD || op == Token::SUB || |
| 1091 op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR); | 1037 op == Token::MUL || op == Token::DIV || op == Token::SHR); |
| 1092 | 1038 |
| 1093 // Smi check of both operands. If op is BIT_OR, the check is delayed | 1039 // Smi check of both operands. If op is BIT_OR, the check is delayed |
| 1094 // until after the OR operation. | 1040 // until after the OR operation. |
| 1095 Label not_smis; | 1041 Label not_smis; |
| 1096 Label use_fp_on_smis; | 1042 Label use_fp_on_smis; |
| 1097 Label fail; | 1043 Label fail; |
| 1098 | 1044 |
| 1099 if (op_ != Token::BIT_OR) { | 1045 if (op != Token::BIT_OR) { |
| 1100 Comment smi_check_comment(masm, "-- Smi check arguments"); | 1046 Comment smi_check_comment(masm, "-- Smi check arguments"); |
| 1101 __ JumpIfNotBothSmi(left, right, &not_smis); | 1047 __ JumpIfNotBothSmi(left, right, &not_smis); |
| 1102 } | 1048 } |
| 1103 | 1049 |
| 1104 Label smi_values; | 1050 Label smi_values; |
| 1105 __ bind(&smi_values); | 1051 __ bind(&smi_values); |
| 1106 // Perform the operation. | 1052 // Perform the operation. |
| 1107 Comment perform_smi(masm, "-- Perform smi operation"); | 1053 Comment perform_smi(masm, "-- Perform smi operation"); |
| 1108 switch (op_) { | 1054 switch (op) { |
| 1109 case Token::ADD: | 1055 case Token::ADD: |
| 1110 ASSERT(right.is(rax)); | 1056 ASSERT(right.is(rax)); |
| 1111 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative. | 1057 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative. |
| 1112 break; | 1058 break; |
| 1113 | 1059 |
| 1114 case Token::SUB: | 1060 case Token::SUB: |
| 1115 __ SmiSub(left, left, right, &use_fp_on_smis); | 1061 __ SmiSub(left, left, right, &use_fp_on_smis); |
| 1116 __ movq(rax, left); | 1062 __ movq(rax, left); |
| 1117 break; | 1063 break; |
| 1118 | 1064 |
| (...skipping 51 matching lines...) |
| 1170 } | 1116 } |
| 1171 | 1117 |
| 1172 // 5. Emit return of result in rax. Some operations have registers pushed. | 1118 // 5. Emit return of result in rax. Some operations have registers pushed. |
| 1173 __ ret(0); | 1119 __ ret(0); |
| 1174 | 1120 |
| 1175 if (use_fp_on_smis.is_linked()) { | 1121 if (use_fp_on_smis.is_linked()) { |
| 1176 // 6. For some operations emit inline code to perform floating point | 1122 // 6. For some operations emit inline code to perform floating point |
| 1177 // operations on known smis (e.g., if the result of the operation | 1123 // operations on known smis (e.g., if the result of the operation |
| 1178 // overflowed the smi range). | 1124 // overflowed the smi range). |
| 1179 __ bind(&use_fp_on_smis); | 1125 __ bind(&use_fp_on_smis); |
| 1180 if (op_ == Token::DIV || op_ == Token::MOD) { | 1126 if (op == Token::DIV || op == Token::MOD) { |
| 1181 // Restore left and right to rdx and rax. | 1127 // Restore left and right to rdx and rax. |
| 1182 __ movq(rdx, rcx); | 1128 __ movq(rdx, rcx); |
| 1183 __ movq(rax, rbx); | 1129 __ movq(rax, rbx); |
| 1184 } | 1130 } |
| 1185 | 1131 |
| 1186 if (generate_inline_heapnumber_results) { | 1132 if (generate_inline_heapnumber_results) { |
| 1187 __ AllocateHeapNumber(rcx, rbx, slow); | 1133 __ AllocateHeapNumber(rcx, rbx, slow); |
| 1188 Comment perform_float(masm, "-- Perform float operation on smis"); | 1134 Comment perform_float(masm, "-- Perform float operation on smis"); |
| 1189 if (op_ == Token::SHR) { | 1135 if (op == Token::SHR) { |
| 1190 __ SmiToInteger32(left, left); | 1136 __ SmiToInteger32(left, left); |
| 1191 __ cvtqsi2sd(xmm0, left); | 1137 __ cvtqsi2sd(xmm0, left); |
| 1192 } else { | 1138 } else { |
| 1193 FloatingPointHelper::LoadSSE2SmiOperands(masm); | 1139 FloatingPointHelper::LoadSSE2SmiOperands(masm); |
| 1194 switch (op_) { | 1140 switch (op) { |
| 1195 case Token::ADD: __ addsd(xmm0, xmm1); break; | 1141 case Token::ADD: __ addsd(xmm0, xmm1); break; |
| 1196 case Token::SUB: __ subsd(xmm0, xmm1); break; | 1142 case Token::SUB: __ subsd(xmm0, xmm1); break; |
| 1197 case Token::MUL: __ mulsd(xmm0, xmm1); break; | 1143 case Token::MUL: __ mulsd(xmm0, xmm1); break; |
| 1198 case Token::DIV: __ divsd(xmm0, xmm1); break; | 1144 case Token::DIV: __ divsd(xmm0, xmm1); break; |
| 1199 default: UNREACHABLE(); | 1145 default: UNREACHABLE(); |
| 1200 } | 1146 } |
| 1201 } | 1147 } |
| 1202 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); | 1148 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); |
| 1203 __ movq(rax, rcx); | 1149 __ movq(rax, rcx); |
| 1204 __ ret(0); | 1150 __ ret(0); |
| 1205 } else { | 1151 } else { |
| 1206 __ jmp(&fail); | 1152 __ jmp(&fail); |
| 1207 } | 1153 } |
| 1208 } | 1154 } |
| 1209 | 1155 |
| 1210 // 7. Non-smi operands reach the end of the code generated by | 1156 // 7. Non-smi operands reach the end of the code generated by |
| 1211 // GenerateSmiCode, and fall through to subsequent code, | 1157 // GenerateSmiCode, and fall through to subsequent code, |
| 1212 // with the operands in rdx and rax. | 1158 // with the operands in rdx and rax. |
| 1213 // But first we check if non-smi values are HeapNumbers holding | 1159 // But first we check if non-smi values are HeapNumbers holding |
| 1214 // values that could be smi. | 1160 // values that could be smi. |
| 1215 __ bind(&not_smis); | 1161 __ bind(&not_smis); |
| 1216 Comment done_comment(masm, "-- Enter non-smi code"); | 1162 Comment done_comment(masm, "-- Enter non-smi code"); |
| 1217 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, | 1163 FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx, |
| 1218 &smi_values, &fail); | 1164 &smi_values, &fail); |
| 1219 __ jmp(&smi_values); | 1165 __ jmp(&smi_values); |
| 1220 __ bind(&fail); | 1166 __ bind(&fail); |
| 1221 } | 1167 } |
| 1222 | 1168 |
| 1223 | 1169 |
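The smi path above only spills to a heap-number result for the four arithmetic operators and SHR, and only when the untagged result no longer fits the 32-bit smi payload (the file asserts STATIC_ASSERT(kSmiValueSize == 32) further down). A host-side restatement of that range test, as a sketch rather than V8 code:

```cpp
#include <cstdint>
#include <limits>

// With 32-bit smi payloads on x64, a result escapes the smi range exactly
// when it overflows int32_t; that is when the use_fp_on_smis path is taken.
bool FitsInSmi(int64_t value) {
  return value >= std::numeric_limits<int32_t>::min() &&
         value <= std::numeric_limits<int32_t>::max();
}
```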
| 1224 void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm, | 1170 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, |
| 1225 Label* allocation_failure, | 1171 Label* alloc_failure, |
| 1226 Label* non_numeric_failure) { | 1172 OverwriteMode mode); |
| 1227 switch (op_) { | 1173 |
| 1174 |
| 1175 static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm, |
| 1176 Label* allocation_failure, |
| 1177 Label* non_numeric_failure, |
| 1178 Token::Value op, |
| 1179 OverwriteMode mode) { |
| 1180 switch (op) { |
| 1228 case Token::ADD: | 1181 case Token::ADD: |
| 1229 case Token::SUB: | 1182 case Token::SUB: |
| 1230 case Token::MUL: | 1183 case Token::MUL: |
| 1231 case Token::DIV: { | 1184 case Token::DIV: { |
| 1232 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); | 1185 FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure); |
| 1233 | 1186 |
| 1234 switch (op_) { | 1187 switch (op) { |
| 1235 case Token::ADD: __ addsd(xmm0, xmm1); break; | 1188 case Token::ADD: __ addsd(xmm0, xmm1); break; |
| 1236 case Token::SUB: __ subsd(xmm0, xmm1); break; | 1189 case Token::SUB: __ subsd(xmm0, xmm1); break; |
| 1237 case Token::MUL: __ mulsd(xmm0, xmm1); break; | 1190 case Token::MUL: __ mulsd(xmm0, xmm1); break; |
| 1238 case Token::DIV: __ divsd(xmm0, xmm1); break; | 1191 case Token::DIV: __ divsd(xmm0, xmm1); break; |
| 1239 default: UNREACHABLE(); | 1192 default: UNREACHABLE(); |
| 1240 } | 1193 } |
| 1241 GenerateHeapResultAllocation(masm, allocation_failure); | 1194 BinaryOpStub_GenerateHeapResultAllocation( |
| 1195 masm, allocation_failure, mode); |
| 1242 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | 1196 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); |
| 1243 __ ret(0); | 1197 __ ret(0); |
| 1244 break; | 1198 break; |
| 1245 } | 1199 } |
| 1246 case Token::MOD: { | 1200 case Token::MOD: { |
| 1247 // For MOD we jump to the allocation_failure label, to call runtime. | 1201 // For MOD we jump to the allocation_failure label, to call runtime. |
| 1248 __ jmp(allocation_failure); | 1202 __ jmp(allocation_failure); |
| 1249 break; | 1203 break; |
| 1250 } | 1204 } |
| 1251 case Token::BIT_OR: | 1205 case Token::BIT_OR: |
| 1252 case Token::BIT_AND: | 1206 case Token::BIT_AND: |
| 1253 case Token::BIT_XOR: | 1207 case Token::BIT_XOR: |
| 1254 case Token::SAR: | 1208 case Token::SAR: |
| 1255 case Token::SHL: | 1209 case Token::SHL: |
| 1256 case Token::SHR: { | 1210 case Token::SHR: { |
| 1257 Label non_smi_shr_result; | 1211 Label non_smi_shr_result; |
| 1258 Register heap_number_map = r9; | 1212 Register heap_number_map = r9; |
| 1259 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 1213 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 1260 FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure, | 1214 FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure, |
| 1261 heap_number_map); | 1215 heap_number_map); |
| 1262 switch (op_) { | 1216 switch (op) { |
| 1263 case Token::BIT_OR: __ orl(rax, rcx); break; | 1217 case Token::BIT_OR: __ orl(rax, rcx); break; |
| 1264 case Token::BIT_AND: __ andl(rax, rcx); break; | 1218 case Token::BIT_AND: __ andl(rax, rcx); break; |
| 1265 case Token::BIT_XOR: __ xorl(rax, rcx); break; | 1219 case Token::BIT_XOR: __ xorl(rax, rcx); break; |
| 1266 case Token::SAR: __ sarl_cl(rax); break; | 1220 case Token::SAR: __ sarl_cl(rax); break; |
| 1267 case Token::SHL: __ shll_cl(rax); break; | 1221 case Token::SHL: __ shll_cl(rax); break; |
| 1268 case Token::SHR: { | 1222 case Token::SHR: { |
| 1269 __ shrl_cl(rax); | 1223 __ shrl_cl(rax); |
| 1270 // Check if result is negative. This can only happen for a shift | 1224 // Check if result is negative. This can only happen for a shift |
| 1271 // by zero. | 1225 // by zero. |
| 1272 __ testl(rax, rax); | 1226 __ testl(rax, rax); |
| 1273 __ j(negative, &non_smi_shr_result); | 1227 __ j(negative, &non_smi_shr_result); |
| 1274 break; | 1228 break; |
| 1275 } | 1229 } |
| 1276 default: UNREACHABLE(); | 1230 default: UNREACHABLE(); |
| 1277 } | 1231 } |
| 1278 STATIC_ASSERT(kSmiValueSize == 32); | 1232 STATIC_ASSERT(kSmiValueSize == 32); |
| 1279 // Tag smi result and return. | 1233 // Tag smi result and return. |
| 1280 __ Integer32ToSmi(rax, rax); | 1234 __ Integer32ToSmi(rax, rax); |
| 1281 __ Ret(); | 1235 __ Ret(); |
| 1282 | 1236 |
| 1283 // Logical shift right can produce an unsigned int32 that is not | 1237 // Logical shift right can produce an unsigned int32 that is not |
| 1284 // an int32, and so is not in the smi range. Allocate a heap number | 1238 // an int32, and so is not in the smi range. Allocate a heap number |
| 1285 // in that case. | 1239 // in that case. |
| 1286 if (op_ == Token::SHR) { | 1240 if (op == Token::SHR) { |
| 1287 __ bind(&non_smi_shr_result); | 1241 __ bind(&non_smi_shr_result); |
| 1288 Label allocation_failed; | 1242 Label allocation_failed; |
| 1289 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). | 1243 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). |
| 1290 // Allocate heap number in new space. | 1244 // Allocate heap number in new space. |
| 1291 // Not using AllocateHeapNumber macro in order to reuse | 1245 // Not using AllocateHeapNumber macro in order to reuse |
| 1292 // already loaded heap_number_map. | 1246 // already loaded heap_number_map. |
| 1293 __ AllocateInNewSpace(HeapNumber::kSize, | 1247 __ AllocateInNewSpace(HeapNumber::kSize, |
| 1294 rax, | 1248 rax, |
| 1295 rdx, | 1249 rdx, |
| 1296 no_reg, | 1250 no_reg, |
| (...skipping 16 matching lines...) |
| 1313 __ Integer32ToSmi(rdx, rbx); | 1267 __ Integer32ToSmi(rdx, rbx); |
| 1314 __ jmp(allocation_failure); | 1268 __ jmp(allocation_failure); |
| 1315 } | 1269 } |
| 1316 break; | 1270 break; |
| 1317 } | 1271 } |
| 1318 default: UNREACHABLE(); break; | 1272 default: UNREACHABLE(); break; |
| 1319 } | 1273 } |
| 1320 // No fall-through from this generated code. | 1274 // No fall-through from this generated code. |
| 1321 if (FLAG_debug_code) { | 1275 if (FLAG_debug_code) { |
| 1322 __ Abort("Unexpected fall-through in " | 1276 __ Abort("Unexpected fall-through in " |
| 1323 "BinaryStub::GenerateFloatingPointCode."); | 1277 "BinaryStub_GenerateFloatingPointCode."); |
| 1324 } | 1278 } |
| 1325 } | 1279 } |
| 1326 | 1280 |
| 1327 | 1281 |
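The non_smi_shr_result branch above exists because SHR is the one bitwise operator whose result is unsigned: a shift count of zero applied to a negative int32 reinterprets it as a uint32 above INT32_MAX, which cannot be retagged as a smi. A stand-alone illustration in plain C++ (not from the patch):

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  int32_t x = -1;
  uint32_t shifted = static_cast<uint32_t>(x) >> 0;  // 4294967295
  // Any shift count >= 1 clears the sign bit, so only a zero shift can
  // yield a uint32 that does not fit back into an int32 smi payload.
  printf("%u fits in int32? %s\n", shifted,
         shifted <= 0x7FFFFFFFu ? "yes" : "no");
  return 0;
}
```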
| 1328 void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { | 1282 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
| 1329 ASSERT(op_ == Token::ADD); | 1283 ASSERT(op_ == Token::ADD); |
| 1330 Label left_not_string, call_runtime; | 1284 Label left_not_string, call_runtime; |
| 1331 | 1285 |
| 1332 // Registers containing left and right operands respectively. | 1286 // Registers containing left and right operands respectively. |
| 1333 Register left = rdx; | 1287 Register left = rdx; |
| 1334 Register right = rax; | 1288 Register right = rax; |
| 1335 | 1289 |
| 1336 // Test if left operand is a string. | 1290 // Test if left operand is a string. |
| 1337 __ JumpIfSmi(left, &left_not_string, Label::kNear); | 1291 __ JumpIfSmi(left, &left_not_string, Label::kNear); |
| 1338 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); | 1292 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
| (...skipping 10 matching lines...) |
| 1349 | 1303 |
| 1350 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | 1304 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
| 1351 GenerateRegisterArgsPush(masm); | 1305 GenerateRegisterArgsPush(masm); |
| 1352 __ TailCallStub(&string_add_right_stub); | 1306 __ TailCallStub(&string_add_right_stub); |
| 1353 | 1307 |
| 1354 // Neither argument is a string. | 1308 // Neither argument is a string. |
| 1355 __ bind(&call_runtime); | 1309 __ bind(&call_runtime); |
| 1356 } | 1310 } |
| 1357 | 1311 |
| 1358 | 1312 |
| 1359 void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { | |
| 1360 GenerateRegisterArgsPush(masm); | |
| 1361 switch (op_) { | |
| 1362 case Token::ADD: | |
| 1363 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); | |
| 1364 break; | |
| 1365 case Token::SUB: | |
| 1366 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); | |
| 1367 break; | |
| 1368 case Token::MUL: | |
| 1369 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); | |
| 1370 break; | |
| 1371 case Token::DIV: | |
| 1372 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION); | |
| 1373 break; | |
| 1374 case Token::MOD: | |
| 1375 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION); | |
| 1376 break; | |
| 1377 case Token::BIT_OR: | |
| 1378 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION); | |
| 1379 break; | |
| 1380 case Token::BIT_AND: | |
| 1381 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION); | |
| 1382 break; | |
| 1383 case Token::BIT_XOR: | |
| 1384 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION); | |
| 1385 break; | |
| 1386 case Token::SAR: | |
| 1387 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION); | |
| 1388 break; | |
| 1389 case Token::SHL: | |
| 1390 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION); | |
| 1391 break; | |
| 1392 case Token::SHR: | |
| 1393 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); | |
| 1394 break; | |
| 1395 default: | |
| 1396 UNREACHABLE(); | |
| 1397 } | |
| 1398 } | |
| 1399 | |
| 1400 | |
| 1401 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 1313 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
| 1402 Label call_runtime; | 1314 Label call_runtime; |
| 1403 if (result_type_ == BinaryOpIC::UNINITIALIZED || | 1315 if (result_type_ == BinaryOpIC::UNINITIALIZED || |
| 1404 result_type_ == BinaryOpIC::SMI) { | 1316 result_type_ == BinaryOpIC::SMI) { |
| 1405 // Only allow smi results. | 1317 // Only allow smi results. |
| 1406 GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS); | 1318 BinaryOpStub_GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS, op_); |
| 1407 } else { | 1319 } else { |
| 1408 // Allow heap number result and don't make a transition if a heap number | 1320 // Allow heap number result and don't make a transition if a heap number |
| 1409 // cannot be allocated. | 1321 // cannot be allocated. |
| 1410 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 1322 BinaryOpStub_GenerateSmiCode( |
| 1323 masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS, op_); |
| 1411 } | 1324 } |
| 1412 | 1325 |
| 1413 // Code falls through if the result is not returned as either a smi or heap | 1326 // Code falls through if the result is not returned as either a smi or heap |
| 1414 // number. | 1327 // number. |
| 1415 GenerateTypeTransition(masm); | 1328 GenerateTypeTransition(masm); |
| 1416 | 1329 |
| 1417 if (call_runtime.is_linked()) { | 1330 if (call_runtime.is_linked()) { |
| 1418 __ bind(&call_runtime); | 1331 __ bind(&call_runtime); |
| 1419 GenerateCallRuntimeCode(masm); | 1332 GenerateRegisterArgsPush(masm); |
| 1333 GenerateCallRuntime(masm); |
| 1420 } | 1334 } |
| 1421 } | 1335 } |
| 1422 | 1336 |
| 1423 | 1337 |
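As a reading aid, here is an assumed control-flow summary of GenerateSmiStub above, written as host-side pseudocode (not the stub itself): the two configurations differ only in what happens when the smi fast path overflows.

```cpp
enum Result { kSmiResult, kHeapNumberResult, kTypeTransition, kRuntimeCall };

// Assumed summary of the two configurations generated above.
Result SmiStubOutcome(bool overflowed, bool smi_results_only, bool alloc_ok) {
  if (!overflowed) return kSmiResult;
  if (smi_results_only) return kTypeTransition;   // NO_HEAPNUMBER_RESULTS
  return alloc_ok ? kHeapNumberResult             // ALLOW_HEAPNUMBER_RESULTS
                  : kRuntimeCall;                 // call_runtime label
}
```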
| 1424 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 1338 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { |
| 1425 ASSERT(operands_type_ == BinaryOpIC::STRING); | 1339 // The int32 case is identical to the Smi case. We avoid creating this |
| 1426 ASSERT(op_ == Token::ADD); | 1340 // ic state on x64. |
| 1427 GenerateStringAddCode(masm); | 1341 UNREACHABLE(); |
| 1428 // Try to add arguments as strings, otherwise, transition to the generic | |
| 1429 // BinaryOpIC type. | |
| 1430 GenerateTypeTransition(masm); | |
| 1431 } | 1342 } |
| 1432 | 1343 |
| 1433 | 1344 |
| 1434 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { | 1345 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
| 1435 Label call_runtime; | 1346 Label call_runtime; |
| 1436 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); | 1347 ASSERT(left_type_ == BinaryOpIC::STRING && right_type_ == BinaryOpIC::STRING); |
| 1437 ASSERT(op_ == Token::ADD); | 1348 ASSERT(op_ == Token::ADD); |
| 1438 // If both arguments are strings, call the string add stub. | 1349 // If both arguments are strings, call the string add stub. |
| 1439 // Otherwise, do a transition. | 1350 // Otherwise, do a transition. |
| 1440 | 1351 |
| 1441 // Registers containing left and right operands respectively. | 1352 // Registers containing left and right operands respectively. |
| 1442 Register left = rdx; | 1353 Register left = rdx; |
| 1443 Register right = rax; | 1354 Register right = rax; |
| 1444 | 1355 |
| 1445 // Test if left operand is a string. | 1356 // Test if left operand is a string. |
| 1446 __ JumpIfSmi(left, &call_runtime); | 1357 __ JumpIfSmi(left, &call_runtime); |
| (...skipping 13 matching lines...) |
| 1460 GenerateTypeTransition(masm); | 1371 GenerateTypeTransition(masm); |
| 1461 } | 1372 } |
| 1462 | 1373 |
| 1463 | 1374 |
| 1464 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { | 1375 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
| 1465 Label call_runtime; | 1376 Label call_runtime; |
| 1466 | 1377 |
| 1467 if (op_ == Token::ADD) { | 1378 if (op_ == Token::ADD) { |
| 1468 // Handle string addition here, because it is the only operation | 1379 // Handle string addition here, because it is the only operation |
| 1469 // that does not do a ToNumber conversion on the operands. | 1380 // that does not do a ToNumber conversion on the operands. |
| 1470 GenerateStringAddCode(masm); | 1381 GenerateAddStrings(masm); |
| 1471 } | 1382 } |
| 1472 | 1383 |
| 1473 // Convert oddball arguments to numbers. | 1384 // Convert oddball arguments to numbers. |
| 1474 Label check, done; | 1385 Label check, done; |
| 1475 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); | 1386 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); |
| 1476 __ j(not_equal, &check, Label::kNear); | 1387 __ j(not_equal, &check, Label::kNear); |
| 1477 if (Token::IsBitOp(op_)) { | 1388 if (Token::IsBitOp(op_)) { |
| 1478 __ xor_(rdx, rdx); | 1389 __ xor_(rdx, rdx); |
| 1479 } else { | 1390 } else { |
| 1480 __ LoadRoot(rdx, Heap::kNanValueRootIndex); | 1391 __ LoadRoot(rdx, Heap::kNanValueRootIndex); |
| 1481 } | 1392 } |
| 1482 __ jmp(&done, Label::kNear); | 1393 __ jmp(&done, Label::kNear); |
| 1483 __ bind(&check); | 1394 __ bind(&check); |
| 1484 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 1395 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
| 1485 __ j(not_equal, &done, Label::kNear); | 1396 __ j(not_equal, &done, Label::kNear); |
| 1486 if (Token::IsBitOp(op_)) { | 1397 if (Token::IsBitOp(op_)) { |
| 1487 __ xor_(rax, rax); | 1398 __ xor_(rax, rax); |
| 1488 } else { | 1399 } else { |
| 1489 __ LoadRoot(rax, Heap::kNanValueRootIndex); | 1400 __ LoadRoot(rax, Heap::kNanValueRootIndex); |
| 1490 } | 1401 } |
| 1491 __ bind(&done); | 1402 __ bind(&done); |
| 1492 | 1403 |
| 1493 GenerateHeapNumberStub(masm); | 1404 GenerateHeapNumberStub(masm); |
| 1494 } | 1405 } |
| 1495 | 1406 |
| 1496 | 1407 |
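The oddball stub above only special-cases undefined. This matches standard JS coercion: ToNumber(undefined) is NaN for arithmetic, while the bitwise operators coerce through ToInt32 and ToInt32(NaN) is 0, so the stub can substitute smi 0 directly. A one-line restatement of those semantics (plain C++, not from this file):

```cpp
#include <cmath>
#include <cstdint>

// undefined + 1 -> NaN, but undefined | 1 -> 1: the bitwise path sees 0.
double UndefinedToNumber() { return std::nan(""); }
int32_t UndefinedToInt32() { return 0; }  // ToInt32(NaN) == 0
```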
| 1408 static void BinaryOpStub_CheckSmiInput(MacroAssembler* masm, |
| 1409 Register input, |
| 1410 Label* fail) { |
| 1411 Label ok; |
| 1412 __ JumpIfSmi(input, &ok, Label::kNear); |
| 1413 Register heap_number_map = r8; |
| 1414 Register scratch1 = r9; |
| 1415 Register scratch2 = r10; |
| 1416 // HeapNumbers containing 32-bit integer values are also allowed. |
| 1417 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 1418 __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map); |
| 1419 __ j(not_equal, fail); |
| 1420 __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset)); |
| 1421 // Convert, convert back, and compare the two doubles' bits. |
| 1422 __ cvttsd2siq(scratch2, xmm0); |
| 1423 __ cvtlsi2sd(xmm1, scratch2); |
| 1424 __ movq(scratch1, xmm0); |
| 1425 __ movq(scratch2, xmm1); |
| 1426 __ cmpq(scratch1, scratch2); |
| 1427 __ j(not_equal, fail); |
| 1428 __ bind(&ok); |
| 1429 } |
| 1430 |
| 1431 |
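BinaryOpStub_CheckSmiInput above uses the convert-and-compare-bits trick: truncate the double to an integer, convert back, and compare raw bit patterns, which rejects fractions, NaN, and -0.0 in one test. A host-side sketch, assuming the casts behave like cvttsd2si/cvtsi2sd on in-range inputs:

```cpp
#include <cstdint>
#include <cstring>

bool DoubleHoldsInt32(double d) {
  if (d != d) return false;  // NaN; the stub's bit compare rejects it too
  if (d < -2147483648.0 || d > 2147483647.0) return false;  // avoid UB casts
  int32_t truncated = static_cast<int32_t>(d);          // cvttsd2si
  double round_trip = static_cast<double>(truncated);   // cvtsi2sd
  uint64_t a, b;
  std::memcpy(&a, &d, sizeof a);
  std::memcpy(&b, &round_trip, sizeof b);
  return a == b;  // bit-for-bit equality also rejects fractions and -0.0
}
```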
| 1497 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 1432 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 1498 Label gc_required, not_number; | 1433 Label gc_required, not_number; |
| 1499 GenerateFloatingPointCode(masm, &gc_required, &not_number); | 1434 |
| 1435 // It could be that only SMIs have been seen at either the left |
| 1436 // or the right operand. For precise type feedback, patch the IC |
| 1437 // again if this changes. |
| 1438 if (left_type_ == BinaryOpIC::SMI) { |
| 1439 BinaryOpStub_CheckSmiInput(masm, rdx, &not_number); |
| 1440 } |
| 1441 if (right_type_ == BinaryOpIC::SMI) { |
| 1442 BinaryOpStub_CheckSmiInput(masm, rax, &not_number); |
| 1443 } |
| 1444 |
| 1445 BinaryOpStub_GenerateFloatingPointCode( |
| 1446 masm, &gc_required, ¬_number, op_, mode_); |
| 1500 | 1447 |
| 1501 __ bind(&not_number); | 1448 __ bind(&not_number); |
| 1502 GenerateTypeTransition(masm); | 1449 GenerateTypeTransition(masm); |
| 1503 | 1450 |
| 1504 __ bind(&gc_required); | 1451 __ bind(&gc_required); |
| 1505 GenerateCallRuntimeCode(masm); | 1452 GenerateRegisterArgsPush(masm); |
| 1453 GenerateCallRuntime(masm); |
| 1506 } | 1454 } |
| 1507 | 1455 |
| 1508 | 1456 |
| 1509 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { | 1457 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
| 1510 Label call_runtime, call_string_add_or_runtime; | 1458 Label call_runtime, call_string_add_or_runtime; |
| 1511 | 1459 |
| 1512 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 1460 BinaryOpStub_GenerateSmiCode( |
| 1461 masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS, op_); |
| 1513 | 1462 |
| 1514 GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime); | 1463 BinaryOpStub_GenerateFloatingPointCode( |
| 1464 masm, &call_runtime, &call_string_add_or_runtime, op_, mode_); |
| 1515 | 1465 |
| 1516 __ bind(&call_string_add_or_runtime); | 1466 __ bind(&call_string_add_or_runtime); |
| 1517 if (op_ == Token::ADD) { | 1467 if (op_ == Token::ADD) { |
| 1518 GenerateStringAddCode(masm); | 1468 GenerateAddStrings(masm); |
| 1519 } | 1469 } |
| 1520 | 1470 |
| 1521 __ bind(&call_runtime); | 1471 __ bind(&call_runtime); |
| 1522 GenerateCallRuntimeCode(masm); | 1472 GenerateRegisterArgsPush(masm); |
| 1473 GenerateCallRuntime(masm); |
| 1523 } | 1474 } |
| 1524 | 1475 |
| 1525 | 1476 |
| 1526 void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm, | 1477 static void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm, |
| 1527 Label* alloc_failure) { | 1478 Label* alloc_failure, |
| 1479 OverwriteMode mode) { |
| 1528 Label skip_allocation; | 1480 Label skip_allocation; |
| 1529 OverwriteMode mode = mode_; | |
| 1530 switch (mode) { | 1481 switch (mode) { |
| 1531 case OVERWRITE_LEFT: { | 1482 case OVERWRITE_LEFT: { |
| 1532 // If the argument in rdx is already an object, we skip the | 1483 // If the argument in rdx is already an object, we skip the |
| 1533 // allocation of a heap number. | 1484 // allocation of a heap number. |
| 1534 __ JumpIfNotSmi(rdx, &skip_allocation); | 1485 __ JumpIfNotSmi(rdx, &skip_allocation); |
| 1535 // Allocate a heap number for the result. Keep rax and rdx intact | 1486 // Allocate a heap number for the result. Keep rax and rdx intact |
| 1536 // for the possible runtime call. | 1487 // for the possible runtime call. |
| 1537 __ AllocateHeapNumber(rbx, rcx, alloc_failure); | 1488 __ AllocateHeapNumber(rbx, rcx, alloc_failure); |
| 1538 // Now rdx can be overwritten losing one of the arguments as we are | 1489 // Now rdx can be overwritten losing one of the arguments as we are |
| 1539 // now done and will not need it any more. | 1490 // now done and will not need it any more. |
| (...skipping 478 matching lines...) |
| 2018 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm, | 1969 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm, |
| 2019 Register first, | 1970 Register first, |
| 2020 Register second, | 1971 Register second, |
| 2021 Register scratch1, | 1972 Register scratch1, |
| 2022 Register scratch2, | 1973 Register scratch2, |
| 2023 Register scratch3, | 1974 Register scratch3, |
| 2024 Label* on_success, | 1975 Label* on_success, |
| 2025 Label* on_not_smis) { | 1976 Label* on_not_smis) { |
| 2026 Register heap_number_map = scratch3; | 1977 Register heap_number_map = scratch3; |
| 2027 Register smi_result = scratch1; | 1978 Register smi_result = scratch1; |
| 2028 Label done; | 1979 Label done, maybe_undefined_first, maybe_undefined_second, first_done; |
| 2029 | 1980 |
| 2030 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 1981 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 2031 | 1982 |
| 2032 Label first_smi; | 1983 Label first_smi; |
| 2033 __ JumpIfSmi(first, &first_smi, Label::kNear); | 1984 __ JumpIfSmi(first, &first_smi, Label::kNear); |
| 2034 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map); | 1985 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map); |
| 2035 __ j(not_equal, on_not_smis); | 1986 __ j(not_equal, &maybe_undefined_first); |
| 2036 // Convert HeapNumber to smi if possible. | 1987 // Convert HeapNumber to smi if possible. |
| 2037 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); | 1988 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); |
| 2038 __ movq(scratch2, xmm0); | 1989 __ movq(scratch2, xmm0); |
| 2039 __ cvttsd2siq(smi_result, xmm0); | 1990 __ cvttsd2siq(smi_result, xmm0); |
| 2040 // Check if conversion was successful by converting back and | 1991 // Check if conversion was successful by converting back and |
| 2041 // comparing to the original double's bits. | 1992 // comparing to the original double's bits. |
| 2042 __ cvtlsi2sd(xmm1, smi_result); | 1993 __ cvtlsi2sd(xmm1, smi_result); |
| 2043 __ movq(kScratchRegister, xmm1); | 1994 __ movq(kScratchRegister, xmm1); |
| 2044 __ cmpq(scratch2, kScratchRegister); | 1995 __ cmpq(scratch2, kScratchRegister); |
| 2045 __ j(not_equal, on_not_smis); | 1996 __ j(not_equal, on_not_smis); |
| 2046 __ Integer32ToSmi(first, smi_result); | 1997 __ Integer32ToSmi(first, smi_result); |
| 2047 | 1998 |
| 1999 __ bind(&first_done); |
| 2048 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); | 2000 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); |
| 2049 __ bind(&first_smi); | 2001 __ bind(&first_smi); |
| 2050 __ AssertNotSmi(second); | 2002 __ AssertNotSmi(second); |
| 2051 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); | 2003 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); |
| 2052 __ j(not_equal, on_not_smis); | 2004 __ j(not_equal, &maybe_undefined_second); |
| 2053 // Convert second to smi, if possible. | 2005 // Convert second to smi, if possible. |
| 2054 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); | 2006 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); |
| 2055 __ movq(scratch2, xmm0); | 2007 __ movq(scratch2, xmm0); |
| 2056 __ cvttsd2siq(smi_result, xmm0); | 2008 __ cvttsd2siq(smi_result, xmm0); |
| 2057 __ cvtlsi2sd(xmm1, smi_result); | 2009 __ cvtlsi2sd(xmm1, smi_result); |
| 2058 __ movq(kScratchRegister, xmm1); | 2010 __ movq(kScratchRegister, xmm1); |
| 2059 __ cmpq(scratch2, kScratchRegister); | 2011 __ cmpq(scratch2, kScratchRegister); |
| 2060 __ j(not_equal, on_not_smis); | 2012 __ j(not_equal, on_not_smis); |
| 2061 __ Integer32ToSmi(second, smi_result); | 2013 __ Integer32ToSmi(second, smi_result); |
| 2062 if (on_success != NULL) { | 2014 if (on_success != NULL) { |
| 2063 __ jmp(on_success); | 2015 __ jmp(on_success); |
| 2064 } else { | 2016 } else { |
| 2065 __ bind(&done); | 2017 __ jmp(&done); |
| 2066 } | 2018 } |
| 2019 |
| 2020 __ bind(&maybe_undefined_first); |
| 2021 __ CompareRoot(first, Heap::kUndefinedValueRootIndex); |
| 2022 __ j(not_equal, on_not_smis); |
| 2023 __ xor_(first, first); |
| 2024 __ jmp(&first_done); |
| 2025 |
| 2026 __ bind(&maybe_undefined_second); |
| 2027 __ CompareRoot(second, Heap::kUndefinedValueRootIndex); |
| 2028 __ j(not_equal, on_not_smis); |
| 2029 __ xor_(second, second); |
| 2030 if (on_success != NULL) { |
| 2031 __ jmp(on_success); |
| 2032 } |
| 2033 // Else: fall through. |
| 2034 |
| 2035 __ bind(&done); |
| 2067 } | 2036 } |
| 2068 | 2037 |
| 2069 | 2038 |
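The rewritten NumbersToSmis above adds the two maybe_undefined exits: an operand that is neither a smi nor an int32-valued heap number may still proceed if it is undefined, which the new xor_ instructions turn into smi 0. An assumed statement of the helper's contract, as host-side pseudocode:

```cpp
#include <cstdint>
#include <optional>

enum class Input { kSmi, kHeapNumberWithInt32, kUndefined, kOther };

// Assumed contract: the smi payload the register holds on success,
// or nullopt for the on_not_smis exit.
std::optional<int32_t> ToSmiPayload(Input kind, int32_t value) {
  switch (kind) {
    case Input::kSmi:                 return value;
    case Input::kHeapNumberWithInt32: return value;  // passed the bit check
    case Input::kUndefined:           return 0;      // new paths above
    case Input::kOther:               return std::nullopt;
  }
  return std::nullopt;
}
```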
| 2070 void MathPowStub::Generate(MacroAssembler* masm) { | 2039 void MathPowStub::Generate(MacroAssembler* masm) { |
| 2071 // Choose register conforming to calling convention (when bailing out). | 2040 // Choose register conforming to calling convention (when bailing out). |
| 2072 #ifdef _WIN64 | 2041 #ifdef _WIN64 |
| 2073 const Register exponent = rdx; | 2042 const Register exponent = rdx; |
| 2074 #else | 2043 #else |
| 2075 const Register exponent = rdi; | 2044 const Register exponent = rdi; |
| 2076 #endif | 2045 #endif |
| (...skipping 1294 matching lines...) |
| 3371 | 3340 |
| 3372 | 3341 |
| 3373 static int NegativeComparisonResult(Condition cc) { | 3342 static int NegativeComparisonResult(Condition cc) { |
| 3374 ASSERT(cc != equal); | 3343 ASSERT(cc != equal); |
| 3375 ASSERT((cc == less) || (cc == less_equal) | 3344 ASSERT((cc == less) || (cc == less_equal) |
| 3376 || (cc == greater) || (cc == greater_equal)); | 3345 || (cc == greater) || (cc == greater_equal)); |
| 3377 return (cc == greater || cc == greater_equal) ? LESS : GREATER; | 3346 return (cc == greater || cc == greater_equal) ? LESS : GREATER; |
| 3378 } | 3347 } |
| 3379 | 3348 |
| 3380 | 3349 |
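NegativeComparisonResult above returns whichever integer makes the caller's `result cc 0` test fail, which is exactly what a relational operator must produce when NaN is involved. A tiny check of that property, assuming V8's usual LESS/EQUAL/GREATER values of -1/0/+1:

```cpp
#include <cassert>

enum { LESS = -1, EQUAL = 0, GREATER = 1 };  // assumed V8 values

int NegativeComparisonResult(bool cc_is_greater_or_greater_equal) {
  return cc_is_greater_or_greater_equal ? LESS : GREATER;
}

int main() {
  // "a > b" with NaN must be false: result is LESS, and LESS > 0 fails.
  assert(!(NegativeComparisonResult(true) > 0));
  // "a < b" with NaN must be false: result is GREATER, GREATER < 0 fails.
  assert(!(NegativeComparisonResult(false) < 0));
  return 0;
}
```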
| 3381 void CompareStub::Generate(MacroAssembler* masm) { | 3350 static void CheckInputType(MacroAssembler* masm, |
| 3382 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 3351 Register input, |
| 3352 CompareIC::State expected, |
| 3353 Label* fail) { |
| 3354 Label ok; |
| 3355 if (expected == CompareIC::SMI) { |
| 3356 __ JumpIfNotSmi(input, fail); |
| 3357 } else if (expected == CompareIC::HEAP_NUMBER) { |
| 3358 __ JumpIfSmi(input, &ok); |
| 3359 __ CompareMap(input, masm->isolate()->factory()->heap_number_map(), NULL); |
| 3360 __ j(not_equal, fail); |
| 3361 } |
| 3362 // We could be strict about symbol/string here, but as long as |
| 3363 // hydrogen doesn't care, the stub doesn't have to care either. |
| 3364 __ bind(&ok); |
| 3365 } |
| 3383 | 3366 |
| 3367 |
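CheckInputType above is what keeps the specialized ICCompareStub honest: any input outside the recorded state jumps to fail, which GenerateGeneric routes to GenerateMiss so the IC can re-patch. An assumed predicate form of the same guard; note that a smi passes the HEAP_NUMBER state, since the JumpIfSmi above branches straight to ok:

```cpp
enum class CompareState { kSmi, kHeapNumber, kGeneric };  // assumed subset

bool InputMatchesState(bool is_smi, bool is_heap_number, CompareState s) {
  switch (s) {
    case CompareState::kSmi:        return is_smi;
    case CompareState::kHeapNumber: return is_smi || is_heap_number;
    case CompareState::kGeneric:    return true;  // no check is emitted
  }
  return false;
}
```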
| 3368 static void BranchIfNonSymbol(MacroAssembler* masm, |
| 3369 Label* label, |
| 3370 Register object, |
| 3371 Register scratch) { |
| 3372 __ JumpIfSmi(object, label); |
| 3373 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); |
| 3374 __ movzxbq(scratch, |
| 3375 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
| 3376 // Ensure that no non-strings have the symbol bit set. |
| 3377 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask); |
| 3378 STATIC_ASSERT(kSymbolTag != 0); |
| 3379 __ testb(scratch, Immediate(kIsSymbolMask)); |
| 3380 __ j(zero, label); |
| 3381 } |
| 3382 |
| 3383 |
| 3384 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { |
| 3384 Label check_unequal_objects, done; | 3385 Label check_unequal_objects, done; |
| 3386 Condition cc = GetCondition(); |
| 3385 Factory* factory = masm->isolate()->factory(); | 3387 Factory* factory = masm->isolate()->factory(); |
| 3386 | 3388 |
| 3387 // Compare two smis if required. | 3389 Label miss; |
| 3388 if (include_smi_compare_) { | 3390 CheckInputType(masm, rdx, left_, &miss); |
| 3389 Label non_smi, smi_done; | 3391 CheckInputType(masm, rax, right_, &miss); |
| 3390 __ JumpIfNotBothSmi(rax, rdx, &non_smi); | 3392 |
| 3391 __ subq(rdx, rax); | 3393 // Compare two smis. |
| 3392 __ j(no_overflow, &smi_done); | 3394 Label non_smi, smi_done; |
| 3393 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. | 3395 __ JumpIfNotBothSmi(rax, rdx, &non_smi); |
| 3394 __ bind(&smi_done); | 3396 __ subq(rdx, rax); |
| 3395 __ movq(rax, rdx); | 3397 __ j(no_overflow, &smi_done); |
| 3396 __ ret(0); | 3398 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. |
| 3397 __ bind(&non_smi); | 3399 __ bind(&smi_done); |
| 3398 } else if (FLAG_debug_code) { | 3400 __ movq(rax, rdx); |
| 3399 Label ok; | 3401 __ ret(0); |
| 3400 __ JumpIfNotSmi(rdx, &ok); | 3402 __ bind(&non_smi); |
| 3401 __ JumpIfNotSmi(rax, &ok); | |
| 3402 __ Abort("CompareStub: smi operands"); | |
| 3403 __ bind(&ok); | |
| 3404 } | |
| 3405 | 3403 |
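The smi fast path above compares by subtracting the tagged values directly: the difference normally has the sign of the comparison result, and when the subtraction overflows, a bitwise NOT restores the correct sign. A host-side sketch using a GCC/Clang builtin, assuming x64's value-in-upper-32-bits smi tagging (which is why the overflowed difference can never be -1, so ~diff is never 0):

```cpp
#include <cstdint>

// Returns <0, ==0, or >0, mirroring what the stub leaves in rax.
int64_t SmiCompare(int64_t left_tagged, int64_t right_tagged) {
  int64_t diff;
  if (__builtin_sub_overflow(left_tagged, right_tagged, &diff)) {
    // Overflow flipped the sign; NOT flips it back. With tagged smis the
    // wrapped difference is a multiple of 2^32, so ~diff cannot be zero.
    diff = ~diff;
  }
  return diff;
}
```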
| 3406 // The compare stub returns a positive, negative, or zero 64-bit integer | 3404 // The compare stub returns a positive, negative, or zero 64-bit integer |
| 3407 // value in rax, corresponding to result of comparing the two inputs. | 3405 // value in rax, corresponding to result of comparing the two inputs. |
| 3408 // NOTICE! This code is only reached after a smi-fast-case check, so | 3406 // NOTICE! This code is only reached after a smi-fast-case check, so |
| 3409 // it is certain that at least one operand isn't a smi. | 3407 // it is certain that at least one operand isn't a smi. |
| 3410 | 3408 |
| 3411 // Two identical objects are equal unless they are both NaN or undefined. | 3409 // Two identical objects are equal unless they are both NaN or undefined. |
| 3412 { | 3410 { |
| 3413 Label not_identical; | 3411 Label not_identical; |
| 3414 __ cmpq(rax, rdx); | 3412 __ cmpq(rax, rdx); |
| 3415 __ j(not_equal, &not_identical, Label::kNear); | 3413 __ j(not_equal, &not_identical, Label::kNear); |
| 3416 | 3414 |
| 3417 if (cc_ != equal) { | 3415 if (cc != equal) { |
| 3418 // Check for undefined. undefined OP undefined is false even though | 3416 // Check for undefined. undefined OP undefined is false even though |
| 3419 // undefined == undefined. | 3417 // undefined == undefined. |
| 3420 Label check_for_nan; | 3418 Label check_for_nan; |
| 3421 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); | 3419 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); |
| 3422 __ j(not_equal, &check_for_nan, Label::kNear); | 3420 __ j(not_equal, &check_for_nan, Label::kNear); |
| 3423 __ Set(rax, NegativeComparisonResult(cc_)); | 3421 __ Set(rax, NegativeComparisonResult(cc)); |
| 3424 __ ret(0); | 3422 __ ret(0); |
| 3425 __ bind(&check_for_nan); | 3423 __ bind(&check_for_nan); |
| 3426 } | 3424 } |
| 3427 | 3425 |
| 3428 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(), | 3426 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(), |
| 3429 // so we do the second best thing - test it ourselves. | 3427 // so we do the second best thing - test it ourselves. |
| 3430 // Note: if cc_ != equal, never_nan_nan_ is not used. | 3428 Label heap_number; |
| 3431 // We cannot set rax to EQUAL until just before return because | 3429 // If it's not a heap number, then return equal for (in)equality operator. |
| 3432 // rax must be unchanged on jump to not_identical. | 3430 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), |
| 3433 if (never_nan_nan_ && (cc_ == equal)) { | 3431 factory->heap_number_map()); |
| 3434 __ Set(rax, EQUAL); | 3432 __ j(equal, &heap_number, Label::kNear); |
| 3435 __ ret(0); | 3433 if (cc != equal) { |
| 3436 } else { | 3434 // Call runtime on identical objects. Otherwise return equal. |
| 3437 Label heap_number; | 3435 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); |
| 3438 // If it's not a heap number, then return equal for (in)equality operator. | 3436 __ j(above_equal, &not_identical, Label::kNear); |
| 3439 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), | 3437 } |
| 3440 factory->heap_number_map()); | 3438 __ Set(rax, EQUAL); |
| 3441 __ j(equal, &heap_number, Label::kNear); | 3439 __ ret(0); |
| 3442 if (cc_ != equal) { | |
| 3443 // Call runtime on identical objects. Otherwise return equal. | |
| 3444 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); | |
| 3445 __ j(above_equal, &not_identical, Label::kNear); | |
| 3446 } | |
| 3447 __ Set(rax, EQUAL); | |
| 3448 __ ret(0); | |
| 3449 | 3440 |
| 3450 __ bind(&heap_number); | 3441 __ bind(&heap_number); |
| 3451 // It is a heap number, so return equal if it's not NaN. | 3442 // It is a heap number, so return equal if it's not NaN. |
| 3452 // For NaN, return 1 for every condition except greater and | 3443 // For NaN, return 1 for every condition except greater and |
| 3453 // greater-equal. Return -1 for them, so the comparison yields | 3444 // greater-equal. Return -1 for them, so the comparison yields |
| 3454 // false for all conditions except not-equal. | 3445 // false for all conditions except not-equal. |
| 3455 __ Set(rax, EQUAL); | 3446 __ Set(rax, EQUAL); |
| 3456 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); | 3447 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
| 3457 __ ucomisd(xmm0, xmm0); | 3448 __ ucomisd(xmm0, xmm0); |
| 3458 __ setcc(parity_even, rax); | 3449 __ setcc(parity_even, rax); |
| 3459 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs. | 3450 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs. |
| 3460 if (cc_ == greater_equal || cc_ == greater) { | 3451 if (cc == greater_equal || cc == greater) { |
| 3461 __ neg(rax); | 3452 __ neg(rax); |
| 3462 } | |
| 3463 __ ret(0); | |
| 3464 } | 3453 } |
| 3454 __ ret(0); |
| 3465 | 3455 |
| 3466 __ bind(&not_identical); | 3456 __ bind(&not_identical); |
| 3467 } | 3457 } |
| 3468 | 3458 |
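In the identical-operands block above, ucomisd of a value against itself is unordered only for NaN (that is what the parity flag signals), and the setcc/neg pair then materializes the result that fails the pending condition. Equivalent host-side logic, as a sketch:

```cpp
// 0 (EQUAL) for identical heap numbers, unless the value is NaN, in which
// case return whatever makes every relational test come out false.
int IdenticalNumberCompare(double v, bool cc_is_greater_or_greater_equal) {
  if (v != v) {                                      // unordered: NaN
    return cc_is_greater_or_greater_equal ? -1 : 1;  // neg(rax) : rax = 1
  }
  return 0;  // EQUAL
}
```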
| 3469 if (cc_ == equal) { // Both strict and non-strict. | 3459 if (cc == equal) { // Both strict and non-strict. |
| 3470 Label slow; // Fallthrough label. | 3460 Label slow; // Fallthrough label. |
| 3471 | 3461 |
| 3472 // If we're doing a strict equality comparison, we don't have to do | 3462 // If we're doing a strict equality comparison, we don't have to do |
| 3473 // type conversion, so we generate code to do fast comparison for objects | 3463 // type conversion, so we generate code to do fast comparison for objects |
| 3474 // and oddballs. Non-smi numbers and strings still go through the usual | 3464 // and oddballs. Non-smi numbers and strings still go through the usual |
| 3475 // slow-case code. | 3465 // slow-case code. |
| 3476 if (strict_) { | 3466 if (strict()) { |
| 3477 // If either is a Smi (we know that not both are), then they can only | 3467 // If either is a Smi (we know that not both are), then they can only |
| 3478 // be equal if the other is a HeapNumber. If so, use the slow case. | 3468 // be equal if the other is a HeapNumber. If so, use the slow case. |
| 3479 { | 3469 { |
| 3480 Label not_smis; | 3470 Label not_smis; |
| 3481 __ SelectNonSmi(rbx, rax, rdx, &not_smis); | 3471 __ SelectNonSmi(rbx, rax, rdx, &not_smis); |
| 3482 | 3472 |
| 3483 // Check if the non-smi operand is a heap number. | 3473 // Check if the non-smi operand is a heap number. |
| 3484 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 3474 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
| 3485 factory->heap_number_map()); | 3475 factory->heap_number_map()); |
| 3486 // If heap number, handle it in the slow case. | 3476 // If heap number, handle it in the slow case. |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3518 // Check for oddballs: true, false, null, undefined. | 3508 // Check for oddballs: true, false, null, undefined. |
| 3519 __ CmpInstanceType(rcx, ODDBALL_TYPE); | 3509 __ CmpInstanceType(rcx, ODDBALL_TYPE); |
| 3520 __ j(equal, &return_not_equal); | 3510 __ j(equal, &return_not_equal); |
| 3521 | 3511 |
| 3522 // Fall through to the general case. | 3512 // Fall through to the general case. |
| 3523 } | 3513 } |
| 3524 __ bind(&slow); | 3514 __ bind(&slow); |
| 3525 } | 3515 } |
| 3526 | 3516 |
| 3527 // Generate the number comparison code. | 3517 // Generate the number comparison code. |
| 3528 if (include_number_compare_) { | 3518 Label non_number_comparison; |
| 3529 Label non_number_comparison; | 3519 Label unordered; |
| 3530 Label unordered; | 3520 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison); |
| 3531 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison); | 3521 __ xorl(rax, rax); |
| 3532 __ xorl(rax, rax); | 3522 __ xorl(rcx, rcx); |
| 3533 __ xorl(rcx, rcx); | 3523 __ ucomisd(xmm0, xmm1); |
| 3534 __ ucomisd(xmm0, xmm1); | |
| 3535 | 3524 |
| 3536 // Don't base result on EFLAGS when a NaN is involved. | 3525 // Don't base result on EFLAGS when a NaN is involved. |
| 3537 __ j(parity_even, &unordered, Label::kNear); | 3526 __ j(parity_even, &unordered, Label::kNear); |
| 3538 // Return a result of -1, 0, or 1, based on EFLAGS. | 3527 // Return a result of -1, 0, or 1, based on EFLAGS. |
| 3539 __ setcc(above, rax); | 3528 __ setcc(above, rax); |
| 3540 __ setcc(below, rcx); | 3529 __ setcc(below, rcx); |
| 3541 __ subq(rax, rcx); | 3530 __ subq(rax, rcx); |
| 3542 __ ret(0); | 3531 __ ret(0); |
| 3543 | 3532 |
| 3544 // If one of the numbers was NaN, then the result is always false. | 3533 // If one of the numbers was NaN, then the result is always false. |
| 3545 // The cc is never not-equal. | 3534 // The cc is never not-equal. |
| 3546 __ bind(&unordered); | 3535 __ bind(&unordered); |
| 3547 ASSERT(cc_ != not_equal); | 3536 ASSERT(cc != not_equal); |
| 3548 if (cc_ == less || cc_ == less_equal) { | 3537 if (cc == less || cc == less_equal) { |
| 3549 __ Set(rax, 1); | 3538 __ Set(rax, 1); |
| 3550 } else { | 3539 } else { |
| 3551 __ Set(rax, -1); | 3540 __ Set(rax, -1); |
| 3552 } | 3541 } |
| 3553 __ ret(0); | 3542 __ ret(0); |
| 3554 | 3543 |
| 3555 // The number comparison code did not provide a valid result. | 3544 // The number comparison code did not provide a valid result. |
| 3556 __ bind(&non_number_comparison); | 3545 __ bind(&non_number_comparison); |
| 3557 } | |
| 3558 | 3546 |
| 3559 // Fast negative check for symbol-to-symbol equality. | 3547 // Fast negative check for symbol-to-symbol equality. |
| 3560 Label check_for_strings; | 3548 Label check_for_strings; |
| 3561 if (cc_ == equal) { | 3549 if (cc == equal) { |
| 3562 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister); | 3550 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister); |
| 3563 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister); | 3551 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister); |
| 3564 | 3552 |
| 3565 // We've already checked for object identity, so if both operands | 3553 // We've already checked for object identity, so if both operands |
| 3566 // are symbols they aren't equal. Register eax (not rax) already holds a | 3554 // are symbols they aren't equal. Register eax (not rax) already holds a |
| 3567 // non-zero value, which indicates not equal, so just return. | 3555 // non-zero value, which indicates not equal, so just return. |
| 3568 __ ret(0); | 3556 __ ret(0); |
| 3569 } | 3557 } |
| 3570 | 3558 |
| 3571 __ bind(&check_for_strings); | 3559 __ bind(&check_for_strings); |
| 3572 | 3560 |
| 3573 __ JumpIfNotBothSequentialAsciiStrings( | 3561 __ JumpIfNotBothSequentialAsciiStrings( |
| 3574 rdx, rax, rcx, rbx, &check_unequal_objects); | 3562 rdx, rax, rcx, rbx, &check_unequal_objects); |
| 3575 | 3563 |
| 3576 // Inline comparison of ASCII strings. | 3564 // Inline comparison of ASCII strings. |
| 3577 if (cc_ == equal) { | 3565 if (cc == equal) { |
| 3578 StringCompareStub::GenerateFlatAsciiStringEquals(masm, | 3566 StringCompareStub::GenerateFlatAsciiStringEquals(masm, |
| 3579 rdx, | 3567 rdx, |
| 3580 rax, | 3568 rax, |
| 3581 rcx, | 3569 rcx, |
| 3582 rbx); | 3570 rbx); |
| 3583 } else { | 3571 } else { |
| 3584 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, | 3572 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, |
| 3585 rdx, | 3573 rdx, |
| 3586 rax, | 3574 rax, |
| 3587 rcx, | 3575 rcx, |
| 3588 rbx, | 3576 rbx, |
| 3589 rdi, | 3577 rdi, |
| 3590 r8); | 3578 r8); |
| 3591 } | 3579 } |
| 3592 | 3580 |
| 3593 #ifdef DEBUG | 3581 #ifdef DEBUG |
| 3594 __ Abort("Unexpected fall-through from string comparison"); | 3582 __ Abort("Unexpected fall-through from string comparison"); |
| 3595 #endif | 3583 #endif |
| 3596 | 3584 |
| 3597 __ bind(&check_unequal_objects); | 3585 __ bind(&check_unequal_objects); |
| 3598 if (cc_ == equal && !strict_) { | 3586 if (cc == equal && !strict()) { |
| 3599 // Not strict equality. Objects are unequal if | 3587 // Not strict equality. Objects are unequal if |
| 3600 // they are both JSObjects and not undetectable, | 3588 // they are both JSObjects and not undetectable, |
| 3601 // and their pointers are different. | 3589 // and their pointers are different. |
| 3602 Label not_both_objects, return_unequal; | 3590 Label not_both_objects, return_unequal; |
| 3603 // At most one is a smi, so we can test for smi by adding the two. | 3591 // At most one is a smi, so we can test for smi by adding the two. |
| 3604 // A smi plus a heap object has the low bit set, a heap object plus | 3592 // A smi plus a heap object has the low bit set, a heap object plus |
| 3605 // a heap object has the low bit clear. | 3593 // a heap object has the low bit clear. |
| 3606 STATIC_ASSERT(kSmiTag == 0); | 3594 STATIC_ASSERT(kSmiTag == 0); |
| 3607 STATIC_ASSERT(kSmiTagMask == 1); | 3595 STATIC_ASSERT(kSmiTagMask == 1); |
| 3608 __ lea(rcx, Operand(rax, rdx, times_1, 0)); | 3596 __ lea(rcx, Operand(rax, rdx, times_1, 0)); |
| (...skipping 19 matching lines...) |
| 3628 __ bind(&not_both_objects); | 3616 __ bind(&not_both_objects); |
| 3629 } | 3617 } |
| 3630 | 3618 |
| 3631 // Push arguments below the return address to prepare jump to builtin. | 3619 // Push arguments below the return address to prepare jump to builtin. |
| 3632 __ pop(rcx); | 3620 __ pop(rcx); |
| 3633 __ push(rdx); | 3621 __ push(rdx); |
| 3634 __ push(rax); | 3622 __ push(rax); |
| 3635 | 3623 |
| 3636 // Figure out which native to call and setup the arguments. | 3624 // Figure out which native to call and setup the arguments. |
| 3637 Builtins::JavaScript builtin; | 3625 Builtins::JavaScript builtin; |
| 3638 if (cc_ == equal) { | 3626 if (cc == equal) { |
| 3639 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | 3627 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; |
| 3640 } else { | 3628 } else { |
| 3641 builtin = Builtins::COMPARE; | 3629 builtin = Builtins::COMPARE; |
| 3642 __ Push(Smi::FromInt(NegativeComparisonResult(cc_))); | 3630 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); |
| 3643 } | 3631 } |
| 3644 | 3632 |
| 3645 // Restore return address on the stack. | 3633 // Restore return address on the stack. |
| 3646 __ push(rcx); | 3634 __ push(rcx); |
| 3647 | 3635 |
| 3648 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 3636 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 3649 // tagged as a small integer. | 3637 // tagged as a small integer. |
| 3650 __ InvokeBuiltin(builtin, JUMP_FUNCTION); | 3638 __ InvokeBuiltin(builtin, JUMP_FUNCTION); |
| 3639 |
| 3640 __ bind(&miss); |
| 3641 GenerateMiss(masm); |
| 3651 } | 3642 } |
| 3652 | 3643 |
| 3653 | 3644 |
| 3654 void CompareStub::BranchIfNonSymbol(MacroAssembler* masm, | |
| 3655 Label* label, | |
| 3656 Register object, | |
| 3657 Register scratch) { | |
| 3658 __ JumpIfSmi(object, label); | |
| 3659 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); | |
| 3660 __ movzxbq(scratch, | |
| 3661 FieldOperand(scratch, Map::kInstanceTypeOffset)); | |
| 3662 // Ensure that no non-strings have the symbol bit set. | |
| 3663 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask); | |
| 3664 STATIC_ASSERT(kSymbolTag != 0); | |
| 3665 __ testb(scratch, Immediate(kIsSymbolMask)); | |
| 3666 __ j(zero, label); | |
| 3667 } | |
| 3668 | |
| 3669 | |
| 3670 void StackCheckStub::Generate(MacroAssembler* masm) { | 3645 void StackCheckStub::Generate(MacroAssembler* masm) { |
| 3671 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); | 3646 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); |
| 3672 } | 3647 } |
| 3673 | 3648 |
| 3674 | 3649 |
| 3675 void InterruptStub::Generate(MacroAssembler* masm) { | 3650 void InterruptStub::Generate(MacroAssembler* masm) { |
| 3676 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); | 3651 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); |
| 3677 } | 3652 } |
| 3678 | 3653 |
| 3679 | 3654 |
| (...skipping 734 matching lines...) |
| 4414 } | 4389 } |
| 4415 | 4390 |
| 4416 | 4391 |
| 4417 // Passing arguments in registers is not supported. | 4392 // Passing arguments in registers is not supported. |
| 4418 Register InstanceofStub::left() { return no_reg; } | 4393 Register InstanceofStub::left() { return no_reg; } |
| 4419 | 4394 |
| 4420 | 4395 |
| 4421 Register InstanceofStub::right() { return no_reg; } | 4396 Register InstanceofStub::right() { return no_reg; } |
| 4422 | 4397 |
| 4423 | 4398 |
| 4424 int CompareStub::MinorKey() { | |
| 4425 // Encode the three parameters in a unique 16-bit value. To avoid duplicate | |
| 4426 // stubs, the never-NaN-NaN condition is only taken into account when the | |
| 4427 // condition is equal. | |
| 4428 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | |
| 4429 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | |
| 4430 return ConditionField::encode(static_cast<unsigned>(cc_)) | |
| 4431 | RegisterField::encode(false) // lhs_ and rhs_ are not used | |
| 4432 | StrictField::encode(strict_) | |
| 4433 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) | |
| 4434 | IncludeNumberCompareField::encode(include_number_compare_) | |
| 4435 | IncludeSmiCompareField::encode(include_smi_compare_); | |
| 4436 } | |
| 4437 | |
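MinorKey packs the stub's parameters into disjoint bit ranges of one integer via V8's BitField helpers, so each distinct configuration yields a distinct stub key. A simplified re-creation of the pattern; the BitField template and the field widths below are assumptions for illustration, not V8's exact layout:

```cpp
#include <cassert>
#include <cstdint>

// Simplified stand-in for V8's BitField<T, shift, size> helper.
template <typename T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMask = ((1u << size) - 1) << shift;
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << shift) & kMask;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key & kMask) >> shift);
  }
};

// Assumed layout: 12 bits of condition code, then 1-bit flags.
using ConditionField = BitField<unsigned, 0, 12>;
using StrictField = BitField<bool, 12, 1>;
using NeverNanNanField = BitField<bool, 13, 1>;

int main() {
  uint32_t key = ConditionField::encode(5)
               | StrictField::encode(true)
               | NeverNanNanField::encode(false);
  assert(ConditionField::decode(key) == 5);
  assert(StrictField::decode(key));
  assert(!NeverNanNanField::decode(key));
  return 0;
}
```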
| 4438 | |
| 4439 // Unfortunately you have to run without snapshots to see most of these | |
| 4440 // names in the profile since most compare stubs end up in the snapshot. | |
| 4441 void CompareStub::PrintName(StringStream* stream) { | |
| 4442 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | |
| 4443 const char* cc_name; | |
| 4444 switch (cc_) { | |
| 4445 case less: cc_name = "LT"; break; | |
| 4446 case greater: cc_name = "GT"; break; | |
| 4447 case less_equal: cc_name = "LE"; break; | |
| 4448 case greater_equal: cc_name = "GE"; break; | |
| 4449 case equal: cc_name = "EQ"; break; | |
| 4450 case not_equal: cc_name = "NE"; break; | |
| 4451 default: cc_name = "UnknownCondition"; break; | |
| 4452 } | |
| 4453 bool is_equality = cc_ == equal || cc_ == not_equal; | |
| 4454 stream->Add("CompareStub_%s", cc_name); | |
| 4455 if (strict_ && is_equality) stream->Add("_STRICT"); | |
| 4456 if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN"); | |
| 4457 if (!include_number_compare_) stream->Add("_NO_NUMBER"); | |
| 4458 if (!include_smi_compare_) stream->Add("_NO_SMI"); | |
| 4459 } | |
| 4460 | |
| 4461 | |
| 4462 // ------------------------------------------------------------------------- | 4399 // ------------------------------------------------------------------------- |
| 4463 // StringCharCodeAtGenerator | 4400 // StringCharCodeAtGenerator |
| 4464 | 4401 |
| 4465 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 4402 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
| 4466 Label flat_string; | 4403 Label flat_string; |
| 4467 Label ascii_string; | 4404 Label ascii_string; |
| 4468 Label got_char_code; | 4405 Label got_char_code; |
| 4469 Label sliced_string; | 4406 Label sliced_string; |
| 4470 | 4407 |
| 4471 // If the receiver is a smi, trigger the non-string case. | 4408 // If the receiver is a smi, trigger the non-string case. |
| (...skipping 1089 matching lines...) |
| 5561 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 5498 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
| 5562 | 5499 |
| 5563 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 5500 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 5564 // tagged as a small integer. | 5501 // tagged as a small integer. |
| 5565 __ bind(&runtime); | 5502 __ bind(&runtime); |
| 5566 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 5503 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 5567 } | 5504 } |
| 5568 | 5505 |
| 5569 | 5506 |
| 5570 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 5507 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 5571 ASSERT(state_ == CompareIC::SMIS); | 5508 ASSERT(state_ == CompareIC::SMI); |
| 5572 Label miss; | 5509 Label miss; |
| 5573 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); | 5510 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); |
| 5574 | 5511 |
| 5575 if (GetCondition() == equal) { | 5512 if (GetCondition() == equal) { |
| 5576 // For equality we do not care about the sign of the result. | 5513 // For equality we do not care about the sign of the result. |
| 5577 __ subq(rax, rdx); | 5514 __ subq(rax, rdx); |
| 5578 } else { | 5515 } else { |
| 5579 Label done; | 5516 Label done; |
| 5580 __ subq(rdx, rax); | 5517 __ subq(rdx, rax); |
| 5581 __ j(no_overflow, &done, Label::kNear); | 5518 __ j(no_overflow, &done, Label::kNear); |
| 5582 // Correct sign of result in case of overflow. | 5519 // Correct sign of result in case of overflow. |
| 5583 __ SmiNot(rdx, rdx); | 5520 __ SmiNot(rdx, rdx); |
| 5584 __ bind(&done); | 5521 __ bind(&done); |
| 5585 __ movq(rax, rdx); | 5522 __ movq(rax, rdx); |
| 5586 } | 5523 } |
| 5587 __ ret(0); | 5524 __ ret(0); |
| 5588 | 5525 |
| 5589 __ bind(&miss); | 5526 __ bind(&miss); |
| 5590 GenerateMiss(masm); | 5527 GenerateMiss(masm); |
| 5591 } | 5528 } |
| 5592 | 5529 |
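The non-equality path in GenerateSmis relies on a subtlety: `rdx - rax` can overflow for extreme smi values, in which case the wrapped result has the wrong sign, and flipping every bit (SmiNot) restores a value whose sign matches the true comparison. A self-contained check of that property on plain 32-bit integers (illustrative; the stub works on 64-bit smi words whose low 32 bits are zero, which also rules out the one zero edge case of the bit-flip):

```cpp
#include <cassert>
#include <cstdint>

// Subtract with wrap-around, mirroring `subq` (32-bit model of the idea).
int32_t sub_wrap(int32_t x, int32_t y) {
  return static_cast<int32_t>(static_cast<uint32_t>(x) -
                              static_cast<uint32_t>(y));
}

int main() {
  int32_t x = INT32_MAX, y = -1;   // true difference overflows int32_t
  int32_t d = sub_wrap(x, y);      // wraps to INT32_MIN: wrong (negative) sign
  assert(d < 0 && x > y);
  // Flipping every bit of the overflowed difference lands on the
  // opposite, i.e. correct, sign -- here INT32_MAX, matching x > y.
  int32_t fixed = ~d;
  assert(fixed > 0);
  return 0;
}
```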
| 5593 | 5530 |
| 5594 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { | 5531 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { |
| 5595 ASSERT(state_ == CompareIC::HEAP_NUMBERS); | 5532 ASSERT(state_ == CompareIC::HEAP_NUMBER); |
| 5596 | 5533 |
| 5597 Label generic_stub; | 5534 Label generic_stub; |
| 5598 Label unordered, maybe_undefined1, maybe_undefined2; | 5535 Label unordered, maybe_undefined1, maybe_undefined2; |
| 5599 Label miss; | 5536 Label miss; |
| 5600 Condition either_smi = masm->CheckEitherSmi(rax, rdx); | |
| 5601 __ j(either_smi, &generic_stub, Label::kNear); | |
| 5602 | 5537 |
| 5603 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx); | 5538 if (left_ == CompareIC::SMI) { |
| 5539 __ JumpIfNotSmi(rdx, &miss); |
| 5540 } |
| 5541 if (right_ == CompareIC::SMI) { |
| 5542 __ JumpIfNotSmi(rax, &miss); |
| 5543 } |
| 5544 |
| 5545 // Load left and right operand. |
| 5546 Label done, left, left_smi, right_smi; |
| 5547 __ JumpIfSmi(rax, &right_smi, Label::kNear); |
| 5548 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL); |
| 5604 __ j(not_equal, &maybe_undefined1, Label::kNear); | 5549 __ j(not_equal, &maybe_undefined1, Label::kNear); |
| 5605 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); | 5550 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 5551 __ jmp(&left, Label::kNear); |
| 5552 __ bind(&right_smi); |
| 5553 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. |
| 5554 __ cvtlsi2sd(xmm1, rcx); |
| 5555 |
| 5556 __ bind(&left); |
| 5557 __ JumpIfSmi(rdx, &left_smi, Label::kNear); |
| 5558 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL); |
| 5606 __ j(not_equal, &maybe_undefined2, Label::kNear); | 5559 __ j(not_equal, &maybe_undefined2, Label::kNear); |
| 5560 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
| 5561 __ jmp(&done); |
| 5562 __ bind(&left_smi); |
| 5563 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. |
| 5564 __ cvtlsi2sd(xmm0, rcx); |
| 5607 | 5565 |
| 5608 // Load left and right operand | 5566 __ bind(&done); |
| 5609 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); | |
| 5610 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | |
| 5611 | |
| 5612 // Compare operands | 5567 // Compare operands |
| 5613 __ ucomisd(xmm0, xmm1); | 5568 __ ucomisd(xmm0, xmm1); |
| 5614 | 5569 |
| 5615 // Don't base result on EFLAGS when a NaN is involved. | 5570 // Don't base result on EFLAGS when a NaN is involved. |
| 5616 __ j(parity_even, &unordered, Label::kNear); | 5571 __ j(parity_even, &unordered, Label::kNear); |
| 5617 | 5572 |
| 5618 // Return a result of -1, 0, or 1, based on EFLAGS. | 5573 // Return a result of -1, 0, or 1, based on EFLAGS. |
| 5619 // Using mov, because xor would clobber the flags register. | 5574 // Using mov, because xor would clobber the flags register. |
| 5620 __ movl(rax, Immediate(0)); | 5575 __ movl(rax, Immediate(0)); |
| 5621 __ movl(rcx, Immediate(0)); | 5576 __ movl(rcx, Immediate(0)); |
| 5622 __ setcc(above, rax); // Add one to zero if carry clear and not equal. | 5577 __ setcc(above, rax); // Add one to zero if carry clear and not equal. |
| 5623 __ sbbq(rax, rcx); // Subtract one if below (aka. carry set). | 5578 __ sbbq(rax, rcx); // Subtract one if below (aka. carry set). |
| 5624 __ ret(0); | 5579 __ ret(0); |
| 5625 | 5580 |
| 5626 __ bind(&unordered); | 5581 __ bind(&unordered); |
| 5627 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); | |
| 5628 __ bind(&generic_stub); | 5582 __ bind(&generic_stub); |
| 5583 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC, |
| 5584 CompareIC::GENERIC); |
| 5629 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | 5585 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 5630 | 5586 |
| 5631 __ bind(&maybe_undefined1); | 5587 __ bind(&maybe_undefined1); |
| 5632 if (Token::IsOrderedRelationalCompareOp(op_)) { | 5588 if (Token::IsOrderedRelationalCompareOp(op_)) { |
| 5633 __ Cmp(rax, masm->isolate()->factory()->undefined_value()); | 5589 __ Cmp(rax, masm->isolate()->factory()->undefined_value()); |
| 5634 __ j(not_equal, &miss); | 5590 __ j(not_equal, &miss); |
| 5591 __ JumpIfSmi(rdx, &unordered); |
| 5635 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); | 5592 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); |
| 5636 __ j(not_equal, &maybe_undefined2, Label::kNear); | 5593 __ j(not_equal, &maybe_undefined2, Label::kNear); |
| 5637 __ jmp(&unordered); | 5594 __ jmp(&unordered); |
| 5638 } | 5595 } |
| 5639 | 5596 |
| 5640 __ bind(&maybe_undefined2); | 5597 __ bind(&maybe_undefined2); |
| 5641 if (Token::IsOrderedRelationalCompareOp(op_)) { | 5598 if (Token::IsOrderedRelationalCompareOp(op_)) { |
| 5642 __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); | 5599 __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); |
| 5643 __ j(equal, &unordered); | 5600 __ j(equal, &unordered); |
| 5644 } | 5601 } |
| 5645 | 5602 |
| 5646 __ bind(&miss); | 5603 __ bind(&miss); |
| 5647 GenerateMiss(masm); | 5604 GenerateMiss(masm); |
| 5648 } | 5605 } |
| 5649 | 5606 |
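The flag dance at the end of the fast path turns EFLAGS into -1/0/1 without branching: after `ucomisd(xmm0, xmm1)`, the carry flag is set when xmm0 is below xmm1, `setcc(above)` leaves 1 in rax only for "greater", and `sbbq(rax, rcx)` with rcx zero subtracts the borrowed carry, yielding -1 for "less" and 0 for "equal". The same value can be computed branch-free in portable C++ (a sketch; the NaN case, which the stub routes to &unordered via parity_even, is ignored here):

```cpp
#include <cassert>

// Branch-free three-way compare, equivalent to the setcc/sbb sequence:
// `above` corresponds to (a > b) and the carry flag to (a < b),
// so the result is (a > b) - (a < b).
int ThreeWayCompare(double a, double b) {
  return (a > b) - (a < b);
}

int main() {
  assert(ThreeWayCompare(1.0, 2.0) == -1);
  assert(ThreeWayCompare(2.0, 2.0) == 0);
  assert(ThreeWayCompare(3.0, 2.0) == 1);
  // A NaN operand would yield 0 here; the stub never reaches this
  // sequence with NaN because it branches away on parity_even first.
  return 0;
}
```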
| 5650 | 5607 |
| 5651 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) { | 5608 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) { |
| 5652 ASSERT(state_ == CompareIC::SYMBOLS); | 5609 ASSERT(state_ == CompareIC::SYMBOL); |
| 5653 ASSERT(GetCondition() == equal); | 5610 ASSERT(GetCondition() == equal); |
| 5654 | 5611 |
| 5655 // Registers containing left and right operands respectively. | 5612 // Registers containing left and right operands respectively. |
| 5656 Register left = rdx; | 5613 Register left = rdx; |
| 5657 Register right = rax; | 5614 Register right = rax; |
| 5658 Register tmp1 = rcx; | 5615 Register tmp1 = rcx; |
| 5659 Register tmp2 = rbx; | 5616 Register tmp2 = rbx; |
| 5660 | 5617 |
| 5661 // Check that both operands are heap objects. | 5618 // Check that both operands are heap objects. |
| 5662 Label miss; | 5619 Label miss; |
| (...skipping 22 matching lines...) |
| 5685 __ Move(rax, Smi::FromInt(EQUAL)); | 5642 __ Move(rax, Smi::FromInt(EQUAL)); |
| 5686 __ bind(&done); | 5643 __ bind(&done); |
| 5687 __ ret(0); | 5644 __ ret(0); |
| 5688 | 5645 |
| 5689 __ bind(&miss); | 5646 __ bind(&miss); |
| 5690 GenerateMiss(masm); | 5647 GenerateMiss(masm); |
| 5691 } | 5648 } |
| 5692 | 5649 |
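GenerateSymbols can decide equality from pointers alone because symbols are interned: two symbols are equal exactly when they are the same object. The idea in miniature, using a hand-rolled intern table (illustrative; not V8's string table):

```cpp
#include <cassert>
#include <string>
#include <unordered_set>

int main() {
  // Interning: every distinct string value gets exactly one storage node,
  // so value equality collapses to pointer identity -- the property the
  // stub exploits by comparing the two symbol pointers directly.
  std::unordered_set<std::string> intern_table;
  const std::string* a = &*intern_table.insert("foo").first;
  const std::string* b = &*intern_table.insert("foo").first;
  const std::string* c = &*intern_table.insert("bar").first;
  assert(a == b);   // same value -> same object
  assert(a != c);   // different value -> different object
  return 0;
}
```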
| 5693 | 5650 |
| 5694 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { | 5651 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
| 5695 ASSERT(state_ == CompareIC::STRINGS); | 5652 ASSERT(state_ == CompareIC::STRING); |
| 5696 Label miss; | 5653 Label miss; |
| 5697 | 5654 |
| 5698 bool equality = Token::IsEqualityOp(op_); | 5655 bool equality = Token::IsEqualityOp(op_); |
| 5699 | 5656 |
| 5700 // Registers containing left and right operands respectively. | 5657 // Registers containing left and right operands respectively. |
| 5701 Register left = rdx; | 5658 Register left = rdx; |
| 5702 Register right = rax; | 5659 Register right = rax; |
| 5703 Register tmp1 = rcx; | 5660 Register tmp1 = rcx; |
| 5704 Register tmp2 = rbx; | 5661 Register tmp2 = rbx; |
| 5705 Register tmp3 = rdi; | 5662 Register tmp3 = rdi; |
| (...skipping 65 matching lines...) |
| 5771 } else { | 5728 } else { |
| 5772 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 5729 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 5773 } | 5730 } |
| 5774 | 5731 |
| 5775 __ bind(&miss); | 5732 __ bind(&miss); |
| 5776 GenerateMiss(masm); | 5733 GenerateMiss(masm); |
| 5777 } | 5734 } |
| 5778 | 5735 |
| 5779 | 5736 |
| 5780 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 5737 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
| 5781 ASSERT(state_ == CompareIC::OBJECTS); | 5738 ASSERT(state_ == CompareIC::OBJECT); |
| 5782 Label miss; | 5739 Label miss; |
| 5783 Condition either_smi = masm->CheckEitherSmi(rdx, rax); | 5740 Condition either_smi = masm->CheckEitherSmi(rdx, rax); |
| 5784 __ j(either_smi, &miss, Label::kNear); | 5741 __ j(either_smi, &miss, Label::kNear); |
| 5785 | 5742 |
| 5786 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); | 5743 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); |
| 5787 __ j(not_equal, &miss, Label::kNear); | 5744 __ j(not_equal, &miss, Label::kNear); |
| 5788 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); | 5745 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); |
| 5789 __ j(not_equal, &miss, Label::kNear); | 5746 __ j(not_equal, &miss, Label::kNear); |
| 5790 | 5747 |
| 5791 ASSERT(GetCondition() == equal); | 5748 ASSERT(GetCondition() == equal); |
| (...skipping 707 matching lines...) |
| 6499 #endif | 6456 #endif |
| 6500 | 6457 |
| 6501 __ Ret(); | 6458 __ Ret(); |
| 6502 } | 6459 } |
| 6503 | 6460 |
| 6504 #undef __ | 6461 #undef __ |
| 6505 | 6462 |
| 6506 } } // namespace v8::internal | 6463 } } // namespace v8::internal |
| 6507 | 6464 |
| 6508 #endif // V8_TARGET_ARCH_X64 | 6465 #endif // V8_TARGET_ARCH_X64 |