OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1021 matching lines...)
1032 overwrite_name, | 1032 overwrite_name, |
1033 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "", | 1033 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "", |
1034 args_in_registers_ ? "RegArgs" : "StackArgs", | 1034 args_in_registers_ ? "RegArgs" : "StackArgs", |
1035 args_reversed_ ? "_R" : "", | 1035 args_reversed_ ? "_R" : "", |
1036 static_operands_type_.ToString(), | 1036 static_operands_type_.ToString(), |
1037 BinaryOpIC::GetName(runtime_operands_type_)); | 1037 BinaryOpIC::GetName(runtime_operands_type_)); |
1038 return name_; | 1038 return name_; |
1039 } | 1039 } |
1040 | 1040 |
1041 | 1041 |
1042 // Call the specialized stub for a binary operation. | 1042 // Perform or call the specialized stub for a binary operation. Requires the |
1043 // three registers left, right and dst to be distinct and spilled. This | |
1044 // deferred operation has up to three entry points: The main one calls the | |
1045 // runtime system. The second is for when the result is a non-Smi. The | |
1046 // third is for when at least one of the inputs is non-Smi and we have SSE2. | |
1043 class DeferredInlineBinaryOperation: public DeferredCode { | 1047 class DeferredInlineBinaryOperation: public DeferredCode { |
1044 public: | 1048 public: |
1045 DeferredInlineBinaryOperation(Token::Value op, | 1049 DeferredInlineBinaryOperation(Token::Value op, |
1046 Register dst, | 1050 Register dst, |
1047 Register left, | 1051 Register left, |
1048 Register right, | 1052 Register right, |
1049 TypeInfo left_info, | 1053 TypeInfo left_info, |
1050 TypeInfo right_info, | 1054 TypeInfo right_info, |
1051 OverwriteMode mode) | 1055 OverwriteMode mode) |
1052 : op_(op), dst_(dst), left_(left), right_(right), | 1056 : op_(op), dst_(dst), left_(left), right_(right), |
1053 left_info_(left_info), right_info_(right_info), mode_(mode) { | 1057 left_info_(left_info), right_info_(right_info), mode_(mode) { |
1054 set_comment("[ DeferredInlineBinaryOperation"); | 1058 set_comment("[ DeferredInlineBinaryOperation"); |
1055 } | 1059 } |
1056 | 1060 |
1057 virtual void Generate(); | 1061 virtual void Generate(); |
1058 | 1062 |
1063 // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and | |
1064 // Exit(). | |
1065 virtual bool AutoSaveAndRestore() { return false; } | |
1066 | |
1067 void JumpToAnswerOutOfRange(Condition cond); | |
1068 void JumpToConstantRhs(Condition cond, Smi* smi_value); | |
1069 Label* NonSmiInputLabel(); | |
1070 | |
1059 private: | 1071 private: |
1072 void GenerateAnswerOutOfRange(); | |
1073 void GenerateNonSmiInput(); | |
1074 | |
1060 Token::Value op_; | 1075 Token::Value op_; |
1061 Register dst_; | 1076 Register dst_; |
1062 Register left_; | 1077 Register left_; |
1063 Register right_; | 1078 Register right_; |
1064 TypeInfo left_info_; | 1079 TypeInfo left_info_; |
1065 TypeInfo right_info_; | 1080 TypeInfo right_info_; |
1066 OverwriteMode mode_; | 1081 OverwriteMode mode_; |
1082 Label answer_out_of_range_; | |
1083 Label non_smi_input_; | |
1084 Label constant_rhs_; | |
1085 Smi* smi_value_; | |
1067 }; | 1086 }; |
1068 | 1087 |
1069 | 1088 |
1089 Label* DeferredInlineBinaryOperation::NonSmiInputLabel() { | |
1090 if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) { | |
1091 return &non_smi_input_; | |
1092 } else { | |
1093 return entry_label(); | |
1094 } | |
1095 } | |
1096 | |
1097 | |
1098 void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) { | |
1099 __ j(cond, &answer_out_of_range_); | |
1100 } | |
1101 | |
1102 | |
1103 void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond, | |
1104 Smi* smi_value) { | |
1105 smi_value_ = smi_value; | |
1106 __ j(cond, &constant_rhs_); | |
1107 } | |
1108 | |
1109 | |
1070 void DeferredInlineBinaryOperation::Generate() { | 1110 void DeferredInlineBinaryOperation::Generate() { |
1071 Label done; | 1111 // Registers are not saved implicitly for this stub, so we should not |
1072 if (CpuFeatures::IsSupported(SSE2) && ((op_ == Token::ADD) || | 1112 // tread on the registers that were not passed to us. |
1073 (op_ ==Token::SUB) || | 1113 if (CpuFeatures::IsSupported(SSE2) && |
1074 (op_ == Token::MUL) || | 1114 ((op_ == Token::ADD) || |
1075 (op_ == Token::DIV))) { | 1115 (op_ == Token::SUB) || |
1116 (op_ == Token::MUL) || | |
1117 (op_ == Token::DIV))) { | |
1076 CpuFeatures::Scope use_sse2(SSE2); | 1118 CpuFeatures::Scope use_sse2(SSE2); |
1077 Label call_runtime, after_alloc_failure; | 1119 Label call_runtime, after_alloc_failure; |
1078 Label left_smi, right_smi, load_right, do_op; | 1120 Label left_smi, right_smi, load_right, do_op; |
1079 if (!left_info_.IsSmi()) { | 1121 if (!left_info_.IsSmi()) { |
1080 __ test(left_, Immediate(kSmiTagMask)); | 1122 __ test(left_, Immediate(kSmiTagMask)); |
1081 __ j(zero, &left_smi); | 1123 __ j(zero, &left_smi); |
1082 if (!left_info_.IsNumber()) { | 1124 if (!left_info_.IsNumber()) { |
1083 __ cmp(FieldOperand(left_, HeapObject::kMapOffset), | 1125 __ cmp(FieldOperand(left_, HeapObject::kMapOffset), |
1084 Factory::heap_number_map()); | 1126 Factory::heap_number_map()); |
1085 __ j(not_equal, &call_runtime); | 1127 __ j(not_equal, &call_runtime); |
(...skipping 39 matching lines...)
1125 __ jmp(&do_op); | 1167 __ jmp(&do_op); |
1126 | 1168 |
1127 __ bind(&right_smi); | 1169 __ bind(&right_smi); |
1128 } else { | 1170 } else { |
1129 if (FLAG_debug_code) __ AbortIfNotSmi(right_); | 1171 if (FLAG_debug_code) __ AbortIfNotSmi(right_); |
1130 } | 1172 } |
1131 __ SmiUntag(right_); | 1173 __ SmiUntag(right_); |
1132 __ cvtsi2sd(xmm1, Operand(right_)); | 1174 __ cvtsi2sd(xmm1, Operand(right_)); |
1133 __ SmiTag(right_); | 1175 __ SmiTag(right_); |
1134 if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) { | 1176 if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) { |
1135 Label alloc_failure; | |
1136 __ push(left_); | 1177 __ push(left_); |
1137 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); | 1178 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); |
1138 __ pop(left_); | 1179 __ pop(left_); |
1139 } | 1180 } |
1140 | 1181 |
1141 __ bind(&do_op); | 1182 __ bind(&do_op); |
1142 switch (op_) { | 1183 switch (op_) { |
1143 case Token::ADD: __ addsd(xmm0, xmm1); break; | 1184 case Token::ADD: __ addsd(xmm0, xmm1); break; |
1144 case Token::SUB: __ subsd(xmm0, xmm1); break; | 1185 case Token::SUB: __ subsd(xmm0, xmm1); break; |
1145 case Token::MUL: __ mulsd(xmm0, xmm1); break; | 1186 case Token::MUL: __ mulsd(xmm0, xmm1); break; |
1146 case Token::DIV: __ divsd(xmm0, xmm1); break; | 1187 case Token::DIV: __ divsd(xmm0, xmm1); break; |
1147 default: UNREACHABLE(); | 1188 default: UNREACHABLE(); |
1148 } | 1189 } |
1149 __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0); | 1190 __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0); |
1150 __ jmp(&done); | 1191 Exit(); |
1192 | |
1151 | 1193 |
1152 __ bind(&after_alloc_failure); | 1194 __ bind(&after_alloc_failure); |
1153 __ pop(left_); | 1195 __ pop(left_); |
1154 __ bind(&call_runtime); | 1196 __ bind(&call_runtime); |
1155 } | 1197 } |
1198 // Register spilling is not done implicitly for this stub. | |
1199 // We can't postpone it any more now though. | |
1200 SaveRegisters(); | |
1201 | |
1156 GenericBinaryOpStub stub(op_, | 1202 GenericBinaryOpStub stub(op_, |
1157 mode_, | 1203 mode_, |
1158 NO_SMI_CODE_IN_STUB, | 1204 NO_SMI_CODE_IN_STUB, |
1159 TypeInfo::Combine(left_info_, right_info_)); | 1205 TypeInfo::Combine(left_info_, right_info_)); |
1160 stub.GenerateCall(masm_, left_, right_); | 1206 stub.GenerateCall(masm_, left_, right_); |
1161 if (!dst_.is(eax)) __ mov(dst_, eax); | 1207 if (!dst_.is(eax)) __ mov(dst_, eax); |
1162 __ bind(&done); | 1208 RestoreRegisters(); |
1209 Exit(); | |
1210 | |
1211 if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) { | |
1212 GenerateNonSmiInput(); | |
1213 } | |
1214 if (answer_out_of_range_.is_linked()) { | |
1215 GenerateAnswerOutOfRange(); | |
1216 } | |
1217 } | |
1218 | |
1219 | |
1220 void DeferredInlineBinaryOperation::GenerateNonSmiInput() { | |
1221 // We know at least one of the inputs was not a Smi. | |
1222 // This is a third entry point into the deferred code. | |
1223 // We may not overwrite left_ because we want to be able | |
1224 // to call the handling code for non-smi answer and it | |
1225 // might want to overwrite the heap number in left_. | |
1226 ASSERT(!right_.is(dst_)); | |
1227 ASSERT(!left_.is(dst_)); | |
1228 ASSERT(!left_.is(right_)); | |
1229 // This entry point is used for bit ops where the right hand side | |
1230 // is a constant Smi and the left hand side is a heap object. It | |
1231 // is also used for bit ops where both sides are unknown, but where | |
1232 // at least one of them is a heap object. | |
1233 bool rhs_is_constant = constant_rhs_.is_linked(); | |
1234 // We can't generate code for both cases. | |
1235 ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked()); | |
1236 | |
1237 if (FLAG_debug_code) { | |
1238 __ int3(); // We don't fall through into this code. | |
1239 } | |
1240 | |
1241 __ bind(&non_smi_input_); | |
1242 | |
1243 if (rhs_is_constant) { | |
1244 __ bind(&constant_rhs_); | |
1245 // In this case the input is a heap object and it is in the dst_ register. | |
1246 // The left_ and right_ registers have not been initialized yet. | |
1247 __ mov(right_, Immediate(smi_value_)); | |
1248 __ mov(left_, Operand(dst_)); | |
1249 if (!CpuFeatures::IsSupported(SSE2)) { | |
1250 __ jmp(entry_label()); | |
1251 return; | |
1252 } else { | |
1253 CpuFeatures::Scope use_sse2(SSE2); | |
1254 __ JumpIfNotNumber(dst_, left_info_, entry_label()); | |
1255 __ JumpIfNotInt32(left_, dst_, left_info_, entry_label()); | |
Lasse Reichstein (2010/08/06 08:20:12): The JumpIfNotInt32 name seems incorrect if it acce…
Erik Corry (2010/08/09 13:13:49): Renamed to JumpIfNotInt32Range.
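The check discussed above only has to establish, before the cvttsd2si on the next line, that the heap number's double value lies in the range a 32-bit integer can represent. A minimal stand-alone sketch of that idea (hypothetical illustration, not the V8 macro, which works on tagged heap numbers and jumps to a label instead of returning a bool):

    // Hypothetical helper, illustration only: true when truncating d to int32
    // cannot overflow.  Conservative bounds; the real macro also has to deal
    // with the tagged heap-number representation.
    static bool FitsInInt32Range(double d) {
      return d >= -2147483648.0 && d <= 2147483647.0;
    }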
1256 __ cvttsd2si(dst_, FieldOperand(left_, HeapNumber::kValueOffset)); | |
1257 __ SmiUntag(right_); | |
1258 } | |
1259 } else { | |
1260 CpuFeatures::Scope use_sse2(SSE2); | |
Lasse Reichstein (2010/08/06 08:20:12): What if SSE2 is not supported? (It was tested in t…
Erik Corry (2010/08/09 13:13:49): Comment added.
1261 // Handle the non-constant right hand side situation: | |
1262 if (left_info_.IsSmi()) { | |
1263 // Right is a heap object. | |
1264 __ JumpIfNotNumber(right_, right_info_, entry_label()); | |
1265 __ JumpIfNotInt32(right_, dst_, right_info_, entry_label()); | |
1266 __ cvttsd2si(right_, FieldOperand(right_, HeapNumber::kValueOffset)); | |
1267 __ mov(dst_, Operand(left_)); | |
1268 __ SmiUntag(dst_); | |
1269 } else if (right_info_.IsSmi()) { | |
1270 // Left is a heap object. | |
1271 __ JumpIfNotNumber(left_, left_info_, entry_label()); | |
1272 __ JumpIfNotInt32(left_, dst_, left_info_, entry_label()); | |
1273 __ cvttsd2si(dst_, FieldOperand(left_, HeapNumber::kValueOffset)); | |
1274 __ SmiUntag(right_); | |
1275 } else { | |
1276 // Here we don't know if it's one or both that is a heap object. | |
1277 Label only_right_is_heap_object, got_both; | |
1278 __ mov(dst_, Operand(left_)); | |
1279 __ SmiUntagAndBranchOnSmi(dst_, &only_right_is_heap_object); | |
1280 // Left was a heap object. | |
1281 __ JumpIfNotNumber(left_, left_info_, entry_label()); | |
1282 __ JumpIfNotInt32(left_, dst_, left_info_, entry_label()); | |
1283 __ cvttsd2si(dst_, FieldOperand(left_, HeapNumber::kValueOffset)); | |
1284 __ SmiUntagAndBranchOnSmi(right_, &got_both); | |
1285 // Both were heap objects. | |
1286 __ rcl(right_, 1); // Put tag back. | |
1287 __ JumpIfNotNumber(right_, right_info_, entry_label()); | |
1288 __ JumpIfNotInt32(right_, no_reg, right_info_, entry_label()); | |
1289 __ cvttsd2si(right_, FieldOperand(right_, HeapNumber::kValueOffset)); | |
1290 __ jmp(&got_both); | |
1291 __ bind(&only_right_is_heap_object); | |
1292 __ JumpIfNotNumber(right_, right_info_, entry_label()); | |
1293 __ JumpIfNotInt32(right_, no_reg, right_info_, entry_label()); | |
1294 __ cvttsd2si(right_, FieldOperand(right_, HeapNumber::kValueOffset)); | |
1295 __ bind(&got_both); | |
1296 } | |
1297 } | |
1298 ASSERT(op_ == Token::BIT_AND || | |
1299 op_ == Token::BIT_OR || | |
1300 op_ == Token::BIT_XOR || | |
1301 right_.is(ecx)); | |
1302 switch (op_) { | |
1303 case Token::BIT_AND: __ and_(dst_, Operand(right_)); break; | |
1304 case Token::BIT_OR: __ or_(dst_, Operand(right_)); break; | |
1305 case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break; | |
1306 case Token::SHR: __ shr_cl(dst_); break; | |
1307 case Token::SAR: __ sar_cl(dst_); break; | |
1308 case Token::SHL: __ shl_cl(dst_); break; | |
1309 default: UNREACHABLE(); | |
1310 } | |
1311 if (op_ == Token::SHR) { | |
1312 // Check that the *unsigned* result fits in a smi. Neither of | |
1313 // the two high-order bits can be set: | |
1314 // * 0x80000000: high bit would be lost when smi tagging. | |
1315 // * 0x40000000: this number would convert to negative when smi | |
1316 // tagging. | |
1317 __ test(dst_, Immediate(0xc0000000)); | |
1318 __ j(not_zero, &answer_out_of_range_); | |
1319 } else { | |
1320 // Check that the *signed* result fits in a smi. | |
1321 __ cmp(dst_, 0xc0000000); | |
1322 __ j(negative, &answer_out_of_range_); | |
1323 } | |
1324 __ SmiTag(dst_); | |
1325 Exit(); | |
1326 } | |
1327 | |
1328 | |
1329 void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() { | |
1330 Label after_alloc_failure2; | |
1331 Label allocation_ok; | |
1332 __ bind(&after_alloc_failure2); | |
1333 // We have to allocate a number, causing a GC, while keeping hold of | |
1334 // the answer in dst_. The answer is not a Smi. We can't just call the | |
1335 // runtime shift function here because we already threw away the inputs. | |
1336 __ xor_(left_, Operand(left_)); | |
1337 __ shl(dst_, 1); // Put top bit in carry flag and Smi tag the low bits. | |
1338 __ rcr(left_, 1); // Rotate with carry. | |
1339 __ push(dst_); // Smi tagged low 31 bits. | |
1340 __ push(left_); // 0 or 0x80000000. | |
Lasse Reichstein (2010/08/06 08:20:12): Make comment and/or ASSERT saying that left_ must…
Erik Corry (2010/08/09 13:13:49): Done.
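The shl/rcr sequence above is how the untagged 32-bit answer survives the allocation call: both words pushed on the stack have the low (tag) bit clear, so the garbage collector sees two smis and leaves them alone, and the value is reassembled afterwards. In plain arithmetic the round trip looks like this (illustration only, not V8 code):

    #include <cstdint>
    #include <cassert>

    void SplitAndRestore(uint32_t value) {
      uint32_t low31_tagged = value << 1;       // shl dst_, 1: top bit goes to carry
      uint32_t high_bit = value & 0x80000000u;  // rcr left_, 1: carry lands in bit 31 of zeroed left_
      // ...both words are pushed, Runtime::kNumberAlloc may trigger a GC, both are popped...
      uint32_t restored = (low31_tagged >> 1) | high_bit;  // shr dst_, 1 ; or_ dst_, right_
      assert(restored == value);
    }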
1341 __ CallRuntime(Runtime::kNumberAlloc, 0); | |
1342 if (!left_.is(eax)) { | |
1343 __ mov(left_, eax); | |
1344 } | |
1345 __ pop(right_); // High bit. | |
1346 __ pop(dst_); // Low 31 bits. | |
1347 __ shr(dst_, 1); // Put 0 in top bit. | |
1348 __ or_(dst_, Operand(right_)); | |
1349 __ jmp(&allocation_ok); | |
1350 | |
1351 // This is the second entry point to the deferred code. It is used only by | |
1352 // the bit operations. | |
1353 // The dst_ register has the answer. It is not Smi tagged. If mode_ is | |
1354 // OVERWRITE_LEFT then left_ must contain either an overwritable heap number | |
1355 // or a Smi. | |
1356 // Put a heap number pointer in left_. | |
1357 __ bind(&answer_out_of_range_); | |
1358 SaveRegisters(); | |
1359 if (mode_ == OVERWRITE_LEFT) { | |
1360 __ test(left_, Immediate(kSmiTagMask)); | |
1361 __ j(not_zero, &allocation_ok); | |
1362 } | |
1363 // This trashes right_. | |
1364 __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2); | |
1365 __ bind(&allocation_ok); | |
1366 if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) { | |
1367 CpuFeatures::Scope use_sse2(SSE2); | |
1368 ASSERT(Token::IsBitOp(op_)); | |
1369 // Signed conversion. | |
1370 __ cvtsi2sd(xmm0, Operand(dst_)); | |
1371 __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0); | |
1372 } else { | |
1373 if (op_ == Token::SHR) { | |
1374 __ push(dst_); | |
1375 __ push(Immediate(0)); // High word of unsigned value. | |
1376 __ fild_d(Operand(esp, 0)); | |
1377 __ Drop(2); | |
1378 } else { | |
1379 ASSERT(Token::IsBitOp(op_)); | |
1380 __ push(dst_); | |
1381 __ fild_s(Operand(esp, 0)); // Signed conversion. | |
1382 __ pop(dst_); | |
1383 } | |
1384 __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset)); | |
1385 } | |
1386 __ mov(dst_, left_); | |
1387 RestoreRegisters(); | |
1388 Exit(); | |
1163 } | 1389 } |
1164 | 1390 |
1165 | 1391 |
1166 static TypeInfo CalculateTypeInfo(TypeInfo operands_type, | 1392 static TypeInfo CalculateTypeInfo(TypeInfo operands_type, |
1167 Token::Value op, | 1393 Token::Value op, |
1168 const Result& right, | 1394 const Result& right, |
1169 const Result& left) { | 1395 const Result& left) { |
1170 // Set TypeInfo of result according to the operation performed. | 1396 // Set TypeInfo of result according to the operation performed. |
1171 // Rely on the fact that smis have a 31 bit payload on ia32. | 1397 // Rely on the fact that smis have a 31 bit payload on ia32. |
1172 STATIC_ASSERT(kSmiValueSize == 31); | 1398 STATIC_ASSERT(kSmiValueSize == 31); |
(...skipping 320 matching lines...)
1493 } | 1719 } |
1494 } | 1720 } |
1495 | 1721 |
1496 | 1722 |
1497 void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, | 1723 void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, |
1498 Register right, | 1724 Register right, |
1499 Register scratch, | 1725 Register scratch, |
1500 TypeInfo left_info, | 1726 TypeInfo left_info, |
1501 TypeInfo right_info, | 1727 TypeInfo right_info, |
1502 DeferredCode* deferred) { | 1728 DeferredCode* deferred) { |
1729 JumpIfNotBothSmiUsingTypeInfo(left, | |
1730 right, | |
1731 scratch, | |
1732 left_info, | |
1733 right_info, | |
1734 deferred->entry_label()); | |
1735 } | |
1736 | |
1737 | |
1738 void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, | |
1739 Register right, | |
1740 Register scratch, | |
1741 TypeInfo left_info, | |
1742 TypeInfo right_info, | |
1743 Label* on_not_smi) { | |
1503 if (left.is(right)) { | 1744 if (left.is(right)) { |
1504 if (!left_info.IsSmi()) { | 1745 if (!left_info.IsSmi()) { |
1505 __ test(left, Immediate(kSmiTagMask)); | 1746 __ test(left, Immediate(kSmiTagMask)); |
1506 deferred->Branch(not_zero); | 1747 __ j(not_zero, on_not_smi); |
1507 } else { | 1748 } else { |
1508 if (FLAG_debug_code) __ AbortIfNotSmi(left); | 1749 if (FLAG_debug_code) __ AbortIfNotSmi(left); |
1509 } | 1750 } |
1510 } else if (!left_info.IsSmi()) { | 1751 } else if (!left_info.IsSmi()) { |
1511 if (!right_info.IsSmi()) { | 1752 if (!right_info.IsSmi()) { |
1512 __ mov(scratch, left); | 1753 __ mov(scratch, left); |
1513 __ or_(scratch, Operand(right)); | 1754 __ or_(scratch, Operand(right)); |
1514 __ test(scratch, Immediate(kSmiTagMask)); | 1755 __ test(scratch, Immediate(kSmiTagMask)); |
1515 deferred->Branch(not_zero); | 1756 __ j(not_zero, on_not_smi); |
1516 } else { | 1757 } else { |
1517 __ test(left, Immediate(kSmiTagMask)); | 1758 __ test(left, Immediate(kSmiTagMask)); |
1518 deferred->Branch(not_zero); | 1759 __ j(not_zero, on_not_smi); |
1519 if (FLAG_debug_code) __ AbortIfNotSmi(right); | 1760 if (FLAG_debug_code) __ AbortIfNotSmi(right); |
1520 } | 1761 } |
1521 } else { | 1762 } else { |
1522 if (FLAG_debug_code) __ AbortIfNotSmi(left); | 1763 if (FLAG_debug_code) __ AbortIfNotSmi(left); |
1523 if (!right_info.IsSmi()) { | 1764 if (!right_info.IsSmi()) { |
1524 __ test(right, Immediate(kSmiTagMask)); | 1765 __ test(right, Immediate(kSmiTagMask)); |
1525 deferred->Branch(not_zero); | 1766 __ j(not_zero, on_not_smi); |
1526 } else { | 1767 } else { |
1527 if (FLAG_debug_code) __ AbortIfNotSmi(right); | 1768 if (FLAG_debug_code) __ AbortIfNotSmi(right); |
1528 } | 1769 } |
1529 } | 1770 } |
1530 } | 1771 } |
1531 | 1772 |
1532 | 1773 |
1533 // Implements a binary operation using a deferred code object and some | 1774 // Implements a binary operation using a deferred code object and some |
1534 // inline code to operate on smis quickly. | 1775 // inline code to operate on smis quickly. |
1535 Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, | 1776 Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, |
(...skipping 64 matching lines...)
1600 } | 1841 } |
1601 } | 1842 } |
1602 ASSERT(remainder.is_register() && remainder.reg().is(edx)); | 1843 ASSERT(remainder.is_register() && remainder.reg().is(edx)); |
1603 ASSERT(!(left->is_register() && left->reg().is(edx))); | 1844 ASSERT(!(left->is_register() && left->reg().is(edx))); |
1604 ASSERT(!(right->is_register() && right->reg().is(edx))); | 1845 ASSERT(!(right->is_register() && right->reg().is(edx))); |
1605 | 1846 |
1606 left->ToRegister(); | 1847 left->ToRegister(); |
1607 right->ToRegister(); | 1848 right->ToRegister(); |
1608 frame_->Spill(eax); | 1849 frame_->Spill(eax); |
1609 frame_->Spill(edx); | 1850 frame_->Spill(edx); |
1851 // DeferredInlineBinaryOperation requires all the registers that it is | |
1852 // told about to be spilled. | |
1853 frame_->Spill(left->reg()); | |
1854 frame_->Spill(right->reg()); | |
1610 | 1855 |
1611 // Check that left and right are smi tagged. | 1856 // Check that left and right are smi tagged. |
1612 DeferredInlineBinaryOperation* deferred = | 1857 DeferredInlineBinaryOperation* deferred = |
1613 new DeferredInlineBinaryOperation(op, | 1858 new DeferredInlineBinaryOperation(op, |
1614 (op == Token::DIV) ? eax : edx, | 1859 (op == Token::DIV) ? eax : edx, |
1615 left->reg(), | 1860 left->reg(), |
1616 right->reg(), | 1861 right->reg(), |
1617 left_type_info, | 1862 left_type_info, |
1618 right_type_info, | 1863 right_type_info, |
1619 overwrite_mode); | 1864 overwrite_mode); |
(...skipping 69 matching lines...)
1689 // Move left out of ecx if necessary. | 1934 // Move left out of ecx if necessary. |
1690 if (left->is_register() && left->reg().is(ecx)) { | 1935 if (left->is_register() && left->reg().is(ecx)) { |
1691 *left = allocator_->Allocate(); | 1936 *left = allocator_->Allocate(); |
1692 ASSERT(left->is_valid()); | 1937 ASSERT(left->is_valid()); |
1693 __ mov(left->reg(), ecx); | 1938 __ mov(left->reg(), ecx); |
1694 } | 1939 } |
1695 right->ToRegister(ecx); | 1940 right->ToRegister(ecx); |
1696 left->ToRegister(); | 1941 left->ToRegister(); |
1697 ASSERT(left->is_register() && !left->reg().is(ecx)); | 1942 ASSERT(left->is_register() && !left->reg().is(ecx)); |
1698 ASSERT(right->is_register() && right->reg().is(ecx)); | 1943 ASSERT(right->is_register() && right->reg().is(ecx)); |
1944 if (left_type_info.IsSmi()) { | |
1945 if (FLAG_debug_code) __ AbortIfNotSmi(left->reg()); | |
1946 } | |
1947 if (right_type_info.IsSmi()) { | |
1948 if (FLAG_debug_code) __ AbortIfNotSmi(right->reg()); | |
1949 } | |
1699 | 1950 |
1700 // We will modify right, it must be spilled. | 1951 // We will modify right, it must be spilled. |
1701 frame_->Spill(ecx); | 1952 frame_->Spill(ecx); |
1953 // DeferredInlineBinaryOperation requires all the registers that it is told | |
1954 // about to be spilled. | |
1955 frame_->Spill(left->reg()); | |
1702 | 1956 |
1703 // Use a fresh answer register to avoid spilling the left operand. | 1957 // Use a fresh answer register to avoid spilling the left operand. |
1704 answer = allocator_->Allocate(); | 1958 answer = allocator_->Allocate(); |
1705 ASSERT(answer.is_valid()); | 1959 ASSERT(answer.is_valid()); |
1706 // Check that both operands are smis using the answer register as a | |
1707 // temporary. | |
1708 DeferredInlineBinaryOperation* deferred = | 1960 DeferredInlineBinaryOperation* deferred = |
1709 new DeferredInlineBinaryOperation(op, | 1961 new DeferredInlineBinaryOperation(op, |
1710 answer.reg(), | 1962 answer.reg(), |
1711 left->reg(), | 1963 left->reg(), |
1712 ecx, | 1964 ecx, |
1713 left_type_info, | 1965 left_type_info, |
1714 right_type_info, | 1966 right_type_info, |
1715 overwrite_mode); | 1967 overwrite_mode); |
1968 JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), | |
1969 left_type_info, right_type_info, | |
1970 deferred->NonSmiInputLabel()); | |
1716 | 1971 |
1717 Label do_op, left_nonsmi; | 1972 // Untag both operands. |
1718 // If right is a smi we make a fast case if left is either a smi | 1973 __ mov(answer.reg(), left->reg()); |
1719 // or a heapnumber. | 1974 __ SmiUntag(answer.reg()); |
1720 if (CpuFeatures::IsSupported(SSE2) && right_type_info.IsSmi()) { | 1975 __ SmiUntag(right->reg()); // Right is ecx. |
1721 CpuFeatures::Scope use_sse2(SSE2); | 1976 |
1722 __ mov(answer.reg(), left->reg()); | 1977 // Perform the operation. |
1723 // Fast case - both are actually smis. | 1978 ASSERT(right->reg().is(ecx)); |
1724 if (!left_type_info.IsSmi()) { | 1979 switch (op) { |
1725 __ test(answer.reg(), Immediate(kSmiTagMask)); | 1980 case Token::SAR: { |
1726 __ j(not_zero, &left_nonsmi); | 1981 __ sar_cl(answer.reg()); |
1727 } else { | 1982 if (!left_type_info.IsSmi()) { |
1728 if (FLAG_debug_code) __ AbortIfNotSmi(left->reg()); | 1983 // Check that the *signed* result fits in a smi. |
1984 __ cmp(answer.reg(), 0xc0000000); | |
1985 deferred->JumpToAnswerOutOfRange(negative); | |
1986 } | |
1987 break; | |
1729 } | 1988 } |
1730 if (FLAG_debug_code) __ AbortIfNotSmi(right->reg()); | |
1731 __ SmiUntag(answer.reg()); | |
1732 __ jmp(&do_op); | |
1733 | |
1734 __ bind(&left_nonsmi); | |
1735 // Branch if not a heapnumber. | |
1736 __ cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset), | |
1737 Factory::heap_number_map()); | |
1738 deferred->Branch(not_equal); | |
1739 | |
1740 // Load integer value into answer register using truncation. | |
1741 __ cvttsd2si(answer.reg(), | |
1742 FieldOperand(answer.reg(), HeapNumber::kValueOffset)); | |
1743 // Branch if we do not fit in a smi. | |
1744 __ cmp(answer.reg(), 0xc0000000); | |
1745 deferred->Branch(negative); | |
1746 } else { | |
1747 JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), | |
1748 left_type_info, right_type_info, deferred); | |
1749 | |
1750 // Untag both operands. | |
1751 __ mov(answer.reg(), left->reg()); | |
1752 __ SmiUntag(answer.reg()); | |
1753 } | |
1754 | |
1755 __ bind(&do_op); | |
1756 __ SmiUntag(ecx); | |
1757 // Perform the operation. | |
1758 switch (op) { | |
1759 case Token::SAR: | |
1760 __ sar_cl(answer.reg()); | |
1761 // No checks of result necessary | |
1762 break; | |
1763 case Token::SHR: { | 1989 case Token::SHR: { |
1764 Label result_ok; | |
1765 __ shr_cl(answer.reg()); | 1990 __ shr_cl(answer.reg()); |
1766 // Check that the *unsigned* result fits in a smi. Neither of | 1991 // Check that the *unsigned* result fits in a smi. Neither of |
1767 // the two high-order bits can be set: | 1992 // the two high-order bits can be set: |
1768 // * 0x80000000: high bit would be lost when smi tagging. | 1993 // * 0x80000000: high bit would be lost when smi tagging. |
1769 // * 0x40000000: this number would convert to negative when smi | 1994 // * 0x40000000: this number would convert to negative when smi |
1770 // tagging. | 1995 // tagging. |
1771 // These two cases can only happen with shifts by 0 or 1 when | 1996 // These two cases can only happen with shifts by 0 or 1 when |
1772 // handed a valid smi. If the answer cannot be represented by a | 1997 // handed a valid smi. If the answer cannot be represented by a |
1773 // smi, restore the left and right arguments, and jump to slow | 1998 // smi, restore the left and right arguments, and jump to slow |
1774 // case. The low bit of the left argument may be lost, but only | 1999 // case. The low bit of the left argument may be lost, but only |
1775 // in a case where it is dropped anyway. | 2000 // in a case where it is dropped anyway. |
1776 __ test(answer.reg(), Immediate(0xc0000000)); | 2001 __ test(answer.reg(), Immediate(0xc0000000)); |
1777 __ j(zero, &result_ok); | 2002 deferred->JumpToAnswerOutOfRange(not_zero); |
1778 __ SmiTag(ecx); | |
1779 deferred->Jump(); | |
1780 __ bind(&result_ok); | |
1781 break; | 2003 break; |
1782 } | 2004 } |
1783 case Token::SHL: { | 2005 case Token::SHL: { |
1784 Label result_ok; | |
1785 __ shl_cl(answer.reg()); | 2006 __ shl_cl(answer.reg()); |
1786 // Check that the *signed* result fits in a smi. | 2007 // Check that the *signed* result fits in a smi. |
1787 __ cmp(answer.reg(), 0xc0000000); | 2008 __ cmp(answer.reg(), 0xc0000000); |
1788 __ j(positive, &result_ok); | 2009 deferred->JumpToAnswerOutOfRange(negative); |
1789 __ SmiTag(ecx); | |
1790 deferred->Jump(); | |
1791 __ bind(&result_ok); | |
1792 break; | 2010 break; |
1793 } | 2011 } |
1794 default: | 2012 default: |
1795 UNREACHABLE(); | 2013 UNREACHABLE(); |
1796 } | 2014 } |
1797 // Smi-tag the result in answer. | 2015 // Smi-tag the result in answer. |
1798 __ SmiTag(answer.reg()); | 2016 __ SmiTag(answer.reg()); |
1799 deferred->BindExit(); | 2017 deferred->BindExit(); |
1800 left->Unuse(); | 2018 left->Unuse(); |
1801 right->Unuse(); | 2019 right->Unuse(); |
1802 ASSERT(answer.is_valid()); | 2020 ASSERT(answer.is_valid()); |
1803 return answer; | 2021 return answer; |
1804 } | 2022 } |
1805 | 2023 |
1806 // Handle the other binary operations. | 2024 // Handle the other binary operations. |
1807 left->ToRegister(); | 2025 left->ToRegister(); |
1808 right->ToRegister(); | 2026 right->ToRegister(); |
2027 // DeferredInlineBinaryOperation requires all the registers that it is told | |
2028 // about to be spilled. | |
2029 frame_->Spill(left->reg()); | |
2030 frame_->Spill(right->reg()); | |
1809 // A newly allocated register answer is used to hold the answer. The | 2031 // A newly allocated register answer is used to hold the answer. The |
1810 // registers containing left and right are not modified so they don't | 2032 // registers containing left and right are not modified so they don't |
1811 // need to be spilled in the fast case. | 2033 // need to be spilled in the fast case. |
1812 answer = allocator_->Allocate(); | 2034 answer = allocator_->Allocate(); |
1813 ASSERT(answer.is_valid()); | 2035 ASSERT(answer.is_valid()); |
1814 | 2036 |
1815 // Perform the smi tag check. | 2037 // Perform the smi tag check. |
1816 DeferredInlineBinaryOperation* deferred = | 2038 DeferredInlineBinaryOperation* deferred = |
1817 new DeferredInlineBinaryOperation(op, | 2039 new DeferredInlineBinaryOperation(op, |
1818 answer.reg(), | 2040 answer.reg(), |
1819 left->reg(), | 2041 left->reg(), |
1820 right->reg(), | 2042 right->reg(), |
1821 left_type_info, | 2043 left_type_info, |
1822 right_type_info, | 2044 right_type_info, |
1823 overwrite_mode); | 2045 overwrite_mode); |
1824 JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), | 2046 Label non_smi_bit_op; |
1825 left_type_info, right_type_info, deferred); | 2047 if (op != Token::BIT_OR) { |
2048 JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(), | |
2049 left_type_info, right_type_info, | |
2050 deferred->NonSmiInputLabel()); | |
2051 } | |
1826 | 2052 |
1827 __ mov(answer.reg(), left->reg()); | 2053 __ mov(answer.reg(), left->reg()); |
1828 switch (op) { | 2054 switch (op) { |
1829 case Token::ADD: | 2055 case Token::ADD: |
1830 __ add(answer.reg(), Operand(right->reg())); | 2056 __ add(answer.reg(), Operand(right->reg())); |
1831 deferred->Branch(overflow); | 2057 deferred->Branch(overflow); |
1832 break; | 2058 break; |
1833 | 2059 |
1834 case Token::SUB: | 2060 case Token::SUB: |
1835 __ sub(answer.reg(), Operand(right->reg())); | 2061 __ sub(answer.reg(), Operand(right->reg())); |
(...skipping 22 matching lines...)
1858 __ or_(answer.reg(), Operand(right->reg())); | 2084 __ or_(answer.reg(), Operand(right->reg())); |
1859 deferred->Branch(negative); | 2085 deferred->Branch(negative); |
1860 __ xor_(answer.reg(), Operand(answer.reg())); // Positive 0 is correct. | 2086 __ xor_(answer.reg(), Operand(answer.reg())); // Positive 0 is correct. |
1861 __ bind(&non_zero_result); | 2087 __ bind(&non_zero_result); |
1862 } | 2088 } |
1863 break; | 2089 break; |
1864 } | 2090 } |
1865 | 2091 |
1866 case Token::BIT_OR: | 2092 case Token::BIT_OR: |
1867 __ or_(answer.reg(), Operand(right->reg())); | 2093 __ or_(answer.reg(), Operand(right->reg())); |
2094 __ test(answer.reg(), Immediate(kSmiTagMask)); | |
2095 __ j(not_zero, deferred->NonSmiInputLabel()); | |
1868 break; | 2096 break; |
1869 | 2097 |
1870 case Token::BIT_AND: | 2098 case Token::BIT_AND: |
1871 __ and_(answer.reg(), Operand(right->reg())); | 2099 __ and_(answer.reg(), Operand(right->reg())); |
1872 break; | 2100 break; |
1873 | 2101 |
1874 case Token::BIT_XOR: | 2102 case Token::BIT_XOR: |
1875 __ xor_(answer.reg(), Operand(right->reg())); | 2103 __ xor_(answer.reg(), Operand(right->reg())); |
1876 break; | 2104 break; |
1877 | 2105 |
1878 default: | 2106 default: |
1879 UNREACHABLE(); | 2107 UNREACHABLE(); |
1880 break; | 2108 break; |
1881 } | 2109 } |
2110 | |
1882 deferred->BindExit(); | 2111 deferred->BindExit(); |
1883 left->Unuse(); | 2112 left->Unuse(); |
1884 right->Unuse(); | 2113 right->Unuse(); |
1885 ASSERT(answer.is_valid()); | 2114 ASSERT(answer.is_valid()); |
1886 return answer; | 2115 return answer; |
1887 } | 2116 } |
1888 | 2117 |
1889 | 2118 |
1890 // Call the appropriate binary operation stub to compute src op value | 2119 // Call the appropriate binary operation stub to compute src op value |
1891 // and leave the result in dst. | 2120 // and leave the result in dst. |
(...skipping 465 matching lines...)
2357 deferred->BindExit(); | 2586 deferred->BindExit(); |
2358 operand->Unuse(); | 2587 operand->Unuse(); |
2359 } | 2588 } |
2360 } | 2589 } |
2361 break; | 2590 break; |
2362 | 2591 |
2363 case Token::BIT_OR: | 2592 case Token::BIT_OR: |
2364 case Token::BIT_XOR: | 2593 case Token::BIT_XOR: |
2365 case Token::BIT_AND: { | 2594 case Token::BIT_AND: { |
2366 operand->ToRegister(); | 2595 operand->ToRegister(); |
2596 // DeferredInlineBinaryOperation requires all the registers that it is | |
2597 // told about to be spilled. | |
2367 frame_->Spill(operand->reg()); | 2598 frame_->Spill(operand->reg()); |
2368 DeferredCode* deferred = NULL; | 2599 DeferredInlineBinaryOperation* deferred = NULL; |
2369 if (reversed) { | |
2370 deferred = | |
2371 new DeferredInlineSmiOperationReversed(op, | |
2372 operand->reg(), | |
2373 smi_value, | |
2374 operand->reg(), | |
2375 operand->type_info(), | |
2376 overwrite_mode); | |
2377 } else { | |
2378 deferred = new DeferredInlineSmiOperation(op, | |
2379 operand->reg(), | |
2380 operand->reg(), | |
2381 operand->type_info(), | |
2382 smi_value, | |
2383 overwrite_mode); | |
2384 } | |
2385 if (!operand->type_info().IsSmi()) { | 2600 if (!operand->type_info().IsSmi()) { |
2601 Result left = allocator()->Allocate(); | |
2602 ASSERT(left.is_valid()); | |
2603 Result right = allocator()->Allocate(); | |
2604 ASSERT(right.is_valid()); | |
2605 deferred = new DeferredInlineBinaryOperation( | |
2606 op, | |
2607 operand->reg(), | |
2608 left.reg(), | |
2609 right.reg(), | |
2610 operand->type_info(), | |
2611 TypeInfo::Smi(), | |
2612 overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT); | |
2386 __ test(operand->reg(), Immediate(kSmiTagMask)); | 2613 __ test(operand->reg(), Immediate(kSmiTagMask)); |
2387 deferred->Branch(not_zero); | 2614 deferred->JumpToConstantRhs(not_zero, smi_value); |
2388 } else if (FLAG_debug_code) { | 2615 } else if (FLAG_debug_code) { |
2389 __ AbortIfNotSmi(operand->reg()); | 2616 __ AbortIfNotSmi(operand->reg()); |
2390 } | 2617 } |
2391 if (op == Token::BIT_AND) { | 2618 if (op == Token::BIT_AND) { |
2392 __ and_(Operand(operand->reg()), Immediate(value)); | 2619 __ and_(Operand(operand->reg()), Immediate(value)); |
2393 } else if (op == Token::BIT_XOR) { | 2620 } else if (op == Token::BIT_XOR) { |
2394 if (int_value != 0) { | 2621 if (int_value != 0) { |
2395 __ xor_(Operand(operand->reg()), Immediate(value)); | 2622 __ xor_(Operand(operand->reg()), Immediate(value)); |
2396 } | 2623 } |
2397 } else { | 2624 } else { |
2398 ASSERT(op == Token::BIT_OR); | 2625 ASSERT(op == Token::BIT_OR); |
2399 if (int_value != 0) { | 2626 if (int_value != 0) { |
2400 __ or_(Operand(operand->reg()), Immediate(value)); | 2627 __ or_(Operand(operand->reg()), Immediate(value)); |
2401 } | 2628 } |
2402 } | 2629 } |
2403 deferred->BindExit(); | 2630 if (deferred != NULL) deferred->BindExit(); |
2404 answer = *operand; | 2631 answer = *operand; |
2405 break; | 2632 break; |
2406 } | 2633 } |
2407 | 2634 |
2408 case Token::DIV: | 2635 case Token::DIV: |
2409 if (!reversed && int_value == 2) { | 2636 if (!reversed && int_value == 2) { |
2410 operand->ToRegister(); | 2637 operand->ToRegister(); |
2411 frame_->Spill(operand->reg()); | 2638 frame_->Spill(operand->reg()); |
2412 | 2639 |
2413 DeferredInlineSmiOperation* deferred = | 2640 DeferredInlineSmiOperation* deferred = |
(...skipping 11545 matching lines...)
13959 masm.GetCode(&desc); | 14186 masm.GetCode(&desc); |
13960 // Call the function from C++. | 14187 // Call the function from C++. |
13961 return FUNCTION_CAST<MemCopyFunction>(buffer); | 14188 return FUNCTION_CAST<MemCopyFunction>(buffer); |
13962 } | 14189 } |
13963 | 14190 |
13964 #undef __ | 14191 #undef __ |
13965 | 14192 |
13966 } } // namespace v8::internal | 14193 } } // namespace v8::internal |
13967 | 14194 |
13968 #endif // V8_TARGET_ARCH_IA32 | 14195 #endif // V8_TARGET_ARCH_IA32 |