OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 973 matching lines...) |
984 | 984 |
985 | 985 |
986 bool MacroAssembler::IsUnsafeInt(const int x) { | 986 bool MacroAssembler::IsUnsafeInt(const int x) { |
987 static const int kMaxBits = 17; | 987 static const int kMaxBits = 17; |
988 return !is_intn(x, kMaxBits); | 988 return !is_intn(x, kMaxBits); |
989 } | 989 } |
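
Note: is_intn(x, 17) asks whether x fits in 17 signed bits, so any constant
needing more bits is treated as "unsafe" (large attacker-chosen immediates
are the raw material for JIT spraying) and gets masked with the JIT cookie
in SafeMove/SafePush below. A value-level sketch of the predicate, assuming
V8's usual definition of is_intn (helper name hypothetical):

    static bool IsUnsafeIntModel(int x) {
      const int kMaxBits = 17;
      // is_intn(x, n) <=> -(2^(n-1)) <= x < 2^(n-1)
      return !(x >= -(1 << (kMaxBits - 1)) && x < (1 << (kMaxBits - 1)));
    }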
990 | 990 |
991 | 991 |
992 void MacroAssembler::SafeMove(Register dst, Smi* src) { | 992 void MacroAssembler::SafeMove(Register dst, Smi* src) { |
993 ASSERT(!dst.is(kScratchRegister)); | 993 ASSERT(!dst.is(kScratchRegister)); |
| 994 #if !V8_USE_31_BITS_SMI_VALUE |
994 ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi. | 995 ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi. |
995 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { | 996 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { |
996 Move(dst, Smi::FromInt(src->value() ^ jit_cookie())); | 997 Move(dst, Smi::FromInt(src->value() ^ jit_cookie())); |
997 Move(kScratchRegister, Smi::FromInt(jit_cookie())); | 998 Move(kScratchRegister, Smi::FromInt(jit_cookie())); |
998 xor_(dst, kScratchRegister); | 999 xor_(dst, kScratchRegister); |
999 } else { | 1000 } else { |
1000 Move(dst, src); | 1001 Move(dst, src); |
1001 } | 1002 } |
| 1003 #else |
| 1004 ASSERT(kSmiValueSize == 31); |
| 1005 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { |
| 1006 movq(dst, Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^ |
| 1007 jit_cookie())); |
| 1008 movq(kScratchRegister, Immediate(jit_cookie())); |
| 1009 xor_(dst, kScratchRegister); |
| 1010 } else { |
| 1011 Move(dst, src); |
| 1012 } |
| 1013 #endif |
1002 } | 1014 } |
1003 | 1015 |
1004 | 1016 |
1005 void MacroAssembler::SafePush(Smi* src) { | 1017 void MacroAssembler::SafePush(Smi* src) { |
| 1018 #if !V8_USE_31_BITS_SMI_VALUE |
1006 ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi. | 1019 ASSERT(kSmiValueSize == 32); // JIT cookie can be converted to Smi. |
1007 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { | 1020 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { |
1008 Push(Smi::FromInt(src->value() ^ jit_cookie())); | 1021 Push(Smi::FromInt(src->value() ^ jit_cookie())); |
1009 Move(kScratchRegister, Smi::FromInt(jit_cookie())); | 1022 Move(kScratchRegister, Smi::FromInt(jit_cookie())); |
1010 xor_(Operand(rsp, 0), kScratchRegister); | 1023 xor_(Operand(rsp, 0), kScratchRegister); |
1011 } else { | 1024 } else { |
1012 Push(src); | 1025 Push(src); |
1013 } | 1026 } |
| 1027 #else |
| 1028 ASSERT(kSmiValueSize == 31); |
| 1029 if (IsUnsafeInt(src->value()) && jit_cookie() != 0) { |
| 1030 push(Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^ |
| 1031 jit_cookie())); |
| 1032 movq(kScratchRegister, Immediate(jit_cookie())); |
| 1033 xor_(Operand(rsp, 0), kScratchRegister); |
| 1034 } else { |
| 1035 Push(src); |
| 1036 } |
| 1037 #endif |
1014 } | 1038 } |
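
Note on the new #else arms above: with 31-bit smis the whole tagged pattern
fits in the low 32 bits, so the cookie-masked value can be emitted as an
ordinary 32-bit immediate (the reinterpret_cast picks up the raw tagged
bits) and unmasked at runtime with a single xor. A minimal value-level
sketch of why the round trip is the identity (names hypothetical):

    #include <cassert>
    #include <cstdint>

    void CookieRoundTrip(int32_t value, uint32_t cookie) {
      uint32_t tagged = static_cast<uint32_t>(value) << 1;  // 31-bit smi bits
      uint32_t baked = tagged ^ cookie;    // what the immediate in code holds
      uint32_t runtime = baked ^ cookie;   // the emitted xor_ undoes the mask
      assert(runtime == tagged);           // x ^ c ^ c == x
    }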
1015 | 1039 |
1016 | 1040 |
1017 // ---------------------------------------------------------------------------- | 1041 // ---------------------------------------------------------------------------- |
1018 // Smi tagging, untagging and tag detection. | 1042 // Smi tagging, untagging and tag detection. |
1019 | 1043 |
1020 Register MacroAssembler::GetSmiConstant(Smi* source) { | 1044 Register MacroAssembler::GetSmiConstant(Smi* source) { |
1021 int value = source->value(); | 1045 int value = source->value(); |
1022 if (value == 0) { | 1046 if (value == 0) { |
1023 xorl(kScratchRegister, kScratchRegister); | 1047 xorl(kScratchRegister, kScratchRegister); |
(...skipping 65 matching lines...) |
1089 neg(dst); | 1113 neg(dst); |
1090 } | 1114 } |
1091 } | 1115 } |
1092 | 1116 |
1093 | 1117 |
1094 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { | 1118 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { |
1095 STATIC_ASSERT(kSmiTag == 0); | 1119 STATIC_ASSERT(kSmiTag == 0); |
1096 if (!dst.is(src)) { | 1120 if (!dst.is(src)) { |
1097 movl(dst, src); | 1121 movl(dst, src); |
1098 } | 1122 } |
| 1123 #if !V8_USE_31_BITS_SMI_VALUE |
1099 shl(dst, Immediate(kSmiShift)); | 1124 shl(dst, Immediate(kSmiShift)); |
| 1125 #else |
| 1126 shll(dst, Immediate(kSmiShift)); |
| 1127 movsxlq(dst, dst); |
| 1128 #endif |
1100 } | 1129 } |
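
The two tagging layouts diverge here: 32-bit smis keep the payload in the
upper word (one 64-bit shift), while 31-bit smis keep (value << 1) in the
lower word, sign-extended so the register still holds a canonical 64-bit
pattern. A value-level sketch, assuming callers guarantee the value fits
(function names hypothetical):

    #include <cstdint>

    int64_t Tag32(int32_t v) {             // shl(dst, Immediate(32))
      return static_cast<int64_t>(v) << 32;
    }

    int64_t Tag31(int32_t v) {             // shll(dst, 1); movsxlq(dst, dst)
      int32_t tagged = static_cast<int32_t>(static_cast<uint32_t>(v) << 1);
      return static_cast<int64_t>(tagged); // sign-extend the 32-bit pattern
    }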
1101 | 1130 |
1102 | 1131 |
1103 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { | 1132 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { |
1104 if (emit_debug_code()) { | 1133 if (emit_debug_code()) { |
1105 testb(dst, Immediate(0x01)); | 1134 testb(dst, Immediate(0x01)); |
1106 Label ok; | 1135 Label ok; |
1107 j(zero, &ok, Label::kNear); | 1136 j(zero, &ok, Label::kNear); |
1108 if (allow_stub_calls()) { | 1137 if (allow_stub_calls()) { |
1109 Abort("Integer32ToSmiField writing to non-smi location"); | 1138 Abort("Integer32ToSmiField writing to non-smi location"); |
1110 } else { | 1139 } else { |
1111 int3(); | 1140 int3(); |
1112 } | 1141 } |
1113 bind(&ok); | 1142 bind(&ok); |
1114 } | 1143 } |
| 1144 #if !V8_USE_31_BITS_SMI_VALUE |
1115 ASSERT(kSmiShift % kBitsPerByte == 0); | 1145 ASSERT(kSmiShift % kBitsPerByte == 0); |
1116 movl(Operand(dst, kSmiShift / kBitsPerByte), src); | 1146 movl(Operand(dst, kSmiShift / kBitsPerByte), src); |
| 1147 #else |
| 1148 Integer32ToSmi(kScratchRegister, src); |
| 1149 movq(dst, kScratchRegister); |
| 1150 #endif |
1117 } | 1151 } |
1118 | 1152 |
1119 | 1153 |
1120 void MacroAssembler::Integer64PlusConstantToSmi(Register dst, | 1154 void MacroAssembler::Integer64PlusConstantToSmi(Register dst, |
1121 Register src, | 1155 Register src, |
1122 int constant) { | 1156 int constant) { |
1123 if (dst.is(src)) { | 1157 if (dst.is(src)) { |
1124 addl(dst, Immediate(constant)); | 1158 addl(dst, Immediate(constant)); |
1125 } else { | 1159 } else { |
1126 leal(dst, Operand(src, constant)); | 1160 leal(dst, Operand(src, constant)); |
1127 } | 1161 } |
1128 shl(dst, Immediate(kSmiShift)); | 1162 Integer32ToSmi(dst, dst); |
1129 } | 1163 } |
1130 | 1164 |
1131 | 1165 |
1132 void MacroAssembler::SmiToInteger32(Register dst, Register src) { | 1166 void MacroAssembler::SmiToInteger32(Register dst, Register src) { |
1133 STATIC_ASSERT(kSmiTag == 0); | 1167 STATIC_ASSERT(kSmiTag == 0); |
1134 if (!dst.is(src)) { | 1168 if (!dst.is(src)) { |
1135 movq(dst, src); | 1169 movq(dst, src); |
1136 } | 1170 } |
| 1171 #if !V8_USE_31_BITS_SMI_VALUE |
1137 shr(dst, Immediate(kSmiShift)); | 1172 shr(dst, Immediate(kSmiShift)); |
| 1173 #else |
| 1174 sarl(dst, Immediate(kSmiShift)); |
| 1175 #endif |
1138 } | 1176 } |
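
Untagging mirrors the layouts: the 32-bit scheme shifts the payload down
from the upper word (a logical shift suffices because only the low 32 bits
of the result are consumed), while the 31-bit scheme needs an arithmetic
32-bit shift to preserve the sign. Sketch (hypothetical helper names):

    #include <cstdint>

    int32_t Untag32(int64_t smi) {   // shr(dst, Immediate(32))
      return static_cast<int32_t>(static_cast<uint64_t>(smi) >> 32);
    }

    int32_t Untag31(int64_t smi) {   // sarl(dst, Immediate(1))
      return static_cast<int32_t>(smi) >> 1;  // arithmetic: keeps the sign
    }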
1139 | 1177 |
1140 | 1178 |
1141 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) { | 1179 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) { |
| 1180 #if !V8_USE_31_BITS_SMI_VALUE |
1142 movl(dst, Operand(src, kSmiShift / kBitsPerByte)); | 1181 movl(dst, Operand(src, kSmiShift / kBitsPerByte)); |
| 1182 #else |
| 1183 movl(dst, src); |
| 1184 sarl(dst, Immediate(kSmiShift)); |
| 1185 #endif |
1143 } | 1186 } |
1144 | 1187 |
1145 | 1188 |
1146 void MacroAssembler::SmiToInteger64(Register dst, Register src) { | 1189 void MacroAssembler::SmiToInteger64(Register dst, Register src) { |
1147 STATIC_ASSERT(kSmiTag == 0); | 1190 STATIC_ASSERT(kSmiTag == 0); |
1148 if (!dst.is(src)) { | 1191 if (!dst.is(src)) { |
1149 movq(dst, src); | 1192 movq(dst, src); |
1150 } | 1193 } |
1151 sar(dst, Immediate(kSmiShift)); | 1194 sar(dst, Immediate(kSmiShift)); |
1152 } | 1195 } |
1153 | 1196 |
1154 | 1197 |
1155 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) { | 1198 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) { |
| 1199 #if !V8_USE_31_BITS_SMI_VALUE |
1156 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); | 1200 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); |
| 1201 #else |
| 1202 movq(dst, src); |
| 1203 SmiToInteger64(dst, dst); |
| 1204 #endif |
1157 } | 1205 } |
1158 | 1206 |
1159 | 1207 |
1160 void MacroAssembler::SmiTest(Register src) { | 1208 void MacroAssembler::SmiTest(Register src) { |
1161 AssertSmi(src); | 1209 AssertSmi(src); |
| 1210 #if !V8_USE_31_BITS_SMI_VALUE |
1162 testq(src, src); | 1211 testq(src, src); |
| 1212 #else |
| 1213 testl(src, src); |
| 1214 #endif |
1163 } | 1215 } |
1164 | 1216 |
1165 | 1217 |
1166 void MacroAssembler::SmiCompare(Register smi1, Register smi2) { | 1218 void MacroAssembler::SmiCompare(Register smi1, Register smi2) { |
1167 AssertSmi(smi1); | 1219 AssertSmi(smi1); |
1168 AssertSmi(smi2); | 1220 AssertSmi(smi2); |
| 1221 #if !V8_USE_31_BITS_SMI_VALUE |
1169 cmpq(smi1, smi2); | 1222 cmpq(smi1, smi2); |
| 1223 #else |
| 1224 cmpl(smi1, smi2); |
| 1225 #endif |
1170 } | 1226 } |
1171 | 1227 |
1172 | 1228 |
1173 void MacroAssembler::SmiCompare(Register dst, Smi* src) { | 1229 void MacroAssembler::SmiCompare(Register dst, Smi* src) { |
1174 AssertSmi(dst); | 1230 AssertSmi(dst); |
1175 Cmp(dst, src); | 1231 Cmp(dst, src); |
1176 } | 1232 } |
1177 | 1233 |
1178 | 1234 |
1179 void MacroAssembler::Cmp(Register dst, Smi* src) { | 1235 void MacroAssembler::Cmp(Register dst, Smi* src) { |
1180 ASSERT(!dst.is(kScratchRegister)); | 1236 ASSERT(!dst.is(kScratchRegister)); |
1181 if (src->value() == 0) { | 1237 if (src->value() == 0) { |
| 1238 #if !V8_USE_31_BITS_SMI_VALUE |
1182 testq(dst, dst); | 1239 testq(dst, dst); |
| 1240 #else |
| 1241 testl(dst, dst); |
| 1242 #endif |
1183 } else { | 1243 } else { |
1184 Register constant_reg = GetSmiConstant(src); | 1244 Register constant_reg = GetSmiConstant(src); |
| 1245 #if !V8_USE_31_BITS_SMI_VALUE |
1185 cmpq(dst, constant_reg); | 1246 cmpq(dst, constant_reg); |
| 1247 #else |
| 1248 cmpl(dst, constant_reg); |
| 1249 #endif |
1186 } | 1250 } |
1187 } | 1251 } |
1188 | 1252 |
1189 | 1253 |
1190 void MacroAssembler::SmiCompare(Register dst, const Operand& src) { | 1254 void MacroAssembler::SmiCompare(Register dst, const Operand& src) { |
1191 AssertSmi(dst); | 1255 AssertSmi(dst); |
1192 AssertSmi(src); | 1256 AssertSmi(src); |
| 1257 #if !V8_USE_31_BITS_SMI_VALUE |
1193 cmpq(dst, src); | 1258 cmpq(dst, src); |
| 1259 #else |
| 1260 cmpl(dst, src); |
| 1261 #endif |
1194 } | 1262 } |
1195 | 1263 |
1196 | 1264 |
1197 void MacroAssembler::SmiCompare(const Operand& dst, Register src) { | 1265 void MacroAssembler::SmiCompare(const Operand& dst, Register src) { |
1198 AssertSmi(dst); | 1266 AssertSmi(dst); |
1199 AssertSmi(src); | 1267 AssertSmi(src); |
| 1268 #if !V8_USE_31_BITS_SMI_VALUE |
1200 cmpq(dst, src); | 1269 cmpq(dst, src); |
| 1270 #else |
| 1271 cmpl(dst, src); |
| 1272 #endif |
1201 } | 1273 } |
1202 | 1274 |
1203 | 1275 |
1204 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { | 1276 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { |
1205 AssertSmi(dst); | 1277 AssertSmi(dst); |
| 1278 #if !V8_USE_31_BITS_SMI_VALUE |
1206 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value())); | 1279 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value())); |
| 1280 #else |
| 1281 cmpl(dst, Immediate(src)); |
| 1282 #endif |
1207 } | 1283 } |
1208 | 1284 |
1209 | 1285 |
1210 void MacroAssembler::Cmp(const Operand& dst, Smi* src) { | 1286 void MacroAssembler::Cmp(const Operand& dst, Smi* src) { |
| 1287 #if !V8_USE_31_BITS_SMI_VALUE |
1211 // The Operand cannot use the smi register. | 1288 // The Operand cannot use the smi register. |
1212 Register smi_reg = GetSmiConstant(src); | 1289 Register smi_reg = GetSmiConstant(src); |
1213 ASSERT(!dst.AddressUsesRegister(smi_reg)); | 1290 ASSERT(!dst.AddressUsesRegister(smi_reg)); |
1214 cmpq(dst, smi_reg); | 1291 cmpq(dst, smi_reg); |
| 1292 #else |
| 1293 cmpl(dst, Immediate(src)); |
| 1294 #endif |
1215 } | 1295 } |
1216 | 1296 |
1217 | 1297 |
1218 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) { | 1298 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) { |
| 1299 #if !V8_USE_31_BITS_SMI_VALUE |
1219 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); | 1300 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); |
| 1301 #else |
| 1302 SmiToInteger32(kScratchRegister, dst); |
| 1303 cmpl(kScratchRegister, src); |
| 1304 #endif |
1220 } | 1305 } |
1221 | 1306 |
1222 | 1307 |
1223 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, | 1308 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, |
1224 Register src, | 1309 Register src, |
1225 int power) { | 1310 int power) { |
1226 ASSERT(power >= 0); | 1311 ASSERT(power >= 0); |
1227 ASSERT(power < 64); | 1312 ASSERT(power < 64); |
1228 if (power == 0) { | 1313 if (power == 0) { |
1229 SmiToInteger64(dst, src); | 1314 SmiToInteger64(dst, src); |
1230 return; | 1315 return; |
1231 } | 1316 } |
1232 if (!dst.is(src)) { | 1317 if (!dst.is(src)) { |
1233 movq(dst, src); | 1318 movq(dst, src); |
1234 } | 1319 } |
1235 if (power < kSmiShift) { | 1320 if (power < kSmiShift) { |
1236 sar(dst, Immediate(kSmiShift - power)); | 1321 sar(dst, Immediate(kSmiShift - power)); |
1237 } else if (power > kSmiShift) { | 1322 } else if (power > kSmiShift) { |
1238 shl(dst, Immediate(power - kSmiShift)); | 1323 shl(dst, Immediate(power - kSmiShift)); |
1239 } | 1324 } |
1240 } | 1325 } |
1241 | 1326 |
1242 | 1327 |
1243 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst, | 1328 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst, |
1244 Register src, | 1329 Register src, |
1245 int power) { | 1330 int power) { |
1246 ASSERT((0 <= power) && (power < 32)); | 1331 ASSERT((0 <= power) && (power < 32)); |
1247 if (dst.is(src)) { | 1332 if (dst.is(src)) { |
| 1333 #if !V8_USE_31_BITS_SMI_VALUE |
1248 shr(dst, Immediate(power + kSmiShift)); | 1334 shr(dst, Immediate(power + kSmiShift)); |
| 1335 #else |
| 1336 shrl(dst, Immediate(power + kSmiShift)); |
| 1337 #endif |
1249 } else { | 1338 } else { |
1250 UNIMPLEMENTED(); // Not used. | 1339 UNIMPLEMENTED(); // Not used. |
1251 } | 1340 } |
1252 } | 1341 } |
1253 | 1342 |
1254 | 1343 |
1255 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2, | 1344 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2, |
1256 Label* on_not_smis, | 1345 Label* on_not_smis, |
1257 Label::Distance near_jump) { | 1346 Label::Distance near_jump) { |
1258 if (dst.is(src1) || dst.is(src2)) { | 1347 if (dst.is(src1) || dst.is(src2)) { |
(...skipping 20 matching lines...) |
1279 | 1368 |
1280 Condition MacroAssembler::CheckSmi(const Operand& src) { | 1369 Condition MacroAssembler::CheckSmi(const Operand& src) { |
1281 STATIC_ASSERT(kSmiTag == 0); | 1370 STATIC_ASSERT(kSmiTag == 0); |
1282 testb(src, Immediate(kSmiTagMask)); | 1371 testb(src, Immediate(kSmiTagMask)); |
1283 return zero; | 1372 return zero; |
1284 } | 1373 } |
1285 | 1374 |
1286 | 1375 |
1287 Condition MacroAssembler::CheckNonNegativeSmi(Register src) { | 1376 Condition MacroAssembler::CheckNonNegativeSmi(Register src) { |
1288 STATIC_ASSERT(kSmiTag == 0); | 1377 STATIC_ASSERT(kSmiTag == 0); |
| 1378 #if !V8_USE_31_BITS_SMI_VALUE |
1289 // Test that both bits of the mask 0x8000000000000001 are zero. | 1379 // Test that both bits of the mask 0x8000000000000001 are zero. |
1290 movq(kScratchRegister, src); | 1380 movq(kScratchRegister, src); |
1291 rol(kScratchRegister, Immediate(1)); | 1381 rol(kScratchRegister, Immediate(1)); |
| 1382 #else |
| 1383 movl(kScratchRegister, src); |
| 1384 roll(kScratchRegister, Immediate(1)); |
| 1385 #endif |
1292 testb(kScratchRegister, Immediate(3)); | 1386 testb(kScratchRegister, Immediate(3)); |
1293 return zero; | 1387 return zero; |
1294 } | 1388 } |
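
The rotate trick above: rotating left by one moves the sign bit into bit 0
and the smi tag bit into bit 1, so "non-negative smi" collapses into one
test of the low two bits. A sketch of the 32-bit variant used on the
31-bit path (helper name hypothetical):

    #include <cstdint>

    bool IsNonNegativeSmi31(uint32_t raw) {
      uint32_t rotated = (raw << 1) | (raw >> 31);  // roll(scratch, 1)
      return (rotated & 3) == 0;  // sign bit and tag bit must both be clear
    }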
1295 | 1389 |
1296 | 1390 |
1297 Condition MacroAssembler::CheckBothSmi(Register first, Register second) { | 1391 Condition MacroAssembler::CheckBothSmi(Register first, Register second) { |
1298 if (first.is(second)) { | 1392 if (first.is(second)) { |
1299 return CheckSmi(first); | 1393 return CheckSmi(first); |
1300 } | 1394 } |
1301 STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3); | 1395 STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3); |
| 1396 #if !V8_USE_31_BITS_SMI_VALUE |
1302 leal(kScratchRegister, Operand(first, second, times_1, 0)); | 1397 leal(kScratchRegister, Operand(first, second, times_1, 0)); |
1303 testb(kScratchRegister, Immediate(0x03)); | 1398 testb(kScratchRegister, Immediate(0x03)); |
| 1399 #else |
| 1400 movl(kScratchRegister, first); |
| 1401 orl(kScratchRegister, second); |
| 1402 testb(kScratchRegister, Immediate(kSmiTagMask)); |
| 1403 #endif |
1304 return zero; | 1404 return zero; |
1305 } | 1405 } |
1306 | 1406 |
1307 | 1407 |
1308 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first, | 1408 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first, |
1309 Register second) { | 1409 Register second) { |
1310 if (first.is(second)) { | 1410 if (first.is(second)) { |
1311 return CheckNonNegativeSmi(first); | 1411 return CheckNonNegativeSmi(first); |
1312 } | 1412 } |
| 1413 #if !V8_USE_31_BITS_SMI_VALUE |
1313 movq(kScratchRegister, first); | 1414 movq(kScratchRegister, first); |
1314 or_(kScratchRegister, second); | 1415 or_(kScratchRegister, second); |
1315 rol(kScratchRegister, Immediate(1)); | 1416 rol(kScratchRegister, Immediate(1)); |
| 1417 #else |
| 1418 movl(kScratchRegister, first); |
| 1419 orl(kScratchRegister, second); |
| 1420 roll(kScratchRegister, Immediate(1)); |
| 1421 #endif |
1316 testl(kScratchRegister, Immediate(3)); | 1422 testl(kScratchRegister, Immediate(3)); |
1317 return zero; | 1423 return zero; |
1318 } | 1424 } |
1319 | 1425 |
1320 | 1426 |
1321 Condition MacroAssembler::CheckEitherSmi(Register first, | 1427 Condition MacroAssembler::CheckEitherSmi(Register first, |
1322 Register second, | 1428 Register second, |
1323 Register scratch) { | 1429 Register scratch) { |
1324 if (first.is(second)) { | 1430 if (first.is(second)) { |
1325 return CheckSmi(first); | 1431 return CheckSmi(first); |
1326 } | 1432 } |
1327 if (scratch.is(second)) { | 1433 if (scratch.is(second)) { |
1328 andl(scratch, first); | 1434 andl(scratch, first); |
1329 } else { | 1435 } else { |
1330 if (!scratch.is(first)) { | 1436 if (!scratch.is(first)) { |
1331 movl(scratch, first); | 1437 movl(scratch, first); |
1332 } | 1438 } |
1333 andl(scratch, second); | 1439 andl(scratch, second); |
1334 } | 1440 } |
1335 testb(scratch, Immediate(kSmiTagMask)); | 1441 testb(scratch, Immediate(kSmiTagMask)); |
1336 return zero; | 1442 return zero; |
1337 } | 1443 } |
1338 | 1444 |
1339 | 1445 |
1340 Condition MacroAssembler::CheckIsMinSmi(Register src) { | 1446 Condition MacroAssembler::CheckIsMinSmi(Register src) { |
1341 ASSERT(!src.is(kScratchRegister)); | 1447 ASSERT(!src.is(kScratchRegister)); |
1342 // If we overflow by subtracting one, it's the minimal smi value. | 1448 // If we overflow by subtracting one, it's the minimal smi value. |
| 1449 #if !V8_USE_31_BITS_SMI_VALUE |
1343 cmpq(src, kSmiConstantRegister); | 1450 cmpq(src, kSmiConstantRegister); |
| 1451 #else |
| 1452 cmpl(src, kSmiConstantRegister); |
| 1453 #endif |
1344 return overflow; | 1454 return overflow; |
1345 } | 1455 } |
1346 | 1456 |
1347 | 1457 |
1348 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) { | 1458 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) { |
| 1459 #if !V8_USE_31_BITS_SMI_VALUE |
1349 // A 32-bit integer value can always be converted to a smi. | 1460 // A 32-bit integer value can always be converted to a smi. |
1350 return always; | 1461 return always; |
| 1462 #else |
| 1463 cmpl(src, Immediate(0xc0000000)); |
| 1464 return positive; |
| 1465 #endif |
1351 } | 1466 } |
1352 | 1467 |
1353 | 1468 |
1354 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) { | 1469 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) { |
| 1470 #if !V8_USE_31_BITS_SMI_VALUE |
1355 // An unsigned 32-bit integer value is valid as long as the high bit | 1471 // An unsigned 32-bit integer value is valid as long as the high bit |
1356 // is not set. | 1472 // is not set. |
1357 testl(src, src); | 1473 testl(src, src); |
1358 return positive; | 1474 return positive; |
| 1475 #else |
| 1476 testl(src, Immediate(0xc0000000)); |
| 1477 return zero; |
| 1478 #endif |
1359 } | 1479 } |
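
Both 0xc0000000 checks encode the 31-bit smi range: a signed int32 is
representable iff it lies in [-2^30, 2^30), an unsigned one iff it is
below 2^30. A sketch of what the two flag tests compute (helper names
hypothetical):

    #include <cstdint>

    bool Int32FitsSmi31(int32_t v) {
      // cmpl(src, 0xc0000000); "positive": sign of v + 2^30 is clear.
      return static_cast<uint32_t>(v) + 0x40000000u < 0x80000000u;
    }

    bool Uint32FitsSmi31(uint32_t v) {
      // testl(src, 0xc0000000); "zero": both top bits clear, i.e. v < 2^30.
      return (v & 0xc0000000u) == 0;
    }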
1360 | 1480 |
1361 | 1481 |
1362 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) { | 1482 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) { |
1363 if (dst.is(src)) { | 1483 if (dst.is(src)) { |
1364 andl(dst, Immediate(kSmiTagMask)); | 1484 andl(dst, Immediate(kSmiTagMask)); |
1365 } else { | 1485 } else { |
1366 movl(dst, Immediate(kSmiTagMask)); | 1486 movl(dst, Immediate(kSmiTagMask)); |
1367 andl(dst, src); | 1487 andl(dst, src); |
1368 } | 1488 } |
(...skipping 85 matching lines...) |
1454 Label::Distance near_jump) { | 1574 Label::Distance near_jump) { |
1455 // Does not assume that src is a smi. | 1575 // Does not assume that src is a smi. |
1456 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask)); | 1576 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask)); |
1457 STATIC_ASSERT(kSmiTag == 0); | 1577 STATIC_ASSERT(kSmiTag == 0); |
1458 ASSERT(!dst.is(kScratchRegister)); | 1578 ASSERT(!dst.is(kScratchRegister)); |
1459 ASSERT(!src.is(kScratchRegister)); | 1579 ASSERT(!src.is(kScratchRegister)); |
1460 | 1580 |
1461 JumpIfNotSmi(src, on_not_smi_result, near_jump); | 1581 JumpIfNotSmi(src, on_not_smi_result, near_jump); |
1462 Register tmp = (dst.is(src) ? kScratchRegister : dst); | 1582 Register tmp = (dst.is(src) ? kScratchRegister : dst); |
1463 LoadSmiConstant(tmp, constant); | 1583 LoadSmiConstant(tmp, constant); |
| 1584 #if !V8_USE_31_BITS_SMI_VALUE |
1464 addq(tmp, src); | 1585 addq(tmp, src); |
| 1586 #else |
| 1587 addl(tmp, src); |
| 1588 #endif |
1465 j(overflow, on_not_smi_result, near_jump); | 1589 j(overflow, on_not_smi_result, near_jump); |
1466 if (dst.is(src)) { | 1590 if (dst.is(src)) { |
1467 movq(dst, tmp); | 1591 movq(dst, tmp); |
1468 } | 1592 } |
| 1593 #if V8_USE_31_BITS_SMI_VALUE |
| 1594 movsxlq(dst, dst); |
| 1595 #endif |
1469 } | 1596 } |
1470 | 1597 |
1471 | 1598 |
1472 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { | 1599 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { |
1473 if (constant->value() == 0) { | 1600 if (constant->value() == 0) { |
1474 if (!dst.is(src)) { | 1601 if (!dst.is(src)) { |
1475 movq(dst, src); | 1602 movq(dst, src); |
1476 } | 1603 } |
1477 return; | 1604 return; |
1478 } else if (dst.is(src)) { | 1605 } else if (dst.is(src)) { |
(...skipping 34 matching lines...) |
1513 LoadSmiConstant(dst, constant); | 1640 LoadSmiConstant(dst, constant); |
1514 addq(dst, src); | 1641 addq(dst, src); |
1515 return; | 1642 return; |
1516 } | 1643 } |
1517 } | 1644 } |
1518 } | 1645 } |
1519 | 1646 |
1520 | 1647 |
1521 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { | 1648 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { |
1522 if (constant->value() != 0) { | 1649 if (constant->value() != 0) { |
| 1650 #if !V8_USE_31_BITS_SMI_VALUE |
1523 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); | 1651 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); |
| 1652 #else |
| 1653 addq(dst, Immediate(constant)); |
| 1654 #endif |
1524 } | 1655 } |
1525 } | 1656 } |
1526 | 1657 |
1527 | 1658 |
1528 void MacroAssembler::SmiAddConstant(Register dst, | 1659 void MacroAssembler::SmiAddConstant(Register dst, |
1529 Register src, | 1660 Register src, |
1530 Smi* constant, | 1661 Smi* constant, |
1531 Label* on_not_smi_result, | 1662 Label* on_not_smi_result, |
1532 Label::Distance near_jump) { | 1663 Label::Distance near_jump) { |
1533 if (constant->value() == 0) { | 1664 if (constant->value() == 0) { |
1534 if (!dst.is(src)) { | 1665 if (!dst.is(src)) { |
1535 movq(dst, src); | 1666 movq(dst, src); |
1536 } | 1667 } |
1537 } else if (dst.is(src)) { | 1668 } else if (dst.is(src)) { |
1538 ASSERT(!dst.is(kScratchRegister)); | 1669 ASSERT(!dst.is(kScratchRegister)); |
1539 | 1670 |
1540 LoadSmiConstant(kScratchRegister, constant); | 1671 LoadSmiConstant(kScratchRegister, constant); |
| 1672 #if !V8_USE_31_BITS_SMI_VALUE |
1541 addq(kScratchRegister, src); | 1673 addq(kScratchRegister, src); |
| 1674 #else |
| 1675 addl(kScratchRegister, src); |
| 1676 #endif |
1542 j(overflow, on_not_smi_result, near_jump); | 1677 j(overflow, on_not_smi_result, near_jump); |
| 1678 #if !V8_USE_31_BITS_SMI_VALUE |
1543 movq(dst, kScratchRegister); | 1679 movq(dst, kScratchRegister); |
| 1680 #else |
| 1681 movsxlq(dst, kScratchRegister); |
| 1682 #endif |
1544 } else { | 1683 } else { |
1545 LoadSmiConstant(dst, constant); | 1684 LoadSmiConstant(dst, constant); |
| 1685 #if !V8_USE_31_BITS_SMI_VALUE |
1546 addq(dst, src); | 1686 addq(dst, src); |
| 1687 #else |
| 1688 addl(dst, src); |
| 1689 #endif |
1547 j(overflow, on_not_smi_result, near_jump); | 1690 j(overflow, on_not_smi_result, near_jump); |
| 1691 #if V8_USE_31_BITS_SMI_VALUE |
| 1692 movsxlq(dst, dst); |
| 1693 #endif |
1548 } | 1694 } |
1549 } | 1695 } |
1550 | 1696 |
1551 | 1697 |
1552 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { | 1698 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { |
1553 if (constant->value() == 0) { | 1699 if (constant->value() == 0) { |
1554 if (!dst.is(src)) { | 1700 if (!dst.is(src)) { |
1555 movq(dst, src); | 1701 movq(dst, src); |
1556 } | 1702 } |
1557 } else if (dst.is(src)) { | 1703 } else if (dst.is(src)) { |
1558 ASSERT(!dst.is(kScratchRegister)); | 1704 ASSERT(!dst.is(kScratchRegister)); |
1559 Register constant_reg = GetSmiConstant(constant); | 1705 Register constant_reg = GetSmiConstant(constant); |
1560 subq(dst, constant_reg); | 1706 subq(dst, constant_reg); |
1561 } else { | 1707 } else { |
1562 if (constant->value() == Smi::kMinValue) { | 1708 if (constant->value() == Smi::kMinValue) { |
| 1709 #if !V8_USE_31_BITS_SMI_VALUE |
1563 LoadSmiConstant(dst, constant); | 1710 LoadSmiConstant(dst, constant); |
1564 // Adding and subtracting the min-value give the same result; they | 1711 // Adding and subtracting the min-value give the same result; they |
1565 // differ only in the overflow bit, which we don't check here. | 1712 // differ only in the overflow bit, which we don't check here. |
1566 addq(dst, src); | 1713 addq(dst, src); |
| 1714 #else |
| 1715 movq(dst, src); |
| 1716 subq(dst, Immediate(constant)); |
| 1717 #endif |
1567 } else { | 1718 } else { |
1568 // Subtract by adding the negation. | 1719 // Subtract by adding the negation. |
1569 LoadSmiConstant(dst, Smi::FromInt(-constant->value())); | 1720 LoadSmiConstant(dst, Smi::FromInt(-constant->value())); |
1570 addq(dst, src); | 1721 addq(dst, src); |
1571 } | 1722 } |
1572 } | 1723 } |
1573 } | 1724 } |
1574 | 1725 |
1575 | 1726 |
1576 void MacroAssembler::SmiSubConstant(Register dst, | 1727 void MacroAssembler::SmiSubConstant(Register dst, |
1577 Register src, | 1728 Register src, |
1578 Smi* constant, | 1729 Smi* constant, |
1579 Label* on_not_smi_result, | 1730 Label* on_not_smi_result, |
1580 Label::Distance near_jump) { | 1731 Label::Distance near_jump) { |
1581 if (constant->value() == 0) { | 1732 if (constant->value() == 0) { |
1582 if (!dst.is(src)) { | 1733 if (!dst.is(src)) { |
1583 movq(dst, src); | 1734 movq(dst, src); |
1584 } | 1735 } |
1585 } else if (dst.is(src)) { | 1736 } else if (dst.is(src)) { |
1586 ASSERT(!dst.is(kScratchRegister)); | 1737 ASSERT(!dst.is(kScratchRegister)); |
1587 if (constant->value() == Smi::kMinValue) { | 1738 if (constant->value() == Smi::kMinValue) { |
1588 // Subtracting min-value from any non-negative value will overflow. | 1739 // Subtracting min-value from any non-negative value will overflow. |
1589 // We test for non-negativity before doing the subtraction. | 1740 // We test for non-negativity before doing the subtraction. |
| 1741 #if !V8_USE_31_BITS_SMI_VALUE |
1590 testq(src, src); | 1742 testq(src, src); |
| 1743 #else |
| 1744 testl(src, src); |
| 1745 #endif |
1591 j(not_sign, on_not_smi_result, near_jump); | 1746 j(not_sign, on_not_smi_result, near_jump); |
1592 LoadSmiConstant(kScratchRegister, constant); | 1747 LoadSmiConstant(kScratchRegister, constant); |
1593 subq(dst, kScratchRegister); | 1748 subq(dst, kScratchRegister); |
1594 } else { | 1749 } else { |
1595 // Subtract by adding the negation. | 1750 // Subtract by adding the negation. |
1596 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value())); | 1751 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value())); |
| 1752 #if !V8_USE_31_BITS_SMI_VALUE |
1597 addq(kScratchRegister, dst); | 1753 addq(kScratchRegister, dst); |
| 1754 #else |
| 1755 addl(kScratchRegister, dst); |
| 1756 #endif |
1598 j(overflow, on_not_smi_result, near_jump); | 1757 j(overflow, on_not_smi_result, near_jump); |
| 1758 #if !V8_USE_31_BITS_SMI_VALUE |
1599 movq(dst, kScratchRegister); | 1759 movq(dst, kScratchRegister); |
| 1760 #else |
| 1761 movsxlq(dst, kScratchRegister); |
| 1762 #endif |
1600 } | 1763 } |
1601 } else { | 1764 } else { |
1602 if (constant->value() == Smi::kMinValue) { | 1765 if (constant->value() == Smi::kMinValue) { |
1603 // Subtracting min-value from any non-negative value will overflow. | 1766 // Subtracting min-value from any non-negative value will overflow. |
1604 // We test for non-negativity before doing the subtraction. | 1767 // We test for non-negativity before doing the subtraction. |
| 1768 #if !V8_USE_31_BITS_SMI_VALUE |
1605 testq(src, src); | 1769 testq(src, src); |
1606 j(not_sign, on_not_smi_result, near_jump); | 1770 j(not_sign, on_not_smi_result, near_jump); |
1607 LoadSmiConstant(dst, constant); | 1771 LoadSmiConstant(dst, constant); |
1608 // Adding and subtracting the min-value give the same result; they | 1772 // Adding and subtracting the min-value give the same result; they |
1609 // differ only in the overflow bit, which we don't check here. | 1773 // differ only in the overflow bit, which we don't check here. |
1610 addq(dst, src); | 1774 addq(dst, src); |
| 1775 #else |
| 1776 testl(src, src); |
| 1777 j(not_sign, on_not_smi_result, near_jump); |
| 1778 movq(dst, src); |
| 1779 subq(dst, Immediate(constant)); |
| 1780 #endif |
1611 } else { | 1781 } else { |
1612 // Subtract by adding the negation. | 1782 // Subtract by adding the negation. |
1613 LoadSmiConstant(dst, Smi::FromInt(-(constant->value()))); | 1783 LoadSmiConstant(dst, Smi::FromInt(-(constant->value()))); |
| 1784 #if !V8_USE_31_BITS_SMI_VALUE |
1614 addq(dst, src); | 1785 addq(dst, src); |
| 1786 #else |
| 1787 addl(dst, src); |
| 1788 #endif |
1615 j(overflow, on_not_smi_result, near_jump); | 1789 j(overflow, on_not_smi_result, near_jump); |
| 1790 #if V8_USE_31_BITS_SMI_VALUE |
| 1791 movsxlq(dst, dst); |
| 1792 #endif |
1616 } | 1793 } |
1617 } | 1794 } |
1618 } | 1795 } |
1619 | 1796 |
1620 | 1797 |
1621 void MacroAssembler::SmiNeg(Register dst, | 1798 void MacroAssembler::SmiNeg(Register dst, |
1622 Register src, | 1799 Register src, |
1623 Label* on_smi_result, | 1800 Label* on_smi_result, |
1624 Label::Distance near_jump) { | 1801 Label::Distance near_jump) { |
1625 if (dst.is(src)) { | 1802 if (dst.is(src)) { |
1626 ASSERT(!dst.is(kScratchRegister)); | 1803 ASSERT(!dst.is(kScratchRegister)); |
1627 movq(kScratchRegister, src); | 1804 movq(kScratchRegister, src); |
1628 neg(dst); // Low 32 bits are retained as zero by negation. | 1805 neg(dst); // Low 32 bits are retained as zero by negation. |
1629 // Test if result is zero or Smi::kMinValue. | 1806 // Test if result is zero or Smi::kMinValue. |
| 1807 #if !V8_USE_31_BITS_SMI_VALUE |
1630 cmpq(dst, kScratchRegister); | 1808 cmpq(dst, kScratchRegister); |
| 1809 #else |
| 1810 cmpl(dst, kScratchRegister); |
| 1811 #endif |
1631 j(not_equal, on_smi_result, near_jump); | 1812 j(not_equal, on_smi_result, near_jump); |
1632 movq(src, kScratchRegister); | 1813 movq(src, kScratchRegister); |
1633 } else { | 1814 } else { |
1634 movq(dst, src); | 1815 movq(dst, src); |
1635 neg(dst); | 1816 neg(dst); |
| 1817 #if !V8_USE_31_BITS_SMI_VALUE |
1636 cmpq(dst, src); | 1818 cmpq(dst, src); |
| 1819 #else |
| 1820 cmpl(dst, src); |
| 1821 #endif |
1637 // If the result is zero or Smi::kMinValue, negation failed to create a smi. | 1822 // If the result is zero or Smi::kMinValue, negation failed to create a smi. |
1638 j(not_equal, on_smi_result, near_jump); | 1823 j(not_equal, on_smi_result, near_jump); |
1639 } | 1824 } |
1640 } | 1825 } |
1641 | 1826 |
1642 | 1827 |
1643 void MacroAssembler::SmiAdd(Register dst, | 1828 void MacroAssembler::SmiAdd(Register dst, |
1644 Register src1, | 1829 Register src1, |
1645 Register src2, | 1830 Register src2, |
1646 Label* on_not_smi_result, | 1831 Label* on_not_smi_result, |
1647 Label::Distance near_jump) { | 1832 Label::Distance near_jump) { |
1648 ASSERT_NOT_NULL(on_not_smi_result); | 1833 ASSERT_NOT_NULL(on_not_smi_result); |
1649 ASSERT(!dst.is(src2)); | 1834 ASSERT(!dst.is(src2)); |
1650 if (dst.is(src1)) { | 1835 if (dst.is(src1)) { |
1651 movq(kScratchRegister, src1); | 1836 movq(kScratchRegister, src1); |
| 1837 #if !V8_USE_31_BITS_SMI_VALUE |
1652 addq(kScratchRegister, src2); | 1838 addq(kScratchRegister, src2); |
| 1839 #else |
| 1840 addl(kScratchRegister, src2); |
| 1841 #endif |
1653 j(overflow, on_not_smi_result, near_jump); | 1842 j(overflow, on_not_smi_result, near_jump); |
| 1843 #if !V8_USE_31_BITS_SMI_VALUE |
1654 movq(dst, kScratchRegister); | 1844 movq(dst, kScratchRegister); |
| 1845 #else |
| 1846 movsxlq(dst, kScratchRegister); |
| 1847 #endif |
1655 } else { | 1848 } else { |
1656 movq(dst, src1); | 1849 movq(dst, src1); |
| 1850 #if !V8_USE_31_BITS_SMI_VALUE |
1657 addq(dst, src2); | 1851 addq(dst, src2); |
| 1852 #else |
| 1853 addl(dst, src2); |
| 1854 #endif |
1658 j(overflow, on_not_smi_result, near_jump); | 1855 j(overflow, on_not_smi_result, near_jump); |
| 1856 #if V8_USE_31_BITS_SMI_VALUE |
| 1857 movsxlq(dst, dst); |
| 1858 #endif |
1659 } | 1859 } |
1660 } | 1860 } |
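
The recurring 31-bit arithmetic pattern: operate on the low 32 bits, bail
out on the overflow flag, then movsxlq to restore the canonical
sign-extended form (32-bit ops zero the upper half of the register). A
sketch using a compiler builtin as a stand-in for the OF check (the
GCC/Clang builtin is an assumption, not part of the patch):

    #include <cstdint>

    // Returns false where the assembly jumps to on_not_smi_result.
    bool SmiAdd31(int32_t a_tagged, int32_t b_tagged, int64_t* dst) {
      int32_t sum;
      if (__builtin_add_overflow(a_tagged, b_tagged, &sum)) {
        return false;                    // j(overflow, on_not_smi_result)
      }
      *dst = static_cast<int64_t>(sum);  // movsxlq(dst, dst)
      return true;
    }

Adding two tagged (even) values keeps the result even, so no retagging is
needed beyond the sign extension.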
1661 | 1861 |
1662 | 1862 |
1663 void MacroAssembler::SmiAdd(Register dst, | 1863 void MacroAssembler::SmiAdd(Register dst, |
1664 Register src1, | 1864 Register src1, |
1665 const Operand& src2, | 1865 const Operand& src2, |
1666 Label* on_not_smi_result, | 1866 Label* on_not_smi_result, |
1667 Label::Distance near_jump) { | 1867 Label::Distance near_jump) { |
1668 ASSERT_NOT_NULL(on_not_smi_result); | 1868 ASSERT_NOT_NULL(on_not_smi_result); |
1669 if (dst.is(src1)) { | 1869 if (dst.is(src1)) { |
1670 movq(kScratchRegister, src1); | 1870 movq(kScratchRegister, src1); |
| 1871 #if !V8_USE_31_BITS_SMI_VALUE |
1671 addq(kScratchRegister, src2); | 1872 addq(kScratchRegister, src2); |
| 1873 #else |
| 1874 addl(kScratchRegister, src2); |
| 1875 #endif |
1672 j(overflow, on_not_smi_result, near_jump); | 1876 j(overflow, on_not_smi_result, near_jump); |
| 1877 #if !V8_USE_31_BITS_SMI_VALUE |
1673 movq(dst, kScratchRegister); | 1878 movq(dst, kScratchRegister); |
| 1879 #else |
| 1880 movsxlq(dst, kScratchRegister); |
| 1881 #endif |
1674 } else { | 1882 } else { |
1675 ASSERT(!src2.AddressUsesRegister(dst)); | 1883 ASSERT(!src2.AddressUsesRegister(dst)); |
1676 movq(dst, src1); | 1884 movq(dst, src1); |
| 1885 #if !V8_USE_31_BITS_SMI_VALUE |
1677 addq(dst, src2); | 1886 addq(dst, src2); |
| 1887 #else |
| 1888 addl(dst, src2); |
| 1889 #endif |
1678 j(overflow, on_not_smi_result, near_jump); | 1890 j(overflow, on_not_smi_result, near_jump); |
| 1891 #if V8_USE_31_BITS_SMI_VALUE |
| 1892 movsxlq(dst, dst); |
| 1893 #endif |
1679 } | 1894 } |
1680 } | 1895 } |
1681 | 1896 |
1682 | 1897 |
1683 void MacroAssembler::SmiAdd(Register dst, | 1898 void MacroAssembler::SmiAdd(Register dst, |
1684 Register src1, | 1899 Register src1, |
1685 Register src2) { | 1900 Register src2) { |
1686 // No overflow checking. Use only when it's known that | 1901 // No overflow checking. Use only when it's known that |
1687 // overflowing is impossible. | 1902 // overflowing is impossible. |
1688 if (!dst.is(src1)) { | 1903 if (!dst.is(src1)) { |
(...skipping 11 matching lines...) |
1700 | 1915 |
1701 | 1916 |
1702 void MacroAssembler::SmiSub(Register dst, | 1917 void MacroAssembler::SmiSub(Register dst, |
1703 Register src1, | 1918 Register src1, |
1704 Register src2, | 1919 Register src2, |
1705 Label* on_not_smi_result, | 1920 Label* on_not_smi_result, |
1706 Label::Distance near_jump) { | 1921 Label::Distance near_jump) { |
1707 ASSERT_NOT_NULL(on_not_smi_result); | 1922 ASSERT_NOT_NULL(on_not_smi_result); |
1708 ASSERT(!dst.is(src2)); | 1923 ASSERT(!dst.is(src2)); |
1709 if (dst.is(src1)) { | 1924 if (dst.is(src1)) { |
| 1925 #if !V8_USE_31_BITS_SMI_VALUE |
1710 cmpq(dst, src2); | 1926 cmpq(dst, src2); |
| 1927 #else |
| 1928 cmpl(dst, src2); |
| 1929 #endif |
1711 j(overflow, on_not_smi_result, near_jump); | 1930 j(overflow, on_not_smi_result, near_jump); |
1712 subq(dst, src2); | 1931 subq(dst, src2); |
1713 } else { | 1932 } else { |
| 1933 #if !V8_USE_31_BITS_SMI_VALUE |
1714 movq(dst, src1); | 1934 movq(dst, src1); |
1715 subq(dst, src2); | 1935 subq(dst, src2); |
| 1936 #else |
| 1937 movl(dst, src1); |
| 1938 subl(dst, src2); |
| 1939 #endif |
1716 j(overflow, on_not_smi_result, near_jump); | 1940 j(overflow, on_not_smi_result, near_jump); |
| 1941 #if V8_USE_31_BITS_SMI_VALUE |
| 1942 movsxlq(dst, dst); |
| 1943 #endif |
1717 } | 1944 } |
1718 } | 1945 } |
1719 | 1946 |
1720 | 1947 |
1721 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) { | 1948 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) { |
1722 // No overflow checking. Use only when it's known that | 1949 // No overflow checking. Use only when it's known that |
1723 // overflowing is impossible (e.g., subtracting two positive smis). | 1950 // overflowing is impossible (e.g., subtracting two positive smis). |
1724 ASSERT(!dst.is(src2)); | 1951 ASSERT(!dst.is(src2)); |
1725 if (!dst.is(src1)) { | 1952 if (!dst.is(src1)) { |
1726 movq(dst, src1); | 1953 movq(dst, src1); |
1727 } | 1954 } |
1728 subq(dst, src2); | 1955 subq(dst, src2); |
1729 Assert(no_overflow, "Smi subtraction overflow"); | 1956 Assert(no_overflow, "Smi subtraction overflow"); |
1730 } | 1957 } |
1731 | 1958 |
1732 | 1959 |
1733 void MacroAssembler::SmiSub(Register dst, | 1960 void MacroAssembler::SmiSub(Register dst, |
1734 Register src1, | 1961 Register src1, |
1735 const Operand& src2, | 1962 const Operand& src2, |
1736 Label* on_not_smi_result, | 1963 Label* on_not_smi_result, |
1737 Label::Distance near_jump) { | 1964 Label::Distance near_jump) { |
1738 ASSERT_NOT_NULL(on_not_smi_result); | 1965 ASSERT_NOT_NULL(on_not_smi_result); |
1739 if (dst.is(src1)) { | 1966 if (dst.is(src1)) { |
| 1967 #if !V8_USE_31_BITS_SMI_VALUE |
1740 movq(kScratchRegister, src2); | 1968 movq(kScratchRegister, src2); |
1741 cmpq(src1, kScratchRegister); | 1969 cmpq(src1, kScratchRegister); |
| 1970 #else |
| 1971 movl(kScratchRegister, src2); |
| 1972 cmpl(src1, kScratchRegister); |
| 1973 #endif |
1742 j(overflow, on_not_smi_result, near_jump); | 1974 j(overflow, on_not_smi_result, near_jump); |
| 1975 #if !V8_USE_31_BITS_SMI_VALUE |
1743 subq(src1, kScratchRegister); | 1976 subq(src1, kScratchRegister); |
| 1977 #else |
| 1978 subl(src1, kScratchRegister); movsxlq(src1, src1); |
| 1979 #endif |
1744 } else { | 1980 } else { |
| 1981 #if !V8_USE_31_BITS_SMI_VALUE |
1745 movq(dst, src1); | 1982 movq(dst, src1); |
1746 subq(dst, src2); | 1983 subq(dst, src2); |
| 1984 #else |
| 1985 movl(dst, src1); |
| 1986 subl(dst, src2); |
| 1987 #endif |
1747 j(overflow, on_not_smi_result, near_jump); | 1988 j(overflow, on_not_smi_result, near_jump); |
| 1989 #if V8_USE_31_BITS_SMI_VALUE |
| 1990 movsxlq(dst, dst); |
| 1991 #endif |
1748 } | 1992 } |
1749 } | 1993 } |
1750 | 1994 |
1751 | 1995 |
1752 void MacroAssembler::SmiSub(Register dst, | 1996 void MacroAssembler::SmiSub(Register dst, |
1753 Register src1, | 1997 Register src1, |
1754 const Operand& src2) { | 1998 const Operand& src2) { |
1755 // No overflow checking. Use only when it's known that | 1999 // No overflow checking. Use only when it's known that |
1756 // overflowing is impossible (e.g., subtracting two positive smis). | 2000 // overflowing is impossible (e.g., subtracting two positive smis). |
1757 if (!dst.is(src1)) { | 2001 if (!dst.is(src1)) { |
(...skipping 10 matching lines...) |
1768 Label* on_not_smi_result, | 2012 Label* on_not_smi_result, |
1769 Label::Distance near_jump) { | 2013 Label::Distance near_jump) { |
1770 ASSERT(!dst.is(src2)); | 2014 ASSERT(!dst.is(src2)); |
1771 ASSERT(!dst.is(kScratchRegister)); | 2015 ASSERT(!dst.is(kScratchRegister)); |
1772 ASSERT(!src1.is(kScratchRegister)); | 2016 ASSERT(!src1.is(kScratchRegister)); |
1773 ASSERT(!src2.is(kScratchRegister)); | 2017 ASSERT(!src2.is(kScratchRegister)); |
1774 | 2018 |
1775 if (dst.is(src1)) { | 2019 if (dst.is(src1)) { |
1776 Label failure, zero_correct_result; | 2020 Label failure, zero_correct_result; |
1777 movq(kScratchRegister, src1); // Create backup for later testing. | 2021 movq(kScratchRegister, src1); // Create backup for later testing. |
| 2022 #if !V8_USE_31_BITS_SMI_VALUE |
1778 SmiToInteger64(dst, src1); | 2023 SmiToInteger64(dst, src1); |
1779 imul(dst, src2); | 2024 imul(dst, src2); |
| 2025 #else |
| 2026 SmiToInteger32(dst, src1); |
| 2027 imull(dst, src2); |
| 2028 #endif |
1780 j(overflow, &failure, Label::kNear); | 2029 j(overflow, &failure, Label::kNear); |
1781 | 2030 |
1782 // Check for negative zero result. If the product is zero and one | 2031 // Check for negative zero result. If the product is zero and one |
1783 // argument is negative, go to the slow case. | 2032 // argument is negative, go to the slow case. |
1784 Label correct_result; | 2033 Label correct_result; |
1785 testq(dst, dst); | 2034 testq(dst, dst); |
1786 j(not_zero, &correct_result, Label::kNear); | 2035 j(not_zero, &correct_result, Label::kNear); |
1787 | 2036 |
1788 movq(dst, kScratchRegister); | 2037 movq(dst, kScratchRegister); |
1789 xor_(dst, src2); | 2038 xor_(dst, src2); |
1790 // Result was positive zero. | 2039 // Result was positive zero. |
1791 j(positive, &zero_correct_result, Label::kNear); | 2040 j(positive, &zero_correct_result, Label::kNear); |
1792 | 2041 |
1793 bind(&failure); // Reused failure exit, restores src1. | 2042 bind(&failure); // Reused failure exit, restores src1. |
1794 movq(src1, kScratchRegister); | 2043 movq(src1, kScratchRegister); |
1795 jmp(on_not_smi_result, near_jump); | 2044 jmp(on_not_smi_result, near_jump); |
1796 | 2045 |
1797 bind(&zero_correct_result); | 2046 bind(&zero_correct_result); |
1798 Set(dst, 0); | 2047 Set(dst, 0); |
1799 | 2048 |
1800 bind(&correct_result); | 2049 bind(&correct_result); |
| 2050 #if V8_USE_31_BITS_SMI_VALUE |
| 2051 movsxlq(dst, dst); |
| 2052 #endif |
1801 } else { | 2053 } else { |
| 2054 #if !V8_USE_31_BITS_SMI_VALUE |
1802 SmiToInteger64(dst, src1); | 2055 SmiToInteger64(dst, src1); |
1803 imul(dst, src2); | 2056 imul(dst, src2); |
| 2057 #else |
| 2058 SmiToInteger32(dst, src1); |
| 2059 imull(dst, src2); |
| 2060 #endif |
1804 j(overflow, on_not_smi_result, near_jump); | 2061 j(overflow, on_not_smi_result, near_jump); |
1805 // Check for negative zero result. If the product is zero and one | 2062 // Check for negative zero result. If the product is zero and one |
1806 // argument is negative, go to the slow case. | 2063 // argument is negative, go to the slow case. |
1807 Label correct_result; | 2064 Label correct_result; |
1808 testq(dst, dst); | 2065 testq(dst, dst); |
1809 j(not_zero, &correct_result, Label::kNear); | 2066 j(not_zero, &correct_result, Label::kNear); |
1810 // One of src1 and src2 is zero, so check whether the other is | 2067 // One of src1 and src2 is zero, so check whether the other is |
1811 // negative. | 2068 // negative. |
1812 movq(kScratchRegister, src1); | 2069 movq(kScratchRegister, src1); |
1813 xor_(kScratchRegister, src2); | 2070 xor_(kScratchRegister, src2); |
1814 j(negative, on_not_smi_result, near_jump); | 2071 j(negative, on_not_smi_result, near_jump); |
1815 bind(&correct_result); | 2072 bind(&correct_result); |
| 2073 #if V8_USE_31_BITS_SMI_VALUE |
| 2074 movsxlq(dst, dst); |
| 2075 #endif |
1816 } | 2076 } |
1817 } | 2077 } |
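
The negative-zero guard in SmiMul: if the product is zero but exactly one
operand was negative, the correct JS result is -0, which no smi can
represent, so the zero case inspects the XOR of the operands' signs before
accepting the result. Value-level sketch (helper name hypothetical):

    #include <cstdint>

    bool MulZeroNeedsSlowPath(int32_t a_tagged, int32_t b_tagged) {
      bool product_is_zero = (a_tagged == 0 || b_tagged == 0);
      return product_is_zero && ((a_tagged ^ b_tagged) < 0);  // xor_ + j(negative)
    }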
1818 | 2078 |
1819 | 2079 |
1820 void MacroAssembler::SmiDiv(Register dst, | 2080 void MacroAssembler::SmiDiv(Register dst, |
1821 Register src1, | 2081 Register src1, |
1822 Register src2, | 2082 Register src2, |
1823 Label* on_not_smi_result, | 2083 Label* on_not_smi_result, |
1824 Label::Distance near_jump) { | 2084 Label::Distance near_jump) { |
1825 ASSERT(!src1.is(kScratchRegister)); | 2085 ASSERT(!src1.is(kScratchRegister)); |
(...skipping 12 matching lines...) |
1838 } | 2098 } |
1839 SmiToInteger32(rax, src1); | 2099 SmiToInteger32(rax, src1); |
1840 // We need to rule out dividing Smi::kMinValue by -1, since that would | 2100 // We need to rule out dividing Smi::kMinValue by -1, since that would |
1841 // overflow in idiv and raise an exception. | 2101 // overflow in idiv and raise an exception. |
1842 // We combine this with the negative-zero test (negative zero only happens | 2102 // We combine this with the negative-zero test (negative zero only happens |
1843 // when dividing zero by a negative number). | 2103 // when dividing zero by a negative number). |
1844 | 2104 |
1845 // We overshoot a little and go to slow case if we divide min-value | 2105 // We overshoot a little and go to slow case if we divide min-value |
1846 // by any negative value, not just -1. | 2106 // by any negative value, not just -1. |
1847 Label safe_div; | 2107 Label safe_div; |
| 2108 #if !V8_USE_31_BITS_SMI_VALUE |
1848 testl(rax, Immediate(0x7fffffff)); | 2109 testl(rax, Immediate(0x7fffffff)); |
| 2110 #else |
| 2111 testl(rax, Immediate(0x3fffffff)); |
| 2112 #endif |
1849 j(not_zero, &safe_div, Label::kNear); | 2113 j(not_zero, &safe_div, Label::kNear); |
1850 testq(src2, src2); | 2114 testq(src2, src2); |
1851 if (src1.is(rax)) { | 2115 if (src1.is(rax)) { |
1852 j(positive, &safe_div, Label::kNear); | 2116 j(positive, &safe_div, Label::kNear); |
1853 movq(src1, kScratchRegister); | 2117 movq(src1, kScratchRegister); |
1854 jmp(on_not_smi_result, near_jump); | 2118 jmp(on_not_smi_result, near_jump); |
1855 } else { | 2119 } else { |
1856 j(negative, on_not_smi_result, near_jump); | 2120 j(negative, on_not_smi_result, near_jump); |
1857 } | 2121 } |
1858 bind(&safe_div); | 2122 bind(&safe_div); |
(...skipping 73 matching lines...) |
1932 testq(src1, src1); | 2196 testq(src1, src1); |
1933 j(negative, on_not_smi_result, near_jump); | 2197 j(negative, on_not_smi_result, near_jump); |
1934 bind(&smi_result); | 2198 bind(&smi_result); |
1935 Integer32ToSmi(dst, rdx); | 2199 Integer32ToSmi(dst, rdx); |
1936 } | 2200 } |
1937 | 2201 |
1938 | 2202 |
1939 void MacroAssembler::SmiNot(Register dst, Register src) { | 2203 void MacroAssembler::SmiNot(Register dst, Register src) { |
1940 ASSERT(!dst.is(kScratchRegister)); | 2204 ASSERT(!dst.is(kScratchRegister)); |
1941 ASSERT(!src.is(kScratchRegister)); | 2205 ASSERT(!src.is(kScratchRegister)); |
| 2206 #if !V8_USE_31_BITS_SMI_VALUE |
1942 // Set tag and padding bits before negating, so that they are zero afterwards. | 2207 // Set tag and padding bits before negating, so that they are zero afterwards. |
1943 movl(kScratchRegister, Immediate(~0)); | 2208 movl(kScratchRegister, Immediate(~0)); |
| 2209 #else |
| 2210 movl(kScratchRegister, Immediate(1)); |
| 2211 #endif |
1944 if (dst.is(src)) { | 2212 if (dst.is(src)) { |
1945 xor_(dst, kScratchRegister); | 2213 xor_(dst, kScratchRegister); |
1946 } else { | 2214 } else { |
1947 lea(dst, Operand(src, kScratchRegister, times_1, 0)); | 2215 lea(dst, Operand(src, kScratchRegister, times_1, 0)); |
1948 } | 2216 } |
1949 not_(dst); | 2217 not_(dst); |
1950 } | 2218 } |
1951 | 2219 |
1952 | 2220 |
1953 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) { | 2221 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) { |
(...skipping 69 matching lines...) |
2023 if (dst.is(src)) { | 2291 if (dst.is(src)) { |
2024 sar(dst, Immediate(shift_value + kSmiShift)); | 2292 sar(dst, Immediate(shift_value + kSmiShift)); |
2025 shl(dst, Immediate(kSmiShift)); | 2293 shl(dst, Immediate(kSmiShift)); |
2026 } else { | 2294 } else { |
2027 UNIMPLEMENTED(); // Not used. | 2295 UNIMPLEMENTED(); // Not used. |
2028 } | 2296 } |
2029 } | 2297 } |
2030 } | 2298 } |
2031 | 2299 |
2032 | 2300 |
| 2301 #if !V8_USE_31_BITS_SMI_VALUE |
2033 void MacroAssembler::SmiShiftLeftConstant(Register dst, | 2302 void MacroAssembler::SmiShiftLeftConstant(Register dst, |
2034 Register src, | 2303 Register src, |
2035 int shift_value) { | 2304 int shift_value) { |
2036 if (!dst.is(src)) { | 2305 if (!dst.is(src)) { |
2037 movq(dst, src); | 2306 movq(dst, src); |
2038 } | 2307 } |
2039 if (shift_value > 0) { | 2308 if (shift_value > 0) { |
2040 shl(dst, Immediate(shift_value)); | 2309 shl(dst, Immediate(shift_value)); |
2041 } | 2310 } |
2042 } | 2311 } |
| 2312 #else |
| 2313 void MacroAssembler::SmiShiftLeftConstant(Register dst, |
| 2314 Register src, |
| 2315 int shift_value, |
| 2316 Label* on_not_smi_result) { |
| 2317 if (!dst.is(src)) { |
| 2318 movq(dst, src); |
| 2319 } |
| 2320 |
| 2321 if (shift_value > 0) { |
| 2322 Label result_ok; |
| 2323 SmiToInteger32(dst, dst); |
| 2324 shll(dst, Immediate(shift_value)); |
| 2325 cmpl(dst, Immediate(0xc0000000)); |
| 2326 j(not_sign, &result_ok); |
| 2327 jmp(on_not_smi_result); |
| 2328 bind(&result_ok); |
| 2329 Integer32ToSmi(dst, dst); |
| 2330 } |
| 2331 } |
| 2332 #endif |
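
On the 31-bit path a left shift can leave the smi range, so this variant
grows an on_not_smi_result label: untag, shift in 32 bits (matching the
wrap-around semantics of JS <<), re-check the range with the same
cmpl-0xc0000000 idiom, then retag. Sketch (helper name hypothetical; false
stands for the bailout jump):

    #include <cstdint>

    bool SmiShlConst31(int64_t smi, int shift, int64_t* dst) {
      int32_t v = static_cast<int32_t>(smi) >> 1;                   // SmiToInteger32
      int32_t shifted =
          static_cast<int32_t>(static_cast<uint32_t>(v) << shift);  // shll
      if (static_cast<uint32_t>(shifted) + 0x40000000u >= 0x80000000u) {
        return false;                                // result not a valid smi
      }
      *dst = static_cast<int64_t>(shifted) << 1;                    // Integer32ToSmi
      return true;
    }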
2043 | 2333 |
2044 | 2334 |
2045 void MacroAssembler::SmiShiftLogicalRightConstant( | 2335 void MacroAssembler::SmiShiftLogicalRightConstant( |
2046 Register dst, Register src, int shift_value, | 2336 Register dst, Register src, int shift_value, |
2047 Label* on_not_smi_result, Label::Distance near_jump) { | 2337 Label* on_not_smi_result, Label::Distance near_jump) { |
2048 // Logical right shift interprets its result as an *unsigned* number. | 2338 // Logical right shift interprets its result as an *unsigned* number. |
2049 if (dst.is(src)) { | 2339 if (dst.is(src)) { |
2050 UNIMPLEMENTED(); // Not used. | 2340 UNIMPLEMENTED(); // Not used. |
2051 } else { | 2341 } else { |
2052 movq(dst, src); | 2342 movq(dst, src); |
2053 if (shift_value == 0) { | 2343 if (shift_value == 0) { |
| 2344 #if !V8_USE_31_BITS_SMI_VALUE |
2054 testq(dst, dst); | 2345 testq(dst, dst); |
| 2346 #else |
| 2347 testl(dst, dst); |
| 2348 #endif |
2055 j(negative, on_not_smi_result, near_jump); | 2349 j(negative, on_not_smi_result, near_jump); |
2056 } | 2350 } |
| 2351 #if !V8_USE_31_BITS_SMI_VALUE |
2057 shr(dst, Immediate(shift_value + kSmiShift)); | 2352 shr(dst, Immediate(shift_value + kSmiShift)); |
2058 shl(dst, Immediate(kSmiShift)); | 2353 shl(dst, Immediate(kSmiShift)); |
| 2354 #else |
| 2355 SmiToInteger32(dst, dst); |
| 2356 shrl(dst, Immediate(shift_value)); |
| 2357 testl(dst, Immediate(0xc0000000)); |
| 2358 j(not_zero, on_not_smi_result, near_jump); |
| 2359 shll(dst, Immediate(kSmiShift)); |
| 2360 #endif |
2059 } | 2361 } |
2060 } | 2362 } |
2061 | 2363 |
2062 | 2364 |
| 2365 #if !V8_USE_31_BITS_SMI_VALUE |
2063 void MacroAssembler::SmiShiftLeft(Register dst, | 2366 void MacroAssembler::SmiShiftLeft(Register dst, |
2064 Register src1, | 2367 Register src1, |
2065 Register src2) { | 2368 Register src2) { |
2066 ASSERT(!dst.is(rcx)); | 2369 ASSERT(!dst.is(rcx)); |
2067 // Untag shift amount. | 2370 // Untag shift amount. |
2068 if (!dst.is(src1)) { | 2371 if (!dst.is(src1)) { |
2069 movq(dst, src1); | 2372 movq(dst, src1); |
2070 } | 2373 } |
2071 SmiToInteger32(rcx, src2); | 2374 SmiToInteger32(rcx, src2); |
2072 // Shift amount is specified by the lower 5 bits, not six as in the shl opcode. | 2375 // Shift amount is specified by the lower 5 bits, not six as in the shl opcode. |
2073 and_(rcx, Immediate(0x1f)); | 2376 and_(rcx, Immediate(0x1f)); |
2074 shl_cl(dst); | 2377 shl_cl(dst); |
2075 } | 2378 } |
| 2379 #else |
| 2380 void MacroAssembler::SmiShiftLeft(Register dst, |
| 2381 Register src1, |
| 2382 Register src2, |
| 2383 Label* on_not_smi_result) { |
| 2384 ASSERT(!dst.is(kScratchRegister)); |
| 2385 ASSERT(!src1.is(kScratchRegister)); |
| 2386 ASSERT(!src2.is(kScratchRegister)); |
| 2387 ASSERT(!dst.is(rcx)); |
| 2388 Label result_ok; |
| 2389 |
| 2390 if (src1.is(rcx) || src2.is(rcx)) { |
| 2391 movq(kScratchRegister, rcx); |
| 2392 } |
| 2393 // Untag shift amount. |
| 2394 if (!dst.is(src1)) { |
| 2395 movq(dst, src1); |
| 2396 } |
| 2397 SmiToInteger32(dst, dst); |
| 2398 SmiToInteger32(rcx, src2); |
| 2399 // Shift amount is specified by the lower 5 bits, not six as in the shl opcode. |
| 2400 andl(rcx, Immediate(0x1f)); |
| 2401 shll_cl(dst); |
| 2402 cmpl(dst, Immediate(0xc0000000)); |
| 2403 j(not_sign, &result_ok); |
| 2404 if (src1.is(rcx) || src2.is(rcx)) { |
| 2405 if (src1.is(rcx)) { |
| 2406 movq(src1, kScratchRegister); |
| 2407 } else { |
| 2408 movq(src2, kScratchRegister); |
| 2409 } |
| 2410 } |
| 2411 jmp(on_not_smi_result); |
| 2412 bind(&result_ok); |
| 2413 Integer32ToSmi(dst, dst); |
| 2414 } |
| 2415 #endif |
2076 | 2416 |
2077 | 2417 |
2078 void MacroAssembler::SmiShiftLogicalRight(Register dst, | 2418 void MacroAssembler::SmiShiftLogicalRight(Register dst, |
2079 Register src1, | 2419 Register src1, |
2080 Register src2, | 2420 Register src2, |
2081 Label* on_not_smi_result, | 2421 Label* on_not_smi_result, |
2082 Label::Distance near_jump) { | 2422 Label::Distance near_jump) { |
| 2423 #if !V8_USE_31_BITS_SMI_VALUE |
2083 ASSERT(!dst.is(kScratchRegister)); | 2424 ASSERT(!dst.is(kScratchRegister)); |
2084 ASSERT(!src1.is(kScratchRegister)); | 2425 ASSERT(!src1.is(kScratchRegister)); |
2085 ASSERT(!src2.is(kScratchRegister)); | 2426 ASSERT(!src2.is(kScratchRegister)); |
2086 ASSERT(!dst.is(rcx)); | 2427 ASSERT(!dst.is(rcx)); |
2087 // dst and src1 can be the same, because the one case that bails out | 2428 // dst and src1 can be the same, because the one case that bails out |
2088 // is a shift by 0, which leaves dst, and therefore src1, unchanged. | 2429 // is a shift by 0, which leaves dst, and therefore src1, unchanged. |
2089 if (src1.is(rcx) || src2.is(rcx)) { | 2430 if (src1.is(rcx) || src2.is(rcx)) { |
2090 movq(kScratchRegister, rcx); | 2431 movq(kScratchRegister, rcx); |
2091 } | 2432 } |
2092 if (!dst.is(src1)) { | 2433 if (!dst.is(src1)) { |
(...skipping 11 matching lines...) Expand all Loading... |
2104 movq(src1, kScratchRegister); | 2445 movq(src1, kScratchRegister); |
2105 } else { | 2446 } else { |
2106 movq(src2, kScratchRegister); | 2447 movq(src2, kScratchRegister); |
2107 } | 2448 } |
2108 jmp(on_not_smi_result, near_jump); | 2449 jmp(on_not_smi_result, near_jump); |
2109 bind(&positive_result); | 2450 bind(&positive_result); |
2110 } else { | 2451 } else { |
2111 // src2 was zero and src1 negative. | 2452 // src2 was zero and src1 negative. |
2112 j(negative, on_not_smi_result, near_jump); | 2453 j(negative, on_not_smi_result, near_jump); |
2113 } | 2454 } |
| 2455 #else |
| 2456 ASSERT(!dst.is(kScratchRegister)); |
| 2457 ASSERT(!src1.is(kScratchRegister)); |
| 2458 ASSERT(!src2.is(kScratchRegister)); |
| 2459 ASSERT(!dst.is(rcx)); |
| 2460 Label result_ok; |
| 2461 |
| 2462 // dst and src1 can be the same, because the one case that bails out |
| 2463 // is a shift by 0, which leaves dst, and therefore src1, unchanged. |
| 2464 if (src1.is(rcx) || src2.is(rcx)) { |
| 2465 movq(kScratchRegister, rcx); |
| 2466 } |
| 2467 if (!dst.is(src1)) { |
| 2468 movq(dst, src1); |
| 2469 } |
| 2470 SmiToInteger32(rcx, src2); |
| 2471 SmiToInteger32(dst, dst); |
| 2472 shrl_cl(dst); |
| 2473 testl(dst, Immediate(0xc0000000)); |
| 2474 j(zero, &result_ok); |
| 2475 if (src1.is(rcx) || src2.is(rcx)) { |
| 2476 if (src1.is(rcx)) { |
| 2477 movq(src1, kScratchRegister); |
| 2478 } else { |
| 2479 movq(src2, kScratchRegister); |
| 2480 } |
| 2481 } |
| 2482 jmp(on_not_smi_result); |
| 2483 bind(&result_ok); |
| 2484 Integer32ToSmi(dst, dst); |
| 2485 #endif |
2114 } | 2486 } |
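In the 31-bit branch the result check differs from the shift-left case: after shrl_cl the value is zero-extended, so it is a valid smi result only if it does not exceed 2^30 - 1, i.e. bits 30 and 31 are both clear, which is what the testl against 0xc0000000 verifies. A standalone model (the helper name is illustrative only):

    #include <cassert>
    #include <cstdint>

    // Model of the 31-bit bail-out test for x >>> shift.
    static bool LogicalShiftResultIsSmi(int32_t value, int shift) {
      uint32_t result = static_cast<uint32_t>(value) >> (shift & 0x1f);
      return (result & 0xC0000000u) == 0;  // testl(dst, Immediate(0xc0000000))
    }

    int main() {
      assert(LogicalShiftResultIsSmi(-1, 2));   // 0x3fffffff: largest smi
      assert(!LogicalShiftResultIsSmi(-1, 1));  // 0x7fffffff: needs a heap number
      assert(!LogicalShiftResultIsSmi(-1, 0));  // shift by zero keeps the sign bit
    }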
2115 | 2487 |
2116 | 2488 |
2117 void MacroAssembler::SmiShiftArithmeticRight(Register dst, | 2489 void MacroAssembler::SmiShiftArithmeticRight(Register dst, |
2118 Register src1, | 2490 Register src1, |
2119 Register src2) { | 2491 Register src2) { |
2120 ASSERT(!dst.is(kScratchRegister)); | 2492 ASSERT(!dst.is(kScratchRegister)); |
2121 ASSERT(!src1.is(kScratchRegister)); | 2493 ASSERT(!src1.is(kScratchRegister)); |
2122 ASSERT(!src2.is(kScratchRegister)); | 2494 ASSERT(!src2.is(kScratchRegister)); |
2123 ASSERT(!dst.is(rcx)); | 2495 ASSERT(!dst.is(rcx)); |
2124 if (src1.is(rcx)) { | 2496 if (src1.is(rcx)) { |
2125 movq(kScratchRegister, src1); | 2497 movq(kScratchRegister, src1); |
2126 } else if (src2.is(rcx)) { | 2498 } else if (src2.is(rcx)) { |
2127 movq(kScratchRegister, src2); | 2499 movq(kScratchRegister, src2); |
2128 } | 2500 } |
2129 if (!dst.is(src1)) { | 2501 if (!dst.is(src1)) { |
2130 movq(dst, src1); | 2502 movq(dst, src1); |
2131 } | 2503 } |
2132 SmiToInteger32(rcx, src2); | 2504 SmiToInteger32(rcx, src2); |
| 2505 #if !V8_USE_31_BITS_SMI_VALUE |
2133 orl(rcx, Immediate(kSmiShift)); | 2506 orl(rcx, Immediate(kSmiShift)); |
2134   sar_cl(dst);  // Shift by 32 + (original rcx & 0x1f). | 2507   sar_cl(dst);  // Shift by 32 + (original rcx & 0x1f). |
2135 shl(dst, Immediate(kSmiShift)); | 2508 #else |
| 2509 SmiToInteger32(dst, dst); |
| 2510 sarl_cl(dst); |
| 2511 #endif |
| 2512 Integer32ToSmi(dst, dst); |
2136 if (src1.is(rcx)) { | 2513 if (src1.is(rcx)) { |
2137 movq(src1, kScratchRegister); | 2514 movq(src1, kScratchRegister); |
2138 } else if (src2.is(rcx)) { | 2515 } else if (src2.is(rcx)) { |
2139 movq(src2, kScratchRegister); | 2516 movq(src2, kScratchRegister); |
2140 } | 2517 } |
2141 } | 2518 } |
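No bail-out label is needed here because an arithmetic right shift of a smi always yields a smi. The 32-bit branch also keeps the single-instruction trick: with the payload in the upper 32 bits, or-ing the count with kSmiShift (32) makes one 64-bit sar untag and shift at once. A standalone sketch of that combined shift (the SarSmi helper is illustrative; C++20 shift semantics for negative values are assumed, as on all mainstream compilers):

    #include <cassert>
    #include <cstdint>

    // Combined untag-and-shift for the 32-bit smi encoding
    // (payload in bits 32..63 of the tagged word).
    static int64_t SarSmi(int64_t tagged, int shift) {
      int count = 32 | (shift & 0x1f);  // orl(rcx, Immediate(kSmiShift))
      return (tagged >> count) << 32;   // sar_cl(dst); Integer32ToSmi(dst, dst)
    }

    int main() {
      int64_t tagged = static_cast<int64_t>(-8) << 32;
      assert(SarSmi(tagged, 1) == static_cast<int64_t>(-4) << 32);
    }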
2142 | 2519 |
2143 | 2520 |
2144 void MacroAssembler::SelectNonSmi(Register dst, | 2521 void MacroAssembler::SelectNonSmi(Register dst, |
2145 Register src1, | 2522 Register src1, |
(...skipping 36 matching lines...)
2182 | 2559 |
2183 SmiIndex MacroAssembler::SmiToIndex(Register dst, | 2560 SmiIndex MacroAssembler::SmiToIndex(Register dst, |
2184 Register src, | 2561 Register src, |
2185 int shift) { | 2562 int shift) { |
2186 ASSERT(is_uint6(shift)); | 2563 ASSERT(is_uint6(shift)); |
2187 // There is a possible optimization if shift is in the range 60-63, but that | 2564 // There is a possible optimization if shift is in the range 60-63, but that |
2188 // will (and must) never happen. | 2565 // will (and must) never happen. |
2189 if (!dst.is(src)) { | 2566 if (!dst.is(src)) { |
2190 movq(dst, src); | 2567 movq(dst, src); |
2191 } | 2568 } |
| 2569 #if !V8_USE_31_BITS_SMI_VALUE |
2192 if (shift < kSmiShift) { | 2570 if (shift < kSmiShift) { |
2193 sar(dst, Immediate(kSmiShift - shift)); | 2571 sar(dst, Immediate(kSmiShift - shift)); |
2194 } else { | 2572 } else { |
2195 shl(dst, Immediate(shift - kSmiShift)); | 2573 shl(dst, Immediate(shift - kSmiShift)); |
2196 } | 2574 } |
2197 return SmiIndex(dst, times_1); | 2575 return SmiIndex(dst, times_1); |
| 2576 #else |
| 2577 if (shift == times_1) { |
| 2578 sar(dst, Immediate(kSmiShift)); |
| 2579 return SmiIndex(dst, times_1); |
| 2580 } |
| 2581 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1)); |
| 2582 #endif |
2198 } | 2583 } |
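The 31-bit branch avoids untagging for non-trivial scales: a tagged 31-bit smi already equals value * 2, so addressing the tagged word with scale factor 2^(shift - 1) yields value * 2^shift; only shift == times_1 forces an actual sar. The same reduction is applied in SmiToNegativeIndex below. A standalone model, assuming a one-bit smi tag (the ScaledIndex helper is illustrative only):

    #include <cassert>
    #include <cstdint>

    // Tagged 31-bit smi == value * 2, so scaling by 2^(shift - 1)
    // produces value * 2^shift without untagging first.
    static intptr_t ScaledIndex(int32_t value, int shift) {
      intptr_t tagged = static_cast<intptr_t>(value) << 1;  // one-bit smi tag
      if (shift == 0) return tagged >> 1;  // no half scale exists: untag
      return tagged << (shift - 1);        // SmiIndex(dst, ScaleFactor(shift - 1))
    }

    int main() {
      assert(ScaledIndex(5, 3) == 5 * 8);  // e.g. pointer-size scaling
      assert(ScaledIndex(5, 0) == 5);
    }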
2199 | 2584 |
| 2585 |
2200 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst, | 2586 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst, |
2201 Register src, | 2587 Register src, |
2202 int shift) { | 2588 int shift) { |
2203 // Register src holds a positive smi. | 2589 // Register src holds a positive smi. |
2204 ASSERT(is_uint6(shift)); | 2590 ASSERT(is_uint6(shift)); |
2205 if (!dst.is(src)) { | 2591 if (!dst.is(src)) { |
2206 movq(dst, src); | 2592 movq(dst, src); |
2207 } | 2593 } |
2208 neg(dst); | 2594 neg(dst); |
| 2595 #if !V8_USE_31_BITS_SMI_VALUE |
2209 if (shift < kSmiShift) { | 2596 if (shift < kSmiShift) { |
2210 sar(dst, Immediate(kSmiShift - shift)); | 2597 sar(dst, Immediate(kSmiShift - shift)); |
2211 } else { | 2598 } else { |
2212 shl(dst, Immediate(shift - kSmiShift)); | 2599 shl(dst, Immediate(shift - kSmiShift)); |
2213 } | 2600 } |
2214 return SmiIndex(dst, times_1); | 2601 return SmiIndex(dst, times_1); |
| 2602 #else |
| 2603 if (shift == times_1) { |
| 2604 sar(dst, Immediate(kSmiShift)); |
| 2605 return SmiIndex(dst, times_1); |
| 2606 } |
| 2607 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1)); |
| 2608 #endif |
2215 } | 2609 } |
2216 | 2610 |
2217 | 2611 |
2218 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { | 2612 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { |
| 2613 #if !V8_USE_31_BITS_SMI_VALUE |
2219 ASSERT_EQ(0, kSmiShift % kBitsPerByte); | 2614 ASSERT_EQ(0, kSmiShift % kBitsPerByte); |
2220 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); | 2615 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); |
| 2616 #else |
| 2617 SmiToInteger32(kScratchRegister, src); |
| 2618 addl(dst, kScratchRegister); |
| 2619 #endif |
2221 } | 2620 } |
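The 32-bit branch adds the field without untagging: the payload occupies bits 32..63 of the smi word, so on little-endian x64 it is exactly the aligned dword at byte offset kSmiShift / kBitsPerByte == 4. With a 31-bit payload the value straddles the tag bit, hence the explicit untag through kScratchRegister. A standalone little-endian model of the offset trick:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      int64_t smi = static_cast<int64_t>(-7) << 32;  // 32-bit smi encoding
      int32_t payload;
      std::memcpy(&payload, reinterpret_cast<const char*>(&smi) + 4,
                  sizeof(payload));                  // Operand(src, 4)
      assert(payload == -7);  // addl can consume the field directly
    }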
2222 | 2621 |
2223 | 2622 |
2224 void MacroAssembler::JumpIfNotString(Register object, | 2623 void MacroAssembler::JumpIfNotString(Register object, |
2225 Register object_map, | 2624 Register object_map, |
2226 Label* not_string, | 2625 Label* not_string, |
2227 Label::Distance near_jump) { | 2626 Label::Distance near_jump) { |
2228 Condition is_smi = CheckSmi(object); | 2627 Condition is_smi = CheckSmi(object); |
2229 j(is_smi, not_string, near_jump); | 2628 j(is_smi, not_string, near_jump); |
2230 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map); | 2629 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map); |
(...skipping 241 matching lines...)
2472 | 2871 |
2473 | 2872 |
2474 void MacroAssembler::Drop(int stack_elements) { | 2873 void MacroAssembler::Drop(int stack_elements) { |
2475 if (stack_elements > 0) { | 2874 if (stack_elements > 0) { |
2476 addq(rsp, Immediate(stack_elements * kPointerSize)); | 2875 addq(rsp, Immediate(stack_elements * kPointerSize)); |
2477 } | 2876 } |
2478 } | 2877 } |
2479 | 2878 |
2480 | 2879 |
2481 void MacroAssembler::Test(const Operand& src, Smi* source) { | 2880 void MacroAssembler::Test(const Operand& src, Smi* source) { |
| 2881 #if !V8_USE_31_BITS_SMI_VALUE |
2482 testl(Operand(src, kIntSize), Immediate(source->value())); | 2882 testl(Operand(src, kIntSize), Immediate(source->value())); |
| 2883 #else |
| 2884 testl(src, Immediate(source)); |
| 2885 #endif |
2483 } | 2886 } |
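Both branches get by with a single testl, for different reasons: with 32-bit payloads the untagged value is the high dword, so the test targets Operand(src, kIntSize); with 31-bit payloads the entire tagged smi fits a 32-bit immediate, so the operand can be tested in place. A small model of the two encodings, using the same one-bit tag assumption as above:

    #include <cassert>
    #include <cstdint>

    int main() {
      // 32-bit payload: the raw value is recoverable from the high dword.
      int64_t wide = static_cast<int64_t>(42) << 32;
      assert(static_cast<int32_t>(wide >> 32) == 42);

      // 31-bit payload: the whole tagged word fits in an int32,
      // so testl(src, Immediate(source)) needs no byte offset.
      int32_t narrow = 42 << 1;
      assert(narrow == 84);
    }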
2484 | 2887 |
2485 | 2888 |
2486 void MacroAssembler::TestBit(const Operand& src, int bits) { | 2889 void MacroAssembler::TestBit(const Operand& src, int bits) { |
| 2890 #if V8_USE_31_BITS_SMI_VALUE |
| 2891 bits += kSmiTagSize + kSmiShiftSize; |
| 2892 #endif |
2487 int byte_offset = bits / kBitsPerByte; | 2893 int byte_offset = bits / kBitsPerByte; |
2488 int bit_in_byte = bits & (kBitsPerByte - 1); | 2894 int bit_in_byte = bits & (kBitsPerByte - 1); |
2489 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); | 2895 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); |
2490 } | 2896 } |
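The 31-bit adjustment re-bases the bit index past the tag: callers pass a bit position within the smi payload, and with kSmiTagSize == 1 and kSmiShiftSize == 0 (the layout assumed here for 31-bit smis) the payload starts one bit up in memory, after which the usual byte/bit split applies. A standalone model of the addressing arithmetic:

    #include <cassert>

    int main() {
      const int kBitsPerByte = 8;
      int bits = 9;                     // bit index within the payload
      bits += 1 + 0;                    // kSmiTagSize + kSmiShiftSize (31-bit mode)
      int byte_offset = bits / kBitsPerByte;         // byte 1 of the field
      int bit_in_byte = bits & (kBitsPerByte - 1);   // bit 2 within that byte
      assert(byte_offset == 1 && bit_in_byte == 2);
      // testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
    }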
2491 | 2897 |
2492 | 2898 |
2493 void MacroAssembler::Jump(ExternalReference ext) { | 2899 void MacroAssembler::Jump(ExternalReference ext) { |
2494 LoadAddress(kScratchRegister, ext); | 2900 LoadAddress(kScratchRegister, ext); |
2495 jmp(kScratchRegister); | 2901 jmp(kScratchRegister); |
2496 } | 2902 } |
(...skipping 2197 matching lines...)
4694 j(greater, &no_memento_available); | 5100 j(greater, &no_memento_available); |
4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 5101 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4696 Heap::kAllocationMementoMapRootIndex); | 5102 Heap::kAllocationMementoMapRootIndex); |
4697 bind(&no_memento_available); | 5103 bind(&no_memento_available); |
4698 } | 5104 } |
4699 | 5105 |
4700 | 5106 |
4701 } } // namespace v8::internal | 5107 } } // namespace v8::internal |
4702 | 5108 |
4703 #endif // V8_TARGET_ARCH_X64 | 5109 #endif // V8_TARGET_ARCH_X64 |