Chromium Code Reviews

Unified Diff: src/x64/macro-assembler-x64.cc

Issue 21014003: Optionally use 31-bits SMI value for 64-bit system (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Use (kSmiValueSize == 31) or (kSmiValueSize == 32) for SMI functions in macro assembler. Created 7 years, 4 months ago
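Background for the diff below: with kSmiValueSize == 32, an x64 smi keeps its 32-bit payload in the upper half of the tagged 64-bit word (kSmiShift == 32), so the low half is tag and padding; with kSmiValueSize == 31, the payload sits in bits 1..31 and the whole word stays sign-extended to 64 bits (kSmiShift == 1). A minimal C++ sketch of the two layouts, illustrative only (the helper names are mine; the constants mirror V8's):

#include <cstdint>

// 32-bit smis: value in the upper 32 bits, low 32 bits zero.
int64_t TagSmi32(int32_t value) {
  return static_cast<int64_t>(value) << 32;   // shl(dst, Immediate(kSmiShift))
}
int32_t UntagSmi32(int64_t smi) {
  return static_cast<int32_t>(smi >> 32);     // shr/sar by kSmiShift
}

// 31-bit smis: value in bits 1..31, sign-extended through bit 63.
int64_t TagSmi31(int32_t value) {
  int32_t tagged = static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
  return static_cast<int64_t>(tagged);        // shll + movsxlq in the patch
}
int32_t UntagSmi31(int64_t smi) {
  return static_cast<int32_t>(smi) >> 1;      // sarl(dst, Immediate(kSmiShift))
}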
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 965 matching lines...)
 void MacroAssembler::Set(const Operand& dst, int64_t x) {
   if (is_int32(x)) {
     movq(dst, Immediate(static_cast<int32_t>(x)));
   } else {
     Set(kScratchRegister, x);
     movq(dst, kScratchRegister);
   }
 }
 
 
+// ----------------------------------------------------------------------------
+// Smi tagging, untagging and tag detection.
+
+
+static inline Immediate SmiToImmediate(Smi* src) {
+  if (kSmiValueSize == 32) {
+    UNREACHABLE();
+  } else {
+    return Immediate(static_cast<int32_t>(reinterpret_cast<intptr_t>(src)));
+  }
+}
+
+
 bool MacroAssembler::IsUnsafeInt(const int x) {
   static const int kMaxBits = 17;
   return !is_intn(x, kMaxBits);
 }
 
 
 void MacroAssembler::SafeMove(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
-  ASSERT(kSmiValueSize == 32);  // JIT cookie can be converted to Smi.
+  if (kSmiValueSize == 32) {
     if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
       Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
       Move(kScratchRegister, Smi::FromInt(jit_cookie()));
       xor_(dst, kScratchRegister);
+    } else {
+      Move(dst, src);
+    }
   } else {
-    Move(dst, src);
+    ASSERT(kSmiValueSize == 31);
+    if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+      movq(dst, Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^
+                          jit_cookie()));
+      movq(kScratchRegister, Immediate(jit_cookie()));
+      xor_(dst, kScratchRegister);
+    } else {
+      Move(dst, src);
+    }
   }
 }
 
 
 void MacroAssembler::SafePush(Smi* src) {
-  ASSERT(kSmiValueSize == 32);  // JIT cookie can be converted to Smi.
+  if (kSmiValueSize == 32) {
     if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
       Push(Smi::FromInt(src->value() ^ jit_cookie()));
       Move(kScratchRegister, Smi::FromInt(jit_cookie()));
       xor_(Operand(rsp, 0), kScratchRegister);
+    } else {
+      Push(src);
+    }
   } else {
-    Push(src);
+    ASSERT(kSmiValueSize == 31);
+    if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+      push(Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^
+                     jit_cookie()));
+      movq(kScratchRegister, Immediate(jit_cookie()));
+      xor_(Operand(rsp, 0), kScratchRegister);
+    } else {
+      Push(src);
+    }
   }
 }
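The SafeMove/SafePush pair above exists to keep attacker-chosen constants out of the code stream (JIT spraying): any smi payload wider than 17 significant bits is emitted XOR-masked with the per-VM jit cookie and unmasked at run time. A rough model of that decision in plain C++ (function names are mine, not V8's):

#include <cstdint>

bool IsUnsafeInt(int32_t x) {
  const int kMaxBits = 17;                  // same constant as above
  const int32_t limit = 1 << (kMaxBits - 1);
  return x < -limit || x >= limit;          // equivalent to !is_intn(x, kMaxBits)
}

// Returns the constant that may be embedded in generated code: either the
// value itself, or the value XOR-masked with the jit cookie (the emitted
// code then re-XORs with the cookie at run time to recover the value).
int32_t CodeStreamConstant(int32_t value, int32_t jit_cookie) {
  if (IsUnsafeInt(value) && jit_cookie != 0) {
    return value ^ jit_cookie;
  }
  return value;
}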
 
 
-// ----------------------------------------------------------------------------
-// Smi tagging, untagging and tag detection.
-
 Register MacroAssembler::GetSmiConstant(Smi* source) {
   int value = source->value();
   if (value == 0) {
     xorl(kScratchRegister, kScratchRegister);
     return kScratchRegister;
   }
   if (value == 1) {
     return kSmiConstantRegister;
   }
   LoadSmiConstant(kScratchRegister, source);
(...skipping 59 matching lines...)
     neg(dst);
   }
 }
 
 
 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movl(dst, src);
   }
-  shl(dst, Immediate(kSmiShift));
+  if (kSmiValueSize == 32) {
+    shl(dst, Immediate(kSmiShift));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    shll(dst, Immediate(kSmiShift));
+    movsxlq(dst, dst);
+  }
 }
 
 
 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
   if (emit_debug_code()) {
     testb(dst, Immediate(0x01));
     Label ok;
     j(zero, &ok, Label::kNear);
     if (allow_stub_calls()) {
       Abort("Integer32ToSmiField writing to non-smi location");
     } else {
       int3();
     }
     bind(&ok);
   }
-  ASSERT(kSmiShift % kBitsPerByte == 0);
-  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  if (kSmiValueSize == 32) {
+    ASSERT(kSmiShift % kBitsPerByte == 0);
+    movl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    Integer32ToSmi(kScratchRegister, src);
+    movq(dst, kScratchRegister);
+  }
 }
 
 
 void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                 Register src,
                                                 int constant) {
   if (dst.is(src)) {
     addl(dst, Immediate(constant));
   } else {
     leal(dst, Operand(src, constant));
   }
-  shl(dst, Immediate(kSmiShift));
+  Integer32ToSmi(dst, dst);
 }
 
 
 void MacroAssembler::SmiToInteger32(Register dst, Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movq(dst, src);
   }
-  shr(dst, Immediate(kSmiShift));
+  if (kSmiValueSize == 32) {
+    shr(dst, Immediate(kSmiShift));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    sarl(dst, Immediate(kSmiShift));
+  }
 }
 
 
 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
-  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
+  if (kSmiValueSize == 32) {
+    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movl(dst, src);
+    sarl(dst, Immediate(kSmiShift));
+  }
 }
 
 
 void MacroAssembler::SmiToInteger64(Register dst, Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movq(dst, src);
   }
   sar(dst, Immediate(kSmiShift));
 }
 
 
 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
-  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
+  if (kSmiValueSize == 32) {
+    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movq(dst, src);
+    SmiToInteger64(dst, dst);
+  }
 }
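The Operand overloads above show what the new layout costs for in-memory smi fields: with 32-bit smis the payload is exactly the high dword of the field, so one 4-byte load at offset kSmiShift / kBitsPerByte == 4 untags for free, while 31-bit smis load the low dword and shift. A sketch, assuming the little-endian layout of x64:

#include <cstdint>

// Illustrative only: reading a tagged smi field from memory.
int32_t LoadSmiField32(const int64_t* field) {
  // movl(dst, Operand(src, kSmiShift / kBitsPerByte)): high dword is the value.
  return reinterpret_cast<const int32_t*>(field)[1];
}

int32_t LoadSmiField31(const int64_t* field) {
  // movl(dst, src); sarl(dst, Immediate(kSmiShift)): load, then shift.
  return static_cast<int32_t>(*field) >> 1;
}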
 
 
 void MacroAssembler::SmiTest(Register src) {
   AssertSmi(src);
-  testq(src, src);
+  if (kSmiValueSize == 32) {
+    testq(src, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    testl(src, src);
+  }
 }
 
 
 void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
   AssertSmi(smi1);
   AssertSmi(smi2);
-  cmpq(smi1, smi2);
+  if (kSmiValueSize == 32) {
+    cmpq(smi1, smi2);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(smi1, smi2);
+  }
 }
 
 
 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
   AssertSmi(dst);
   Cmp(dst, src);
 }
 
 
 void MacroAssembler::Cmp(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
   if (src->value() == 0) {
-    testq(dst, dst);
+    if (kSmiValueSize == 32) {
+      testq(dst, dst);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      testl(dst, dst);
+    }
   } else {
     Register constant_reg = GetSmiConstant(src);
-    cmpq(dst, constant_reg);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, constant_reg);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, constant_reg);
+    }
   }
 }
 
 
 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
   AssertSmi(dst);
   AssertSmi(src);
-  cmpq(dst, src);
+  if (kSmiValueSize == 32) {
+    cmpq(dst, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, src);
+  }
 }
 
 
 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
   AssertSmi(dst);
   AssertSmi(src);
-  cmpq(dst, src);
+  if (kSmiValueSize == 32) {
+    cmpq(dst, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, src);
+  }
 }
 
 
 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
   AssertSmi(dst);
-  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
+  if (kSmiValueSize == 32) {
+    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, SmiToImmediate(src));
+  }
 }
 
 
 void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
-  // The Operand cannot use the smi register.
-  Register smi_reg = GetSmiConstant(src);
-  ASSERT(!dst.AddressUsesRegister(smi_reg));
-  cmpq(dst, smi_reg);
+  if (kSmiValueSize == 32) {
+    // The Operand cannot use the smi register.
+    Register smi_reg = GetSmiConstant(src);
+    ASSERT(!dst.AddressUsesRegister(smi_reg));
+    cmpq(dst, smi_reg);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, SmiToImmediate(src));
+  }
 }
 
 
 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
-  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  if (kSmiValueSize == 32) {
+    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    SmiToInteger32(kScratchRegister, dst);
+    cmpl(kScratchRegister, src);
+  }
 }
 
 
 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                            Register src,
                                                            int power) {
   ASSERT(power >= 0);
   ASSERT(power < 64);
   if (power == 0) {
     SmiToInteger64(dst, src);
     return;
   }
   if (!dst.is(src)) {
     movq(dst, src);
   }
   if (power < kSmiShift) {
     sar(dst, Immediate(kSmiShift - power));
   } else if (power > kSmiShift) {
     shl(dst, Immediate(power - kSmiShift));
   }
 }
 
 
 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                          Register src,
                                                          int power) {
   ASSERT((0 <= power) && (power < 32));
   if (dst.is(src)) {
-    shr(dst, Immediate(power + kSmiShift));
+    if (kSmiValueSize == 32) {
+      shr(dst, Immediate(power + kSmiShift));
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      shrl(dst, Immediate(power + kSmiShift));
+    }
   } else {
     UNIMPLEMENTED();  // Not used.
   }
 }
 
 
 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
   if (dst.is(src1) || dst.is(src2)) {
(...skipping 33 matching lines...)
   testb(kScratchRegister, Immediate(3));
   return zero;
 }
 
 
 Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
   if (first.is(second)) {
     return CheckSmi(first);
   }
   STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
-  leal(kScratchRegister, Operand(first, second, times_1, 0));
-  testb(kScratchRegister, Immediate(0x03));
+  if (kSmiValueSize == 32) {
+    leal(kScratchRegister, Operand(first, second, times_1, 0));
+    testb(kScratchRegister, Immediate(0x03));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movl(kScratchRegister, first);
+    orl(kScratchRegister, second);
+    testb(kScratchRegister, Immediate(kSmiTagMask));
+  }
   return zero;
 }
 
 
 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                   Register second) {
   if (first.is(second)) {
     return CheckNonNegativeSmi(first);
   }
   movq(kScratchRegister, first);
(...skipping 19 matching lines...)
     andl(scratch, second);
   }
   testb(scratch, Immediate(kSmiTagMask));
   return zero;
 }
 
 
 Condition MacroAssembler::CheckIsMinSmi(Register src) {
   ASSERT(!src.is(kScratchRegister));
   // If we overflow by subtracting one, it's the minimal smi value.
-  cmpq(src, kSmiConstantRegister);
+  if (kSmiValueSize == 32) {
+    cmpq(src, kSmiConstantRegister);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(src, kSmiConstantRegister);
+  }
   return overflow;
 }
 
 
 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
-  // A 32-bit integer value can always be converted to a smi.
-  return always;
+  if (kSmiValueSize == 32) {
+    // A 32-bit integer value can always be converted to a smi.
+    return always;
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(src, Immediate(0xc0000000));
+    return positive;
+  }
 }
 
 
 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
-  // An unsigned 32-bit integer value is valid as long as the high bit
-  // is not set.
-  testl(src, src);
-  return positive;
+  if (kSmiValueSize == 32) {
+    // An unsigned 32-bit integer value is valid as long as the high bit
+    // is not set.
+    testl(src, src);
+    return positive;
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    testl(src, Immediate(0xc0000000));
+    return zero;
+  }
 }
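These two checks are where 31-bit smis stop being free: a 31-bit smi only covers [-2^30, 2^30 - 1], so the signed case compares against 0xc0000000 and the unsigned case tests the top two bits. Equivalent C++, as I read the flag logic (sketch, not patch code):

#include <cstdint>

// Signed case, cmpl(src, Immediate(0xc0000000)); return positive:
// subtracting 0xc0000000 (== adding 2^30 mod 2^32) leaves the sign flag
// clear exactly for inputs in [-2^30, 2^30 - 1].
bool IsValidSmi31(int32_t x) {
  return x >= -(1 << 30) && x < (1 << 30);
}

// Unsigned case, testl(src, Immediate(0xc0000000)); return zero:
// valid if and only if the top two bits are clear.
bool IsValidUnsignedSmi31(uint32_t x) {
  return (x & 0xc0000000u) == 0;
}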
 
 
 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
   if (dst.is(src)) {
     andl(dst, Immediate(kSmiTagMask));
   } else {
     movl(dst, Immediate(kSmiTagMask));
     andl(dst, src);
   }
(...skipping 12 matching lines...)
 
 
 void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                             Label* on_invalid,
                                             Label::Distance near_jump) {
   Condition is_valid = CheckInteger32ValidSmiValue(src);
   j(NegateCondition(is_valid), on_invalid, near_jump);
 }
 
 
+void MacroAssembler::JumpIfValidSmiValue(Register src,
+                                         Label* on_valid,
+                                         Label::Distance near_jump) {
+  Condition is_valid = CheckInteger32ValidSmiValue(src);
+  j(is_valid, on_valid, near_jump);
+}
+
+
 void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                 Label* on_invalid,
                                                 Label::Distance near_jump) {
   Condition is_valid = CheckUInteger32ValidSmiValue(src);
   j(NegateCondition(is_valid), on_invalid, near_jump);
 }
 
 
+void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
+                                             Label* on_valid,
+                                             Label::Distance near_jump) {
+  Condition is_valid = CheckUInteger32ValidSmiValue(src);
+  j(is_valid, on_valid, near_jump);
+}
+
+
 void MacroAssembler::JumpIfSmi(Register src,
                                Label* on_smi,
                                Label::Distance near_jump) {
   Condition smi = CheckSmi(src);
   j(smi, on_smi, near_jump);
 }
 
 
 void MacroAssembler::JumpIfNotSmi(Register src,
(...skipping 45 matching lines...)
                                        Label::Distance near_jump) {
   // Does not assume that src is a smi.
   ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
   STATIC_ASSERT(kSmiTag == 0);
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src.is(kScratchRegister));
 
   JumpIfNotSmi(src, on_not_smi_result, near_jump);
   Register tmp = (dst.is(src) ? kScratchRegister : dst);
   LoadSmiConstant(tmp, constant);
-  addq(tmp, src);
+  if (kSmiValueSize == 32) {
+    addq(tmp, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    addl(tmp, src);
+  }
   j(overflow, on_not_smi_result, near_jump);
   if (dst.is(src)) {
     movq(dst, tmp);
   }
+  if (kSmiValueSize == 31) {
+    movsxlq(dst, dst);
+  }
 }
 
 
 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
     return;
   } else if (dst.is(src)) {
(...skipping 34 matching lines...)
       LoadSmiConstant(dst, constant);
       addq(dst, src);
       return;
     }
   }
 }
 
 
 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
   if (constant->value() != 0) {
-    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
+    if (kSmiValueSize == 32) {
+      addl(Operand(dst, kSmiShift / kBitsPerByte),
+           Immediate(constant->value()));
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addq(dst, SmiToImmediate(constant));
+    }
   }
 }
 
 
 void MacroAssembler::SmiAddConstant(Register dst,
                                     Register src,
                                     Smi* constant,
                                     Label* on_not_smi_result,
                                     Label::Distance near_jump) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
 
     LoadSmiConstant(kScratchRegister, constant);
-    addq(kScratchRegister, src);
+    if (kSmiValueSize == 32) {
+      addq(kScratchRegister, src);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(kScratchRegister, src);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(dst, kScratchRegister);
+    }
   } else {
     LoadSmiConstant(dst, constant);
-    addq(dst, src);
+    if (kSmiValueSize == 32) {
+      addq(dst, src);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(dst, src);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
 
 
 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     Register constant_reg = GetSmiConstant(constant);
     subq(dst, constant_reg);
   } else {
     if (constant->value() == Smi::kMinValue) {
-      LoadSmiConstant(dst, constant);
-      // Adding and subtracting the min-value gives the same result, it only
-      // differs on the overflow bit, which we don't check here.
-      addq(dst, src);
+      if (kSmiValueSize == 32) {
+        LoadSmiConstant(dst, constant);
+        // Adding and subtracting the min-value gives the same result, it only
+        // differs on the overflow bit, which we don't check here.
+        addq(dst, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        movq(dst, src);
+        subq(dst, SmiToImmediate(constant));
+      }
     } else {
       // Subtract by adding the negation.
       LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
       addq(dst, src);
     }
   }
 }
 
 
 void MacroAssembler::SmiSubConstant(Register dst,
                                     Register src,
                                     Smi* constant,
                                     Label* on_not_smi_result,
                                     Label::Distance near_jump) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     if (constant->value() == Smi::kMinValue) {
       // Subtracting min-value from any non-negative value will overflow.
       // We test the non-negativeness before doing the subtraction.
-      testq(src, src);
+      if (kSmiValueSize == 32) {
+        testq(src, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        testl(src, src);
+      }
       j(not_sign, on_not_smi_result, near_jump);
       LoadSmiConstant(kScratchRegister, constant);
       subq(dst, kScratchRegister);
     } else {
       // Subtract by adding the negation.
       LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
-      addq(kScratchRegister, dst);
+      if (kSmiValueSize == 32) {
+        addq(kScratchRegister, dst);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        addl(kScratchRegister, dst);
+      }
       j(overflow, on_not_smi_result, near_jump);
-      movq(dst, kScratchRegister);
+      if (kSmiValueSize == 32) {
+        movq(dst, kScratchRegister);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        movsxlq(dst, kScratchRegister);
+      }
     }
   } else {
     if (constant->value() == Smi::kMinValue) {
       // Subtracting min-value from any non-negative value will overflow.
       // We test the non-negativeness before doing the subtraction.
-      testq(src, src);
-      j(not_sign, on_not_smi_result, near_jump);
-      LoadSmiConstant(dst, constant);
-      // Adding and subtracting the min-value gives the same result, it only
-      // differs on the overflow bit, which we don't check here.
-      addq(dst, src);
+      if (kSmiValueSize == 32) {
+        testq(src, src);
+        j(not_sign, on_not_smi_result, near_jump);
+        LoadSmiConstant(dst, constant);
+        // Adding and subtracting the min-value gives the same result, it only
+        // differs on the overflow bit, which we don't check here.
+        addq(dst, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        testl(src, src);
+        j(not_sign, on_not_smi_result, near_jump);
+        movq(dst, src);
+        subq(dst, SmiToImmediate(constant));
+      }
     } else {
       // Subtract by adding the negation.
       LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
-      addq(dst, src);
+      if (kSmiValueSize == 32) {
+        addq(dst, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        addl(dst, src);
+      }
       j(overflow, on_not_smi_result, near_jump);
+      if (kSmiValueSize == 31) {
+        movsxlq(dst, dst);
+      }
     }
   }
 }
 
 
 void MacroAssembler::SmiNeg(Register dst,
                             Register src,
                             Label* on_smi_result,
                             Label::Distance near_jump) {
   if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     movq(kScratchRegister, src);
     neg(dst);  // Low 32 bits are retained as zero by negation.
     // Test if result is zero or Smi::kMinValue.
-    cmpq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, kScratchRegister);
+    }
     j(not_equal, on_smi_result, near_jump);
     movq(src, kScratchRegister);
   } else {
     movq(dst, src);
     neg(dst);
-    cmpq(dst, src);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, src);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, src);
+    }
     // If the result is zero or Smi::kMinValue, negation failed to create a smi.
     j(not_equal, on_smi_result, near_jump);
   }
 }
 
 
 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             Register src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   ASSERT(!dst.is(src2));
   if (dst.is(src1)) {
     movq(kScratchRegister, src1);
-    addq(kScratchRegister, src2);
+    if (kSmiValueSize == 32) {
+      addq(kScratchRegister, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(kScratchRegister, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(dst, kScratchRegister);
+    }
   } else {
     movq(dst, src1);
-    addq(dst, src2);
+    if (kSmiValueSize == 32) {
+      addq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
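The pattern that repeats through the arithmetic helpers above: a tagged 31-bit smi is an ordinary 32-bit integer, so a 32-bit addl both adds and performs the smi range check via the overflow flag, and a trailing movsxlq restores the canonical sign-extended form. A compiler-level sketch using a GCC/Clang builtin (illustrative, not patch code):

#include <cstdint>

// Mirrors: addl(dst, src); j(overflow, on_not_smi_result); movsxlq(dst, dst);
bool SmiAdd31(int32_t tagged_a, int32_t tagged_b, int64_t* dst) {
  int32_t sum;
  if (__builtin_add_overflow(tagged_a, tagged_b, &sum)) {
    return false;                      // j(overflow, ...) takes the bail-out
  }
  *dst = static_cast<int64_t>(sum);    // movsxlq re-canonicalizes to 64 bits
  return true;
}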
 
 
 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             const Operand& src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   if (dst.is(src1)) {
     movq(kScratchRegister, src1);
-    addq(kScratchRegister, src2);
+    if (kSmiValueSize == 32) {
+      addq(kScratchRegister, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(kScratchRegister, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(dst, kScratchRegister);
+    }
   } else {
     ASSERT(!src2.AddressUsesRegister(dst));
     movq(dst, src1);
-    addq(dst, src2);
+    if (kSmiValueSize == 32) {
+      addq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
 
 
 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             Register src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible.
   if (!dst.is(src1)) {
     if (emit_debug_code()) {
       movq(kScratchRegister, src1);
       addq(kScratchRegister, src2);
       Check(no_overflow, "Smi addition overflow");
     }
     lea(dst, Operand(src1, src2, times_1, 0));
   } else {
     addq(dst, src2);
     Assert(no_overflow, "Smi addition overflow");
   }
 }
 
 
 void MacroAssembler::SmiSub(Register dst,
                             Register src1,
                             Register src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   ASSERT(!dst.is(src2));
   if (dst.is(src1)) {
-    cmpq(dst, src2);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
     subq(dst, src2);
   } else {
-    movq(dst, src1);
-    subq(dst, src2);
+    if (kSmiValueSize == 32) {
+      movq(dst, src1);
+      subq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movl(dst, src1);
+      subl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
 
 
 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
   ASSERT(!dst.is(src2));
   if (!dst.is(src1)) {
     movq(dst, src1);
   }
   subq(dst, src2);
   Assert(no_overflow, "Smi subtraction overflow");
 }
 
 
 void MacroAssembler::SmiSub(Register dst,
                             Register src1,
                             const Operand& src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   if (dst.is(src1)) {
-    movq(kScratchRegister, src2);
-    cmpq(src1, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(kScratchRegister, src2);
+      cmpq(src1, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movl(kScratchRegister, src2);
+      cmpl(src1, kScratchRegister);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    subq(src1, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      subq(src1, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(src1, kScratchRegister);
+    }
   } else {
-    movq(dst, src1);
-    subq(dst, src2);
+    if (kSmiValueSize == 32) {
+      movq(dst, src1);
+      subq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movl(dst, src1);
+      subl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
 
 
 void MacroAssembler::SmiSub(Register dst,
                             Register src1,
                             const Operand& src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
   if (!dst.is(src1)) {
(...skipping 10 matching lines...)
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT(!dst.is(src2));
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src1.is(kScratchRegister));
   ASSERT(!src2.is(kScratchRegister));
 
   if (dst.is(src1)) {
     Label failure, zero_correct_result;
     movq(kScratchRegister, src1);  // Create backup for later testing.
-    SmiToInteger64(dst, src1);
-    imul(dst, src2);
+    if (kSmiValueSize == 32) {
+      SmiToInteger64(dst, src1);
+      imul(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      SmiToInteger32(dst, src1);
+      imull(dst, src2);
+    }
     j(overflow, &failure, Label::kNear);
 
     // Check for negative zero result.  If product is zero, and one
     // argument is negative, go to slow case.
     Label correct_result;
     testq(dst, dst);
     j(not_zero, &correct_result, Label::kNear);
 
     movq(dst, kScratchRegister);
     xor_(dst, src2);
     // Result was positive zero.
     j(positive, &zero_correct_result, Label::kNear);
 
     bind(&failure);  // Reused failure exit, restores src1.
     movq(src1, kScratchRegister);
     jmp(on_not_smi_result, near_jump);
 
     bind(&zero_correct_result);
     Set(dst, 0);
 
     bind(&correct_result);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   } else {
-    SmiToInteger64(dst, src1);
-    imul(dst, src2);
+    if (kSmiValueSize == 32) {
+      SmiToInteger64(dst, src1);
+      imul(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      SmiToInteger32(dst, src1);
+      imull(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
     // Check for negative zero result.  If product is zero, and one
     // argument is negative, go to slow case.
     Label correct_result;
     testq(dst, dst);
     j(not_zero, &correct_result, Label::kNear);
     // One of src1 and src2 is zero, the check whether the other is
     // negative.
     movq(kScratchRegister, src1);
     xor_(kScratchRegister, src2);
     j(negative, on_not_smi_result, near_jump);
     bind(&correct_result);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
 
 
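SmiMul's zero test above guards the one result no smi can represent: JavaScript distinguishes -0, and a zero product whose operands have opposite signs must produce -0, so it goes to the slow path. A behavioral sketch of the 31-bit branch (names and bounds are mine):

#include <cstdint>

bool SmiMul31(int32_t a, int32_t b, int32_t* result) {
  int64_t product = static_cast<int64_t>(a) * b;   // imull sets OF on overflow
  if (product < -(int64_t{1} << 30) || product >= (int64_t{1} << 30)) {
    return false;                 // result does not fit a 31-bit smi
  }
  if (product == 0 && (a < 0) != (b < 0)) {
    return false;                 // would be JavaScript -0: slow path
  }
  *result = static_cast<int32_t>(product);
  return true;
}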
 void MacroAssembler::SmiDiv(Register dst,
                             Register src1,
                             Register src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT(!src1.is(kScratchRegister));
(...skipping 12 matching lines...)
   }
   SmiToInteger32(rax, src1);
   // We need to rule out dividing Smi::kMinValue by -1, since that would
   // overflow in idiv and raise an exception.
   // We combine this with negative zero test (negative zero only happens
   // when dividing zero by a negative number).
 
   // We overshoot a little and go to slow case if we divide min-value
   // by any negative value, not just -1.
   Label safe_div;
-  testl(rax, Immediate(0x7fffffff));
+  if (kSmiValueSize == 32) {
+    testl(rax, Immediate(0x7fffffff));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    testl(rax, Immediate(0x3fffffff));
+  }
   j(not_zero, &safe_div, Label::kNear);
   testq(src2, src2);
   if (src1.is(rax)) {
     j(positive, &safe_div, Label::kNear);
     movq(src1, kScratchRegister);
     jmp(on_not_smi_result, near_jump);
   } else {
     j(negative, on_not_smi_result, near_jump);
   }
   bind(&safe_div);
(...skipping 73 matching lines...)
   testq(src1, src1);
   j(negative, on_not_smi_result, near_jump);
   bind(&smi_result);
   Integer32ToSmi(dst, rdx);
 }
 
 
 void MacroAssembler::SmiNot(Register dst, Register src) {
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src.is(kScratchRegister));
+  if (kSmiValueSize == 32) {
     // Set tag and padding bits before negating, so that they are zero afterwards.
     movl(kScratchRegister, Immediate(~0));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movl(kScratchRegister, Immediate(1));
+  }
   if (dst.is(src)) {
     xor_(dst, kScratchRegister);
   } else {
     lea(dst, Operand(src, kScratchRegister, times_1, 0));
   }
   not_(dst);
 }
 
 
 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
(...skipping 71 matching lines...)
     shl(dst, Immediate(kSmiShift));
   } else {
     UNIMPLEMENTED();  // Not used.
   }
 }
 
 
 void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                           Register src,
-                                          int shift_value) {
+                                          int shift_value,
+                                          Label* on_not_smi_result) {
   if (!dst.is(src)) {
     movq(dst, src);
   }
+
   if (shift_value > 0) {
-    shl(dst, Immediate(shift_value));
+    if (kSmiValueSize == 32) {
+      shl(dst, Immediate(shift_value));
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      SmiToInteger32(dst, dst);
+      shll(dst, Immediate(shift_value));
+      JumpIfNotValidSmiValue(dst, on_not_smi_result);
+      Integer32ToSmi(dst, dst);
+    }
   }
 }
 
 
 void MacroAssembler::SmiShiftLogicalRightConstant(
     Register dst, Register src, int shift_value,
     Label* on_not_smi_result, Label::Distance near_jump) {
   // Logic right shift interprets its result as an *unsigned* number.
   if (dst.is(src)) {
     UNIMPLEMENTED();  // Not used.
   } else {
     movq(dst, src);
     if (shift_value == 0) {
-      testq(dst, dst);
+      if (kSmiValueSize == 32) {
+        testq(dst, dst);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        testl(dst, dst);
+      }
       j(negative, on_not_smi_result, near_jump);
     }
-    shr(dst, Immediate(shift_value + kSmiShift));
-    shl(dst, Immediate(kSmiShift));
+    if (kSmiValueSize == 32) {
+      shr(dst, Immediate(shift_value + kSmiShift));
+      shl(dst, Immediate(kSmiShift));
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      SmiToInteger32(dst, dst);
+      shrl(dst, Immediate(shift_value));
+      JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
+      shll(dst, Immediate(kSmiShift));
+    }
   }
 }
 
 
 void MacroAssembler::SmiShiftLeft(Register dst,
                                   Register src1,
-                                  Register src2) {
-  ASSERT(!dst.is(rcx));
-  // Untag shift amount.
-  if (!dst.is(src1)) {
-    movq(dst, src1);
-  }
-  SmiToInteger32(rcx, src2);
-  // Shift amount specified by lower 5 bits, not six as the shl opcode.
-  and_(rcx, Immediate(0x1f));
-  shl_cl(dst);
+                                  Register src2,
+                                  Label* on_not_smi_result) {
+  if (kSmiValueSize == 32) {
+    ASSERT(!dst.is(rcx));
+    // Untag shift amount.
+    if (!dst.is(src1)) {
+      movq(dst, src1);
+    }
+    SmiToInteger32(rcx, src2);
+    // Shift amount specified by lower 5 bits, not six as the shl opcode.
+    and_(rcx, Immediate(0x1f));
+    shl_cl(dst);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    ASSERT(!dst.is(kScratchRegister));
+    ASSERT(!src1.is(kScratchRegister));
+    ASSERT(!src2.is(kScratchRegister));
+    ASSERT(!dst.is(rcx));
+    Label result_ok;
+
+    if (src1.is(rcx) || src2.is(rcx)) {
+      movq(kScratchRegister, rcx);
+    }
+    // Untag shift amount.
+    if (!dst.is(src1)) {
+      movq(dst, src1);
+    }
+    SmiToInteger32(dst, dst);
+    SmiToInteger32(rcx, src2);
+    // Shift amount specified by lower 5 bits, not six as the shl opcode.
+    andl(rcx, Immediate(0x1f));
+    shll_cl(dst);
+    JumpIfValidSmiValue(dst, &result_ok, Label::kNear);
+    if (src1.is(rcx) || src2.is(rcx)) {
+      if (src1.is(rcx)) {
+        movq(src1, kScratchRegister);
+      } else {
+        movq(src2, kScratchRegister);
+      }
+    }
+    jmp(on_not_smi_result);
+    bind(&result_ok);
+    Integer32ToSmi(dst, dst);
+  }
 }
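SmiShiftLeft gains an on_not_smi_result label because left shifts are no longer total: any int32 result fits a 32-bit smi, but a 31-bit smi can overflow, so the shifted value is range-checked before retagging (the JumpIfValidSmiValue call above). Roughly, under the same 5-bit masking of the shift amount (sketch only):

#include <cstdint>

bool ShiftLeftSmi31(int32_t untagged, int shift_amount, int64_t* tagged_dst) {
  int32_t shifted = static_cast<int32_t>(
      static_cast<uint32_t>(untagged) << (shift_amount & 0x1f));  // shll_cl
  if (shifted < -(1 << 30) || shifted >= (1 << 30)) {
    return false;                          // jmp(on_not_smi_result)
  }
  *tagged_dst = static_cast<int64_t>(shifted) << 1;  // Integer32ToSmi
  return true;
}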
2076 2462
2077 2463
2078 void MacroAssembler::SmiShiftLogicalRight(Register dst, 2464 void MacroAssembler::SmiShiftLogicalRight(Register dst,
2079 Register src1, 2465 Register src1,
2080 Register src2, 2466 Register src2,
2081 Label* on_not_smi_result, 2467 Label* on_not_smi_result,
2082 Label::Distance near_jump) { 2468 Label::Distance near_jump) {
2083 ASSERT(!dst.is(kScratchRegister)); 2469 if (kSmiValueSize == 32) {
2084 ASSERT(!src1.is(kScratchRegister)); 2470 ASSERT(!dst.is(kScratchRegister));
2085 ASSERT(!src2.is(kScratchRegister)); 2471 ASSERT(!src1.is(kScratchRegister));
2086 ASSERT(!dst.is(rcx)); 2472 ASSERT(!src2.is(kScratchRegister));
2087 // dst and src1 can be the same, because the one case that bails out 2473 ASSERT(!dst.is(rcx));
2088 // is a shift by 0, which leaves dst, and therefore src1, unchanged. 2474 // dst and src1 can be the same, because the one case that bails out
2089 if (src1.is(rcx) || src2.is(rcx)) { 2475 // is a shift by 0, which leaves dst, and therefore src1, unchanged.
2090 movq(kScratchRegister, rcx); 2476 if (src1.is(rcx) || src2.is(rcx)) {
2091 } 2477 movq(kScratchRegister, rcx);
2092 if (!dst.is(src1)) { 2478 }
2093 movq(dst, src1); 2479 if (!dst.is(src1)) {
2094 } 2480 movq(dst, src1);
2095 SmiToInteger32(rcx, src2); 2481 }
2096 orl(rcx, Immediate(kSmiShift)); 2482 SmiToInteger32(rcx, src2);
2097 shr_cl(dst); // Shift is rcx modulo 0x1f + 32. 2483 orl(rcx, Immediate(kSmiShift));
2098 shl(dst, Immediate(kSmiShift)); 2484 shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
2099 testq(dst, dst); 2485 shl(dst, Immediate(kSmiShift));
2100 if (src1.is(rcx) || src2.is(rcx)) { 2486 testq(dst, dst);
2101 Label positive_result; 2487 if (src1.is(rcx) || src2.is(rcx)) {
2102 j(positive, &positive_result, Label::kNear); 2488 Label positive_result;
2103 if (src1.is(rcx)) { 2489 j(positive, &positive_result, Label::kNear);
2104 movq(src1, kScratchRegister); 2490 if (src1.is(rcx)) {
2491 movq(src1, kScratchRegister);
2492 } else {
2493 movq(src2, kScratchRegister);
2494 }
2495 jmp(on_not_smi_result, near_jump);
2496 bind(&positive_result);
2105 } else { 2497 } else {
2106 movq(src2, kScratchRegister); 2498 // src2 was zero and src1 negative.
2499 j(negative, on_not_smi_result, near_jump);
2107 } 2500 }
2108 jmp(on_not_smi_result, near_jump);
2109 bind(&positive_result);
2110 } else { 2501 } else {
2111 // src2 was zero and src1 negative. 2502 ASSERT(kSmiValueSize == 31);
2112 j(negative, on_not_smi_result, near_jump); 2503 ASSERT(!dst.is(kScratchRegister));
2504 ASSERT(!src1.is(kScratchRegister));
2505 ASSERT(!src2.is(kScratchRegister));
2506 ASSERT(!dst.is(rcx));
2507 Label result_ok;
2508
2509 // dst and src1 can be the same, because the one case that bails out
2510 // is a shift by 0, which leaves dst, and therefore src1, unchanged.
2511 if (src1.is(rcx) || src2.is(rcx)) {
2512 movq(kScratchRegister, rcx);
2513 }
2514 if (!dst.is(src1)) {
2515 movq(dst, src1);
2516 }
2517 SmiToInteger32(rcx, src2);
2518 SmiToInteger32(dst, dst);
2519 shrl_cl(dst);
2520 JumpIfUIntValidSmiValue(dst, &result_ok, Label::kNear);
2521 if (src1.is(rcx) || src2.is(rcx)) {
2522 if (src1.is(rcx)) {
2523 movq(src1, kScratchRegister);
2524 } else {
2525 movq(src2, kScratchRegister);
2526 }
2527 }
2528 jmp(on_not_smi_result);
2529 bind(&result_ok);
2530 Integer32ToSmi(dst, dst);
2113 } 2531 }
2114 } 2532 }
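
The 31-bit logical-right-shift path must validate the result as an unsigned quantity: a shift count of zero leaves a negative smi's sign bit set, so the uint32 result can exceed the largest smi. A hedged sketch of the check that JumpIfUIntValidSmiValue performs (assumed semantics, plain C++, same range constants as above):

    #include <cstdint>
    #include <optional>

    std::optional<int32_t> SmiShiftLogicalRight31(int32_t value, int32_t shift) {
      shift &= 0x1f;
      uint32_t result = static_cast<uint32_t>(value) >> shift;  // shrl_cl
      // With shift == 0 and value < 0, result > 2^30 - 1: not a valid smi,
      // so the caller must fall back to allocating a heap number.
      if (result > static_cast<uint32_t>((1 << 30) - 1)) {
        return std::nullopt;  // on_not_smi_result
      }
      return static_cast<int32_t>(result);
    }
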
2115 2533
2116 2534
2117 void MacroAssembler::SmiShiftArithmeticRight(Register dst, 2535 void MacroAssembler::SmiShiftArithmeticRight(Register dst,
2118 Register src1, 2536 Register src1,
2119 Register src2) { 2537 Register src2) {
2120 ASSERT(!dst.is(kScratchRegister)); 2538 ASSERT(!dst.is(kScratchRegister));
2121 ASSERT(!src1.is(kScratchRegister)); 2539 ASSERT(!src1.is(kScratchRegister));
2122 ASSERT(!src2.is(kScratchRegister)); 2540 ASSERT(!src2.is(kScratchRegister));
2123 ASSERT(!dst.is(rcx)); 2541 ASSERT(!dst.is(rcx));
2124 if (src1.is(rcx)) { 2542 if (src1.is(rcx)) {
2125 movq(kScratchRegister, src1); 2543 movq(kScratchRegister, src1);
2126 } else if (src2.is(rcx)) { 2544 } else if (src2.is(rcx)) {
2127 movq(kScratchRegister, src2); 2545 movq(kScratchRegister, src2);
2128 } 2546 }
2129 if (!dst.is(src1)) { 2547 if (!dst.is(src1)) {
2130 movq(dst, src1); 2548 movq(dst, src1);
2131 } 2549 }
2132 SmiToInteger32(rcx, src2); 2550 SmiToInteger32(rcx, src2);
2133 orl(rcx, Immediate(kSmiShift)); 2551 if (kSmiValueSize == 32) {
2134 sar_cl(dst); // Shift by 32 + (original rcx & 0x1f). 2552 orl(rcx, Immediate(kSmiShift));
2135 shl(dst, Immediate(kSmiShift)); 2553 sar_cl(dst); // Shift by 32 + (original rcx & 0x1f).
2554 } else {
2555 ASSERT(kSmiValueSize == 31);
2556 SmiToInteger32(dst, dst);
2557 sarl_cl(dst);
2558 }
2559 Integer32ToSmi(dst, dst);
2136 if (src1.is(rcx)) { 2560 if (src1.is(rcx)) {
2137 movq(src1, kScratchRegister); 2561 movq(src1, kScratchRegister);
2138 } else if (src2.is(rcx)) { 2562 } else if (src2.is(rcx)) {
2139 movq(src2, kScratchRegister); 2563 movq(src2, kScratchRegister);
2140 } 2564 }
2141 } 2565 }
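
SmiShiftArithmeticRight needs no bailout label in either layout: an arithmetic right shift of a valid smi value never leaves the smi range. A one-function sketch of the 31-bit path (C++; assumes the usual arithmetic behavior of >> on signed values):

    int32_t SmiShiftArithmeticRight31(int32_t value, int32_t shift) {
      shift &= 0x1f;
      // |value >> shift| <= |value|, so the result is always a valid smi
      // and no on_not_smi_result path is required.
      return value >> shift;  // sarl_cl on the untagged value
    }
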
2142 2566
2143 2567
2144 void MacroAssembler::SelectNonSmi(Register dst, 2568 void MacroAssembler::SelectNonSmi(Register dst,
2145 Register src1, 2569 Register src1,
(...skipping 36 matching lines...)
2182 2606
2183 SmiIndex MacroAssembler::SmiToIndex(Register dst, 2607 SmiIndex MacroAssembler::SmiToIndex(Register dst,
2184 Register src, 2608 Register src,
2185 int shift) { 2609 int shift) {
2186 ASSERT(is_uint6(shift)); 2610 ASSERT(is_uint6(shift));
2187 // There is a possible optimization if shift is in the range 60-63, but that 2611 // There is a possible optimization if shift is in the range 60-63, but that
2188 // will (and must) never happen. 2612 // will (and must) never happen.
2189 if (!dst.is(src)) { 2613 if (!dst.is(src)) {
2190 movq(dst, src); 2614 movq(dst, src);
2191 } 2615 }
2192 if (shift < kSmiShift) { 2616 if (kSmiValueSize == 32) {
2193 sar(dst, Immediate(kSmiShift - shift)); 2617 if (shift < kSmiShift) {
2618 sar(dst, Immediate(kSmiShift - shift));
2619 } else {
2620 shl(dst, Immediate(shift - kSmiShift));
2621 }
2622 return SmiIndex(dst, times_1);
2194 } else { 2623 } else {
2195 shl(dst, Immediate(shift - kSmiShift)); 2624 ASSERT(kSmiValueSize == 31);
2625 if (shift == times_1) {
2626 sar(dst, Immediate(kSmiShift));
2627 return SmiIndex(dst, times_1);
2628 }
2629 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2196 } 2630 }
2197 return SmiIndex(dst, times_1);
2198 } 2631 }
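
The 31-bit branch of SmiToIndex exploits the tag layout: a 31-bit smi is stored as value << 1, so for any requested scale of 2^shift with shift >= 1 the tagged word can be used directly with hardware scale factor 2^(shift - 1); only times_1 forces an explicit untag. A sketch of the address arithmetic (not V8 code):

    #include <cstdint>

    // Computes base + (value << shift) from the tagged word (value << 1).
    uintptr_t ElementAddress31(uintptr_t base, intptr_t tagged_smi, int shift) {
      if (shift == 0) {
        return base + (tagged_smi >> 1);  // must untag: there is no 1/2 scale
      }
      // (value << 1) << (shift - 1) == value << shift: the tag bit is
      // absorbed into the scale factor.
      return base + (tagged_smi << (shift - 1));
    }
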
2199 2632
2633
2200 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst, 2634 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
2201 Register src, 2635 Register src,
2202 int shift) { 2636 int shift) {
2203 // Register src holds a positive smi. 2637 // Register src holds a positive smi.
2204 ASSERT(is_uint6(shift)); 2638 ASSERT(is_uint6(shift));
2205 if (!dst.is(src)) { 2639 if (!dst.is(src)) {
2206 movq(dst, src); 2640 movq(dst, src);
2207 } 2641 }
2208 neg(dst); 2642 neg(dst);
2209 if (shift < kSmiShift) { 2643 if (kSmiValueSize == 32) {
2210 sar(dst, Immediate(kSmiShift - shift)); 2644 if (shift < kSmiShift) {
2645 sar(dst, Immediate(kSmiShift - shift));
2646 } else {
2647 shl(dst, Immediate(shift - kSmiShift));
2648 }
2649 return SmiIndex(dst, times_1);
2211 } else { 2650 } else {
2212 shl(dst, Immediate(shift - kSmiShift)); 2651 ASSERT(kSmiValueSize == 31);
2652 if (shift == times_1) {
2653 sar(dst, Immediate(kSmiShift));
2654 return SmiIndex(dst, times_1);
2655 }
2656 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2213 } 2657 }
2214 return SmiIndex(dst, times_1);
2215 } 2658 }
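
SmiToNegativeIndex works the same way because negating the tagged word negates the value: the tag is a pure left shift, so -(value << 1) == (-value) << 1. A short sketch under the same assumptions as above:

    uintptr_t NegativeElementAddress31(uintptr_t base, intptr_t tagged_smi,
                                       int shift) {
      intptr_t negated = -tagged_smi;  // still a validly tagged smi
      if (shift == 0) return base + (negated >> 1);
      return base + (negated << (shift - 1));
    }
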
2216 2659
2217 2660
2218 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { 2661 void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
2219 ASSERT_EQ(0, kSmiShift % kBitsPerByte); 2662 if (kSmiValueSize == 32) {
2220 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); 2663 ASSERT_EQ(0, kSmiShift % kBitsPerByte);
2664 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2665 } else {
2666 ASSERT(kSmiValueSize == 31);
2667 SmiToInteger32(kScratchRegister, src);
2668 addl(dst, kScratchRegister);
2669 }
2221 } 2670 }
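
The split in AddSmiField follows from the two layouts: with 32-bit smis the value occupies the upper half of the 64-bit word, so addl on the operand at byte offset kSmiShift / kBitsPerByte == 4 adds the untagged value directly, while the 31-bit tag shift is not byte-aligned and the field must be untagged through the scratch register first. A little-endian host sketch of the load (plain C++, hypothetical helper):

    #include <cstdint>

    int32_t LoadSmiFieldValue(const int64_t* field, int smi_value_size) {
      if (smi_value_size == 32) {
        // value << 32: bytes 4..7 hold the untagged value on little-endian x64.
        return reinterpret_cast<const int32_t*>(field)[1];
      }
      // smi_value_size == 31: value << 1 is not byte-aligned; untag by shifting.
      return static_cast<int32_t>(*field >> 1);
    }
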
2222 2671
2223 2672
2673 void MacroAssembler::Test(const Operand& src, Smi* source) {
2674 if (kSmiValueSize == 32) {
2675 testl(Operand(src, kIntSize), Immediate(source->value()));
2676 } else {
2677 ASSERT(kSmiValueSize == 31);
2678 testl(src, SmiToImmediate(source));
2679 }
2680 }
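
Test relies on a similar observation: a 31-bit tagged smi (value << 1) always fits a 32-bit immediate, which is what the SmiToImmediate helper added earlier in this patch produces, so testl can compare the whole word; the 32-bit layout instead tests only the upper half at byte offset kIntSize. A sketch of the immediate computation (assumption: tag shift of 1):

    int32_t SmiToImmediate31(int32_t value) {
      // |value| < 2^30, so the tagged form (value << 1) fits in an int32.
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }
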
2681
2682
2683 void MacroAssembler::TestBit(const Operand& src, int bits) {
2684 int byte_offset = bits / kBitsPerByte;
2685 int bit_in_byte = bits & (kBitsPerByte - 1);
2686 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
2687 }
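
TestBit is layout-independent: it addresses the byte containing the requested bit and tests it with a one-byte immediate, avoiding any wider load. An equivalent host-side model:

    #include <cstdint>

    bool TestBitModel(const uint8_t* src, int bits) {
      int byte_offset = bits / 8;   // kBitsPerByte == 8
      int bit_in_byte = bits & 7;   // bits & (kBitsPerByte - 1)
      return (src[byte_offset] & (1 << bit_in_byte)) != 0;
    }
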
2688
2689
2690 // End of smi tagging, untagging and tag detection.
2691 // ----------------------------------------------------------------------------
2692
2693
2224 void MacroAssembler::JumpIfNotString(Register object, 2694 void MacroAssembler::JumpIfNotString(Register object,
2225 Register object_map, 2695 Register object_map,
2226 Label* not_string, 2696 Label* not_string,
2227 Label::Distance near_jump) { 2697 Label::Distance near_jump) {
2228 Condition is_smi = CheckSmi(object); 2698 Condition is_smi = CheckSmi(object);
2229 j(is_smi, not_string, near_jump); 2699 j(is_smi, not_string, near_jump);
2230 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map); 2700 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
2231 j(above_equal, not_string, near_jump); 2701 j(above_equal, not_string, near_jump);
2232 } 2702 }
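
JumpIfNotString encodes two facts: smis are never strings, and string instance types are exactly those below FIRST_NONSTRING_TYPE. A boolean sketch of the same test (the tag convention, smis having a clear low bit, is V8's; the helper itself is hypothetical):

    bool IsString(uintptr_t tagged_word, int instance_type,
                  int first_nonstring_type) {
      if ((tagged_word & 1) == 0) return false;  // smi tag is 0: not a string
      return instance_type < first_nonstring_type;
    }
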
2233 2703
(...skipping 237 matching lines...)
2471 } 2941 }
2472 2942
2473 2943
2474 void MacroAssembler::Drop(int stack_elements) { 2944 void MacroAssembler::Drop(int stack_elements) {
2475 if (stack_elements > 0) { 2945 if (stack_elements > 0) {
2476 addq(rsp, Immediate(stack_elements * kPointerSize)); 2946 addq(rsp, Immediate(stack_elements * kPointerSize));
2477 } 2947 }
2478 } 2948 }
2479 2949
2480 2950
2481 void MacroAssembler::Test(const Operand& src, Smi* source) {
2482 testl(Operand(src, kIntSize), Immediate(source->value()));
2483 }
2484
2485
2486 void MacroAssembler::TestBit(const Operand& src, int bits) {
2487 int byte_offset = bits / kBitsPerByte;
2488 int bit_in_byte = bits & (kBitsPerByte - 1);
2489 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
2490 }
2491
2492
2493 void MacroAssembler::Jump(ExternalReference ext) { 2951 void MacroAssembler::Jump(ExternalReference ext) {
2494 LoadAddress(kScratchRegister, ext); 2952 LoadAddress(kScratchRegister, ext);
2495 jmp(kScratchRegister); 2953 jmp(kScratchRegister);
2496 } 2954 }
2497 2955
2498 2956
2499 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { 2957 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
2500 movq(kScratchRegister, destination, rmode); 2958 movq(kScratchRegister, destination, rmode);
2501 jmp(kScratchRegister); 2959 jmp(kScratchRegister);
2502 } 2960 }
(...skipping 2191 matching lines...)
4694 j(greater, &no_memento_available); 5152 j(greater, &no_memento_available);
4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), 5153 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
4696 Heap::kAllocationMementoMapRootIndex); 5154 Heap::kAllocationMementoMapRootIndex);
4697 bind(&no_memento_available); 5155 bind(&no_memento_available);
4698 } 5156 }
4699 5157
4700 5158
4701 } } // namespace v8::internal 5159 } } // namespace v8::internal
4702 5160
4703 #endif // V8_TARGET_ARCH_X64 5161 #endif // V8_TARGET_ARCH_X64