Chromium Code Reviews

Unified Diff: src/x64/macro-assembler-x64.cc

Issue 21014003: Optionally use 31-bits SMI value for 64-bit system (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Introduce SmiFunctionInvoker to abstract the difference between FullCodeGen and LCodeGen Created 7 years, 4 months ago
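
Background for this patch set: with the existing layout a smi on x64 keeps its 32-bit payload in the upper half of the tagged word (kSmiValueSize == 32, kSmiShift == 32, low word all zero); the optional layout this issue introduces keeps a 31-bit payload in the low word behind the one-bit tag (kSmiValueSize == 31, kSmiShift == 1) and sign-extends the result to 64 bits. A minimal stand-alone sketch of the two encodings (illustrative C++ only; the Tag* helper names are not from the patch):

    #include <cstdint>

    // kSmiValueSize == 32, kSmiShift == 32: payload occupies bits 32..63.
    int64_t TagSmi32(int32_t value) {
      return static_cast<int64_t>(value) << 32;
    }

    // kSmiValueSize == 31, kSmiShift == 1: payload occupies bits 1..31 of the
    // low word and is then sign-extended, matching the shll + movsxlq
    // sequences in the diff below.
    int64_t TagSmi31(int32_t value) {
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }

This is why the patch keeps splitting on kSmiValueSize: quadword instructions (addq, cmpq, shl by 32) manipulate the high-word layout, while their 32-bit counterparts (addl, cmpl, shll by 1) manipulate the low-word layout.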
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 966 matching lines...)
 void MacroAssembler::Set(const Operand& dst, int64_t x) {
   if (is_int32(x)) {
     movq(dst, Immediate(static_cast<int32_t>(x)));
   } else {
     Set(kScratchRegister, x);
     movq(dst, kScratchRegister);
   }
 }


+// ----------------------------------------------------------------------------
+// Smi tagging, untagging and tag detection.
+
+
+static inline Immediate SmiToImmediate(Smi* src) {
+  if (kSmiValueSize == 32) {
+    UNREACHABLE();
+    return Immediate(2);
+  } else {
+    return Immediate(static_cast<int32_t>(reinterpret_cast<intptr_t>(src)));
+  }
+}
+
+
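The new SmiToImmediate() helper is only meaningful for the 31-bit layout, where the whole tagged word equals its sign-extended low 32 bits and therefore fits an x64 imm32 operand. A 32-bit smi keeps its payload in bits 32..63, which no 32-bit immediate can encode, hence the UNREACHABLE() arm; the Immediate(2) after it appears to be dead code that only satisfies the return type. Under that reading, the 31-bit arm is equivalent to:

    // The tagged word's bit pattern is int32(value << 1), so truncating the
    // pointer's bits to int32_t loses nothing (hypothetical restatement).
    int32_t imm = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));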
 bool MacroAssembler::IsUnsafeInt(const int x) {
   static const int kMaxBits = 17;
   return !is_intn(x, kMaxBits);
 }


 void MacroAssembler::SafeMove(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
-  ASSERT(kSmiValueSize == 32);  // JIT cookie can be converted to Smi.
-  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
-    Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
-    Move(kScratchRegister, Smi::FromInt(jit_cookie()));
-    xor_(dst, kScratchRegister);
-  } else {
-    Move(dst, src);
+  if (kSmiValueSize == 32) {
+    if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+      Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
+      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
+      xor_(dst, kScratchRegister);
+    } else {
+      Move(dst, src);
+    }
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+      movq(dst, Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^
+                          jit_cookie()));
+      movq(kScratchRegister, Immediate(jit_cookie()));
+      xor_(dst, kScratchRegister);
+    } else {
+      Move(dst, src);
+    }
   }
 }


 void MacroAssembler::SafePush(Smi* src) {
-  ASSERT(kSmiValueSize == 32);  // JIT cookie can be converted to Smi.
-  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
-    Push(Smi::FromInt(src->value() ^ jit_cookie()));
-    Move(kScratchRegister, Smi::FromInt(jit_cookie()));
-    xor_(Operand(rsp, 0), kScratchRegister);
-  } else {
-    Push(src);
+  if (kSmiValueSize == 32) {
+    if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+      Push(Smi::FromInt(src->value() ^ jit_cookie()));
+      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
+      xor_(Operand(rsp, 0), kScratchRegister);
+    } else {
+      Push(src);
+    }
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
+      push(Immediate(static_cast<int>(reinterpret_cast<intptr_t>(src)) ^
+                     jit_cookie()));
+      movq(kScratchRegister, Immediate(jit_cookie()));
+      xor_(Operand(rsp, 0), kScratchRegister);
+    } else {
+      Push(src);
+    }
   }
 }
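SafeMove() and SafePush() exist so that attacker-chosen smi constants never appear verbatim in the instruction stream: the constant is emitted XOR-ed with the JIT cookie and the cookie is XOR-ed back in at run time, relying on (x ^ c) ^ c == x. The change extends the masking to the 31-bit layout, where the whole tagged smi fits a 32-bit immediate, so plain movq/push of an Immediate replaces Move/Push of a Smi. A sketch of what the emitted sequence computes:

    // Code stream contains only (smi ^ cookie):
    //   dst  = smi ^ cookie;   // masked immediate
    //   dst ^= cookie;         // run-time unmask: (smi ^ c) ^ c == smi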


-// ----------------------------------------------------------------------------
-// Smi tagging, untagging and tag detection.
-
 Register MacroAssembler::GetSmiConstant(Smi* source) {
   int value = source->value();
   if (value == 0) {
     xorl(kScratchRegister, kScratchRegister);
     return kScratchRegister;
   }
   if (value == 1) {
     return kSmiConstantRegister;
   }
   LoadSmiConstant(kScratchRegister, source);
(...skipping 59 matching lines...)
     neg(dst);
   }
 }


 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movl(dst, src);
   }
-  shl(dst, Immediate(kSmiShift));
+  if (kSmiValueSize == 32) {
+    shl(dst, Immediate(kSmiShift));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    shll(dst, Immediate(kSmiShift));
+    movsxlq(dst, dst);
+  }
 }
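In the 31-bit arm tagging is done in 32-bit arithmetic (shll by kSmiShift == 1) and then sign-extended, which keeps the upper word canonical. A worked example under that layout:

    // value = -3, i.e. 0xFFFFFFFD as a 32-bit word
    //   shll dst, 1      -> low word 0xFFFFFFFA   (tagged -3)
    //   movsxlq dst, dst -> 0xFFFFFFFFFFFFFFFA    (canonical 64-bit smi)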


 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
   if (emit_debug_code()) {
     testb(dst, Immediate(0x01));
     Label ok;
     j(zero, &ok, Label::kNear);
     if (allow_stub_calls()) {
       Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
     } else {
       int3();
     }
     bind(&ok);
   }
-  ASSERT(kSmiShift % kBitsPerByte == 0);
-  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  if (kSmiValueSize == 32) {
+    ASSERT(kSmiShift % kBitsPerByte == 0);
+    movl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    Integer32ToSmi(kScratchRegister, src);
+    movq(dst, kScratchRegister);
+  }
 }


 void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                 Register src,
                                                 int constant) {
   if (dst.is(src)) {
     addl(dst, Immediate(constant));
   } else {
     leal(dst, Operand(src, constant));
   }
-  shl(dst, Immediate(kSmiShift));
+  Integer32ToSmi(dst, dst);
 }


 void MacroAssembler::SmiToInteger32(Register dst, Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movq(dst, src);
   }
-  shr(dst, Immediate(kSmiShift));
+  if (kSmiValueSize == 32) {
+    shr(dst, Immediate(kSmiShift));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    sarl(dst, Immediate(kSmiShift));
+  }
 }


 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
-  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
+  if (kSmiValueSize == 32) {
+    movl(dst, Operand(src, kSmiShift / kBitsPerByte));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movl(dst, src);
+    sarl(dst, Immediate(kSmiShift));
+  }
 }


 void MacroAssembler::SmiToInteger64(Register dst, Register src) {
   STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movq(dst, src);
   }
   sar(dst, Immediate(kSmiShift));
 }


 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
-  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
+  if (kSmiValueSize == 32) {
+    movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movq(dst, src);
+    SmiToInteger64(dst, dst);
+  }
 }
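Untagging differs between the layouts: with kSmiShift == 32 a logical shift (shr) suffices because the payload fills the whole upper word, while with kSmiShift == 1 the payload shares the low word with the tag bit, so an arithmetic shift (sarl) is needed to preserve the sign. For example, in the 31-bit layout:

    // tagged -1 has 0xFFFFFFFE in the low word
    //   sarl dst, 1 -> 0xFFFFFFFF (-1)   correct
    //   shrl dst, 1 -> 0x7FFFFFFF        wrong: sign bit lost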


 void MacroAssembler::SmiTest(Register src) {
   AssertSmi(src);
-  testq(src, src);
+  if (kSmiValueSize == 32) {
+    testq(src, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    testl(src, src);
+  }
 }


 void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
   AssertSmi(smi1);
   AssertSmi(smi2);
-  cmpq(smi1, smi2);
+  if (kSmiValueSize == 32) {
+    cmpq(smi1, smi2);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(smi1, smi2);
+  }
 }


 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
   AssertSmi(dst);
   Cmp(dst, src);
 }


 void MacroAssembler::Cmp(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
   if (src->value() == 0) {
-    testq(dst, dst);
+    if (kSmiValueSize == 32) {
+      testq(dst, dst);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      testl(dst, dst);
+    }
   } else {
     Register constant_reg = GetSmiConstant(src);
-    cmpq(dst, constant_reg);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, constant_reg);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, constant_reg);
+    }
   }
 }


 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
   AssertSmi(dst);
   AssertSmi(src);
-  cmpq(dst, src);
+  if (kSmiValueSize == 32) {
+    cmpq(dst, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, src);
+  }
 }


 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
   AssertSmi(dst);
   AssertSmi(src);
-  cmpq(dst, src);
+  if (kSmiValueSize == 32) {
+    cmpq(dst, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, src);
+  }
 }


 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
   AssertSmi(dst);
-  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
+  if (kSmiValueSize == 32) {
+    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, SmiToImmediate(src));
+  }
 }


 void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
-  // The Operand cannot use the smi register.
-  Register smi_reg = GetSmiConstant(src);
-  ASSERT(!dst.AddressUsesRegister(smi_reg));
-  cmpq(dst, smi_reg);
+  if (kSmiValueSize == 32) {
+    // The Operand cannot use the smi register.
+    Register smi_reg = GetSmiConstant(src);
+    ASSERT(!dst.AddressUsesRegister(smi_reg));
+    cmpq(dst, smi_reg);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(dst, SmiToImmediate(src));
+  }
 }


 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
-  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  if (kSmiValueSize == 32) {
+    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    SmiToInteger32(kScratchRegister, dst);
+    cmpl(kScratchRegister, src);
+  }
 }


 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                            Register src,
                                                            int power) {
   ASSERT(power >= 0);
   ASSERT(power < 64);
   if (power == 0) {
     SmiToInteger64(dst, src);
     return;
   }
   if (!dst.is(src)) {
     movq(dst, src);
   }
   if (power < kSmiShift) {
     sar(dst, Immediate(kSmiShift - power));
   } else if (power > kSmiShift) {
     shl(dst, Immediate(power - kSmiShift));
   }
 }


 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                          Register src,
                                                          int power) {
   ASSERT((0 <= power) && (power < 32));
   if (dst.is(src)) {
-    shr(dst, Immediate(power + kSmiShift));
+    if (kSmiValueSize == 32) {
+      shr(dst, Immediate(power + kSmiShift));
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      shrl(dst, Immediate(power + kSmiShift));
+    }
   } else {
     UNIMPLEMENTED();  // Not used.
   }
 }


 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
   if (dst.is(src1) || dst.is(src2)) {
(...skipping 33 matching lines...)
   testb(kScratchRegister, Immediate(3));
   return zero;
 }


 Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
   if (first.is(second)) {
     return CheckSmi(first);
   }
   STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
-  leal(kScratchRegister, Operand(first, second, times_1, 0));
-  testb(kScratchRegister, Immediate(0x03));
+  if (kSmiValueSize == 32) {
+    leal(kScratchRegister, Operand(first, second, times_1, 0));
+    testb(kScratchRegister, Immediate(0x03));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movl(kScratchRegister, first);
+    orl(kScratchRegister, second);
+    testb(kScratchRegister, Immediate(kSmiTagMask));
+  }
   return zero;
 }


 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                   Register second) {
   if (first.is(second)) {
     return CheckNonNegativeSmi(first);
   }
   movq(kScratchRegister, first);
(...skipping 19 matching lines...)
     andl(scratch, second);
   }
   testb(scratch, Immediate(kSmiTagMask));
   return zero;
 }


 Condition MacroAssembler::CheckIsMinSmi(Register src) {
   ASSERT(!src.is(kScratchRegister));
   // If we overflow by subtracting one, it's the minimal smi value.
-  cmpq(src, kSmiConstantRegister);
+  if (kSmiValueSize == 32) {
+    cmpq(src, kSmiConstantRegister);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(src, kSmiConstantRegister);
+  }
   return overflow;
 }


 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
-  // A 32-bit integer value can always be converted to a smi.
-  return always;
+  if (kSmiValueSize == 32) {
+    // A 32-bit integer value can always be converted to a smi.
+    return always;
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    cmpl(src, Immediate(0xc0000000));
+    return positive;
+  }
 }


 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
-  // An unsigned 32-bit integer value is valid as long as the high bit
-  // is not set.
-  testl(src, src);
-  return positive;
+  if (kSmiValueSize == 32) {
+    // An unsigned 32-bit integer value is valid as long as the high bit
+    // is not set.
+    testl(src, src);
+    return positive;
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    testl(src, Immediate(0xc0000000));
+    return zero;
+  }
 }
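The two 31-bit range checks deserve a gloss. cmpl(src, Immediate(0xc0000000)) computes src - 0xc0000000, which equals src + 0x40000000 modulo 2^32; its sign flag is clear (condition `positive`) exactly when src lies in [-2^30, 2^30), i.e. fits in 31 signed bits. The unsigned check only needs the top two bits clear. Equivalent C, as a sketch rather than code from the patch:

    // Matches cmpl(src, Immediate(0xc0000000)); return positive;
    bool IsValidSmi31(int32_t src) {
      return static_cast<int32_t>(static_cast<uint32_t>(src) + 0x40000000u) >= 0;
    }

    // Matches testl(src, Immediate(0xc0000000)); return zero;
    bool IsValidUSmi31(uint32_t src) {
      return (src & 0xc0000000u) == 0;
    }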


 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
   if (dst.is(src)) {
     andl(dst, Immediate(kSmiTagMask));
   } else {
     movl(dst, Immediate(kSmiTagMask));
     andl(dst, src);
   }
(...skipping 12 matching lines...)


 void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                             Label* on_invalid,
                                             Label::Distance near_jump) {
   Condition is_valid = CheckInteger32ValidSmiValue(src);
   j(NegateCondition(is_valid), on_invalid, near_jump);
 }


+void MacroAssembler::JumpIfValidSmiValue(Register src,
+                                         Label* on_valid,
+                                         Label::Distance near_jump) {
+  Condition is_valid = CheckInteger32ValidSmiValue(src);
+  j(is_valid, on_valid, near_jump);
+}
+
+
 void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                 Label* on_invalid,
                                                 Label::Distance near_jump) {
   Condition is_valid = CheckUInteger32ValidSmiValue(src);
   j(NegateCondition(is_valid), on_invalid, near_jump);
 }


+void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
+                                             Label* on_valid,
+                                             Label::Distance near_jump) {
+  Condition is_valid = CheckUInteger32ValidSmiValue(src);
+  j(is_valid, on_valid, near_jump);
+}
+
+
 void MacroAssembler::JumpIfSmi(Register src,
                                Label* on_smi,
                                Label::Distance near_jump) {
   Condition smi = CheckSmi(src);
   j(smi, on_smi, near_jump);
 }


 void MacroAssembler::JumpIfNotSmi(Register src,
                                   Label* on_not_smi,
(...skipping 45 matching lines...)
                                   Label::Distance near_jump) {
   // Does not assume that src is a smi.
   ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
   STATIC_ASSERT(kSmiTag == 0);
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src.is(kScratchRegister));

   JumpIfNotSmi(src, on_not_smi_result, near_jump);
   Register tmp = (dst.is(src) ? kScratchRegister : dst);
   LoadSmiConstant(tmp, constant);
-  addq(tmp, src);
+  if (kSmiValueSize == 32) {
+    addq(tmp, src);
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    addl(tmp, src);
+  }
   j(overflow, on_not_smi_result, near_jump);
   if (dst.is(src)) {
     movq(dst, tmp);
   }
+  if (kSmiValueSize == 31) {
+    movsxlq(dst, dst);
+  }
 }


 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
     return;
   } else if (dst.is(src)) {
(...skipping 34 matching lines...)
       LoadSmiConstant(dst, constant);
       addq(dst, src);
       return;
     }
   }
 }


 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
   if (constant->value() != 0) {
-    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
+    if (kSmiValueSize == 32) {
+      addl(Operand(dst, kSmiShift / kBitsPerByte),
+           Immediate(constant->value()));
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addq(dst, SmiToImmediate(constant));
+    }
   }
 }


 void MacroAssembler::SmiAddConstant(Register dst,
                                     Register src,
                                     Smi* constant,
                                     Label* on_not_smi_result,
                                     Label::Distance near_jump) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));

     LoadSmiConstant(kScratchRegister, constant);
-    addq(kScratchRegister, src);
+    if (kSmiValueSize == 32) {
+      addq(kScratchRegister, src);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(kScratchRegister, src);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(dst, kScratchRegister);
+    }
   } else {
     LoadSmiConstant(dst, constant);
-    addq(dst, src);
+    if (kSmiValueSize == 32) {
+      addq(dst, src);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(dst, src);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }


 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     Register constant_reg = GetSmiConstant(constant);
     subq(dst, constant_reg);
   } else {
     if (constant->value() == Smi::kMinValue) {
-      LoadSmiConstant(dst, constant);
-      // Adding and subtracting the min-value gives the same result, it only
-      // differs on the overflow bit, which we don't check here.
-      addq(dst, src);
+      if (kSmiValueSize == 32) {
+        LoadSmiConstant(dst, constant);
+        // Adding and subtracting the min-value gives the same result, it only
+        // differs on the overflow bit, which we don't check here.
+        addq(dst, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        movq(dst, src);
+        subq(dst, SmiToImmediate(constant));
+      }
     } else {
       // Subtract by adding the negation.
       LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
       addq(dst, src);
     }
   }
 }


 void MacroAssembler::SmiSubConstant(Register dst,
                                     Register src,
                                     Smi* constant,
                                     Label* on_not_smi_result,
                                     Label::Distance near_jump) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
       movq(dst, src);
     }
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     if (constant->value() == Smi::kMinValue) {
       // Subtracting min-value from any non-negative value will overflow.
       // We test the non-negativeness before doing the subtraction.
-      testq(src, src);
+      if (kSmiValueSize == 32) {
+        testq(src, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        testl(src, src);
+      }
       j(not_sign, on_not_smi_result, near_jump);
       LoadSmiConstant(kScratchRegister, constant);
       subq(dst, kScratchRegister);
     } else {
       // Subtract by adding the negation.
       LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
-      addq(kScratchRegister, dst);
+      if (kSmiValueSize == 32) {
+        addq(kScratchRegister, dst);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        addl(kScratchRegister, dst);
+      }
       j(overflow, on_not_smi_result, near_jump);
-      movq(dst, kScratchRegister);
+      if (kSmiValueSize == 32) {
+        movq(dst, kScratchRegister);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        movsxlq(dst, kScratchRegister);
+      }
     }
   } else {
     if (constant->value() == Smi::kMinValue) {
       // Subtracting min-value from any non-negative value will overflow.
       // We test the non-negativeness before doing the subtraction.
-      testq(src, src);
-      j(not_sign, on_not_smi_result, near_jump);
-      LoadSmiConstant(dst, constant);
-      // Adding and subtracting the min-value gives the same result, it only
-      // differs on the overflow bit, which we don't check here.
-      addq(dst, src);
+      if (kSmiValueSize == 32) {
+        testq(src, src);
+        j(not_sign, on_not_smi_result, near_jump);
+        LoadSmiConstant(dst, constant);
+        // Adding and subtracting the min-value gives the same result, it only
+        // differs on the overflow bit, which we don't check here.
+        addq(dst, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        testl(src, src);
+        j(not_sign, on_not_smi_result, near_jump);
+        movq(dst, src);
+        subq(dst, SmiToImmediate(constant));
+      }
     } else {
       // Subtract by adding the negation.
       LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
-      addq(dst, src);
+      if (kSmiValueSize == 32) {
+        addq(dst, src);
+      } else {
+        ASSERT(kSmiValueSize == 31);
+        addl(dst, src);
+      }
       j(overflow, on_not_smi_result, near_jump);
+      if (kSmiValueSize == 31) {
+        movsxlq(dst, dst);
+      }
     }
   }
 }


 void MacroAssembler::SmiNeg(Register dst,
                             Register src,
                             Label* on_smi_result,
                             Label::Distance near_jump) {
   if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
     movq(kScratchRegister, src);
     neg(dst);  // Low 32 bits are retained as zero by negation.
     // Test if result is zero or Smi::kMinValue.
-    cmpq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, kScratchRegister);
+    }
     j(not_equal, on_smi_result, near_jump);
     movq(src, kScratchRegister);
   } else {
     movq(dst, src);
     neg(dst);
-    cmpq(dst, src);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, src);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, src);
+    }
     // If the result is zero or Smi::kMinValue, negation failed to create a smi.
     j(not_equal, on_smi_result, near_jump);
   }
 }


 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             Register src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   ASSERT(!dst.is(src2));
   if (dst.is(src1)) {
     movq(kScratchRegister, src1);
-    addq(kScratchRegister, src2);
+    if (kSmiValueSize == 32) {
+      addq(kScratchRegister, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(kScratchRegister, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(dst, kScratchRegister);
+    }
   } else {
     movq(dst, src1);
-    addq(dst, src2);
+    if (kSmiValueSize == 32) {
+      addq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }
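All the arithmetic helpers follow one pattern in their 31-bit arms: perform the operation with 32-bit instructions (addl/subl), let the hardware overflow flag catch results that leave the 31-bit payload range, and re-canonicalize with movsxlq on success. This is sound because the tag is a plain shift, so the tagged values add like the untagged ones:

    // tagged(x) == x << 1 (mod 2^32); for payloads a and b:
    //   (a << 1) + (b << 1) == (a + b) << 1
    // OF after addl  <=>  a + b outside [-2^30, 2^30)  <=>  result not a smi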


 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             const Operand& src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   if (dst.is(src1)) {
     movq(kScratchRegister, src1);
-    addq(kScratchRegister, src2);
+    if (kSmiValueSize == 32) {
+      addq(kScratchRegister, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(kScratchRegister, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(dst, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(dst, kScratchRegister);
+    }
   } else {
     ASSERT(!src2.AddressUsesRegister(dst));
     movq(dst, src1);
-    addq(dst, src2);
+    if (kSmiValueSize == 32) {
+      addq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      addl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }


 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             Register src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible.
   if (!dst.is(src1)) {
     if (emit_debug_code()) {
       movq(kScratchRegister, src1);
       addq(kScratchRegister, src2);
       Check(no_overflow, kSmiAdditionOverflow);
     }
     lea(dst, Operand(src1, src2, times_1, 0));
   } else {
     addq(dst, src2);
     Assert(no_overflow, kSmiAdditionOverflow);
   }
 }


 void MacroAssembler::SmiSub(Register dst,
                             Register src1,
                             Register src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   ASSERT(!dst.is(src2));
   if (dst.is(src1)) {
-    cmpq(dst, src2);
+    if (kSmiValueSize == 32) {
+      cmpq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      cmpl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
     subq(dst, src2);
   } else {
-    movq(dst, src1);
-    subq(dst, src2);
+    if (kSmiValueSize == 32) {
+      movq(dst, src1);
+      subq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movl(dst, src1);
+      subl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }


 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
   ASSERT(!dst.is(src2));
   if (!dst.is(src1)) {
     movq(dst, src1);
   }
   subq(dst, src2);
   Assert(no_overflow, kSmiSubtractionOverflow);
 }


 void MacroAssembler::SmiSub(Register dst,
                             Register src1,
                             const Operand& src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   if (dst.is(src1)) {
-    movq(kScratchRegister, src2);
-    cmpq(src1, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      movq(kScratchRegister, src2);
+      cmpq(src1, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movl(kScratchRegister, src2);
+      cmpl(src1, kScratchRegister);
+    }
     j(overflow, on_not_smi_result, near_jump);
-    subq(src1, kScratchRegister);
+    if (kSmiValueSize == 32) {
+      subq(src1, kScratchRegister);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movsxlq(src1, kScratchRegister);
+    }
   } else {
-    movq(dst, src1);
-    subq(dst, src2);
+    if (kSmiValueSize == 32) {
+      movq(dst, src1);
+      subq(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      movl(dst, src1);
+      subl(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }


 void MacroAssembler::SmiSub(Register dst,
                             Register src1,
                             const Operand& src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
   if (!dst.is(src1)) {
(...skipping 10 matching lines...)
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT(!dst.is(src2));
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src1.is(kScratchRegister));
   ASSERT(!src2.is(kScratchRegister));

   if (dst.is(src1)) {
     Label failure, zero_correct_result;
     movq(kScratchRegister, src1);  // Create backup for later testing.
-    SmiToInteger64(dst, src1);
-    imul(dst, src2);
+    if (kSmiValueSize == 32) {
+      SmiToInteger64(dst, src1);
+      imul(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      SmiToInteger32(dst, src1);
+      imull(dst, src2);
+    }
     j(overflow, &failure, Label::kNear);

     // Check for negative zero result. If product is zero, and one
     // argument is negative, go to slow case.
     Label correct_result;
     testq(dst, dst);
     j(not_zero, &correct_result, Label::kNear);

     movq(dst, kScratchRegister);
     xor_(dst, src2);
     // Result was positive zero.
     j(positive, &zero_correct_result, Label::kNear);

     bind(&failure);  // Reused failure exit, restores src1.
     movq(src1, kScratchRegister);
     jmp(on_not_smi_result, near_jump);

     bind(&zero_correct_result);
     Set(dst, 0);

     bind(&correct_result);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   } else {
-    SmiToInteger64(dst, src1);
-    imul(dst, src2);
+    if (kSmiValueSize == 32) {
+      SmiToInteger64(dst, src1);
+      imul(dst, src2);
+    } else {
+      ASSERT(kSmiValueSize == 31);
+      SmiToInteger32(dst, src1);
+      imull(dst, src2);
+    }
     j(overflow, on_not_smi_result, near_jump);
     // Check for negative zero result. If product is zero, and one
     // argument is negative, go to slow case.
     Label correct_result;
     testq(dst, dst);
     j(not_zero, &correct_result, Label::kNear);
     // One of src1 and src2 is zero, the check whether the other is
     // negative.
     movq(kScratchRegister, src1);
     xor_(kScratchRegister, src2);
     j(negative, on_not_smi_result, near_jump);
     bind(&correct_result);
+    if (kSmiValueSize == 31) {
+      movsxlq(dst, dst);
+    }
   }
 }


 void MacroAssembler::SmiDiv(Register dst,
                             Register src1,
                             Register src2,
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT(!src1.is(kScratchRegister));
(...skipping 12 matching lines...)
   }
   SmiToInteger32(rax, src1);
   // We need to rule out dividing Smi::kMinValue by -1, since that would
   // overflow in idiv and raise an exception.
   // We combine this with negative zero test (negative zero only happens
   // when dividing zero by a negative number).

   // We overshoot a little and go to slow case if we divide min-value
   // by any negative value, not just -1.
   Label safe_div;
-  testl(rax, Immediate(0x7fffffff));
+  if (kSmiValueSize == 32) {
+    testl(rax, Immediate(0x7fffffff));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    testl(rax, Immediate(0x3fffffff));
+  }
   j(not_zero, &safe_div, Label::kNear);
   testq(src2, src2);
   if (src1.is(rax)) {
     j(positive, &safe_div, Label::kNear);
     movq(src1, kScratchRegister);
     jmp(on_not_smi_result, near_jump);
   } else {
     j(negative, on_not_smi_result, near_jump);
   }
   bind(&safe_div);
(...skipping 73 matching lines...)
   testq(src1, src1);
   j(negative, on_not_smi_result, near_jump);
   bind(&smi_result);
   Integer32ToSmi(dst, rdx);
 }


 void MacroAssembler::SmiNot(Register dst, Register src) {
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src.is(kScratchRegister));
-  // Set tag and padding bits before negating, so that they are zero afterwards.
-  movl(kScratchRegister, Immediate(~0));
+  if (kSmiValueSize == 32) {
+    // Set tag and padding bits before negating, so that they are zero
+    // afterwards.
+    movl(kScratchRegister, Immediate(~0));
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    movl(kScratchRegister, Immediate(1));
+  }
   if (dst.is(src)) {
     xor_(dst, kScratchRegister);
   } else {
     lea(dst, Operand(src, kScratchRegister, times_1, 0));
   }
   not_(dst);
 }


 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
(...skipping 71 matching lines...)
       shl(dst, Immediate(kSmiShift));
     } else {
       UNIMPLEMENTED();  // Not used.
     }
   }
 }


 void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                           Register src,
-                                          int shift_value) {
-  if (!dst.is(src)) {
-    movq(dst, src);
-  }
-  if (shift_value > 0) {
-    shl(dst, Immediate(shift_value));
+                                          int shift_value,
+                                          SmiFunctionInvoker& invoker) {
+  if (kSmiValueSize == 32) {
+    if (!dst.is(src)) {
+      movq(dst, src);
+    }
+    if (shift_value > 0) {
+      shl(dst, Immediate(shift_value));
+    }
+  } else {
+    ASSERT(kSmiValueSize == 31);
+    if (dst.is(src)) {
+      ASSERT(!invoker.reserve_source_operands());
+    } else {
+      movq(dst, src);
+    }
+    if (shift_value > 0) {
+      SmiToInteger32(dst, dst);
+      shll(dst, Immediate(shift_value));
+      if (invoker.on_not_smi_result() != NULL) {
+        JumpIfNotValidSmiValue(dst, invoker.on_not_smi_result());
+      } else {
+        Label done;
+        JumpIfValidSmiValue(dst, &done, Label::kNear);
+        invoker.Bailout();
+        bind(&done);
+      }
+      Integer32ToSmi(dst, dst);
+    }
   }
 }
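This is the SmiFunctionInvoker from the patch set title: rather than every call site passing a bailout label (which LCodeGen, wanting a deoptimization, does not have), the helper asks the invoker how to fail. The class is defined elsewhere in the patch; from this call site alone one can only infer its shape, roughly:

    // Inferred from usage above, not a verbatim copy of the patch:
    //   Label* on_not_smi_result();      // non-NULL for FullCodeGen-style
    //                                    //   callers with a slow-path label
    //   void Bailout();                  // LCodeGen-style deoptimization
    //   bool reserve_source_operands();  // whether src must survive a bailout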
2044 2397
2045 2398
2046 void MacroAssembler::SmiShiftLogicalRightConstant( 2399 void MacroAssembler::SmiShiftLogicalRightConstant(
2047 Register dst, Register src, int shift_value, 2400 Register dst, Register src, int shift_value,
2048 Label* on_not_smi_result, Label::Distance near_jump) { 2401 Label* on_not_smi_result, Label::Distance near_jump) {
2049 // Logic right shift interprets its result as an *unsigned* number. 2402 // Logic right shift interprets its result as an *unsigned* number.
2050 if (dst.is(src)) { 2403 if (dst.is(src)) {
2051 UNIMPLEMENTED(); // Not used. 2404 UNIMPLEMENTED(); // Not used.
2052 } else { 2405 } else {
2053 movq(dst, src); 2406 movq(dst, src);
2054 if (shift_value == 0) { 2407 if (shift_value == 0) {
2055 testq(dst, dst); 2408 if (kSmiValueSize == 32) {
2409 testq(dst, dst);
2410 } else {
2411 ASSERT(kSmiValueSize == 31);
2412 testl(dst, dst);
2413 }
2056 j(negative, on_not_smi_result, near_jump); 2414 j(negative, on_not_smi_result, near_jump);
2057 } 2415 }
2058 shr(dst, Immediate(shift_value + kSmiShift)); 2416 if (kSmiValueSize == 32) {
2059 shl(dst, Immediate(kSmiShift)); 2417 shr(dst, Immediate(shift_value + kSmiShift));
2418 shl(dst, Immediate(kSmiShift));
2419 } else {
2420 ASSERT(kSmiValueSize == 31);
2421 SmiToInteger32(dst, dst);
2422 shrl(dst, Immediate(shift_value));
2423 JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
2424 shll(dst, Immediate(kSmiShift));
2425 }
2060 } 2426 }
2061 } 2427 }
2062 2428
2063 2429
2064 void MacroAssembler::SmiShiftLeft(Register dst, 2430 void MacroAssembler::SmiShiftLeft(Register dst,
2065 Register src1, 2431 Register src1,
2066 Register src2) { 2432 Register src2,
2067 ASSERT(!dst.is(rcx)); 2433 Label* on_not_smi_result) {
2068 // Untag shift amount. 2434 if (kSmiValueSize == 32) {
2069 if (!dst.is(src1)) { 2435 ASSERT(!dst.is(rcx));
2070 movq(dst, src1); 2436 // Untag shift amount.
2437 if (!dst.is(src1)) {
2438 movq(dst, src1);
2439 }
2440 SmiToInteger32(rcx, src2);
2441 // Shift amount specified by lower 5 bits, not six as the shl opcode.
2442 and_(rcx, Immediate(0x1f));
2443 shl_cl(dst);
2444 } else {
2445 ASSERT(kSmiValueSize == 31);
2446 ASSERT(!dst.is(kScratchRegister));
2447 ASSERT(!src1.is(kScratchRegister));
2448 ASSERT(!src2.is(kScratchRegister));
2449 ASSERT(!dst.is(rcx));
2450 Label result_ok;
2451
2452 if (dst.is(src1)) {
2453 UNIMPLEMENTED(); // Not used.
2454 } else {
2455 if (src1.is(rcx) || src2.is(rcx)) {
2456 movq(kScratchRegister, rcx);
2457 }
2458 movq(dst, src1);
2459 SmiToInteger32(dst, dst);
2460 // Untag shift amount.
2461 SmiToInteger32(rcx, src2);
2462 // Shift amount specified by lower 5 bits, not six as the shl opcode.
2463 andl(rcx, Immediate(0x1f));
2464 shll_cl(dst);
2465 JumpIfValidSmiValue(dst, &result_ok, Label::kNear);
2466 if (src1.is(rcx) || src2.is(rcx)) {
2467 if (src1.is(rcx)) {
2468 movq(src1, kScratchRegister);
2469 } else {
2470 movq(src2, kScratchRegister);
2471 }
2472 }
2473 jmp(on_not_smi_result);
2474 bind(&result_ok);
2475 Integer32ToSmi(dst, dst);
2476 }
2071 } 2477 }
2072 SmiToInteger32(rcx, src2);
2073 // Shift amount specified by lower 5 bits, not six as the shl opcode.
2074 and_(rcx, Immediate(0x1f));
2075 shl_cl(dst);
2076 } 2478 }
2077 2479
2078 2480
2079 void MacroAssembler::SmiShiftLogicalRight(Register dst, 2481 void MacroAssembler::SmiShiftLogicalRight(Register dst,
2080 Register src1, 2482 Register src1,
2081 Register src2, 2483 Register src2,
2082 Label* on_not_smi_result, 2484 Label* on_not_smi_result,
2083 Label::Distance near_jump) { 2485 Label::Distance near_jump) {
2084 ASSERT(!dst.is(kScratchRegister)); 2486 ASSERT(!dst.is(kScratchRegister));
2085 ASSERT(!src1.is(kScratchRegister)); 2487 ASSERT(!src1.is(kScratchRegister));
2086 ASSERT(!src2.is(kScratchRegister)); 2488 ASSERT(!src2.is(kScratchRegister));
2087 ASSERT(!dst.is(rcx)); 2489 ASSERT(!dst.is(rcx));
2088 // dst and src1 can be the same, because the one case that bails out 2490 Label result_ok;
2089 // is a shift by 0, which leaves dst, and therefore src1, unchanged. 2491
2090 if (src1.is(rcx) || src2.is(rcx)) { 2492 if (dst.is(src1)) {
2091 movq(kScratchRegister, rcx); 2493 UNIMPLEMENTED(); // Not used.
2092 } 2494 } else {
2093 if (!dst.is(src1)) { 2495 if (src1.is(rcx) || src2.is(rcx)) {
2496 movq(kScratchRegister, rcx);
2497 }
2094 movq(dst, src1); 2498 movq(dst, src1);
2095 } 2499 SmiToInteger32(dst, dst);
2096 SmiToInteger32(rcx, src2); 2500 SmiToInteger32(rcx, src2);
2097 orl(rcx, Immediate(kSmiShift)); 2501 shrl_cl(dst);
2098 shr_cl(dst); // Shift is rcx modulo 0x1f + 32. 2502 JumpIfUIntValidSmiValue(dst, &result_ok, Label::kNear);
2099 shl(dst, Immediate(kSmiShift)); 2503 if (src1.is(rcx) || src2.is(rcx)) {
2100 testq(dst, dst); 2504 if (src1.is(rcx)) {
2101 if (src1.is(rcx) || src2.is(rcx)) { 2505 movq(src1, kScratchRegister);
2102 Label positive_result; 2506 } else {
2103 j(positive, &positive_result, Label::kNear); 2507 movq(src2, kScratchRegister);
2104 if (src1.is(rcx)) { 2508 }
2105 movq(src1, kScratchRegister);
2106 } else {
2107 movq(src2, kScratchRegister);
2108 } 2509 }
2109 jmp(on_not_smi_result, near_jump); 2510 jmp(on_not_smi_result);
2110 bind(&positive_result); 2511 bind(&result_ok);
2111 } else { 2512 Integer32ToSmi(dst, dst);
2112 // src2 was zero and src1 negative.
2113 j(negative, on_not_smi_result, near_jump);
2114 } 2513 }
2115 } 2514 }
2116 2515
2117 2516
2118 void MacroAssembler::SmiShiftArithmeticRight(Register dst, 2517 void MacroAssembler::SmiShiftArithmeticRight(Register dst,
2119 Register src1, 2518 Register src1,
2120 Register src2) { 2519 Register src2) {
2121 ASSERT(!dst.is(kScratchRegister)); 2520 ASSERT(!dst.is(kScratchRegister));
2122 ASSERT(!src1.is(kScratchRegister)); 2521 ASSERT(!src1.is(kScratchRegister));
2123 ASSERT(!src2.is(kScratchRegister)); 2522 ASSERT(!src2.is(kScratchRegister));
2124 ASSERT(!dst.is(rcx)); 2523 ASSERT(!dst.is(rcx));
2125 if (src1.is(rcx)) { 2524 if (src1.is(rcx)) {
2126 movq(kScratchRegister, src1); 2525 movq(kScratchRegister, src1);
2127 } else if (src2.is(rcx)) { 2526 } else if (src2.is(rcx)) {
2128 movq(kScratchRegister, src2); 2527 movq(kScratchRegister, src2);
2129 } 2528 }
2130 if (!dst.is(src1)) { 2529 if (!dst.is(src1)) {
2131 movq(dst, src1); 2530 movq(dst, src1);
2132 } 2531 }
2133 SmiToInteger32(rcx, src2); 2532 SmiToInteger32(rcx, src2);
2134 orl(rcx, Immediate(kSmiShift)); 2533 if (kSmiValueSize == 32) {
2135 sar_cl(dst); // Shift 32 + original rcx & 0x1f. 2534 orl(rcx, Immediate(kSmiShift));
2136 shl(dst, Immediate(kSmiShift)); 2535 sar_cl(dst); // Shift 32 + original rcx & 0x1f.
2536 } else {
2537 ASSERT(kSmiValueSize == 31);
2538 SmiToInteger32(dst, dst);
2539 sarl_cl(dst);
2540 }
2541 Integer32ToSmi(dst, dst);
2137 if (src1.is(rcx)) { 2542 if (src1.is(rcx)) {
2138 movq(src1, kScratchRegister); 2543 movq(src1, kScratchRegister);
2139 } else if (src2.is(rcx)) { 2544 } else if (src2.is(rcx)) {
2140 movq(src2, kScratchRegister); 2545 movq(src2, kScratchRegister);
2141 } 2546 }
2142 } 2547 }
2143 2548
2144 2549
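Arithmetic right shift, by contrast, can only shrink the magnitude, so SmiShiftArithmeticRight needs no bailout in either configuration. The 32-bit path keeps the old trick of folding the untag into the shift (OR 32 into rcx, then sar by 32 + n). A host-side illustration of that folding, assuming the usual arithmetic behavior of >> on signed 64-bit integers:

// Sketch: with the payload in the upper half of the word, sar by
// (32 + n) untags and shifts in one instruction; the magnitude only
// shrinks, so no overflow check is needed.
#include <cstdint>
#include <cstdio>

int main() {
  int64_t tagged = -1234LL * (1LL << 32);  // smi with kSmiShift == 32
  int n = 3;                               // shift amount, already masked
  int64_t value = tagged >> (32 + n);      // == -1234 >> 3 == -155
  std::printf("%lld\n", static_cast<long long>(value));
  return 0;
}
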
2145 void MacroAssembler::SelectNonSmi(Register dst, 2550 void MacroAssembler::SelectNonSmi(Register dst,
2146 Register src1, 2551 Register src1,
(...skipping 36 matching lines...)
2183 2588
2184 SmiIndex MacroAssembler::SmiToIndex(Register dst, 2589 SmiIndex MacroAssembler::SmiToIndex(Register dst,
2185 Register src, 2590 Register src,
2186 int shift) { 2591 int shift) {
2187 ASSERT(is_uint6(shift)); 2592 ASSERT(is_uint6(shift));
2188 // There is a possible optimization if shift is in the range 60-63, but that 2593 // There is a possible optimization if shift is in the range 60-63, but that
2189 // will (and must) never happen. 2594 // will (and must) never happen.
2190 if (!dst.is(src)) { 2595 if (!dst.is(src)) {
2191 movq(dst, src); 2596 movq(dst, src);
2192 } 2597 }
2193 if (shift < kSmiShift) { 2598 if (kSmiValueSize == 32) {
2194 sar(dst, Immediate(kSmiShift - shift)); 2599 if (shift < kSmiShift) {
2600 sar(dst, Immediate(kSmiShift - shift));
2601 } else {
2602 shl(dst, Immediate(shift - kSmiShift));
2603 }
2604 return SmiIndex(dst, times_1);
2195 } else { 2605 } else {
2196 shl(dst, Immediate(shift - kSmiShift)); 2606 ASSERT(kSmiValueSize == 31);
2607 if (shift == times_1) {
2608 sar(dst, Immediate(kSmiShift));
2609 return SmiIndex(dst, times_1);
2610 }
2611 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2197 } 2612 }
2198 return SmiIndex(dst, times_1);
2199 } 2613 }
2200 2614
2615
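The 31-bit case of SmiToIndex exploits the fact that with kSmiShift == 1 a tagged smi is exactly twice its value, so one factor of two can be absorbed into the addressing-mode scale: a requested scale of 2^shift on the value becomes 2^(shift - 1) on the tagged word, and only shift == 0 needs a real sar. A quick arithmetic check of that identity:

// Sketch: value << shift == tagged << (shift - 1) when tagged == 2 * value,
// which is what lets times_8 drop to times_4 (and so on) on tagged smis.
#include <cstdint>
#include <cstdio>

int main() {
  int64_t value = 7;
  int64_t tagged = value << 1;  // kSmiShift == 1
  int shift = 3;                // caller wants index * 8
  std::printf("%lld %lld\n",
              static_cast<long long>(value << shift),          // 56
              static_cast<long long>(tagged << (shift - 1)));  // 56
  return 0;
}
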
2201 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst, 2616 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
2202 Register src, 2617 Register src,
2203 int shift) { 2618 int shift) {
2204 // Register src holds a positive smi. 2619 // Register src holds a positive smi.
2205 ASSERT(is_uint6(shift)); 2620 ASSERT(is_uint6(shift));
2206 if (!dst.is(src)) { 2621 if (!dst.is(src)) {
2207 movq(dst, src); 2622 movq(dst, src);
2208 } 2623 }
2209 neg(dst); 2624 neg(dst);
2210 if (shift < kSmiShift) { 2625 if (kSmiValueSize == 32) {
2211 sar(dst, Immediate(kSmiShift - shift)); 2626 if (shift < kSmiShift) {
2627 sar(dst, Immediate(kSmiShift - shift));
2628 } else {
2629 shl(dst, Immediate(shift - kSmiShift));
2630 }
2631 return SmiIndex(dst, times_1);
2212 } else { 2632 } else {
2213 shl(dst, Immediate(shift - kSmiShift)); 2633 ASSERT(kSmiValueSize == 31);
2634 if (shift == times_1) {
2635 sar(dst, Immediate(kSmiShift));
2636 return SmiIndex(dst, times_1);
2637 }
2638 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2214 } 2639 }
2215 return SmiIndex(dst, times_1);
2216 } 2640 }
2217 2641
2218 2642
2219 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { 2643 void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
2220 ASSERT_EQ(0, kSmiShift % kBitsPerByte); 2644 if (kSmiValueSize == 32) {
2221 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); 2645 ASSERT_EQ(0, kSmiShift % kBitsPerByte);
2646 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2647 } else {
2648 ASSERT(kSmiValueSize == 31);
2649 SmiToInteger32(kScratchRegister, src);
2650 addl(dst, kScratchRegister);
2651 }
2222 } 2652 }
2223 2653
2224 2654
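AddSmiField's 32-bit path relies on the smi payload occupying the upper four bytes of the little-endian tagged word, so the untagged value can be read directly as the dword at byte offset kSmiShift / kBitsPerByte == 4; the 31-bit layout has no byte-aligned payload, hence the explicit untag through kScratchRegister. A sketch of the layout assumption:

// Sketch: for a 32-bit smi the payload is the upper dword of the tagged
// word, so on little-endian x64 it can be loaded from byte offset 4.
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  int64_t tagged = 42LL << 32;  // 32-bit smi layout
  int32_t payload;
  std::memcpy(&payload, reinterpret_cast<const char*>(&tagged) + 4,
              sizeof(payload));
  std::printf("%d\n", payload);  // 42
  return 0;
}
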
2655 void MacroAssembler::Test(const Operand& src, Smi* source) {
2656 if (kSmiValueSize == 32) {
2657 testl(Operand(src, kIntSize), Immediate(source->value()));
2658 } else {
2659 ASSERT(kSmiValueSize == 31);
2660 testl(src, SmiToImmediate(source));
2661 }
2662 }
2663
2664
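Test's 31-bit branch works because the whole tagged word of a 31-bit smi still fits in a signed 32-bit immediate (cf. SmiToImmediate above), whereas the 32-bit layout can only test the payload half through an offset operand. A small check that even the largest 31-bit smi stays representable once tagged:

// Sketch: the largest 31-bit smi, tagged, is 0x7FFFFFFE, which is a
// valid int32 and therefore usable as a 32-bit testl immediate.
#include <cstdint>
#include <cstdio>

int main() {
  int32_t max_smi = (1 << 30) - 1;  // 0x3FFFFFFF
  int32_t tagged = max_smi << 1;    // 0x7FFFFFFE: still fits in int32
  std::printf("0x%X\n", static_cast<uint32_t>(tagged));
  return 0;
}
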
2665 void MacroAssembler::TestBit(const Operand& src, int bits) {
2666 int byte_offset = bits / kBitsPerByte;
2667 int bit_in_byte = bits & (kBitsPerByte - 1);
2668 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
2669 }
2670
2671
2672 void MacroAssembler::PushInt64AsTwoSmis(Register src, Register scratch) {
2673 movq(scratch, src);
2674 // High bits.
2675 shr(src, Immediate(64 - kSmiShift));
2676 shl(src, Immediate(kSmiShift));
2677 push(src);
2678 // Low bits.
2679 shl(scratch, Immediate(kSmiShift));
2680 push(scratch);
2681 }
2682
2683
2684 void MacroAssembler::PopInt64AsTwoSmis(Register dst, Register scratch) {
2685 pop(scratch);
2686 // Low bits.
2687 shr(scratch, Immediate(kSmiShift));
2688 pop(dst);
2689 shr(dst, Immediate(kSmiShift));
2690 // High bits.
2691 shl(dst, Immediate(64 - kSmiShift));
2692 or_(dst, scratch);
2693 }
2694
2695
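PushInt64AsTwoSmis and PopInt64AsTwoSmis split a raw 64-bit word into two smi-tagged halves so that neither stack slot can be misread as a heap pointer by the GC: the high smi carries the top 64 - kSmiShift bits and the low smi the remainder. A round-trip sketch of the bit manipulation, shown with kSmiShift == 32 (the identity holds for any shift):

// Sketch: round trip of PushInt64AsTwoSmis / PopInt64AsTwoSmis. Both
// intermediate words carry a smi tag, so the GC skips them safely.
#include <cstdint>
#include <cstdio>

int main() {
  const int kSmiShift = 32;  // the 32-bit smi configuration
  uint64_t src = 0x0123456789ABCDEFull;
  uint64_t high = (src >> (64 - kSmiShift)) << kSmiShift;  // first push
  uint64_t low = src << kSmiShift;                         // second push
  uint64_t dst =                                           // pop + merge
      ((high >> kSmiShift) << (64 - kSmiShift)) | (low >> kSmiShift);
  std::printf("round trip ok: %d\n", dst == src);
  return 0;
}
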
2696 bool MacroAssembler::IsUnsafeSmiOperator(Token::Value op) {
2697 return (op == Token::ADD || op == Token::SUB || op == Token::MUL ||
2698 op == Token::DIV || (kSmiValueSize == 31 && op == Token::SHL) ||
2699 op == Token::SHR);
2700 }
2701
2702
2703 // End of smi tagging, untagging and tag detection.
2704 // ----------------------------------------------------------------------------
2705
2706
2225 void MacroAssembler::JumpIfNotString(Register object, 2707 void MacroAssembler::JumpIfNotString(Register object,
2226 Register object_map, 2708 Register object_map,
2227 Label* not_string, 2709 Label* not_string,
2228 Label::Distance near_jump) { 2710 Label::Distance near_jump) {
2229 Condition is_smi = CheckSmi(object); 2711 Condition is_smi = CheckSmi(object);
2230 j(is_smi, not_string, near_jump); 2712 j(is_smi, not_string, near_jump);
2231 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map); 2713 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
2232 j(above_equal, not_string, near_jump); 2714 j(above_equal, not_string, near_jump);
2233 } 2715 }
2234 2716
(...skipping 237 matching lines...)
2472 } 2954 }
2473 2955
2474 2956
2475 void MacroAssembler::Drop(int stack_elements) { 2957 void MacroAssembler::Drop(int stack_elements) {
2476 if (stack_elements > 0) { 2958 if (stack_elements > 0) {
2477 addq(rsp, Immediate(stack_elements * kPointerSize)); 2959 addq(rsp, Immediate(stack_elements * kPointerSize));
2478 } 2960 }
2479 } 2961 }
2480 2962
2481 2963
2482 void MacroAssembler::Test(const Operand& src, Smi* source) {
2483 testl(Operand(src, kIntSize), Immediate(source->value()));
2484 }
2485
2486
2487 void MacroAssembler::TestBit(const Operand& src, int bits) {
2488 int byte_offset = bits / kBitsPerByte;
2489 int bit_in_byte = bits & (kBitsPerByte - 1);
2490 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
2491 }
2492
2493
2494 void MacroAssembler::Jump(ExternalReference ext) { 2964 void MacroAssembler::Jump(ExternalReference ext) {
2495 LoadAddress(kScratchRegister, ext); 2965 LoadAddress(kScratchRegister, ext);
2496 jmp(kScratchRegister); 2966 jmp(kScratchRegister);
2497 } 2967 }
2498 2968
2499 2969
2500 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { 2970 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
2501 movq(kScratchRegister, destination, rmode); 2971 movq(kScratchRegister, destination, rmode);
2502 jmp(kScratchRegister); 2972 jmp(kScratchRegister);
2503 } 2973 }
(...skipping 2190 matching lines...)
4694 j(greater, &no_memento_available); 5164 j(greater, &no_memento_available);
4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), 5165 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
4696 Heap::kAllocationMementoMapRootIndex); 5166 Heap::kAllocationMementoMapRootIndex);
4697 bind(&no_memento_available); 5167 bind(&no_memento_available);
4698 } 5168 }
4699 5169
4700 5170
4701 } } // namespace v8::internal 5171 } } // namespace v8::internal
4702 5172
4703 #endif // V8_TARGET_ARCH_X64 5173 #endif // V8_TARGET_ARCH_X64