OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/hydrogen-osr.h" | 9 #include "src/hydrogen-osr.h" |
10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
(...skipping 1048 matching lines...) |
1059 HMod* hmod = instr->hydrogen(); | 1059 HMod* hmod = instr->hydrogen(); |
1060 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1060 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1061 Label dividend_is_not_negative, done; | 1061 Label dividend_is_not_negative, done; |
1062 | 1062 |
1063 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 1063 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { |
1064 __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg)); | 1064 __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg)); |
1065 // Note: The code below even works when right contains kMinInt. | 1065 // Note: The code below even works when right contains kMinInt. |
1066 __ dsubu(dividend, zero_reg, dividend); | 1066 __ dsubu(dividend, zero_reg, dividend); |
1067 __ And(dividend, dividend, Operand(mask)); | 1067 __ And(dividend, dividend, Operand(mask)); |
1068 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1068 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1069 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1069 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1070 Operand(zero_reg)); |
1070 } | 1071 } |
1071 __ Branch(USE_DELAY_SLOT, &done); | 1072 __ Branch(USE_DELAY_SLOT, &done); |
1072 __ dsubu(dividend, zero_reg, dividend); | 1073 __ dsubu(dividend, zero_reg, dividend); |
1073 } | 1074 } |
1074 | 1075 |
1075 __ bind(&dividend_is_not_negative); | 1076 __ bind(&dividend_is_not_negative); |
1076 __ And(dividend, dividend, Operand(mask)); | 1077 __ And(dividend, dividend, Operand(mask)); |
1077 __ bind(&done); | 1078 __ bind(&done); |
1078 } | 1079 } |
1079 | 1080 |
1080 | 1081 |
1081 void LCodeGen::DoModByConstI(LModByConstI* instr) { | 1082 void LCodeGen::DoModByConstI(LModByConstI* instr) { |
1082 Register dividend = ToRegister(instr->dividend()); | 1083 Register dividend = ToRegister(instr->dividend()); |
1083 int32_t divisor = instr->divisor(); | 1084 int32_t divisor = instr->divisor(); |
1084 Register result = ToRegister(instr->result()); | 1085 Register result = ToRegister(instr->result()); |
1085 DCHECK(!dividend.is(result)); | 1086 DCHECK(!dividend.is(result)); |
1086 | 1087 |
1087 if (divisor == 0) { | 1088 if (divisor == 0) { |
1088 DeoptimizeIf(al, instr, "division by zero"); | 1089 DeoptimizeIf(al, instr, Deoptimizer::kDivisionByZero); |
1089 return; | 1090 return; |
1090 } | 1091 } |
1091 | 1092 |
1092 __ TruncatingDiv(result, dividend, Abs(divisor)); | 1093 __ TruncatingDiv(result, dividend, Abs(divisor)); |
1093 __ Dmul(result, result, Operand(Abs(divisor))); | 1094 __ Dmul(result, result, Operand(Abs(divisor))); |
1094 __ Dsubu(result, dividend, Operand(result)); | 1095 __ Dsubu(result, dividend, Operand(result)); |
1095 | 1096 |
1096 // Check for negative zero. | 1097 // Check for negative zero. |
1097 HMod* hmod = instr->hydrogen(); | 1098 HMod* hmod = instr->hydrogen(); |
1098 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1099 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1099 Label remainder_not_zero; | 1100 Label remainder_not_zero; |
1100 __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg)); | 1101 __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg)); |
1101 DeoptimizeIf(lt, instr, "minus zero", dividend, Operand(zero_reg)); | 1102 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, dividend, |
| 1103 Operand(zero_reg)); |
1102 __ bind(&remainder_not_zero); | 1104 __ bind(&remainder_not_zero); |
1103 } | 1105 } |
1104 } | 1106 } |
1105 | 1107 |
1106 | 1108 |
1107 void LCodeGen::DoModI(LModI* instr) { | 1109 void LCodeGen::DoModI(LModI* instr) { |
1108 HMod* hmod = instr->hydrogen(); | 1110 HMod* hmod = instr->hydrogen(); |
1109 const Register left_reg = ToRegister(instr->left()); | 1111 const Register left_reg = ToRegister(instr->left()); |
1110 const Register right_reg = ToRegister(instr->right()); | 1112 const Register right_reg = ToRegister(instr->right()); |
1111 const Register result_reg = ToRegister(instr->result()); | 1113 const Register result_reg = ToRegister(instr->result()); |
1112 | 1114 |
1113 // div runs in the background while we check for special cases. | 1115 // div runs in the background while we check for special cases. |
1114 __ Dmod(result_reg, left_reg, right_reg); | 1116 __ Dmod(result_reg, left_reg, right_reg); |
1115 | 1117 |
1116 Label done; | 1118 Label done; |
1117 // Check for x % 0, we have to deopt in this case because we can't return a | 1119 // Check for x % 0, we have to deopt in this case because we can't return a |
1118 // NaN. | 1120 // NaN. |
1119 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1121 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
1120 DeoptimizeIf(eq, instr, "division by zero", right_reg, Operand(zero_reg)); | 1122 DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, right_reg, |
| 1123 Operand(zero_reg)); |
1121 } | 1124 } |
1122 | 1125 |
1123 // Check for kMinInt % -1, div will return kMinInt, which is not what we | 1126 // Check for kMinInt % -1, div will return kMinInt, which is not what we |
1124 // want. We have to deopt if we care about -0, because we can't return that. | 1127 // want. We have to deopt if we care about -0, because we can't return that. |
1125 if (hmod->CheckFlag(HValue::kCanOverflow)) { | 1128 if (hmod->CheckFlag(HValue::kCanOverflow)) { |
1126 Label no_overflow_possible; | 1129 Label no_overflow_possible; |
1127 __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt)); | 1130 __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt)); |
1128 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1131 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1129 DeoptimizeIf(eq, instr, "minus zero", right_reg, Operand(-1)); | 1132 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, right_reg, Operand(-1)); |
1130 } else { | 1133 } else { |
1131 __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1)); | 1134 __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1)); |
1132 __ Branch(USE_DELAY_SLOT, &done); | 1135 __ Branch(USE_DELAY_SLOT, &done); |
1133 __ mov(result_reg, zero_reg); | 1136 __ mov(result_reg, zero_reg); |
1134 } | 1137 } |
1135 __ bind(&no_overflow_possible); | 1138 __ bind(&no_overflow_possible); |
1136 } | 1139 } |
1137 | 1140 |
1138 // If we care about -0, test if the dividend is <0 and the result is 0. | 1141 // If we care about -0, test if the dividend is <0 and the result is 0. |
1139 __ Branch(&done, ge, left_reg, Operand(zero_reg)); | 1142 __ Branch(&done, ge, left_reg, Operand(zero_reg)); |
1140 | 1143 |
1141 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1144 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1142 DeoptimizeIf(eq, instr, "minus zero", result_reg, Operand(zero_reg)); | 1145 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result_reg, |
| 1146 Operand(zero_reg)); |
1143 } | 1147 } |
1144 __ bind(&done); | 1148 __ bind(&done); |
1145 } | 1149 } |
1146 | 1150 |
1147 | 1151 |
1148 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1152 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
1149 Register dividend = ToRegister(instr->dividend()); | 1153 Register dividend = ToRegister(instr->dividend()); |
1150 int32_t divisor = instr->divisor(); | 1154 int32_t divisor = instr->divisor(); |
1151 Register result = ToRegister(instr->result()); | 1155 Register result = ToRegister(instr->result()); |
1152 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 1156 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); |
1153 DCHECK(!result.is(dividend)); | 1157 DCHECK(!result.is(dividend)); |
1154 | 1158 |
1155 // Check for (0 / -x) that will produce negative zero. | 1159 // Check for (0 / -x) that will produce negative zero. |
1156 HDiv* hdiv = instr->hydrogen(); | 1160 HDiv* hdiv = instr->hydrogen(); |
1157 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1161 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1158 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1162 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1163 Operand(zero_reg)); |
1159 } | 1164 } |
1160 // Check for (kMinInt / -1). | 1165 // Check for (kMinInt / -1). |
1161 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1166 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
1162 DeoptimizeIf(eq, instr, "overflow", dividend, Operand(kMinInt)); | 1167 DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, dividend, Operand(kMinInt)); |
1163 } | 1168 } |
1164 // Deoptimize if remainder will not be 0. | 1169 // Deoptimize if remainder will not be 0. |
1165 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 1170 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && |
1166 divisor != 1 && divisor != -1) { | 1171 divisor != 1 && divisor != -1) { |
1167 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1172 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1168 __ And(at, dividend, Operand(mask)); | 1173 __ And(at, dividend, Operand(mask)); |
1169 DeoptimizeIf(ne, instr, "lost precision", at, Operand(zero_reg)); | 1174 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, at, Operand(zero_reg)); |
1170 } | 1175 } |
1171 | 1176 |
1172 if (divisor == -1) { // Nice shortcut, not needed for correctness. | 1177 if (divisor == -1) { // Nice shortcut, not needed for correctness. |
1173 __ Dsubu(result, zero_reg, dividend); | 1178 __ Dsubu(result, zero_reg, dividend); |
1174 return; | 1179 return; |
1175 } | 1180 } |
1176 uint16_t shift = WhichPowerOf2Abs(divisor); | 1181 uint16_t shift = WhichPowerOf2Abs(divisor); |
1177 if (shift == 0) { | 1182 if (shift == 0) { |
1178 __ Move(result, dividend); | 1183 __ Move(result, dividend); |
1179 } else if (shift == 1) { | 1184 } else if (shift == 1) { |
1180 __ dsrl32(result, dividend, 31); | 1185 __ dsrl32(result, dividend, 31); |
1181 __ Daddu(result, dividend, Operand(result)); | 1186 __ Daddu(result, dividend, Operand(result)); |
1182 } else { | 1187 } else { |
1183 __ dsra32(result, dividend, 31); | 1188 __ dsra32(result, dividend, 31); |
1184 __ dsrl32(result, result, 32 - shift); | 1189 __ dsrl32(result, result, 32 - shift); |
1185 __ Daddu(result, dividend, Operand(result)); | 1190 __ Daddu(result, dividend, Operand(result)); |
1186 } | 1191 } |
1187 if (shift > 0) __ dsra(result, result, shift); | 1192 if (shift > 0) __ dsra(result, result, shift); |
1188 if (divisor < 0) __ Dsubu(result, zero_reg, result); | 1193 if (divisor < 0) __ Dsubu(result, zero_reg, result); |
1189 } | 1194 } |
1190 | 1195 |
1191 | 1196 |
1192 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1197 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
1193 Register dividend = ToRegister(instr->dividend()); | 1198 Register dividend = ToRegister(instr->dividend()); |
1194 int32_t divisor = instr->divisor(); | 1199 int32_t divisor = instr->divisor(); |
1195 Register result = ToRegister(instr->result()); | 1200 Register result = ToRegister(instr->result()); |
1196 DCHECK(!dividend.is(result)); | 1201 DCHECK(!dividend.is(result)); |
1197 | 1202 |
1198 if (divisor == 0) { | 1203 if (divisor == 0) { |
1199 DeoptimizeIf(al, instr, "division by zero"); | 1204 DeoptimizeIf(al, instr, Deoptimizer::kDivisionByZero); |
1200 return; | 1205 return; |
1201 } | 1206 } |
1202 | 1207 |
1203 // Check for (0 / -x) that will produce negative zero. | 1208 // Check for (0 / -x) that will produce negative zero. |
1204 HDiv* hdiv = instr->hydrogen(); | 1209 HDiv* hdiv = instr->hydrogen(); |
1205 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1210 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1206 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1211 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1212 Operand(zero_reg)); |
1207 } | 1213 } |
1208 | 1214 |
1209 __ TruncatingDiv(result, dividend, Abs(divisor)); | 1215 __ TruncatingDiv(result, dividend, Abs(divisor)); |
1210 if (divisor < 0) __ Subu(result, zero_reg, result); | 1216 if (divisor < 0) __ Subu(result, zero_reg, result); |
1211 | 1217 |
1212 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1218 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { |
1213 __ Dmul(scratch0(), result, Operand(divisor)); | 1219 __ Dmul(scratch0(), result, Operand(divisor)); |
1214 __ Dsubu(scratch0(), scratch0(), dividend); | 1220 __ Dsubu(scratch0(), scratch0(), dividend); |
1215 DeoptimizeIf(ne, instr, "lost precision", scratch0(), Operand(zero_reg)); | 1221 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, scratch0(), |
| 1222 Operand(zero_reg)); |
1216 } | 1223 } |
1217 } | 1224 } |
1218 | 1225 |
1219 | 1226 |
1220 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1227 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
1221 void LCodeGen::DoDivI(LDivI* instr) { | 1228 void LCodeGen::DoDivI(LDivI* instr) { |
1222 HBinaryOperation* hdiv = instr->hydrogen(); | 1229 HBinaryOperation* hdiv = instr->hydrogen(); |
1223 Register dividend = ToRegister(instr->dividend()); | 1230 Register dividend = ToRegister(instr->dividend()); |
1224 Register divisor = ToRegister(instr->divisor()); | 1231 Register divisor = ToRegister(instr->divisor()); |
1225 const Register result = ToRegister(instr->result()); | 1232 const Register result = ToRegister(instr->result()); |
1226 | 1233 |
1227 // On MIPS div is asynchronous - it will run in the background while we | 1234 // On MIPS div is asynchronous - it will run in the background while we |
1228 // check for special cases. | 1235 // check for special cases. |
1229 __ Ddiv(result, dividend, divisor); | 1236 __ Ddiv(result, dividend, divisor); |
1230 | 1237 |
1231 // Check for x / 0. | 1238 // Check for x / 0. |
1232 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1239 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1233 DeoptimizeIf(eq, instr, "division by zero", divisor, Operand(zero_reg)); | 1240 DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, divisor, |
| 1241 Operand(zero_reg)); |
1234 } | 1242 } |
1235 | 1243 |
1236 // Check for (0 / -x) that will produce negative zero. | 1244 // Check for (0 / -x) that will produce negative zero. |
1237 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1245 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1238 Label left_not_zero; | 1246 Label left_not_zero; |
1239 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 1247 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); |
1240 DeoptimizeIf(lt, instr, "minus zero", divisor, Operand(zero_reg)); | 1248 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, divisor, |
| 1249 Operand(zero_reg)); |
1241 __ bind(&left_not_zero); | 1250 __ bind(&left_not_zero); |
1242 } | 1251 } |
1243 | 1252 |
1244 // Check for (kMinInt / -1). | 1253 // Check for (kMinInt / -1). |
1245 if (hdiv->CheckFlag(HValue::kCanOverflow) && | 1254 if (hdiv->CheckFlag(HValue::kCanOverflow) && |
1246 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1255 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1247 Label left_not_min_int; | 1256 Label left_not_min_int; |
1248 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 1257 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); |
1249 DeoptimizeIf(eq, instr, "overflow", divisor, Operand(-1)); | 1258 DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, divisor, Operand(-1)); |
1250 __ bind(&left_not_min_int); | 1259 __ bind(&left_not_min_int); |
1251 } | 1260 } |
1252 | 1261 |
1253 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1262 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1254 // Calculate remainder. | 1263 // Calculate remainder. |
1255 Register remainder = ToRegister(instr->temp()); | 1264 Register remainder = ToRegister(instr->temp()); |
1256 if (kArchVariant != kMips64r6) { | 1265 if (kArchVariant != kMips64r6) { |
1257 __ mfhi(remainder); | 1266 __ mfhi(remainder); |
1258 } else { | 1267 } else { |
1259 __ dmod(remainder, dividend, divisor); | 1268 __ dmod(remainder, dividend, divisor); |
1260 } | 1269 } |
1261 DeoptimizeIf(ne, instr, "lost precision", remainder, Operand(zero_reg)); | 1270 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, remainder, |
| 1271 Operand(zero_reg)); |
1262 } | 1272 } |
1263 } | 1273 } |
1264 | 1274 |
1265 | 1275 |
1266 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { | 1276 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { |
1267 DoubleRegister addend = ToDoubleRegister(instr->addend()); | 1277 DoubleRegister addend = ToDoubleRegister(instr->addend()); |
1268 DoubleRegister multiplier = ToDoubleRegister(instr->multiplier()); | 1278 DoubleRegister multiplier = ToDoubleRegister(instr->multiplier()); |
1269 DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand()); | 1279 DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand()); |
1270 | 1280 |
1271 // This is computed in-place. | 1281 // This is computed in-place. |
(...skipping 24 matching lines...) |
1296 return; | 1306 return; |
1297 } | 1307 } |
1298 | 1308 |
1299 // If the divisor is negative, we have to negate and handle edge cases. | 1309 // If the divisor is negative, we have to negate and handle edge cases. |
1300 // Dividend can be the same register as result so save the value of it | 1310 // Dividend can be the same register as result so save the value of it |
1301 // for checking overflow. | 1311 // for checking overflow. |
1302 __ Move(scratch, dividend); | 1312 __ Move(scratch, dividend); |
1303 | 1313 |
1304 __ Dsubu(result, zero_reg, dividend); | 1314 __ Dsubu(result, zero_reg, dividend); |
1305 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1315 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1306 DeoptimizeIf(eq, instr, "minus zero", result, Operand(zero_reg)); | 1316 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result, Operand(zero_reg)); |
1307 } | 1317 } |
1308 | 1318 |
1309 __ Xor(scratch, scratch, result); | 1319 __ Xor(scratch, scratch, result); |
1310 // Dividing by -1 is basically negation, unless we overflow. | 1320 // Dividing by -1 is basically negation, unless we overflow. |
1311 if (divisor == -1) { | 1321 if (divisor == -1) { |
1312 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1322 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1313 DeoptimizeIf(gt, instr, "overflow", result, Operand(kMaxInt)); | 1323 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, result, Operand(kMaxInt)); |
1314 } | 1324 } |
1315 return; | 1325 return; |
1316 } | 1326 } |
1317 | 1327 |
1318 // If the negation could not overflow, simply shifting is OK. | 1328 // If the negation could not overflow, simply shifting is OK. |
1319 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1329 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1320 __ dsra(result, result, shift); | 1330 __ dsra(result, result, shift); |
1321 return; | 1331 return; |
1322 } | 1332 } |
1323 | 1333 |
1324 Label no_overflow, done; | 1334 Label no_overflow, done; |
1325 __ Branch(&no_overflow, lt, scratch, Operand(zero_reg)); | 1335 __ Branch(&no_overflow, lt, scratch, Operand(zero_reg)); |
1326 __ li(result, Operand(kMinInt / divisor), CONSTANT_SIZE); | 1336 __ li(result, Operand(kMinInt / divisor), CONSTANT_SIZE); |
1327 __ Branch(&done); | 1337 __ Branch(&done); |
1328 __ bind(&no_overflow); | 1338 __ bind(&no_overflow); |
1329 __ dsra(result, result, shift); | 1339 __ dsra(result, result, shift); |
1330 __ bind(&done); | 1340 __ bind(&done); |
1331 } | 1341 } |
1332 | 1342 |
1333 | 1343 |
1334 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1344 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
1335 Register dividend = ToRegister(instr->dividend()); | 1345 Register dividend = ToRegister(instr->dividend()); |
1336 int32_t divisor = instr->divisor(); | 1346 int32_t divisor = instr->divisor(); |
1337 Register result = ToRegister(instr->result()); | 1347 Register result = ToRegister(instr->result()); |
1338 DCHECK(!dividend.is(result)); | 1348 DCHECK(!dividend.is(result)); |
1339 | 1349 |
1340 if (divisor == 0) { | 1350 if (divisor == 0) { |
1341 DeoptimizeIf(al, instr, "division by zero"); | 1351 DeoptimizeIf(al, instr, Deoptimizer::kDivisionByZero); |
1342 return; | 1352 return; |
1343 } | 1353 } |
1344 | 1354 |
1345 // Check for (0 / -x) that will produce negative zero. | 1355 // Check for (0 / -x) that will produce negative zero. |
1346 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1356 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
1347 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1357 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1348 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1358 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1359 Operand(zero_reg)); |
1349 } | 1360 } |
1350 | 1361 |
1351 // Easy case: We need no dynamic check for the dividend and the flooring | 1362 // Easy case: We need no dynamic check for the dividend and the flooring |
1352 // division is the same as the truncating division. | 1363 // division is the same as the truncating division. |
1353 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1364 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
1354 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1365 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
1355 __ TruncatingDiv(result, dividend, Abs(divisor)); | 1366 __ TruncatingDiv(result, dividend, Abs(divisor)); |
1356 if (divisor < 0) __ Dsubu(result, zero_reg, result); | 1367 if (divisor < 0) __ Dsubu(result, zero_reg, result); |
1357 return; | 1368 return; |
1358 } | 1369 } |
(...skipping 23 matching lines...) |
1382 Register dividend = ToRegister(instr->dividend()); | 1393 Register dividend = ToRegister(instr->dividend()); |
1383 Register divisor = ToRegister(instr->divisor()); | 1394 Register divisor = ToRegister(instr->divisor()); |
1384 const Register result = ToRegister(instr->result()); | 1395 const Register result = ToRegister(instr->result()); |
1385 | 1396 |
1386 // On MIPS div is asynchronous - it will run in the background while we | 1397 // On MIPS div is asynchronous - it will run in the background while we |
1387 // check for special cases. | 1398 // check for special cases. |
1388 __ Ddiv(result, dividend, divisor); | 1399 __ Ddiv(result, dividend, divisor); |
1389 | 1400 |
1390 // Check for x / 0. | 1401 // Check for x / 0. |
1391 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1402 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1392 DeoptimizeIf(eq, instr, "division by zero", divisor, Operand(zero_reg)); | 1403 DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, divisor, |
| 1404 Operand(zero_reg)); |
1393 } | 1405 } |
1394 | 1406 |
1395 // Check for (0 / -x) that will produce negative zero. | 1407 // Check for (0 / -x) that will produce negative zero. |
1396 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1408 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1397 Label left_not_zero; | 1409 Label left_not_zero; |
1398 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 1410 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); |
1399 DeoptimizeIf(lt, instr, "minus zero", divisor, Operand(zero_reg)); | 1411 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, divisor, |
| 1412 Operand(zero_reg)); |
1400 __ bind(&left_not_zero); | 1413 __ bind(&left_not_zero); |
1401 } | 1414 } |
1402 | 1415 |
1403 // Check for (kMinInt / -1). | 1416 // Check for (kMinInt / -1). |
1404 if (hdiv->CheckFlag(HValue::kCanOverflow) && | 1417 if (hdiv->CheckFlag(HValue::kCanOverflow) && |
1405 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1418 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1406 Label left_not_min_int; | 1419 Label left_not_min_int; |
1407 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 1420 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); |
1408 DeoptimizeIf(eq, instr, "overflow", divisor, Operand(-1)); | 1421 DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, divisor, Operand(-1)); |
1409 __ bind(&left_not_min_int); | 1422 __ bind(&left_not_min_int); |
1410 } | 1423 } |
1411 | 1424 |
1412 // We performed a truncating division. Correct the result if necessary. | 1425 // We performed a truncating division. Correct the result if necessary. |
1413 Label done; | 1426 Label done; |
1414 Register remainder = scratch0(); | 1427 Register remainder = scratch0(); |
1415 if (kArchVariant != kMips64r6) { | 1428 if (kArchVariant != kMips64r6) { |
1416 __ mfhi(remainder); | 1429 __ mfhi(remainder); |
1417 } else { | 1430 } else { |
1418 __ dmod(remainder, dividend, divisor); | 1431 __ dmod(remainder, dividend, divisor); |
(...skipping 16 matching lines...) |
1435 bool bailout_on_minus_zero = | 1448 bool bailout_on_minus_zero = |
1436 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 1449 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
1437 bool overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1450 bool overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
1438 | 1451 |
1439 if (right_op->IsConstantOperand()) { | 1452 if (right_op->IsConstantOperand()) { |
1440 int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); | 1453 int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); |
1441 | 1454 |
1442 if (bailout_on_minus_zero && (constant < 0)) { | 1455 if (bailout_on_minus_zero && (constant < 0)) { |
1443 // The case of a null constant will be handled separately. | 1456 // The case of a null constant will be handled separately. |
1444 // If constant is negative and left is null, the result should be -0. | 1457 // If constant is negative and left is null, the result should be -0. |
1445 DeoptimizeIf(eq, instr, "minus zero", left, Operand(zero_reg)); | 1458 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, left, Operand(zero_reg)); |
1446 } | 1459 } |
1447 | 1460 |
1448 switch (constant) { | 1461 switch (constant) { |
1449 case -1: | 1462 case -1: |
1450 if (overflow) { | 1463 if (overflow) { |
1451 __ SubuAndCheckForOverflow(result, zero_reg, left, scratch); | 1464 __ SubuAndCheckForOverflow(result, zero_reg, left, scratch); |
1452 DeoptimizeIf(gt, instr, "overflow", scratch, Operand(kMaxInt)); | 1465 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, scratch, |
| 1466 Operand(kMaxInt)); |
1453 } else { | 1467 } else { |
1454 __ Dsubu(result, zero_reg, left); | 1468 __ Dsubu(result, zero_reg, left); |
1455 } | 1469 } |
1456 break; | 1470 break; |
1457 case 0: | 1471 case 0: |
1458 if (bailout_on_minus_zero) { | 1472 if (bailout_on_minus_zero) { |
1459 // If left is strictly negative and the constant is null, the | 1473 // If left is strictly negative and the constant is null, the |
1460 // result is -0. Deoptimize if required, otherwise return 0. | 1474 // result is -0. Deoptimize if required, otherwise return 0. |
1461 DeoptimizeIf(lt, instr, "minus zero", left, Operand(zero_reg)); | 1475 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, left, |
| 1476 Operand(zero_reg)); |
1462 } | 1477 } |
1463 __ mov(result, zero_reg); | 1478 __ mov(result, zero_reg); |
1464 break; | 1479 break; |
1465 case 1: | 1480 case 1: |
1466 // Nothing to do. | 1481 // Nothing to do. |
1467 __ Move(result, left); | 1482 __ Move(result, left); |
1468 break; | 1483 break; |
1469 default: | 1484 default: |
1470 // Multiplying by powers of two and powers of two plus or minus | 1485 // Multiplying by powers of two and powers of two plus or minus |
1471 // one can be done faster with shifted operands. | 1486 // one can be done faster with shifted operands. |
(...skipping 34 matching lines...) |
1506 if (instr->hydrogen()->representation().IsSmi()) { | 1521 if (instr->hydrogen()->representation().IsSmi()) { |
1507 __ Dmulh(result, left, right); | 1522 __ Dmulh(result, left, right); |
1508 } else { | 1523 } else { |
1509 __ Dmul(result, left, right); | 1524 __ Dmul(result, left, right); |
1510 } | 1525 } |
1511 __ dsra32(scratch, result, 0); | 1526 __ dsra32(scratch, result, 0); |
1512 __ sra(at, result, 31); | 1527 __ sra(at, result, 31); |
1513 if (instr->hydrogen()->representation().IsSmi()) { | 1528 if (instr->hydrogen()->representation().IsSmi()) { |
1514 __ SmiTag(result); | 1529 __ SmiTag(result); |
1515 } | 1530 } |
1516 DeoptimizeIf(ne, instr, "overflow", scratch, Operand(at)); | 1531 DeoptimizeIf(ne, instr, Deoptimizer::kOverflow, scratch, Operand(at)); |
1517 } else { | 1532 } else { |
1518 if (instr->hydrogen()->representation().IsSmi()) { | 1533 if (instr->hydrogen()->representation().IsSmi()) { |
1519 __ SmiUntag(result, left); | 1534 __ SmiUntag(result, left); |
1520 __ Dmul(result, result, right); | 1535 __ Dmul(result, result, right); |
1521 } else { | 1536 } else { |
1522 __ Dmul(result, left, right); | 1537 __ Dmul(result, left, right); |
1523 } | 1538 } |
1524 } | 1539 } |
1525 | 1540 |
1526 if (bailout_on_minus_zero) { | 1541 if (bailout_on_minus_zero) { |
1527 Label done; | 1542 Label done; |
1528 __ Xor(at, left, right); | 1543 __ Xor(at, left, right); |
1529 __ Branch(&done, ge, at, Operand(zero_reg)); | 1544 __ Branch(&done, ge, at, Operand(zero_reg)); |
1530 // Bail out if the result is minus zero. | 1545 // Bail out if the result is minus zero. |
1531 DeoptimizeIf(eq, instr, "minus zero", result, Operand(zero_reg)); | 1546 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result, |
| 1547 Operand(zero_reg)); |
1532 __ bind(&done); | 1548 __ bind(&done); |
1533 } | 1549 } |
1534 } | 1550 } |
1535 } | 1551 } |
1536 | 1552 |
1537 | 1553 |
1538 void LCodeGen::DoBitI(LBitI* instr) { | 1554 void LCodeGen::DoBitI(LBitI* instr) { |
1539 LOperand* left_op = instr->left(); | 1555 LOperand* left_op = instr->left(); |
1540 LOperand* right_op = instr->right(); | 1556 LOperand* right_op = instr->right(); |
1541 DCHECK(left_op->IsRegister()); | 1557 DCHECK(left_op->IsRegister()); |
(...skipping 43 matching lines...) |
1585 case Token::ROR: | 1601 case Token::ROR: |
1586 __ Ror(result, left, Operand(ToRegister(right_op))); | 1602 __ Ror(result, left, Operand(ToRegister(right_op))); |
1587 break; | 1603 break; |
1588 case Token::SAR: | 1604 case Token::SAR: |
1589 __ srav(result, left, ToRegister(right_op)); | 1605 __ srav(result, left, ToRegister(right_op)); |
1590 break; | 1606 break; |
1591 case Token::SHR: | 1607 case Token::SHR: |
1592 __ srlv(result, left, ToRegister(right_op)); | 1608 __ srlv(result, left, ToRegister(right_op)); |
1593 if (instr->can_deopt()) { | 1609 if (instr->can_deopt()) { |
1594 // TODO(yy): (-1) >>> 0. anything else? | 1610 // TODO(yy): (-1) >>> 0. anything else? |
1595 DeoptimizeIf(lt, instr, "negative value", result, Operand(zero_reg)); | 1611 DeoptimizeIf(lt, instr, Deoptimizer::kNegativeValue, result, |
1596 DeoptimizeIf(gt, instr, "negative value", result, Operand(kMaxInt)); | 1612 Operand(zero_reg)); |
| 1613 DeoptimizeIf(gt, instr, Deoptimizer::kNegativeValue, result, |
| 1614 Operand(kMaxInt)); |
1597 } | 1615 } |
1598 break; | 1616 break; |
1599 case Token::SHL: | 1617 case Token::SHL: |
1600 __ sllv(result, left, ToRegister(right_op)); | 1618 __ sllv(result, left, ToRegister(right_op)); |
1601 break; | 1619 break; |
1602 default: | 1620 default: |
1603 UNREACHABLE(); | 1621 UNREACHABLE(); |
1604 break; | 1622 break; |
1605 } | 1623 } |
1606 } else { | 1624 } else { |
(...skipping 14 matching lines...) |
1621 } else { | 1639 } else { |
1622 __ Move(result, left); | 1640 __ Move(result, left); |
1623 } | 1641 } |
1624 break; | 1642 break; |
1625 case Token::SHR: | 1643 case Token::SHR: |
1626 if (shift_count != 0) { | 1644 if (shift_count != 0) { |
1627 __ srl(result, left, shift_count); | 1645 __ srl(result, left, shift_count); |
1628 } else { | 1646 } else { |
1629 if (instr->can_deopt()) { | 1647 if (instr->can_deopt()) { |
1630 __ And(at, left, Operand(0x80000000)); | 1648 __ And(at, left, Operand(0x80000000)); |
1631 DeoptimizeIf(ne, instr, "negative value", at, Operand(zero_reg)); | 1649 DeoptimizeIf(ne, instr, Deoptimizer::kNegativeValue, at, |
| 1650 Operand(zero_reg)); |
1632 } | 1651 } |
1633 __ Move(result, left); | 1652 __ Move(result, left); |
1634 } | 1653 } |
1635 break; | 1654 break; |
1636 case Token::SHL: | 1655 case Token::SHL: |
1637 if (shift_count != 0) { | 1656 if (shift_count != 0) { |
1638 if (instr->hydrogen_value()->representation().IsSmi()) { | 1657 if (instr->hydrogen_value()->representation().IsSmi()) { |
1639 __ dsll(result, left, shift_count); | 1658 __ dsll(result, left, shift_count); |
1640 } else { | 1659 } else { |
1641 __ sll(result, left, shift_count); | 1660 __ sll(result, left, shift_count); |
(...skipping 35 matching lines...) |
1677 overflow); // Reg at also used as scratch. | 1696 overflow); // Reg at also used as scratch. |
1678 } else { | 1697 } else { |
1679 DCHECK(right->IsRegister()); | 1698 DCHECK(right->IsRegister()); |
1680 // Due to overflow check macros not supporting constant operands, | 1699 // Due to overflow check macros not supporting constant operands, |
1681 // handling the IsConstantOperand case was moved to prev if clause. | 1700 // handling the IsConstantOperand case was moved to prev if clause. |
1682 __ SubuAndCheckForOverflow(ToRegister(result), | 1701 __ SubuAndCheckForOverflow(ToRegister(result), |
1683 ToRegister(left), | 1702 ToRegister(left), |
1684 ToRegister(right), | 1703 ToRegister(right), |
1685 overflow); // Reg at also used as scratch. | 1704 overflow); // Reg at also used as scratch. |
1686 } | 1705 } |
1687 DeoptimizeIf(lt, instr, "overflow", overflow, Operand(zero_reg)); | 1706 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, overflow, |
| 1707 Operand(zero_reg)); |
1688 if (!instr->hydrogen()->representation().IsSmi()) { | 1708 if (!instr->hydrogen()->representation().IsSmi()) { |
1689 DeoptimizeIf(gt, instr, "overflow", ToRegister(result), Operand(kMaxInt)); | 1709 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, ToRegister(result), |
1690 DeoptimizeIf(lt, instr, "overflow", ToRegister(result), Operand(kMinInt)); | 1710 Operand(kMaxInt)); |
| 1711 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, ToRegister(result), |
| 1712 Operand(kMinInt)); |
1691 } | 1713 } |
1692 } | 1714 } |
1693 } | 1715 } |
1694 | 1716 |
1695 | 1717 |
1696 void LCodeGen::DoConstantI(LConstantI* instr) { | 1718 void LCodeGen::DoConstantI(LConstantI* instr) { |
1697 __ li(ToRegister(instr->result()), Operand(instr->value())); | 1719 __ li(ToRegister(instr->result()), Operand(instr->value())); |
1698 } | 1720 } |
1699 | 1721 |
1700 | 1722 |
(...skipping 34 matching lines...) |
1735 Register result = ToRegister(instr->result()); | 1757 Register result = ToRegister(instr->result()); |
1736 Register scratch = ToRegister(instr->temp()); | 1758 Register scratch = ToRegister(instr->temp()); |
1737 Smi* index = instr->index(); | 1759 Smi* index = instr->index(); |
1738 Label runtime, done; | 1760 Label runtime, done; |
1739 DCHECK(object.is(a0)); | 1761 DCHECK(object.is(a0)); |
1740 DCHECK(result.is(v0)); | 1762 DCHECK(result.is(v0)); |
1741 DCHECK(!scratch.is(scratch0())); | 1763 DCHECK(!scratch.is(scratch0())); |
1742 DCHECK(!scratch.is(object)); | 1764 DCHECK(!scratch.is(object)); |
1743 | 1765 |
1744 __ SmiTst(object, at); | 1766 __ SmiTst(object, at); |
1745 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 1767 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
1746 __ GetObjectType(object, scratch, scratch); | 1768 __ GetObjectType(object, scratch, scratch); |
1747 DeoptimizeIf(ne, instr, "not a date object", scratch, Operand(JS_DATE_TYPE)); | 1769 DeoptimizeIf(ne, instr, Deoptimizer::kNotADateObject, scratch, |
| 1770 Operand(JS_DATE_TYPE)); |
1748 | 1771 |
1749 if (index->value() == 0) { | 1772 if (index->value() == 0) { |
1750 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset)); | 1773 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset)); |
1751 } else { | 1774 } else { |
1752 if (index->value() < JSDate::kFirstUncachedField) { | 1775 if (index->value() < JSDate::kFirstUncachedField) { |
1753 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 1776 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
1754 __ li(scratch, Operand(stamp)); | 1777 __ li(scratch, Operand(stamp)); |
1755 __ ld(scratch, MemOperand(scratch)); | 1778 __ ld(scratch, MemOperand(scratch)); |
1756 __ ld(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset)); | 1779 __ ld(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset)); |
1757 __ Branch(&runtime, ne, scratch, Operand(scratch0())); | 1780 __ Branch(&runtime, ne, scratch, Operand(scratch0())); |
(...skipping 114 matching lines...) |
1872 overflow); // Reg at also used as scratch. | 1895 overflow); // Reg at also used as scratch. |
1873 } else { | 1896 } else { |
1874 DCHECK(right->IsRegister()); | 1897 DCHECK(right->IsRegister()); |
1875 // Due to overflow check macros not supporting constant operands, | 1898 // Due to overflow check macros not supporting constant operands, |
1876 // handling the IsConstantOperand case was moved to prev if clause. | 1899 // handling the IsConstantOperand case was moved to prev if clause. |
1877 __ AdduAndCheckForOverflow(ToRegister(result), | 1900 __ AdduAndCheckForOverflow(ToRegister(result), |
1878 ToRegister(left), | 1901 ToRegister(left), |
1879 ToRegister(right), | 1902 ToRegister(right), |
1880 overflow); // Reg at also used as scratch. | 1903 overflow); // Reg at also used as scratch. |
1881 } | 1904 } |
1882 DeoptimizeIf(lt, instr, "overflow", overflow, Operand(zero_reg)); | 1905 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, overflow, |
| 1906 Operand(zero_reg)); |
1883 // if not smi, it must int32. | 1907 // if not smi, it must int32. |
1884 if (!instr->hydrogen()->representation().IsSmi()) { | 1908 if (!instr->hydrogen()->representation().IsSmi()) { |
1885 DeoptimizeIf(gt, instr, "overflow", ToRegister(result), Operand(kMaxInt)); | 1909 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, ToRegister(result), |
1886 DeoptimizeIf(lt, instr, "overflow", ToRegister(result), Operand(kMinInt)); | 1910 Operand(kMaxInt)); |
| 1911 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, ToRegister(result), |
| 1912 Operand(kMinInt)); |
1887 } | 1913 } |
1888 } | 1914 } |
1889 } | 1915 } |
1890 | 1916 |
1891 | 1917 |
1892 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1918 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1893 LOperand* left = instr->left(); | 1919 LOperand* left = instr->left(); |
1894 LOperand* right = instr->right(); | 1920 LOperand* right = instr->right(); |
1895 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1921 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
1896 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; | 1922 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; |
(...skipping 241 matching lines...) |
2138 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); | 2164 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); |
2139 } | 2165 } |
2140 | 2166 |
2141 if (expected.Contains(ToBooleanStub::SMI)) { | 2167 if (expected.Contains(ToBooleanStub::SMI)) { |
2142 // Smis: 0 -> false, all other -> true. | 2168 // Smis: 0 -> false, all other -> true. |
2143 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg)); | 2169 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg)); |
2144 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2170 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
2145 } else if (expected.NeedsMap()) { | 2171 } else if (expected.NeedsMap()) { |
2146 // If we need a map later and have a Smi -> deopt. | 2172 // If we need a map later and have a Smi -> deopt. |
2147 __ SmiTst(reg, at); | 2173 __ SmiTst(reg, at); |
2148 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 2174 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
2149 } | 2175 } |
2150 | 2176 |
2151 const Register map = scratch0(); | 2177 const Register map = scratch0(); |
2152 if (expected.NeedsMap()) { | 2178 if (expected.NeedsMap()) { |
2153 __ ld(map, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2179 __ ld(map, FieldMemOperand(reg, HeapObject::kMapOffset)); |
2154 if (expected.CanBeUndetectable()) { | 2180 if (expected.CanBeUndetectable()) { |
2155 // Undetectable -> false. | 2181 // Undetectable -> false. |
2156 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); | 2182 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); |
2157 __ And(at, at, Operand(1 << Map::kIsUndetectable)); | 2183 __ And(at, at, Operand(1 << Map::kIsUndetectable)); |
2158 __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg)); | 2184 __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg)); |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2194 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), | 2220 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), |
2195 ne, dbl_scratch, kDoubleRegZero); | 2221 ne, dbl_scratch, kDoubleRegZero); |
2196 // Falls through if dbl_scratch == 0. | 2222 // Falls through if dbl_scratch == 0. |
2197 __ Branch(instr->FalseLabel(chunk_)); | 2223 __ Branch(instr->FalseLabel(chunk_)); |
2198 __ bind(&not_heap_number); | 2224 __ bind(&not_heap_number); |
2199 } | 2225 } |
2200 | 2226 |
2201 if (!expected.IsGeneric()) { | 2227 if (!expected.IsGeneric()) { |
2202 // We've seen something for the first time -> deopt. | 2228 // We've seen something for the first time -> deopt. |
2203 // This can only happen if we are not generic already. | 2229 // This can only happen if we are not generic already. |
2204 DeoptimizeIf(al, instr, "unexpected object", zero_reg, | 2230 DeoptimizeIf(al, instr, Deoptimizer::kUnexpectedObject, zero_reg, |
2205 Operand(zero_reg)); | 2231 Operand(zero_reg)); |
2206 } | 2232 } |
2207 } | 2233 } |
2208 } | 2234 } |
2209 } | 2235 } |
2210 | 2236 |
2211 | 2237 |
2212 void LCodeGen::EmitGoto(int block) { | 2238 void LCodeGen::EmitGoto(int block) { |
2213 if (!IsNextEmittedBlock(block)) { | 2239 if (!IsNextEmittedBlock(block)) { |
2214 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2240 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
(...skipping 629 matching lines...) |
2844 } | 2870 } |
2845 } | 2871 } |
2846 | 2872 |
2847 | 2873 |
2848 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { | 2874 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { |
2849 Register result = ToRegister(instr->result()); | 2875 Register result = ToRegister(instr->result()); |
2850 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); | 2876 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); |
2851 __ ld(result, FieldMemOperand(at, Cell::kValueOffset)); | 2877 __ ld(result, FieldMemOperand(at, Cell::kValueOffset)); |
2852 if (instr->hydrogen()->RequiresHoleCheck()) { | 2878 if (instr->hydrogen()->RequiresHoleCheck()) { |
2853 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2879 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2854 DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 2880 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); |
2855 } | 2881 } |
2856 } | 2882 } |
2857 | 2883 |
2858 | 2884 |
2859 template <class T> | 2885 template <class T> |
2860 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { | 2886 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { |
2861 DCHECK(FLAG_vector_ics); | 2887 DCHECK(FLAG_vector_ics); |
2862 Register vector_register = ToRegister(instr->temp_vector()); | 2888 Register vector_register = ToRegister(instr->temp_vector()); |
2863 Register slot_register = VectorLoadICDescriptor::SlotRegister(); | 2889 Register slot_register = VectorLoadICDescriptor::SlotRegister(); |
2864 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); | 2890 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); |
(...skipping 34 matching lines...) |
2899 | 2925 |
2900 // If the cell we are storing to contains the hole it could have | 2926 // If the cell we are storing to contains the hole it could have |
2901 // been deleted from the property dictionary. In that case, we need | 2927 // been deleted from the property dictionary. In that case, we need |
2902 // to update the property details in the property dictionary to mark | 2928 // to update the property details in the property dictionary to mark |
2903 // it as no longer deleted. | 2929 // it as no longer deleted. |
2904 if (instr->hydrogen()->RequiresHoleCheck()) { | 2930 if (instr->hydrogen()->RequiresHoleCheck()) { |
2905 // We use a temp to check the payload. | 2931 // We use a temp to check the payload. |
2906 Register payload = ToRegister(instr->temp()); | 2932 Register payload = ToRegister(instr->temp()); |
2907 __ ld(payload, FieldMemOperand(cell, Cell::kValueOffset)); | 2933 __ ld(payload, FieldMemOperand(cell, Cell::kValueOffset)); |
2908 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2934 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2909 DeoptimizeIf(eq, instr, "hole", payload, Operand(at)); | 2935 DeoptimizeIf(eq, instr, Deoptimizer::kHole, payload, Operand(at)); |
2910 } | 2936 } |
2911 | 2937 |
2912 // Store the value. | 2938 // Store the value. |
2913 __ sd(value, FieldMemOperand(cell, Cell::kValueOffset)); | 2939 __ sd(value, FieldMemOperand(cell, Cell::kValueOffset)); |
2914 // Cells are always rescanned, so no write barrier here. | 2940 // Cells are always rescanned, so no write barrier here. |
2915 } | 2941 } |
2916 | 2942 |
2917 | 2943 |
2918 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2944 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
2919 Register context = ToRegister(instr->context()); | 2945 Register context = ToRegister(instr->context()); |
2920 Register result = ToRegister(instr->result()); | 2946 Register result = ToRegister(instr->result()); |
2921 | 2947 |
2922 __ ld(result, ContextOperand(context, instr->slot_index())); | 2948 __ ld(result, ContextOperand(context, instr->slot_index())); |
2923 if (instr->hydrogen()->RequiresHoleCheck()) { | 2949 if (instr->hydrogen()->RequiresHoleCheck()) { |
2924 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2950 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2925 | 2951 |
2926 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2952 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2927 DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 2953 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); |
2928 } else { | 2954 } else { |
2929 Label is_not_hole; | 2955 Label is_not_hole; |
2930 __ Branch(&is_not_hole, ne, result, Operand(at)); | 2956 __ Branch(&is_not_hole, ne, result, Operand(at)); |
2931 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 2957 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
2932 __ bind(&is_not_hole); | 2958 __ bind(&is_not_hole); |
2933 } | 2959 } |
2934 } | 2960 } |
2935 } | 2961 } |
2936 | 2962 |
2937 | 2963 |
2938 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2964 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
2939 Register context = ToRegister(instr->context()); | 2965 Register context = ToRegister(instr->context()); |
2940 Register value = ToRegister(instr->value()); | 2966 Register value = ToRegister(instr->value()); |
2941 Register scratch = scratch0(); | 2967 Register scratch = scratch0(); |
2942 MemOperand target = ContextOperand(context, instr->slot_index()); | 2968 MemOperand target = ContextOperand(context, instr->slot_index()); |
2943 | 2969 |
2944 Label skip_assignment; | 2970 Label skip_assignment; |
2945 | 2971 |
2946 if (instr->hydrogen()->RequiresHoleCheck()) { | 2972 if (instr->hydrogen()->RequiresHoleCheck()) { |
2947 __ ld(scratch, target); | 2973 __ ld(scratch, target); |
2948 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2974 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2949 | 2975 |
2950 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2976 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2951 DeoptimizeIf(eq, instr, "hole", scratch, Operand(at)); | 2977 DeoptimizeIf(eq, instr, Deoptimizer::kHole, scratch, Operand(at)); |
2952 } else { | 2978 } else { |
2953 __ Branch(&skip_assignment, ne, scratch, Operand(at)); | 2979 __ Branch(&skip_assignment, ne, scratch, Operand(at)); |
2954 } | 2980 } |
2955 } | 2981 } |
2956 | 2982 |
2957 __ sd(value, target); | 2983 __ sd(value, target); |
2958 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2984 if (instr->hydrogen()->NeedsWriteBarrier()) { |
2959 SmiCheck check_needed = | 2985 SmiCheck check_needed = |
2960 instr->hydrogen()->value()->type().IsHeapObject() | 2986 instr->hydrogen()->value()->type().IsHeapObject() |
2961 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2987 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3036 Register scratch = scratch0(); | 3062 Register scratch = scratch0(); |
3037 Register function = ToRegister(instr->function()); | 3063 Register function = ToRegister(instr->function()); |
3038 Register result = ToRegister(instr->result()); | 3064 Register result = ToRegister(instr->result()); |
3039 | 3065 |
3040 // Get the prototype or initial map from the function. | 3066 // Get the prototype or initial map from the function. |
3041 __ ld(result, | 3067 __ ld(result, |
3042 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 3068 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
3043 | 3069 |
3044 // Check that the function has a prototype or an initial map. | 3070 // Check that the function has a prototype or an initial map. |
3045 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3071 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
3046 DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 3072 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); |
3047 | 3073 |
3048 // If the function does not have an initial map, we're done. | 3074 // If the function does not have an initial map, we're done. |
3049 Label done; | 3075 Label done; |
3050 __ GetObjectType(result, scratch, scratch); | 3076 __ GetObjectType(result, scratch, scratch); |
3051 __ Branch(&done, ne, scratch, Operand(MAP_TYPE)); | 3077 __ Branch(&done, ne, scratch, Operand(MAP_TYPE)); |
3052 | 3078 |
3053 // Get the prototype from the initial map. | 3079 // Get the prototype from the initial map. |
3054 __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset)); | 3080 __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset)); |
3055 | 3081 |
3056 // All done. | 3082 // All done. |
(...skipping 125 matching lines...) |
3182 __ lhu(result, mem_operand); | 3208 __ lhu(result, mem_operand); |
3183 break; | 3209 break; |
3184 case EXTERNAL_INT32_ELEMENTS: | 3210 case EXTERNAL_INT32_ELEMENTS: |
3185 case INT32_ELEMENTS: | 3211 case INT32_ELEMENTS: |
3186 __ lw(result, mem_operand); | 3212 __ lw(result, mem_operand); |
3187 break; | 3213 break; |
3188 case EXTERNAL_UINT32_ELEMENTS: | 3214 case EXTERNAL_UINT32_ELEMENTS: |
3189 case UINT32_ELEMENTS: | 3215 case UINT32_ELEMENTS: |
3190 __ lw(result, mem_operand); | 3216 __ lw(result, mem_operand); |
3191 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 3217 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { |
3192 DeoptimizeIf(Ugreater_equal, instr, "negative value", result, | 3218 DeoptimizeIf(Ugreater_equal, instr, Deoptimizer::kNegativeValue, |
3193 Operand(0x80000000)); | 3219 result, Operand(0x80000000)); |
3194 } | 3220 } |
3195 break; | 3221 break; |
3196 case FLOAT32_ELEMENTS: | 3222 case FLOAT32_ELEMENTS: |
3197 case FLOAT64_ELEMENTS: | 3223 case FLOAT64_ELEMENTS: |
3198 case EXTERNAL_FLOAT32_ELEMENTS: | 3224 case EXTERNAL_FLOAT32_ELEMENTS: |
3199 case EXTERNAL_FLOAT64_ELEMENTS: | 3225 case EXTERNAL_FLOAT64_ELEMENTS: |
3200 case FAST_DOUBLE_ELEMENTS: | 3226 case FAST_DOUBLE_ELEMENTS: |
3201 case FAST_ELEMENTS: | 3227 case FAST_ELEMENTS: |
3202 case FAST_SMI_ELEMENTS: | 3228 case FAST_SMI_ELEMENTS: |
3203 case FAST_HOLEY_DOUBLE_ELEMENTS: | 3229 case FAST_HOLEY_DOUBLE_ELEMENTS: |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3243 } else { | 3269 } else { |
3244 __ dsra(at, key, -shift_size); | 3270 __ dsra(at, key, -shift_size); |
3245 } | 3271 } |
3246 __ Daddu(scratch, scratch, at); | 3272 __ Daddu(scratch, scratch, at); |
3247 } | 3273 } |
3248 | 3274 |
3249 __ ldc1(result, MemOperand(scratch)); | 3275 __ ldc1(result, MemOperand(scratch)); |
3250 | 3276 |
3251 if (instr->hydrogen()->RequiresHoleCheck()) { | 3277 if (instr->hydrogen()->RequiresHoleCheck()) { |
3252 __ lwu(scratch, MemOperand(scratch, sizeof(kHoleNanLower32))); | 3278 __ lwu(scratch, MemOperand(scratch, sizeof(kHoleNanLower32))); |
3253 DeoptimizeIf(eq, instr, "hole", scratch, Operand(kHoleNanUpper32)); | 3279 DeoptimizeIf(eq, instr, Deoptimizer::kHole, scratch, Operand(kHoleNanUpper32)); |
3254 } | 3280 } |
3255 } | 3281 } |
3256 | 3282 |
3257 | 3283 |
3258 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 3284 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
3259 HLoadKeyed* hinstr = instr->hydrogen(); | 3285 HLoadKeyed* hinstr = instr->hydrogen(); |
3260 Register elements = ToRegister(instr->elements()); | 3286 Register elements = ToRegister(instr->elements()); |
3261 Register result = ToRegister(instr->result()); | 3287 Register result = ToRegister(instr->result()); |
3262 Register scratch = scratch0(); | 3288 Register scratch = scratch0(); |
3263 Register store_base = scratch; | 3289 Register store_base = scratch; |
(...skipping 33 matching lines...) |
3297 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 3323 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
3298 offset += kPointerSize / 2; | 3324 offset += kPointerSize / 2; |
3299 } | 3325 } |
3300 | 3326 |
3301 __ Load(result, MemOperand(store_base, offset), representation); | 3327 __ Load(result, MemOperand(store_base, offset), representation); |
3302 | 3328 |
3303 // Check for the hole value. | 3329 // Check for the hole value. |
3304 if (hinstr->RequiresHoleCheck()) { | 3330 if (hinstr->RequiresHoleCheck()) { |
3305 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 3331 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { |
3306 __ SmiTst(result, scratch); | 3332 __ SmiTst(result, scratch); |
3307 DeoptimizeIf(ne, instr, "not a Smi", scratch, Operand(zero_reg)); | 3333 DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, scratch, |
| 3334 Operand(zero_reg)); |
3308 } else { | 3335 } else { |
3309 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | 3336 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); |
3310 DeoptimizeIf(eq, instr, "hole", result, Operand(scratch)); | 3337 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(scratch)); |
3311 } | 3338 } |
3312 } | 3339 } |
3313 } | 3340 } |
3314 | 3341 |
3315 | 3342 |
3316 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 3343 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { |
3317 if (instr->is_typed_elements()) { | 3344 if (instr->is_typed_elements()) { |
3318 DoLoadKeyedExternalArray(instr); | 3345 DoLoadKeyedExternalArray(instr); |
3319 } else if (instr->hydrogen()->representation().IsDouble()) { | 3346 } else if (instr->hydrogen()->representation().IsDouble()) { |
3320 DoLoadKeyedFixedDoubleArray(instr); | 3347 DoLoadKeyedFixedDoubleArray(instr); |
(...skipping 135 matching lines...)
3456 } | 3483 } |
3457 | 3484 |
3458 // Normal function. Replace undefined or null with global receiver. | 3485 // Normal function. Replace undefined or null with global receiver. |
3459 __ LoadRoot(scratch, Heap::kNullValueRootIndex); | 3486 __ LoadRoot(scratch, Heap::kNullValueRootIndex); |
3460 __ Branch(&global_object, eq, receiver, Operand(scratch)); | 3487 __ Branch(&global_object, eq, receiver, Operand(scratch)); |
3461 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); | 3488 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); |
3462 __ Branch(&global_object, eq, receiver, Operand(scratch)); | 3489 __ Branch(&global_object, eq, receiver, Operand(scratch)); |
3463 | 3490 |
3464 // Deoptimize if the receiver is not a JS object. | 3491 // Deoptimize if the receiver is not a JS object. |
3465 __ SmiTst(receiver, scratch); | 3492 __ SmiTst(receiver, scratch); |
3466 DeoptimizeIf(eq, instr, "Smi", scratch, Operand(zero_reg)); | 3493 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, scratch, Operand(zero_reg)); |
3467 | 3494 |
3468 __ GetObjectType(receiver, scratch, scratch); | 3495 __ GetObjectType(receiver, scratch, scratch); |
3469 DeoptimizeIf(lt, instr, "not a JavaScript object", scratch, | 3496 DeoptimizeIf(lt, instr, Deoptimizer::kNotAJavaScriptObject, scratch, |
3470 Operand(FIRST_SPEC_OBJECT_TYPE)); | 3497 Operand(FIRST_SPEC_OBJECT_TYPE)); |
3471 __ Branch(&result_in_receiver); | 3498 __ Branch(&result_in_receiver); |
3472 | 3499 |
3473 __ bind(&global_object); | 3500 __ bind(&global_object); |
3474 __ ld(result, FieldMemOperand(function, JSFunction::kContextOffset)); | 3501 __ ld(result, FieldMemOperand(function, JSFunction::kContextOffset)); |
3475 __ ld(result, | 3502 __ ld(result, |
3476 ContextOperand(result, Context::GLOBAL_OBJECT_INDEX)); | 3503 ContextOperand(result, Context::GLOBAL_OBJECT_INDEX)); |
3477 __ ld(result, | 3504 __ ld(result, |
3478 FieldMemOperand(result, GlobalObject::kGlobalProxyOffset)); | 3505 FieldMemOperand(result, GlobalObject::kGlobalProxyOffset)); |
3479 | 3506 |
(...skipping 15 matching lines...)
3495 Register length = ToRegister(instr->length()); | 3522 Register length = ToRegister(instr->length()); |
3496 Register elements = ToRegister(instr->elements()); | 3523 Register elements = ToRegister(instr->elements()); |
3497 Register scratch = scratch0(); | 3524 Register scratch = scratch0(); |
3498 DCHECK(receiver.is(a0)); // Used for parameter count. | 3525 DCHECK(receiver.is(a0)); // Used for parameter count. |
3499 DCHECK(function.is(a1)); // Required by InvokeFunction. | 3526 DCHECK(function.is(a1)); // Required by InvokeFunction. |
3500 DCHECK(ToRegister(instr->result()).is(v0)); | 3527 DCHECK(ToRegister(instr->result()).is(v0)); |
3501 | 3528 |
3502 // Copy the arguments to this function possibly from the | 3529 // Copy the arguments to this function possibly from the |
3503 // adaptor frame below it. | 3530 // adaptor frame below it. |
3504 const uint32_t kArgumentsLimit = 1 * KB; | 3531 const uint32_t kArgumentsLimit = 1 * KB; |
3505 DeoptimizeIf(hi, instr, "too many arguments", length, | 3532 DeoptimizeIf(hi, instr, Deoptimizer::kTooManyArguments, length, |
3506 Operand(kArgumentsLimit)); | 3533 Operand(kArgumentsLimit)); |
3507 | 3534 |
3508 // Push the receiver and use the register to keep the original | 3535 // Push the receiver and use the register to keep the original |
3509 // number of arguments. | 3536 // number of arguments. |
3510 __ push(receiver); | 3537 __ push(receiver); |
3511 __ Move(receiver, length); | 3538 __ Move(receiver, length); |
3512 // The arguments are at a one pointer size offset from elements. | 3539 // The arguments are at a one pointer size offset from elements. |
3513 __ Daddu(elements, elements, Operand(1 * kPointerSize)); | 3540 __ Daddu(elements, elements, Operand(1 * kPointerSize)); |
3514 | 3541 |
3515 // Loop through the arguments pushing them onto the execution | 3542 // Loop through the arguments pushing them onto the execution |
(...skipping 105 matching lines...)
3621 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3648 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
3622 DCHECK(instr->context() != NULL); | 3649 DCHECK(instr->context() != NULL); |
3623 DCHECK(ToRegister(instr->context()).is(cp)); | 3650 DCHECK(ToRegister(instr->context()).is(cp)); |
3624 Register input = ToRegister(instr->value()); | 3651 Register input = ToRegister(instr->value()); |
3625 Register result = ToRegister(instr->result()); | 3652 Register result = ToRegister(instr->result()); |
3626 Register scratch = scratch0(); | 3653 Register scratch = scratch0(); |
3627 | 3654 |
3628 // Deoptimize if not a heap number. | 3655 // Deoptimize if not a heap number. |
3629 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 3656 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); |
3630 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3657 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
3631 DeoptimizeIf(ne, instr, "not a heap number", scratch, Operand(at)); | 3658 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch, Operand(at)); |
3632 | 3659 |
3633 Label done; | 3660 Label done; |
3634 Register exponent = scratch0(); | 3661 Register exponent = scratch0(); |
3635 scratch = no_reg; | 3662 scratch = no_reg; |
3636 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); | 3663 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); |
3637 // Check the sign of the argument. If the argument is positive, just | 3664 // Check the sign of the argument. If the argument is positive, just |
3638 // return it. | 3665 // return it. |
3639 __ Move(result, input); | 3666 __ Move(result, input); |
3640 __ And(at, exponent, Operand(HeapNumber::kSignMask)); | 3667 __ And(at, exponent, Operand(HeapNumber::kSignMask)); |
3641 __ Branch(&done, eq, at, Operand(zero_reg)); | 3668 __ Branch(&done, eq, at, Operand(zero_reg)); |
(...skipping 46 matching lines...)
3688 | 3715 |
3689 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3716 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
3690 Register input = ToRegister(instr->value()); | 3717 Register input = ToRegister(instr->value()); |
3691 Register result = ToRegister(instr->result()); | 3718 Register result = ToRegister(instr->result()); |
3692 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 3719 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
3693 Label done; | 3720 Label done; |
3694 __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg)); | 3721 __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg)); |
3695 __ mov(result, input); | 3722 __ mov(result, input); |
3696 __ dsubu(result, zero_reg, input); | 3723 __ dsubu(result, zero_reg, input); |
3697 // Overflow if result is still negative, i.e. 0x80000000. | 3724 // Overflow if result is still negative, i.e. 0x80000000. |
3698 DeoptimizeIf(lt, instr, "overflow", result, Operand(zero_reg)); | 3725 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, result, Operand(zero_reg)); |
3699 __ bind(&done); | 3726 __ bind(&done); |
3700 } | 3727 } |
3701 | 3728 |
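(Aside, not part of the change under review: the deopt on a still-negative result works because negating the minimum 32-bit integer wraps back to itself in two's complement. A hedged, portable C++ sketch of the same check; the helper name is illustrative only:)

  #include <cstdint>

  // Sketch only: wrap-around negation, as performed by dsubu above. Negating
  // INT32_MIN leaves the value negative, which is exactly the overflow case
  // the generated code deoptimizes on.
  int32_t AbsWithOverflowCheck(int32_t input, bool* overflowed) {
    int32_t result =
        input >= 0 ? input
                   : static_cast<int32_t>(0u - static_cast<uint32_t>(input));
    *overflowed = (input < 0 && result < 0);  // true only for INT32_MIN
    return result;
  }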
3702 | 3729 |
3703 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3730 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
3704 // Class for deferred case. | 3731 // Class for deferred case. |
3705 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { | 3732 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { |
3706 public: | 3733 public: |
3707 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) | 3734 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) |
3708 : LDeferredCode(codegen), instr_(instr) { } | 3735 : LDeferredCode(codegen), instr_(instr) { } |
(...skipping 34 matching lines...)
3743 Register except_flag = ToRegister(instr->temp()); | 3770 Register except_flag = ToRegister(instr->temp()); |
3744 | 3771 |
3745 __ EmitFPUTruncate(kRoundToMinusInf, | 3772 __ EmitFPUTruncate(kRoundToMinusInf, |
3746 result, | 3773 result, |
3747 input, | 3774 input, |
3748 scratch1, | 3775 scratch1, |
3749 double_scratch0(), | 3776 double_scratch0(), |
3750 except_flag); | 3777 except_flag); |
3751 | 3778 |
3752 // Deopt if the operation did not succeed. | 3779 // Deopt if the operation did not succeed. |
3753 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 3780 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
3754 Operand(zero_reg)); | 3781 Operand(zero_reg)); |
3755 | 3782 |
3756 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3783 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3757 // Test for -0. | 3784 // Test for -0. |
3758 Label done; | 3785 Label done; |
3759 __ Branch(&done, ne, result, Operand(zero_reg)); | 3786 __ Branch(&done, ne, result, Operand(zero_reg)); |
3760 __ mfhc1(scratch1, input); // Get exponent/sign bits. | 3787 __ mfhc1(scratch1, input); // Get exponent/sign bits. |
3761 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 3788 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
3762 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 3789 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 3790 Operand(zero_reg)); |
3763 __ bind(&done); | 3791 __ bind(&done); |
3764 } | 3792 } |
3765 } | 3793 } |
3766 | 3794 |
3767 | 3795 |
3768 void LCodeGen::DoMathRound(LMathRound* instr) { | 3796 void LCodeGen::DoMathRound(LMathRound* instr) { |
3769 DoubleRegister input = ToDoubleRegister(instr->value()); | 3797 DoubleRegister input = ToDoubleRegister(instr->value()); |
3770 Register result = ToRegister(instr->result()); | 3798 Register result = ToRegister(instr->result()); |
3771 DoubleRegister double_scratch1 = ToDoubleRegister(instr->temp()); | 3799 DoubleRegister double_scratch1 = ToDoubleRegister(instr->temp()); |
3772 Register scratch = scratch0(); | 3800 Register scratch = scratch0(); |
(...skipping 12 matching lines...)
3785 __ mov(result, zero_reg); | 3813 __ mov(result, zero_reg); |
3786 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3814 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3787 __ Branch(&check_sign_on_zero); | 3815 __ Branch(&check_sign_on_zero); |
3788 } else { | 3816 } else { |
3789 __ Branch(&done); | 3817 __ Branch(&done); |
3790 } | 3818 } |
3791 __ bind(&skip1); | 3819 __ bind(&skip1); |
3792 | 3820 |
3793 // The following conversion will not work with numbers | 3821 // The following conversion will not work with numbers |
3794 // outside of ]-2^32, 2^32[. | 3822 // outside of ]-2^32, 2^32[. |
3795 DeoptimizeIf(ge, instr, "overflow", scratch, | 3823 DeoptimizeIf(ge, instr, Deoptimizer::kOverflow, scratch, |
3796 Operand(HeapNumber::kExponentBias + 32)); | 3824 Operand(HeapNumber::kExponentBias + 32)); |
3797 | 3825 |
3798 // Save the original sign for later comparison. | 3826 // Save the original sign for later comparison. |
3799 __ And(scratch, result, Operand(HeapNumber::kSignMask)); | 3827 __ And(scratch, result, Operand(HeapNumber::kSignMask)); |
3800 | 3828 |
3801 __ Move(double_scratch0(), 0.5); | 3829 __ Move(double_scratch0(), 0.5); |
3802 __ add_d(double_scratch0(), input, double_scratch0()); | 3830 __ add_d(double_scratch0(), input, double_scratch0()); |
3803 | 3831 |
3804 // Check sign of the result: if the sign changed, the input | 3832 // Check sign of the result: if the sign changed, the input |
3805 // value was in ]-0.5, 0[ and the result should be -0. | 3833 // value was in ]-0.5, 0[ and the result should be -0. |
3806 __ mfhc1(result, double_scratch0()); | 3834 __ mfhc1(result, double_scratch0()); |
3807 // mfhc1 sign-extends, clear the upper bits. | 3835 // mfhc1 sign-extends, clear the upper bits. |
3808 __ dsll32(result, result, 0); | 3836 __ dsll32(result, result, 0); |
3809 __ dsrl32(result, result, 0); | 3837 __ dsrl32(result, result, 0); |
3810 __ Xor(result, result, Operand(scratch)); | 3838 __ Xor(result, result, Operand(scratch)); |
3811 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3839 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3812 // ARM uses 'mi' here, which is 'lt' | 3840 // ARM uses 'mi' here, which is 'lt' |
3813 DeoptimizeIf(lt, instr, "minus zero", result, Operand(zero_reg)); | 3841 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, result, Operand(zero_reg)); |
3814 } else { | 3842 } else { |
3815 Label skip2; | 3843 Label skip2; |
3816 // ARM uses 'mi' here, which is 'lt' | 3844 // ARM uses 'mi' here, which is 'lt' |
3817 // Negating it results in 'ge' | 3845 // Negating it results in 'ge' |
3818 __ Branch(&skip2, ge, result, Operand(zero_reg)); | 3846 __ Branch(&skip2, ge, result, Operand(zero_reg)); |
3819 __ mov(result, zero_reg); | 3847 __ mov(result, zero_reg); |
3820 __ Branch(&done); | 3848 __ Branch(&done); |
3821 __ bind(&skip2); | 3849 __ bind(&skip2); |
3822 } | 3850 } |
3823 | 3851 |
3824 Register except_flag = scratch; | 3852 Register except_flag = scratch; |
3825 __ EmitFPUTruncate(kRoundToMinusInf, | 3853 __ EmitFPUTruncate(kRoundToMinusInf, |
3826 result, | 3854 result, |
3827 double_scratch0(), | 3855 double_scratch0(), |
3828 at, | 3856 at, |
3829 double_scratch1, | 3857 double_scratch1, |
3830 except_flag); | 3858 except_flag); |
3831 | 3859 |
3832 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 3860 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
3833 Operand(zero_reg)); | 3861 Operand(zero_reg)); |
3834 | 3862 |
3835 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3863 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3836 // Test for -0. | 3864 // Test for -0. |
3837 __ Branch(&done, ne, result, Operand(zero_reg)); | 3865 __ Branch(&done, ne, result, Operand(zero_reg)); |
3838 __ bind(&check_sign_on_zero); | 3866 __ bind(&check_sign_on_zero); |
3839 __ mfhc1(scratch, input); // Get exponent/sign bits. | 3867 __ mfhc1(scratch, input); // Get exponent/sign bits. |
3840 __ And(scratch, scratch, Operand(HeapNumber::kSignMask)); | 3868 __ And(scratch, scratch, Operand(HeapNumber::kSignMask)); |
3841 DeoptimizeIf(ne, instr, "minus zero", scratch, Operand(zero_reg)); | 3869 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch, |
| 3870 Operand(zero_reg)); |
3842 } | 3871 } |
3843 __ bind(&done); | 3872 __ bind(&done); |
3844 } | 3873 } |
3845 | 3874 |
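(For orientation only, not part of the patch: DoMathRound rounds by adding 0.5 and truncating toward minus infinity, with extra care so that negative inputs rounding to zero produce -0. A hypothetical C++ rendering of that rule, ignoring the large-magnitude fast paths handled separately above:)

  #include <cmath>

  // Sketch only: round-half-up with minus-zero preservation, the behaviour
  // the kBailoutOnMinusZero checks above are guarding.
  double RoundHalfUp(double x) {
    double result = std::floor(x + 0.5);
    // A negative input in [-0.5, 0) rounds to zero; report it as -0.
    if (result == 0.0 && std::signbit(x)) return -0.0;
    return result;
  }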
3846 | 3875 |
3847 void LCodeGen::DoMathFround(LMathFround* instr) { | 3876 void LCodeGen::DoMathFround(LMathFround* instr) { |
3848 DoubleRegister input = ToDoubleRegister(instr->value()); | 3877 DoubleRegister input = ToDoubleRegister(instr->value()); |
3849 DoubleRegister result = ToDoubleRegister(instr->result()); | 3878 DoubleRegister result = ToDoubleRegister(instr->result()); |
3850 __ cvt_s_d(result, input); | 3879 __ cvt_s_d(result, input); |
3851 __ cvt_d_s(result, result); | 3880 __ cvt_d_s(result, result); |
(...skipping 45 matching lines...)
3897 | 3926 |
3898 if (exponent_type.IsSmi()) { | 3927 if (exponent_type.IsSmi()) { |
3899 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3928 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3900 __ CallStub(&stub); | 3929 __ CallStub(&stub); |
3901 } else if (exponent_type.IsTagged()) { | 3930 } else if (exponent_type.IsTagged()) { |
3902 Label no_deopt; | 3931 Label no_deopt; |
3903 __ JumpIfSmi(tagged_exponent, &no_deopt); | 3932 __ JumpIfSmi(tagged_exponent, &no_deopt); |
3904 DCHECK(!a7.is(tagged_exponent)); | 3933 DCHECK(!a7.is(tagged_exponent)); |
3905 __ lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); | 3934 __ lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); |
3906 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3935 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
3907 DeoptimizeIf(ne, instr, "not a heap number", a7, Operand(at)); | 3936 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, a7, Operand(at)); |
3908 __ bind(&no_deopt); | 3937 __ bind(&no_deopt); |
3909 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3938 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3910 __ CallStub(&stub); | 3939 __ CallStub(&stub); |
3911 } else if (exponent_type.IsInteger32()) { | 3940 } else if (exponent_type.IsInteger32()) { |
3912 MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3941 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
3913 __ CallStub(&stub); | 3942 __ CallStub(&stub); |
3914 } else { | 3943 } else { |
3915 DCHECK(exponent_type.IsDouble()); | 3944 DCHECK(exponent_type.IsDouble()); |
3916 MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3945 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
3917 __ CallStub(&stub); | 3946 __ CallStub(&stub); |
(...skipping 380 matching lines...)
4298 } else { | 4327 } else { |
4299 reg = ToRegister(instr->index()); | 4328 reg = ToRegister(instr->index()); |
4300 operand = ToOperand(instr->length()); | 4329 operand = ToOperand(instr->length()); |
4301 } | 4330 } |
4302 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 4331 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { |
4303 Label done; | 4332 Label done; |
4304 __ Branch(&done, NegateCondition(cc), reg, operand); | 4333 __ Branch(&done, NegateCondition(cc), reg, operand); |
4305 __ stop("eliminated bounds check failed"); | 4334 __ stop("eliminated bounds check failed"); |
4306 __ bind(&done); | 4335 __ bind(&done); |
4307 } else { | 4336 } else { |
4308 DeoptimizeIf(cc, instr, "out of bounds", reg, operand); | 4337 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds, reg, operand); |
4309 } | 4338 } |
4310 } | 4339 } |
4311 | 4340 |
4312 | 4341 |
4313 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4342 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
4314 Register external_pointer = ToRegister(instr->elements()); | 4343 Register external_pointer = ToRegister(instr->elements()); |
4315 Register key = no_reg; | 4344 Register key = no_reg; |
4316 ElementsKind elements_kind = instr->elements_kind(); | 4345 ElementsKind elements_kind = instr->elements_kind(); |
4317 bool key_is_constant = instr->key()->IsConstantOperand(); | 4346 bool key_is_constant = instr->key()->IsConstantOperand(); |
4318 int constant_key = 0; | 4347 int constant_key = 0; |
(...skipping 270 matching lines...)
4589 __ bind(¬_applicable); | 4618 __ bind(¬_applicable); |
4590 } | 4619 } |
4591 | 4620 |
4592 | 4621 |
4593 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4622 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4594 Register object = ToRegister(instr->object()); | 4623 Register object = ToRegister(instr->object()); |
4595 Register temp = ToRegister(instr->temp()); | 4624 Register temp = ToRegister(instr->temp()); |
4596 Label no_memento_found; | 4625 Label no_memento_found; |
4597 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, | 4626 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, |
4598 ne, &no_memento_found); | 4627 ne, &no_memento_found); |
4599 DeoptimizeIf(al, instr, "memento found"); | 4628 DeoptimizeIf(al, instr, Deoptimizer::kMementoFound); |
4600 __ bind(&no_memento_found); | 4629 __ bind(&no_memento_found); |
4601 } | 4630 } |
4602 | 4631 |
4603 | 4632 |
4604 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4633 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
4605 DCHECK(ToRegister(instr->context()).is(cp)); | 4634 DCHECK(ToRegister(instr->context()).is(cp)); |
4606 DCHECK(ToRegister(instr->left()).is(a1)); | 4635 DCHECK(ToRegister(instr->left()).is(a1)); |
4607 DCHECK(ToRegister(instr->right()).is(a0)); | 4636 DCHECK(ToRegister(instr->right()).is(a0)); |
4608 StringAddStub stub(isolate(), | 4637 StringAddStub stub(isolate(), |
4609 instr->hydrogen()->flags(), | 4638 instr->hydrogen()->flags(), |
(...skipping 282 matching lines...)
4892 } | 4921 } |
4893 | 4922 |
4894 | 4923 |
4895 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4924 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
4896 HChange* hchange = instr->hydrogen(); | 4925 HChange* hchange = instr->hydrogen(); |
4897 Register input = ToRegister(instr->value()); | 4926 Register input = ToRegister(instr->value()); |
4898 Register output = ToRegister(instr->result()); | 4927 Register output = ToRegister(instr->result()); |
4899 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4928 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4900 hchange->value()->CheckFlag(HValue::kUint32)) { | 4929 hchange->value()->CheckFlag(HValue::kUint32)) { |
4901 __ And(at, input, Operand(0x80000000)); | 4930 __ And(at, input, Operand(0x80000000)); |
4902 DeoptimizeIf(ne, instr, "overflow", at, Operand(zero_reg)); | 4931 DeoptimizeIf(ne, instr, Deoptimizer::kOverflow, at, Operand(zero_reg)); |
4903 } | 4932 } |
4904 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4933 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4905 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4934 !hchange->value()->CheckFlag(HValue::kUint32)) { |
4906 __ SmiTagCheckOverflow(output, input, at); | 4935 __ SmiTagCheckOverflow(output, input, at); |
4907 DeoptimizeIf(lt, instr, "overflow", at, Operand(zero_reg)); | 4936 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, at, Operand(zero_reg)); |
4908 } else { | 4937 } else { |
4909 __ SmiTag(output, input); | 4938 __ SmiTag(output, input); |
4910 } | 4939 } |
4911 } | 4940 } |
4912 | 4941 |
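(Brief aside, stated as an assumption rather than taken from this CL: on 64-bit targets a Smi carries a signed 32-bit payload, so a uint32 value with bit 31 set cannot be tagged; that is what the 0x80000000 test above detects. Sketch:)

  #include <cstdint>

  // Sketch only: a uint32 value fits in a (64-bit V8) Smi exactly when its
  // sign bit is clear, i.e. it is representable as a non-negative int32.
  bool Uint32FitsInSmi(uint32_t value) {
    return (value & 0x80000000u) == 0;
  }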
4913 | 4942 |
4914 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4943 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
4915 Register scratch = scratch0(); | 4944 Register scratch = scratch0(); |
4916 Register input = ToRegister(instr->value()); | 4945 Register input = ToRegister(instr->value()); |
4917 Register result = ToRegister(instr->result()); | 4946 Register result = ToRegister(instr->result()); |
4918 if (instr->needs_check()) { | 4947 if (instr->needs_check()) { |
4919 STATIC_ASSERT(kHeapObjectTag == 1); | 4948 STATIC_ASSERT(kHeapObjectTag == 1); |
4920 // If the input is a HeapObject, value of scratch won't be zero. | 4949 // If the input is a HeapObject, value of scratch won't be zero. |
4921 __ And(scratch, input, Operand(kHeapObjectTag)); | 4950 __ And(scratch, input, Operand(kHeapObjectTag)); |
4922 __ SmiUntag(result, input); | 4951 __ SmiUntag(result, input); |
4923 DeoptimizeIf(ne, instr, "not a Smi", scratch, Operand(zero_reg)); | 4952 DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, scratch, Operand(zero_reg)); |
4924 } else { | 4953 } else { |
4925 __ SmiUntag(result, input); | 4954 __ SmiUntag(result, input); |
4926 } | 4955 } |
4927 } | 4956 } |
4928 | 4957 |
4929 | 4958 |
4930 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 4959 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, |
4931 DoubleRegister result_reg, | 4960 DoubleRegister result_reg, |
4932 NumberUntagDMode mode) { | 4961 NumberUntagDMode mode) { |
4933 bool can_convert_undefined_to_nan = | 4962 bool can_convert_undefined_to_nan = |
4934 instr->hydrogen()->can_convert_undefined_to_nan(); | 4963 instr->hydrogen()->can_convert_undefined_to_nan(); |
4935 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 4964 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); |
4936 | 4965 |
4937 Register scratch = scratch0(); | 4966 Register scratch = scratch0(); |
4938 Label convert, load_smi, done; | 4967 Label convert, load_smi, done; |
4939 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 4968 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
4940 // Smi check. | 4969 // Smi check. |
4941 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); | 4970 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); |
4942 // Heap number map check. | 4971 // Heap number map check. |
4943 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4972 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
4944 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 4973 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
4945 if (can_convert_undefined_to_nan) { | 4974 if (can_convert_undefined_to_nan) { |
4946 __ Branch(&convert, ne, scratch, Operand(at)); | 4975 __ Branch(&convert, ne, scratch, Operand(at)); |
4947 } else { | 4976 } else { |
4948 DeoptimizeIf(ne, instr, "not a heap number", scratch, Operand(at)); | 4977 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch, |
| 4978 Operand(at)); |
4949 } | 4979 } |
4950 // Load heap number. | 4980 // Load heap number. |
4951 __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 4981 __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
4952 if (deoptimize_on_minus_zero) { | 4982 if (deoptimize_on_minus_zero) { |
4953 __ mfc1(at, result_reg); | 4983 __ mfc1(at, result_reg); |
4954 __ Branch(&done, ne, at, Operand(zero_reg)); | 4984 __ Branch(&done, ne, at, Operand(zero_reg)); |
4955 __ mfhc1(scratch, result_reg); // Get exponent/sign bits. | 4985 __ mfhc1(scratch, result_reg); // Get exponent/sign bits. |
4956 DeoptimizeIf(eq, instr, "minus zero", scratch, | 4986 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, scratch, |
4957 Operand(HeapNumber::kSignMask)); | 4987 Operand(HeapNumber::kSignMask)); |
4958 } | 4988 } |
4959 __ Branch(&done); | 4989 __ Branch(&done); |
4960 if (can_convert_undefined_to_nan) { | 4990 if (can_convert_undefined_to_nan) { |
4961 __ bind(&convert); | 4991 __ bind(&convert); |
4962 // Convert undefined (and hole) to NaN. | 4992 // Convert undefined (and hole) to NaN. |
4963 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4993 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
4964 DeoptimizeIf(ne, instr, "not a heap number/undefined", input_reg, | 4994 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefined, input_reg, |
4965 Operand(at)); | 4995 Operand(at)); |
4966 __ LoadRoot(scratch, Heap::kNanValueRootIndex); | 4996 __ LoadRoot(scratch, Heap::kNanValueRootIndex); |
4967 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); | 4997 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); |
4968 __ Branch(&done); | 4998 __ Branch(&done); |
4969 } | 4999 } |
4970 } else { | 5000 } else { |
4971 __ SmiUntag(scratch, input_reg); | 5001 __ SmiUntag(scratch, input_reg); |
4972 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 5002 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
4973 } | 5003 } |
4974 // Smi to double register conversion | 5004 // Smi to double register conversion |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5019 __ mov(input_reg, zero_reg); // In delay slot. | 5049 __ mov(input_reg, zero_reg); // In delay slot. |
5020 | 5050 |
5021 __ bind(&check_bools); | 5051 __ bind(&check_bools); |
5022 __ LoadRoot(at, Heap::kTrueValueRootIndex); | 5052 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
5023 __ Branch(&check_false, ne, scratch2, Operand(at)); | 5053 __ Branch(&check_false, ne, scratch2, Operand(at)); |
5024 __ Branch(USE_DELAY_SLOT, &done); | 5054 __ Branch(USE_DELAY_SLOT, &done); |
5025 __ li(input_reg, Operand(1)); // In delay slot. | 5055 __ li(input_reg, Operand(1)); // In delay slot. |
5026 | 5056 |
5027 __ bind(&check_false); | 5057 __ bind(&check_false); |
5028 __ LoadRoot(at, Heap::kFalseValueRootIndex); | 5058 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
5029 DeoptimizeIf(ne, instr, "not a heap number/undefined/true/false", scratch2, | 5059 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefinedTrueFalse, |
5030 Operand(at)); | 5060 scratch2, Operand(at)); |
5031 __ Branch(USE_DELAY_SLOT, &done); | 5061 __ Branch(USE_DELAY_SLOT, &done); |
5032 __ mov(input_reg, zero_reg); // In delay slot. | 5062 __ mov(input_reg, zero_reg); // In delay slot. |
5033 } else { | 5063 } else { |
5034 DeoptimizeIf(ne, instr, "not a heap number", scratch1, Operand(at)); | 5064 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch1, |
| 5065 Operand(at)); |
5035 | 5066 |
5036 // Load the double value. | 5067 // Load the double value. |
5037 __ ldc1(double_scratch, | 5068 __ ldc1(double_scratch, |
5038 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 5069 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
5039 | 5070 |
5040 Register except_flag = scratch2; | 5071 Register except_flag = scratch2; |
5041 __ EmitFPUTruncate(kRoundToZero, | 5072 __ EmitFPUTruncate(kRoundToZero, |
5042 input_reg, | 5073 input_reg, |
5043 double_scratch, | 5074 double_scratch, |
5044 scratch1, | 5075 scratch1, |
5045 double_scratch2, | 5076 double_scratch2, |
5046 except_flag, | 5077 except_flag, |
5047 kCheckForInexactConversion); | 5078 kCheckForInexactConversion); |
5048 | 5079 |
5049 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5080 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
5050 Operand(zero_reg)); | 5081 Operand(zero_reg)); |
5051 | 5082 |
5052 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5083 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5053 __ Branch(&done, ne, input_reg, Operand(zero_reg)); | 5084 __ Branch(&done, ne, input_reg, Operand(zero_reg)); |
5054 | 5085 |
5055 __ mfhc1(scratch1, double_scratch); // Get exponent/sign bits. | 5086 __ mfhc1(scratch1, double_scratch); // Get exponent/sign bits. |
5056 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5087 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
5057 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5088 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 5089 Operand(zero_reg)); |
5058 } | 5090 } |
5059 } | 5091 } |
5060 __ bind(&done); | 5092 __ bind(&done); |
5061 } | 5093 } |
5062 | 5094 |
5063 | 5095 |
5064 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 5096 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
5065 class DeferredTaggedToI FINAL : public LDeferredCode { | 5097 class DeferredTaggedToI FINAL : public LDeferredCode { |
5066 public: | 5098 public: |
5067 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 5099 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5123 | 5155 |
5124 __ EmitFPUTruncate(kRoundToMinusInf, | 5156 __ EmitFPUTruncate(kRoundToMinusInf, |
5125 result_reg, | 5157 result_reg, |
5126 double_input, | 5158 double_input, |
5127 scratch1, | 5159 scratch1, |
5128 double_scratch0(), | 5160 double_scratch0(), |
5129 except_flag, | 5161 except_flag, |
5130 kCheckForInexactConversion); | 5162 kCheckForInexactConversion); |
5131 | 5163 |
5132 // Deopt if the operation did not succeed (except_flag != 0). | 5164 // Deopt if the operation did not succeed (except_flag != 0). |
5133 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5165 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
5134 Operand(zero_reg)); | 5166 Operand(zero_reg)); |
5135 | 5167 |
5136 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5168 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5137 Label done; | 5169 Label done; |
5138 __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 5170 __ Branch(&done, ne, result_reg, Operand(zero_reg)); |
5139 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. | 5171 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. |
5140 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5172 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
5141 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5173 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 5174 Operand(zero_reg)); |
5142 __ bind(&done); | 5175 __ bind(&done); |
5143 } | 5176 } |
5144 } | 5177 } |
5145 } | 5178 } |
5146 | 5179 |
5147 | 5180 |
5148 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 5181 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
5149 Register result_reg = ToRegister(instr->result()); | 5182 Register result_reg = ToRegister(instr->result()); |
5150 Register scratch1 = LCodeGen::scratch0(); | 5183 Register scratch1 = LCodeGen::scratch0(); |
5151 DoubleRegister double_input = ToDoubleRegister(instr->value()); | 5184 DoubleRegister double_input = ToDoubleRegister(instr->value()); |
5152 | 5185 |
5153 if (instr->truncating()) { | 5186 if (instr->truncating()) { |
5154 __ TruncateDoubleToI(result_reg, double_input); | 5187 __ TruncateDoubleToI(result_reg, double_input); |
5155 } else { | 5188 } else { |
5156 Register except_flag = LCodeGen::scratch1(); | 5189 Register except_flag = LCodeGen::scratch1(); |
5157 | 5190 |
5158 __ EmitFPUTruncate(kRoundToMinusInf, | 5191 __ EmitFPUTruncate(kRoundToMinusInf, |
5159 result_reg, | 5192 result_reg, |
5160 double_input, | 5193 double_input, |
5161 scratch1, | 5194 scratch1, |
5162 double_scratch0(), | 5195 double_scratch0(), |
5163 except_flag, | 5196 except_flag, |
5164 kCheckForInexactConversion); | 5197 kCheckForInexactConversion); |
5165 | 5198 |
5166 // Deopt if the operation did not succeed (except_flag != 0). | 5199 // Deopt if the operation did not succeed (except_flag != 0). |
5167 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5200 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
5168 Operand(zero_reg)); | 5201 Operand(zero_reg)); |
5169 | 5202 |
5170 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5203 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5171 Label done; | 5204 Label done; |
5172 __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 5205 __ Branch(&done, ne, result_reg, Operand(zero_reg)); |
5173 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. | 5206 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. |
5174 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5207 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
5175 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5208 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 5209 Operand(zero_reg)); |
5176 __ bind(&done); | 5210 __ bind(&done); |
5177 } | 5211 } |
5178 } | 5212 } |
5179 __ SmiTag(result_reg, result_reg); | 5213 __ SmiTag(result_reg, result_reg); |
5180 } | 5214 } |
5181 | 5215 |
5182 | 5216 |
5183 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 5217 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
5184 LOperand* input = instr->value(); | 5218 LOperand* input = instr->value(); |
5185 __ SmiTst(ToRegister(input), at); | 5219 __ SmiTst(ToRegister(input), at); |
5186 DeoptimizeIf(ne, instr, "not a Smi", at, Operand(zero_reg)); | 5220 DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, at, Operand(zero_reg)); |
5187 } | 5221 } |
5188 | 5222 |
5189 | 5223 |
5190 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 5224 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
5191 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 5225 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
5192 LOperand* input = instr->value(); | 5226 LOperand* input = instr->value(); |
5193 __ SmiTst(ToRegister(input), at); | 5227 __ SmiTst(ToRegister(input), at); |
5194 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 5228 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
5195 } | 5229 } |
5196 } | 5230 } |
5197 | 5231 |
5198 | 5232 |
5199 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 5233 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
5200 Register input = ToRegister(instr->value()); | 5234 Register input = ToRegister(instr->value()); |
5201 Register scratch = scratch0(); | 5235 Register scratch = scratch0(); |
5202 | 5236 |
5203 __ GetObjectType(input, scratch, scratch); | 5237 __ GetObjectType(input, scratch, scratch); |
5204 | 5238 |
5205 if (instr->hydrogen()->is_interval_check()) { | 5239 if (instr->hydrogen()->is_interval_check()) { |
5206 InstanceType first; | 5240 InstanceType first; |
5207 InstanceType last; | 5241 InstanceType last; |
5208 instr->hydrogen()->GetCheckInterval(&first, &last); | 5242 instr->hydrogen()->GetCheckInterval(&first, &last); |
5209 | 5243 |
5210 // If there is only one type in the interval check for equality. | 5244 // If there is only one type in the interval check for equality. |
5211 if (first == last) { | 5245 if (first == last) { |
5212 DeoptimizeIf(ne, instr, "wrong instance type", scratch, Operand(first)); | 5246 DeoptimizeIf(ne, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5247 Operand(first)); |
5213 } else { | 5248 } else { |
5214 DeoptimizeIf(lo, instr, "wrong instance type", scratch, Operand(first)); | 5249 DeoptimizeIf(lo, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5250 Operand(first)); |
5215 // Omit check for the last type. | 5251 // Omit check for the last type. |
5216 if (last != LAST_TYPE) { | 5252 if (last != LAST_TYPE) { |
5217 DeoptimizeIf(hi, instr, "wrong instance type", scratch, Operand(last)); | 5253 DeoptimizeIf(hi, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5254 Operand(last)); |
5218 } | 5255 } |
5219 } | 5256 } |
5220 } else { | 5257 } else { |
5221 uint8_t mask; | 5258 uint8_t mask; |
5222 uint8_t tag; | 5259 uint8_t tag; |
5223 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 5260 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
5224 | 5261 |
5225 if (base::bits::IsPowerOfTwo32(mask)) { | 5262 if (base::bits::IsPowerOfTwo32(mask)) { |
5226 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 5263 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); |
5227 __ And(at, scratch, mask); | 5264 __ And(at, scratch, mask); |
5228 DeoptimizeIf(tag == 0 ? ne : eq, instr, "wrong instance type", at, | 5265 DeoptimizeIf(tag == 0 ? ne : eq, instr, Deoptimizer::kWrongInstanceType, |
5229 Operand(zero_reg)); | 5266 at, Operand(zero_reg)); |
5230 } else { | 5267 } else { |
5231 __ And(scratch, scratch, Operand(mask)); | 5268 __ And(scratch, scratch, Operand(mask)); |
5232 DeoptimizeIf(ne, instr, "wrong instance type", scratch, Operand(tag)); | 5269 DeoptimizeIf(ne, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5270 Operand(tag)); |
5233 } | 5271 } |
5234 } | 5272 } |
5235 } | 5273 } |
5236 | 5274 |
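(One more aside, outside the scope of this CL: the IsPowerOfTwo32 fast path above exploits the fact that when the mask has a single bit set, and the tag is either zero or equal to the mask, the (type & mask) == tag check collapses to testing one AND result against zero. A hedged C++ rendering of that logic:)

  #include <cstdint>

  // Sketch only: the instance-type check emitted by DoCheckInstanceType;
  // single-bit masks need only one comparison against zero.
  bool InstanceTypeCheckPasses(uint8_t type, uint8_t mask, uint8_t tag) {
    bool single_bit_mask = mask != 0 && (mask & (mask - 1)) == 0;
    if (single_bit_mask) {
      bool bit_set = (type & mask) != 0;
      return tag == 0 ? !bit_set : bit_set;  // tag is 0 or equals the mask
    }
    return (type & mask) == tag;
  }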
5237 | 5275 |
5238 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 5276 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
5239 Register reg = ToRegister(instr->value()); | 5277 Register reg = ToRegister(instr->value()); |
5240 Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 5278 Handle<HeapObject> object = instr->hydrogen()->object().handle(); |
5241 AllowDeferredHandleDereference smi_check; | 5279 AllowDeferredHandleDereference smi_check; |
5242 if (isolate()->heap()->InNewSpace(*object)) { | 5280 if (isolate()->heap()->InNewSpace(*object)) { |
5243 Register reg = ToRegister(instr->value()); | 5281 Register reg = ToRegister(instr->value()); |
5244 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 5282 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
5245 __ li(at, Operand(Handle<Object>(cell))); | 5283 __ li(at, Operand(Handle<Object>(cell))); |
5246 __ ld(at, FieldMemOperand(at, Cell::kValueOffset)); | 5284 __ ld(at, FieldMemOperand(at, Cell::kValueOffset)); |
5247 DeoptimizeIf(ne, instr, "value mismatch", reg, Operand(at)); | 5285 DeoptimizeIf(ne, instr, Deoptimizer::kValueMismatch, reg, Operand(at)); |
5248 } else { | 5286 } else { |
5249 DeoptimizeIf(ne, instr, "value mismatch", reg, Operand(object)); | 5287 DeoptimizeIf(ne, instr, Deoptimizer::kValueMismatch, reg, Operand(object)); |
5250 } | 5288 } |
5251 } | 5289 } |
5252 | 5290 |
5253 | 5291 |
5254 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5292 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
5255 { | 5293 { |
5256 PushSafepointRegistersScope scope(this); | 5294 PushSafepointRegistersScope scope(this); |
5257 __ push(object); | 5295 __ push(object); |
5258 __ mov(cp, zero_reg); | 5296 __ mov(cp, zero_reg); |
5259 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 5297 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); |
5260 RecordSafepointWithRegisters( | 5298 RecordSafepointWithRegisters( |
5261 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 5299 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
5262 __ StoreToSafepointRegisterSlot(v0, scratch0()); | 5300 __ StoreToSafepointRegisterSlot(v0, scratch0()); |
5263 } | 5301 } |
5264 __ SmiTst(scratch0(), at); | 5302 __ SmiTst(scratch0(), at); |
5265 DeoptimizeIf(eq, instr, "instance migration failed", at, Operand(zero_reg)); | 5303 DeoptimizeIf(eq, instr, Deoptimizer::kInstanceMigrationFailed, at, |
| 5304 Operand(zero_reg)); |
5266 } | 5305 } |
5267 | 5306 |
5268 | 5307 |
5269 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5308 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
5270 class DeferredCheckMaps FINAL : public LDeferredCode { | 5309 class DeferredCheckMaps FINAL : public LDeferredCode { |
5271 public: | 5310 public: |
5272 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 5311 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
5273 : LDeferredCode(codegen), instr_(instr), object_(object) { | 5312 : LDeferredCode(codegen), instr_(instr), object_(object) { |
5274 SetExit(check_maps()); | 5313 SetExit(check_maps()); |
5275 } | 5314 } |
(...skipping 33 matching lines...)
5309 Label success; | 5348 Label success; |
5310 for (int i = 0; i < maps->size() - 1; i++) { | 5349 for (int i = 0; i < maps->size() - 1; i++) { |
5311 Handle<Map> map = maps->at(i).handle(); | 5350 Handle<Map> map = maps->at(i).handle(); |
5312 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); | 5351 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); |
5313 } | 5352 } |
5314 Handle<Map> map = maps->at(maps->size() - 1).handle(); | 5353 Handle<Map> map = maps->at(maps->size() - 1).handle(); |
5315 // Do the CompareMap() directly within the Branch() and DeoptimizeIf(). | 5354 // Do the CompareMap() directly within the Branch() and DeoptimizeIf(). |
5316 if (instr->hydrogen()->HasMigrationTarget()) { | 5355 if (instr->hydrogen()->HasMigrationTarget()) { |
5317 __ Branch(deferred->entry(), ne, map_reg, Operand(map)); | 5356 __ Branch(deferred->entry(), ne, map_reg, Operand(map)); |
5318 } else { | 5357 } else { |
5319 DeoptimizeIf(ne, instr, "wrong map", map_reg, Operand(map)); | 5358 DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, map_reg, Operand(map)); |
5320 } | 5359 } |
5321 | 5360 |
5322 __ bind(&success); | 5361 __ bind(&success); |
5323 } | 5362 } |
5324 | 5363 |
5325 | 5364 |
5326 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5365 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
5327 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5366 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); |
5328 Register result_reg = ToRegister(instr->result()); | 5367 Register result_reg = ToRegister(instr->result()); |
5329 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); | 5368 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); |
(...skipping 17 matching lines...)
5347 | 5386 |
5348 // Both smi and heap number cases are handled. | 5387 // Both smi and heap number cases are handled. |
5349 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); | 5388 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); |
5350 | 5389 |
5351 // Check for heap number | 5390 // Check for heap number |
5352 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 5391 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
5353 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); | 5392 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); |
5354 | 5393 |
5355 // Check for undefined. Undefined is converted to zero for clamping | 5394 // Check for undefined. Undefined is converted to zero for clamping |
5356 // conversions. | 5395 // conversions. |
5357 DeoptimizeIf(ne, instr, "not a heap number/undefined", input_reg, | 5396 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefined, input_reg, |
5358 Operand(factory()->undefined_value())); | 5397 Operand(factory()->undefined_value())); |
5359 __ mov(result_reg, zero_reg); | 5398 __ mov(result_reg, zero_reg); |
5360 __ jmp(&done); | 5399 __ jmp(&done); |
5361 | 5400 |
5362 // Heap number | 5401 // Heap number |
5363 __ bind(&heap_number); | 5402 __ bind(&heap_number); |
5364 __ ldc1(double_scratch0(), FieldMemOperand(input_reg, | 5403 __ ldc1(double_scratch0(), FieldMemOperand(input_reg, |
5365 HeapNumber::kValueOffset)); | 5404 HeapNumber::kValueOffset)); |
5366 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); | 5405 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); |
5367 __ jmp(&done); | 5406 __ jmp(&done); |
(...skipping 495 matching lines...)
5863 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5902 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
5864 | 5903 |
5865 GenerateOsrPrologue(); | 5904 GenerateOsrPrologue(); |
5866 } | 5905 } |
5867 | 5906 |
5868 | 5907 |
5869 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5908 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
5870 Register result = ToRegister(instr->result()); | 5909 Register result = ToRegister(instr->result()); |
5871 Register object = ToRegister(instr->object()); | 5910 Register object = ToRegister(instr->object()); |
5872 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5911 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
5873 DeoptimizeIf(eq, instr, "undefined", object, Operand(at)); | 5912 DeoptimizeIf(eq, instr, Deoptimizer::kUndefined, object, Operand(at)); |
5874 | 5913 |
5875 Register null_value = a5; | 5914 Register null_value = a5; |
5876 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 5915 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
5877 DeoptimizeIf(eq, instr, "null", object, Operand(null_value)); | 5916 DeoptimizeIf(eq, instr, Deoptimizer::kNull, object, Operand(null_value)); |
5878 | 5917 |
5879 __ And(at, object, kSmiTagMask); | 5918 __ And(at, object, kSmiTagMask); |
5880 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 5919 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
5881 | 5920 |
5882 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5921 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
5883 __ GetObjectType(object, a1, a1); | 5922 __ GetObjectType(object, a1, a1); |
5884 DeoptimizeIf(le, instr, "not a JavaScript object", a1, | 5923 DeoptimizeIf(le, instr, Deoptimizer::kNotAJavaScriptObject, a1, |
5885 Operand(LAST_JS_PROXY_TYPE)); | 5924 Operand(LAST_JS_PROXY_TYPE)); |
5886 | 5925 |
5887 Label use_cache, call_runtime; | 5926 Label use_cache, call_runtime; |
5888 DCHECK(object.is(a0)); | 5927 DCHECK(object.is(a0)); |
5889 __ CheckEnumCache(null_value, &call_runtime); | 5928 __ CheckEnumCache(null_value, &call_runtime); |
5890 | 5929 |
5891 __ ld(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 5930 __ ld(result, FieldMemOperand(object, HeapObject::kMapOffset)); |
5892 __ Branch(&use_cache); | 5931 __ Branch(&use_cache); |
5893 | 5932 |
5894 // Get the set of properties to enumerate. | 5933 // Get the set of properties to enumerate. |
5895 __ bind(&call_runtime); | 5934 __ bind(&call_runtime); |
5896 __ push(object); | 5935 __ push(object); |
5897 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); | 5936 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); |
5898 | 5937 |
5899 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 5938 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
5900 DCHECK(result.is(v0)); | 5939 DCHECK(result.is(v0)); |
5901 __ LoadRoot(at, Heap::kMetaMapRootIndex); | 5940 __ LoadRoot(at, Heap::kMetaMapRootIndex); |
5902 DeoptimizeIf(ne, instr, "wrong map", a1, Operand(at)); | 5941 DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, a1, Operand(at)); |
5903 __ bind(&use_cache); | 5942 __ bind(&use_cache); |
5904 } | 5943 } |
5905 | 5944 |
5906 | 5945 |
5907 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 5946 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { |
5908 Register map = ToRegister(instr->map()); | 5947 Register map = ToRegister(instr->map()); |
5909 Register result = ToRegister(instr->result()); | 5948 Register result = ToRegister(instr->result()); |
5910 Label load_cache, done; | 5949 Label load_cache, done; |
5911 __ EnumLength(result, map); | 5950 __ EnumLength(result, map); |
5912 __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0))); | 5951 __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0))); |
5913 __ li(result, Operand(isolate()->factory()->empty_fixed_array())); | 5952 __ li(result, Operand(isolate()->factory()->empty_fixed_array())); |
5914 __ jmp(&done); | 5953 __ jmp(&done); |
5915 | 5954 |
5916 __ bind(&load_cache); | 5955 __ bind(&load_cache); |
5917 __ LoadInstanceDescriptors(map, result); | 5956 __ LoadInstanceDescriptors(map, result); |
5918 __ ld(result, | 5957 __ ld(result, |
5919 FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); | 5958 FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); |
5920 __ ld(result, | 5959 __ ld(result, |
5921 FieldMemOperand(result, FixedArray::SizeFor(instr->idx()))); | 5960 FieldMemOperand(result, FixedArray::SizeFor(instr->idx()))); |
5922 DeoptimizeIf(eq, instr, "no cache", result, Operand(zero_reg)); | 5961 DeoptimizeIf(eq, instr, Deoptimizer::kNoCache, result, Operand(zero_reg)); |
5923 | 5962 |
5924 __ bind(&done); | 5963 __ bind(&done); |
5925 } | 5964 } |
5926 | 5965 |
5927 | 5966 |
5928 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5967 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5929 Register object = ToRegister(instr->value()); | 5968 Register object = ToRegister(instr->value()); |
5930 Register map = ToRegister(instr->map()); | 5969 Register map = ToRegister(instr->map()); |
5931 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 5970 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); |
5932 DeoptimizeIf(ne, instr, "wrong map", map, Operand(scratch0())); | 5971 DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, map, Operand(scratch0())); |
5933 } | 5972 } |
5934 | 5973 |
5935 | 5974 |
5936 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5975 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
5937 Register result, | 5976 Register result, |
5938 Register object, | 5977 Register object, |
5939 Register index) { | 5978 Register index) { |
5940 PushSafepointRegistersScope scope(this); | 5979 PushSafepointRegistersScope scope(this); |
5941 __ Push(object, index); | 5980 __ Push(object, index); |
5942 __ mov(cp, zero_reg); | 5981 __ mov(cp, zero_reg); |
(...skipping 74 matching lines...)
6017 __ li(at, scope_info); | 6056 __ li(at, scope_info); |
6018 __ Push(at, ToRegister(instr->function())); | 6057 __ Push(at, ToRegister(instr->function())); |
6019 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6058 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6020 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6059 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6021 } | 6060 } |
6022 | 6061 |
6023 | 6062 |
6024 #undef __ | 6063 #undef __ |
6025 | 6064 |
6026 } } // namespace v8::internal | 6065 } } // namespace v8::internal |