OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/hydrogen-osr.h" | 9 #include "src/hydrogen-osr.h" |
10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
(...skipping 1049 matching lines...) |
1060 HMod* hmod = instr->hydrogen(); | 1060 HMod* hmod = instr->hydrogen(); |
1061 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1061 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1062 Label dividend_is_not_negative, done; | 1062 Label dividend_is_not_negative, done; |
1063 | 1063 |
1064 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 1064 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { |
1065 __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg)); | 1065 __ Branch(&dividend_is_not_negative, ge, dividend, Operand(zero_reg)); |
1066 // Note: The code below even works when right contains kMinInt. | 1066 // Note: The code below even works when right contains kMinInt. |
1067 __ dsubu(dividend, zero_reg, dividend); | 1067 __ dsubu(dividend, zero_reg, dividend); |
1068 __ And(dividend, dividend, Operand(mask)); | 1068 __ And(dividend, dividend, Operand(mask)); |
1069 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1069 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1070 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1070 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1071 Operand(zero_reg)); |
1071 } | 1072 } |
1072 __ Branch(USE_DELAY_SLOT, &done); | 1073 __ Branch(USE_DELAY_SLOT, &done); |
1073 __ dsubu(dividend, zero_reg, dividend); | 1074 __ dsubu(dividend, zero_reg, dividend); |
1074 } | 1075 } |
1075 | 1076 |
1076 __ bind(&dividend_is_not_negative); | 1077 __ bind(&dividend_is_not_negative); |
1077 __ And(dividend, dividend, Operand(mask)); | 1078 __ And(dividend, dividend, Operand(mask)); |
1078 __ bind(&done); | 1079 __ bind(&done); |
1079 } | 1080 } |
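For reference, the mod-by-power-of-two lowering above reduces to the following scalar logic (a sketch with illustrative names, not V8 code; the JS-level -0 case only appears as a comment because scalar C++ cannot represent it):

    #include <cassert>
    #include <cstdint>

    // Sketch of DoModByPowerOf2I: |divisor| is a power of two, so the
    // remainder is a bit mask of the dividend, with the dividend's sign
    // preserved by negating before and after the mask.
    int32_t ModByPowerOf2(int32_t dividend, int32_t divisor) {
      assert(divisor != 0);
      int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);  // |divisor| - 1
      if (dividend < 0) {
        // Unsigned negation also works when dividend == kMinInt.
        uint32_t positive = 0u - static_cast<uint32_t>(dividend);
        // If (positive & mask) == 0 the JS result is -0; the generated code
        // deoptimizes here when kBailoutOnMinusZero is set.
        return -static_cast<int32_t>(positive & mask);
      }
      return dividend & mask;
    }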
1080 | 1081 |
1081 | 1082 |
1082 void LCodeGen::DoModByConstI(LModByConstI* instr) { | 1083 void LCodeGen::DoModByConstI(LModByConstI* instr) { |
1083 Register dividend = ToRegister(instr->dividend()); | 1084 Register dividend = ToRegister(instr->dividend()); |
1084 int32_t divisor = instr->divisor(); | 1085 int32_t divisor = instr->divisor(); |
1085 Register result = ToRegister(instr->result()); | 1086 Register result = ToRegister(instr->result()); |
1086 DCHECK(!dividend.is(result)); | 1087 DCHECK(!dividend.is(result)); |
1087 | 1088 |
1088 if (divisor == 0) { | 1089 if (divisor == 0) { |
1089 DeoptimizeIf(al, instr, "division by zero"); | 1090 DeoptimizeIf(al, instr, Deoptimizer::kDivisionByZero); |
1090 return; | 1091 return; |
1091 } | 1092 } |
1092 | 1093 |
1093 __ TruncatingDiv(result, dividend, Abs(divisor)); | 1094 __ TruncatingDiv(result, dividend, Abs(divisor)); |
1094 __ Dmul(result, result, Operand(Abs(divisor))); | 1095 __ Dmul(result, result, Operand(Abs(divisor))); |
1095 __ Dsubu(result, dividend, Operand(result)); | 1096 __ Dsubu(result, dividend, Operand(result)); |
1096 | 1097 |
1097 // Check for negative zero. | 1098 // Check for negative zero. |
1098 HMod* hmod = instr->hydrogen(); | 1099 HMod* hmod = instr->hydrogen(); |
1099 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1100 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1100 Label remainder_not_zero; | 1101 Label remainder_not_zero; |
1101 __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg)); | 1102 __ Branch(&remainder_not_zero, ne, result, Operand(zero_reg)); |
1102 DeoptimizeIf(lt, instr, "minus zero", dividend, Operand(zero_reg)); | 1103 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, dividend, |
| 1104 Operand(zero_reg)); |
1103 __ bind(&remainder_not_zero); | 1105 __ bind(&remainder_not_zero); |
1104 } | 1106 } |
1105 } | 1107 } |
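The constant-modulus path above recovers the remainder from a truncating division; roughly (a sketch, with plain '/' standing in for the magic-number TruncatingDiv):

    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    // Sketch of DoModByConstI: remainder = dividend - (dividend / |divisor|) * |divisor|.
    int32_t ModByConst(int32_t dividend, int32_t divisor) {
      // divisor == 0 is an unconditional deopt above; kMinInt is excluded only
      // to keep std::abs well-defined in this sketch.
      assert(divisor != 0 && divisor != INT32_MIN);
      int32_t abs_divisor = std::abs(divisor);
      int32_t quotient = dividend / abs_divisor;      // truncating division
      int32_t remainder = dividend - quotient * abs_divisor;
      // remainder == 0 with a negative dividend is the JS -0 case that the
      // generated code deoptimizes on under kBailoutOnMinusZero.
      return remainder;
    }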
1106 | 1108 |
1107 | 1109 |
1108 void LCodeGen::DoModI(LModI* instr) { | 1110 void LCodeGen::DoModI(LModI* instr) { |
1109 HMod* hmod = instr->hydrogen(); | 1111 HMod* hmod = instr->hydrogen(); |
1110 const Register left_reg = ToRegister(instr->left()); | 1112 const Register left_reg = ToRegister(instr->left()); |
1111 const Register right_reg = ToRegister(instr->right()); | 1113 const Register right_reg = ToRegister(instr->right()); |
1112 const Register result_reg = ToRegister(instr->result()); | 1114 const Register result_reg = ToRegister(instr->result()); |
1113 | 1115 |
1114 // div runs in the background while we check for special cases. | 1116 // div runs in the background while we check for special cases. |
1115 __ Dmod(result_reg, left_reg, right_reg); | 1117 __ Dmod(result_reg, left_reg, right_reg); |
1116 | 1118 |
1117 Label done; | 1119 Label done; |
1118 // Check for x % 0, we have to deopt in this case because we can't return a | 1120 // Check for x % 0, we have to deopt in this case because we can't return a |
1119 // NaN. | 1121 // NaN. |
1120 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1122 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
1121 DeoptimizeIf(eq, instr, "division by zero", right_reg, Operand(zero_reg)); | 1123 DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, right_reg, |
| 1124 Operand(zero_reg)); |
1122 } | 1125 } |
1123 | 1126 |
1124 // Check for kMinInt % -1, div will return kMinInt, which is not what we | 1127 // Check for kMinInt % -1, div will return kMinInt, which is not what we |
1125 // want. We have to deopt if we care about -0, because we can't return that. | 1128 // want. We have to deopt if we care about -0, because we can't return that. |
1126 if (hmod->CheckFlag(HValue::kCanOverflow)) { | 1129 if (hmod->CheckFlag(HValue::kCanOverflow)) { |
1127 Label no_overflow_possible; | 1130 Label no_overflow_possible; |
1128 __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt)); | 1131 __ Branch(&no_overflow_possible, ne, left_reg, Operand(kMinInt)); |
1129 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1132 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1130 DeoptimizeIf(eq, instr, "minus zero", right_reg, Operand(-1)); | 1133 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, right_reg, Operand(-1)); |
1131 } else { | 1134 } else { |
1132 __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1)); | 1135 __ Branch(&no_overflow_possible, ne, right_reg, Operand(-1)); |
1133 __ Branch(USE_DELAY_SLOT, &done); | 1136 __ Branch(USE_DELAY_SLOT, &done); |
1134 __ mov(result_reg, zero_reg); | 1137 __ mov(result_reg, zero_reg); |
1135 } | 1138 } |
1136 __ bind(&no_overflow_possible); | 1139 __ bind(&no_overflow_possible); |
1137 } | 1140 } |
1138 | 1141 |
1139 // If we care about -0, test if the dividend is <0 and the result is 0. | 1142 // If we care about -0, test if the dividend is <0 and the result is 0. |
1140 __ Branch(&done, ge, left_reg, Operand(zero_reg)); | 1143 __ Branch(&done, ge, left_reg, Operand(zero_reg)); |
1141 | 1144 |
1142 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1145 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1143 DeoptimizeIf(eq, instr, "minus zero", result_reg, Operand(zero_reg)); | 1146 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result_reg, |
| 1147 Operand(zero_reg)); |
1144 } | 1148 } |
1145 __ bind(&done); | 1149 __ bind(&done); |
1146 } | 1150 } |
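The register-modulus path issues Dmod up front and then guards the cases that cannot be expressed as an int32 result; in scalar form (a sketch, where std::nullopt marks a deoptimization):

    #include <cstdint>
    #include <optional>

    // Sketch of the guards in DoModI.
    std::optional<int32_t> ModI(int32_t left, int32_t right,
                                bool bailout_on_minus_zero) {
      if (right == 0) return std::nullopt;  // x % 0 would have to be NaN
      if (left == INT32_MIN && right == -1) {
        // The hardware result is unusable here; the mathematical remainder is
        // 0, but +0 is wrong when the caller needs -0 for a negative dividend.
        if (bailout_on_minus_zero) return std::nullopt;
        return 0;
      }
      int32_t result = left % right;
      if (bailout_on_minus_zero && result == 0 && left < 0) return std::nullopt;
      return result;
    }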
1147 | 1151 |
1148 | 1152 |
1149 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1153 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
1150 Register dividend = ToRegister(instr->dividend()); | 1154 Register dividend = ToRegister(instr->dividend()); |
1151 int32_t divisor = instr->divisor(); | 1155 int32_t divisor = instr->divisor(); |
1152 Register result = ToRegister(instr->result()); | 1156 Register result = ToRegister(instr->result()); |
1153 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 1157 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); |
1154 DCHECK(!result.is(dividend)); | 1158 DCHECK(!result.is(dividend)); |
1155 | 1159 |
1156 // Check for (0 / -x) that will produce negative zero. | 1160 // Check for (0 / -x) that will produce negative zero. |
1157 HDiv* hdiv = instr->hydrogen(); | 1161 HDiv* hdiv = instr->hydrogen(); |
1158 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1162 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1159 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1163 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1164 Operand(zero_reg)); |
1160 } | 1165 } |
1161 // Check for (kMinInt / -1). | 1166 // Check for (kMinInt / -1). |
1162 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1167 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
1163 DeoptimizeIf(eq, instr, "overflow", dividend, Operand(kMinInt)); | 1168 DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, dividend, Operand(kMinInt)); |
1164 } | 1169 } |
1165 // Deoptimize if remainder will not be 0. | 1170 // Deoptimize if remainder will not be 0. |
1166 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 1171 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && |
1167 divisor != 1 && divisor != -1) { | 1172 divisor != 1 && divisor != -1) { |
1168 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1173 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1169 __ And(at, dividend, Operand(mask)); | 1174 __ And(at, dividend, Operand(mask)); |
1170 DeoptimizeIf(ne, instr, "lost precision", at, Operand(zero_reg)); | 1175 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, at, Operand(zero_reg)); |
1171 } | 1176 } |
1172 | 1177 |
1173 if (divisor == -1) { // Nice shortcut, not needed for correctness. | 1178 if (divisor == -1) { // Nice shortcut, not needed for correctness. |
1174 __ Dsubu(result, zero_reg, dividend); | 1179 __ Dsubu(result, zero_reg, dividend); |
1175 return; | 1180 return; |
1176 } | 1181 } |
1177 uint16_t shift = WhichPowerOf2Abs(divisor); | 1182 uint16_t shift = WhichPowerOf2Abs(divisor); |
1178 if (shift == 0) { | 1183 if (shift == 0) { |
1179 __ Move(result, dividend); | 1184 __ Move(result, dividend); |
1180 } else if (shift == 1) { | 1185 } else if (shift == 1) { |
1181 __ dsrl32(result, dividend, 31); | 1186 __ dsrl32(result, dividend, 31); |
1182 __ Daddu(result, dividend, Operand(result)); | 1187 __ Daddu(result, dividend, Operand(result)); |
1183 } else { | 1188 } else { |
1184 __ dsra32(result, dividend, 31); | 1189 __ dsra32(result, dividend, 31); |
1185 __ dsrl32(result, result, 32 - shift); | 1190 __ dsrl32(result, result, 32 - shift); |
1186 __ Daddu(result, dividend, Operand(result)); | 1191 __ Daddu(result, dividend, Operand(result)); |
1187 } | 1192 } |
1188 if (shift > 0) __ dsra(result, result, shift); | 1193 if (shift > 0) __ dsra(result, result, shift); |
1189 if (divisor < 0) __ Dsubu(result, zero_reg, result); | 1194 if (divisor < 0) __ Dsubu(result, zero_reg, result); |
1190 } | 1195 } |
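The shift-based division above needs a bias because an arithmetic shift rounds toward negative infinity while the division here must truncate toward zero; a scalar sketch (illustrative names, not V8 code):

    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    // Sketch of DoDivByPowerOf2I for divisor != kMinInt. The deopt cases
    // (0 / -x needing -0, kMinInt / -1, nonzero remainder) are omitted.
    int32_t DivByPowerOf2(int32_t dividend, int32_t divisor) {
      assert(divisor != 0 && divisor != INT32_MIN);
      uint32_t abs_divisor = static_cast<uint32_t>(std::abs(divisor));
      assert((abs_divisor & (abs_divisor - 1)) == 0);  // power of two
      int shift = 0;
      while ((1u << shift) < abs_divisor) ++shift;
      int32_t result = dividend;
      if (shift > 0) {
        // For negative dividends add |divisor| - 1 before shifting, obtained by
        // logically shifting the sign mask down; this turns the floor of the
        // arithmetic shift into a truncation.
        uint32_t sign_mask = static_cast<uint32_t>(dividend >> 31);
        result = dividend + static_cast<int32_t>(sign_mask >> (32 - shift));
        result >>= shift;  // arithmetic shift
      }
      return divisor < 0 ? -result : result;
    }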
1191 | 1196 |
1192 | 1197 |
1193 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1198 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
1194 Register dividend = ToRegister(instr->dividend()); | 1199 Register dividend = ToRegister(instr->dividend()); |
1195 int32_t divisor = instr->divisor(); | 1200 int32_t divisor = instr->divisor(); |
1196 Register result = ToRegister(instr->result()); | 1201 Register result = ToRegister(instr->result()); |
1197 DCHECK(!dividend.is(result)); | 1202 DCHECK(!dividend.is(result)); |
1198 | 1203 |
1199 if (divisor == 0) { | 1204 if (divisor == 0) { |
1200 DeoptimizeIf(al, instr, "division by zero"); | 1205 DeoptimizeIf(al, instr, Deoptimizer::kDivisionByZero); |
1201 return; | 1206 return; |
1202 } | 1207 } |
1203 | 1208 |
1204 // Check for (0 / -x) that will produce negative zero. | 1209 // Check for (0 / -x) that will produce negative zero. |
1205 HDiv* hdiv = instr->hydrogen(); | 1210 HDiv* hdiv = instr->hydrogen(); |
1206 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1211 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1207 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1212 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1213 Operand(zero_reg)); |
1208 } | 1214 } |
1209 | 1215 |
1210 __ TruncatingDiv(result, dividend, Abs(divisor)); | 1216 __ TruncatingDiv(result, dividend, Abs(divisor)); |
1211 if (divisor < 0) __ Subu(result, zero_reg, result); | 1217 if (divisor < 0) __ Subu(result, zero_reg, result); |
1212 | 1218 |
1213 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1219 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { |
1214 __ Dmul(scratch0(), result, Operand(divisor)); | 1220 __ Dmul(scratch0(), result, Operand(divisor)); |
1215 __ Dsubu(scratch0(), scratch0(), dividend); | 1221 __ Dsubu(scratch0(), scratch0(), dividend); |
1216 DeoptimizeIf(ne, instr, "lost precision", scratch0(), Operand(zero_reg)); | 1222 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, scratch0(), |
| 1223 Operand(zero_reg)); |
1217 } | 1224 } |
1218 } | 1225 } |
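The constant-divisor path works the same way but must also prove the division was exact when the result is not truncated by all its uses; roughly (a sketch, with '/' standing in for the magic-number TruncatingDiv, std::nullopt marking deopts, and divisor == kMinInt ignored):

    #include <cstdint>
    #include <cstdlib>
    #include <optional>

    // Sketch of DoDivByConstI.
    std::optional<int32_t> DivByConst(int32_t dividend, int32_t divisor,
                                      bool bailout_on_minus_zero,
                                      bool all_uses_truncating) {
      if (divisor == 0) return std::nullopt;  // unconditional "division by zero"
      if (bailout_on_minus_zero && divisor < 0 && dividend == 0) {
        return std::nullopt;  // 0 / -x would have to be -0
      }
      int32_t result = dividend / std::abs(divisor);
      if (divisor < 0) result = -result;
      // "Lost precision": result * divisor must reproduce the dividend exactly.
      if (!all_uses_truncating &&
          static_cast<int64_t>(result) * divisor != dividend) {
        return std::nullopt;
      }
      return result;
    }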
1219 | 1226 |
1220 | 1227 |
1221 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1228 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
1222 void LCodeGen::DoDivI(LDivI* instr) { | 1229 void LCodeGen::DoDivI(LDivI* instr) { |
1223 HBinaryOperation* hdiv = instr->hydrogen(); | 1230 HBinaryOperation* hdiv = instr->hydrogen(); |
1224 Register dividend = ToRegister(instr->dividend()); | 1231 Register dividend = ToRegister(instr->dividend()); |
1225 Register divisor = ToRegister(instr->divisor()); | 1232 Register divisor = ToRegister(instr->divisor()); |
1226 const Register result = ToRegister(instr->result()); | 1233 const Register result = ToRegister(instr->result()); |
1227 | 1234 |
1228 // On MIPS div is asynchronous - it will run in the background while we | 1235 // On MIPS div is asynchronous - it will run in the background while we |
1229 // check for special cases. | 1236 // check for special cases. |
1230 __ Ddiv(result, dividend, divisor); | 1237 __ Ddiv(result, dividend, divisor); |
1231 | 1238 |
1232 // Check for x / 0. | 1239 // Check for x / 0. |
1233 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1240 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1234 DeoptimizeIf(eq, instr, "division by zero", divisor, Operand(zero_reg)); | 1241 DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, divisor, |
| 1242 Operand(zero_reg)); |
1235 } | 1243 } |
1236 | 1244 |
1237 // Check for (0 / -x) that will produce negative zero. | 1245 // Check for (0 / -x) that will produce negative zero. |
1238 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1246 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1239 Label left_not_zero; | 1247 Label left_not_zero; |
1240 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 1248 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); |
1241 DeoptimizeIf(lt, instr, "minus zero", divisor, Operand(zero_reg)); | 1249 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, divisor, |
| 1250 Operand(zero_reg)); |
1242 __ bind(&left_not_zero); | 1251 __ bind(&left_not_zero); |
1243 } | 1252 } |
1244 | 1253 |
1245 // Check for (kMinInt / -1). | 1254 // Check for (kMinInt / -1). |
1246 if (hdiv->CheckFlag(HValue::kCanOverflow) && | 1255 if (hdiv->CheckFlag(HValue::kCanOverflow) && |
1247 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1256 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1248 Label left_not_min_int; | 1257 Label left_not_min_int; |
1249 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 1258 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); |
1250 DeoptimizeIf(eq, instr, "overflow", divisor, Operand(-1)); | 1259 DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, divisor, Operand(-1)); |
1251 __ bind(&left_not_min_int); | 1260 __ bind(&left_not_min_int); |
1252 } | 1261 } |
1253 | 1262 |
1254 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1263 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1255 // Calculate remainder. | 1264 // Calculate remainder. |
1256 Register remainder = ToRegister(instr->temp()); | 1265 Register remainder = ToRegister(instr->temp()); |
1257 if (kArchVariant != kMips64r6) { | 1266 if (kArchVariant != kMips64r6) { |
1258 __ mfhi(remainder); | 1267 __ mfhi(remainder); |
1259 } else { | 1268 } else { |
1260 __ dmod(remainder, dividend, divisor); | 1269 __ dmod(remainder, dividend, divisor); |
1261 } | 1270 } |
1262 DeoptimizeIf(ne, instr, "lost precision", remainder, Operand(zero_reg)); | 1271 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecision, remainder, |
| 1272 Operand(zero_reg)); |
1263 } | 1273 } |
1264 } | 1274 } |
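DoDivI applies the same guards with a register divisor, plus a remainder check when the quotient must be exact; in scalar form (sketch):

    #include <cstdint>
    #include <optional>

    // Sketch of the guards in DoDivI.
    std::optional<int32_t> DivI(int32_t dividend, int32_t divisor,
                                bool bailout_on_minus_zero,
                                bool all_uses_truncating) {
      if (divisor == 0) return std::nullopt;                       // x / 0
      if (bailout_on_minus_zero && dividend == 0 && divisor < 0) { // 0 / -x
        return std::nullopt;
      }
      if (dividend == INT32_MIN && divisor == -1) {                // overflow
        if (!all_uses_truncating) return std::nullopt;
        return INT32_MIN;  // truncating uses accept the wrapped result
      }
      int32_t result = dividend / divisor;
      if (!all_uses_truncating && dividend % divisor != 0) {
        return std::nullopt;  // "lost precision"
      }
      return result;
    }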
1265 | 1275 |
1266 | 1276 |
1267 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { | 1277 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { |
1268 DoubleRegister addend = ToDoubleRegister(instr->addend()); | 1278 DoubleRegister addend = ToDoubleRegister(instr->addend()); |
1269 DoubleRegister multiplier = ToDoubleRegister(instr->multiplier()); | 1279 DoubleRegister multiplier = ToDoubleRegister(instr->multiplier()); |
1270 DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand()); | 1280 DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand()); |
1271 | 1281 |
1272 // This is computed in-place. | 1282 // This is computed in-place. |
(...skipping 24 matching lines...) |
1297 return; | 1307 return; |
1298 } | 1308 } |
1299 | 1309 |
1300 // If the divisor is negative, we have to negate and handle edge cases. | 1310 // If the divisor is negative, we have to negate and handle edge cases. |
1301 // Dividend can be the same register as result so save the value of it | 1311 // Dividend can be the same register as result so save the value of it |
1302 // for checking overflow. | 1312 // for checking overflow. |
1303 __ Move(scratch, dividend); | 1313 __ Move(scratch, dividend); |
1304 | 1314 |
1305 __ Dsubu(result, zero_reg, dividend); | 1315 __ Dsubu(result, zero_reg, dividend); |
1306 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1316 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1307 DeoptimizeIf(eq, instr, "minus zero", result, Operand(zero_reg)); | 1317 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result, Operand(zero_reg)); |
1308 } | 1318 } |
1309 | 1319 |
1310 __ Xor(scratch, scratch, result); | 1320 __ Xor(scratch, scratch, result); |
1311 // Dividing by -1 is basically negation, unless we overflow. | 1321 // Dividing by -1 is basically negation, unless we overflow. |
1312 if (divisor == -1) { | 1322 if (divisor == -1) { |
1313 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1323 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1314 DeoptimizeIf(gt, instr, "overflow", result, Operand(kMaxInt)); | 1324 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, result, Operand(kMaxInt)); |
1315 } | 1325 } |
1316 return; | 1326 return; |
1317 } | 1327 } |
1318 | 1328 |
1319 // If the negation could not overflow, simply shifting is OK. | 1329 // If the negation could not overflow, simply shifting is OK. |
1320 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1330 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1321 __ dsra(result, result, shift); | 1331 __ dsra(result, result, shift); |
1322 return; | 1332 return; |
1323 } | 1333 } |
1324 | 1334 |
1325 Label no_overflow, done; | 1335 Label no_overflow, done; |
1326 __ Branch(&no_overflow, lt, scratch, Operand(zero_reg)); | 1336 __ Branch(&no_overflow, lt, scratch, Operand(zero_reg)); |
1327 __ li(result, Operand(kMinInt / divisor), CONSTANT_SIZE); | 1337 __ li(result, Operand(kMinInt / divisor), CONSTANT_SIZE); |
1328 __ Branch(&done); | 1338 __ Branch(&done); |
1329 __ bind(&no_overflow); | 1339 __ bind(&no_overflow); |
1330 __ dsra(result, result, shift); | 1340 __ dsra(result, result, shift); |
1331 __ bind(&done); | 1341 __ bind(&done); |
1332 } | 1342 } |
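The negative-divisor tail above implements flooring division by -2^shift as negate-then-arithmetic-shift, with the kMinInt negation overflow patched by a precomputed constant; a scalar sketch:

    #include <cassert>
    #include <cstdint>

    // Sketch of the negative-divisor path of DoFlooringDivByPowerOf2I,
    // assuming divisor == -(1 << shift) with shift > 0 (divisor == -1 returns
    // earlier in the real code). The -0 deopt for dividend == 0 is omitted.
    int32_t FlooringDivByNegPowerOf2(int32_t dividend, int shift) {
      assert(shift > 0 && shift < 31);
      if (dividend == INT32_MIN) {
        // -dividend would overflow; the generated code loads the constant
        // kMinInt / divisor instead.
        return INT32_MIN / -(1 << shift);
      }
      int32_t negated = -dividend;
      return negated >> shift;  // arithmetic shift floors toward -infinity
    }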
1333 | 1343 |
1334 | 1344 |
1335 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1345 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
1336 Register dividend = ToRegister(instr->dividend()); | 1346 Register dividend = ToRegister(instr->dividend()); |
1337 int32_t divisor = instr->divisor(); | 1347 int32_t divisor = instr->divisor(); |
1338 Register result = ToRegister(instr->result()); | 1348 Register result = ToRegister(instr->result()); |
1339 DCHECK(!dividend.is(result)); | 1349 DCHECK(!dividend.is(result)); |
1340 | 1350 |
1341 if (divisor == 0) { | 1351 if (divisor == 0) { |
1342 DeoptimizeIf(al, instr, "division by zero"); | 1352 DeoptimizeIf(al, instr, Deoptimizer::kDivisionByZero); |
1343 return; | 1353 return; |
1344 } | 1354 } |
1345 | 1355 |
1346 // Check for (0 / -x) that will produce negative zero. | 1356 // Check for (0 / -x) that will produce negative zero. |
1347 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1357 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
1348 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1358 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1349 DeoptimizeIf(eq, instr, "minus zero", dividend, Operand(zero_reg)); | 1359 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, dividend, |
| 1360 Operand(zero_reg)); |
1350 } | 1361 } |
1351 | 1362 |
1352 // Easy case: We need no dynamic check for the dividend and the flooring | 1363 // Easy case: We need no dynamic check for the dividend and the flooring |
1353 // division is the same as the truncating division. | 1364 // division is the same as the truncating division. |
1354 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1365 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
1355 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1366 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
1356 __ TruncatingDiv(result, dividend, Abs(divisor)); | 1367 __ TruncatingDiv(result, dividend, Abs(divisor)); |
1357 if (divisor < 0) __ Dsubu(result, zero_reg, result); | 1368 if (divisor < 0) __ Dsubu(result, zero_reg, result); |
1358 return; | 1369 return; |
1359 } | 1370 } |
(...skipping 23 matching lines...) |
1383 Register dividend = ToRegister(instr->dividend()); | 1394 Register dividend = ToRegister(instr->dividend()); |
1384 Register divisor = ToRegister(instr->divisor()); | 1395 Register divisor = ToRegister(instr->divisor()); |
1385 const Register result = ToRegister(instr->result()); | 1396 const Register result = ToRegister(instr->result()); |
1386 | 1397 |
1387 // On MIPS div is asynchronous - it will run in the background while we | 1398 // On MIPS div is asynchronous - it will run in the background while we |
1388 // check for special cases. | 1399 // check for special cases. |
1389 __ Ddiv(result, dividend, divisor); | 1400 __ Ddiv(result, dividend, divisor); |
1390 | 1401 |
1391 // Check for x / 0. | 1402 // Check for x / 0. |
1392 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1403 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1393 DeoptimizeIf(eq, instr, "division by zero", divisor, Operand(zero_reg)); | 1404 DeoptimizeIf(eq, instr, Deoptimizer::kDivisionByZero, divisor, |
| 1405 Operand(zero_reg)); |
1394 } | 1406 } |
1395 | 1407 |
1396 // Check for (0 / -x) that will produce negative zero. | 1408 // Check for (0 / -x) that will produce negative zero. |
1397 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1409 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1398 Label left_not_zero; | 1410 Label left_not_zero; |
1399 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); | 1411 __ Branch(&left_not_zero, ne, dividend, Operand(zero_reg)); |
1400 DeoptimizeIf(lt, instr, "minus zero", divisor, Operand(zero_reg)); | 1412 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, divisor, |
| 1413 Operand(zero_reg)); |
1401 __ bind(&left_not_zero); | 1414 __ bind(&left_not_zero); |
1402 } | 1415 } |
1403 | 1416 |
1404 // Check for (kMinInt / -1). | 1417 // Check for (kMinInt / -1). |
1405 if (hdiv->CheckFlag(HValue::kCanOverflow) && | 1418 if (hdiv->CheckFlag(HValue::kCanOverflow) && |
1406 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1419 !hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1407 Label left_not_min_int; | 1420 Label left_not_min_int; |
1408 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); | 1421 __ Branch(&left_not_min_int, ne, dividend, Operand(kMinInt)); |
1409 DeoptimizeIf(eq, instr, "overflow", divisor, Operand(-1)); | 1422 DeoptimizeIf(eq, instr, Deoptimizer::kOverflow, divisor, Operand(-1)); |
1410 __ bind(&left_not_min_int); | 1423 __ bind(&left_not_min_int); |
1411 } | 1424 } |
1412 | 1425 |
1413 // We performed a truncating division. Correct the result if necessary. | 1426 // We performed a truncating division. Correct the result if necessary. |
1414 Label done; | 1427 Label done; |
1415 Register remainder = scratch0(); | 1428 Register remainder = scratch0(); |
1416 if (kArchVariant != kMips64r6) { | 1429 if (kArchVariant != kMips64r6) { |
1417 __ mfhi(remainder); | 1430 __ mfhi(remainder); |
1418 } else { | 1431 } else { |
1419 __ dmod(remainder, dividend, divisor); | 1432 __ dmod(remainder, dividend, divisor); |
(...skipping 16 matching lines...) |
1436 bool bailout_on_minus_zero = | 1449 bool bailout_on_minus_zero = |
1437 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 1450 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
1438 bool overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1451 bool overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
1439 | 1452 |
1440 if (right_op->IsConstantOperand()) { | 1453 if (right_op->IsConstantOperand()) { |
1441 int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); | 1454 int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); |
1442 | 1455 |
1443 if (bailout_on_minus_zero && (constant < 0)) { | 1456 if (bailout_on_minus_zero && (constant < 0)) { |
1444 // The case of a null constant will be handled separately. | 1457 // The case of a null constant will be handled separately. |
1445 // If constant is negative and left is null, the result should be -0. | 1458 // If constant is negative and left is null, the result should be -0. |
1446 DeoptimizeIf(eq, instr, "minus zero", left, Operand(zero_reg)); | 1459 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, left, Operand(zero_reg)); |
1447 } | 1460 } |
1448 | 1461 |
1449 switch (constant) { | 1462 switch (constant) { |
1450 case -1: | 1463 case -1: |
1451 if (overflow) { | 1464 if (overflow) { |
1452 __ SubuAndCheckForOverflow(result, zero_reg, left, scratch); | 1465 __ SubuAndCheckForOverflow(result, zero_reg, left, scratch); |
1453 DeoptimizeIf(gt, instr, "overflow", scratch, Operand(kMaxInt)); | 1466 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, scratch, |
| 1467 Operand(kMaxInt)); |
1454 } else { | 1468 } else { |
1455 __ Dsubu(result, zero_reg, left); | 1469 __ Dsubu(result, zero_reg, left); |
1456 } | 1470 } |
1457 break; | 1471 break; |
1458 case 0: | 1472 case 0: |
1459 if (bailout_on_minus_zero) { | 1473 if (bailout_on_minus_zero) { |
1460 // If left is strictly negative and the constant is null, the | 1474 // If left is strictly negative and the constant is null, the |
1461 // result is -0. Deoptimize if required, otherwise return 0. | 1475 // result is -0. Deoptimize if required, otherwise return 0. |
1462 DeoptimizeIf(lt, instr, "minus zero", left, Operand(zero_reg)); | 1476 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, left, |
| 1477 Operand(zero_reg)); |
1463 } | 1478 } |
1464 __ mov(result, zero_reg); | 1479 __ mov(result, zero_reg); |
1465 break; | 1480 break; |
1466 case 1: | 1481 case 1: |
1467 // Nothing to do. | 1482 // Nothing to do. |
1468 __ Move(result, left); | 1483 __ Move(result, left); |
1469 break; | 1484 break; |
1470 default: | 1485 default: |
1471 // Multiplying by powers of two and powers of two plus or minus | 1486 // Multiplying by powers of two and powers of two plus or minus |
1472 // one can be done faster with shifted operands. | 1487 // one can be done faster with shifted operands. |
(...skipping 34 matching lines...) |
1507 if (instr->hydrogen()->representation().IsSmi()) { | 1522 if (instr->hydrogen()->representation().IsSmi()) { |
1508 __ Dmulh(result, left, right); | 1523 __ Dmulh(result, left, right); |
1509 } else { | 1524 } else { |
1510 __ Dmul(result, left, right); | 1525 __ Dmul(result, left, right); |
1511 } | 1526 } |
1512 __ dsra32(scratch, result, 0); | 1527 __ dsra32(scratch, result, 0); |
1513 __ sra(at, result, 31); | 1528 __ sra(at, result, 31); |
1514 if (instr->hydrogen()->representation().IsSmi()) { | 1529 if (instr->hydrogen()->representation().IsSmi()) { |
1515 __ SmiTag(result); | 1530 __ SmiTag(result); |
1516 } | 1531 } |
1517 DeoptimizeIf(ne, instr, "overflow", scratch, Operand(at)); | 1532 DeoptimizeIf(ne, instr, Deoptimizer::kOverflow, scratch, Operand(at)); |
1518 } else { | 1533 } else { |
1519 if (instr->hydrogen()->representation().IsSmi()) { | 1534 if (instr->hydrogen()->representation().IsSmi()) { |
1520 __ SmiUntag(result, left); | 1535 __ SmiUntag(result, left); |
1521 __ Dmul(result, result, right); | 1536 __ Dmul(result, result, right); |
1522 } else { | 1537 } else { |
1523 __ Dmul(result, left, right); | 1538 __ Dmul(result, left, right); |
1524 } | 1539 } |
1525 } | 1540 } |
1526 | 1541 |
1527 if (bailout_on_minus_zero) { | 1542 if (bailout_on_minus_zero) { |
1528 Label done; | 1543 Label done; |
1529 __ Xor(at, left, right); | 1544 __ Xor(at, left, right); |
1530 __ Branch(&done, ge, at, Operand(zero_reg)); | 1545 __ Branch(&done, ge, at, Operand(zero_reg)); |
1531 // Bail out if the result is minus zero. | 1546 // Bail out if the result is minus zero. |
1532 DeoptimizeIf(eq, instr, "minus zero", result, Operand(zero_reg)); | 1547 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, result, |
| 1548 Operand(zero_reg)); |
1533 __ bind(&done); | 1549 __ bind(&done); |
1534 } | 1550 } |
1535 } | 1551 } |
1536 } | 1552 } |
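The overflow check in DoMulI's register-times-register case above compares the high 32 bits of the 64-bit product with the sign extension of the low 32 bits; equivalently (sketch):

    #include <cstdint>
    #include <optional>

    // Sketch of DoMulI's overflow check: the product fits in an int32 exactly
    // when its high word is the sign extension of its low word.
    std::optional<int32_t> MulWithOverflowCheck(int32_t left, int32_t right) {
      int64_t product = static_cast<int64_t>(left) * right;
      int32_t low = static_cast<int32_t>(product);
      int32_t high = static_cast<int32_t>(product >> 32);
      if (high != (low >> 31)) return std::nullopt;  // overflow -> deopt
      return low;
    }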
1537 | 1553 |
1538 | 1554 |
1539 void LCodeGen::DoBitI(LBitI* instr) { | 1555 void LCodeGen::DoBitI(LBitI* instr) { |
1540 LOperand* left_op = instr->left(); | 1556 LOperand* left_op = instr->left(); |
1541 LOperand* right_op = instr->right(); | 1557 LOperand* right_op = instr->right(); |
1542 DCHECK(left_op->IsRegister()); | 1558 DCHECK(left_op->IsRegister()); |
(...skipping 43 matching lines...) |
1586 case Token::ROR: | 1602 case Token::ROR: |
1587 __ Ror(result, left, Operand(ToRegister(right_op))); | 1603 __ Ror(result, left, Operand(ToRegister(right_op))); |
1588 break; | 1604 break; |
1589 case Token::SAR: | 1605 case Token::SAR: |
1590 __ srav(result, left, ToRegister(right_op)); | 1606 __ srav(result, left, ToRegister(right_op)); |
1591 break; | 1607 break; |
1592 case Token::SHR: | 1608 case Token::SHR: |
1593 __ srlv(result, left, ToRegister(right_op)); | 1609 __ srlv(result, left, ToRegister(right_op)); |
1594 if (instr->can_deopt()) { | 1610 if (instr->can_deopt()) { |
1595 // TODO(yy): (-1) >>> 0. anything else? | 1611 // TODO(yy): (-1) >>> 0. anything else? |
1596 DeoptimizeIf(lt, instr, "negative value", result, Operand(zero_reg)); | 1612 DeoptimizeIf(lt, instr, Deoptimizer::kNegativeValue, result, |
1597 DeoptimizeIf(gt, instr, "negative value", result, Operand(kMaxInt)); | 1613 Operand(zero_reg)); |
| 1614 DeoptimizeIf(gt, instr, Deoptimizer::kNegativeValue, result, |
| 1615 Operand(kMaxInt)); |
1598 } | 1616 } |
1599 break; | 1617 break; |
1600 case Token::SHL: | 1618 case Token::SHL: |
1601 __ sllv(result, left, ToRegister(right_op)); | 1619 __ sllv(result, left, ToRegister(right_op)); |
1602 break; | 1620 break; |
1603 default: | 1621 default: |
1604 UNREACHABLE(); | 1622 UNREACHABLE(); |
1605 break; | 1623 break; |
1606 } | 1624 } |
1607 } else { | 1625 } else { |
(...skipping 14 matching lines...) |
1622 } else { | 1640 } else { |
1623 __ Move(result, left); | 1641 __ Move(result, left); |
1624 } | 1642 } |
1625 break; | 1643 break; |
1626 case Token::SHR: | 1644 case Token::SHR: |
1627 if (shift_count != 0) { | 1645 if (shift_count != 0) { |
1628 __ srl(result, left, shift_count); | 1646 __ srl(result, left, shift_count); |
1629 } else { | 1647 } else { |
1630 if (instr->can_deopt()) { | 1648 if (instr->can_deopt()) { |
1631 __ And(at, left, Operand(0x80000000)); | 1649 __ And(at, left, Operand(0x80000000)); |
1632 DeoptimizeIf(ne, instr, "negative value", at, Operand(zero_reg)); | 1650 DeoptimizeIf(ne, instr, Deoptimizer::kNegativeValue, at, |
| 1651 Operand(zero_reg)); |
1633 } | 1652 } |
1634 __ Move(result, left); | 1653 __ Move(result, left); |
1635 } | 1654 } |
1636 break; | 1655 break; |
1637 case Token::SHL: | 1656 case Token::SHL: |
1638 if (shift_count != 0) { | 1657 if (shift_count != 0) { |
1639 if (instr->hydrogen_value()->representation().IsSmi()) { | 1658 if (instr->hydrogen_value()->representation().IsSmi()) { |
1640 __ dsll(result, left, shift_count); | 1659 __ dsll(result, left, shift_count); |
1641 } else { | 1660 } else { |
1642 __ sll(result, left, shift_count); | 1661 __ sll(result, left, shift_count); |
(...skipping 35 matching lines...) |
1678 overflow); // Reg at also used as scratch. | 1697 overflow); // Reg at also used as scratch. |
1679 } else { | 1698 } else { |
1680 DCHECK(right->IsRegister()); | 1699 DCHECK(right->IsRegister()); |
1681 // Due to overflow check macros not supporting constant operands, | 1700 // Due to overflow check macros not supporting constant operands, |
1682 // handling the IsConstantOperand case was moved to prev if clause. | 1701 // handling the IsConstantOperand case was moved to prev if clause. |
1683 __ SubuAndCheckForOverflow(ToRegister(result), | 1702 __ SubuAndCheckForOverflow(ToRegister(result), |
1684 ToRegister(left), | 1703 ToRegister(left), |
1685 ToRegister(right), | 1704 ToRegister(right), |
1686 overflow); // Reg at also used as scratch. | 1705 overflow); // Reg at also used as scratch. |
1687 } | 1706 } |
1688 DeoptimizeIf(lt, instr, "overflow", overflow, Operand(zero_reg)); | 1707 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, overflow, |
| 1708 Operand(zero_reg)); |
1689 if (!instr->hydrogen()->representation().IsSmi()) { | 1709 if (!instr->hydrogen()->representation().IsSmi()) { |
1690 DeoptimizeIf(gt, instr, "overflow", ToRegister(result), Operand(kMaxInt)); | 1710 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, ToRegister(result), |
1691 DeoptimizeIf(lt, instr, "overflow", ToRegister(result), Operand(kMinInt)); | 1711 Operand(kMaxInt)); |
| 1712 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, ToRegister(result), |
| 1713 Operand(kMinInt)); |
1692 } | 1714 } |
1693 } | 1715 } |
1694 } | 1716 } |
1695 | 1717 |
1696 | 1718 |
1697 void LCodeGen::DoConstantI(LConstantI* instr) { | 1719 void LCodeGen::DoConstantI(LConstantI* instr) { |
1698 __ li(ToRegister(instr->result()), Operand(instr->value())); | 1720 __ li(ToRegister(instr->result()), Operand(instr->value())); |
1699 } | 1721 } |
1700 | 1722 |
1701 | 1723 |
(...skipping 34 matching lines...) |
1736 Register result = ToRegister(instr->result()); | 1758 Register result = ToRegister(instr->result()); |
1737 Register scratch = ToRegister(instr->temp()); | 1759 Register scratch = ToRegister(instr->temp()); |
1738 Smi* index = instr->index(); | 1760 Smi* index = instr->index(); |
1739 Label runtime, done; | 1761 Label runtime, done; |
1740 DCHECK(object.is(a0)); | 1762 DCHECK(object.is(a0)); |
1741 DCHECK(result.is(v0)); | 1763 DCHECK(result.is(v0)); |
1742 DCHECK(!scratch.is(scratch0())); | 1764 DCHECK(!scratch.is(scratch0())); |
1743 DCHECK(!scratch.is(object)); | 1765 DCHECK(!scratch.is(object)); |
1744 | 1766 |
1745 __ SmiTst(object, at); | 1767 __ SmiTst(object, at); |
1746 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 1768 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
1747 __ GetObjectType(object, scratch, scratch); | 1769 __ GetObjectType(object, scratch, scratch); |
1748 DeoptimizeIf(ne, instr, "not a date object", scratch, Operand(JS_DATE_TYPE)); | 1770 DeoptimizeIf(ne, instr, Deoptimizer::kNotADateObject, scratch, |
| 1771 Operand(JS_DATE_TYPE)); |
1749 | 1772 |
1750 if (index->value() == 0) { | 1773 if (index->value() == 0) { |
1751 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset)); | 1774 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset)); |
1752 } else { | 1775 } else { |
1753 if (index->value() < JSDate::kFirstUncachedField) { | 1776 if (index->value() < JSDate::kFirstUncachedField) { |
1754 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 1777 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
1755 __ li(scratch, Operand(stamp)); | 1778 __ li(scratch, Operand(stamp)); |
1756 __ ld(scratch, MemOperand(scratch)); | 1779 __ ld(scratch, MemOperand(scratch)); |
1757 __ ld(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset)); | 1780 __ ld(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset)); |
1758 __ Branch(&runtime, ne, scratch, Operand(scratch0())); | 1781 __ Branch(&runtime, ne, scratch, Operand(scratch0())); |
(...skipping 114 matching lines...) |
1873 overflow); // Reg at also used as scratch. | 1896 overflow); // Reg at also used as scratch. |
1874 } else { | 1897 } else { |
1875 DCHECK(right->IsRegister()); | 1898 DCHECK(right->IsRegister()); |
1876 // Due to overflow check macros not supporting constant operands, | 1899 // Due to overflow check macros not supporting constant operands, |
1877 // handling the IsConstantOperand case was moved to prev if clause. | 1900 // handling the IsConstantOperand case was moved to prev if clause. |
1878 __ AdduAndCheckForOverflow(ToRegister(result), | 1901 __ AdduAndCheckForOverflow(ToRegister(result), |
1879 ToRegister(left), | 1902 ToRegister(left), |
1880 ToRegister(right), | 1903 ToRegister(right), |
1881 overflow); // Reg at also used as scratch. | 1904 overflow); // Reg at also used as scratch. |
1882 } | 1905 } |
1883 DeoptimizeIf(lt, instr, "overflow", overflow, Operand(zero_reg)); | 1906 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, overflow, |
| 1907 Operand(zero_reg)); |
1884 // if not smi, it must int32. | 1908 // if not smi, it must int32. |
1885 if (!instr->hydrogen()->representation().IsSmi()) { | 1909 if (!instr->hydrogen()->representation().IsSmi()) { |
1886 DeoptimizeIf(gt, instr, "overflow", ToRegister(result), Operand(kMaxInt)); | 1910 DeoptimizeIf(gt, instr, Deoptimizer::kOverflow, ToRegister(result), |
1887 DeoptimizeIf(lt, instr, "overflow", ToRegister(result), Operand(kMinInt)); | 1911 Operand(kMaxInt)); |
| 1912 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, ToRegister(result), |
| 1913 Operand(kMinInt)); |
1888 } | 1914 } |
1889 } | 1915 } |
1890 } | 1916 } |
1891 | 1917 |
1892 | 1918 |
1893 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1919 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1894 LOperand* left = instr->left(); | 1920 LOperand* left = instr->left(); |
1895 LOperand* right = instr->right(); | 1921 LOperand* right = instr->right(); |
1896 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1922 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
1897 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; | 1923 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; |
(...skipping 241 matching lines...) |
2139 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); | 2165 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(at)); |
2140 } | 2166 } |
2141 | 2167 |
2142 if (expected.Contains(ToBooleanStub::SMI)) { | 2168 if (expected.Contains(ToBooleanStub::SMI)) { |
2143 // Smis: 0 -> false, all other -> true. | 2169 // Smis: 0 -> false, all other -> true. |
2144 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg)); | 2170 __ Branch(instr->FalseLabel(chunk_), eq, reg, Operand(zero_reg)); |
2145 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2171 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
2146 } else if (expected.NeedsMap()) { | 2172 } else if (expected.NeedsMap()) { |
2147 // If we need a map later and have a Smi -> deopt. | 2173 // If we need a map later and have a Smi -> deopt. |
2148 __ SmiTst(reg, at); | 2174 __ SmiTst(reg, at); |
2149 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 2175 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
2150 } | 2176 } |
2151 | 2177 |
2152 const Register map = scratch0(); | 2178 const Register map = scratch0(); |
2153 if (expected.NeedsMap()) { | 2179 if (expected.NeedsMap()) { |
2154 __ ld(map, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2180 __ ld(map, FieldMemOperand(reg, HeapObject::kMapOffset)); |
2155 if (expected.CanBeUndetectable()) { | 2181 if (expected.CanBeUndetectable()) { |
2156 // Undetectable -> false. | 2182 // Undetectable -> false. |
2157 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); | 2183 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); |
2158 __ And(at, at, Operand(1 << Map::kIsUndetectable)); | 2184 __ And(at, at, Operand(1 << Map::kIsUndetectable)); |
2159 __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg)); | 2185 __ Branch(instr->FalseLabel(chunk_), ne, at, Operand(zero_reg)); |
(...skipping 35 matching lines...) |
2195 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), | 2221 __ BranchF(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), |
2196 ne, dbl_scratch, kDoubleRegZero); | 2222 ne, dbl_scratch, kDoubleRegZero); |
2197 // Falls through if dbl_scratch == 0. | 2223 // Falls through if dbl_scratch == 0. |
2198 __ Branch(instr->FalseLabel(chunk_)); | 2224 __ Branch(instr->FalseLabel(chunk_)); |
2199 __ bind(&not_heap_number); | 2225 __ bind(&not_heap_number); |
2200 } | 2226 } |
2201 | 2227 |
2202 if (!expected.IsGeneric()) { | 2228 if (!expected.IsGeneric()) { |
2203 // We've seen something for the first time -> deopt. | 2229 // We've seen something for the first time -> deopt. |
2204 // This can only happen if we are not generic already. | 2230 // This can only happen if we are not generic already. |
2205 DeoptimizeIf(al, instr, "unexpected object", zero_reg, | 2231 DeoptimizeIf(al, instr, Deoptimizer::kUnexpectedObject, zero_reg, |
2206 Operand(zero_reg)); | 2232 Operand(zero_reg)); |
2207 } | 2233 } |
2208 } | 2234 } |
2209 } | 2235 } |
2210 } | 2236 } |
2211 | 2237 |
2212 | 2238 |
2213 void LCodeGen::EmitGoto(int block) { | 2239 void LCodeGen::EmitGoto(int block) { |
2214 if (!IsNextEmittedBlock(block)) { | 2240 if (!IsNextEmittedBlock(block)) { |
2215 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2241 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
(...skipping 627 matching lines...) |
2843 } | 2869 } |
2844 } | 2870 } |
2845 | 2871 |
2846 | 2872 |
2847 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { | 2873 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { |
2848 Register result = ToRegister(instr->result()); | 2874 Register result = ToRegister(instr->result()); |
2849 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); | 2875 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); |
2850 __ ld(result, FieldMemOperand(at, Cell::kValueOffset)); | 2876 __ ld(result, FieldMemOperand(at, Cell::kValueOffset)); |
2851 if (instr->hydrogen()->RequiresHoleCheck()) { | 2877 if (instr->hydrogen()->RequiresHoleCheck()) { |
2852 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2878 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2853 DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 2879 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); |
2854 } | 2880 } |
2855 } | 2881 } |
2856 | 2882 |
2857 | 2883 |
2858 template <class T> | 2884 template <class T> |
2859 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { | 2885 void LCodeGen::EmitVectorLoadICRegisters(T* instr) { |
2860 DCHECK(FLAG_vector_ics); | 2886 DCHECK(FLAG_vector_ics); |
2861 Register vector_register = ToRegister(instr->temp_vector()); | 2887 Register vector_register = ToRegister(instr->temp_vector()); |
2862 Register slot_register = VectorLoadICDescriptor::SlotRegister(); | 2888 Register slot_register = VectorLoadICDescriptor::SlotRegister(); |
2863 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); | 2889 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister())); |
(...skipping 34 matching lines...) |
2898 | 2924 |
2899 // If the cell we are storing to contains the hole it could have | 2925 // If the cell we are storing to contains the hole it could have |
2900 // been deleted from the property dictionary. In that case, we need | 2926 // been deleted from the property dictionary. In that case, we need |
2901 // to update the property details in the property dictionary to mark | 2927 // to update the property details in the property dictionary to mark |
2902 // it as no longer deleted. | 2928 // it as no longer deleted. |
2903 if (instr->hydrogen()->RequiresHoleCheck()) { | 2929 if (instr->hydrogen()->RequiresHoleCheck()) { |
2904 // We use a temp to check the payload. | 2930 // We use a temp to check the payload. |
2905 Register payload = ToRegister(instr->temp()); | 2931 Register payload = ToRegister(instr->temp()); |
2906 __ ld(payload, FieldMemOperand(cell, Cell::kValueOffset)); | 2932 __ ld(payload, FieldMemOperand(cell, Cell::kValueOffset)); |
2907 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2933 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2908 DeoptimizeIf(eq, instr, "hole", payload, Operand(at)); | 2934 DeoptimizeIf(eq, instr, Deoptimizer::kHole, payload, Operand(at)); |
2909 } | 2935 } |
2910 | 2936 |
2911 // Store the value. | 2937 // Store the value. |
2912 __ sd(value, FieldMemOperand(cell, Cell::kValueOffset)); | 2938 __ sd(value, FieldMemOperand(cell, Cell::kValueOffset)); |
2913 // Cells are always rescanned, so no write barrier here. | 2939 // Cells are always rescanned, so no write barrier here. |
2914 } | 2940 } |
2915 | 2941 |
2916 | 2942 |
2917 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2943 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
2918 Register context = ToRegister(instr->context()); | 2944 Register context = ToRegister(instr->context()); |
2919 Register result = ToRegister(instr->result()); | 2945 Register result = ToRegister(instr->result()); |
2920 | 2946 |
2921 __ ld(result, ContextOperand(context, instr->slot_index())); | 2947 __ ld(result, ContextOperand(context, instr->slot_index())); |
2922 if (instr->hydrogen()->RequiresHoleCheck()) { | 2948 if (instr->hydrogen()->RequiresHoleCheck()) { |
2923 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2949 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2924 | 2950 |
2925 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2951 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2926 DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 2952 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); |
2927 } else { | 2953 } else { |
2928 Label is_not_hole; | 2954 Label is_not_hole; |
2929 __ Branch(&is_not_hole, ne, result, Operand(at)); | 2955 __ Branch(&is_not_hole, ne, result, Operand(at)); |
2930 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 2956 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
2931 __ bind(&is_not_hole); | 2957 __ bind(&is_not_hole); |
2932 } | 2958 } |
2933 } | 2959 } |
2934 } | 2960 } |
2935 | 2961 |
2936 | 2962 |
2937 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2963 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
2938 Register context = ToRegister(instr->context()); | 2964 Register context = ToRegister(instr->context()); |
2939 Register value = ToRegister(instr->value()); | 2965 Register value = ToRegister(instr->value()); |
2940 Register scratch = scratch0(); | 2966 Register scratch = scratch0(); |
2941 MemOperand target = ContextOperand(context, instr->slot_index()); | 2967 MemOperand target = ContextOperand(context, instr->slot_index()); |
2942 | 2968 |
2943 Label skip_assignment; | 2969 Label skip_assignment; |
2944 | 2970 |
2945 if (instr->hydrogen()->RequiresHoleCheck()) { | 2971 if (instr->hydrogen()->RequiresHoleCheck()) { |
2946 __ ld(scratch, target); | 2972 __ ld(scratch, target); |
2947 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2973 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2948 | 2974 |
2949 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2975 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2950 DeoptimizeIf(eq, instr, "hole", scratch, Operand(at)); | 2976 DeoptimizeIf(eq, instr, Deoptimizer::kHole, scratch, Operand(at)); |
2951 } else { | 2977 } else { |
2952 __ Branch(&skip_assignment, ne, scratch, Operand(at)); | 2978 __ Branch(&skip_assignment, ne, scratch, Operand(at)); |
2953 } | 2979 } |
2954 } | 2980 } |
2955 | 2981 |
2956 __ sd(value, target); | 2982 __ sd(value, target); |
2957 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2983 if (instr->hydrogen()->NeedsWriteBarrier()) { |
2958 SmiCheck check_needed = | 2984 SmiCheck check_needed = |
2959 instr->hydrogen()->value()->type().IsHeapObject() | 2985 instr->hydrogen()->value()->type().IsHeapObject() |
2960 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2986 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
(...skipping 74 matching lines...) |
3035 Register scratch = scratch0(); | 3061 Register scratch = scratch0(); |
3036 Register function = ToRegister(instr->function()); | 3062 Register function = ToRegister(instr->function()); |
3037 Register result = ToRegister(instr->result()); | 3063 Register result = ToRegister(instr->result()); |
3038 | 3064 |
3039 // Get the prototype or initial map from the function. | 3065 // Get the prototype or initial map from the function. |
3040 __ ld(result, | 3066 __ ld(result, |
3041 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 3067 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
3042 | 3068 |
3043 // Check that the function has a prototype or an initial map. | 3069 // Check that the function has a prototype or an initial map. |
3044 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3070 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
3045 DeoptimizeIf(eq, instr, "hole", result, Operand(at)); | 3071 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(at)); |
3046 | 3072 |
3047 // If the function does not have an initial map, we're done. | 3073 // If the function does not have an initial map, we're done. |
3048 Label done; | 3074 Label done; |
3049 __ GetObjectType(result, scratch, scratch); | 3075 __ GetObjectType(result, scratch, scratch); |
3050 __ Branch(&done, ne, scratch, Operand(MAP_TYPE)); | 3076 __ Branch(&done, ne, scratch, Operand(MAP_TYPE)); |
3051 | 3077 |
3052 // Get the prototype from the initial map. | 3078 // Get the prototype from the initial map. |
3053 __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset)); | 3079 __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset)); |
3054 | 3080 |
3055 // All done. | 3081 // All done. |
(...skipping 125 matching lines...) |
3181 __ lhu(result, mem_operand); | 3207 __ lhu(result, mem_operand); |
3182 break; | 3208 break; |
3183 case EXTERNAL_INT32_ELEMENTS: | 3209 case EXTERNAL_INT32_ELEMENTS: |
3184 case INT32_ELEMENTS: | 3210 case INT32_ELEMENTS: |
3185 __ lw(result, mem_operand); | 3211 __ lw(result, mem_operand); |
3186 break; | 3212 break; |
3187 case EXTERNAL_UINT32_ELEMENTS: | 3213 case EXTERNAL_UINT32_ELEMENTS: |
3188 case UINT32_ELEMENTS: | 3214 case UINT32_ELEMENTS: |
3189 __ lw(result, mem_operand); | 3215 __ lw(result, mem_operand); |
3190 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 3216 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { |
3191 DeoptimizeIf(Ugreater_equal, instr, "negative value", result, | 3217 DeoptimizeIf(Ugreater_equal, instr, Deoptimizer::kNegativeValue, |
3192 Operand(0x80000000)); | 3218 result, Operand(0x80000000)); |
3193 } | 3219 } |
3194 break; | 3220 break; |
3195 case FLOAT32_ELEMENTS: | 3221 case FLOAT32_ELEMENTS: |
3196 case FLOAT64_ELEMENTS: | 3222 case FLOAT64_ELEMENTS: |
3197 case EXTERNAL_FLOAT32_ELEMENTS: | 3223 case EXTERNAL_FLOAT32_ELEMENTS: |
3198 case EXTERNAL_FLOAT64_ELEMENTS: | 3224 case EXTERNAL_FLOAT64_ELEMENTS: |
3199 case FAST_DOUBLE_ELEMENTS: | 3225 case FAST_DOUBLE_ELEMENTS: |
3200 case FAST_ELEMENTS: | 3226 case FAST_ELEMENTS: |
3201 case FAST_SMI_ELEMENTS: | 3227 case FAST_SMI_ELEMENTS: |
3202 case FAST_HOLEY_DOUBLE_ELEMENTS: | 3228 case FAST_HOLEY_DOUBLE_ELEMENTS: |
(...skipping 39 matching lines...) |
3242 } else { | 3268 } else { |
3243 __ dsra(at, key, -shift_size); | 3269 __ dsra(at, key, -shift_size); |
3244 } | 3270 } |
3245 __ Daddu(scratch, scratch, at); | 3271 __ Daddu(scratch, scratch, at); |
3246 } | 3272 } |
3247 | 3273 |
3248 __ ldc1(result, MemOperand(scratch)); | 3274 __ ldc1(result, MemOperand(scratch)); |
3249 | 3275 |
3250 if (instr->hydrogen()->RequiresHoleCheck()) { | 3276 if (instr->hydrogen()->RequiresHoleCheck()) { |
3251 __ lw(scratch, MemOperand(scratch, sizeof(kHoleNanLower32))); | 3277 __ lw(scratch, MemOperand(scratch, sizeof(kHoleNanLower32))); |
3252 DeoptimizeIf(eq, instr, "hole", scratch, Operand(kHoleNanUpper32)); | 3278 DeoptimizeIf(eq, instr, Deoptimizer::kHole, scratch, |
| 3279 Operand(kHoleNanUpper32)); |
3253 } | 3280 } |
3254 } | 3281 } |
3255 | 3282 |
3256 | 3283 |
3257 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 3284 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
3258 HLoadKeyed* hinstr = instr->hydrogen(); | 3285 HLoadKeyed* hinstr = instr->hydrogen(); |
3259 Register elements = ToRegister(instr->elements()); | 3286 Register elements = ToRegister(instr->elements()); |
3260 Register result = ToRegister(instr->result()); | 3287 Register result = ToRegister(instr->result()); |
3261 Register scratch = scratch0(); | 3288 Register scratch = scratch0(); |
3262 Register store_base = scratch; | 3289 Register store_base = scratch; |
(...skipping 33 matching lines...) |
3296 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 3323 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
3297 offset += kPointerSize / 2; | 3324 offset += kPointerSize / 2; |
3298 } | 3325 } |
3299 | 3326 |
3300 __ Load(result, MemOperand(store_base, offset), representation); | 3327 __ Load(result, MemOperand(store_base, offset), representation); |
3301 | 3328 |
3302 // Check for the hole value. | 3329 // Check for the hole value. |
3303 if (hinstr->RequiresHoleCheck()) { | 3330 if (hinstr->RequiresHoleCheck()) { |
3304 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 3331 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { |
3305 __ SmiTst(result, scratch); | 3332 __ SmiTst(result, scratch); |
3306 DeoptimizeIf(ne, instr, "not a Smi", scratch, Operand(zero_reg)); | 3333 DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, scratch, |
| 3334 Operand(zero_reg)); |
3307 } else { | 3335 } else { |
3308 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | 3336 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); |
3309 DeoptimizeIf(eq, instr, "hole", result, Operand(scratch)); | 3337 DeoptimizeIf(eq, instr, Deoptimizer::kHole, result, Operand(scratch)); |
3310 } | 3338 } |
3311 } | 3339 } |
3312 } | 3340 } |
3313 | 3341 |
3314 | 3342 |
3315 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 3343 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { |
3316 if (instr->is_typed_elements()) { | 3344 if (instr->is_typed_elements()) { |
3317 DoLoadKeyedExternalArray(instr); | 3345 DoLoadKeyedExternalArray(instr); |
3318 } else if (instr->hydrogen()->representation().IsDouble()) { | 3346 } else if (instr->hydrogen()->representation().IsDouble()) { |
3319 DoLoadKeyedFixedDoubleArray(instr); | 3347 DoLoadKeyedFixedDoubleArray(instr); |
(...skipping 135 matching lines...)
3455 } | 3483 } |
3456 | 3484 |
3457 // Normal function. Replace undefined or null with global receiver. | 3485 // Normal function. Replace undefined or null with global receiver. |
3458 __ LoadRoot(scratch, Heap::kNullValueRootIndex); | 3486 __ LoadRoot(scratch, Heap::kNullValueRootIndex); |
3459 __ Branch(&global_object, eq, receiver, Operand(scratch)); | 3487 __ Branch(&global_object, eq, receiver, Operand(scratch)); |
3460 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); | 3488 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); |
3461 __ Branch(&global_object, eq, receiver, Operand(scratch)); | 3489 __ Branch(&global_object, eq, receiver, Operand(scratch)); |
3462 | 3490 |
3463 // Deoptimize if the receiver is not a JS object. | 3491 // Deoptimize if the receiver is not a JS object. |
3464 __ SmiTst(receiver, scratch); | 3492 __ SmiTst(receiver, scratch); |
3465 DeoptimizeIf(eq, instr, "Smi", scratch, Operand(zero_reg)); | 3493 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, scratch, Operand(zero_reg)); |
3466 | 3494 |
3467 __ GetObjectType(receiver, scratch, scratch); | 3495 __ GetObjectType(receiver, scratch, scratch); |
3468 DeoptimizeIf(lt, instr, "not a JavaScript object", scratch, | 3496 DeoptimizeIf(lt, instr, Deoptimizer::kNotAJavaScriptObject, scratch, |
3469 Operand(FIRST_SPEC_OBJECT_TYPE)); | 3497 Operand(FIRST_SPEC_OBJECT_TYPE)); |
3470 __ Branch(&result_in_receiver); | 3498 __ Branch(&result_in_receiver); |
3471 | 3499 |
3472 __ bind(&global_object); | 3500 __ bind(&global_object); |
3473 __ ld(result, FieldMemOperand(function, JSFunction::kContextOffset)); | 3501 __ ld(result, FieldMemOperand(function, JSFunction::kContextOffset)); |
3474 __ ld(result, | 3502 __ ld(result, |
3475 ContextOperand(result, Context::GLOBAL_OBJECT_INDEX)); | 3503 ContextOperand(result, Context::GLOBAL_OBJECT_INDEX)); |
3476 __ ld(result, | 3504 __ ld(result, |
3477 FieldMemOperand(result, GlobalObject::kGlobalProxyOffset)); | 3505 FieldMemOperand(result, GlobalObject::kGlobalProxyOffset)); |
3478 | 3506 |
(...skipping 15 matching lines...)
3494 Register length = ToRegister(instr->length()); | 3522 Register length = ToRegister(instr->length()); |
3495 Register elements = ToRegister(instr->elements()); | 3523 Register elements = ToRegister(instr->elements()); |
3496 Register scratch = scratch0(); | 3524 Register scratch = scratch0(); |
3497 DCHECK(receiver.is(a0)); // Used for parameter count. | 3525 DCHECK(receiver.is(a0)); // Used for parameter count. |
3498 DCHECK(function.is(a1)); // Required by InvokeFunction. | 3526 DCHECK(function.is(a1)); // Required by InvokeFunction. |
3499 DCHECK(ToRegister(instr->result()).is(v0)); | 3527 DCHECK(ToRegister(instr->result()).is(v0)); |
3500 | 3528 |
3501 // Copy the arguments to this function possibly from the | 3529 // Copy the arguments to this function possibly from the |
3502 // adaptor frame below it. | 3530 // adaptor frame below it. |
3503 const uint32_t kArgumentsLimit = 1 * KB; | 3531 const uint32_t kArgumentsLimit = 1 * KB; |
3504 DeoptimizeIf(hi, instr, "too many arguments", length, | 3532 DeoptimizeIf(hi, instr, Deoptimizer::kTooManyArguments, length, |
3505 Operand(kArgumentsLimit)); | 3533 Operand(kArgumentsLimit)); |
3506 | 3534 |
3507 // Push the receiver and use the register to keep the original | 3535 // Push the receiver and use the register to keep the original |
3508 // number of arguments. | 3536 // number of arguments. |
3509 __ push(receiver); | 3537 __ push(receiver); |
3510 __ Move(receiver, length); | 3538 __ Move(receiver, length); |
3511 // The arguments are at a one pointer size offset from elements. | 3539 // The arguments are at a one pointer size offset from elements. |
3512 __ Daddu(elements, elements, Operand(1 * kPointerSize)); | 3540 __ Daddu(elements, elements, Operand(1 * kPointerSize)); |
3513 | 3541 |
3514 // Loop through the arguments pushing them onto the execution | 3542 // Loop through the arguments pushing them onto the execution |
(...skipping 105 matching lines...)
3620 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3648 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
3621 DCHECK(instr->context() != NULL); | 3649 DCHECK(instr->context() != NULL); |
3622 DCHECK(ToRegister(instr->context()).is(cp)); | 3650 DCHECK(ToRegister(instr->context()).is(cp)); |
3623 Register input = ToRegister(instr->value()); | 3651 Register input = ToRegister(instr->value()); |
3624 Register result = ToRegister(instr->result()); | 3652 Register result = ToRegister(instr->result()); |
3625 Register scratch = scratch0(); | 3653 Register scratch = scratch0(); |
3626 | 3654 |
3627 // Deoptimize if not a heap number. | 3655 // Deoptimize if not a heap number. |
3628 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); | 3656 __ ld(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); |
3629 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3657 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
3630 DeoptimizeIf(ne, instr, "not a heap number", scratch, Operand(at)); | 3658 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch, Operand(at)); |
3631 | 3659 |
3632 Label done; | 3660 Label done; |
3633 Register exponent = scratch0(); | 3661 Register exponent = scratch0(); |
3634 scratch = no_reg; | 3662 scratch = no_reg; |
3635 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); | 3663 __ lwu(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); |
3636 // Check the sign of the argument. If the argument is positive, just | 3664 // Check the sign of the argument. If the argument is positive, just |
3637 // return it. | 3665 // return it. |
3638 __ Move(result, input); | 3666 __ Move(result, input); |
3639 __ And(at, exponent, Operand(HeapNumber::kSignMask)); | 3667 __ And(at, exponent, Operand(HeapNumber::kSignMask)); |
3640 __ Branch(&done, eq, at, Operand(zero_reg)); | 3668 __ Branch(&done, eq, at, Operand(zero_reg)); |
(...skipping 46 matching lines...)
3687 | 3715 |
3688 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3716 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
3689 Register input = ToRegister(instr->value()); | 3717 Register input = ToRegister(instr->value()); |
3690 Register result = ToRegister(instr->result()); | 3718 Register result = ToRegister(instr->result()); |
3691 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 3719 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
3692 Label done; | 3720 Label done; |
3693 __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg)); | 3721 __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg)); |
3694 __ mov(result, input); | 3722 __ mov(result, input); |
3695 __ dsubu(result, zero_reg, input); | 3723 __ dsubu(result, zero_reg, input); |
3696 // Overflow if result is still negative, i.e. 0x80000000. | 3724 // Overflow if result is still negative, i.e. 0x80000000. |
3697 DeoptimizeIf(lt, instr, "overflow", result, Operand(zero_reg)); | 3725 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, result, Operand(zero_reg)); |
3698 __ bind(&done); | 3726 __ bind(&done); |
3699 } | 3727 } |
3700 | 3728 |
3701 | 3729 |
3702 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3730 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
3703 // Class for deferred case. | 3731 // Class for deferred case. |
3704 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { | 3732 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode { |
3705 public: | 3733 public: |
3706 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) | 3734 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr) |
3707 : LDeferredCode(codegen), instr_(instr) { } | 3735 : LDeferredCode(codegen), instr_(instr) { } |
(...skipping 34 matching lines...)
3742 Register except_flag = ToRegister(instr->temp()); | 3770 Register except_flag = ToRegister(instr->temp()); |
3743 | 3771 |
3744 __ EmitFPUTruncate(kRoundToMinusInf, | 3772 __ EmitFPUTruncate(kRoundToMinusInf, |
3745 result, | 3773 result, |
3746 input, | 3774 input, |
3747 scratch1, | 3775 scratch1, |
3748 double_scratch0(), | 3776 double_scratch0(), |
3749 except_flag); | 3777 except_flag); |
3750 | 3778 |
3751 // Deopt if the operation did not succeed. | 3779 // Deopt if the operation did not succeed. |
3752 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 3780 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
3753 Operand(zero_reg)); | 3781 Operand(zero_reg)); |
3754 | 3782 |
3755 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3783 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3756 // Test for -0. | 3784 // Test for -0. |
3757 Label done; | 3785 Label done; |
3758 __ Branch(&done, ne, result, Operand(zero_reg)); | 3786 __ Branch(&done, ne, result, Operand(zero_reg)); |
3759 __ mfhc1(scratch1, input); // Get exponent/sign bits. | 3787 __ mfhc1(scratch1, input); // Get exponent/sign bits. |
3760 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 3788 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
3761 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 3789 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 3790 Operand(zero_reg)); |
3762 __ bind(&done); | 3791 __ bind(&done); |
3763 } | 3792 } |
3764 } | 3793 } |
3765 | 3794 |
3766 | 3795 |
3767 void LCodeGen::DoMathRound(LMathRound* instr) { | 3796 void LCodeGen::DoMathRound(LMathRound* instr) { |
3768 DoubleRegister input = ToDoubleRegister(instr->value()); | 3797 DoubleRegister input = ToDoubleRegister(instr->value()); |
3769 Register result = ToRegister(instr->result()); | 3798 Register result = ToRegister(instr->result()); |
3770 DoubleRegister double_scratch1 = ToDoubleRegister(instr->temp()); | 3799 DoubleRegister double_scratch1 = ToDoubleRegister(instr->temp()); |
3771 Register scratch = scratch0(); | 3800 Register scratch = scratch0(); |
(...skipping 12 matching lines...)
3784 __ mov(result, zero_reg); | 3813 __ mov(result, zero_reg); |
3785 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3814 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3786 __ Branch(&check_sign_on_zero); | 3815 __ Branch(&check_sign_on_zero); |
3787 } else { | 3816 } else { |
3788 __ Branch(&done); | 3817 __ Branch(&done); |
3789 } | 3818 } |
3790 __ bind(&skip1); | 3819 __ bind(&skip1); |
3791 | 3820 |
3792 // The following conversion will not work with numbers | 3821 // The following conversion will not work with numbers |
3793 // outside of ]-2^32, 2^32[. | 3822 // outside of ]-2^32, 2^32[. |
3794 DeoptimizeIf(ge, instr, "overflow", scratch, | 3823 DeoptimizeIf(ge, instr, Deoptimizer::kOverflow, scratch, |
3795 Operand(HeapNumber::kExponentBias + 32)); | 3824 Operand(HeapNumber::kExponentBias + 32)); |
3796 | 3825 |
3797 // Save the original sign for later comparison. | 3826 // Save the original sign for later comparison. |
3798 __ And(scratch, result, Operand(HeapNumber::kSignMask)); | 3827 __ And(scratch, result, Operand(HeapNumber::kSignMask)); |
3799 | 3828 |
3800 __ Move(double_scratch0(), 0.5); | 3829 __ Move(double_scratch0(), 0.5); |
3801 __ add_d(double_scratch0(), input, double_scratch0()); | 3830 __ add_d(double_scratch0(), input, double_scratch0()); |
3802 | 3831 |
3803 // Check sign of the result: if the sign changed, the input | 3832 // Check sign of the result: if the sign changed, the input |
3804 // value was in ]0.5, 0[ and the result should be -0. | 3833 // value was in ]0.5, 0[ and the result should be -0. |
3805 __ mfhc1(result, double_scratch0()); | 3834 __ mfhc1(result, double_scratch0()); |
3806 // mfhc1 sign-extends, clear the upper bits. | 3835 // mfhc1 sign-extends, clear the upper bits. |
3807 __ dsll32(result, result, 0); | 3836 __ dsll32(result, result, 0); |
3808 __ dsrl32(result, result, 0); | 3837 __ dsrl32(result, result, 0); |
3809 __ Xor(result, result, Operand(scratch)); | 3838 __ Xor(result, result, Operand(scratch)); |
3810 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3839 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3811 // ARM uses 'mi' here, which is 'lt' | 3840 // ARM uses 'mi' here, which is 'lt' |
3812 DeoptimizeIf(lt, instr, "minus zero", result, Operand(zero_reg)); | 3841 DeoptimizeIf(lt, instr, Deoptimizer::kMinusZero, result, Operand(zero_reg)); |
3813 } else { | 3842 } else { |
3814 Label skip2; | 3843 Label skip2; |
3815 // ARM uses 'mi' here, which is 'lt' | 3844 // ARM uses 'mi' here, which is 'lt' |
3816 // Negating it results in 'ge' | 3845 // Negating it results in 'ge' |
3817 __ Branch(&skip2, ge, result, Operand(zero_reg)); | 3846 __ Branch(&skip2, ge, result, Operand(zero_reg)); |
3818 __ mov(result, zero_reg); | 3847 __ mov(result, zero_reg); |
3819 __ Branch(&done); | 3848 __ Branch(&done); |
3820 __ bind(&skip2); | 3849 __ bind(&skip2); |
3821 } | 3850 } |
3822 | 3851 |
3823 Register except_flag = scratch; | 3852 Register except_flag = scratch; |
3824 __ EmitFPUTruncate(kRoundToMinusInf, | 3853 __ EmitFPUTruncate(kRoundToMinusInf, |
3825 result, | 3854 result, |
3826 double_scratch0(), | 3855 double_scratch0(), |
3827 at, | 3856 at, |
3828 double_scratch1, | 3857 double_scratch1, |
3829 except_flag); | 3858 except_flag); |
3830 | 3859 |
3831 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 3860 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
3832 Operand(zero_reg)); | 3861 Operand(zero_reg)); |
3833 | 3862 |
3834 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3863 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3835 // Test for -0. | 3864 // Test for -0. |
3836 __ Branch(&done, ne, result, Operand(zero_reg)); | 3865 __ Branch(&done, ne, result, Operand(zero_reg)); |
3837 __ bind(&check_sign_on_zero); | 3866 __ bind(&check_sign_on_zero); |
3838 __ mfhc1(scratch, input); // Get exponent/sign bits. | 3867 __ mfhc1(scratch, input); // Get exponent/sign bits. |
3839 __ And(scratch, scratch, Operand(HeapNumber::kSignMask)); | 3868 __ And(scratch, scratch, Operand(HeapNumber::kSignMask)); |
3840 DeoptimizeIf(ne, instr, "minus zero", scratch, Operand(zero_reg)); | 3869 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch, |
| 3870 Operand(zero_reg)); |
3841 } | 3871 } |
3842 __ bind(&done); | 3872 __ bind(&done); |
3843 } | 3873 } |
3844 | 3874 |
3845 | 3875 |
3846 void LCodeGen::DoMathFround(LMathFround* instr) { | 3876 void LCodeGen::DoMathFround(LMathFround* instr) { |
3847 DoubleRegister input = ToDoubleRegister(instr->value()); | 3877 DoubleRegister input = ToDoubleRegister(instr->value()); |
3848 DoubleRegister result = ToDoubleRegister(instr->result()); | 3878 DoubleRegister result = ToDoubleRegister(instr->result()); |
3849 __ cvt_s_d(result, input); | 3879 __ cvt_s_d(result, input); |
3850 __ cvt_d_s(result, result); | 3880 __ cvt_d_s(result, result); |
(...skipping 45 matching lines...)
3896 | 3926 |
3897 if (exponent_type.IsSmi()) { | 3927 if (exponent_type.IsSmi()) { |
3898 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3928 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3899 __ CallStub(&stub); | 3929 __ CallStub(&stub); |
3900 } else if (exponent_type.IsTagged()) { | 3930 } else if (exponent_type.IsTagged()) { |
3901 Label no_deopt; | 3931 Label no_deopt; |
3902 __ JumpIfSmi(tagged_exponent, &no_deopt); | 3932 __ JumpIfSmi(tagged_exponent, &no_deopt); |
3903 DCHECK(!a7.is(tagged_exponent)); | 3933 DCHECK(!a7.is(tagged_exponent)); |
3904 __ lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); | 3934 __ lw(a7, FieldMemOperand(tagged_exponent, HeapObject::kMapOffset)); |
3905 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3935 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
3906 DeoptimizeIf(ne, instr, "not a heap number", a7, Operand(at)); | 3936 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, a7, Operand(at)); |
3907 __ bind(&no_deopt); | 3937 __ bind(&no_deopt); |
3908 MathPowStub stub(isolate(), MathPowStub::TAGGED); | 3938 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
3909 __ CallStub(&stub); | 3939 __ CallStub(&stub); |
3910 } else if (exponent_type.IsInteger32()) { | 3940 } else if (exponent_type.IsInteger32()) { |
3911 MathPowStub stub(isolate(), MathPowStub::INTEGER); | 3941 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
3912 __ CallStub(&stub); | 3942 __ CallStub(&stub); |
3913 } else { | 3943 } else { |
3914 DCHECK(exponent_type.IsDouble()); | 3944 DCHECK(exponent_type.IsDouble()); |
3915 MathPowStub stub(isolate(), MathPowStub::DOUBLE); | 3945 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
3916 __ CallStub(&stub); | 3946 __ CallStub(&stub); |
(...skipping 370 matching lines...)
4287 } else { | 4317 } else { |
4288 reg = ToRegister(instr->index()); | 4318 reg = ToRegister(instr->index()); |
4289 operand = ToOperand(instr->length()); | 4319 operand = ToOperand(instr->length()); |
4290 } | 4320 } |
4291 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 4321 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { |
4292 Label done; | 4322 Label done; |
4293 __ Branch(&done, NegateCondition(cc), reg, operand); | 4323 __ Branch(&done, NegateCondition(cc), reg, operand); |
4294 __ stop("eliminated bounds check failed"); | 4324 __ stop("eliminated bounds check failed"); |
4295 __ bind(&done); | 4325 __ bind(&done); |
4296 } else { | 4326 } else { |
4297 DeoptimizeIf(cc, instr, "out of bounds", reg, operand); | 4327 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds, reg, operand); |
4298 } | 4328 } |
4299 } | 4329 } |
4300 | 4330 |
4301 | 4331 |
4302 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4332 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
4303 Register external_pointer = ToRegister(instr->elements()); | 4333 Register external_pointer = ToRegister(instr->elements()); |
4304 Register key = no_reg; | 4334 Register key = no_reg; |
4305 ElementsKind elements_kind = instr->elements_kind(); | 4335 ElementsKind elements_kind = instr->elements_kind(); |
4306 bool key_is_constant = instr->key()->IsConstantOperand(); | 4336 bool key_is_constant = instr->key()->IsConstantOperand(); |
4307 int constant_key = 0; | 4337 int constant_key = 0; |
(...skipping 270 matching lines...)
4578 __ bind(&not_applicable); | 4608 __ bind(&not_applicable); |
4579 } | 4609 } |
4580 | 4610 |
4581 | 4611 |
4582 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4612 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4583 Register object = ToRegister(instr->object()); | 4613 Register object = ToRegister(instr->object()); |
4584 Register temp = ToRegister(instr->temp()); | 4614 Register temp = ToRegister(instr->temp()); |
4585 Label no_memento_found; | 4615 Label no_memento_found; |
4586 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, | 4616 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, |
4587 ne, &no_memento_found); | 4617 ne, &no_memento_found); |
4588 DeoptimizeIf(al, instr, "memento found"); | 4618 DeoptimizeIf(al, instr, Deoptimizer::kMementoFound); |
4589 __ bind(&no_memento_found); | 4619 __ bind(&no_memento_found); |
4590 } | 4620 } |
4591 | 4621 |
4592 | 4622 |
4593 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4623 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
4594 DCHECK(ToRegister(instr->context()).is(cp)); | 4624 DCHECK(ToRegister(instr->context()).is(cp)); |
4595 DCHECK(ToRegister(instr->left()).is(a1)); | 4625 DCHECK(ToRegister(instr->left()).is(a1)); |
4596 DCHECK(ToRegister(instr->right()).is(a0)); | 4626 DCHECK(ToRegister(instr->right()).is(a0)); |
4597 StringAddStub stub(isolate(), | 4627 StringAddStub stub(isolate(), |
4598 instr->hydrogen()->flags(), | 4628 instr->hydrogen()->flags(), |
(...skipping 282 matching lines...)
4881 } | 4911 } |
4882 | 4912 |
4883 | 4913 |
4884 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4914 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
4885 HChange* hchange = instr->hydrogen(); | 4915 HChange* hchange = instr->hydrogen(); |
4886 Register input = ToRegister(instr->value()); | 4916 Register input = ToRegister(instr->value()); |
4887 Register output = ToRegister(instr->result()); | 4917 Register output = ToRegister(instr->result()); |
4888 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4918 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4889 hchange->value()->CheckFlag(HValue::kUint32)) { | 4919 hchange->value()->CheckFlag(HValue::kUint32)) { |
4890 __ And(at, input, Operand(0x80000000)); | 4920 __ And(at, input, Operand(0x80000000)); |
4891 DeoptimizeIf(ne, instr, "overflow", at, Operand(zero_reg)); | 4921 DeoptimizeIf(ne, instr, Deoptimizer::kOverflow, at, Operand(zero_reg)); |
4892 } | 4922 } |
4893 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4923 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4894 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4924 !hchange->value()->CheckFlag(HValue::kUint32)) { |
4895 __ SmiTagCheckOverflow(output, input, at); | 4925 __ SmiTagCheckOverflow(output, input, at); |
4896 DeoptimizeIf(lt, instr, "overflow", at, Operand(zero_reg)); | 4926 DeoptimizeIf(lt, instr, Deoptimizer::kOverflow, at, Operand(zero_reg)); |
4897 } else { | 4927 } else { |
4898 __ SmiTag(output, input); | 4928 __ SmiTag(output, input); |
4899 } | 4929 } |
4900 } | 4930 } |
4901 | 4931 |
4902 | 4932 |
4903 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4933 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
4904 Register scratch = scratch0(); | 4934 Register scratch = scratch0(); |
4905 Register input = ToRegister(instr->value()); | 4935 Register input = ToRegister(instr->value()); |
4906 Register result = ToRegister(instr->result()); | 4936 Register result = ToRegister(instr->result()); |
4907 if (instr->needs_check()) { | 4937 if (instr->needs_check()) { |
4908 STATIC_ASSERT(kHeapObjectTag == 1); | 4938 STATIC_ASSERT(kHeapObjectTag == 1); |
4909 // If the input is a HeapObject, value of scratch won't be zero. | 4939 // If the input is a HeapObject, value of scratch won't be zero. |
4910 __ And(scratch, input, Operand(kHeapObjectTag)); | 4940 __ And(scratch, input, Operand(kHeapObjectTag)); |
4911 __ SmiUntag(result, input); | 4941 __ SmiUntag(result, input); |
4912 DeoptimizeIf(ne, instr, "not a Smi", scratch, Operand(zero_reg)); | 4942 DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, scratch, Operand(zero_reg)); |
4913 } else { | 4943 } else { |
4914 __ SmiUntag(result, input); | 4944 __ SmiUntag(result, input); |
4915 } | 4945 } |
4916 } | 4946 } |
4917 | 4947 |
4918 | 4948 |
4919 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, | 4949 void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg, |
4920 DoubleRegister result_reg, | 4950 DoubleRegister result_reg, |
4921 NumberUntagDMode mode) { | 4951 NumberUntagDMode mode) { |
4922 bool can_convert_undefined_to_nan = | 4952 bool can_convert_undefined_to_nan = |
4923 instr->hydrogen()->can_convert_undefined_to_nan(); | 4953 instr->hydrogen()->can_convert_undefined_to_nan(); |
4924 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 4954 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); |
4925 | 4955 |
4926 Register scratch = scratch0(); | 4956 Register scratch = scratch0(); |
4927 Label convert, load_smi, done; | 4957 Label convert, load_smi, done; |
4928 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 4958 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
4929 // Smi check. | 4959 // Smi check. |
4930 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); | 4960 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi); |
4931 // Heap number map check. | 4961 // Heap number map check. |
4932 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4962 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
4933 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 4963 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
4934 if (can_convert_undefined_to_nan) { | 4964 if (can_convert_undefined_to_nan) { |
4935 __ Branch(&convert, ne, scratch, Operand(at)); | 4965 __ Branch(&convert, ne, scratch, Operand(at)); |
4936 } else { | 4966 } else { |
4937 DeoptimizeIf(ne, instr, "not a heap number", scratch, Operand(at)); | 4967 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch, |
| 4968 Operand(at)); |
4938 } | 4969 } |
4939 // Load heap number. | 4970 // Load heap number. |
4940 __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 4971 __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
4941 if (deoptimize_on_minus_zero) { | 4972 if (deoptimize_on_minus_zero) { |
4942 __ mfc1(at, result_reg); | 4973 __ mfc1(at, result_reg); |
4943 __ Branch(&done, ne, at, Operand(zero_reg)); | 4974 __ Branch(&done, ne, at, Operand(zero_reg)); |
4944 __ mfhc1(scratch, result_reg); // Get exponent/sign bits. | 4975 __ mfhc1(scratch, result_reg); // Get exponent/sign bits. |
4945 DeoptimizeIf(eq, instr, "minus zero", scratch, | 4976 DeoptimizeIf(eq, instr, Deoptimizer::kMinusZero, scratch, |
4946 Operand(HeapNumber::kSignMask)); | 4977 Operand(HeapNumber::kSignMask)); |
4947 } | 4978 } |
4948 __ Branch(&done); | 4979 __ Branch(&done); |
4949 if (can_convert_undefined_to_nan) { | 4980 if (can_convert_undefined_to_nan) { |
4950 __ bind(&convert); | 4981 __ bind(&convert); |
4951 // Convert undefined (and hole) to NaN. | 4982 // Convert undefined (and hole) to NaN. |
4952 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4983 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
4953 DeoptimizeIf(ne, instr, "not a heap number/undefined", input_reg, | 4984 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefined, input_reg, |
4954 Operand(at)); | 4985 Operand(at)); |
4955 __ LoadRoot(scratch, Heap::kNanValueRootIndex); | 4986 __ LoadRoot(scratch, Heap::kNanValueRootIndex); |
4956 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); | 4987 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); |
4957 __ Branch(&done); | 4988 __ Branch(&done); |
4958 } | 4989 } |
4959 } else { | 4990 } else { |
4960 __ SmiUntag(scratch, input_reg); | 4991 __ SmiUntag(scratch, input_reg); |
4961 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4992 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
4962 } | 4993 } |
4963 // Smi to double register conversion | 4994 // Smi to double register conversion |
(...skipping 44 matching lines...)
5008 __ mov(input_reg, zero_reg); // In delay slot. | 5039 __ mov(input_reg, zero_reg); // In delay slot. |
5009 | 5040 |
5010 __ bind(&check_bools); | 5041 __ bind(&check_bools); |
5011 __ LoadRoot(at, Heap::kTrueValueRootIndex); | 5042 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
5012 __ Branch(&check_false, ne, scratch2, Operand(at)); | 5043 __ Branch(&check_false, ne, scratch2, Operand(at)); |
5013 __ Branch(USE_DELAY_SLOT, &done); | 5044 __ Branch(USE_DELAY_SLOT, &done); |
5014 __ li(input_reg, Operand(1)); // In delay slot. | 5045 __ li(input_reg, Operand(1)); // In delay slot. |
5015 | 5046 |
5016 __ bind(&check_false); | 5047 __ bind(&check_false); |
5017 __ LoadRoot(at, Heap::kFalseValueRootIndex); | 5048 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
5018 DeoptimizeIf(ne, instr, "not a heap number/undefined/true/false", scratch2, | 5049 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefinedTrueFalse, |
5019 Operand(at)); | 5050 scratch2, Operand(at)); |
5020 __ Branch(USE_DELAY_SLOT, &done); | 5051 __ Branch(USE_DELAY_SLOT, &done); |
5021 __ mov(input_reg, zero_reg); // In delay slot. | 5052 __ mov(input_reg, zero_reg); // In delay slot. |
5022 } else { | 5053 } else { |
5023 DeoptimizeIf(ne, instr, "not a heap number", scratch1, Operand(at)); | 5054 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber, scratch1, |
| 5055 Operand(at)); |
5024 | 5056 |
5025 // Load the double value. | 5057 // Load the double value. |
5026 __ ldc1(double_scratch, | 5058 __ ldc1(double_scratch, |
5027 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 5059 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
5028 | 5060 |
5029 Register except_flag = scratch2; | 5061 Register except_flag = scratch2; |
5030 __ EmitFPUTruncate(kRoundToZero, | 5062 __ EmitFPUTruncate(kRoundToZero, |
5031 input_reg, | 5063 input_reg, |
5032 double_scratch, | 5064 double_scratch, |
5033 scratch1, | 5065 scratch1, |
5034 double_scratch2, | 5066 double_scratch2, |
5035 except_flag, | 5067 except_flag, |
5036 kCheckForInexactConversion); | 5068 kCheckForInexactConversion); |
5037 | 5069 |
5038 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5070 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
5039 Operand(zero_reg)); | 5071 Operand(zero_reg)); |
5040 | 5072 |
5041 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5073 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5042 __ Branch(&done, ne, input_reg, Operand(zero_reg)); | 5074 __ Branch(&done, ne, input_reg, Operand(zero_reg)); |
5043 | 5075 |
5044 __ mfhc1(scratch1, double_scratch); // Get exponent/sign bits. | 5076 __ mfhc1(scratch1, double_scratch); // Get exponent/sign bits. |
5045 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5077 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
5046 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5078 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 5079 Operand(zero_reg)); |
5047 } | 5080 } |
5048 } | 5081 } |
5049 __ bind(&done); | 5082 __ bind(&done); |
5050 } | 5083 } |
5051 | 5084 |
5052 | 5085 |
5053 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 5086 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
5054 class DeferredTaggedToI FINAL : public LDeferredCode { | 5087 class DeferredTaggedToI FINAL : public LDeferredCode { |
5055 public: | 5088 public: |
5056 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) | 5089 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
(...skipping 55 matching lines...)
5112 | 5145 |
5113 __ EmitFPUTruncate(kRoundToMinusInf, | 5146 __ EmitFPUTruncate(kRoundToMinusInf, |
5114 result_reg, | 5147 result_reg, |
5115 double_input, | 5148 double_input, |
5116 scratch1, | 5149 scratch1, |
5117 double_scratch0(), | 5150 double_scratch0(), |
5118 except_flag, | 5151 except_flag, |
5119 kCheckForInexactConversion); | 5152 kCheckForInexactConversion); |
5120 | 5153 |
5121 // Deopt if the operation did not succeed (except_flag != 0). | 5154 // Deopt if the operation did not succeed (except_flag != 0). |
5122 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5155 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
5123 Operand(zero_reg)); | 5156 Operand(zero_reg)); |
5124 | 5157 |
5125 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5158 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5126 Label done; | 5159 Label done; |
5127 __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 5160 __ Branch(&done, ne, result_reg, Operand(zero_reg)); |
5128 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. | 5161 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. |
5129 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5162 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
5130 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5163 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 5164 Operand(zero_reg)); |
5131 __ bind(&done); | 5165 __ bind(&done); |
5132 } | 5166 } |
5133 } | 5167 } |
5134 } | 5168 } |
5135 | 5169 |
5136 | 5170 |
5137 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 5171 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
5138 Register result_reg = ToRegister(instr->result()); | 5172 Register result_reg = ToRegister(instr->result()); |
5139 Register scratch1 = LCodeGen::scratch0(); | 5173 Register scratch1 = LCodeGen::scratch0(); |
5140 DoubleRegister double_input = ToDoubleRegister(instr->value()); | 5174 DoubleRegister double_input = ToDoubleRegister(instr->value()); |
5141 | 5175 |
5142 if (instr->truncating()) { | 5176 if (instr->truncating()) { |
5143 __ TruncateDoubleToI(result_reg, double_input); | 5177 __ TruncateDoubleToI(result_reg, double_input); |
5144 } else { | 5178 } else { |
5145 Register except_flag = LCodeGen::scratch1(); | 5179 Register except_flag = LCodeGen::scratch1(); |
5146 | 5180 |
5147 __ EmitFPUTruncate(kRoundToMinusInf, | 5181 __ EmitFPUTruncate(kRoundToMinusInf, |
5148 result_reg, | 5182 result_reg, |
5149 double_input, | 5183 double_input, |
5150 scratch1, | 5184 scratch1, |
5151 double_scratch0(), | 5185 double_scratch0(), |
5152 except_flag, | 5186 except_flag, |
5153 kCheckForInexactConversion); | 5187 kCheckForInexactConversion); |
5154 | 5188 |
5155 // Deopt if the operation did not succeed (except_flag != 0). | 5189 // Deopt if the operation did not succeed (except_flag != 0). |
5156 DeoptimizeIf(ne, instr, "lost precision or NaN", except_flag, | 5190 DeoptimizeIf(ne, instr, Deoptimizer::kLostPrecisionOrNaN, except_flag, |
5157 Operand(zero_reg)); | 5191 Operand(zero_reg)); |
5158 | 5192 |
5159 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5193 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5160 Label done; | 5194 Label done; |
5161 __ Branch(&done, ne, result_reg, Operand(zero_reg)); | 5195 __ Branch(&done, ne, result_reg, Operand(zero_reg)); |
5162 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. | 5196 __ mfhc1(scratch1, double_input); // Get exponent/sign bits. |
5163 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); | 5197 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); |
5164 DeoptimizeIf(ne, instr, "minus zero", scratch1, Operand(zero_reg)); | 5198 DeoptimizeIf(ne, instr, Deoptimizer::kMinusZero, scratch1, |
| 5199 Operand(zero_reg)); |
5165 __ bind(&done); | 5200 __ bind(&done); |
5166 } | 5201 } |
5167 } | 5202 } |
5168 __ SmiTag(result_reg, result_reg); | 5203 __ SmiTag(result_reg, result_reg); |
5169 } | 5204 } |
5170 | 5205 |
5171 | 5206 |
5172 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 5207 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
5173 LOperand* input = instr->value(); | 5208 LOperand* input = instr->value(); |
5174 __ SmiTst(ToRegister(input), at); | 5209 __ SmiTst(ToRegister(input), at); |
5175 DeoptimizeIf(ne, instr, "not a Smi", at, Operand(zero_reg)); | 5210 DeoptimizeIf(ne, instr, Deoptimizer::kNotASmi, at, Operand(zero_reg)); |
5176 } | 5211 } |
5177 | 5212 |
5178 | 5213 |
5179 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 5214 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
5180 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 5215 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
5181 LOperand* input = instr->value(); | 5216 LOperand* input = instr->value(); |
5182 __ SmiTst(ToRegister(input), at); | 5217 __ SmiTst(ToRegister(input), at); |
5183 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 5218 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
5184 } | 5219 } |
5185 } | 5220 } |
5186 | 5221 |
5187 | 5222 |
5188 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 5223 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
5189 Register input = ToRegister(instr->value()); | 5224 Register input = ToRegister(instr->value()); |
5190 Register scratch = scratch0(); | 5225 Register scratch = scratch0(); |
5191 | 5226 |
5192 __ GetObjectType(input, scratch, scratch); | 5227 __ GetObjectType(input, scratch, scratch); |
5193 | 5228 |
5194 if (instr->hydrogen()->is_interval_check()) { | 5229 if (instr->hydrogen()->is_interval_check()) { |
5195 InstanceType first; | 5230 InstanceType first; |
5196 InstanceType last; | 5231 InstanceType last; |
5197 instr->hydrogen()->GetCheckInterval(&first, &last); | 5232 instr->hydrogen()->GetCheckInterval(&first, &last); |
5198 | 5233 |
5199 // If there is only one type in the interval check for equality. | 5234 // If there is only one type in the interval check for equality. |
5200 if (first == last) { | 5235 if (first == last) { |
5201 DeoptimizeIf(ne, instr, "wrong instance type", scratch, Operand(first)); | 5236 DeoptimizeIf(ne, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5237 Operand(first)); |
5202 } else { | 5238 } else { |
5203 DeoptimizeIf(lo, instr, "wrong instance type", scratch, Operand(first)); | 5239 DeoptimizeIf(lo, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5240 Operand(first)); |
5204 // Omit check for the last type. | 5241 // Omit check for the last type. |
5205 if (last != LAST_TYPE) { | 5242 if (last != LAST_TYPE) { |
5206 DeoptimizeIf(hi, instr, "wrong instance type", scratch, Operand(last)); | 5243 DeoptimizeIf(hi, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5244 Operand(last)); |
5207 } | 5245 } |
5208 } | 5246 } |
5209 } else { | 5247 } else { |
5210 uint8_t mask; | 5248 uint8_t mask; |
5211 uint8_t tag; | 5249 uint8_t tag; |
5212 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 5250 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
5213 | 5251 |
5214 if (base::bits::IsPowerOfTwo32(mask)) { | 5252 if (base::bits::IsPowerOfTwo32(mask)) { |
5215 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 5253 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); |
5216 __ And(at, scratch, mask); | 5254 __ And(at, scratch, mask); |
5217 DeoptimizeIf(tag == 0 ? ne : eq, instr, "wrong instance type", at, | 5255 DeoptimizeIf(tag == 0 ? ne : eq, instr, Deoptimizer::kWrongInstanceType, |
5218 Operand(zero_reg)); | 5256 at, Operand(zero_reg)); |
5219 } else { | 5257 } else { |
5220 __ And(scratch, scratch, Operand(mask)); | 5258 __ And(scratch, scratch, Operand(mask)); |
5221 DeoptimizeIf(ne, instr, "wrong instance type", scratch, Operand(tag)); | 5259 DeoptimizeIf(ne, instr, Deoptimizer::kWrongInstanceType, scratch, |
| 5260 Operand(tag)); |
5222 } | 5261 } |
5223 } | 5262 } |
5224 } | 5263 } |
5225 | 5264 |
5226 | 5265 |
5227 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 5266 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
5228 Register reg = ToRegister(instr->value()); | 5267 Register reg = ToRegister(instr->value()); |
5229 Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 5268 Handle<HeapObject> object = instr->hydrogen()->object().handle(); |
5230 AllowDeferredHandleDereference smi_check; | 5269 AllowDeferredHandleDereference smi_check; |
5231 if (isolate()->heap()->InNewSpace(*object)) { | 5270 if (isolate()->heap()->InNewSpace(*object)) { |
5232 Register reg = ToRegister(instr->value()); | 5271 Register reg = ToRegister(instr->value()); |
5233 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 5272 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
5234 __ li(at, Operand(Handle<Object>(cell))); | 5273 __ li(at, Operand(Handle<Object>(cell))); |
5235 __ ld(at, FieldMemOperand(at, Cell::kValueOffset)); | 5274 __ ld(at, FieldMemOperand(at, Cell::kValueOffset)); |
5236 DeoptimizeIf(ne, instr, "value mismatch", reg, Operand(at)); | 5275 DeoptimizeIf(ne, instr, Deoptimizer::kValueMismatch, reg, Operand(at)); |
5237 } else { | 5276 } else { |
5238 DeoptimizeIf(ne, instr, "value mismatch", reg, Operand(object)); | 5277 DeoptimizeIf(ne, instr, Deoptimizer::kValueMismatch, reg, Operand(object)); |
5239 } | 5278 } |
5240 } | 5279 } |
5241 | 5280 |
5242 | 5281 |
5243 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5282 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
5244 { | 5283 { |
5245 PushSafepointRegistersScope scope(this); | 5284 PushSafepointRegistersScope scope(this); |
5246 __ push(object); | 5285 __ push(object); |
5247 __ mov(cp, zero_reg); | 5286 __ mov(cp, zero_reg); |
5248 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 5287 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); |
5249 RecordSafepointWithRegisters( | 5288 RecordSafepointWithRegisters( |
5250 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 5289 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
5251 __ StoreToSafepointRegisterSlot(v0, scratch0()); | 5290 __ StoreToSafepointRegisterSlot(v0, scratch0()); |
5252 } | 5291 } |
5253 __ SmiTst(scratch0(), at); | 5292 __ SmiTst(scratch0(), at); |
5254 DeoptimizeIf(eq, instr, "instance migration failed", at, Operand(zero_reg)); | 5293 DeoptimizeIf(eq, instr, Deoptimizer::kInstanceMigrationFailed, at, |
| 5294 Operand(zero_reg)); |
5255 } | 5295 } |
5256 | 5296 |
5257 | 5297 |
5258 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5298 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
5259 class DeferredCheckMaps FINAL : public LDeferredCode { | 5299 class DeferredCheckMaps FINAL : public LDeferredCode { |
5260 public: | 5300 public: |
5261 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 5301 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
5262 : LDeferredCode(codegen), instr_(instr), object_(object) { | 5302 : LDeferredCode(codegen), instr_(instr), object_(object) { |
5263 SetExit(check_maps()); | 5303 SetExit(check_maps()); |
5264 } | 5304 } |
(...skipping 33 matching lines...)
5298 Label success; | 5338 Label success; |
5299 for (int i = 0; i < maps->size() - 1; i++) { | 5339 for (int i = 0; i < maps->size() - 1; i++) { |
5300 Handle<Map> map = maps->at(i).handle(); | 5340 Handle<Map> map = maps->at(i).handle(); |
5301 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); | 5341 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); |
5302 } | 5342 } |
5303 Handle<Map> map = maps->at(maps->size() - 1).handle(); | 5343 Handle<Map> map = maps->at(maps->size() - 1).handle(); |
5304 // Do the CompareMap() directly within the Branch() and DeoptimizeIf(). | 5344 // Do the CompareMap() directly within the Branch() and DeoptimizeIf(). |
5305 if (instr->hydrogen()->HasMigrationTarget()) { | 5345 if (instr->hydrogen()->HasMigrationTarget()) { |
5306 __ Branch(deferred->entry(), ne, map_reg, Operand(map)); | 5346 __ Branch(deferred->entry(), ne, map_reg, Operand(map)); |
5307 } else { | 5347 } else { |
5308 DeoptimizeIf(ne, instr, "wrong map", map_reg, Operand(map)); | 5348 DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, map_reg, Operand(map)); |
5309 } | 5349 } |
5310 | 5350 |
5311 __ bind(&success); | 5351 __ bind(&success); |
5312 } | 5352 } |
5313 | 5353 |
5314 | 5354 |
5315 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5355 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
5316 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5356 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped()); |
5317 Register result_reg = ToRegister(instr->result()); | 5357 Register result_reg = ToRegister(instr->result()); |
5318 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); | 5358 DoubleRegister temp_reg = ToDoubleRegister(instr->temp()); |
(...skipping 17 matching lines...)
5336 | 5376 |
5337 // Both smi and heap number cases are handled. | 5377 // Both smi and heap number cases are handled. |
5338 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); | 5378 __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi); |
5339 | 5379 |
5340 // Check for heap number | 5380 // Check for heap number |
5341 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 5381 __ ld(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
5342 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); | 5382 __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map())); |
5343 | 5383 |
5344 // Check for undefined. Undefined is converted to zero for clamping | 5384 // Check for undefined. Undefined is converted to zero for clamping |
5345 // conversions. | 5385 // conversions. |
5346 DeoptimizeIf(ne, instr, "not a heap number/undefined", input_reg, | 5386 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumberUndefined, input_reg, |
5347 Operand(factory()->undefined_value())); | 5387 Operand(factory()->undefined_value())); |
5348 __ mov(result_reg, zero_reg); | 5388 __ mov(result_reg, zero_reg); |
5349 __ jmp(&done); | 5389 __ jmp(&done); |
5350 | 5390 |
5351 // Heap number | 5391 // Heap number |
5352 __ bind(&heap_number); | 5392 __ bind(&heap_number); |
5353 __ ldc1(double_scratch0(), FieldMemOperand(input_reg, | 5393 __ ldc1(double_scratch0(), FieldMemOperand(input_reg, |
5354 HeapNumber::kValueOffset)); | 5394 HeapNumber::kValueOffset)); |
5355 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); | 5395 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); |
5356 __ jmp(&done); | 5396 __ jmp(&done); |
(...skipping 495 matching lines...)
5852 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5892 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
5853 | 5893 |
5854 GenerateOsrPrologue(); | 5894 GenerateOsrPrologue(); |
5855 } | 5895 } |
5856 | 5896 |
5857 | 5897 |
5858 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5898 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
5859 Register result = ToRegister(instr->result()); | 5899 Register result = ToRegister(instr->result()); |
5860 Register object = ToRegister(instr->object()); | 5900 Register object = ToRegister(instr->object()); |
5861 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5901 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
5862 DeoptimizeIf(eq, instr, "undefined", object, Operand(at)); | 5902 DeoptimizeIf(eq, instr, Deoptimizer::kUndefined, object, Operand(at)); |
5863 | 5903 |
5864 Register null_value = a5; | 5904 Register null_value = a5; |
5865 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 5905 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
5866 DeoptimizeIf(eq, instr, "null", object, Operand(null_value)); | 5906 DeoptimizeIf(eq, instr, Deoptimizer::kNull, object, Operand(null_value)); |
5867 | 5907 |
5868 __ And(at, object, kSmiTagMask); | 5908 __ And(at, object, kSmiTagMask); |
5869 DeoptimizeIf(eq, instr, "Smi", at, Operand(zero_reg)); | 5909 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, at, Operand(zero_reg)); |
5870 | 5910 |
5871 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5911 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
5872 __ GetObjectType(object, a1, a1); | 5912 __ GetObjectType(object, a1, a1); |
5873 DeoptimizeIf(le, instr, "not a JavaScript object", a1, | 5913 DeoptimizeIf(le, instr, Deoptimizer::kNotAJavaScriptObject, a1, |
5874 Operand(LAST_JS_PROXY_TYPE)); | 5914 Operand(LAST_JS_PROXY_TYPE)); |
5875 | 5915 |
5876 Label use_cache, call_runtime; | 5916 Label use_cache, call_runtime; |
5877 DCHECK(object.is(a0)); | 5917 DCHECK(object.is(a0)); |
5878 __ CheckEnumCache(null_value, &call_runtime); | 5918 __ CheckEnumCache(null_value, &call_runtime); |
5879 | 5919 |
5880 __ ld(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 5920 __ ld(result, FieldMemOperand(object, HeapObject::kMapOffset)); |
5881 __ Branch(&use_cache); | 5921 __ Branch(&use_cache); |
5882 | 5922 |
5883 // Get the set of properties to enumerate. | 5923 // Get the set of properties to enumerate. |
5884 __ bind(&call_runtime); | 5924 __ bind(&call_runtime); |
5885 __ push(object); | 5925 __ push(object); |
5886 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); | 5926 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); |
5887 | 5927 |
5888 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 5928 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
5889 DCHECK(result.is(v0)); | 5929 DCHECK(result.is(v0)); |
5890 __ LoadRoot(at, Heap::kMetaMapRootIndex); | 5930 __ LoadRoot(at, Heap::kMetaMapRootIndex); |
5891 DeoptimizeIf(ne, instr, "wrong map", a1, Operand(at)); | 5931 DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, a1, Operand(at)); |
5892 __ bind(&use_cache); | 5932 __ bind(&use_cache); |
5893 } | 5933 } |
5894 | 5934 |
5895 | 5935 |
5896 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 5936 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { |
5897 Register map = ToRegister(instr->map()); | 5937 Register map = ToRegister(instr->map()); |
5898 Register result = ToRegister(instr->result()); | 5938 Register result = ToRegister(instr->result()); |
5899 Label load_cache, done; | 5939 Label load_cache, done; |
5900 __ EnumLength(result, map); | 5940 __ EnumLength(result, map); |
5901 __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0))); | 5941 __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0))); |
5902 __ li(result, Operand(isolate()->factory()->empty_fixed_array())); | 5942 __ li(result, Operand(isolate()->factory()->empty_fixed_array())); |
5903 __ jmp(&done); | 5943 __ jmp(&done); |
5904 | 5944 |
5905 __ bind(&load_cache); | 5945 __ bind(&load_cache); |
5906 __ LoadInstanceDescriptors(map, result); | 5946 __ LoadInstanceDescriptors(map, result); |
5907 __ ld(result, | 5947 __ ld(result, |
5908 FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); | 5948 FieldMemOperand(result, DescriptorArray::kEnumCacheOffset)); |
5909 __ ld(result, | 5949 __ ld(result, |
5910 FieldMemOperand(result, FixedArray::SizeFor(instr->idx()))); | 5950 FieldMemOperand(result, FixedArray::SizeFor(instr->idx()))); |
5911 DeoptimizeIf(eq, instr, "no cache", result, Operand(zero_reg)); | 5951 DeoptimizeIf(eq, instr, Deoptimizer::kNoCache, result, Operand(zero_reg)); |
5912 | 5952 |
5913 __ bind(&done); | 5953 __ bind(&done); |
5914 } | 5954 } |
5915 | 5955 |
5916 | 5956 |
5917 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5957 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5918 Register object = ToRegister(instr->value()); | 5958 Register object = ToRegister(instr->value()); |
5919 Register map = ToRegister(instr->map()); | 5959 Register map = ToRegister(instr->map()); |
5920 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); | 5960 __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); |
5921 DeoptimizeIf(ne, instr, "wrong map", map, Operand(scratch0())); | 5961 DeoptimizeIf(ne, instr, Deoptimizer::kWrongMap, map, Operand(scratch0())); |
5922 } | 5962 } |
5923 | 5963 |
5924 | 5964 |
5925 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5965 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
5926 Register result, | 5966 Register result, |
5927 Register object, | 5967 Register object, |
5928 Register index) { | 5968 Register index) { |
5929 PushSafepointRegistersScope scope(this); | 5969 PushSafepointRegistersScope scope(this); |
5930 __ Push(object, index); | 5970 __ Push(object, index); |
5931 __ mov(cp, zero_reg); | 5971 __ mov(cp, zero_reg); |
(...skipping 74 matching lines...)
6006 __ li(at, scope_info); | 6046 __ li(at, scope_info); |
6007 __ Push(at, ToRegister(instr->function())); | 6047 __ Push(at, ToRegister(instr->function())); |
6008 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6048 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6009 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6049 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6010 } | 6050 } |
6011 | 6051 |
6012 | 6052 |
6013 #undef __ | 6053 #undef __ |
6014 | 6054 |
6015 } } // namespace v8::internal | 6055 } } // namespace v8::internal |