OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/crankshaft/x87/lithium-codegen-x87.h" | 7 #include "src/crankshaft/x87/lithium-codegen-x87.h" |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 934 matching lines...) |
945 WriteTranslation(environment, &translation); | 945 WriteTranslation(environment, &translation); |
946 int deoptimization_index = deoptimizations_.length(); | 946 int deoptimization_index = deoptimizations_.length(); |
947 int pc_offset = masm()->pc_offset(); | 947 int pc_offset = masm()->pc_offset(); |
948 environment->Register(deoptimization_index, | 948 environment->Register(deoptimization_index, |
949 translation.index(), | 949 translation.index(), |
950 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 950 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
951 deoptimizations_.Add(environment, zone()); | 951 deoptimizations_.Add(environment, zone()); |
952 } | 952 } |
953 } | 953 } |
954 | 954 |
955 | |
956 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 955 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
957 Deoptimizer::DeoptReason deopt_reason, | 956 DeoptimizeReason deopt_reason, |
958 Deoptimizer::BailoutType bailout_type) { | 957 Deoptimizer::BailoutType bailout_type) { |
959 LEnvironment* environment = instr->environment(); | 958 LEnvironment* environment = instr->environment(); |
960 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 959 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
961 DCHECK(environment->HasBeenRegistered()); | 960 DCHECK(environment->HasBeenRegistered()); |
962 int id = environment->deoptimization_index(); | 961 int id = environment->deoptimization_index(); |
963 Address entry = | 962 Address entry = |
964 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 963 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
965 if (entry == NULL) { | 964 if (entry == NULL) { |
966 Abort(kBailoutWasNotPrepared); | 965 Abort(kBailoutWasNotPrepared); |
967 return; | 966 return; |
(...skipping 65 matching lines...) |
1033 jump_table_.Add(table_entry, zone()); | 1032 jump_table_.Add(table_entry, zone()); |
1034 } | 1033 } |
1035 if (cc == no_condition) { | 1034 if (cc == no_condition) { |
1036 __ jmp(&jump_table_.last().label); | 1035 __ jmp(&jump_table_.last().label); |
1037 } else { | 1036 } else { |
1038 __ j(cc, &jump_table_.last().label); | 1037 __ j(cc, &jump_table_.last().label); |
1039 } | 1038 } |
1040 } | 1039 } |
1041 } | 1040 } |
1042 | 1041 |
1043 | |
1044 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, | 1042 void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr, |
1045 Deoptimizer::DeoptReason deopt_reason) { | 1043 DeoptimizeReason deopt_reason) { |
1046 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 1044 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
1047 ? Deoptimizer::LAZY | 1045 ? Deoptimizer::LAZY |
1048 : Deoptimizer::EAGER; | 1046 : Deoptimizer::EAGER; |
1049 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); | 1047 DeoptimizeIf(cc, instr, deopt_reason, bailout_type); |
1050 } | 1048 } |
1051 | 1049 |
1052 | 1050 |
1053 void LCodeGen::RecordSafepointWithLazyDeopt( | 1051 void LCodeGen::RecordSafepointWithLazyDeopt( |
1054 LInstruction* instr, SafepointMode safepoint_mode) { | 1052 LInstruction* instr, SafepointMode safepoint_mode) { |
1055 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { | 1053 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
(...skipping 120 matching lines...) |
1176 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1174 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1177 Label dividend_is_not_negative, done; | 1175 Label dividend_is_not_negative, done; |
1178 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { | 1176 if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) { |
1179 __ test(dividend, dividend); | 1177 __ test(dividend, dividend); |
1180 __ j(not_sign, ÷nd_is_not_negative, Label::kNear); | 1178 __ j(not_sign, ÷nd_is_not_negative, Label::kNear); |
1181 // Note that this is correct even for kMinInt operands. | 1179 // Note that this is correct even for kMinInt operands. |
1182 __ neg(dividend); | 1180 __ neg(dividend); |
1183 __ and_(dividend, mask); | 1181 __ and_(dividend, mask); |
1184 __ neg(dividend); | 1182 __ neg(dividend); |
1185 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1183 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1186 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1184 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
1187 } | 1185 } |
1188 __ jmp(&done, Label::kNear); | 1186 __ jmp(&done, Label::kNear); |
1189 } | 1187 } |
1190 | 1188 |
1191 __ bind(÷nd_is_not_negative); | 1189 __ bind(÷nd_is_not_negative); |
1192 __ and_(dividend, mask); | 1190 __ and_(dividend, mask); |
1193 __ bind(&done); | 1191 __ bind(&done); |
1194 } | 1192 } |
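The masking sequence above implements truncated (JavaScript-style) remainder for a power-of-two divisor: non-negative dividends are simply masked, negative ones are negated, masked, and negated back. A minimal C++ sketch of the same idea (hypothetical helper, not part of this patch; unsigned negation is used so the kMinInt case stays well-defined):

#include <cassert>
#include <cstdint>

int32_t ModByPowerOf2(int32_t dividend, int32_t divisor) {
  // mask == |divisor| - 1, computed the same way as in DoModByPowerOf2I.
  int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
  if (dividend < 0) {
    // Negate in unsigned arithmetic so kMinInt does not overflow; the mask
    // keeps only the low bits, which is all the remainder needs.
    uint32_t negated = 0u - static_cast<uint32_t>(dividend);
    // A result of 0 here is the -0 case that triggers the kMinusZero deopt.
    return -static_cast<int32_t>(negated & static_cast<uint32_t>(mask));
  }
  return dividend & mask;
}

int main() {
  assert(ModByPowerOf2(7, 4) == 3);
  assert(ModByPowerOf2(-7, 4) == -3);
  assert(ModByPowerOf2(-8, -4) == 0);  // sign of the divisor is irrelevant
}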
1195 | 1193 |
1196 | 1194 |
1197 void LCodeGen::DoModByConstI(LModByConstI* instr) { | 1195 void LCodeGen::DoModByConstI(LModByConstI* instr) { |
1198 Register dividend = ToRegister(instr->dividend()); | 1196 Register dividend = ToRegister(instr->dividend()); |
1199 int32_t divisor = instr->divisor(); | 1197 int32_t divisor = instr->divisor(); |
1200 DCHECK(ToRegister(instr->result()).is(eax)); | 1198 DCHECK(ToRegister(instr->result()).is(eax)); |
1201 | 1199 |
1202 if (divisor == 0) { | 1200 if (divisor == 0) { |
1203 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1201 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
1204 return; | 1202 return; |
1205 } | 1203 } |
1206 | 1204 |
1207 __ TruncatingDiv(dividend, Abs(divisor)); | 1205 __ TruncatingDiv(dividend, Abs(divisor)); |
1208 __ imul(edx, edx, Abs(divisor)); | 1206 __ imul(edx, edx, Abs(divisor)); |
1209 __ mov(eax, dividend); | 1207 __ mov(eax, dividend); |
1210 __ sub(eax, edx); | 1208 __ sub(eax, edx); |
1211 | 1209 |
1212 // Check for negative zero. | 1210 // Check for negative zero. |
1213 HMod* hmod = instr->hydrogen(); | 1211 HMod* hmod = instr->hydrogen(); |
1214 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1212 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1215 Label remainder_not_zero; | 1213 Label remainder_not_zero; |
1216 __ j(not_zero, &remainder_not_zero, Label::kNear); | 1214 __ j(not_zero, &remainder_not_zero, Label::kNear); |
1217 __ cmp(dividend, Immediate(0)); | 1215 __ cmp(dividend, Immediate(0)); |
1218 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); | 1216 DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero); |
1219 __ bind(&remainder_not_zero); | 1217 __ bind(&remainder_not_zero); |
1220 } | 1218 } |
1221 } | 1219 } |
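DoModByConstI recovers the remainder from a truncating division by the constant: r = dividend - quotient * |divisor| (the sign of a truncated remainder follows the dividend, so only |divisor| matters). A sketch of that identity, assuming the divisor is neither 0 (deopts above) nor kMinInt (a power of two, handled by DoModByPowerOf2I):

#include <cassert>
#include <cstdint>
#include <cstdlib>

int32_t ModByConst(int32_t dividend, int32_t divisor) {
  int32_t abs_divisor = std::abs(divisor);
  int32_t quotient = dividend / abs_divisor;  // TruncatingDiv in the codegen
  int32_t remainder = dividend - quotient * abs_divisor;
  // remainder == 0 with a negative dividend is the -0 case checked above.
  return remainder;
}

int main() {
  assert(ModByConst(7, 3) == 1);
  assert(ModByConst(-7, 3) == -1);
  assert(ModByConst(7, -3) == 1);
}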
1222 | 1220 |
1223 | 1221 |
1224 void LCodeGen::DoModI(LModI* instr) { | 1222 void LCodeGen::DoModI(LModI* instr) { |
1225 HMod* hmod = instr->hydrogen(); | 1223 HMod* hmod = instr->hydrogen(); |
1226 | 1224 |
1227 Register left_reg = ToRegister(instr->left()); | 1225 Register left_reg = ToRegister(instr->left()); |
1228 DCHECK(left_reg.is(eax)); | 1226 DCHECK(left_reg.is(eax)); |
1229 Register right_reg = ToRegister(instr->right()); | 1227 Register right_reg = ToRegister(instr->right()); |
1230 DCHECK(!right_reg.is(eax)); | 1228 DCHECK(!right_reg.is(eax)); |
1231 DCHECK(!right_reg.is(edx)); | 1229 DCHECK(!right_reg.is(edx)); |
1232 Register result_reg = ToRegister(instr->result()); | 1230 Register result_reg = ToRegister(instr->result()); |
1233 DCHECK(result_reg.is(edx)); | 1231 DCHECK(result_reg.is(edx)); |
1234 | 1232 |
1235 Label done; | 1233 Label done; |
1236 // Check for x % 0, idiv would signal a divide error. We have to | 1234 // Check for x % 0, idiv would signal a divide error. We have to |
1237 // deopt in this case because we can't return a NaN. | 1235 // deopt in this case because we can't return a NaN. |
1238 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { | 1236 if (hmod->CheckFlag(HValue::kCanBeDivByZero)) { |
1239 __ test(right_reg, Operand(right_reg)); | 1237 __ test(right_reg, Operand(right_reg)); |
1240 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1238 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
1241 } | 1239 } |
1242 | 1240 |
1243 // Check for kMinInt % -1, idiv would signal a divide error. We | 1241 // Check for kMinInt % -1, idiv would signal a divide error. We |
1244 // have to deopt if we care about -0, because we can't return that. | 1242 // have to deopt if we care about -0, because we can't return that. |
1245 if (hmod->CheckFlag(HValue::kCanOverflow)) { | 1243 if (hmod->CheckFlag(HValue::kCanOverflow)) { |
1246 Label no_overflow_possible; | 1244 Label no_overflow_possible; |
1247 __ cmp(left_reg, kMinInt); | 1245 __ cmp(left_reg, kMinInt); |
1248 __ j(not_equal, &no_overflow_possible, Label::kNear); | 1246 __ j(not_equal, &no_overflow_possible, Label::kNear); |
1249 __ cmp(right_reg, -1); | 1247 __ cmp(right_reg, -1); |
1250 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1248 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1251 DeoptimizeIf(equal, instr, Deoptimizer::kMinusZero); | 1249 DeoptimizeIf(equal, instr, DeoptimizeReason::kMinusZero); |
1252 } else { | 1250 } else { |
1253 __ j(not_equal, &no_overflow_possible, Label::kNear); | 1251 __ j(not_equal, &no_overflow_possible, Label::kNear); |
1254 __ Move(result_reg, Immediate(0)); | 1252 __ Move(result_reg, Immediate(0)); |
1255 __ jmp(&done, Label::kNear); | 1253 __ jmp(&done, Label::kNear); |
1256 } | 1254 } |
1257 __ bind(&no_overflow_possible); | 1255 __ bind(&no_overflow_possible); |
1258 } | 1256 } |
1259 | 1257 |
1260 // Sign extend dividend in eax into edx:eax. | 1258 // Sign extend dividend in eax into edx:eax. |
1261 __ cdq(); | 1259 __ cdq(); |
1262 | 1260 |
1263 // If we care about -0, test if the dividend is <0 and the result is 0. | 1261 // If we care about -0, test if the dividend is <0 and the result is 0. |
1264 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1262 if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1265 Label positive_left; | 1263 Label positive_left; |
1266 __ test(left_reg, Operand(left_reg)); | 1264 __ test(left_reg, Operand(left_reg)); |
1267 __ j(not_sign, &positive_left, Label::kNear); | 1265 __ j(not_sign, &positive_left, Label::kNear); |
1268 __ idiv(right_reg); | 1266 __ idiv(right_reg); |
1269 __ test(result_reg, Operand(result_reg)); | 1267 __ test(result_reg, Operand(result_reg)); |
1270 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1268 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
1271 __ jmp(&done, Label::kNear); | 1269 __ jmp(&done, Label::kNear); |
1272 __ bind(&positive_left); | 1270 __ bind(&positive_left); |
1273 } | 1271 } |
1274 __ idiv(right_reg); | 1272 __ idiv(right_reg); |
1275 __ bind(&done); | 1273 __ bind(&done); |
1276 } | 1274 } |
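The two guards in DoModI exist because idiv raises a divide error (#DE) for exactly two inputs: a zero divisor and kMinInt / -1. A sketch of the semantics the fast path implements once those inputs are filtered out:

#include <cstdint>
#include <limits>

// Returns false where the generated code would deoptimize for x % 0.
bool ModI(int32_t left, int32_t right, int32_t* result) {
  if (right == 0) return false;  // idiv would fault; no NaN in int32 land
  if (left == std::numeric_limits<int32_t>::min() && right == -1) {
    // idiv would also fault here. The remainder is mathematically 0, but a
    // negative dividend with a zero remainder means -0, hence the optional
    // kMinusZero deopt in the generated code.
    *result = 0;
    return true;
  }
  *result = left % right;  // a 0 result with negative left is again the -0 case
  return true;
}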
1277 | 1275 |
1278 | 1276 |
1279 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { | 1277 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) { |
1280 Register dividend = ToRegister(instr->dividend()); | 1278 Register dividend = ToRegister(instr->dividend()); |
1281 int32_t divisor = instr->divisor(); | 1279 int32_t divisor = instr->divisor(); |
1282 Register result = ToRegister(instr->result()); | 1280 Register result = ToRegister(instr->result()); |
1283 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); | 1281 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor))); |
1284 DCHECK(!result.is(dividend)); | 1282 DCHECK(!result.is(dividend)); |
1285 | 1283 |
1286 // Check for (0 / -x) that will produce negative zero. | 1284 // Check for (0 / -x) that will produce negative zero. |
1287 HDiv* hdiv = instr->hydrogen(); | 1285 HDiv* hdiv = instr->hydrogen(); |
1288 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1286 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1289 __ test(dividend, dividend); | 1287 __ test(dividend, dividend); |
1290 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1288 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
1291 } | 1289 } |
1292 // Check for (kMinInt / -1). | 1290 // Check for (kMinInt / -1). |
1293 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { | 1291 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) { |
1294 __ cmp(dividend, kMinInt); | 1292 __ cmp(dividend, kMinInt); |
1295 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1293 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
1296 } | 1294 } |
1297 // Deoptimize if remainder will not be 0. | 1295 // Deoptimize if remainder will not be 0. |
1298 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && | 1296 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) && |
1299 divisor != 1 && divisor != -1) { | 1297 divisor != 1 && divisor != -1) { |
1300 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); | 1298 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1); |
1301 __ test(dividend, Immediate(mask)); | 1299 __ test(dividend, Immediate(mask)); |
1302 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); | 1300 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision); |
1303 } | 1301 } |
1304 __ Move(result, dividend); | 1302 __ Move(result, dividend); |
1305 int32_t shift = WhichPowerOf2Abs(divisor); | 1303 int32_t shift = WhichPowerOf2Abs(divisor); |
1306 if (shift > 0) { | 1304 if (shift > 0) { |
1307 // The arithmetic shift is always OK, the 'if' is an optimization only. | 1305 // The arithmetic shift is always OK, the 'if' is an optimization only. |
1308 if (shift > 1) __ sar(result, 31); | 1306 if (shift > 1) __ sar(result, 31); |
1309 __ shr(result, 32 - shift); | 1307 __ shr(result, 32 - shift); |
1310 __ add(result, dividend); | 1308 __ add(result, dividend); |
1311 __ sar(result, shift); | 1309 __ sar(result, shift); |
1312 } | 1310 } |
1313 if (divisor < 0) __ neg(result); | 1311 if (divisor < 0) __ neg(result); |
1314 } | 1312 } |
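The shift sequence at the end of DoDivByPowerOf2I is the classic round-toward-zero division by 2^shift: negative dividends are biased by 2^shift - 1 before the arithmetic shift (the sar/shr pair materializes that bias from the sign bit), and a negative divisor is handled by negating the quotient. A sketch, assuming an arithmetic right shift of negative values (true on the targets this backend supports):

#include <cassert>
#include <cstdint>

int32_t DivByPowerOf2(int32_t dividend, int32_t divisor, int32_t shift) {
  // Bias negative dividends so the arithmetic shift truncates toward zero
  // instead of toward negative infinity.
  uint32_t bias = dividend < 0 ? (1u << shift) - 1 : 0;
  int32_t result =
      static_cast<int32_t>(static_cast<uint32_t>(dividend) + bias) >> shift;
  return divisor < 0 ? -result : result;
}

int main() {
  assert(DivByPowerOf2(-7, 4, 2) == -1);   // truncated, not floored (-2)
  assert(DivByPowerOf2(7, -4, 2) == -1);
  assert(DivByPowerOf2(-8, 4, 2) == -2);
}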
1315 | 1313 |
1316 | 1314 |
1317 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { | 1315 void LCodeGen::DoDivByConstI(LDivByConstI* instr) { |
1318 Register dividend = ToRegister(instr->dividend()); | 1316 Register dividend = ToRegister(instr->dividend()); |
1319 int32_t divisor = instr->divisor(); | 1317 int32_t divisor = instr->divisor(); |
1320 DCHECK(ToRegister(instr->result()).is(edx)); | 1318 DCHECK(ToRegister(instr->result()).is(edx)); |
1321 | 1319 |
1322 if (divisor == 0) { | 1320 if (divisor == 0) { |
1323 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1321 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
1324 return; | 1322 return; |
1325 } | 1323 } |
1326 | 1324 |
1327 // Check for (0 / -x) that will produce negative zero. | 1325 // Check for (0 / -x) that will produce negative zero. |
1328 HDiv* hdiv = instr->hydrogen(); | 1326 HDiv* hdiv = instr->hydrogen(); |
1329 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1327 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1330 __ test(dividend, dividend); | 1328 __ test(dividend, dividend); |
1331 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1329 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
1332 } | 1330 } |
1333 | 1331 |
1334 __ TruncatingDiv(dividend, Abs(divisor)); | 1332 __ TruncatingDiv(dividend, Abs(divisor)); |
1335 if (divisor < 0) __ neg(edx); | 1333 if (divisor < 0) __ neg(edx); |
1336 | 1334 |
1337 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { | 1335 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) { |
1338 __ mov(eax, edx); | 1336 __ mov(eax, edx); |
1339 __ imul(eax, eax, divisor); | 1337 __ imul(eax, eax, divisor); |
1340 __ sub(eax, dividend); | 1338 __ sub(eax, dividend); |
1341 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); | 1339 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision); |
1342 } | 1340 } |
1343 } | 1341 } |
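The tail of DoDivByConstI checks that the constant division was exact when the result is not allowed to lose precision: multiply the quotient back (32-bit wrapping imul) and compare against the dividend. A sketch of that check, assuming divisor != 0 and divisor != kMinInt:

#include <cstdint>
#include <cstdlib>

// Returns true when the division is exact; false corresponds to the
// DeoptimizeReason::kLostPrecision bailout.
bool ExactDivByConst(int32_t dividend, int32_t divisor, int32_t* quotient) {
  int32_t q = dividend / std::abs(divisor);  // TruncatingDiv
  if (divisor < 0) q = -q;
  *quotient = q;
  // Wrapping 32-bit multiply, matching the imul in the generated code.
  uint32_t back = static_cast<uint32_t>(q) * static_cast<uint32_t>(divisor);
  return static_cast<int32_t>(back) == dividend;
}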
1344 | 1342 |
1345 | 1343 |
1346 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. | 1344 // TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI. |
1347 void LCodeGen::DoDivI(LDivI* instr) { | 1345 void LCodeGen::DoDivI(LDivI* instr) { |
1348 HBinaryOperation* hdiv = instr->hydrogen(); | 1346 HBinaryOperation* hdiv = instr->hydrogen(); |
1349 Register dividend = ToRegister(instr->dividend()); | 1347 Register dividend = ToRegister(instr->dividend()); |
1350 Register divisor = ToRegister(instr->divisor()); | 1348 Register divisor = ToRegister(instr->divisor()); |
1351 Register remainder = ToRegister(instr->temp()); | 1349 Register remainder = ToRegister(instr->temp()); |
1352 DCHECK(dividend.is(eax)); | 1350 DCHECK(dividend.is(eax)); |
1353 DCHECK(remainder.is(edx)); | 1351 DCHECK(remainder.is(edx)); |
1354 DCHECK(ToRegister(instr->result()).is(eax)); | 1352 DCHECK(ToRegister(instr->result()).is(eax)); |
1355 DCHECK(!divisor.is(eax)); | 1353 DCHECK(!divisor.is(eax)); |
1356 DCHECK(!divisor.is(edx)); | 1354 DCHECK(!divisor.is(edx)); |
1357 | 1355 |
1358 // Check for x / 0. | 1356 // Check for x / 0. |
1359 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1357 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1360 __ test(divisor, divisor); | 1358 __ test(divisor, divisor); |
1361 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1359 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
1362 } | 1360 } |
1363 | 1361 |
1364 // Check for (0 / -x) that will produce negative zero. | 1362 // Check for (0 / -x) that will produce negative zero. |
1365 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1363 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1366 Label dividend_not_zero; | 1364 Label dividend_not_zero; |
1367 __ test(dividend, dividend); | 1365 __ test(dividend, dividend); |
1368 __ j(not_zero, ÷nd_not_zero, Label::kNear); | 1366 __ j(not_zero, ÷nd_not_zero, Label::kNear); |
1369 __ test(divisor, divisor); | 1367 __ test(divisor, divisor); |
1370 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1368 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
1371 __ bind(÷nd_not_zero); | 1369 __ bind(÷nd_not_zero); |
1372 } | 1370 } |
1373 | 1371 |
1374 // Check for (kMinInt / -1). | 1372 // Check for (kMinInt / -1). |
1375 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1373 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
1376 Label dividend_not_min_int; | 1374 Label dividend_not_min_int; |
1377 __ cmp(dividend, kMinInt); | 1375 __ cmp(dividend, kMinInt); |
1378 __ j(not_zero, ÷nd_not_min_int, Label::kNear); | 1376 __ j(not_zero, ÷nd_not_min_int, Label::kNear); |
1379 __ cmp(divisor, -1); | 1377 __ cmp(divisor, -1); |
1380 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1378 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
1381 __ bind(÷nd_not_min_int); | 1379 __ bind(÷nd_not_min_int); |
1382 } | 1380 } |
1383 | 1381 |
1384 // Sign extend to edx (= remainder). | 1382 // Sign extend to edx (= remainder). |
1385 __ cdq(); | 1383 __ cdq(); |
1386 __ idiv(divisor); | 1384 __ idiv(divisor); |
1387 | 1385 |
1388 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { | 1386 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) { |
1389 // Deoptimize if remainder is not 0. | 1387 // Deoptimize if remainder is not 0. |
1390 __ test(remainder, remainder); | 1388 __ test(remainder, remainder); |
1391 DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision); | 1389 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kLostPrecision); |
1392 } | 1390 } |
1393 } | 1391 } |
1394 | 1392 |
1395 | 1393 |
1396 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { | 1394 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) { |
1397 Register dividend = ToRegister(instr->dividend()); | 1395 Register dividend = ToRegister(instr->dividend()); |
1398 int32_t divisor = instr->divisor(); | 1396 int32_t divisor = instr->divisor(); |
1399 DCHECK(dividend.is(ToRegister(instr->result()))); | 1397 DCHECK(dividend.is(ToRegister(instr->result()))); |
1400 | 1398 |
1401 // If the divisor is positive, things are easy: There can be no deopts and we | 1399 // If the divisor is positive, things are easy: There can be no deopts and we |
1402 // can simply do an arithmetic right shift. | 1400 // can simply do an arithmetic right shift. |
1403 if (divisor == 1) return; | 1401 if (divisor == 1) return; |
1404 int32_t shift = WhichPowerOf2Abs(divisor); | 1402 int32_t shift = WhichPowerOf2Abs(divisor); |
1405 if (divisor > 1) { | 1403 if (divisor > 1) { |
1406 __ sar(dividend, shift); | 1404 __ sar(dividend, shift); |
1407 return; | 1405 return; |
1408 } | 1406 } |
1409 | 1407 |
1410 // If the divisor is negative, we have to negate and handle edge cases. | 1408 // If the divisor is negative, we have to negate and handle edge cases. |
1411 __ neg(dividend); | 1409 __ neg(dividend); |
1412 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1410 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1413 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1411 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
1414 } | 1412 } |
1415 | 1413 |
1416 // Dividing by -1 is basically negation, unless we overflow. | 1414 // Dividing by -1 is basically negation, unless we overflow. |
1417 if (divisor == -1) { | 1415 if (divisor == -1) { |
1418 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1416 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1419 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1417 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
1420 } | 1418 } |
1421 return; | 1419 return; |
1422 } | 1420 } |
1423 | 1421 |
1424 // If the negation could not overflow, simply shifting is OK. | 1422 // If the negation could not overflow, simply shifting is OK. |
1425 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { | 1423 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) { |
1426 __ sar(dividend, shift); | 1424 __ sar(dividend, shift); |
1427 return; | 1425 return; |
1428 } | 1426 } |
1429 | 1427 |
1430 Label not_kmin_int, done; | 1428 Label not_kmin_int, done; |
1431 __ j(no_overflow, ¬_kmin_int, Label::kNear); | 1429 __ j(no_overflow, ¬_kmin_int, Label::kNear); |
1432 __ mov(dividend, Immediate(kMinInt / divisor)); | 1430 __ mov(dividend, Immediate(kMinInt / divisor)); |
1433 __ jmp(&done, Label::kNear); | 1431 __ jmp(&done, Label::kNear); |
1434 __ bind(¬_kmin_int); | 1432 __ bind(¬_kmin_int); |
1435 __ sar(dividend, shift); | 1433 __ sar(dividend, shift); |
1436 __ bind(&done); | 1434 __ bind(&done); |
1437 } | 1435 } |
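For a negative power-of-two divisor the flooring division reduces to a negation followed by an arithmetic shift: floor(x / -(2^k)) == (-x) >> k whenever -x does not overflow, and the one overflowing input (kMinInt) is patched with the precomputed constant kMinInt / divisor. A sketch, assuming divisor is a negative power of two in [-2^30, -2] (divisor == -1 and a kMinInt divisor take the separate paths shown above):

#include <cassert>
#include <cstdint>
#include <limits>

int32_t FlooringDivByNegPowerOf2(int32_t dividend, int32_t divisor) {
  int32_t shift = 0;                                    // WhichPowerOf2Abs
  for (int32_t d = -divisor; d > 1; d >>= 1) ++shift;
  if (dividend == std::numeric_limits<int32_t>::min()) {
    // Negation would overflow; use the precomputed constant instead.
    return std::numeric_limits<int32_t>::min() / divisor;
  }
  // dividend == 0 is the -0 case that deopts in the generated code.
  return -dividend >> shift;  // arithmetic shift floors toward -infinity
}

int main() {
  assert(FlooringDivByNegPowerOf2(5, -4) == -2);   // floor(-1.25)
  assert(FlooringDivByNegPowerOf2(-5, -4) == 1);   // floor(1.25)
}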
1438 | 1436 |
1439 | 1437 |
1440 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { | 1438 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) { |
1441 Register dividend = ToRegister(instr->dividend()); | 1439 Register dividend = ToRegister(instr->dividend()); |
1442 int32_t divisor = instr->divisor(); | 1440 int32_t divisor = instr->divisor(); |
1443 DCHECK(ToRegister(instr->result()).is(edx)); | 1441 DCHECK(ToRegister(instr->result()).is(edx)); |
1444 | 1442 |
1445 if (divisor == 0) { | 1443 if (divisor == 0) { |
1446 DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero); | 1444 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kDivisionByZero); |
1447 return; | 1445 return; |
1448 } | 1446 } |
1449 | 1447 |
1450 // Check for (0 / -x) that will produce negative zero. | 1448 // Check for (0 / -x) that will produce negative zero. |
1451 HMathFloorOfDiv* hdiv = instr->hydrogen(); | 1449 HMathFloorOfDiv* hdiv = instr->hydrogen(); |
1452 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { | 1450 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) { |
1453 __ test(dividend, dividend); | 1451 __ test(dividend, dividend); |
1454 DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero); | 1452 DeoptimizeIf(zero, instr, DeoptimizeReason::kMinusZero); |
1455 } | 1453 } |
1456 | 1454 |
1457 // Easy case: We need no dynamic check for the dividend and the flooring | 1455 // Easy case: We need no dynamic check for the dividend and the flooring |
1458 // division is the same as the truncating division. | 1456 // division is the same as the truncating division. |
1459 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || | 1457 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) || |
1460 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { | 1458 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) { |
1461 __ TruncatingDiv(dividend, Abs(divisor)); | 1459 __ TruncatingDiv(dividend, Abs(divisor)); |
1462 if (divisor < 0) __ neg(edx); | 1460 if (divisor < 0) __ neg(edx); |
1463 return; | 1461 return; |
1464 } | 1462 } |
(...skipping 26 matching lines...) |
1491 Register result = ToRegister(instr->result()); | 1489 Register result = ToRegister(instr->result()); |
1492 DCHECK(dividend.is(eax)); | 1490 DCHECK(dividend.is(eax)); |
1493 DCHECK(remainder.is(edx)); | 1491 DCHECK(remainder.is(edx)); |
1494 DCHECK(result.is(eax)); | 1492 DCHECK(result.is(eax)); |
1495 DCHECK(!divisor.is(eax)); | 1493 DCHECK(!divisor.is(eax)); |
1496 DCHECK(!divisor.is(edx)); | 1494 DCHECK(!divisor.is(edx)); |
1497 | 1495 |
1498 // Check for x / 0. | 1496 // Check for x / 0. |
1499 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { | 1497 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) { |
1500 __ test(divisor, divisor); | 1498 __ test(divisor, divisor); |
1501 DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero); | 1499 DeoptimizeIf(zero, instr, DeoptimizeReason::kDivisionByZero); |
1502 } | 1500 } |
1503 | 1501 |
1504 // Check for (0 / -x) that will produce negative zero. | 1502 // Check for (0 / -x) that will produce negative zero. |
1505 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1503 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1506 Label dividend_not_zero; | 1504 Label dividend_not_zero; |
1507 __ test(dividend, dividend); | 1505 __ test(dividend, dividend); |
1508 __ j(not_zero, ÷nd_not_zero, Label::kNear); | 1506 __ j(not_zero, ÷nd_not_zero, Label::kNear); |
1509 __ test(divisor, divisor); | 1507 __ test(divisor, divisor); |
1510 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1508 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
1511 __ bind(÷nd_not_zero); | 1509 __ bind(÷nd_not_zero); |
1512 } | 1510 } |
1513 | 1511 |
1514 // Check for (kMinInt / -1). | 1512 // Check for (kMinInt / -1). |
1515 if (hdiv->CheckFlag(HValue::kCanOverflow)) { | 1513 if (hdiv->CheckFlag(HValue::kCanOverflow)) { |
1516 Label dividend_not_min_int; | 1514 Label dividend_not_min_int; |
1517 __ cmp(dividend, kMinInt); | 1515 __ cmp(dividend, kMinInt); |
1518 __ j(not_zero, ÷nd_not_min_int, Label::kNear); | 1516 __ j(not_zero, ÷nd_not_min_int, Label::kNear); |
1519 __ cmp(divisor, -1); | 1517 __ cmp(divisor, -1); |
1520 DeoptimizeIf(zero, instr, Deoptimizer::kOverflow); | 1518 DeoptimizeIf(zero, instr, DeoptimizeReason::kOverflow); |
1521 __ bind(÷nd_not_min_int); | 1519 __ bind(÷nd_not_min_int); |
1522 } | 1520 } |
1523 | 1521 |
1524 // Sign extend to edx (= remainder). | 1522 // Sign extend to edx (= remainder). |
1525 __ cdq(); | 1523 __ cdq(); |
1526 __ idiv(divisor); | 1524 __ idiv(divisor); |
1527 | 1525 |
1528 Label done; | 1526 Label done; |
1529 __ test(remainder, remainder); | 1527 __ test(remainder, remainder); |
1530 __ j(zero, &done, Label::kNear); | 1528 __ j(zero, &done, Label::kNear); |
(...skipping 57 matching lines...) |
1588 __ imul(left, left, constant); | 1586 __ imul(left, left, constant); |
1589 } | 1587 } |
1590 } else { | 1588 } else { |
1591 if (instr->hydrogen()->representation().IsSmi()) { | 1589 if (instr->hydrogen()->representation().IsSmi()) { |
1592 __ SmiUntag(left); | 1590 __ SmiUntag(left); |
1593 } | 1591 } |
1594 __ imul(left, ToOperand(right)); | 1592 __ imul(left, ToOperand(right)); |
1595 } | 1593 } |
1596 | 1594 |
1597 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1595 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1598 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1596 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
1599 } | 1597 } |
1600 | 1598 |
1601 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1599 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1602 // Bail out if the result is supposed to be negative zero. | 1600 // Bail out if the result is supposed to be negative zero. |
1603 Label done; | 1601 Label done; |
1604 __ test(left, Operand(left)); | 1602 __ test(left, Operand(left)); |
1605 __ j(not_zero, &done); | 1603 __ j(not_zero, &done); |
1606 if (right->IsConstantOperand()) { | 1604 if (right->IsConstantOperand()) { |
1607 if (ToInteger32(LConstantOperand::cast(right)) < 0) { | 1605 if (ToInteger32(LConstantOperand::cast(right)) < 0) { |
1608 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 1606 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
1609 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { | 1607 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) { |
1610 __ cmp(ToRegister(instr->temp()), Immediate(0)); | 1608 __ cmp(ToRegister(instr->temp()), Immediate(0)); |
1611 DeoptimizeIf(less, instr, Deoptimizer::kMinusZero); | 1609 DeoptimizeIf(less, instr, DeoptimizeReason::kMinusZero); |
1612 } | 1610 } |
1613 } else { | 1611 } else { |
1614 // Test the non-zero operand for negative sign. | 1612 // Test the non-zero operand for negative sign. |
1615 __ or_(ToRegister(instr->temp()), ToOperand(right)); | 1613 __ or_(ToRegister(instr->temp()), ToOperand(right)); |
1616 DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero); | 1614 DeoptimizeIf(sign, instr, DeoptimizeReason::kMinusZero); |
1617 } | 1615 } |
1618 __ bind(&done); | 1616 __ bind(&done); |
1619 } | 1617 } |
1620 } | 1618 } |
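The minus-zero block in DoMulI works because an int32 product of 0 must come from a zero operand, and the result is JavaScript's -0 exactly when the other operand (the original left value held in temp, or the known constant) is negative; the or_ of the two operands tests "either one is negative" with a single sign check. A sketch of the condition that deopt encodes for the register-operand case:

#include <cstdint>

// True when the DeoptimizeReason::kMinusZero bailout fires: the product is 0
// and at least one operand was negative, which an int32 result cannot
// distinguish from +0.
bool MulNeedsMinusZeroDeopt(int32_t product, int32_t original_left,
                            int32_t right) {
  if (product != 0) return false;
  return (original_left | right) < 0;  // sign of the OR == "either is negative"
}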
1621 | 1619 |
1622 | 1620 |
1623 void LCodeGen::DoBitI(LBitI* instr) { | 1621 void LCodeGen::DoBitI(LBitI* instr) { |
1624 LOperand* left = instr->left(); | 1622 LOperand* left = instr->left(); |
1625 LOperand* right = instr->right(); | 1623 LOperand* right = instr->right(); |
1626 DCHECK(left->Equals(instr->result())); | 1624 DCHECK(left->Equals(instr->result())); |
(...skipping 52 matching lines...) |
1679 case Token::ROR: | 1677 case Token::ROR: |
1680 __ ror_cl(ToRegister(left)); | 1678 __ ror_cl(ToRegister(left)); |
1681 break; | 1679 break; |
1682 case Token::SAR: | 1680 case Token::SAR: |
1683 __ sar_cl(ToRegister(left)); | 1681 __ sar_cl(ToRegister(left)); |
1684 break; | 1682 break; |
1685 case Token::SHR: | 1683 case Token::SHR: |
1686 __ shr_cl(ToRegister(left)); | 1684 __ shr_cl(ToRegister(left)); |
1687 if (instr->can_deopt()) { | 1685 if (instr->can_deopt()) { |
1688 __ test(ToRegister(left), ToRegister(left)); | 1686 __ test(ToRegister(left), ToRegister(left)); |
1689 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); | 1687 DeoptimizeIf(sign, instr, DeoptimizeReason::kNegativeValue); |
1690 } | 1688 } |
1691 break; | 1689 break; |
1692 case Token::SHL: | 1690 case Token::SHL: |
1693 __ shl_cl(ToRegister(left)); | 1691 __ shl_cl(ToRegister(left)); |
1694 break; | 1692 break; |
1695 default: | 1693 default: |
1696 UNREACHABLE(); | 1694 UNREACHABLE(); |
1697 break; | 1695 break; |
1698 } | 1696 } |
1699 } else { | 1697 } else { |
1700 int value = ToInteger32(LConstantOperand::cast(right)); | 1698 int value = ToInteger32(LConstantOperand::cast(right)); |
1701 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); | 1699 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); |
1702 switch (instr->op()) { | 1700 switch (instr->op()) { |
1703 case Token::ROR: | 1701 case Token::ROR: |
1704 if (shift_count == 0 && instr->can_deopt()) { | 1702 if (shift_count == 0 && instr->can_deopt()) { |
1705 __ test(ToRegister(left), ToRegister(left)); | 1703 __ test(ToRegister(left), ToRegister(left)); |
1706 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); | 1704 DeoptimizeIf(sign, instr, DeoptimizeReason::kNegativeValue); |
1707 } else { | 1705 } else { |
1708 __ ror(ToRegister(left), shift_count); | 1706 __ ror(ToRegister(left), shift_count); |
1709 } | 1707 } |
1710 break; | 1708 break; |
1711 case Token::SAR: | 1709 case Token::SAR: |
1712 if (shift_count != 0) { | 1710 if (shift_count != 0) { |
1713 __ sar(ToRegister(left), shift_count); | 1711 __ sar(ToRegister(left), shift_count); |
1714 } | 1712 } |
1715 break; | 1713 break; |
1716 case Token::SHR: | 1714 case Token::SHR: |
1717 if (shift_count != 0) { | 1715 if (shift_count != 0) { |
1718 __ shr(ToRegister(left), shift_count); | 1716 __ shr(ToRegister(left), shift_count); |
1719 } else if (instr->can_deopt()) { | 1717 } else if (instr->can_deopt()) { |
1720 __ test(ToRegister(left), ToRegister(left)); | 1718 __ test(ToRegister(left), ToRegister(left)); |
1721 DeoptimizeIf(sign, instr, Deoptimizer::kNegativeValue); | 1719 DeoptimizeIf(sign, instr, DeoptimizeReason::kNegativeValue); |
1722 } | 1720 } |
1723 break; | 1721 break; |
1724 case Token::SHL: | 1722 case Token::SHL: |
1725 if (shift_count != 0) { | 1723 if (shift_count != 0) { |
1726 if (instr->hydrogen_value()->representation().IsSmi() && | 1724 if (instr->hydrogen_value()->representation().IsSmi() && |
1727 instr->can_deopt()) { | 1725 instr->can_deopt()) { |
1728 if (shift_count != 1) { | 1726 if (shift_count != 1) { |
1729 __ shl(ToRegister(left), shift_count - 1); | 1727 __ shl(ToRegister(left), shift_count - 1); |
1730 } | 1728 } |
1731 __ SmiTag(ToRegister(left)); | 1729 __ SmiTag(ToRegister(left)); |
1732 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1730 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
1733 } else { | 1731 } else { |
1734 __ shl(ToRegister(left), shift_count); | 1732 __ shl(ToRegister(left), shift_count); |
1735 } | 1733 } |
1736 } | 1734 } |
1737 break; | 1735 break; |
1738 default: | 1736 default: |
1739 UNREACHABLE(); | 1737 UNREACHABLE(); |
1740 break; | 1738 break; |
1741 } | 1739 } |
1742 } | 1740 } |
1743 } | 1741 } |
1744 | 1742 |
1745 | 1743 |
1746 void LCodeGen::DoSubI(LSubI* instr) { | 1744 void LCodeGen::DoSubI(LSubI* instr) { |
1747 LOperand* left = instr->left(); | 1745 LOperand* left = instr->left(); |
1748 LOperand* right = instr->right(); | 1746 LOperand* right = instr->right(); |
1749 DCHECK(left->Equals(instr->result())); | 1747 DCHECK(left->Equals(instr->result())); |
1750 | 1748 |
1751 if (right->IsConstantOperand()) { | 1749 if (right->IsConstantOperand()) { |
1752 __ sub(ToOperand(left), | 1750 __ sub(ToOperand(left), |
1753 ToImmediate(right, instr->hydrogen()->representation())); | 1751 ToImmediate(right, instr->hydrogen()->representation())); |
1754 } else { | 1752 } else { |
1755 __ sub(ToRegister(left), ToOperand(right)); | 1753 __ sub(ToRegister(left), ToOperand(right)); |
1756 } | 1754 } |
1757 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1755 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1758 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1756 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
1759 } | 1757 } |
1760 } | 1758 } |
1761 | 1759 |
1762 | 1760 |
1763 void LCodeGen::DoConstantI(LConstantI* instr) { | 1761 void LCodeGen::DoConstantI(LConstantI* instr) { |
1764 __ Move(ToRegister(instr->result()), Immediate(instr->value())); | 1762 __ Move(ToRegister(instr->result()), Immediate(instr->value())); |
1765 } | 1763 } |
1766 | 1764 |
1767 | 1765 |
1768 void LCodeGen::DoConstantS(LConstantS* instr) { | 1766 void LCodeGen::DoConstantS(LConstantS* instr) { |
(...skipping 127 matching lines...) |
1896 __ lea(ToRegister(instr->result()), address); | 1894 __ lea(ToRegister(instr->result()), address); |
1897 } | 1895 } |
1898 } else { | 1896 } else { |
1899 if (right->IsConstantOperand()) { | 1897 if (right->IsConstantOperand()) { |
1900 __ add(ToOperand(left), | 1898 __ add(ToOperand(left), |
1901 ToImmediate(right, instr->hydrogen()->representation())); | 1899 ToImmediate(right, instr->hydrogen()->representation())); |
1902 } else { | 1900 } else { |
1903 __ add(ToRegister(left), ToOperand(right)); | 1901 __ add(ToRegister(left), ToOperand(right)); |
1904 } | 1902 } |
1905 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 1903 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
1906 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 1904 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
1907 } | 1905 } |
1908 } | 1906 } |
1909 } | 1907 } |
1910 | 1908 |
1911 | 1909 |
1912 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1910 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1913 LOperand* left = instr->left(); | 1911 LOperand* left = instr->left(); |
1914 LOperand* right = instr->right(); | 1912 LOperand* right = instr->right(); |
1915 DCHECK(left->Equals(instr->result())); | 1913 DCHECK(left->Equals(instr->result())); |
1916 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1914 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
(...skipping 233 matching lines...) |
2150 } | 2148 } |
2151 | 2149 |
2152 if (expected.Contains(ToBooleanICStub::SMI)) { | 2150 if (expected.Contains(ToBooleanICStub::SMI)) { |
2153 // Smis: 0 -> false, all other -> true. | 2151 // Smis: 0 -> false, all other -> true. |
2154 __ test(reg, Operand(reg)); | 2152 __ test(reg, Operand(reg)); |
2155 __ j(equal, instr->FalseLabel(chunk_)); | 2153 __ j(equal, instr->FalseLabel(chunk_)); |
2156 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2154 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
2157 } else if (expected.NeedsMap()) { | 2155 } else if (expected.NeedsMap()) { |
2158 // If we need a map later and have a Smi -> deopt. | 2156 // If we need a map later and have a Smi -> deopt. |
2159 __ test(reg, Immediate(kSmiTagMask)); | 2157 __ test(reg, Immediate(kSmiTagMask)); |
2160 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); | 2158 DeoptimizeIf(zero, instr, DeoptimizeReason::kSmi); |
2161 } | 2159 } |
2162 | 2160 |
2163 Register map = no_reg; // Keep the compiler happy. | 2161 Register map = no_reg; // Keep the compiler happy. |
2164 if (expected.NeedsMap()) { | 2162 if (expected.NeedsMap()) { |
2165 map = ToRegister(instr->temp()); | 2163 map = ToRegister(instr->temp()); |
2166 DCHECK(!map.is(reg)); | 2164 DCHECK(!map.is(reg)); |
2167 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); | 2165 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset)); |
2168 | 2166 |
2169 if (expected.CanBeUndetectable()) { | 2167 if (expected.CanBeUndetectable()) { |
2170 // Undetectable -> false. | 2168 // Undetectable -> false. |
(...skipping 42 matching lines...) |
2213 __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset)); | 2211 __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset)); |
2214 __ FCmp(); | 2212 __ FCmp(); |
2215 __ j(zero, instr->FalseLabel(chunk_)); | 2213 __ j(zero, instr->FalseLabel(chunk_)); |
2216 __ jmp(instr->TrueLabel(chunk_)); | 2214 __ jmp(instr->TrueLabel(chunk_)); |
2217 __ bind(¬_heap_number); | 2215 __ bind(¬_heap_number); |
2218 } | 2216 } |
2219 | 2217 |
2220 if (!expected.IsGeneric()) { | 2218 if (!expected.IsGeneric()) { |
2221 // We've seen something for the first time -> deopt. | 2219 // We've seen something for the first time -> deopt. |
2222 // This can only happen if we are not generic already. | 2220 // This can only happen if we are not generic already. |
2223 DeoptimizeIf(no_condition, instr, Deoptimizer::kUnexpectedObject); | 2221 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kUnexpectedObject); |
2224 } | 2222 } |
2225 } | 2223 } |
2226 } | 2224 } |
2227 } | 2225 } |
2228 | 2226 |
2229 | 2227 |
2230 void LCodeGen::EmitGoto(int block) { | 2228 void LCodeGen::EmitGoto(int block) { |
2231 if (!IsNextEmittedBlock(block)) { | 2229 if (!IsNextEmittedBlock(block)) { |
2232 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2230 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
2233 } | 2231 } |
(...skipping 351 matching lines...) |
2585 } | 2583 } |
2586 | 2584 |
2587 // Loop through the {object}s prototype chain looking for the {prototype}. | 2585 // Loop through the {object}s prototype chain looking for the {prototype}. |
2588 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); | 2586 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); |
2589 Label loop; | 2587 Label loop; |
2590 __ bind(&loop); | 2588 __ bind(&loop); |
2591 | 2589 |
2592 // Deoptimize if the object needs to be access checked. | 2590 // Deoptimize if the object needs to be access checked. |
2593 __ test_b(FieldOperand(object_map, Map::kBitFieldOffset), | 2591 __ test_b(FieldOperand(object_map, Map::kBitFieldOffset), |
2594 Immediate(1 << Map::kIsAccessCheckNeeded)); | 2592 Immediate(1 << Map::kIsAccessCheckNeeded)); |
2595 DeoptimizeIf(not_zero, instr, Deoptimizer::kAccessCheck); | 2593 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kAccessCheck); |
2596 // Deoptimize for proxies. | 2594 // Deoptimize for proxies. |
2597 __ CmpInstanceType(object_map, JS_PROXY_TYPE); | 2595 __ CmpInstanceType(object_map, JS_PROXY_TYPE); |
2598 DeoptimizeIf(equal, instr, Deoptimizer::kProxy); | 2596 DeoptimizeIf(equal, instr, DeoptimizeReason::kProxy); |
2599 | 2597 |
2600 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); | 2598 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); |
2601 __ cmp(object_prototype, factory()->null_value()); | 2599 __ cmp(object_prototype, factory()->null_value()); |
2602 EmitFalseBranch(instr, equal); | 2600 EmitFalseBranch(instr, equal); |
2603 __ cmp(object_prototype, prototype); | 2601 __ cmp(object_prototype, prototype); |
2604 EmitTrueBranch(instr, equal); | 2602 EmitTrueBranch(instr, equal); |
2605 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); | 2603 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); |
2606 __ jmp(&loop); | 2604 __ jmp(&loop); |
2607 } | 2605 } |
2608 | 2606 |
(...skipping 100 matching lines...) |
2709 | 2707 |
2710 | 2708 |
2711 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2709 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
2712 Register context = ToRegister(instr->context()); | 2710 Register context = ToRegister(instr->context()); |
2713 Register result = ToRegister(instr->result()); | 2711 Register result = ToRegister(instr->result()); |
2714 __ mov(result, ContextOperand(context, instr->slot_index())); | 2712 __ mov(result, ContextOperand(context, instr->slot_index())); |
2715 | 2713 |
2716 if (instr->hydrogen()->RequiresHoleCheck()) { | 2714 if (instr->hydrogen()->RequiresHoleCheck()) { |
2717 __ cmp(result, factory()->the_hole_value()); | 2715 __ cmp(result, factory()->the_hole_value()); |
2718 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2716 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2719 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2717 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
2720 } else { | 2718 } else { |
2721 Label is_not_hole; | 2719 Label is_not_hole; |
2722 __ j(not_equal, &is_not_hole, Label::kNear); | 2720 __ j(not_equal, &is_not_hole, Label::kNear); |
2723 __ mov(result, factory()->undefined_value()); | 2721 __ mov(result, factory()->undefined_value()); |
2724 __ bind(&is_not_hole); | 2722 __ bind(&is_not_hole); |
2725 } | 2723 } |
2726 } | 2724 } |
2727 } | 2725 } |
2728 | 2726 |
2729 | 2727 |
2730 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2728 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
2731 Register context = ToRegister(instr->context()); | 2729 Register context = ToRegister(instr->context()); |
2732 Register value = ToRegister(instr->value()); | 2730 Register value = ToRegister(instr->value()); |
2733 | 2731 |
2734 Label skip_assignment; | 2732 Label skip_assignment; |
2735 | 2733 |
2736 Operand target = ContextOperand(context, instr->slot_index()); | 2734 Operand target = ContextOperand(context, instr->slot_index()); |
2737 if (instr->hydrogen()->RequiresHoleCheck()) { | 2735 if (instr->hydrogen()->RequiresHoleCheck()) { |
2738 __ cmp(target, factory()->the_hole_value()); | 2736 __ cmp(target, factory()->the_hole_value()); |
2739 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2737 if (instr->hydrogen()->DeoptimizesOnHole()) { |
2740 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2738 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
2741 } else { | 2739 } else { |
2742 __ j(not_equal, &skip_assignment, Label::kNear); | 2740 __ j(not_equal, &skip_assignment, Label::kNear); |
2743 } | 2741 } |
2744 } | 2742 } |
2745 | 2743 |
2746 __ mov(target, value); | 2744 __ mov(target, value); |
2747 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2745 if (instr->hydrogen()->NeedsWriteBarrier()) { |
2748 SmiCheck check_needed = | 2746 SmiCheck check_needed = |
2749 instr->hydrogen()->value()->type().IsHeapObject() | 2747 instr->hydrogen()->value()->type().IsHeapObject() |
2750 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2748 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
(...skipping 70 matching lines...) |
2821 Register function = ToRegister(instr->function()); | 2819 Register function = ToRegister(instr->function()); |
2822 Register temp = ToRegister(instr->temp()); | 2820 Register temp = ToRegister(instr->temp()); |
2823 Register result = ToRegister(instr->result()); | 2821 Register result = ToRegister(instr->result()); |
2824 | 2822 |
2825 // Get the prototype or initial map from the function. | 2823 // Get the prototype or initial map from the function. |
2826 __ mov(result, | 2824 __ mov(result, |
2827 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2825 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
2828 | 2826 |
2829 // Check that the function has a prototype or an initial map. | 2827 // Check that the function has a prototype or an initial map. |
2830 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); | 2828 __ cmp(Operand(result), Immediate(factory()->the_hole_value())); |
2831 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2829 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
2832 | 2830 |
2833 // If the function does not have an initial map, we're done. | 2831 // If the function does not have an initial map, we're done. |
2834 Label done; | 2832 Label done; |
2835 __ CmpObjectType(result, MAP_TYPE, temp); | 2833 __ CmpObjectType(result, MAP_TYPE, temp); |
2836 __ j(not_equal, &done, Label::kNear); | 2834 __ j(not_equal, &done, Label::kNear); |
2837 | 2835 |
2838 // Get the prototype from the initial map. | 2836 // Get the prototype from the initial map. |
2839 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); | 2837 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); |
2840 | 2838 |
2841 // All done. | 2839 // All done. |
(...skipping 61 matching lines...) |
2903 case UINT16_ELEMENTS: | 2901 case UINT16_ELEMENTS: |
2904 __ movzx_w(result, operand); | 2902 __ movzx_w(result, operand); |
2905 break; | 2903 break; |
2906 case INT32_ELEMENTS: | 2904 case INT32_ELEMENTS: |
2907 __ mov(result, operand); | 2905 __ mov(result, operand); |
2908 break; | 2906 break; |
2909 case UINT32_ELEMENTS: | 2907 case UINT32_ELEMENTS: |
2910 __ mov(result, operand); | 2908 __ mov(result, operand); |
2911 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { | 2909 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) { |
2912 __ test(result, Operand(result)); | 2910 __ test(result, Operand(result)); |
2913 DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue); | 2911 DeoptimizeIf(negative, instr, DeoptimizeReason::kNegativeValue); |
2914 } | 2912 } |
2915 break; | 2913 break; |
2916 case FLOAT32_ELEMENTS: | 2914 case FLOAT32_ELEMENTS: |
2917 case FLOAT64_ELEMENTS: | 2915 case FLOAT64_ELEMENTS: |
2918 case FAST_SMI_ELEMENTS: | 2916 case FAST_SMI_ELEMENTS: |
2919 case FAST_ELEMENTS: | 2917 case FAST_ELEMENTS: |
2920 case FAST_DOUBLE_ELEMENTS: | 2918 case FAST_DOUBLE_ELEMENTS: |
2921 case FAST_HOLEY_SMI_ELEMENTS: | 2919 case FAST_HOLEY_SMI_ELEMENTS: |
2922 case FAST_HOLEY_ELEMENTS: | 2920 case FAST_HOLEY_ELEMENTS: |
2923 case FAST_HOLEY_DOUBLE_ELEMENTS: | 2921 case FAST_HOLEY_DOUBLE_ELEMENTS: |
(...skipping 11 matching lines...) |
2935 | 2933 |
2936 | 2934 |
2937 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { | 2935 void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) { |
2938 if (instr->hydrogen()->RequiresHoleCheck()) { | 2936 if (instr->hydrogen()->RequiresHoleCheck()) { |
2939 Operand hole_check_operand = BuildFastArrayOperand( | 2937 Operand hole_check_operand = BuildFastArrayOperand( |
2940 instr->elements(), instr->key(), | 2938 instr->elements(), instr->key(), |
2941 instr->hydrogen()->key()->representation(), | 2939 instr->hydrogen()->key()->representation(), |
2942 FAST_DOUBLE_ELEMENTS, | 2940 FAST_DOUBLE_ELEMENTS, |
2943 instr->base_offset() + sizeof(kHoleNanLower32)); | 2941 instr->base_offset() + sizeof(kHoleNanLower32)); |
2944 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); | 2942 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); |
2945 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2943 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
2946 } | 2944 } |
2947 | 2945 |
2948 Operand double_load_operand = BuildFastArrayOperand( | 2946 Operand double_load_operand = BuildFastArrayOperand( |
2949 instr->elements(), | 2947 instr->elements(), |
2950 instr->key(), | 2948 instr->key(), |
2951 instr->hydrogen()->key()->representation(), | 2949 instr->hydrogen()->key()->representation(), |
2952 FAST_DOUBLE_ELEMENTS, | 2950 FAST_DOUBLE_ELEMENTS, |
2953 instr->base_offset()); | 2951 instr->base_offset()); |
2954 X87Mov(ToX87Register(instr->result()), double_load_operand); | 2952 X87Mov(ToX87Register(instr->result()), double_load_operand); |
2955 } | 2953 } |
2956 | 2954 |
2957 | 2955 |
2958 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { | 2956 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { |
2959 Register result = ToRegister(instr->result()); | 2957 Register result = ToRegister(instr->result()); |
2960 | 2958 |
2961 // Load the result. | 2959 // Load the result. |
2962 __ mov(result, | 2960 __ mov(result, |
2963 BuildFastArrayOperand(instr->elements(), instr->key(), | 2961 BuildFastArrayOperand(instr->elements(), instr->key(), |
2964 instr->hydrogen()->key()->representation(), | 2962 instr->hydrogen()->key()->representation(), |
2965 FAST_ELEMENTS, instr->base_offset())); | 2963 FAST_ELEMENTS, instr->base_offset())); |
2966 | 2964 |
2967 // Check for the hole value. | 2965 // Check for the hole value. |
2968 if (instr->hydrogen()->RequiresHoleCheck()) { | 2966 if (instr->hydrogen()->RequiresHoleCheck()) { |
2969 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { | 2967 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { |
2970 __ test(result, Immediate(kSmiTagMask)); | 2968 __ test(result, Immediate(kSmiTagMask)); |
2971 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotASmi); | 2969 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotASmi); |
2972 } else { | 2970 } else { |
2973 __ cmp(result, factory()->the_hole_value()); | 2971 __ cmp(result, factory()->the_hole_value()); |
2974 DeoptimizeIf(equal, instr, Deoptimizer::kHole); | 2972 DeoptimizeIf(equal, instr, DeoptimizeReason::kHole); |
2975 } | 2973 } |
2976 } else if (instr->hydrogen()->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) { | 2974 } else if (instr->hydrogen()->hole_mode() == CONVERT_HOLE_TO_UNDEFINED) { |
2977 DCHECK(instr->hydrogen()->elements_kind() == FAST_HOLEY_ELEMENTS); | 2975 DCHECK(instr->hydrogen()->elements_kind() == FAST_HOLEY_ELEMENTS); |
2978 Label done; | 2976 Label done; |
2979 __ cmp(result, factory()->the_hole_value()); | 2977 __ cmp(result, factory()->the_hole_value()); |
2980 __ j(not_equal, &done); | 2978 __ j(not_equal, &done); |
2981 if (info()->IsStub()) { | 2979 if (info()->IsStub()) { |
2982 // A stub can safely convert the hole to undefined only if the array | 2980 // A stub can safely convert the hole to undefined only if the array |
2983 // protector cell contains (Smi) Isolate::kArrayProtectorValid. | 2981 // protector cell contains (Smi) Isolate::kArrayProtectorValid. |
2984 // Otherwise it needs to bail out. | 2982 // Otherwise it needs to bail out. |
2985 __ LoadRoot(result, Heap::kArrayProtectorRootIndex); | 2983 __ LoadRoot(result, Heap::kArrayProtectorRootIndex); |
2986 __ cmp(FieldOperand(result, PropertyCell::kValueOffset), | 2984 __ cmp(FieldOperand(result, PropertyCell::kValueOffset), |
2987 Immediate(Smi::FromInt(Isolate::kArrayProtectorValid))); | 2985 Immediate(Smi::FromInt(Isolate::kArrayProtectorValid))); |
2988 DeoptimizeIf(not_equal, instr, Deoptimizer::kHole); | 2986 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole); |
2989 } | 2987 } |
2990 __ mov(result, isolate()->factory()->undefined_value()); | 2988 __ mov(result, isolate()->factory()->undefined_value()); |
2991 __ bind(&done); | 2989 __ bind(&done); |
2992 } | 2990 } |
2993 } | 2991 } |
2994 | 2992 |
2995 | 2993 |
2996 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { | 2994 void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) { |
2997 if (instr->is_fixed_typed_array()) { | 2995 if (instr->is_fixed_typed_array()) { |
2998 DoLoadKeyedExternalArray(instr); | 2996 DoLoadKeyedExternalArray(instr); |
(...skipping 129 matching lines...) |
3128 } | 3126 } |
3129 | 3127 |
3130 // Normal function. Replace undefined or null with global receiver. | 3128 // Normal function. Replace undefined or null with global receiver. |
3131 __ cmp(receiver, factory()->null_value()); | 3129 __ cmp(receiver, factory()->null_value()); |
3132 __ j(equal, &global_object, Label::kNear); | 3130 __ j(equal, &global_object, Label::kNear); |
3133 __ cmp(receiver, factory()->undefined_value()); | 3131 __ cmp(receiver, factory()->undefined_value()); |
3134 __ j(equal, &global_object, Label::kNear); | 3132 __ j(equal, &global_object, Label::kNear); |
3135 | 3133 |
3136 // The receiver should be a JS object. | 3134 // The receiver should be a JS object. |
3137 __ test(receiver, Immediate(kSmiTagMask)); | 3135 __ test(receiver, Immediate(kSmiTagMask)); |
3138 DeoptimizeIf(equal, instr, Deoptimizer::kSmi); | 3136 DeoptimizeIf(equal, instr, DeoptimizeReason::kSmi); |
3139 __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, scratch); | 3137 __ CmpObjectType(receiver, FIRST_JS_RECEIVER_TYPE, scratch); |
3140 DeoptimizeIf(below, instr, Deoptimizer::kNotAJavaScriptObject); | 3138 DeoptimizeIf(below, instr, DeoptimizeReason::kNotAJavaScriptObject); |
3141 | 3139 |
3142 __ jmp(&receiver_ok, Label::kNear); | 3140 __ jmp(&receiver_ok, Label::kNear); |
3143 __ bind(&global_object); | 3141 __ bind(&global_object); |
3144 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); | 3142 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset)); |
3145 __ mov(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX)); | 3143 __ mov(receiver, ContextOperand(receiver, Context::NATIVE_CONTEXT_INDEX)); |
3146 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX)); | 3144 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_PROXY_INDEX)); |
3147 __ bind(&receiver_ok); | 3145 __ bind(&receiver_ok); |
3148 } | 3146 } |
3149 | 3147 |
3150 | 3148 |
3151 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3149 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
3152 Register receiver = ToRegister(instr->receiver()); | 3150 Register receiver = ToRegister(instr->receiver()); |
3153 Register function = ToRegister(instr->function()); | 3151 Register function = ToRegister(instr->function()); |
3154 Register length = ToRegister(instr->length()); | 3152 Register length = ToRegister(instr->length()); |
3155 Register elements = ToRegister(instr->elements()); | 3153 Register elements = ToRegister(instr->elements()); |
3156 DCHECK(receiver.is(eax)); // Used for parameter count. | 3154 DCHECK(receiver.is(eax)); // Used for parameter count. |
3157 DCHECK(function.is(edi)); // Required by InvokeFunction. | 3155 DCHECK(function.is(edi)); // Required by InvokeFunction. |
3158 DCHECK(ToRegister(instr->result()).is(eax)); | 3156 DCHECK(ToRegister(instr->result()).is(eax)); |
3159 | 3157 |
3160 // Copy the arguments to this function possibly from the | 3158 // Copy the arguments to this function possibly from the |
3161 // adaptor frame below it. | 3159 // adaptor frame below it. |
3162 const uint32_t kArgumentsLimit = 1 * KB; | 3160 const uint32_t kArgumentsLimit = 1 * KB; |
3163 __ cmp(length, kArgumentsLimit); | 3161 __ cmp(length, kArgumentsLimit); |
3164 DeoptimizeIf(above, instr, Deoptimizer::kTooManyArguments); | 3162 DeoptimizeIf(above, instr, DeoptimizeReason::kTooManyArguments); |
3165 | 3163 |
3166 __ push(receiver); | 3164 __ push(receiver); |
3167 __ mov(receiver, length); | 3165 __ mov(receiver, length); |
3168 | 3166 |
3169 // Loop through the arguments pushing them onto the execution | 3167 // Loop through the arguments pushing them onto the execution |
3170 // stack. | 3168 // stack. |
3171 Label invoke, loop; | 3169 Label invoke, loop; |
3172 // length is a small non-negative integer, due to the test above. | 3170 // length is a small non-negative integer, due to the test above. |
3173 __ test(length, Operand(length)); | 3171 __ test(length, Operand(length)); |
3174 __ j(zero, &invoke, Label::kNear); | 3172 __ j(zero, &invoke, Label::kNear); |
(...skipping 155 matching lines...)
3330 } | 3328 } |
3331 generator.AfterCall(); | 3329 generator.AfterCall(); |
3332 } | 3330 } |
3333 } | 3331 } |
3334 | 3332 |
3335 | 3333 |
3336 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3334 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
3337 Register input_reg = ToRegister(instr->value()); | 3335 Register input_reg = ToRegister(instr->value()); |
3338 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 3336 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
3339 factory()->heap_number_map()); | 3337 factory()->heap_number_map()); |
3340 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3338 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
3341 | 3339 |
3342 Label slow, allocated, done; | 3340 Label slow, allocated, done; |
3343 uint32_t available_regs = eax.bit() | ecx.bit() | edx.bit() | ebx.bit(); | 3341 uint32_t available_regs = eax.bit() | ecx.bit() | edx.bit() | ebx.bit(); |
3344 available_regs &= ~input_reg.bit(); | 3342 available_regs &= ~input_reg.bit(); |
3345 if (instr->context()->IsRegister()) { | 3343 if (instr->context()->IsRegister()) { |
3346 // Make sure that the context isn't overwritten in the AllocateHeapNumber | 3344 // Make sure that the context isn't overwritten in the AllocateHeapNumber |
3347 // macro below. | 3345 // macro below. |
3348 available_regs &= ~ToRegister(instr->context()).bit(); | 3346 available_regs &= ~ToRegister(instr->context()).bit(); |
3349 } | 3347 } |
3350 | 3348 |
(...skipping 37 matching lines...)
3388 __ bind(&done); | 3386 __ bind(&done); |
3389 } | 3387 } |
3390 | 3388 |
3391 | 3389 |
3392 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { | 3390 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) { |
3393 Register input_reg = ToRegister(instr->value()); | 3391 Register input_reg = ToRegister(instr->value()); |
3394 __ test(input_reg, Operand(input_reg)); | 3392 __ test(input_reg, Operand(input_reg)); |
3395 Label is_positive; | 3393 Label is_positive; |
3396 __ j(not_sign, &is_positive, Label::kNear); | 3394 __ j(not_sign, &is_positive, Label::kNear); |
3397 __ neg(input_reg); // Sets flags. | 3395 __ neg(input_reg); // Sets flags. |
3398 DeoptimizeIf(negative, instr, Deoptimizer::kOverflow); | 3396 DeoptimizeIf(negative, instr, DeoptimizeReason::kOverflow); |
3399 __ bind(&is_positive); | 3397 __ bind(&is_positive); |
3400 } | 3398 } |
3401 | 3399 |
3402 | 3400 |
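The sequence above negates a negative input and deoptimizes if the sign flag is still set afterwards, which only happens for kMinInt (negating INT32_MIN overflows back to itself). A standalone C++ sketch of the same check -- not V8 code, and using unsigned arithmetic to sidestep the undefined behaviour of negating INT32_MIN directly:

#include <cstdint>
#include <cstdio>
#include <optional>

// Mirrors the idea in EmitIntegerMathAbs: negate a negative input and treat
// a result that is still negative as overflow (only INT32_MIN triggers it).
static std::optional<int32_t> abs_or_bailout(int32_t x) {
  if (x >= 0) return x;                                    // not_sign branch
  int32_t negated = static_cast<int32_t>(0u - static_cast<uint32_t>(x));
  if (negated < 0) return std::nullopt;                    // deopt: kOverflow
  return negated;
}

int main() {
  std::printf("%d %d\n", *abs_or_bailout(-7),
              static_cast<int>(abs_or_bailout(INT32_MIN).has_value()));  // 7 0
  return 0;
}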
3403 void LCodeGen::DoMathAbs(LMathAbs* instr) { | 3401 void LCodeGen::DoMathAbs(LMathAbs* instr) { |
3404 // Class for deferred case. | 3402 // Class for deferred case. |
3405 class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode { | 3403 class DeferredMathAbsTaggedHeapNumber final : public LDeferredCode { |
3406 public: | 3404 public: |
3407 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, | 3405 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, |
3408 LMathAbs* instr, | 3406 LMathAbs* instr, |
(...skipping 32 matching lines...)
3441 void LCodeGen::DoMathFloor(LMathFloor* instr) { | 3439 void LCodeGen::DoMathFloor(LMathFloor* instr) { |
3442 Register output_reg = ToRegister(instr->result()); | 3440 Register output_reg = ToRegister(instr->result()); |
3443 X87Register input_reg = ToX87Register(instr->value()); | 3441 X87Register input_reg = ToX87Register(instr->value()); |
3444 X87Fxch(input_reg); | 3442 X87Fxch(input_reg); |
3445 | 3443 |
3446 Label not_minus_zero, done; | 3444 Label not_minus_zero, done; |
3447 // Deoptimize on unordered. | 3445 // Deoptimize on unordered. |
3448 __ fldz(); | 3446 __ fldz(); |
3449 __ fld(1); | 3447 __ fld(1); |
3450 __ FCmp(); | 3448 __ FCmp(); |
3451 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); | 3449 DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN); |
3452 __ j(below, ¬_minus_zero, Label::kNear); | 3450 __ j(below, ¬_minus_zero, Label::kNear); |
3453 | 3451 |
3454 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3452 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3455 // Check for negative zero. | 3453 // Check for negative zero. |
3456 __ j(not_equal, ¬_minus_zero, Label::kNear); | 3454 __ j(not_equal, ¬_minus_zero, Label::kNear); |
3457 // +- 0.0. | 3455 // +- 0.0. |
3458 __ fld(0); | 3456 __ fld(0); |
3459 __ FXamSign(); | 3457 __ FXamSign(); |
3460 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 3458 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
3461 __ Move(output_reg, Immediate(0)); | 3459 __ Move(output_reg, Immediate(0)); |
3462 __ jmp(&done, Label::kFar); | 3460 __ jmp(&done, Label::kFar); |
3463 } | 3461 } |
3464 | 3462 |
3465 // Positive input. | 3463 // Positive input. |
3466 // rc=01B, round down. | 3464 // rc=01B, round down. |
3467 __ bind(¬_minus_zero); | 3465 __ bind(¬_minus_zero); |
3468 __ fnclex(); | 3466 __ fnclex(); |
3469 __ X87SetRC(0x0400); | 3467 __ X87SetRC(0x0400); |
3470 __ sub(esp, Immediate(kPointerSize)); | 3468 __ sub(esp, Immediate(kPointerSize)); |
3471 __ fist_s(Operand(esp, 0)); | 3469 __ fist_s(Operand(esp, 0)); |
3472 __ pop(output_reg); | 3470 __ pop(output_reg); |
3473 __ X87SetRC(0x0000); | 3471 __ X87SetRC(0x0000); |
3474 __ X87CheckIA(); | 3472 __ X87CheckIA(); |
3475 DeoptimizeIf(equal, instr, Deoptimizer::kOverflow); | 3473 DeoptimizeIf(equal, instr, DeoptimizeReason::kOverflow); |
3476 __ fnclex(); | 3474 __ fnclex(); |
3477 __ X87SetRC(0x0000); | 3475 __ X87SetRC(0x0000); |
3478 __ bind(&done); | 3476 __ bind(&done); |
3479 } | 3477 } |
3480 | 3478 |
3481 | 3479 |
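DoMathFloor switches the x87 rounding control to round-down (rc=01B via X87SetRC(0x0400)) before the fist_s, then restores the default mode and deoptimizes if the invalid-operation flag reports an out-of-range conversion. A minimal standalone sketch of the same rounding-mode trick, assuming only the standard <cfenv> interface and ignoring the overflow deopt:

#include <cfenv>
#include <cmath>
#include <cstdio>

// Floor a double to int by temporarily selecting round-toward-negative-
// infinity, the portable analogue of X87SetRC(0x0400) + fist_s.
static int floor_to_int(double x) {
  const int saved = std::fegetround();
  std::fesetround(FE_DOWNWARD);
  int result = static_cast<int>(std::nearbyint(x));  // rounds in current mode
  std::fesetround(saved);                            // like X87SetRC(0x0000)
  return result;
}

int main() {
  std::printf("%d %d\n", floor_to_int(2.9), floor_to_int(-2.1));  // 2 -3
  return 0;
}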
3482 void LCodeGen::DoMathRound(LMathRound* instr) { | 3480 void LCodeGen::DoMathRound(LMathRound* instr) { |
3483 X87Register input_reg = ToX87Register(instr->value()); | 3481 X87Register input_reg = ToX87Register(instr->value()); |
3484 Register result = ToRegister(instr->result()); | 3482 Register result = ToRegister(instr->result()); |
3485 X87Fxch(input_reg); | 3483 X87Fxch(input_reg); |
(...skipping 15 matching lines...)
3501 __ X87SetRC(0x0c00); | 3499 __ X87SetRC(0x0c00); |
3502 __ sub(esp, Immediate(kPointerSize)); | 3500 __ sub(esp, Immediate(kPointerSize)); |
3503 // Clear exception bits. | 3501 // Clear exception bits. |
3504 __ fnclex(); | 3502 __ fnclex(); |
3505 __ fistp_s(MemOperand(esp, 0)); | 3503 __ fistp_s(MemOperand(esp, 0)); |
3506 // Restore round mode. | 3504 // Restore round mode. |
3507 __ X87SetRC(0x0000); | 3505 __ X87SetRC(0x0000); |
3508 // Check overflow. | 3506 // Check overflow. |
3509 __ X87CheckIA(); | 3507 __ X87CheckIA(); |
3510 __ pop(result); | 3508 __ pop(result); |
3511 DeoptimizeIf(equal, instr, Deoptimizer::kConversionOverflow); | 3509 DeoptimizeIf(equal, instr, DeoptimizeReason::kConversionOverflow); |
3512 __ fnclex(); | 3510 __ fnclex(); |
3513 // Restore round mode. | 3511 // Restore round mode. |
3514 __ X87SetRC(0x0000); | 3512 __ X87SetRC(0x0000); |
3515 __ jmp(&done); | 3513 __ jmp(&done); |
3516 | 3514 |
3517 __ bind(&below_one_half); | 3515 __ bind(&below_one_half); |
3518 __ fld_d(Operand::StaticVariable(minus_one_half)); | 3516 __ fld_d(Operand::StaticVariable(minus_one_half)); |
3519 __ fld(1); | 3517 __ fld(1); |
3520 __ FCmp(); | 3518 __ FCmp(); |
3521 __ j(carry, &below_minus_one_half); | 3519 __ j(carry, &below_minus_one_half); |
3522 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 3520 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
3523 // we can ignore the difference between a result of -0 and +0. | 3521 // we can ignore the difference between a result of -0 and +0. |
3524 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3522 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3525 // If the sign is positive, we return +0. | 3523 // If the sign is positive, we return +0. |
3526 __ fld(0); | 3524 __ fld(0); |
3527 __ FXamSign(); | 3525 __ FXamSign(); |
3528 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 3526 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
3529 } | 3527 } |
3530 __ Move(result, Immediate(0)); | 3528 __ Move(result, Immediate(0)); |
3531 __ jmp(&done); | 3529 __ jmp(&done); |
3532 | 3530 |
3533 __ bind(&below_minus_one_half); | 3531 __ bind(&below_minus_one_half); |
3534 __ fld(0); | 3532 __ fld(0); |
3535 __ fadd_d(Operand::StaticVariable(one_half)); | 3533 __ fadd_d(Operand::StaticVariable(one_half)); |
3536 // rc=01B, round down. | 3534 // rc=01B, round down. |
3537 __ X87SetRC(0x0400); | 3535 __ X87SetRC(0x0400); |
3538 __ sub(esp, Immediate(kPointerSize)); | 3536 __ sub(esp, Immediate(kPointerSize)); |
3539 // Clear exception bits. | 3537 // Clear exception bits. |
3540 __ fnclex(); | 3538 __ fnclex(); |
3541 __ fistp_s(MemOperand(esp, 0)); | 3539 __ fistp_s(MemOperand(esp, 0)); |
3542 // Restore round mode. | 3540 // Restore round mode. |
3543 __ X87SetRC(0x0000); | 3541 __ X87SetRC(0x0000); |
3544 // Check overflow. | 3542 // Check overflow. |
3545 __ X87CheckIA(); | 3543 __ X87CheckIA(); |
3546 __ pop(result); | 3544 __ pop(result); |
3547 DeoptimizeIf(equal, instr, Deoptimizer::kConversionOverflow); | 3545 DeoptimizeIf(equal, instr, DeoptimizeReason::kConversionOverflow); |
3548 __ fnclex(); | 3546 __ fnclex(); |
3549 // Restore round mode. | 3547 // Restore round mode. |
3550 __ X87SetRC(0x0000); | 3548 __ X87SetRC(0x0000); |
3551 | 3549 |
3552 __ bind(&done); | 3550 __ bind(&done); |
3553 } | 3551 } |
3554 | 3552 |
3555 | 3553 |
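Arithmetically, the paths above amount to floor(x + 0.5): inputs in [-0.5, 0.5) return ±0, inputs at or above 0.5 add one half and truncate, and inputs below -0.5 add one half and round down. A simplified standalone sketch of that behaviour (it glosses over the -0 bailout and the extreme near-0.5 inputs that the explicit [-0.5, 0.5) branch handles exactly):

#include <cmath>
#include <cstdio>

// JS Math.round semantics: halfway cases round toward +infinity.
static double js_round(double x) {
  return std::floor(x + 0.5);  // add one half, then round down
}

int main() {
  std::printf("%g %g %g\n", js_round(2.5), js_round(-2.5), js_round(-3.2));
  // prints: 3 -2 -3
  return 0;
}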
3556 void LCodeGen::DoMathFround(LMathFround* instr) { | 3554 void LCodeGen::DoMathFround(LMathFround* instr) { |
3557 X87Register input_reg = ToX87Register(instr->value()); | 3555 X87Register input_reg = ToX87Register(instr->value()); |
(...skipping 57 matching lines...)
3615 __ push(exponent); | 3613 __ push(exponent); |
3616 __ fild_s(MemOperand(esp, 0)); | 3614 __ fild_s(MemOperand(esp, 0)); |
3617 __ pop(exponent); | 3615 __ pop(exponent); |
3618 } else if (exponent_type.IsTagged()) { | 3616 } else if (exponent_type.IsTagged()) { |
3619 Register exponent = ToRegister(instr->right()); | 3617 Register exponent = ToRegister(instr->right()); |
3620 Register temp = exponent.is(ecx) ? eax : ecx; | 3618 Register temp = exponent.is(ecx) ? eax : ecx; |
3621 Label no_deopt, done; | 3619 Label no_deopt, done; |
3622 X87LoadForUsage(base); | 3620 X87LoadForUsage(base); |
3623 __ JumpIfSmi(exponent, &no_deopt); | 3621 __ JumpIfSmi(exponent, &no_deopt); |
3624 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, temp); | 3622 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, temp); |
3625 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 3623 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
3626 // Heap number(double) | 3624 // Heap number(double) |
3627 __ fld_d(FieldOperand(exponent, HeapNumber::kValueOffset)); | 3625 __ fld_d(FieldOperand(exponent, HeapNumber::kValueOffset)); |
3628 __ jmp(&done); | 3626 __ jmp(&done); |
3629 // SMI | 3627 // SMI |
3630 __ bind(&no_deopt); | 3628 __ bind(&no_deopt); |
3631 __ SmiUntag(exponent); | 3629 __ SmiUntag(exponent); |
3632 __ push(exponent); | 3630 __ push(exponent); |
3633 __ fild_s(MemOperand(esp, 0)); | 3631 __ fild_s(MemOperand(esp, 0)); |
3634 __ pop(exponent); | 3632 __ pop(exponent); |
3635 __ bind(&done); | 3633 __ bind(&done); |
(...skipping 369 matching lines...)
4005 instr->hydrogen()->index()->representation())); | 4003 instr->hydrogen()->index()->representation())); |
4006 } else { | 4004 } else { |
4007 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); | 4005 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); |
4008 } | 4006 } |
4009 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { | 4007 if (FLAG_debug_code && instr->hydrogen()->skip_check()) { |
4010 Label done; | 4008 Label done; |
4011 __ j(NegateCondition(cc), &done, Label::kNear); | 4009 __ j(NegateCondition(cc), &done, Label::kNear); |
4012 __ int3(); | 4010 __ int3(); |
4013 __ bind(&done); | 4011 __ bind(&done); |
4014 } else { | 4012 } else { |
4015 DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds); | 4013 DeoptimizeIf(cc, instr, DeoptimizeReason::kOutOfBounds); |
4016 } | 4014 } |
4017 } | 4015 } |
4018 | 4016 |
4019 | 4017 |
4020 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4018 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
4021 ElementsKind elements_kind = instr->elements_kind(); | 4019 ElementsKind elements_kind = instr->elements_kind(); |
4022 LOperand* key = instr->key(); | 4020 LOperand* key = instr->key(); |
4023 if (!key->IsConstantOperand() && | 4021 if (!key->IsConstantOperand() && |
4024 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), | 4022 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(), |
4025 elements_kind)) { | 4023 elements_kind)) { |
(...skipping 187 matching lines...)
4213 .code(); | 4211 .code(); |
4214 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4212 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
4215 } | 4213 } |
4216 | 4214 |
4217 | 4215 |
4218 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4216 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4219 Register object = ToRegister(instr->object()); | 4217 Register object = ToRegister(instr->object()); |
4220 Register temp = ToRegister(instr->temp()); | 4218 Register temp = ToRegister(instr->temp()); |
4221 Label no_memento_found; | 4219 Label no_memento_found; |
4222 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4220 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
4223 DeoptimizeIf(equal, instr, Deoptimizer::kMementoFound); | 4221 DeoptimizeIf(equal, instr, DeoptimizeReason::kMementoFound); |
4224 __ bind(&no_memento_found); | 4222 __ bind(&no_memento_found); |
4225 } | 4223 } |
4226 | 4224 |
4227 | 4225 |
4228 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) { | 4226 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) { |
4229 class DeferredMaybeGrowElements final : public LDeferredCode { | 4227 class DeferredMaybeGrowElements final : public LDeferredCode { |
4230 public: | 4228 public: |
4231 DeferredMaybeGrowElements(LCodeGen* codegen, | 4229 DeferredMaybeGrowElements(LCodeGen* codegen, |
4232 LMaybeGrowElements* instr, | 4230 LMaybeGrowElements* instr, |
4233 const X87Stack& x87_stack) | 4231 const X87Stack& x87_stack) |
(...skipping 79 matching lines...)
4313 GrowArrayElementsStub stub(isolate(), instr->hydrogen()->is_js_array(), | 4311 GrowArrayElementsStub stub(isolate(), instr->hydrogen()->is_js_array(), |
4314 instr->hydrogen()->kind()); | 4312 instr->hydrogen()->kind()); |
4315 __ CallStub(&stub); | 4313 __ CallStub(&stub); |
4316 RecordSafepointWithLazyDeopt( | 4314 RecordSafepointWithLazyDeopt( |
4317 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 4315 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
4318 __ StoreToSafepointRegisterSlot(result, result); | 4316 __ StoreToSafepointRegisterSlot(result, result); |
4319 } | 4317 } |
4320 | 4318 |
4321 // Deopt on smi, which means the elements array changed to dictionary mode. | 4319 // Deopt on smi, which means the elements array changed to dictionary mode. |
4322 __ test(result, Immediate(kSmiTagMask)); | 4320 __ test(result, Immediate(kSmiTagMask)); |
4323 DeoptimizeIf(equal, instr, Deoptimizer::kSmi); | 4321 DeoptimizeIf(equal, instr, DeoptimizeReason::kSmi); |
4324 } | 4322 } |
4325 | 4323 |
4326 | 4324 |
4327 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { | 4325 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { |
4328 Register object_reg = ToRegister(instr->object()); | 4326 Register object_reg = ToRegister(instr->object()); |
4329 | 4327 |
4330 Handle<Map> from_map = instr->original_map(); | 4328 Handle<Map> from_map = instr->original_map(); |
4331 Handle<Map> to_map = instr->transitioned_map(); | 4329 Handle<Map> to_map = instr->transitioned_map(); |
4332 ElementsKind from_kind = instr->from_kind(); | 4330 ElementsKind from_kind = instr->from_kind(); |
4333 ElementsKind to_kind = instr->to_kind(); | 4331 ElementsKind to_kind = instr->to_kind(); |
(...skipping 345 matching lines...)
4679 __ StoreToSafepointRegisterSlot(reg, eax); | 4677 __ StoreToSafepointRegisterSlot(reg, eax); |
4680 } | 4678 } |
4681 | 4679 |
4682 | 4680 |
4683 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4681 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
4684 HChange* hchange = instr->hydrogen(); | 4682 HChange* hchange = instr->hydrogen(); |
4685 Register input = ToRegister(instr->value()); | 4683 Register input = ToRegister(instr->value()); |
4686 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4684 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4687 hchange->value()->CheckFlag(HValue::kUint32)) { | 4685 hchange->value()->CheckFlag(HValue::kUint32)) { |
4688 __ test(input, Immediate(0xc0000000)); | 4686 __ test(input, Immediate(0xc0000000)); |
4689 DeoptimizeIf(not_zero, instr, Deoptimizer::kOverflow); | 4687 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kOverflow); |
4690 } | 4688 } |
4691 __ SmiTag(input); | 4689 __ SmiTag(input); |
4692 if (hchange->CheckFlag(HValue::kCanOverflow) && | 4690 if (hchange->CheckFlag(HValue::kCanOverflow) && |
4693 !hchange->value()->CheckFlag(HValue::kUint32)) { | 4691 !hchange->value()->CheckFlag(HValue::kUint32)) { |
4694 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 4692 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
4695 } | 4693 } |
4696 } | 4694 } |
4697 | 4695 |
4698 | 4696 |
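The 0xc0000000 test in DoSmiTag exists because, on 32-bit targets, a smi carries a signed 31-bit payload shifted left past the tag bit; a uint32 with either of its top two bits set cannot be represented and must deoptimize. A standalone sketch of that check, assuming the 32-bit layout (one tag bit, tag value 0) -- not V8 code:

#include <cstdint>
#include <cstdio>
#include <optional>

// uint32 values >= 2^30 have bit 30 or 31 set and cannot fit a 31-bit smi.
static std::optional<int32_t> smi_tag_uint32(uint32_t value) {
  if (value & 0xc0000000u) return std::nullopt;   // matches the test + deopt
  return static_cast<int32_t>(value << 1);        // SmiTag: shift in the tag
}

int main() {
  std::printf("%d %d\n", smi_tag_uint32(5).value(),  // 10 (5 tagged)
              static_cast<int>(smi_tag_uint32(0x40000000u).has_value()));  // 0
  return 0;
}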
4699 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 4697 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
4700 LOperand* input = instr->value(); | 4698 LOperand* input = instr->value(); |
4701 Register result = ToRegister(input); | 4699 Register result = ToRegister(input); |
4702 DCHECK(input->IsRegister() && input->Equals(instr->result())); | 4700 DCHECK(input->IsRegister() && input->Equals(instr->result())); |
4703 if (instr->needs_check()) { | 4701 if (instr->needs_check()) { |
4704 __ test(result, Immediate(kSmiTagMask)); | 4702 __ test(result, Immediate(kSmiTagMask)); |
4705 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); | 4703 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kNotASmi); |
4706 } else { | 4704 } else { |
4707 __ AssertSmi(result); | 4705 __ AssertSmi(result); |
4708 } | 4706 } |
4709 __ SmiUntag(result); | 4707 __ SmiUntag(result); |
4710 } | 4708 } |
4711 | 4709 |
4712 | 4710 |
4713 void LCodeGen::EmitNumberUntagDNoSSE2(LNumberUntagD* instr, Register input_reg, | 4711 void LCodeGen::EmitNumberUntagDNoSSE2(LNumberUntagD* instr, Register input_reg, |
4714 Register temp_reg, X87Register res_reg, | 4712 Register temp_reg, X87Register res_reg, |
4715 NumberUntagDMode mode) { | 4713 NumberUntagDMode mode) { |
4716 bool can_convert_undefined_to_nan = | 4714 bool can_convert_undefined_to_nan = |
4717 instr->hydrogen()->can_convert_undefined_to_nan(); | 4715 instr->hydrogen()->can_convert_undefined_to_nan(); |
4718 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); | 4716 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero(); |
4719 | 4717 |
4720 Label load_smi, done; | 4718 Label load_smi, done; |
4721 | 4719 |
4722 X87PrepareToWrite(res_reg); | 4720 X87PrepareToWrite(res_reg); |
4723 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { | 4721 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) { |
4724 // Smi check. | 4722 // Smi check. |
4725 __ JumpIfSmi(input_reg, &load_smi); | 4723 __ JumpIfSmi(input_reg, &load_smi); |
4726 | 4724 |
4727 // Heap number map check. | 4725 // Heap number map check. |
4728 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4726 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
4729 factory()->heap_number_map()); | 4727 factory()->heap_number_map()); |
4730 if (!can_convert_undefined_to_nan) { | 4728 if (!can_convert_undefined_to_nan) { |
4731 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 4729 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
4732 } else { | 4730 } else { |
4733 Label heap_number, convert; | 4731 Label heap_number, convert; |
4734 __ j(equal, &heap_number); | 4732 __ j(equal, &heap_number); |
4735 | 4733 |
4736 // Convert undefined (or hole) to NaN. | 4734 // Convert undefined (or hole) to NaN. |
4737 __ cmp(input_reg, factory()->undefined_value()); | 4735 __ cmp(input_reg, factory()->undefined_value()); |
4738 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); | 4736 DeoptimizeIf(not_equal, instr, |
| 4737 DeoptimizeReason::kNotAHeapNumberUndefined); |
4739 | 4738 |
4740 __ bind(&convert); | 4739 __ bind(&convert); |
4741 __ push(Immediate(0xffffffff)); | 4740 __ push(Immediate(0xffffffff)); |
4742 __ push(Immediate(0x7fffffff)); | 4741 __ push(Immediate(0x7fffffff)); |
4743 __ fld_d(MemOperand(esp, 0)); | 4742 __ fld_d(MemOperand(esp, 0)); |
4744 __ lea(esp, Operand(esp, kDoubleSize)); | 4743 __ lea(esp, Operand(esp, kDoubleSize)); |
4745 __ jmp(&done, Label::kNear); | 4744 __ jmp(&done, Label::kNear); |
4746 | 4745 |
4747 __ bind(&heap_number); | 4746 __ bind(&heap_number); |
4748 } | 4747 } |
4749 // Heap number to x87 conversion. | 4748 // Heap number to x87 conversion. |
4750 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4749 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4751 if (deoptimize_on_minus_zero) { | 4750 if (deoptimize_on_minus_zero) { |
4752 __ fldz(); | 4751 __ fldz(); |
4753 __ FCmp(); | 4752 __ FCmp(); |
4754 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4753 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4755 __ j(not_zero, &done, Label::kNear); | 4754 __ j(not_zero, &done, Label::kNear); |
4756 | 4755 |
4757 // Use general purpose registers to check if we have -0.0 | 4756 // Use general purpose registers to check if we have -0.0 |
4758 __ mov(temp_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 4757 __ mov(temp_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
4759 __ test(temp_reg, Immediate(HeapNumber::kSignMask)); | 4758 __ test(temp_reg, Immediate(HeapNumber::kSignMask)); |
4760 __ j(zero, &done, Label::kNear); | 4759 __ j(zero, &done, Label::kNear); |
4761 | 4760 |
4762 // Pop FPU stack before deoptimizing. | 4761 // Pop FPU stack before deoptimizing. |
4763 __ fstp(0); | 4762 __ fstp(0); |
4764 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 4763 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
4765 } | 4764 } |
4766 __ jmp(&done, Label::kNear); | 4765 __ jmp(&done, Label::kNear); |
4767 } else { | 4766 } else { |
4768 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4767 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
4769 } | 4768 } |
4770 | 4769 |
4771 __ bind(&load_smi); | 4770 __ bind(&load_smi); |
4772 // Clobbering a temp is faster than re-tagging the | 4771 // Clobbering a temp is faster than re-tagging the |
4773 // input register since we avoid dependencies. | 4772 // input register since we avoid dependencies. |
4774 __ mov(temp_reg, input_reg); | 4773 __ mov(temp_reg, input_reg); |
(...skipping 33 matching lines...)
4808 | 4807 |
4809 __ bind(&check_bools); | 4808 __ bind(&check_bools); |
4810 __ cmp(input_reg, factory()->true_value()); | 4809 __ cmp(input_reg, factory()->true_value()); |
4811 __ j(not_equal, &check_false, Label::kNear); | 4810 __ j(not_equal, &check_false, Label::kNear); |
4812 __ Move(input_reg, Immediate(1)); | 4811 __ Move(input_reg, Immediate(1)); |
4813 __ jmp(done); | 4812 __ jmp(done); |
4814 | 4813 |
4815 __ bind(&check_false); | 4814 __ bind(&check_false); |
4816 __ cmp(input_reg, factory()->false_value()); | 4815 __ cmp(input_reg, factory()->false_value()); |
4817 DeoptimizeIf(not_equal, instr, | 4816 DeoptimizeIf(not_equal, instr, |
4818 Deoptimizer::kNotAHeapNumberUndefinedBoolean); | 4817 DeoptimizeReason::kNotAHeapNumberUndefinedBoolean); |
4819 __ Move(input_reg, Immediate(0)); | 4818 __ Move(input_reg, Immediate(0)); |
4820 } else { | 4819 } else { |
4821 // TODO(olivf) Converting a number on the fpu is actually quite slow. We | 4820 // TODO(olivf) Converting a number on the fpu is actually quite slow. We |
4822 // should first try a fast conversion and then bailout to this slow case. | 4821 // should first try a fast conversion and then bailout to this slow case. |
4823 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4822 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
4824 isolate()->factory()->heap_number_map()); | 4823 isolate()->factory()->heap_number_map()); |
4825 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber); | 4824 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumber); |
4826 | 4825 |
4827 __ sub(esp, Immediate(kPointerSize)); | 4826 __ sub(esp, Immediate(kPointerSize)); |
4828 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4827 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4829 | 4828 |
4830 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { | 4829 if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) { |
4831 Label no_precision_lost, not_nan, zero_check; | 4830 Label no_precision_lost, not_nan, zero_check; |
4832 __ fld(0); | 4831 __ fld(0); |
4833 | 4832 |
4834 __ fist_s(MemOperand(esp, 0)); | 4833 __ fist_s(MemOperand(esp, 0)); |
4835 __ fild_s(MemOperand(esp, 0)); | 4834 __ fild_s(MemOperand(esp, 0)); |
4836 __ FCmp(); | 4835 __ FCmp(); |
4837 __ pop(input_reg); | 4836 __ pop(input_reg); |
4838 | 4837 |
4839 __ j(equal, &no_precision_lost, Label::kNear); | 4838 __ j(equal, &no_precision_lost, Label::kNear); |
4840 __ fstp(0); | 4839 __ fstp(0); |
4841 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4840 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
4842 __ bind(&no_precision_lost); | 4841 __ bind(&no_precision_lost); |
4843 | 4842 |
4844 __ j(parity_odd, ¬_nan); | 4843 __ j(parity_odd, ¬_nan); |
4845 __ fstp(0); | 4844 __ fstp(0); |
4846 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4845 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
4847 __ bind(¬_nan); | 4846 __ bind(¬_nan); |
4848 | 4847 |
4849 __ test(input_reg, Operand(input_reg)); | 4848 __ test(input_reg, Operand(input_reg)); |
4850 __ j(zero, &zero_check, Label::kNear); | 4849 __ j(zero, &zero_check, Label::kNear); |
4851 __ fstp(0); | 4850 __ fstp(0); |
4852 __ jmp(done); | 4851 __ jmp(done); |
4853 | 4852 |
4854 __ bind(&zero_check); | 4853 __ bind(&zero_check); |
4855 // To check for minus zero, we load the value again as float, and check | 4854 // To check for minus zero, we load the value again as float, and check |
4856 // if that is still 0. | 4855 // if that is still 0. |
4857 __ sub(esp, Immediate(kPointerSize)); | 4856 __ sub(esp, Immediate(kPointerSize)); |
4858 __ fstp_s(Operand(esp, 0)); | 4857 __ fstp_s(Operand(esp, 0)); |
4859 __ pop(input_reg); | 4858 __ pop(input_reg); |
4860 __ test(input_reg, Operand(input_reg)); | 4859 __ test(input_reg, Operand(input_reg)); |
4861 DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero); | 4860 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kMinusZero); |
4862 } else { | 4861 } else { |
4863 __ fist_s(MemOperand(esp, 0)); | 4862 __ fist_s(MemOperand(esp, 0)); |
4864 __ fild_s(MemOperand(esp, 0)); | 4863 __ fild_s(MemOperand(esp, 0)); |
4865 __ FCmp(); | 4864 __ FCmp(); |
4866 __ pop(input_reg); | 4865 __ pop(input_reg); |
4867 DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision); | 4866 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kLostPrecision); |
4868 DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN); | 4867 DeoptimizeIf(parity_even, instr, DeoptimizeReason::kNaN); |
4869 } | 4868 } |
4870 } | 4869 } |
4871 } | 4870 } |
4872 | 4871 |
4873 | 4872 |
4874 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 4873 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
4875 class DeferredTaggedToI final : public LDeferredCode { | 4874 class DeferredTaggedToI final : public LDeferredCode { |
4876 public: | 4875 public: |
4877 DeferredTaggedToI(LCodeGen* codegen, | 4876 DeferredTaggedToI(LCodeGen* codegen, |
4878 LTaggedToI* instr, | 4877 LTaggedToI* instr, |
(...skipping 60 matching lines...)
4939 X87Fxch(input_reg); | 4938 X87Fxch(input_reg); |
4940 __ TruncateX87TOSToI(result_reg); | 4939 __ TruncateX87TOSToI(result_reg); |
4941 } else { | 4940 } else { |
4942 Label lost_precision, is_nan, minus_zero, done; | 4941 Label lost_precision, is_nan, minus_zero, done; |
4943 X87Register input_reg = ToX87Register(input); | 4942 X87Register input_reg = ToX87Register(input); |
4944 X87Fxch(input_reg); | 4943 X87Fxch(input_reg); |
4945 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), | 4944 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), |
4946 &lost_precision, &is_nan, &minus_zero); | 4945 &lost_precision, &is_nan, &minus_zero); |
4947 __ jmp(&done); | 4946 __ jmp(&done); |
4948 __ bind(&lost_precision); | 4947 __ bind(&lost_precision); |
4949 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4948 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
4950 __ bind(&is_nan); | 4949 __ bind(&is_nan); |
4951 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4950 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
4952 __ bind(&minus_zero); | 4951 __ bind(&minus_zero); |
4953 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 4952 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
4954 __ bind(&done); | 4953 __ bind(&done); |
4955 } | 4954 } |
4956 } | 4955 } |
4957 | 4956 |
4958 | 4957 |
4959 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4958 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
4960 LOperand* input = instr->value(); | 4959 LOperand* input = instr->value(); |
4961 DCHECK(input->IsDoubleRegister()); | 4960 DCHECK(input->IsDoubleRegister()); |
4962 LOperand* result = instr->result(); | 4961 LOperand* result = instr->result(); |
4963 DCHECK(result->IsRegister()); | 4962 DCHECK(result->IsRegister()); |
4964 Register result_reg = ToRegister(result); | 4963 Register result_reg = ToRegister(result); |
4965 | 4964 |
4966 Label lost_precision, is_nan, minus_zero, done; | 4965 Label lost_precision, is_nan, minus_zero, done; |
4967 X87Register input_reg = ToX87Register(input); | 4966 X87Register input_reg = ToX87Register(input); |
4968 X87Fxch(input_reg); | 4967 X87Fxch(input_reg); |
4969 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), | 4968 __ X87TOSToI(result_reg, instr->hydrogen()->GetMinusZeroMode(), |
4970 &lost_precision, &is_nan, &minus_zero); | 4969 &lost_precision, &is_nan, &minus_zero); |
4971 __ jmp(&done); | 4970 __ jmp(&done); |
4972 __ bind(&lost_precision); | 4971 __ bind(&lost_precision); |
4973 DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision); | 4972 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kLostPrecision); |
4974 __ bind(&is_nan); | 4973 __ bind(&is_nan); |
4975 DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN); | 4974 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kNaN); |
4976 __ bind(&minus_zero); | 4975 __ bind(&minus_zero); |
4977 DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero); | 4976 DeoptimizeIf(no_condition, instr, DeoptimizeReason::kMinusZero); |
4978 __ bind(&done); | 4977 __ bind(&done); |
4979 __ SmiTag(result_reg); | 4978 __ SmiTag(result_reg); |
4980 DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow); | 4979 DeoptimizeIf(overflow, instr, DeoptimizeReason::kOverflow); |
4981 } | 4980 } |
4982 | 4981 |
4983 | 4982 |
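Several of the MinusZero deopts above (EmitNumberUntagDNoSSE2, DoDeferredTaggedToI, DoDoubleToSmi) rest on the same observation: -0.0 compares equal to +0.0, so the only way to tell them apart is the sign bit of the raw representation, which is what the HeapNumber::kSignMask test reads. A standalone sketch -- not V8 code:

#include <cstdint>
#include <cstdio>
#include <cstring>

// -0.0 == 0.0 is true, so the sign has to be read from the bit pattern.
static bool is_minus_zero(double x) {
  if (x != 0.0) return false;
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);   // type-pun without UB
  return (bits >> 63) != 0;              // sign bit, cf. HeapNumber::kSignMask
}

int main() {
  std::printf("%d %d\n", is_minus_zero(-0.0), is_minus_zero(0.0));  // 1 0
  return 0;
}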
4984 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 4983 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
4985 LOperand* input = instr->value(); | 4984 LOperand* input = instr->value(); |
4986 __ test(ToOperand(input), Immediate(kSmiTagMask)); | 4985 __ test(ToOperand(input), Immediate(kSmiTagMask)); |
4987 DeoptimizeIf(not_zero, instr, Deoptimizer::kNotASmi); | 4986 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kNotASmi); |
4988 } | 4987 } |
4989 | 4988 |
4990 | 4989 |
4991 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 4990 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
4992 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 4991 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
4993 LOperand* input = instr->value(); | 4992 LOperand* input = instr->value(); |
4994 __ test(ToOperand(input), Immediate(kSmiTagMask)); | 4993 __ test(ToOperand(input), Immediate(kSmiTagMask)); |
4995 DeoptimizeIf(zero, instr, Deoptimizer::kSmi); | 4994 DeoptimizeIf(zero, instr, DeoptimizeReason::kSmi); |
4996 } | 4995 } |
4997 } | 4996 } |
4998 | 4997 |
4999 | 4998 |
5000 void LCodeGen::DoCheckArrayBufferNotNeutered( | 4999 void LCodeGen::DoCheckArrayBufferNotNeutered( |
5001 LCheckArrayBufferNotNeutered* instr) { | 5000 LCheckArrayBufferNotNeutered* instr) { |
5002 Register view = ToRegister(instr->view()); | 5001 Register view = ToRegister(instr->view()); |
5003 Register scratch = ToRegister(instr->scratch()); | 5002 Register scratch = ToRegister(instr->scratch()); |
5004 | 5003 |
5005 __ mov(scratch, FieldOperand(view, JSArrayBufferView::kBufferOffset)); | 5004 __ mov(scratch, FieldOperand(view, JSArrayBufferView::kBufferOffset)); |
5006 __ test_b(FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset), | 5005 __ test_b(FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset), |
5007 Immediate(1 << JSArrayBuffer::WasNeutered::kShift)); | 5006 Immediate(1 << JSArrayBuffer::WasNeutered::kShift)); |
5008 DeoptimizeIf(not_zero, instr, Deoptimizer::kOutOfBounds); | 5007 DeoptimizeIf(not_zero, instr, DeoptimizeReason::kOutOfBounds); |
5009 } | 5008 } |
5010 | 5009 |
5011 | 5010 |
5012 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 5011 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
5013 Register input = ToRegister(instr->value()); | 5012 Register input = ToRegister(instr->value()); |
5014 Register temp = ToRegister(instr->temp()); | 5013 Register temp = ToRegister(instr->temp()); |
5015 | 5014 |
5016 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); | 5015 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); |
5017 | 5016 |
5018 if (instr->hydrogen()->is_interval_check()) { | 5017 if (instr->hydrogen()->is_interval_check()) { |
5019 InstanceType first; | 5018 InstanceType first; |
5020 InstanceType last; | 5019 InstanceType last; |
5021 instr->hydrogen()->GetCheckInterval(&first, &last); | 5020 instr->hydrogen()->GetCheckInterval(&first, &last); |
5022 | 5021 |
5023 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(first)); | 5022 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(first)); |
5024 | 5023 |
5025 // If there is only one type in the interval check for equality. | 5024 // If there is only one type in the interval check for equality. |
5026 if (first == last) { | 5025 if (first == last) { |
5027 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); | 5026 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType); |
5028 } else { | 5027 } else { |
5029 DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType); | 5028 DeoptimizeIf(below, instr, DeoptimizeReason::kWrongInstanceType); |
5030 // Omit check for the last type. | 5029 // Omit check for the last type. |
5031 if (last != LAST_TYPE) { | 5030 if (last != LAST_TYPE) { |
5032 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(last)); | 5031 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(last)); |
5033 DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType); | 5032 DeoptimizeIf(above, instr, DeoptimizeReason::kWrongInstanceType); |
5034 } | 5033 } |
5035 } | 5034 } |
5036 } else { | 5035 } else { |
5037 uint8_t mask; | 5036 uint8_t mask; |
5038 uint8_t tag; | 5037 uint8_t tag; |
5039 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); | 5038 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); |
5040 | 5039 |
5041 if (base::bits::IsPowerOfTwo32(mask)) { | 5040 if (base::bits::IsPowerOfTwo32(mask)) { |
5042 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); | 5041 DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag)); |
5043 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(mask)); | 5042 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(mask)); |
5044 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, | 5043 DeoptimizeIf(tag == 0 ? not_zero : zero, instr, |
5045 Deoptimizer::kWrongInstanceType); | 5044 DeoptimizeReason::kWrongInstanceType); |
5046 } else { | 5045 } else { |
5047 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); | 5046 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); |
5048 __ and_(temp, mask); | 5047 __ and_(temp, mask); |
5049 __ cmp(temp, tag); | 5048 __ cmp(temp, tag); |
5050 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType); | 5049 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongInstanceType); |
5051 } | 5050 } |
5052 } | 5051 } |
5053 } | 5052 } |
5054 | 5053 |
5055 | 5054 |
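When the mask is a single bit and the expected tag is either zero or that same bit, DoCheckInstanceType replaces the generic movzx/and/cmp sequence with one test_b and chooses the deopt condition from the tag. A standalone sketch of the equivalence, using hypothetical values rather than real V8 instance types:

#include <cstdint>
#include <cstdio>

// Returns true when the instance type passes the (type & mask) == tag check.
// With a power-of-two mask and tag in {0, mask}, a single bit test suffices.
static bool instance_type_ok(uint8_t type, uint8_t mask, uint8_t tag) {
  if ((mask & (mask - 1)) == 0) {            // mask is a power of two
    const bool bit_set = (type & mask) != 0;
    return tag == 0 ? !bit_set : bit_set;    // the deopt fires on the opposite
  }
  return (type & mask) == tag;               // general path: and + cmp
}

int main() {
  std::printf("%d %d\n", instance_type_ok(0x85, 0x80, 0x80),
              instance_type_ok(0x05, 0x80, 0x00));  // 1 1
  return 0;
}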
5056 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 5055 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
5057 Handle<HeapObject> object = instr->hydrogen()->object().handle(); | 5056 Handle<HeapObject> object = instr->hydrogen()->object().handle(); |
5058 if (instr->hydrogen()->object_in_new_space()) { | 5057 if (instr->hydrogen()->object_in_new_space()) { |
5059 Register reg = ToRegister(instr->value()); | 5058 Register reg = ToRegister(instr->value()); |
5060 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 5059 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
5061 __ cmp(reg, Operand::ForCell(cell)); | 5060 __ cmp(reg, Operand::ForCell(cell)); |
5062 } else { | 5061 } else { |
5063 Operand operand = ToOperand(instr->value()); | 5062 Operand operand = ToOperand(instr->value()); |
5064 __ cmp(operand, object); | 5063 __ cmp(operand, object); |
5065 } | 5064 } |
5066 DeoptimizeIf(not_equal, instr, Deoptimizer::kValueMismatch); | 5065 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kValueMismatch); |
5067 } | 5066 } |
5068 | 5067 |
5069 | 5068 |
5070 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 5069 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
5071 { | 5070 { |
5072 PushSafepointRegistersScope scope(this); | 5071 PushSafepointRegistersScope scope(this); |
5073 __ push(object); | 5072 __ push(object); |
5074 __ xor_(esi, esi); | 5073 __ xor_(esi, esi); |
5075 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); | 5074 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); |
5076 RecordSafepointWithRegisters( | 5075 RecordSafepointWithRegisters( |
5077 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); | 5076 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
5078 | 5077 |
5079 __ test(eax, Immediate(kSmiTagMask)); | 5078 __ test(eax, Immediate(kSmiTagMask)); |
5080 } | 5079 } |
5081 DeoptimizeIf(zero, instr, Deoptimizer::kInstanceMigrationFailed); | 5080 DeoptimizeIf(zero, instr, DeoptimizeReason::kInstanceMigrationFailed); |
5082 } | 5081 } |
5083 | 5082 |
5084 | 5083 |
5085 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5084 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
5086 class DeferredCheckMaps final : public LDeferredCode { | 5085 class DeferredCheckMaps final : public LDeferredCode { |
5087 public: | 5086 public: |
5088 DeferredCheckMaps(LCodeGen* codegen, | 5087 DeferredCheckMaps(LCodeGen* codegen, |
5089 LCheckMaps* instr, | 5088 LCheckMaps* instr, |
5090 Register object, | 5089 Register object, |
5091 const X87Stack& x87_stack) | 5090 const X87Stack& x87_stack) |
(...skipping 36 matching lines...)
5128 Handle<Map> map = maps->at(i).handle(); | 5127 Handle<Map> map = maps->at(i).handle(); |
5129 __ CompareMap(reg, map); | 5128 __ CompareMap(reg, map); |
5130 __ j(equal, &success, Label::kNear); | 5129 __ j(equal, &success, Label::kNear); |
5131 } | 5130 } |
5132 | 5131 |
5133 Handle<Map> map = maps->at(maps->size() - 1).handle(); | 5132 Handle<Map> map = maps->at(maps->size() - 1).handle(); |
5134 __ CompareMap(reg, map); | 5133 __ CompareMap(reg, map); |
5135 if (instr->hydrogen()->HasMigrationTarget()) { | 5134 if (instr->hydrogen()->HasMigrationTarget()) { |
5136 __ j(not_equal, deferred->entry()); | 5135 __ j(not_equal, deferred->entry()); |
5137 } else { | 5136 } else { |
5138 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); | 5137 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap); |
5139 } | 5138 } |
5140 | 5139 |
5141 __ bind(&success); | 5140 __ bind(&success); |
5142 } | 5141 } |
5143 | 5142 |
5144 | 5143 |
5145 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5144 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
5146 X87Register value_reg = ToX87Register(instr->unclamped()); | 5145 X87Register value_reg = ToX87Register(instr->unclamped()); |
5147 Register result_reg = ToRegister(instr->result()); | 5146 Register result_reg = ToRegister(instr->result()); |
5148 X87Fxch(value_reg); | 5147 X87Fxch(value_reg); |
(...skipping 20 matching lines...)
5169 __ JumpIfSmi(input_reg, &is_smi); | 5168 __ JumpIfSmi(input_reg, &is_smi); |
5170 | 5169 |
5171 // Check for heap number | 5170 // Check for heap number |
5172 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5171 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
5173 factory()->heap_number_map()); | 5172 factory()->heap_number_map()); |
5174 __ j(equal, &heap_number, Label::kNear); | 5173 __ j(equal, &heap_number, Label::kNear); |
5175 | 5174 |
5176 // Check for undefined. Undefined is converted to zero for clamping | 5175 // Check for undefined. Undefined is converted to zero for clamping |
5177 // conversions. | 5176 // conversions. |
5178 __ cmp(input_reg, factory()->undefined_value()); | 5177 __ cmp(input_reg, factory()->undefined_value()); |
5179 DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined); | 5178 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kNotAHeapNumberUndefined); |
5180 __ jmp(&zero_result, Label::kNear); | 5179 __ jmp(&zero_result, Label::kNear); |
5181 | 5180 |
5182 // Heap number | 5181 // Heap number |
5183 __ bind(&heap_number); | 5182 __ bind(&heap_number); |
5184 | 5183 |
5185 // Surprisingly, all of the hand-crafted bit-manipulations below are much | 5184 // Surprisingly, all of the hand-crafted bit-manipulations below are much |
5186 // faster than the x86 FPU built-in instruction, especially since "banker's | 5185 // faster than the x86 FPU built-in instruction, especially since "banker's |
5187 // rounding" would be additionally very expensive | 5186 // rounding" would be additionally very expensive |
5188 | 5187 |
5189 // Get exponent word. | 5188 // Get exponent word. |
(...skipping 495 matching lines...)
5685 __ jmp(&done, Label::kNear); | 5684 __ jmp(&done, Label::kNear); |
5686 | 5685 |
5687 __ bind(&load_cache); | 5686 __ bind(&load_cache); |
5688 __ LoadInstanceDescriptors(map, result); | 5687 __ LoadInstanceDescriptors(map, result); |
5689 __ mov(result, | 5688 __ mov(result, |
5690 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 5689 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
5691 __ mov(result, | 5690 __ mov(result, |
5692 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 5691 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
5693 __ bind(&done); | 5692 __ bind(&done); |
5694 __ test(result, result); | 5693 __ test(result, result); |
5695 DeoptimizeIf(equal, instr, Deoptimizer::kNoCache); | 5694 DeoptimizeIf(equal, instr, DeoptimizeReason::kNoCache); |
5696 } | 5695 } |
5697 | 5696 |
5698 | 5697 |
5699 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5698 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5700 Register object = ToRegister(instr->value()); | 5699 Register object = ToRegister(instr->value()); |
5701 __ cmp(ToRegister(instr->map()), | 5700 __ cmp(ToRegister(instr->map()), |
5702 FieldOperand(object, HeapObject::kMapOffset)); | 5701 FieldOperand(object, HeapObject::kMapOffset)); |
5703 DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap); | 5702 DeoptimizeIf(not_equal, instr, DeoptimizeReason::kWrongMap); |
5704 } | 5703 } |
5705 | 5704 |
5706 | 5705 |
5707 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 5706 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
5708 Register object, | 5707 Register object, |
5709 Register index) { | 5708 Register index) { |
5710 PushSafepointRegistersScope scope(this); | 5709 PushSafepointRegistersScope scope(this); |
5711 __ push(object); | 5710 __ push(object); |
5712 __ push(index); | 5711 __ push(index); |
5713 __ xor_(esi, esi); | 5712 __ xor_(esi, esi); |
(...skipping 60 matching lines...)
5774 __ bind(deferred->exit()); | 5773 __ bind(deferred->exit()); |
5775 __ bind(&done); | 5774 __ bind(&done); |
5776 } | 5775 } |
5777 | 5776 |
5778 #undef __ | 5777 #undef __ |
5779 | 5778 |
5780 } // namespace internal | 5779 } // namespace internal |
5781 } // namespace v8 | 5780 } // namespace v8 |
5782 | 5781 |
5783 #endif // V8_TARGET_ARCH_X87 | 5782 #endif // V8_TARGET_ARCH_X87 |