OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 408 matching lines...) |
419 HConstant* constant = chunk_->LookupConstant(const_op); | 419 HConstant* constant = chunk_->LookupConstant(const_op); |
420 Handle<Object> literal = constant->handle(); | 420 Handle<Object> literal = constant->handle(); |
421 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 421 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
422 if (r.IsInteger32()) { | 422 if (r.IsInteger32()) { |
423 ASSERT(literal->IsNumber()); | 423 ASSERT(literal->IsNumber()); |
424 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); | 424 __ mov(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
425 } else if (r.IsDouble()) { | 425 } else if (r.IsDouble()) { |
426 Abort("EmitLoadRegister: Unsupported double immediate."); | 426 Abort("EmitLoadRegister: Unsupported double immediate."); |
427 } else { | 427 } else { |
428 ASSERT(r.IsTagged()); | 428 ASSERT(r.IsTagged()); |
429 if (literal->IsSmi()) { | 429 __ LoadObject(scratch, literal); |
430 __ mov(scratch, Operand(literal)); | |
431 } else { | |
432 __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal)); | |
433 } | |
434 } | 430 } |
435 return scratch; | 431 return scratch; |
436 } else if (op->IsStackSlot() || op->IsArgument()) { | 432 } else if (op->IsStackSlot() || op->IsArgument()) { |
437 __ ldr(scratch, ToMemOperand(op)); | 433 __ ldr(scratch, ToMemOperand(op)); |
438 return scratch; | 434 return scratch; |
439 } | 435 } |
440 UNREACHABLE(); | 436 UNREACHABLE(); |
441 return scratch; | 437 return scratch; |
442 } | 438 } |
443 | 439 |
(...skipping 47 matching lines...) |
491 bool LCodeGen::IsInteger32(LConstantOperand* op) const { | 487 bool LCodeGen::IsInteger32(LConstantOperand* op) const { |
492 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); | 488 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); |
493 } | 489 } |
494 | 490 |
495 | 491 |
496 bool LCodeGen::IsSmi(LConstantOperand* op) const { | 492 bool LCodeGen::IsSmi(LConstantOperand* op) const { |
497 return chunk_->LookupLiteralRepresentation(op).IsSmi(); | 493 return chunk_->LookupLiteralRepresentation(op).IsSmi(); |
498 } | 494 } |
499 | 495 |
500 | 496 |
501 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 497 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const { |
502 HConstant* constant = chunk_->LookupConstant(op); | 498 return ToRepresentation(op, Representation::Integer32()); |
503 return constant->Integer32Value(); | |
504 } | 499 } |
505 | 500 |
506 | 501 |
| 502 int32_t LCodeGen::ToRepresentation(LConstantOperand* op, |
| 503 const Representation& r) const { |
| 504 HConstant* constant = chunk_->LookupConstant(op); |
| 505 int32_t value = constant->Integer32Value(); |
| 506 if (r.IsInteger32()) return value; |
| 507 ASSERT(r.IsSmiOrTagged()); |
| 508 return reinterpret_cast<int32_t>(Smi::FromInt(value)); |
| 509 } |
| 510 |
| 511 |
507 Smi* LCodeGen::ToSmi(LConstantOperand* op) const { | 512 Smi* LCodeGen::ToSmi(LConstantOperand* op) const { |
508 HConstant* constant = chunk_->LookupConstant(op); | 513 HConstant* constant = chunk_->LookupConstant(op); |
509 return Smi::FromInt(constant->Integer32Value()); | 514 return Smi::FromInt(constant->Integer32Value()); |
510 } | 515 } |
511 | 516 |
512 | 517 |
513 double LCodeGen::ToDouble(LConstantOperand* op) const { | 518 double LCodeGen::ToDouble(LConstantOperand* op) const { |
514 HConstant* constant = chunk_->LookupConstant(op); | 519 HConstant* constant = chunk_->LookupConstant(op); |
515 ASSERT(constant->HasDoubleValue()); | 520 ASSERT(constant->HasDoubleValue()); |
516 return constant->DoubleValue(); | 521 return constant->DoubleValue(); |
517 } | 522 } |
518 | 523 |
519 | 524 |
520 Operand LCodeGen::ToOperand(LOperand* op) { | 525 Operand LCodeGen::ToOperand(LOperand* op) { |
521 if (op->IsConstantOperand()) { | 526 if (op->IsConstantOperand()) { |
522 LConstantOperand* const_op = LConstantOperand::cast(op); | 527 LConstantOperand* const_op = LConstantOperand::cast(op); |
523 HConstant* constant = chunk()->LookupConstant(const_op); | 528 HConstant* constant = chunk()->LookupConstant(const_op); |
524 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 529 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
525 if (r.IsInteger32()) { | 530 if (r.IsSmi()) { |
| 531 ASSERT(constant->HasSmiValue()); |
| 532 return Operand(Smi::FromInt(constant->Integer32Value())); |
| 533 } else if (r.IsInteger32()) { |
526 ASSERT(constant->HasInteger32Value()); | 534 ASSERT(constant->HasInteger32Value()); |
527 return Operand(constant->Integer32Value()); | 535 return Operand(constant->Integer32Value()); |
528 } else if (r.IsDouble()) { | 536 } else if (r.IsDouble()) { |
529 Abort("ToOperand Unsupported double immediate."); | 537 Abort("ToOperand Unsupported double immediate."); |
530 } | 538 } |
531 ASSERT(r.IsTagged()); | 539 ASSERT(r.IsTagged()); |
532 return Operand(constant->handle()); | 540 return Operand(constant->handle()); |
533 } else if (op->IsRegister()) { | 541 } else if (op->IsRegister()) { |
534 return Operand(ToRegister(op)); | 542 return Operand(ToRegister(op)); |
535 } else if (op->IsDoubleRegister()) { | 543 } else if (op->IsDoubleRegister()) { |
(...skipping 267 matching lines...) |
803 | 811 |
804 void LCodeGen::DeoptimizeIf(Condition cc, | 812 void LCodeGen::DeoptimizeIf(Condition cc, |
805 LEnvironment* environment) { | 813 LEnvironment* environment) { |
806 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 814 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
807 ? Deoptimizer::LAZY | 815 ? Deoptimizer::LAZY |
808 : Deoptimizer::EAGER; | 816 : Deoptimizer::EAGER; |
809 DeoptimizeIf(cc, environment, bailout_type); | 817 DeoptimizeIf(cc, environment, bailout_type); |
810 } | 818 } |
811 | 819 |
812 | 820 |
813 void LCodeGen::SoftDeoptimize(LEnvironment* environment) { | |
814 ASSERT(!info()->IsStub()); | |
815 DeoptimizeIf(al, environment, Deoptimizer::SOFT); | |
816 } | |
817 | |
818 | |
819 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 821 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
820 ZoneList<Handle<Map> > maps(1, zone()); | 822 ZoneList<Handle<Map> > maps(1, zone()); |
821 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 823 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
822 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 824 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
823 RelocInfo::Mode mode = it.rinfo()->rmode(); | 825 RelocInfo::Mode mode = it.rinfo()->rmode(); |
824 if (mode == RelocInfo::EMBEDDED_OBJECT && | 826 if (mode == RelocInfo::EMBEDDED_OBJECT && |
825 it.rinfo()->target_object()->IsMap()) { | 827 it.rinfo()->target_object()->IsMap()) { |
826 Handle<Map> map(Map::cast(it.rinfo()->target_object())); | 828 Handle<Map> map(Map::cast(it.rinfo()->target_object())); |
827 if (map->CanTransition()) { | 829 if (map->CanTransition()) { |
828 maps.Add(map, zone()); | 830 maps.Add(map, zone()); |
(...skipping 352 matching lines...) |
1181 Register left_reg = ToRegister(instr->left()); | 1183 Register left_reg = ToRegister(instr->left()); |
1182 Register right_reg = ToRegister(instr->right()); | 1184 Register right_reg = ToRegister(instr->right()); |
1183 Register result_reg = ToRegister(instr->result()); | 1185 Register result_reg = ToRegister(instr->result()); |
1184 Register scratch = scratch0(); | 1186 Register scratch = scratch0(); |
1185 ASSERT(!scratch.is(left_reg)); | 1187 ASSERT(!scratch.is(left_reg)); |
1186 ASSERT(!scratch.is(right_reg)); | 1188 ASSERT(!scratch.is(right_reg)); |
1187 ASSERT(!scratch.is(result_reg)); | 1189 ASSERT(!scratch.is(result_reg)); |
1188 DwVfpRegister dividend = ToDoubleRegister(instr->temp()); | 1190 DwVfpRegister dividend = ToDoubleRegister(instr->temp()); |
1189 DwVfpRegister divisor = ToDoubleRegister(instr->temp2()); | 1191 DwVfpRegister divisor = ToDoubleRegister(instr->temp2()); |
1190 ASSERT(!divisor.is(dividend)); | 1192 ASSERT(!divisor.is(dividend)); |
1191 DwVfpRegister quotient = double_scratch0(); | 1193 LowDwVfpRegister quotient = double_scratch0(); |
1192 ASSERT(!quotient.is(dividend)); | 1194 ASSERT(!quotient.is(dividend)); |
1193 ASSERT(!quotient.is(divisor)); | 1195 ASSERT(!quotient.is(divisor)); |
1194 | 1196 |
1195 Label done; | 1197 Label done; |
1196 // Check for x % 0, we have to deopt in this case because we can't return a | 1198 // Check for x % 0, we have to deopt in this case because we can't return a |
1197 // NaN. | 1199 // NaN. |
1198 if (right->CanBeZero()) { | 1200 if (right->CanBeZero()) { |
1199 __ cmp(right_reg, Operand::Zero()); | 1201 __ cmp(right_reg, Operand::Zero()); |
1200 DeoptimizeIf(eq, instr->environment()); | 1202 DeoptimizeIf(eq, instr->environment()); |
1201 } | 1203 } |
1202 | 1204 |
1203 __ Move(result_reg, left_reg); | 1205 __ Move(result_reg, left_reg); |
1204 // Load the arguments in VFP registers. The divisor value is preloaded | 1206 // Load the arguments in VFP registers. The divisor value is preloaded |
1205 // before. Be careful that 'right_reg' is only live on entry. | 1207 // before. Be careful that 'right_reg' is only live on entry. |
1206 // TODO(svenpanne) The last comment seems to be wrong nowadays. | 1208 // TODO(svenpanne) The last comment seems to be wrong nowadays. |
1207 __ vmov(dividend.low(), left_reg); | 1209 __ vmov(double_scratch0().low(), left_reg); |
1208 __ vmov(divisor.low(), right_reg); | 1210 __ vcvt_f64_s32(dividend, double_scratch0().low()); |
1209 | 1211 __ vmov(double_scratch0().low(), right_reg); |
1210 __ vcvt_f64_s32(dividend, dividend.low()); | 1212 __ vcvt_f64_s32(divisor, double_scratch0().low()); |
1211 __ vcvt_f64_s32(divisor, divisor.low()); | |
1212 | 1213 |
1213 // We do not care about the sign of the divisor. Note that we still handle | 1214 // We do not care about the sign of the divisor. Note that we still handle |
1214 // the kMinInt % -1 case correctly, though. | 1215 // the kMinInt % -1 case correctly, though. |
1215 __ vabs(divisor, divisor); | 1216 __ vabs(divisor, divisor); |
1216 // Compute the quotient and round it to a 32bit integer. | 1217 // Compute the quotient and round it to a 32bit integer. |
1217 __ vdiv(quotient, dividend, divisor); | 1218 __ vdiv(quotient, dividend, divisor); |
1218 __ vcvt_s32_f64(quotient.low(), quotient); | 1219 __ vcvt_s32_f64(quotient.low(), quotient); |
1219 __ vcvt_f64_s32(quotient, quotient.low()); | 1220 __ vcvt_f64_s32(quotient, quotient.low()); |
1220 | 1221 |
1221 // Compute the remainder in result. | 1222 // Compute the remainder in result. |
1222 DwVfpRegister double_scratch = dividend; | 1223 __ vmul(double_scratch0(), divisor, quotient); |
1223 __ vmul(double_scratch, divisor, quotient); | 1224 __ vcvt_s32_f64(double_scratch0().low(), double_scratch0()); |
1224 __ vcvt_s32_f64(double_scratch.low(), double_scratch); | 1225 __ vmov(scratch, double_scratch0().low()); |
1225 __ vmov(scratch, double_scratch.low()); | |
1226 __ sub(result_reg, left_reg, scratch, SetCC); | 1226 __ sub(result_reg, left_reg, scratch, SetCC); |
1227 | 1227 |
1228 // If we care about -0, test if the dividend is <0 and the result is 0. | 1228 // If we care about -0, test if the dividend is <0 and the result is 0. |
1229 if (left->CanBeNegative() && | 1229 if (left->CanBeNegative() && |
1230 hmod->CanBeZero() && | 1230 hmod->CanBeZero() && |
1231 hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1231 hmod->CheckFlag(HValue::kBailoutOnMinusZero)) { |
1232 __ b(ne, &done); | 1232 __ b(ne, &done); |
1233 __ cmp(left_reg, Operand::Zero()); | 1233 __ cmp(left_reg, Operand::Zero()); |
1234 DeoptimizeIf(mi, instr->environment()); | 1234 DeoptimizeIf(mi, instr->environment()); |
1235 } | 1235 } |
(...skipping 179 matching lines...) |
1415 HInstruction::kAllUsesTruncatingToInt32)) { | 1415 HInstruction::kAllUsesTruncatingToInt32)) { |
1416 // Compute remainder and deopt if it's not zero. | 1416 // Compute remainder and deopt if it's not zero. |
1417 const Register remainder = scratch0(); | 1417 const Register remainder = scratch0(); |
1418 __ mls(remainder, result, right, left); | 1418 __ mls(remainder, result, right, left); |
1419 __ cmp(remainder, Operand::Zero()); | 1419 __ cmp(remainder, Operand::Zero()); |
1420 DeoptimizeIf(ne, instr->environment()); | 1420 DeoptimizeIf(ne, instr->environment()); |
1421 } | 1421 } |
1422 } else { | 1422 } else { |
1423 const DoubleRegister vleft = ToDoubleRegister(instr->temp()); | 1423 const DoubleRegister vleft = ToDoubleRegister(instr->temp()); |
1424 const DoubleRegister vright = double_scratch0(); | 1424 const DoubleRegister vright = double_scratch0(); |
1425 __ vmov(vleft.low(), left); | 1425 __ vmov(double_scratch0().low(), left); |
1426 __ vmov(vright.low(), right); | 1426 __ vcvt_f64_s32(vleft, double_scratch0().low()); |
1427 __ vcvt_f64_s32(vleft, vleft.low()); | 1427 __ vmov(double_scratch0().low(), right); |
1428 __ vcvt_f64_s32(vright, vright.low()); | 1428 __ vcvt_f64_s32(vright, double_scratch0().low()); |
1429 __ vdiv(vleft, vleft, vright); // vleft now contains the result. | 1429 __ vdiv(vleft, vleft, vright); // vleft now contains the result. |
1430 __ vcvt_s32_f64(vright.low(), vleft); | 1430 __ vcvt_s32_f64(double_scratch0().low(), vleft); |
1431 __ vmov(result, vright.low()); | 1431 __ vmov(result, double_scratch0().low()); |
1432 | 1432 |
1433 if (!instr->hydrogen()->CheckFlag( | 1433 if (!instr->hydrogen()->CheckFlag( |
1434 HInstruction::kAllUsesTruncatingToInt32)) { | 1434 HInstruction::kAllUsesTruncatingToInt32)) { |
1435 // Deopt if exact conversion to integer was not possible. | 1435 // Deopt if exact conversion to integer was not possible. |
1436 // Use vright as scratch register. | 1436 // Use vright as scratch register. |
1437 __ vcvt_f64_s32(vright, vright.low()); | 1437 __ vcvt_f64_s32(double_scratch0(), double_scratch0().low()); |
1438 __ VFPCompareAndSetFlags(vleft, vright); | 1438 __ VFPCompareAndSetFlags(vleft, double_scratch0()); |
1439 DeoptimizeIf(ne, instr->environment()); | 1439 DeoptimizeIf(ne, instr->environment()); |
1440 } | 1440 } |
1441 } | 1441 } |
1442 } | 1442 } |
1443 | 1443 |
1444 | 1444 |
1445 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { | 1445 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) { |
1446 DwVfpRegister addend = ToDoubleRegister(instr->addend()); | 1446 DwVfpRegister addend = ToDoubleRegister(instr->addend()); |
1447 DwVfpRegister multiplier = ToDoubleRegister(instr->multiplier()); | 1447 DwVfpRegister multiplier = ToDoubleRegister(instr->multiplier()); |
1448 DwVfpRegister multiplicand = ToDoubleRegister(instr->multiplicand()); | 1448 DwVfpRegister multiplicand = ToDoubleRegister(instr->multiplicand()); |
(...skipping 94 matching lines...) |
1543 // Note that result may alias left. | 1543 // Note that result may alias left. |
1544 Register left = ToRegister(instr->left()); | 1544 Register left = ToRegister(instr->left()); |
1545 LOperand* right_op = instr->right(); | 1545 LOperand* right_op = instr->right(); |
1546 | 1546 |
1547 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1547 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
1548 bool bailout_on_minus_zero = | 1548 bool bailout_on_minus_zero = |
1549 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); | 1549 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); |
1550 | 1550 |
1551 if (right_op->IsConstantOperand() && !can_overflow) { | 1551 if (right_op->IsConstantOperand() && !can_overflow) { |
1552 // Use optimized code for specific constants. | 1552 // Use optimized code for specific constants. |
1553 int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); | 1553 int32_t constant = ToRepresentation( |
| 1554 LConstantOperand::cast(right_op), |
| 1555 instr->hydrogen()->right()->representation()); |
1554 | 1556 |
1555 if (bailout_on_minus_zero && (constant < 0)) { | 1557 if (bailout_on_minus_zero && (constant < 0)) { |
1556 // The case of a null constant will be handled separately. | 1558 // The case of a null constant will be handled separately. |
1557 // If constant is negative and left is null, the result should be -0. | 1559 // If constant is negative and left is null, the result should be -0. |
1558 __ cmp(left, Operand::Zero()); | 1560 __ cmp(left, Operand::Zero()); |
1559 DeoptimizeIf(eq, instr->environment()); | 1561 DeoptimizeIf(eq, instr->environment()); |
1560 } | 1562 } |
1561 | 1563 |
1562 switch (constant) { | 1564 switch (constant) { |
1563 case -1: | 1565 case -1: |
(...skipping 43 matching lines...) |
1607 } | 1609 } |
1608 | 1610 |
1609 } else { | 1611 } else { |
1610 Register right = EmitLoadRegister(right_op, scratch); | 1612 Register right = EmitLoadRegister(right_op, scratch); |
1611 if (bailout_on_minus_zero) { | 1613 if (bailout_on_minus_zero) { |
1612 __ orr(ToRegister(instr->temp()), left, right); | 1614 __ orr(ToRegister(instr->temp()), left, right); |
1613 } | 1615 } |
1614 | 1616 |
1615 if (can_overflow) { | 1617 if (can_overflow) { |
1616 // scratch:result = left * right. | 1618 // scratch:result = left * right. |
1617 __ smull(result, scratch, left, right); | 1619 if (instr->hydrogen()->representation().IsSmi()) { |
| 1620 __ SmiUntag(result, left); |
| 1621 __ smull(result, scratch, result, right); |
| 1622 } else { |
| 1623 __ smull(result, scratch, left, right); |
| 1624 } |
1618 __ cmp(scratch, Operand(result, ASR, 31)); | 1625 __ cmp(scratch, Operand(result, ASR, 31)); |
1619 DeoptimizeIf(ne, instr->environment()); | 1626 DeoptimizeIf(ne, instr->environment()); |
1620 } else { | 1627 } else { |
1621 __ mul(result, left, right); | 1628 if (instr->hydrogen()->representation().IsSmi()) { |
| 1629 __ SmiUntag(result, left); |
| 1630 __ mul(result, result, right); |
| 1631 } else { |
| 1632 __ mul(result, left, right); |
| 1633 } |
1622 } | 1634 } |
1623 | 1635 |
1624 if (bailout_on_minus_zero) { | 1636 if (bailout_on_minus_zero) { |
1625 // Bail out if the result is supposed to be negative zero. | 1637 // Bail out if the result is supposed to be negative zero. |
1626 Label done; | 1638 Label done; |
1627 __ cmp(result, Operand::Zero()); | 1639 __ cmp(result, Operand::Zero()); |
1628 __ b(ne, &done); | 1640 __ b(ne, &done); |
1629 __ cmp(ToRegister(instr->temp()), Operand::Zero()); | 1641 __ cmp(ToRegister(instr->temp()), Operand::Zero()); |
1630 DeoptimizeIf(mi, instr->environment()); | 1642 DeoptimizeIf(mi, instr->environment()); |
1631 __ bind(&done); | 1643 __ bind(&done); |
(...skipping 91 matching lines...) |
1723 } else { | 1735 } else { |
1724 if (instr->can_deopt()) { | 1736 if (instr->can_deopt()) { |
1725 __ tst(left, Operand(0x80000000)); | 1737 __ tst(left, Operand(0x80000000)); |
1726 DeoptimizeIf(ne, instr->environment()); | 1738 DeoptimizeIf(ne, instr->environment()); |
1727 } | 1739 } |
1728 __ Move(result, left); | 1740 __ Move(result, left); |
1729 } | 1741 } |
1730 break; | 1742 break; |
1731 case Token::SHL: | 1743 case Token::SHL: |
1732 if (shift_count != 0) { | 1744 if (shift_count != 0) { |
1733 __ mov(result, Operand(left, LSL, shift_count)); | 1745 if (instr->hydrogen_value()->representation().IsSmi() && |
| 1746 instr->can_deopt()) { |
| 1747 __ mov(result, Operand(left, LSL, shift_count - 1)); |
| 1748 __ SmiTag(result, result, SetCC); |
| 1749 DeoptimizeIf(vs, instr->environment()); |
| 1750 } else { |
| 1751 __ mov(result, Operand(left, LSL, shift_count)); |
| 1752 } |
1734 } else { | 1753 } else { |
1735 __ Move(result, left); | 1754 __ Move(result, left); |
1736 } | 1755 } |
1737 break; | 1756 break; |
1738 default: | 1757 default: |
1739 UNREACHABLE(); | 1758 UNREACHABLE(); |
1740 break; | 1759 break; |
1741 } | 1760 } |
1742 } | 1761 } |
1743 } | 1762 } |
(...skipping 55 matching lines...) |
1799 ASSERT(instr->result()->IsDoubleRegister()); | 1818 ASSERT(instr->result()->IsDoubleRegister()); |
1800 DwVfpRegister result = ToDoubleRegister(instr->result()); | 1819 DwVfpRegister result = ToDoubleRegister(instr->result()); |
1801 double v = instr->value(); | 1820 double v = instr->value(); |
1802 __ Vmov(result, v, scratch0()); | 1821 __ Vmov(result, v, scratch0()); |
1803 } | 1822 } |
1804 | 1823 |
1805 | 1824 |
1806 void LCodeGen::DoConstantT(LConstantT* instr) { | 1825 void LCodeGen::DoConstantT(LConstantT* instr) { |
1807 Handle<Object> value = instr->value(); | 1826 Handle<Object> value = instr->value(); |
1808 AllowDeferredHandleDereference smi_check; | 1827 AllowDeferredHandleDereference smi_check; |
1809 if (value->IsSmi()) { | 1828 __ LoadObject(ToRegister(instr->result()), value); |
1810 __ mov(ToRegister(instr->result()), Operand(value)); | |
1811 } else { | |
1812 __ LoadHeapObject(ToRegister(instr->result()), | |
1813 Handle<HeapObject>::cast(value)); | |
1814 } | |
1815 } | 1829 } |
1816 | 1830 |
1817 | 1831 |
1818 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { | 1832 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { |
1819 Register result = ToRegister(instr->result()); | 1833 Register result = ToRegister(instr->result()); |
1820 Register map = ToRegister(instr->value()); | 1834 Register map = ToRegister(instr->value()); |
1821 __ EnumLength(result, map); | 1835 __ EnumLength(result, map); |
1822 } | 1836 } |
1823 | 1837 |
1824 | 1838 |
(...skipping 141 matching lines...) |
1966 if (can_overflow) { | 1980 if (can_overflow) { |
1967 DeoptimizeIf(vs, instr->environment()); | 1981 DeoptimizeIf(vs, instr->environment()); |
1968 } | 1982 } |
1969 } | 1983 } |
1970 | 1984 |
1971 | 1985 |
1972 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { | 1986 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { |
1973 LOperand* left = instr->left(); | 1987 LOperand* left = instr->left(); |
1974 LOperand* right = instr->right(); | 1988 LOperand* right = instr->right(); |
1975 HMathMinMax::Operation operation = instr->hydrogen()->operation(); | 1989 HMathMinMax::Operation operation = instr->hydrogen()->operation(); |
1976 if (instr->hydrogen()->representation().IsInteger32()) { | 1990 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { |
1977 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; | 1991 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; |
1978 Register left_reg = ToRegister(left); | 1992 Register left_reg = ToRegister(left); |
1979 Operand right_op = (right->IsRegister() || right->IsConstantOperand()) | 1993 Operand right_op = (right->IsRegister() || right->IsConstantOperand()) |
1980 ? ToOperand(right) | 1994 ? ToOperand(right) |
1981 : Operand(EmitLoadRegister(right, ip)); | 1995 : Operand(EmitLoadRegister(right, ip)); |
1982 Register result_reg = ToRegister(instr->result()); | 1996 Register result_reg = ToRegister(instr->result()); |
1983 __ cmp(left_reg, right_op); | 1997 __ cmp(left_reg, right_op); |
1984 __ Move(result_reg, left_reg, condition); | 1998 __ Move(result_reg, left_reg, condition); |
1985 __ mov(result_reg, right_op, LeaveCC, NegateCondition(condition)); | 1999 __ mov(result_reg, right_op, LeaveCC, NegateCondition(condition)); |
1986 } else { | 2000 } else { |
(...skipping 378 matching lines...) |
2365 | 2379 |
2366 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { | 2380 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { |
2367 Register left = ToRegister(instr->left()); | 2381 Register left = ToRegister(instr->left()); |
2368 Register right = ToRegister(instr->right()); | 2382 Register right = ToRegister(instr->right()); |
2369 | 2383 |
2370 __ cmp(left, Operand(right)); | 2384 __ cmp(left, Operand(right)); |
2371 EmitBranch(instr, eq); | 2385 EmitBranch(instr, eq); |
2372 } | 2386 } |
2373 | 2387 |
2374 | 2388 |
2375 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) { | |
2376 Register left = ToRegister(instr->left()); | |
2377 | |
2378 __ cmp(left, Operand(instr->hydrogen()->right())); | |
2379 EmitBranch(instr, eq); | |
2380 } | |
2381 | |
2382 | |
2383 Condition LCodeGen::EmitIsObject(Register input, | 2389 Condition LCodeGen::EmitIsObject(Register input, |
2384 Register temp1, | 2390 Register temp1, |
2385 Label* is_not_object, | 2391 Label* is_not_object, |
2386 Label* is_object) { | 2392 Label* is_object) { |
2387 Register temp2 = scratch0(); | 2393 Register temp2 = scratch0(); |
2388 __ JumpIfSmi(input, is_not_object); | 2394 __ JumpIfSmi(input, is_not_object); |
2389 | 2395 |
2390 __ LoadRoot(temp2, Heap::kNullValueRootIndex); | 2396 __ LoadRoot(temp2, Heap::kNullValueRootIndex); |
2391 __ cmp(input, temp2); | 2397 __ cmp(input, temp2); |
2392 __ b(eq, is_object); | 2398 __ b(eq, is_object); |
(...skipping 632 matching lines...) |
3025 int offset = index * kPointerSize; | 3031 int offset = index * kPointerSize; |
3026 if (index < 0) { | 3032 if (index < 0) { |
3027 // Negative property indices are in-object properties, indexed | 3033 // Negative property indices are in-object properties, indexed |
3028 // from the end of the fixed part of the object. | 3034 // from the end of the fixed part of the object. |
3029 __ ldr(result, FieldMemOperand(object, offset + type->instance_size())); | 3035 __ ldr(result, FieldMemOperand(object, offset + type->instance_size())); |
3030 } else { | 3036 } else { |
3031 // Non-negative property indices are in the properties array. | 3037 // Non-negative property indices are in the properties array. |
3032 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 3038 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
3033 __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); | 3039 __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); |
3034 } | 3040 } |
3035 } else if (lookup.IsConstantFunction()) { | 3041 } else if (lookup.IsConstant()) { |
3036 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type)); | 3042 Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate()); |
3037 __ LoadHeapObject(result, function); | 3043 __ LoadObject(result, constant); |
3038 } else { | 3044 } else { |
3039 // Negative lookup. | 3045 // Negative lookup. |
3040 // Check prototypes. | 3046 // Check prototypes. |
3041 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); | 3047 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); |
3042 Heap* heap = type->GetHeap(); | 3048 Heap* heap = type->GetHeap(); |
3043 while (*current != heap->null_value()) { | 3049 while (*current != heap->null_value()) { |
3044 __ LoadHeapObject(result, current); | 3050 __ LoadHeapObject(result, current); |
3045 __ ldr(result, FieldMemOperand(result, HeapObject::kMapOffset)); | 3051 __ ldr(result, FieldMemOperand(result, HeapObject::kMapOffset)); |
3046 __ cmp(result, Operand(Handle<Map>(current->map()))); | 3052 __ cmp(result, Operand(Handle<Map>(current->map()))); |
3047 DeoptimizeIf(ne, env); | 3053 DeoptimizeIf(ne, env); |
(...skipping 154 matching lines...) |
3202 int additional_offset = instr->additional_index() << element_size_shift; | 3208 int additional_offset = instr->additional_index() << element_size_shift; |
3203 | 3209 |
3204 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || | 3210 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || |
3205 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { | 3211 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { |
3206 DwVfpRegister result = ToDoubleRegister(instr->result()); | 3212 DwVfpRegister result = ToDoubleRegister(instr->result()); |
3207 Operand operand = key_is_constant | 3213 Operand operand = key_is_constant |
3208 ? Operand(constant_key << element_size_shift) | 3214 ? Operand(constant_key << element_size_shift) |
3209 : Operand(key, LSL, shift_size); | 3215 : Operand(key, LSL, shift_size); |
3210 __ add(scratch0(), external_pointer, operand); | 3216 __ add(scratch0(), external_pointer, operand); |
3211 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { | 3217 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { |
3212 __ vldr(kScratchDoubleReg.low(), scratch0(), additional_offset); | 3218 __ vldr(double_scratch0().low(), scratch0(), additional_offset); |
3213 __ vcvt_f64_f32(result, kScratchDoubleReg.low()); | 3219 __ vcvt_f64_f32(result, double_scratch0().low()); |
3214 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS | 3220 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS |
3215 __ vldr(result, scratch0(), additional_offset); | 3221 __ vldr(result, scratch0(), additional_offset); |
3216 } | 3222 } |
3217 } else { | 3223 } else { |
3218 Register result = ToRegister(instr->result()); | 3224 Register result = ToRegister(instr->result()); |
3219 MemOperand mem_operand = PrepareKeyedOperand( | 3225 MemOperand mem_operand = PrepareKeyedOperand( |
3220 key, external_pointer, key_is_constant, constant_key, | 3226 key, external_pointer, key_is_constant, constant_key, |
3221 element_size_shift, shift_size, | 3227 element_size_shift, shift_size, |
3222 instr->additional_index(), additional_offset); | 3228 instr->additional_index(), additional_offset); |
3223 switch (elements_kind) { | 3229 switch (elements_kind) { |
(...skipping 547 matching lines...) |
3771 } | 3777 } |
3772 } | 3778 } |
3773 | 3779 |
3774 | 3780 |
3775 void LCodeGen::DoMathFloor(LMathFloor* instr) { | 3781 void LCodeGen::DoMathFloor(LMathFloor* instr) { |
3776 DwVfpRegister input = ToDoubleRegister(instr->value()); | 3782 DwVfpRegister input = ToDoubleRegister(instr->value()); |
3777 Register result = ToRegister(instr->result()); | 3783 Register result = ToRegister(instr->result()); |
3778 Register input_high = scratch0(); | 3784 Register input_high = scratch0(); |
3779 Label done, exact; | 3785 Label done, exact; |
3780 | 3786 |
3781 __ vmov(input_high, input.high()); | |
3782 __ TryInt32Floor(result, input, input_high, double_scratch0(), &done, &exact); | 3787 __ TryInt32Floor(result, input, input_high, double_scratch0(), &done, &exact); |
3783 DeoptimizeIf(al, instr->environment()); | 3788 DeoptimizeIf(al, instr->environment()); |
3784 | 3789 |
3785 __ bind(&exact); | 3790 __ bind(&exact); |
3786 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3791 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3787 // Test for -0. | 3792 // Test for -0. |
3788 __ cmp(result, Operand::Zero()); | 3793 __ cmp(result, Operand::Zero()); |
3789 __ b(ne, &done); | 3794 __ b(ne, &done); |
3790 __ cmp(input_high, Operand::Zero()); | 3795 __ cmp(input_high, Operand::Zero()); |
3791 DeoptimizeIf(mi, instr->environment()); | 3796 DeoptimizeIf(mi, instr->environment()); |
(...skipping 12 matching lines...) |
3804 Label convert, done; | 3809 Label convert, done; |
3805 | 3810 |
3806 __ Vmov(dot_five, 0.5, scratch0()); | 3811 __ Vmov(dot_five, 0.5, scratch0()); |
3807 __ vabs(double_scratch1, input); | 3812 __ vabs(double_scratch1, input); |
3808 __ VFPCompareAndSetFlags(double_scratch1, dot_five); | 3813 __ VFPCompareAndSetFlags(double_scratch1, dot_five); |
3809 // If input is in [-0.5, -0], the result is -0. | 3814 // If input is in [-0.5, -0], the result is -0. |
3810 // If input is in [+0, +0.5[, the result is +0. | 3815 // If input is in [+0, +0.5[, the result is +0. |
3811 // If the input is +0.5, the result is 1. | 3816 // If the input is +0.5, the result is 1. |
3812 __ b(hi, &convert); // Out of [-0.5, +0.5]. | 3817 __ b(hi, &convert); // Out of [-0.5, +0.5]. |
3813 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3818 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3814 __ vmov(input_high, input.high()); | 3819 __ VmovHigh(input_high, input); |
3815 __ cmp(input_high, Operand::Zero()); | 3820 __ cmp(input_high, Operand::Zero()); |
3816 DeoptimizeIf(mi, instr->environment()); // [-0.5, -0]. | 3821 DeoptimizeIf(mi, instr->environment()); // [-0.5, -0]. |
3817 } | 3822 } |
3818 __ VFPCompareAndSetFlags(input, dot_five); | 3823 __ VFPCompareAndSetFlags(input, dot_five); |
3819 __ mov(result, Operand(1), LeaveCC, eq); // +0.5. | 3824 __ mov(result, Operand(1), LeaveCC, eq); // +0.5. |
3820 // Remaining cases: [+0, +0.5[ or [-0.5, +0.5[, depending on | 3825 // Remaining cases: [+0, +0.5[ or [-0.5, +0.5[, depending on |
3821 // flag kBailoutOnMinusZero. | 3826 // flag kBailoutOnMinusZero. |
3822 __ mov(result, Operand::Zero(), LeaveCC, ne); | 3827 __ mov(result, Operand::Zero(), LeaveCC, ne); |
3823 __ b(&done); | 3828 __ b(&done); |
3824 | 3829 |
3825 __ bind(&convert); | 3830 __ bind(&convert); |
3826 __ vadd(input_plus_dot_five, input, dot_five); | 3831 __ vadd(input_plus_dot_five, input, dot_five); |
3827 __ vmov(input_high, input_plus_dot_five.high()); | |
3828 // Reuse dot_five (double_scratch0) as we no longer need this value. | 3832 // Reuse dot_five (double_scratch0) as we no longer need this value. |
3829 __ TryInt32Floor(result, input_plus_dot_five, input_high, double_scratch0(), | 3833 __ TryInt32Floor(result, input_plus_dot_five, input_high, double_scratch0(), |
3830 &done, &done); | 3834 &done, &done); |
3831 DeoptimizeIf(al, instr->environment()); | 3835 DeoptimizeIf(al, instr->environment()); |
3832 __ bind(&done); | 3836 __ bind(&done); |
3833 } | 3837 } |
3834 | 3838 |
3835 | 3839 |
3836 void LCodeGen::DoMathSqrt(LMathSqrt* instr) { | 3840 void LCodeGen::DoMathSqrt(LMathSqrt* instr) { |
3837 DwVfpRegister input = ToDoubleRegister(instr->value()); | 3841 DwVfpRegister input = ToDoubleRegister(instr->value()); |
(...skipping 421 matching lines...) |
4259 | 4263 |
4260 // Name is always in r2. | 4264 // Name is always in r2. |
4261 __ mov(r2, Operand(instr->name())); | 4265 __ mov(r2, Operand(instr->name())); |
4262 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4266 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
4263 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4267 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
4264 : isolate()->builtins()->StoreIC_Initialize(); | 4268 : isolate()->builtins()->StoreIC_Initialize(); |
4265 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); | 4269 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); |
4266 } | 4270 } |
4267 | 4271 |
4268 | 4272 |
| 4273 void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { |
| 4274 if (FLAG_debug_code && check->hydrogen()->skip_check()) { |
| 4275 Label done; |
| 4276 __ b(NegateCondition(cc), &done); |
| 4277 __ stop("eliminated bounds check failed"); |
| 4278 __ bind(&done); |
| 4279 } else { |
| 4280 DeoptimizeIf(cc, check->environment()); |
| 4281 } |
| 4282 } |
| 4283 |
| 4284 |
4269 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 4285 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
4270 if (instr->hydrogen()->skip_check()) return; | 4286 if (instr->hydrogen()->skip_check()) return; |
4271 | 4287 |
4272 if (instr->index()->IsConstantOperand()) { | 4288 if (instr->index()->IsConstantOperand()) { |
4273 int constant_index = | 4289 int constant_index = |
4274 ToInteger32(LConstantOperand::cast(instr->index())); | 4290 ToInteger32(LConstantOperand::cast(instr->index())); |
4275 if (instr->hydrogen()->length()->representation().IsSmi()) { | 4291 if (instr->hydrogen()->length()->representation().IsSmi()) { |
4276 __ mov(ip, Operand(Smi::FromInt(constant_index))); | 4292 __ mov(ip, Operand(Smi::FromInt(constant_index))); |
4277 } else { | 4293 } else { |
4278 __ mov(ip, Operand(constant_index)); | 4294 __ mov(ip, Operand(constant_index)); |
4279 } | 4295 } |
4280 __ cmp(ip, ToRegister(instr->length())); | 4296 __ cmp(ip, ToRegister(instr->length())); |
4281 } else { | 4297 } else { |
4282 __ cmp(ToRegister(instr->index()), ToRegister(instr->length())); | 4298 __ cmp(ToRegister(instr->index()), ToRegister(instr->length())); |
4283 } | 4299 } |
4284 DeoptimizeIf(hs, instr->environment()); | 4300 Condition condition = instr->hydrogen()->allow_equality() ? hi : hs; |
| 4301 ApplyCheckIf(condition, instr); |
4285 } | 4302 } |
4286 | 4303 |
4287 | 4304 |
4288 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { | 4305 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { |
4289 Register external_pointer = ToRegister(instr->elements()); | 4306 Register external_pointer = ToRegister(instr->elements()); |
4290 Register key = no_reg; | 4307 Register key = no_reg; |
4291 ElementsKind elements_kind = instr->elements_kind(); | 4308 ElementsKind elements_kind = instr->elements_kind(); |
4292 bool key_is_constant = instr->key()->IsConstantOperand(); | 4309 bool key_is_constant = instr->key()->IsConstantOperand(); |
4293 int constant_key = 0; | 4310 int constant_key = 0; |
4294 if (key_is_constant) { | 4311 if (key_is_constant) { |
(...skipping 188 matching lines...) |
4483 __ cmp(scratch, Operand(from_map)); | 4500 __ cmp(scratch, Operand(from_map)); |
4484 __ b(ne, ¬_applicable); | 4501 __ b(ne, ¬_applicable); |
4485 | 4502 |
4486 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { | 4503 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { |
4487 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4504 Register new_map_reg = ToRegister(instr->new_map_temp()); |
4488 __ mov(new_map_reg, Operand(to_map)); | 4505 __ mov(new_map_reg, Operand(to_map)); |
4489 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); | 4506 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); |
4490 // Write barrier. | 4507 // Write barrier. |
4491 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4508 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
4492 scratch, GetLinkRegisterState(), kDontSaveFPRegs); | 4509 scratch, GetLinkRegisterState(), kDontSaveFPRegs); |
4493 } else if (FLAG_compiled_transitions) { | 4510 } else { |
4494 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4511 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
4495 __ Move(r0, object_reg); | 4512 __ Move(r0, object_reg); |
4496 __ Move(r1, to_map); | 4513 __ Move(r1, to_map); |
4497 TransitionElementsKindStub stub(from_kind, to_kind); | 4514 TransitionElementsKindStub stub(from_kind, to_kind); |
4498 __ CallStub(&stub); | 4515 __ CallStub(&stub); |
4499 RecordSafepointWithRegisters( | 4516 RecordSafepointWithRegisters( |
4500 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4517 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
4501 } else if (IsFastSmiElementsKind(from_kind) && | |
4502 IsFastDoubleElementsKind(to_kind)) { | |
4503 Register fixed_object_reg = ToRegister(instr->temp()); | |
4504 ASSERT(fixed_object_reg.is(r2)); | |
4505 Register new_map_reg = ToRegister(instr->new_map_temp()); | |
4506 ASSERT(new_map_reg.is(r3)); | |
4507 __ mov(new_map_reg, Operand(to_map)); | |
4508 __ mov(fixed_object_reg, object_reg); | |
4509 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(), | |
4510 RelocInfo::CODE_TARGET, instr); | |
4511 } else if (IsFastDoubleElementsKind(from_kind) && | |
4512 IsFastObjectElementsKind(to_kind)) { | |
4513 Register fixed_object_reg = ToRegister(instr->temp()); | |
4514 ASSERT(fixed_object_reg.is(r2)); | |
4515 Register new_map_reg = ToRegister(instr->new_map_temp()); | |
4516 ASSERT(new_map_reg.is(r3)); | |
4517 __ mov(new_map_reg, Operand(to_map)); | |
4518 __ mov(fixed_object_reg, object_reg); | |
4519 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(), | |
4520 RelocInfo::CODE_TARGET, instr); | |
4521 } else { | |
4522 UNREACHABLE(); | |
4523 } | 4518 } |
4524 __ bind(¬_applicable); | 4519 __ bind(¬_applicable); |
4525 } | 4520 } |
4526 | 4521 |
4527 | 4522 |
4528 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4523 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4529 Register object = ToRegister(instr->object()); | 4524 Register object = ToRegister(instr->object()); |
4530 Register temp = ToRegister(instr->temp()); | 4525 Register temp = ToRegister(instr->temp()); |
4531 __ TestJSArrayForAllocationMemento(object, temp); | 4526 __ TestJSArrayForAllocationMemento(object, temp); |
4532 DeoptimizeIf(eq, instr->environment()); | 4527 DeoptimizeIf(eq, instr->environment()); |
(...skipping 207 matching lines...) |
4740 __ bind(deferred->exit()); | 4735 __ bind(deferred->exit()); |
4741 } | 4736 } |
4742 | 4737 |
4743 | 4738 |
4744 void LCodeGen::DoDeferredNumberTagI(LInstruction* instr, | 4739 void LCodeGen::DoDeferredNumberTagI(LInstruction* instr, |
4745 LOperand* value, | 4740 LOperand* value, |
4746 IntegerSignedness signedness) { | 4741 IntegerSignedness signedness) { |
4747 Label slow; | 4742 Label slow; |
4748 Register src = ToRegister(value); | 4743 Register src = ToRegister(value); |
4749 Register dst = ToRegister(instr->result()); | 4744 Register dst = ToRegister(instr->result()); |
4750 DwVfpRegister dbl_scratch = double_scratch0(); | 4745 LowDwVfpRegister dbl_scratch = double_scratch0(); |
4751 SwVfpRegister flt_scratch = dbl_scratch.low(); | |
4752 | 4746 |
4753 // Preserve the value of all registers. | 4747 // Preserve the value of all registers. |
4754 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4748 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
4755 | 4749 |
4756 Label done; | 4750 Label done; |
4757 if (signedness == SIGNED_INT32) { | 4751 if (signedness == SIGNED_INT32) { |
4758 // There was overflow, so bits 30 and 31 of the original integer | 4752 // There was overflow, so bits 30 and 31 of the original integer |
4759 // disagree. Try to allocate a heap number in new space and store | 4753 // disagree. Try to allocate a heap number in new space and store |
4760 // the value in there. If that fails, call the runtime system. | 4754 // the value in there. If that fails, call the runtime system. |
4761 if (dst.is(src)) { | 4755 if (dst.is(src)) { |
4762 __ SmiUntag(src, dst); | 4756 __ SmiUntag(src, dst); |
4763 __ eor(src, src, Operand(0x80000000)); | 4757 __ eor(src, src, Operand(0x80000000)); |
4764 } | 4758 } |
4765 __ vmov(flt_scratch, src); | 4759 __ vmov(dbl_scratch.low(), src); |
4766 __ vcvt_f64_s32(dbl_scratch, flt_scratch); | 4760 __ vcvt_f64_s32(dbl_scratch, dbl_scratch.low()); |
4767 } else { | 4761 } else { |
4768 __ vmov(flt_scratch, src); | 4762 __ vmov(dbl_scratch.low(), src); |
4769 __ vcvt_f64_u32(dbl_scratch, flt_scratch); | 4763 __ vcvt_f64_u32(dbl_scratch, dbl_scratch.low()); |
4770 } | 4764 } |
4771 | 4765 |
4772 if (FLAG_inline_new) { | 4766 if (FLAG_inline_new) { |
4773 __ LoadRoot(scratch0(), Heap::kHeapNumberMapRootIndex); | 4767 __ LoadRoot(scratch0(), Heap::kHeapNumberMapRootIndex); |
4774 __ AllocateHeapNumber(r5, r3, r4, scratch0(), &slow, DONT_TAG_RESULT); | 4768 __ AllocateHeapNumber(r5, r3, r4, scratch0(), &slow, DONT_TAG_RESULT); |
4775 __ Move(dst, r5); | 4769 __ Move(dst, r5); |
4776 __ b(&done); | 4770 __ b(&done); |
4777 } | 4771 } |
4778 | 4772 |
4779 // Slow case: Call the runtime system to do the number allocation. | 4773 // Slow case: Call the runtime system to do the number allocation. |
(...skipping 40 matching lines...) |
4820 HLoadKeyed* load = HLoadKeyed::cast(change_input); | 4814 HLoadKeyed* load = HLoadKeyed::cast(change_input); |
4821 convert_hole = load->UsesMustHandleHole(); | 4815 convert_hole = load->UsesMustHandleHole(); |
4822 } | 4816 } |
4823 | 4817 |
4824 Label no_special_nan_handling; | 4818 Label no_special_nan_handling; |
4825 Label done; | 4819 Label done; |
4826 if (convert_hole) { | 4820 if (convert_hole) { |
4827 DwVfpRegister input_reg = ToDoubleRegister(instr->value()); | 4821 DwVfpRegister input_reg = ToDoubleRegister(instr->value()); |
4828 __ VFPCompareAndSetFlags(input_reg, input_reg); | 4822 __ VFPCompareAndSetFlags(input_reg, input_reg); |
4829 __ b(vc, &no_special_nan_handling); | 4823 __ b(vc, &no_special_nan_handling); |
4830 __ vmov(scratch, input_reg.high()); | 4824 __ VmovHigh(scratch, input_reg); |
4831 __ cmp(scratch, Operand(kHoleNanUpper32)); | 4825 __ cmp(scratch, Operand(kHoleNanUpper32)); |
4832 // If not the hole NaN, force the NaN to be canonical. | 4826 // If not the hole NaN, force the NaN to be canonical. |
4833 __ VFPCanonicalizeNaN(input_reg, ne); | 4827 __ VFPCanonicalizeNaN(input_reg, ne); |
4834 __ b(ne, &no_special_nan_handling); | 4828 __ b(ne, &no_special_nan_handling); |
4835 __ Move(reg, factory()->the_hole_value()); | 4829 __ Move(reg, factory()->the_hole_value()); |
4836 __ b(&done); | 4830 __ b(&done); |
4837 } | 4831 } |
4838 | 4832 |
4839 __ bind(&no_special_nan_handling); | 4833 __ bind(&no_special_nan_handling); |
4840 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); | 4834 DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr); |
(...skipping 79 matching lines...) |
4920 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 4914 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
4921 __ cmp(input_reg, Operand(ip)); | 4915 __ cmp(input_reg, Operand(ip)); |
4922 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) { | 4916 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) { |
4923 __ b(eq, &convert); | 4917 __ b(eq, &convert); |
4924 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 4918 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
4925 __ cmp(input_reg, Operand(ip)); | 4919 __ cmp(input_reg, Operand(ip)); |
4926 } | 4920 } |
4927 DeoptimizeIf(ne, env); | 4921 DeoptimizeIf(ne, env); |
4928 | 4922 |
4929 __ bind(&convert); | 4923 __ bind(&convert); |
4930 __ LoadRoot(ip, Heap::kNanValueRootIndex); | 4924 __ LoadRoot(scratch, Heap::kNanValueRootIndex); |
4931 __ sub(ip, ip, Operand(kHeapObjectTag)); | 4925 __ vldr(result_reg, scratch, HeapNumber::kValueOffset - kHeapObjectTag); |
4932 __ vldr(result_reg, ip, HeapNumber::kValueOffset); | |
4933 __ jmp(&done); | 4926 __ jmp(&done); |
4934 | 4927 |
4935 __ bind(&heap_number); | 4928 __ bind(&heap_number); |
4936 } | 4929 } |
4937 // Heap number to double register conversion. | 4930 // Heap number to double register conversion. |
4938 __ sub(ip, input_reg, Operand(kHeapObjectTag)); | 4931 __ vldr(result_reg, input_reg, HeapNumber::kValueOffset - kHeapObjectTag); |
4939 __ vldr(result_reg, ip, HeapNumber::kValueOffset); | |
4940 if (deoptimize_on_minus_zero) { | 4932 if (deoptimize_on_minus_zero) { |
4941 __ vmov(ip, result_reg.low()); | 4933 __ VmovLow(scratch, result_reg); |
4942 __ cmp(ip, Operand::Zero()); | 4934 __ cmp(scratch, Operand::Zero()); |
4943 __ b(ne, &done); | 4935 __ b(ne, &done); |
4944 __ vmov(ip, result_reg.high()); | 4936 __ VmovHigh(scratch, result_reg); |
4945 __ cmp(ip, Operand(HeapNumber::kSignMask)); | 4937 __ cmp(scratch, Operand(HeapNumber::kSignMask)); |
4946 DeoptimizeIf(eq, env); | 4938 DeoptimizeIf(eq, env); |
4947 } | 4939 } |
4948 __ jmp(&done); | 4940 __ jmp(&done); |
4949 } else { | 4941 } else { |
4950 __ SmiUntag(scratch, input_reg); | 4942 __ SmiUntag(scratch, input_reg); |
4951 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI); | 4943 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI); |
4952 } | 4944 } |
4953 | 4945 |
4954 // Smi to double register conversion | 4946 // Smi to double register conversion |
4955 __ bind(&load_smi); | 4947 __ bind(&load_smi); |
4956 // scratch: untagged value of input_reg | 4948 // scratch: untagged value of input_reg |
4957 __ vmov(flt_scratch, scratch); | 4949 __ vmov(flt_scratch, scratch); |
4958 __ vcvt_f64_s32(result_reg, flt_scratch); | 4950 __ vcvt_f64_s32(result_reg, flt_scratch); |
4959 __ bind(&done); | 4951 __ bind(&done); |
4960 } | 4952 } |
4961 | 4953 |
4962 | 4954 |
4963 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { | 4955 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { |
4964 Register input_reg = ToRegister(instr->value()); | 4956 Register input_reg = ToRegister(instr->value()); |
4965 Register scratch1 = scratch0(); | 4957 Register scratch1 = scratch0(); |
4966 Register scratch2 = ToRegister(instr->temp()); | 4958 Register scratch2 = ToRegister(instr->temp()); |
4967 DwVfpRegister double_scratch = double_scratch0(); | 4959 LowDwVfpRegister double_scratch = double_scratch0(); |
4968 DwVfpRegister double_scratch2 = ToDoubleRegister(instr->temp3()); | 4960 DwVfpRegister double_scratch2 = ToDoubleRegister(instr->temp3()); |
4969 | 4961 |
4970 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2)); | 4962 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2)); |
4971 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1)); | 4963 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1)); |
4972 | 4964 |
4973 Label done; | 4965 Label done; |
4974 | 4966 |
4975 // The input was optimistically untagged; revert it. | 4967 // The input was optimistically untagged; revert it. |
4976 // The carry flag is set when we reach this deferred code as we just executed | 4968 // The carry flag is set when we reach this deferred code as we just executed |
4977 // SmiUntag(heap_object, SetCC) | 4969 // SmiUntag(heap_object, SetCC) |
(...skipping 27 matching lines...) |
5005 __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset); | 4997 __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset); |
5006 | 4998 |
5007 __ ECMAToInt32(input_reg, double_scratch2, | 4999 __ ECMAToInt32(input_reg, double_scratch2, |
5008 scratch1, scratch2, scratch3, double_scratch); | 5000 scratch1, scratch2, scratch3, double_scratch); |
5009 | 5001 |
5010 } else { | 5002 } else { |
5011 // Deoptimize if we don't have a heap number. | 5003 // Deoptimize if we don't have a heap number. |
5012 DeoptimizeIf(ne, instr->environment()); | 5004 DeoptimizeIf(ne, instr->environment()); |
5013 | 5005 |
5014 __ sub(ip, input_reg, Operand(kHeapObjectTag)); | 5006 __ sub(ip, input_reg, Operand(kHeapObjectTag)); |
5015 __ vldr(double_scratch, ip, HeapNumber::kValueOffset); | 5007 __ vldr(double_scratch2, ip, HeapNumber::kValueOffset); |
5016 __ TryDoubleToInt32Exact(input_reg, double_scratch, double_scratch2); | 5008 __ TryDoubleToInt32Exact(input_reg, double_scratch2, double_scratch); |
5017 DeoptimizeIf(ne, instr->environment()); | 5009 DeoptimizeIf(ne, instr->environment()); |
5018 | 5010 |
5019 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5011 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5020 __ cmp(input_reg, Operand::Zero()); | 5012 __ cmp(input_reg, Operand::Zero()); |
5021 __ b(ne, &done); | 5013 __ b(ne, &done); |
5022 __ vmov(scratch1, double_scratch.high()); | 5014 __ VmovHigh(scratch1, double_scratch2); |
5023 __ tst(scratch1, Operand(HeapNumber::kSignMask)); | 5015 __ tst(scratch1, Operand(HeapNumber::kSignMask)); |
5024 DeoptimizeIf(ne, instr->environment()); | 5016 DeoptimizeIf(ne, instr->environment()); |
5025 } | 5017 } |
5026 } | 5018 } |
5027 __ bind(&done); | 5019 __ bind(&done); |
5028 } | 5020 } |
5029 | 5021 |
5030 | 5022 |
5031 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 5023 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
5032 class DeferredTaggedToI: public LDeferredCode { | 5024 class DeferredTaggedToI: public LDeferredCode { |
(...skipping 52 matching lines...) |
5085 instr->environment(), | 5077 instr->environment(), |
5086 mode); | 5078 mode); |
5087 } | 5079 } |
5088 | 5080 |
5089 | 5081 |
5090 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { | 5082 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { |
5091 Register result_reg = ToRegister(instr->result()); | 5083 Register result_reg = ToRegister(instr->result()); |
5092 Register scratch1 = scratch0(); | 5084 Register scratch1 = scratch0(); |
5093 Register scratch2 = ToRegister(instr->temp()); | 5085 Register scratch2 = ToRegister(instr->temp()); |
5094 DwVfpRegister double_input = ToDoubleRegister(instr->value()); | 5086 DwVfpRegister double_input = ToDoubleRegister(instr->value()); |
5095 DwVfpRegister double_scratch = double_scratch0(); | 5087 LowDwVfpRegister double_scratch = double_scratch0(); |
5096 | 5088 |
5097 if (instr->truncating()) { | 5089 if (instr->truncating()) { |
5098 Register scratch3 = ToRegister(instr->temp2()); | 5090 Register scratch3 = ToRegister(instr->temp2()); |
5099 __ ECMAToInt32(result_reg, double_input, | 5091 __ ECMAToInt32(result_reg, double_input, |
5100 scratch1, scratch2, scratch3, double_scratch); | 5092 scratch1, scratch2, scratch3, double_scratch); |
5101 } else { | 5093 } else { |
5102 __ TryDoubleToInt32Exact(result_reg, double_input, double_scratch); | 5094 __ TryDoubleToInt32Exact(result_reg, double_input, double_scratch); |
5103 // Deoptimize if the input wasn't a int32 (inside a double). | 5095 // Deoptimize if the input wasn't a int32 (inside a double). |
5104 DeoptimizeIf(ne, instr->environment()); | 5096 DeoptimizeIf(ne, instr->environment()); |
5105 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5097 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5106 Label done; | 5098 Label done; |
5107 __ cmp(result_reg, Operand::Zero()); | 5099 __ cmp(result_reg, Operand::Zero()); |
5108 __ b(ne, &done); | 5100 __ b(ne, &done); |
5109 __ vmov(scratch1, double_input.high()); | 5101 __ VmovHigh(scratch1, double_input); |
5110 __ tst(scratch1, Operand(HeapNumber::kSignMask)); | 5102 __ tst(scratch1, Operand(HeapNumber::kSignMask)); |
5111 DeoptimizeIf(ne, instr->environment()); | 5103 DeoptimizeIf(ne, instr->environment()); |
5112 __ bind(&done); | 5104 __ bind(&done); |
5113 } | 5105 } |
5114 } | 5106 } |
5115 } | 5107 } |
5116 | 5108 |
5117 | 5109 |
5118 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 5110 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
5119 Register result_reg = ToRegister(instr->result()); | 5111 Register result_reg = ToRegister(instr->result()); |
5120 Register scratch1 = scratch0(); | 5112 Register scratch1 = scratch0(); |
5121 Register scratch2 = ToRegister(instr->temp()); | 5113 Register scratch2 = ToRegister(instr->temp()); |
5122 DwVfpRegister double_input = ToDoubleRegister(instr->value()); | 5114 DwVfpRegister double_input = ToDoubleRegister(instr->value()); |
5123 DwVfpRegister double_scratch = double_scratch0(); | 5115 LowDwVfpRegister double_scratch = double_scratch0(); |
5124 | 5116 |
5125 if (instr->truncating()) { | 5117 if (instr->truncating()) { |
5126 Register scratch3 = ToRegister(instr->temp2()); | 5118 Register scratch3 = ToRegister(instr->temp2()); |
5127 __ ECMAToInt32(result_reg, double_input, | 5119 __ ECMAToInt32(result_reg, double_input, |
5128 scratch1, scratch2, scratch3, double_scratch); | 5120 scratch1, scratch2, scratch3, double_scratch); |
5129 } else { | 5121 } else { |
5130 __ TryDoubleToInt32Exact(result_reg, double_input, double_scratch); | 5122 __ TryDoubleToInt32Exact(result_reg, double_input, double_scratch); |
5131 // Deoptimize if the input wasn't an int32 (inside a double). | 5123 // Deoptimize if the input wasn't an int32 (inside a double). |
5132 DeoptimizeIf(ne, instr->environment()); | 5124 DeoptimizeIf(ne, instr->environment()); |
5133 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 5125 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
5134 Label done; | 5126 Label done; |
5135 __ cmp(result_reg, Operand::Zero()); | 5127 __ cmp(result_reg, Operand::Zero()); |
5136 __ b(ne, &done); | 5128 __ b(ne, &done); |
5137 __ vmov(scratch1, double_input.high()); | 5129 __ VmovHigh(scratch1, double_input); |
5138 __ tst(scratch1, Operand(HeapNumber::kSignMask)); | 5130 __ tst(scratch1, Operand(HeapNumber::kSignMask)); |
5139 DeoptimizeIf(ne, instr->environment()); | 5131 DeoptimizeIf(ne, instr->environment()); |
5140 __ bind(&done); | 5132 __ bind(&done); |
5141 } | 5133 } |
5142 } | 5134 } |
5143 __ SmiTag(result_reg, SetCC); | 5135 __ SmiTag(result_reg, SetCC); |
5144 DeoptimizeIf(vs, instr->environment()); | 5136 DeoptimizeIf(vs, instr->environment()); |
5145 } | 5137 } |
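
DoDoubleToSmi ends by tagging the converted integer with SmiTag(result_reg, SetCC) and deoptimizing on the overflow (vs) condition. A rough equivalent of that overflow check in plain C++, assuming 32-bit Smis with a one-bit tag and the GCC/Clang __builtin_add_overflow intrinsic; TrySmiTag is an illustrative name, not a V8 helper:

#include <cstdint>
#include <cstdio>

// Returns false where the generated code would take the vs (overflow) deopt.
static bool TrySmiTag(int32_t value, int32_t* tagged) {
  // value + value is the same as value << 1; overflow means the payload
  // does not fit in the 31 bits left after the tag bit.
  if (__builtin_add_overflow(value, value, tagged)) {
    return false;
  }
  return true;  // *tagged == value << 1, tag bit is 0
}

int main() {
  int32_t t;
  std::printf("%d %d\n", TrySmiTag(42, &t), TrySmiTag(INT32_MAX, &t));  // prints: 1 0
  return 0;
}
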
5146 | 5138 |
5147 | 5139 |
(...skipping 77 matching lines...)
5225 Handle<Map> map, | 5217 Handle<Map> map, |
5226 LEnvironment* env) { | 5218 LEnvironment* env) { |
5227 Label success; | 5219 Label success; |
5228 __ CompareMap(map_reg, map, &success); | 5220 __ CompareMap(map_reg, map, &success); |
5229 DeoptimizeIf(ne, env); | 5221 DeoptimizeIf(ne, env); |
5230 __ bind(&success); | 5222 __ bind(&success); |
5231 } | 5223 } |
5232 | 5224 |
5233 | 5225 |
5234 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 5226 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
| 5227 if (instr->hydrogen()->CanOmitMapChecks()) return; |
5235 Register map_reg = scratch0(); | 5228 Register map_reg = scratch0(); |
5236 LOperand* input = instr->value(); | 5229 LOperand* input = instr->value(); |
5237 ASSERT(input->IsRegister()); | 5230 ASSERT(input->IsRegister()); |
5238 Register reg = ToRegister(input); | 5231 Register reg = ToRegister(input); |
5239 | 5232 |
5240 Label success; | 5233 Label success; |
5241 SmallMapList* map_set = instr->hydrogen()->map_set(); | 5234 SmallMapList* map_set = instr->hydrogen()->map_set(); |
5242 __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); | 5235 __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); |
5243 for (int i = 0; i < map_set->length() - 1; i++) { | 5236 for (int i = 0; i < map_set->length() - 1; i++) { |
5244 Handle<Map> map = map_set->at(i); | 5237 Handle<Map> map = map_set->at(i); |
5245 __ CompareMap(map_reg, map, &success); | 5238 __ CompareMap(map_reg, map, &success); |
5246 __ b(eq, &success); | 5239 __ b(eq, &success); |
5247 } | 5240 } |
5248 Handle<Map> map = map_set->last(); | 5241 Handle<Map> map = map_set->last(); |
5249 DoCheckMapCommon(map_reg, map, instr->environment()); | 5242 DoCheckMapCommon(map_reg, map, instr->environment()); |
5250 __ bind(&success); | 5243 __ bind(&success); |
5251 } | 5244 } |
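
DoCheckMaps above compares the object's map against every map in the set, branching to success on the first match and funneling the final comparison through DoCheckMapCommon, which deoptimizes on a mismatch; the new early return skips the whole sequence when the hydrogen instruction reports CanOmitMapChecks(). A compact sketch of that control flow in plain C++, using a placeholder MapHandle type rather than V8's Handle<Map>:

#include <cstddef>
#include <vector>

using MapHandle = const void*;  // placeholder, not a V8 type

// Returns false where the generated code would deoptimize.
// Assumes map_set is non-empty, as the generated code does.
bool CheckMaps(MapHandle object_map, const std::vector<MapHandle>& map_set,
               bool can_omit_map_checks) {
  if (can_omit_map_checks) return true;          // mirrors the new early return
  for (std::size_t i = 0; i + 1 < map_set.size(); ++i) {
    if (object_map == map_set[i]) return true;   // b(eq, &success)
  }
  return object_map == map_set.back();           // last map: mismatch means deopt
}
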
5252 | 5245 |
5253 | 5246 |
5254 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 5247 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
5255 DwVfpRegister value_reg = ToDoubleRegister(instr->unclamped()); | 5248 DwVfpRegister value_reg = ToDoubleRegister(instr->unclamped()); |
5256 Register result_reg = ToRegister(instr->result()); | 5249 Register result_reg = ToRegister(instr->result()); |
5257 DwVfpRegister temp_reg = ToDoubleRegister(instr->temp()); | 5250 __ ClampDoubleToUint8(result_reg, value_reg, double_scratch0()); |
5258 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg); | |
5259 } | 5251 } |
5260 | 5252 |
5261 | 5253 |
5262 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { | 5254 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { |
5263 Register unclamped_reg = ToRegister(instr->unclamped()); | 5255 Register unclamped_reg = ToRegister(instr->unclamped()); |
5264 Register result_reg = ToRegister(instr->result()); | 5256 Register result_reg = ToRegister(instr->result()); |
5265 __ ClampUint8(result_reg, unclamped_reg); | 5257 __ ClampUint8(result_reg, unclamped_reg); |
5266 } | 5258 } |
5267 | 5259 |
5268 | 5260 |
(...skipping 14 matching lines...)
5283 | 5275 |
5284 // Check for undefined. Undefined is converted to zero for clamping | 5276 // Check for undefined. Undefined is converted to zero for clamping |
5285 // conversions. | 5277 // conversions. |
5286 __ cmp(input_reg, Operand(factory()->undefined_value())); | 5278 __ cmp(input_reg, Operand(factory()->undefined_value())); |
5287 DeoptimizeIf(ne, instr->environment()); | 5279 DeoptimizeIf(ne, instr->environment()); |
5288 __ mov(result_reg, Operand::Zero()); | 5280 __ mov(result_reg, Operand::Zero()); |
5289 __ jmp(&done); | 5281 __ jmp(&done); |
5290 | 5282 |
5291 // Heap number | 5283 // Heap number |
5292 __ bind(&heap_number); | 5284 __ bind(&heap_number); |
5293 __ vldr(double_scratch0(), FieldMemOperand(input_reg, | 5285 __ vldr(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
5294 HeapNumber::kValueOffset)); | 5286 __ ClampDoubleToUint8(result_reg, temp_reg, double_scratch0()); |
5295 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg); | |
5296 __ jmp(&done); | 5287 __ jmp(&done); |
5297 | 5288 |
5298 // smi | 5289 // smi |
5299 __ bind(&is_smi); | 5290 __ bind(&is_smi); |
5300 __ ClampUint8(result_reg, result_reg); | 5291 __ ClampUint8(result_reg, result_reg); |
5301 | 5292 |
5302 __ bind(&done); | 5293 __ bind(&done); |
5303 } | 5294 } |
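
The clamp sequence above dispatches on the tagged value: smis are clamped directly, heap numbers are loaded into a VFP register and clamped as doubles, and undefined becomes zero after a deopt check rejects anything else. A rough scalar equivalent of the double-to-uint8 clamp, assuming round-to-nearest with ties to even as used for clamped-array stores; this is a sketch, not the ClampDoubleToUint8 macro itself:

#include <cmath>
#include <cstdint>
#include <cstdio>

static uint8_t ClampDoubleToUint8(double value) {
  if (std::isnan(value)) return 0;   // NaN clamps to 0
  if (value <= 0.0) return 0;        // negatives and -0.0 clamp to 0
  if (value >= 255.0) return 255;
  return static_cast<uint8_t>(std::lrint(value));  // round to nearest, ties to even
}

int main() {
  std::printf("%d %d %d\n", int(ClampDoubleToUint8(-3.5)),
              int(ClampDoubleToUint8(254.6)),
              int(ClampDoubleToUint8(300.0)));  // prints: 0 255 255
  return 0;
}
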
5304 | 5295 |
5305 | 5296 |
5306 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 5297 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 5298 if (instr->hydrogen()->CanOmitPrototypeChecks()) return; |
| 5299 |
5307 Register prototype_reg = ToRegister(instr->temp()); | 5300 Register prototype_reg = ToRegister(instr->temp()); |
5308 Register map_reg = ToRegister(instr->temp2()); | 5301 Register map_reg = ToRegister(instr->temp2()); |
5309 | 5302 |
5310 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); | 5303 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); |
5311 ZoneList<Handle<Map> >* maps = instr->maps(); | 5304 ZoneList<Handle<Map> >* maps = instr->maps(); |
5312 | 5305 |
5313 ASSERT(prototypes->length() == maps->length()); | 5306 ASSERT(prototypes->length() == maps->length()); |
5314 | 5307 |
5315 if (!instr->hydrogen()->CanOmitPrototypeChecks()) { | 5308 for (int i = 0; i < prototypes->length(); i++) { |
5316 for (int i = 0; i < prototypes->length(); i++) { | 5309 __ LoadHeapObject(prototype_reg, prototypes->at(i)); |
5317 __ LoadHeapObject(prototype_reg, prototypes->at(i)); | 5310 __ ldr(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset)); |
5318 __ ldr(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset)); | 5311 DoCheckMapCommon(map_reg, maps->at(i), instr->environment()); |
5319 DoCheckMapCommon(map_reg, maps->at(i), instr->environment()); | |
5320 } | |
5321 } | 5312 } |
5322 } | 5313 } |
5323 | 5314 |
5324 | 5315 |
5325 void LCodeGen::DoAllocate(LAllocate* instr) { | 5316 void LCodeGen::DoAllocate(LAllocate* instr) { |
5326 class DeferredAllocate: public LDeferredCode { | 5317 class DeferredAllocate: public LDeferredCode { |
5327 public: | 5318 public: |
5328 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) | 5319 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) |
5329 : LDeferredCode(codegen), instr_(instr) { } | 5320 : LDeferredCode(codegen), instr_(instr) { } |
5330 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } | 5321 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } |
(...skipping 127 matching lines...)
5458 __ jmp(&allocated); | 5449 __ jmp(&allocated); |
5459 | 5450 |
5460 __ bind(&runtime_allocate); | 5451 __ bind(&runtime_allocate); |
5461 __ mov(r0, Operand(Smi::FromInt(size))); | 5452 __ mov(r0, Operand(Smi::FromInt(size))); |
5462 __ Push(r1, r0); | 5453 __ Push(r1, r0); |
5463 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5454 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
5464 __ pop(r1); | 5455 __ pop(r1); |
5465 | 5456 |
5466 __ bind(&allocated); | 5457 __ bind(&allocated); |
5467 // Copy the content into the newly allocated memory. | 5458 // Copy the content into the newly allocated memory. |
5468 __ CopyFields(r0, r1, double_scratch0(), double_scratch0().low(), | 5459 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize); |
5469 size / kPointerSize); | |
5470 } | 5460 } |
5471 | 5461 |
5472 | 5462 |
5473 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5463 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
5474 // Use the fast case closure allocation code that allocates in new | 5464 // Use the fast case closure allocation code that allocates in new |
5475 // space for nested functions that don't need literals cloning. | 5465 // space for nested functions that don't need literals cloning. |
5476 bool pretenure = instr->hydrogen()->pretenure(); | 5466 bool pretenure = instr->hydrogen()->pretenure(); |
5477 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5467 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
5478 FastNewClosureStub stub(instr->hydrogen()->language_mode(), | 5468 FastNewClosureStub stub(instr->hydrogen()->language_mode(), |
5479 instr->hydrogen()->is_generator()); | 5469 instr->hydrogen()->is_generator()); |
(...skipping 158 matching lines...)
5638 EnsureSpaceForLazyDeopt(); | 5628 EnsureSpaceForLazyDeopt(); |
5639 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5629 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5640 ASSERT(instr->HasEnvironment()); | 5630 ASSERT(instr->HasEnvironment()); |
5641 LEnvironment* env = instr->environment(); | 5631 LEnvironment* env = instr->environment(); |
5642 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5632 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5643 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5633 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5644 } | 5634 } |
5645 | 5635 |
5646 | 5636 |
5647 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5637 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
5648 if (instr->hydrogen_value()->IsSoftDeoptimize()) { | 5638 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
5649 SoftDeoptimize(instr->environment()); | 5639 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
5650 } else { | 5640 // needed return address), even though the implementation of LAZY and EAGER is |
5651 DeoptimizeIf(al, instr->environment()); | 5641 // now identical. When LAZY is eventually completely folded into EAGER, remove |
| 5642 // the special case below. |
| 5643 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
| 5644 type = Deoptimizer::LAZY; |
5652 } | 5645 } |
| 5646 DeoptimizeIf(al, instr->environment(), type); |
5653 } | 5647 } |
5654 | 5648 |
5655 | 5649 |
5656 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5650 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
5657 // Nothing to see here, move on! | 5651 // Nothing to see here, move on! |
5658 } | 5652 } |
5659 | 5653 |
5660 | 5654 |
5661 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5655 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
5662 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5656 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
(...skipping 162 matching lines...)
5825 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5819 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
5826 __ ldr(result, FieldMemOperand(scratch, | 5820 __ ldr(result, FieldMemOperand(scratch, |
5827 FixedArray::kHeaderSize - kPointerSize)); | 5821 FixedArray::kHeaderSize - kPointerSize)); |
5828 __ bind(&done); | 5822 __ bind(&done); |
5829 } | 5823 } |
5830 | 5824 |
5831 | 5825 |
5832 #undef __ | 5826 #undef __ |
5833 | 5827 |
5834 } } // namespace v8::internal | 5828 } } // namespace v8::internal |