| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/arm/lithium-arm.h" | 7 #include "src/arm/lithium-arm.h" |
| 8 #include "src/arm/lithium-codegen-arm.h" | 8 #include "src/arm/lithium-codegen-arm.h" |
| 9 #include "src/hydrogen-osr.h" | 9 #include "src/hydrogen-osr.h" |
| 10 #include "src/lithium-allocator-inl.h" | 10 #include "src/lithium-allocator-inl.h" |
| (...skipping 658 matching lines...) |
| 669 UNREACHABLE(); | 669 UNREACHABLE(); |
| 670 return NULL; | 670 return NULL; |
| 671 } | 671 } |
| 672 | 672 |
| 673 | 673 |
| 674 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) { | 674 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) { |
| 675 return AssignEnvironment(new(zone()) LDeoptimize); | 675 return AssignEnvironment(new(zone()) LDeoptimize); |
| 676 } | 676 } |
| 677 | 677 |
| 678 | 678 |
| 679 HBitwiseBinaryOperation* LChunkBuilder::CanTransformToShiftedOp(HValue* val, | |
| 680 HValue** left) { | |
| 681 if (!val->representation().IsInteger32()) return NULL; | |
| 682 if (!(val->IsBitwise() || val->IsAdd() || val->IsSub())) return NULL; | |
| 683 | |
| 684 HBinaryOperation* hinstr = HBinaryOperation::cast(val); | |
| 685 HValue* hleft = hinstr->left(); | |
| 686 HValue* hright = hinstr->right(); | |
| 687 ASSERT(hleft->representation().Equals(hinstr->representation())); | |
| 688 ASSERT(hright->representation().Equals(hinstr->representation())); | |
| 689 | |
| 690 if ((hright->IsConstant() && | |
| 691 LikelyFitsImmField(hinstr, HConstant::cast(hright)->Integer32Value())) || | |
| 692 (hinstr->IsCommutative() && hleft->IsConstant() && | |
| 693 LikelyFitsImmField(hinstr, HConstant::cast(hleft)->Integer32Value()))) { | |
| 694 // The constant operand will likely fit in the immediate field. We are | |
| 695 // better off with | |
| 696 // mov r1, r2 LSL #imm | |
| 697 // add r0, r1, #imm2 | |
| 698 // than with | |
| 699 // mov r5, #imm2 | |
| 700 // add r0, r5, r2 LSL #imm | |
| 701 return NULL; | |
| 702 } | |
| 703 | |
| 704 HBitwiseBinaryOperation* shift = NULL; | |
| 705 // TODO(aleram): We will miss situations where a shift operation is used by | |
| 706 // different instructions both as a left and as a right operand. | |
| 707 if (hright->IsBitwiseBinaryShift() && | |
| 708 HBitwiseBinaryOperation::cast(hright)->right()->IsConstant()) { | |
| 709 shift = HBitwiseBinaryOperation::cast(hright); | |
| 710 if (left != NULL) { | |
| 711 *left = hleft; | |
| 712 } | |
| 713 } else if (hinstr->IsCommutative() && | |
| 714 hleft->IsBitwiseBinaryShift() && | |
| 715 HBitwiseBinaryOperation::cast(hleft)->right()->IsConstant()) { | |
| 716 shift = HBitwiseBinaryOperation::cast(hleft); | |
| 717 if (left != NULL) { | |
| 718 *left = hright; | |
| 719 } | |
| 720 } else { | |
| 721 return NULL; | |
| 722 } | |
| 723 | |
| 724 if ((JSShiftAmountFromHConstant(shift->right()) == 0) && shift->IsShr()) { | |
| 725 // Logical shifts right by zero can deoptimize. | |
| 726 return NULL; | |
| 727 } | |
| 728 | |
| 729 return shift; | |
| 730 } | |
| 731 | |
| 732 | |
| 733 bool LChunkBuilder::ShiftCanBeOptimizedAway(HBitwiseBinaryOperation* shift) { | |
| 734 if (!shift->representation().IsInteger32()) { | |
| 735 return false; | |
| 736 } | |
| 737 for (HUseIterator it(shift->uses()); !it.Done(); it.Advance()) { | |
| 738 if (shift != CanTransformToShiftedOp(it.value())) { | |
| 739 return false; | |
| 740 } | |
| 741 } | |
| 742 return true; | |
| 743 } | |
| 744 | |
| 745 | |
| 746 LInstruction* LChunkBuilder::TryDoOpWithShiftedRightOperand( | |
| 747 HBinaryOperation* instr) { | |
| 748 HValue* left; | |
| 749 HBitwiseBinaryOperation* shift = CanTransformToShiftedOp(instr, &left); | |
| 750 | |
| 751 if ((shift != NULL) && ShiftCanBeOptimizedAway(shift)) { | |
| 752 return DoShiftedBinaryOp(instr, left, shift); | |
| 753 } | |
| 754 return NULL; | |
| 755 } | |
| 756 | |
| 757 | |
| 758 LInstruction* LChunkBuilder::DoShiftedBinaryOp( | |
| 759 HBinaryOperation* hinstr, HValue* hleft, HBitwiseBinaryOperation* hshift) { | |
| 760 ASSERT(hshift->IsBitwiseBinaryShift()); | |
| 761 ASSERT(!hshift->IsShr() || (JSShiftAmountFromHConstant(hshift->right()) > 0)); | |
| 762 | |
| 763 LTemplateResultInstruction<1>* res; | |
| 764 LOperand* left = UseRegisterAtStart(hleft); | |
| 765 LOperand* right = UseRegisterAtStart(hshift->left()); | |
| 766 LOperand* shift_amount = UseConstant(hshift->right()); | |
| 767 ShiftOp shift_op; | |
| 768 switch (hshift->opcode()) { | |
| 769 case HValue::kShl: shift_op = LSL; break; | |
| 770 case HValue::kShr: shift_op = LSR; break; | |
| 771 case HValue::kSar: shift_op = ASR; break; | |
| 772 default: UNREACHABLE(); shift_op = NO_SHIFT; | |
| 773 } | |
| 774 | |
| 775 if (hinstr->IsBitwise()) { | |
| 776 res = new(zone()) LBitI(left, right, shift_op, shift_amount); | |
| 777 } else if (hinstr->IsAdd()) { | |
| 778 res = new(zone()) LAddI(left, right, shift_op, shift_amount); | |
| 779 } else { | |
| 780 ASSERT(hinstr->IsSub()); | |
| 781 res = new(zone()) LSubI(left, right, shift_op, shift_amount); | |
| 782 } | |
| 783 if (hinstr->CheckFlag(HValue::kCanOverflow)) { | |
| 784 AssignEnvironment(res); | |
| 785 } | |
| 786 return DefineAsRegister(res); | |
| 787 } | |
| 788 | |
| 789 | |
| 790 LInstruction* LChunkBuilder::DoShift(Token::Value op, | 679 LInstruction* LChunkBuilder::DoShift(Token::Value op, |
| 791 HBitwiseBinaryOperation* instr) { | 680 HBitwiseBinaryOperation* instr) { |
| 792 if (instr->representation().IsSmiOrInteger32()) { | 681 if (instr->representation().IsSmiOrInteger32()) { |
| 793 ASSERT(instr->left()->representation().Equals(instr->representation())); | 682 ASSERT(instr->left()->representation().Equals(instr->representation())); |
| 794 ASSERT(instr->right()->representation().Equals(instr->representation())); | 683 ASSERT(instr->right()->representation().Equals(instr->representation())); |
| 795 | |
| 796 if (ShiftCanBeOptimizedAway(instr)) { | |
| 797 return NULL; | |
| 798 } | |
| 799 | |
| 800 LOperand* left = UseRegisterAtStart(instr->left()); | 684 LOperand* left = UseRegisterAtStart(instr->left()); |
| 801 | 685 |
| 802 HValue* right_value = instr->right(); | 686 HValue* right_value = instr->right(); |
| 803 LOperand* right = NULL; | 687 LOperand* right = NULL; |
| 804 int constant_value = 0; | 688 int constant_value = 0; |
| 805 bool does_deopt = false; | 689 bool does_deopt = false; |
| 806 if (right_value->IsConstant()) { | 690 if (right_value->IsConstant()) { |
| 807 HConstant* constant = HConstant::cast(right_value); | 691 HConstant* constant = HConstant::cast(right_value); |
| 808 right = chunk_->DefineConstantOperand(constant); | 692 right = chunk_->DefineConstantOperand(constant); |
| 809 constant_value = JSShiftAmountFromHConstant(constant); | 693 constant_value = constant->Integer32Value() & 0x1f; |
| 810 // Left shifts can deoptimize if we shift by > 0 and the result cannot be | 694 // Left shifts can deoptimize if we shift by > 0 and the result cannot be |
| 811 // truncated to smi. | 695 // truncated to smi. |
| 812 if (instr->representation().IsSmi() && constant_value > 0) { | 696 if (instr->representation().IsSmi() && constant_value > 0) { |
| 813 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi); | 697 does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi); |
| 814 } | 698 } |
| 815 } else { | 699 } else { |
| 816 right = UseRegisterAtStart(right_value); | 700 right = UseRegisterAtStart(right_value); |
| 817 } | 701 } |
| 818 | 702 |
| 819 // Shift operations can only deoptimize if we do a logical shift | 703 // Shift operations can only deoptimize if we do a logical shift |
| (...skipping 539 matching lines...) |
| 1359 return DoShift(Token::SHL, instr); | 1243 return DoShift(Token::SHL, instr); |
| 1360 } | 1244 } |
| 1361 | 1245 |
| 1362 | 1246 |
| 1363 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) { | 1247 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) { |
| 1364 if (instr->representation().IsSmiOrInteger32()) { | 1248 if (instr->representation().IsSmiOrInteger32()) { |
| 1365 ASSERT(instr->left()->representation().Equals(instr->representation())); | 1249 ASSERT(instr->left()->representation().Equals(instr->representation())); |
| 1366 ASSERT(instr->right()->representation().Equals(instr->representation())); | 1250 ASSERT(instr->right()->representation().Equals(instr->representation())); |
| 1367 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32)); | 1251 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32)); |
| 1368 | 1252 |
| 1369 LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr); | |
| 1370 if (shifted_operation != NULL) { | |
| 1371 return shifted_operation; | |
| 1372 } | |
| 1373 | |
| 1374 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); | 1253 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); |
| 1375 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand()); | 1254 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand()); |
| 1376 return DefineAsRegister(new(zone()) LBitI(left, right)); | 1255 return DefineAsRegister(new(zone()) LBitI(left, right)); |
| 1377 } else { | 1256 } else { |
| 1378 return DoArithmeticT(instr->op(), instr); | 1257 return DoArithmeticT(instr->op(), instr); |
| 1379 } | 1258 } |
| 1380 } | 1259 } |
| 1381 | 1260 |
| 1382 | 1261 |
| 1383 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) { | 1262 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) { |
| (...skipping 263 matching lines...) |
| 1647 return DoArithmeticT(Token::MUL, instr); | 1526 return DoArithmeticT(Token::MUL, instr); |
| 1648 } | 1527 } |
| 1649 } | 1528 } |
| 1650 | 1529 |
| 1651 | 1530 |
| 1652 LInstruction* LChunkBuilder::DoSub(HSub* instr) { | 1531 LInstruction* LChunkBuilder::DoSub(HSub* instr) { |
| 1653 if (instr->representation().IsSmiOrInteger32()) { | 1532 if (instr->representation().IsSmiOrInteger32()) { |
| 1654 ASSERT(instr->left()->representation().Equals(instr->representation())); | 1533 ASSERT(instr->left()->representation().Equals(instr->representation())); |
| 1655 ASSERT(instr->right()->representation().Equals(instr->representation())); | 1534 ASSERT(instr->right()->representation().Equals(instr->representation())); |
| 1656 | 1535 |
| 1657 LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr); | |
| 1658 if (shifted_operation != NULL) { | |
| 1659 return shifted_operation; | |
| 1660 } | |
| 1661 | |
| 1662 if (instr->left()->IsConstant()) { | 1536 if (instr->left()->IsConstant()) { |
| 1663 // If lhs is constant, do reverse subtraction instead. | 1537 // If lhs is constant, do reverse subtraction instead. |
| 1664 return DoRSub(instr); | 1538 return DoRSub(instr); |
| 1665 } | 1539 } |
| 1666 | 1540 |
| 1667 LOperand* left = UseRegisterAtStart(instr->left()); | 1541 LOperand* left = UseRegisterAtStart(instr->left()); |
| 1668 LOperand* right = UseOrConstantAtStart(instr->right()); | 1542 LOperand* right = UseOrConstantAtStart(instr->right()); |
| 1669 LSubI* sub = new(zone()) LSubI(left, right); | 1543 LSubI* sub = new(zone()) LSubI(left, right); |
| 1670 LInstruction* result = DefineAsRegister(sub); | 1544 LInstruction* result = DefineAsRegister(sub); |
| 1671 if (instr->CheckFlag(HValue::kCanOverflow)) { | 1545 if (instr->CheckFlag(HValue::kCanOverflow)) { |
| (...skipping 47 matching lines...) |
| 1719 return DefineSameAsFirst(new(zone()) LMultiplySubD(minuend_op, | 1593 return DefineSameAsFirst(new(zone()) LMultiplySubD(minuend_op, |
| 1720 multiplier_op, | 1594 multiplier_op, |
| 1721 multiplicand_op)); | 1595 multiplicand_op)); |
| 1722 } | 1596 } |
| 1723 | 1597 |
| 1724 | 1598 |
| 1725 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) { | 1599 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) { |
| 1726 if (instr->representation().IsSmiOrInteger32()) { | 1600 if (instr->representation().IsSmiOrInteger32()) { |
| 1727 ASSERT(instr->left()->representation().Equals(instr->representation())); | 1601 ASSERT(instr->left()->representation().Equals(instr->representation())); |
| 1728 ASSERT(instr->right()->representation().Equals(instr->representation())); | 1602 ASSERT(instr->right()->representation().Equals(instr->representation())); |
| 1729 | |
| 1730 LInstruction* shifted_operation = TryDoOpWithShiftedRightOperand(instr); | |
| 1731 if (shifted_operation != NULL) { | |
| 1732 return shifted_operation; | |
| 1733 } | |
| 1734 | |
| 1735 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); | 1603 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand()); |
| 1736 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand()); | 1604 LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand()); |
| 1737 LAddI* add = new(zone()) LAddI(left, right); | 1605 LAddI* add = new(zone()) LAddI(left, right); |
| 1738 LInstruction* result = DefineAsRegister(add); | 1606 LInstruction* result = DefineAsRegister(add); |
| 1739 if (instr->CheckFlag(HValue::kCanOverflow)) { | 1607 if (instr->CheckFlag(HValue::kCanOverflow)) { |
| 1740 result = AssignEnvironment(result); | 1608 result = AssignEnvironment(result); |
| 1741 } | 1609 } |
| 1742 return result; | 1610 return result; |
| 1743 } else if (instr->representation().IsExternal()) { | 1611 } else if (instr->representation().IsExternal()) { |
| 1744 ASSERT(instr->left()->representation().IsExternal()); | 1612 ASSERT(instr->left()->representation().IsExternal()); |
| (...skipping 974 matching lines...) |
| 2719 LInstruction* LChunkBuilder::DoAllocateBlockContext( | 2587 LInstruction* LChunkBuilder::DoAllocateBlockContext( |
| 2720 HAllocateBlockContext* instr) { | 2588 HAllocateBlockContext* instr) { |
| 2721 LOperand* context = UseFixed(instr->context(), cp); | 2589 LOperand* context = UseFixed(instr->context(), cp); |
| 2722 LOperand* function = UseRegisterAtStart(instr->function()); | 2590 LOperand* function = UseRegisterAtStart(instr->function()); |
| 2723 LAllocateBlockContext* result = | 2591 LAllocateBlockContext* result = |
| 2724 new(zone()) LAllocateBlockContext(context, function); | 2592 new(zone()) LAllocateBlockContext(context, function); |
| 2725 return MarkAsCall(DefineFixed(result, cp), instr); | 2593 return MarkAsCall(DefineFixed(result, cp), instr); |
| 2726 } | 2594 } |
| 2727 | 2595 |
| 2728 } } // namespace v8::internal | 2596 } } // namespace v8::internal |
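
For context on the removed (left-column) helpers: CanTransformToShiftedOp recognizes an Integer32 add/sub/bitwise operation whose right operand (or left operand, when the operation is commutative) is a shift by a constant, so that DoShiftedBinaryOp can fold the shift into the ARM flexible second operand, i.e. one `add r0, r1, r2, LSL #2` instead of a separate shift plus add. The standalone sketch below mirrors only that matching step on an invented toy IR; the ToyOp/ToyNode/FindFoldableShift names are hypothetical and not part of V8, and the LikelyFitsImmField and ShiftCanBeOptimizedAway checks are deliberately left out.

```cpp
#include <cstdint>
#include <cstdio>

// Toy IR for illustration only; none of these names exist in V8.
enum class ToyOp { kParam, kConstant, kAdd, kSub, kBitAnd, kShl, kShr, kSar };

struct ToyNode {
  ToyOp op;
  const ToyNode* left;   // Unused for kParam/kConstant.
  const ToyNode* right;
  int32_t value;         // Constant value when op == ToyOp::kConstant.
};

static bool IsShift(const ToyNode* n) {
  return n->op == ToyOp::kShl || n->op == ToyOp::kShr || n->op == ToyOp::kSar;
}

static bool IsCommutative(ToyOp op) {
  return op == ToyOp::kAdd || op == ToyOp::kBitAnd;
}

// Returns the shift that could be folded into the ARM shifter operand of
// 'binop' (e.g. a single 'add r0, r1, r2, LSL #2'), or nullptr if no fold
// applies. '*other' receives the operand that stays in a plain register.
// The representation, immediate-field and use-count checks performed by the
// real CanTransformToShiftedOp/ShiftCanBeOptimizedAway are omitted here.
static const ToyNode* FindFoldableShift(const ToyNode* binop,
                                        const ToyNode** other) {
  const ToyNode* l = binop->left;
  const ToyNode* r = binop->right;
  const ToyNode* shift = nullptr;
  if (IsShift(r) && r->right->op == ToyOp::kConstant) {
    shift = r;
    *other = l;
  } else if (IsCommutative(binop->op) && IsShift(l) &&
             l->right->op == ToyOp::kConstant) {
    shift = l;
    *other = r;
  } else {
    return nullptr;
  }
  // A logical shift right by zero is the one shift that can deoptimize
  // (its result may not fit in a signed 32-bit value), so it keeps its own
  // instruction -- the same bail-out as in CanTransformToShiftedOp.
  if (shift->op == ToyOp::kShr && (shift->right->value & 0x1f) == 0) {
    return nullptr;
  }
  return shift;
}

int main() {
  // a + (b << 2), with a and b held in registers.
  ToyNode a{ToyOp::kParam, nullptr, nullptr, 0};
  ToyNode b{ToyOp::kParam, nullptr, nullptr, 0};
  ToyNode two{ToyOp::kConstant, nullptr, nullptr, 2};
  ToyNode shl{ToyOp::kShl, &b, &two, 0};
  ToyNode add{ToyOp::kAdd, &a, &shl, 0};

  const ToyNode* other = nullptr;
  const ToyNode* shift = FindFoldableShift(&add, &other);
  std::printf("foldable shift found: %s\n", shift != nullptr ? "yes" : "no");
  return 0;
}
```

Compiled on its own, the sketch reports that `a + (b << 2)` is foldable, which is the case the removed DoShiftedBinaryOp lowered to a single LAddI carrying an LSL shift operand.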
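
On the kept (right-column) side, DoShift replaces the call to JSShiftAmountFromHConstant with an explicit `constant->Integer32Value() & 0x1f`, which is presumably what that helper computed: ECMAScript reduces shift counts modulo 32. A minimal standalone check of that rule (the JsShl name is invented for this sketch):

```cpp
#include <cassert>
#include <cstdint>

// ECMAScript reduces shift counts modulo 32; the '& 0x1f' in DoShift
// implements this for constant shift amounts.
int32_t JsShl(int32_t value, int32_t count) {
  return value << (count & 0x1f);
}

int main() {
  assert(JsShl(1, 33) == 2);  // 33 & 0x1f == 1, so (1 << 33) === 2 in JS.
  assert(JsShl(5, 32) == 5);  // A count of 32 masks to 0: no shift at all.
  return 0;
}
```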