OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 313 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
324 | 324 |
325 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( | 325 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( |
326 Token::Value op, Expression* left, Expression* right) { | 326 Token::Value op, Expression* left, Expression* right) { |
327 ASSERT(ShouldInlineSmiCase(op)); | 327 ASSERT(ShouldInlineSmiCase(op)); |
328 if (op == Token::DIV || op == Token::MOD || op == Token::MUL) { | 328 if (op == Token::DIV || op == Token::MOD || op == Token::MUL) { |
329 // We never generate inlined constant smi operations for these. | 329 // We never generate inlined constant smi operations for these. |
330 return kNoConstants; | 330 return kNoConstants; |
331 } else if (right->IsSmiLiteral()) { | 331 } else if (right->IsSmiLiteral()) { |
332 return kRightConstant; | 332 return kRightConstant; |
333 } else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) { | 333 } else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) { |
| 334 // Don't inline shifts with constant left hand side. |
334 return kLeftConstant; | 335 return kLeftConstant; |
335 } else { | 336 } else { |
336 return kNoConstants; | 337 return kNoConstants; |
337 } | 338 } |
338 } | 339 } |
339 | 340 |
340 | 341 |
341 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { | 342 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { |
342 } | 343 } |
343 | 344 |
(...skipping 1293 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1637 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 1638 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
1638 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1639 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
1639 } | 1640 } |
1640 | 1641 |
1641 | 1642 |
// Emits inlined code adding the constant smi |value| to the tagged value in
// eax. |left_is_constant_smi| says whether |value| was the left operand of
// the source-level addition; |mode| selects the overwrite behavior of the
// fallback stub.
void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
                                           OverwriteMode mode,
                                           bool left_is_constant_smi,
                                           Smi* value) {
  NearLabel call_stub, done;
  // Optimistically add smi value with unknown object. If result overflows or is
  // not a smi then we had either a smi overflow or added a smi with a tagged
  // pointer.
  __ add(Operand(eax), Immediate(value));
  __ j(overflow, &call_stub);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &done);

  // Undo the optimistic add operation and call the shared stub.
  __ bind(&call_stub);
  __ sub(Operand(eax), Immediate(value));
  TypeRecordingBinaryOpStub stub(Token::ADD, mode);
  // Restore the stub's operands: left in edx, right in eax (addition is
  // commutative, so eax already held the non-constant operand after the undo).
  if (left_is_constant_smi) {
    __ mov(edx, Immediate(value));
  } else {
    __ mov(edx, eax);
    __ mov(eax, Immediate(value));
  }
  EmitCallIC(stub.GetCode(), &patch_site);

  __ bind(&done);
  context()->Plug(eax);
}
1668 | 1671 |
1669 | 1672 |
// Emits inlined code subtracting with the constant smi |value| and the tagged
// value in eax. |left_is_constant_smi| says whether |value| was the left
// operand of the source-level subtraction; |mode| selects the overwrite
// behavior of the fallback stub.
void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
                                           OverwriteMode mode,
                                           bool left_is_constant_smi,
                                           Smi* value) {
  NearLabel call_stub, done;
  // Optimistically subtract smi value with unknown object. If result overflows
  // or is not a smi then we had either a smi overflow or subtracted a smi from
  // a tagged pointer.
  if (left_is_constant_smi) {
    // Subtraction is not commutative: keep a copy of the right operand in ecx
    // so it can be restored on the slow path, then compute value - eax.
    __ mov(ecx, eax);
    __ mov(eax, Immediate(value));
    __ sub(Operand(eax), ecx);
  } else {
    __ sub(Operand(eax), Immediate(value));
  }
  __ j(overflow, &call_stub);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &done);

  // Slow path: restore the original operands (left in edx, right in eax) and
  // call the shared stub.
  __ bind(&call_stub);
  if (left_is_constant_smi) {
    __ mov(edx, Immediate(value));
    __ mov(eax, ecx);
  } else {
    __ add(Operand(eax), Immediate(value));  // Undo the subtraction.
    __ mov(edx, eax);
    __ mov(eax, Immediate(value));
  }
  TypeRecordingBinaryOpStub stub(Token::SUB, mode);
  EmitCallIC(stub.GetCode(), &patch_site);

  __ bind(&done);
  context()->Plug(eax);
}
1702 | 1707 |
1703 | 1708 |
1704 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, | 1709 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, |
1705 Token::Value op, | 1710 Token::Value op, |
1706 OverwriteMode mode, | 1711 OverwriteMode mode, |
(...skipping 15 matching lines...) Expand all Loading... |
1722 // Smi case. | 1727 // Smi case. |
1723 __ bind(&smi_case); | 1728 __ bind(&smi_case); |
1724 switch (op) { | 1729 switch (op) { |
1725 case Token::SHL: | 1730 case Token::SHL: |
1726 if (shift_value != 0) { | 1731 if (shift_value != 0) { |
1727 __ mov(edx, eax); | 1732 __ mov(edx, eax); |
1728 if (shift_value > 1) { | 1733 if (shift_value > 1) { |
1729 __ shl(edx, shift_value - 1); | 1734 __ shl(edx, shift_value - 1); |
1730 } | 1735 } |
1731 // Convert int result to smi, checking that it is in int range. | 1736 // Convert int result to smi, checking that it is in int range. |
1732 ASSERT(kSmiTagSize == 1); // Adjust code if not the case. | 1737 STATIC_ASSERT(kSmiTagSize == 1); // Adjust code if not the case. |
1733 __ add(edx, Operand(edx)); | 1738 __ add(edx, Operand(edx)); |
1734 __ j(overflow, &call_stub); | 1739 __ j(overflow, &call_stub); |
1735 __ mov(eax, edx); // Put result back into eax. | 1740 __ mov(eax, edx); // Put result back into eax. |
1736 } | 1741 } |
1737 break; | 1742 break; |
1738 case Token::SAR: | 1743 case Token::SAR: |
1739 if (shift_value != 0) { | 1744 if (shift_value != 0) { |
1740 __ sar(eax, shift_value); | 1745 __ sar(eax, shift_value); |
1741 __ and_(eax, ~kSmiTagMask); | 1746 __ and_(eax, ~kSmiTagMask); |
1742 } | 1747 } |
1743 break; | 1748 break; |
1744 case Token::SHR: | 1749 case Token::SHR: |
| 1750 // SHR must return a positive value. When shifting by 0 or 1 we need to |
| 1751 // check that smi tagging the result will not create a negative value. |
1745 if (shift_value < 2) { | 1752 if (shift_value < 2) { |
1746 __ mov(edx, eax); | 1753 __ mov(edx, eax); |
1747 __ SmiUntag(edx); | 1754 __ SmiUntag(edx); |
1748 __ shr(edx, shift_value); | 1755 __ shr(edx, shift_value); |
1749 __ test(edx, Immediate(0xc0000000)); | 1756 __ test(edx, Immediate(0xc0000000)); |
1750 __ j(not_zero, &call_stub); | 1757 __ j(not_zero, &call_stub); |
1751 __ SmiTag(edx); | 1758 __ SmiTag(edx); |
1752 __ mov(eax, edx); // Put result back into eax. | 1759 __ mov(eax, edx); // Put result back into eax. |
1753 } else { | 1760 } else { |
1754 __ SmiUntag(eax); | 1761 __ SmiUntag(eax); |
(...skipping 2668 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4423 // And return. | 4430 // And return. |
4424 __ ret(0); | 4431 __ ret(0); |
4425 } | 4432 } |
4426 | 4433 |
4427 | 4434 |
4428 #undef __ | 4435 #undef __ |
4429 | 4436 |
4430 } } // namespace v8::internal | 4437 } } // namespace v8::internal |
4431 | 4438 |
4432 #endif // V8_TARGET_ARCH_IA32 | 4439 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |