Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1585 matching lines...) | |
| 1596 __ mov(r0, Operand(Smi::FromInt(ncr))); | 1596 __ mov(r0, Operand(Smi::FromInt(ncr))); |
| 1597 __ push(r0); | 1597 __ push(r0); |
| 1598 } | 1598 } |
| 1599 | 1599 |
| 1600 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1600 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 1601 // tagged as a small integer. | 1601 // tagged as a small integer. |
| 1602 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1602 __ InvokeBuiltin(native, JUMP_FUNCTION); |
| 1603 } | 1603 } |
| 1604 | 1604 |
| 1605 | 1605 |
| 1606 // The stub returns zero for false, and a non-zero value for true. | 1606 // The stub expects its argument in the tos_ register and returns its result in |
| 1607 // it, too: zero for false, and a non-zero value for true. | |
| 1607 void ToBooleanStub::Generate(MacroAssembler* masm) { | 1608 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 1608 // This stub uses VFP3 instructions. | 1609 // This stub uses VFP3 instructions. |
| 1609 CpuFeatures::Scope scope(VFP3); | 1610 CpuFeatures::Scope scope(VFP3); |
| 1610 | 1611 |
| 1611 Label false_result, true_result, not_string; | 1612 Label patch; |
| 1612 const Register map = r9.is(tos_) ? r7 : r9; | 1613 const Register map = r9.is(tos_) ? r7 : r9; |
Erik Corry 2011/08/05 12:50:46:
If we have ever seen an internal object you could ...

Sven Panne 2011/08/09 07:58:21:
I think we can get rid of all internal object checks ...
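For context on this exchange: the rewritten stub only emits checks for the types recorded in its `types_` set and branches to the `&patch` label (which calls `GenerateTypeTransition`) for anything it has not yet seen. A minimal sketch of such a type-feedback bitset, modelled on the `types_.Contains(...)`, `NeedsMap()`, `ToByte()` and `IsAll()` calls visible in the diff — the class name and the `NeedsMap` policy below are assumptions for illustration, not the actual ToBooleanStub::Types declaration:

```cpp
#include <stdint.h>

// Illustrative sketch only; not the real V8 class.
class ToBooleanTypes {
 public:
  enum Type {
    UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT,
    STRING, HEAP_NUMBER, INTERNAL_OBJECT, NUMBER_OF_TYPES
  };

  ToBooleanTypes() : bits_(0) {}
  explicit ToBooleanTypes(uint8_t bits) : bits_(bits) {}

  bool Contains(Type t) const { return (bits_ & (1u << t)) != 0; }
  void Add(Type t) { bits_ |= (1u << t); }
  bool IsAll() const { return bits_ == (1u << NUMBER_OF_TYPES) - 1; }
  uint8_t ToByte() const { return bits_; }  // pushed as a Smi for the patch IC

  // A map load is only needed by the cases that inspect the object's map:
  // the undetectable check, spec objects, strings and heap numbers.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) ||
           Contains(HEAP_NUMBER) || Contains(INTERNAL_OBJECT);
  }

 private:
  uint8_t bits_;
};
```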
| 1613 | 1614 |
| 1614 // undefined -> false | 1615 // undefined -> false |
Erik Corry 2011/08/05 12:50:46:
Missing full stops on the end of comments in several places.

Sven Panne 2011/08/09 07:58:21:
Done.
| 1615 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1616 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch); |
| 1616 __ cmp(tos_, ip); | |
| 1617 __ b(eq, &false_result); | |
| 1618 | 1617 |
| 1619 // Boolean -> its value | 1618 // Boolean -> its value |
| 1620 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 1619 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch); |
| 1621 __ cmp(tos_, ip); | 1620 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch); |
| 1622 __ b(eq, &false_result); | |
| 1623 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | |
| 1624 __ cmp(tos_, ip); | |
| 1625 // "tos_" is a register and contains a non-zero value. Hence we implicitly | |
| 1626 // return true if the equal condition is satisfied. | |
| 1627 __ Ret(eq); | |
| 1628 | 1621 |
| 1629 // Smis: 0 -> false, all other -> true | 1622 // 'null' -> false. |
| 1630 __ tst(tos_, tos_); | 1623 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch); |
| 1631 __ b(eq, &false_result); | |
| 1632 __ tst(tos_, Operand(kSmiTagMask)); | |
| 1633 // "tos_" is a register and contains a non-zero value. Hence we implicitly | |
| 1634 // return true if the not equal condition is satisfied. | |
| 1635 __ Ret(eq); | |
| 1636 | 1624 |
| 1637 // 'null' -> false | 1625 if (types_.Contains(SMI)) { |
| 1638 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 1626 // Smis: 0 -> false, all other -> true |
| 1639 __ cmp(tos_, ip); | 1627 __ tst(tos_, Operand(kSmiTagMask)); |
| 1640 __ b(eq, &false_result); | 1628 // tos_ contains the correct return value already |
| 1629 __ Ret(eq); | |
| 1630 } else if (types_.NeedsMap()) { | |
| 1631 // If we need a map later and have a Smi -> patch. | |
| 1632 __ JumpIfSmi(tos_, &patch); | |
| 1633 } | |
| 1641 | 1634 |
| 1642 // Get the map of the heap object. | 1635 if (types_.NeedsMap()) { |
| 1643 __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset)); | 1636 __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset)); |
| 1644 | 1637 |
| 1645 // Undetectable -> false. | 1638 // Everything with a map could be undetectable, so check this now. |
Erik Corry 2011/08/05 12:50:46:
Pretty sure that it is just strings and JSObjects.

Sven Panne 2011/08/09 07:58:21:
Again, this is platform-independent, so let's handle ...
| 1646 __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); | 1639 __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); |
| 1647 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 1640 __ tst(ip, Operand(1 << Map::kIsUndetectable)); |
| 1648 __ b(&false_result, ne); | 1641 // Undetectable -> false. |
| 1642 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne); | |
| 1643 __ Ret(ne); | |
| 1644 } | |
| 1649 | 1645 |
| 1650 // JavaScript object -> true. | 1646 if (types_.Contains(SPEC_OBJECT)) { |
| 1651 __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); | 1647 // spec object -> true. |
| 1652 // "tos_" is a register and contains a non-zero value. Hence we implicitly | 1648 __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); |
| 1653 // return true if the greater than condition is satisfied. | 1649 // tos_ contains the correct non-zero return value already. |
| 1654 __ Ret(ge); | 1650 __ Ret(ge); |
| 1651 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 1652 // We've seen a spec object for the first time -> patch. | |
| 1653 __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); | |
| 1654 __ b(ge, &patch); | |
| 1655 } | |
| 1655 | 1656 |
| 1656 // String value -> false iff empty. | 1657 if (types_.Contains(STRING)) { |
| 1658 // String value -> false iff empty. | |
| 1657 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); | 1659 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); |
| 1658 __ b(&not_string, ge); | 1660 __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt); |
| 1659 __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset)); | 1661 __ Ret(lt); // the string length is OK as the return value |
| 1660 // Return string length as boolean value, i.e. return false iff length is 0. | 1662 } else if (types_.Contains(INTERNAL_OBJECT)) { |
| 1661 __ Ret(); | 1663 // We've seen a string for the first time -> patch |
| 1664 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); | |
| 1665 __ b(lt, &patch); | |
| 1666 } | |
| 1662 | 1667 |
| 1663 __ bind(&not_string); | 1668 if (types_.Contains(HEAP_NUMBER)) { |
| 1664 // HeapNumber -> false iff +0, -0, or NaN. | 1669 // heap number -> false iff +0, -0, or NaN. |
| 1665 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 1670 Label not_heap_number; |
| 1666 __ b(&true_result, ne); | 1671 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 1667 __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset)); | 1672 __ b(ne, &not_heap_number); |
| 1668 __ VFPCompareAndSetFlags(d1, 0.0); | 1673 __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset)); |
| 1669 // "tos_" is a register, and contains a non zero value by default. | 1674 __ VFPCompareAndSetFlags(d1, 0.0); |
| 1670 // Hence we only need to overwrite "tos_" with zero to return false for | 1675 // "tos_" is a register, and contains a non zero value by default. |
| 1671 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. | 1676 // Hence we only need to overwrite "tos_" with zero to return false for |
| 1672 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO | 1677 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. |
| 1673 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN | 1678 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO |
| 1674 __ Ret(); | 1679 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN |
| 1680 __ Ret(); | |
| 1681 __ bind(&not_heap_number); | |
| 1682 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 1683 // We've seen a heap number for the first time -> patch | |
| 1684 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | |
| 1685 __ b(eq, &patch); | |
| 1686 } | |
| 1675 | 1687 |
| 1676 // Return 1/0 for true/false in tos_. | 1688 if (types_.Contains(INTERNAL_OBJECT)) { |
| 1677 __ bind(&true_result); | 1689 // internal objects -> true |
Erik Corry 2011/08/05 12:50:46:
Caps and full stop.

Sven Panne 2011/08/09 07:58:21:
Done.
| 1678 __ mov(tos_, Operand(1, RelocInfo::NONE)); | 1690 __ mov(tos_, Operand(1, RelocInfo::NONE)); |
| 1679 __ Ret(); | 1691 __ Ret(); |
| 1680 __ bind(&false_result); | 1692 } |
| 1681 __ mov(tos_, Operand(0, RelocInfo::NONE)); | 1693 |
| 1682 __ Ret(); | 1694 if (!types_.IsAll()) { |
| 1695 __ bind(&patch); | |
| 1696 GenerateTypeTransition(masm); | |
| 1697 } | |
| 1698 } | |
| 1699 | |
| 1700 | |
| 1701 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | |
| 1702 Type type, | |
| 1703 Heap::RootListIndex value, | |
| 1704 bool result, | |
| 1705 Label* patch) { | |
| 1706 if (types_.Contains(type)) { | |
| 1707 // If we see an expected oddball, return its ToBoolean value in tos_. | |
| 1708 __ LoadRoot(ip, value); | |
| 1709 __ cmp(tos_, ip); | |
| 1710 // The value of a root is never NULL, so we can avoid loading a non-null | |
| 1711 // value into tos_ when we want to return 'true'. | |
| 1712 if (!result) { | |
| 1713 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); | |
| 1714 } | |
| 1715 __ Ret(eq); | |
| 1716 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 1717 // If we see an unexpected oddball and handle internal objects, we must | |
| 1718 // patch because the code for internal objects doesn't handle it explicitly. | |
| 1719 __ LoadRoot(ip, value); | |
| 1720 __ cmp(tos_, ip); | |
| 1721 __ b(eq, patch); | |
| 1722 } | |
| 1723 } | |
| 1724 | |
| 1725 | |
| 1726 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { | |
| 1727 if (!tos_.is(r3)) { | |
| 1728 __ mov(r3, Operand(tos_)); | |
| 1729 } | |
| 1730 __ mov(r2, Operand(Smi::FromInt(tos_.code()))); | |
| 1731 __ mov(r1, Operand(Smi::FromInt(types_.ToByte()))); | |
| 1732 __ Push(r3, r2, r1); | |
| 1733 // Patch the caller to an appropriate specialized stub and return the | |
| 1734 // operation result to the caller of the stub. | |
| 1735 __ TailCallExternalReference( | |
| 1736 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()), | |
| 1737 3, | |
| 1738 1); | |
| 1683 } | 1739 } |
| 1684 | 1740 |
| 1685 | 1741 |
| 1686 void UnaryOpStub::PrintName(StringStream* stream) { | 1742 void UnaryOpStub::PrintName(StringStream* stream) { |
| 1687 const char* op_name = Token::Name(op_); | 1743 const char* op_name = Token::Name(op_); |
| 1688 const char* overwrite_name = NULL; // Make g++ happy. | 1744 const char* overwrite_name = NULL; // Make g++ happy. |
| 1689 switch (mode_) { | 1745 switch (mode_) { |
| 1690 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | 1746 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
| 1691 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | 1747 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; |
| 1692 } | 1748 } |
| (...skipping 4842 matching lines...) | |
| 6535 __ mov(result, Operand(0)); | 6591 __ mov(result, Operand(0)); |
| 6536 __ Ret(); | 6592 __ Ret(); |
| 6537 } | 6593 } |
| 6538 | 6594 |
| 6539 | 6595 |
| 6540 #undef __ | 6596 #undef __ |
| 6541 | 6597 |
| 6542 } } // namespace v8::internal | 6598 } } // namespace v8::internal |
| 6543 | 6599 |
| 6544 #endif // V8_TARGET_ARCH_ARM | 6600 #endif // V8_TARGET_ARCH_ARM |
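Taken together, the specialized paths in the new ToBooleanStub encode the usual ToBoolean decision tree, with `GenerateTypeTransition` as the fallback for any input kind the stub has not yet been specialized for. A rough C++ restatement of that tree, purely for illustration — the `Value` struct and its fields are hypothetical, not part of V8:

```cpp
#include <cmath>
#include <string>

// Hypothetical tagged value used only to restate the stub's logic.
struct Value {
  enum Kind { UNDEFINED, NULL_VALUE, BOOLEAN, SMI, SPEC_OBJECT,
              STRING, HEAP_NUMBER, INTERNAL_OBJECT } kind;
  bool boolean_value = false;    // BOOLEAN
  int smi_value = 0;             // SMI
  std::string string_value;      // STRING
  double number_value = 0.0;     // HEAP_NUMBER
  bool is_undetectable = false;  // Map::kIsUndetectable, e.g. document.all
};

bool ToBoolean(const Value& v) {
  // Anything carrying a map can be marked undetectable -> false.
  if (v.is_undetectable) return false;
  switch (v.kind) {
    case Value::UNDEFINED:
    case Value::NULL_VALUE:      return false;                    // oddballs -> false
    case Value::BOOLEAN:         return v.boolean_value;          // its own value
    case Value::SMI:             return v.smi_value != 0;         // 0 -> false
    case Value::SPEC_OBJECT:     return true;                     // JS objects -> true
    case Value::STRING:          return !v.string_value.empty();  // false iff empty
    case Value::HEAP_NUMBER:                                      // false iff +/-0 or NaN
      return v.number_value != 0.0 && !std::isnan(v.number_value);
    case Value::INTERNAL_OBJECT: return true;
  }
  return true;  // unreachable
}
```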