Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(360)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 6113004: Version 3.0.7 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 9 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/lithium-arm.cc ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1058 matching lines...) Expand 10 before | Expand all | Expand 10 after
1069 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1069 int true_block = chunk_->LookupDestination(instr->true_block_id());
1070 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1070 int false_block = chunk_->LookupDestination(instr->false_block_id());
1071 1071
1072 Representation r = instr->hydrogen()->representation(); 1072 Representation r = instr->hydrogen()->representation();
1073 if (r.IsInteger32()) { 1073 if (r.IsInteger32()) {
1074 Register reg = ToRegister(instr->input()); 1074 Register reg = ToRegister(instr->input());
1075 __ cmp(reg, Operand(0)); 1075 __ cmp(reg, Operand(0));
1076 EmitBranch(true_block, false_block, nz); 1076 EmitBranch(true_block, false_block, nz);
1077 } else if (r.IsDouble()) { 1077 } else if (r.IsDouble()) {
1078 DoubleRegister reg = ToDoubleRegister(instr->input()); 1078 DoubleRegister reg = ToDoubleRegister(instr->input());
1079 Register scratch = scratch0();
1080
1081 // Test for the double value. Zero and NaN are false.
1082 // Clear the Invalid cumulative exception flags.
1083 __ ClearFPSCRBits(kVFPInvalidExceptionBit, scratch);
1079 __ vcmp(reg, 0.0); 1084 __ vcmp(reg, 0.0);
1085 // Retrieve the exception and status flags and
1086 // check for zero or an invalid exception.
1087 __ vmrs(scratch);
1088 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPInvalidExceptionBit));
1080 EmitBranch(true_block, false_block, ne); 1089 EmitBranch(true_block, false_block, ne);
1081 } else { 1090 } else {
1082 ASSERT(r.IsTagged()); 1091 ASSERT(r.IsTagged());
1083 Register reg = ToRegister(instr->input()); 1092 Register reg = ToRegister(instr->input());
1084 if (instr->hydrogen()->type().IsBoolean()) { 1093 if (instr->hydrogen()->type().IsBoolean()) {
1085 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1094 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1086 __ cmp(reg, ip); 1095 __ cmp(reg, ip);
1087 EmitBranch(true_block, false_block, eq); 1096 EmitBranch(true_block, false_block, eq);
1088 } else { 1097 } else {
1089 Label* true_label = chunk_->GetAssemblyLabel(true_block); 1098 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1090 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1099 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1091 1100
1092 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1101 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1093 __ cmp(reg, ip); 1102 __ cmp(reg, ip);
1094 __ b(eq, false_label); 1103 __ b(eq, false_label);
1095 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1104 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1096 __ cmp(reg, ip); 1105 __ cmp(reg, ip);
1097 __ b(eq, true_label); 1106 __ b(eq, true_label);
1098 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 1107 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
1099 __ cmp(reg, ip); 1108 __ cmp(reg, ip);
1100 __ b(eq, false_label); 1109 __ b(eq, false_label);
1101 __ cmp(reg, Operand(0)); 1110 __ cmp(reg, Operand(0));
1102 __ b(eq, false_label); 1111 __ b(eq, false_label);
1103 __ tst(reg, Operand(kSmiTagMask)); 1112 __ tst(reg, Operand(kSmiTagMask));
1104 __ b(eq, true_label); 1113 __ b(eq, true_label);
1105 1114
1106 // Test for double values. Zero is false. 1115 // Test for double values. Zero and NaN are false.
1107 Label call_stub; 1116 Label call_stub;
1108 DoubleRegister dbl_scratch = d0; 1117 DoubleRegister dbl_scratch = d0;
1109 Register scratch = scratch0(); 1118 Register scratch = scratch0();
1110 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); 1119 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1111 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 1120 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
1112 __ cmp(scratch, Operand(ip)); 1121 __ cmp(scratch, Operand(ip));
1113 __ b(ne, &call_stub); 1122 __ b(ne, &call_stub);
1114 __ sub(ip, reg, Operand(kHeapObjectTag)); 1123 __ sub(ip, reg, Operand(kHeapObjectTag));
1115 __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset); 1124 __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
1125 // Clear the Invalid cumulative exception flags.
1126 __ ClearFPSCRBits(kVFPInvalidExceptionBit, scratch);
1116 __ vcmp(dbl_scratch, 0.0); 1127 __ vcmp(dbl_scratch, 0.0);
1117 __ b(eq, false_label); 1128 // Retrieve the exception and status flags and
1129 // check for zero or an invalid exception.
1130 __ vmrs(scratch);
1131 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPInvalidExceptionBit));
1132 __ b(ne, false_label);
1118 __ b(true_label); 1133 __ b(true_label);
1119 1134
1120 // The conversion stub doesn't cause garbage collections so it's 1135 // The conversion stub doesn't cause garbage collections so it's
1121 // safe to not record a safepoint after the call. 1136 // safe to not record a safepoint after the call.
1122 __ bind(&call_stub); 1137 __ bind(&call_stub);
1123 ToBooleanStub stub(reg); 1138 ToBooleanStub stub(reg);
1124 RegList saved_regs = kJSCallerSaved | kCalleeSaved; 1139 RegList saved_regs = kJSCallerSaved | kCalleeSaved;
1125 __ stm(db_w, sp, saved_regs); 1140 __ stm(db_w, sp, saved_regs);
1126 __ CallStub(&stub); 1141 __ CallStub(&stub);
1127 __ cmp(reg, Operand(0)); 1142 __ cmp(reg, Operand(0));
(...skipping 296 matching lines...) Expand 10 before | Expand all | Expand 10 after
1424 __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne); 1439 __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
1425 __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq); 1440 __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
1426 } 1441 }
1427 1442
1428 1443
1429 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1444 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1430 Abort("DoInstanceOfAndBranch unimplemented."); 1445 Abort("DoInstanceOfAndBranch unimplemented.");
1431 } 1446 }
1432 1447
1433 1448
1449 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1450 Abort("DoInstanceOfKnownGlobal unimplemented.");
1451 }
1452
1434 1453
1435 static Condition ComputeCompareCondition(Token::Value op) { 1454 static Condition ComputeCompareCondition(Token::Value op) {
1436 switch (op) { 1455 switch (op) {
1437 case Token::EQ_STRICT: 1456 case Token::EQ_STRICT:
1438 case Token::EQ: 1457 case Token::EQ:
1439 return eq; 1458 return eq;
1440 case Token::LT: 1459 case Token::LT:
1441 return lt; 1460 return lt;
1442 case Token::GT: 1461 case Token::GT:
1443 return gt; 1462 return gt;
(...skipping 126 matching lines...) Expand 10 before | Expand all | Expand 10 after
1570 // in initial map. 1589 // in initial map.
1571 __ bind(&non_instance); 1590 __ bind(&non_instance);
1572 __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset)); 1591 __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
1573 1592
1574 // All done. 1593 // All done.
1575 __ bind(&done); 1594 __ bind(&done);
1576 } 1595 }
1577 1596
1578 1597
1579 void LCodeGen::DoLoadElements(LLoadElements* instr) { 1598 void LCodeGen::DoLoadElements(LLoadElements* instr) {
1580 Abort("DoLoadElements unimplemented."); 1599 ASSERT(instr->result()->Equals(instr->input()));
1600 Register reg = ToRegister(instr->input());
1601 Register scratch = scratch0();
1602
1603 __ ldr(reg, FieldMemOperand(reg, JSObject::kElementsOffset));
1604 if (FLAG_debug_code) {
1605 Label done;
1606 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1607 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1608 __ cmp(scratch, ip);
1609 __ b(eq, &done);
1610 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
1611 __ cmp(scratch, ip);
1612 __ Check(eq, "Check for fast elements failed.");
1613 __ bind(&done);
1614 }
1581 } 1615 }
1582 1616
1583 1617
1584 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { 1618 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
1585 Abort("DoAccessArgumentsAt unimplemented."); 1619 Register arguments = ToRegister(instr->arguments());
1620 Register length = ToRegister(instr->length());
1621 Register index = ToRegister(instr->index());
1622 Register result = ToRegister(instr->result());
1623
1624 // Bail out if index is not a valid argument index. Use unsigned check to get
1625 // negative check for free.
1626 __ sub(length, length, index, SetCC);
1627 DeoptimizeIf(ls, instr->environment());
1628
1629 // There are two words between the frame pointer and the last argument.
1630 // Subtracting from length accounts for one of them; add one more.
1631 __ add(length, length, Operand(1));
1632 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
1586 } 1633 }
1587 1634
1588 1635
1589 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { 1636 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
1590 Abort("DoLoadKeyedFastElement unimplemented."); 1637 Register elements = ToRegister(instr->elements());
1638 Register key = EmitLoadRegister(instr->key(), scratch0());
1639 Register result;
1640 Register scratch = scratch0();
1641
1642 if (instr->load_result() != NULL) {
1643 result = ToRegister(instr->load_result());
1644 } else {
1645 result = ToRegister(instr->result());
1646 ASSERT(result.is(elements));
1647 }
1648
1649 // Load the result.
1650 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
1651 __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1652
1653 Representation r = instr->hydrogen()->representation();
1654 if (r.IsInteger32()) {
1655 // Untag and check for smi.
1656 __ SmiUntag(result);
1657 DeoptimizeIf(cs, instr->environment());
1658 } else if (r.IsDouble()) {
1659 EmitNumberUntagD(result,
1660 ToDoubleRegister(instr->result()),
1661 instr->environment());
1662 } else {
1663 // Check for the hole value.
1664 ASSERT(r.IsTagged());
1665 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
1666 __ cmp(result, scratch);
1667 DeoptimizeIf(eq, instr->environment());
1668 }
1591 } 1669 }
1592 1670
1593 1671
1594 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 1672 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
1595 ASSERT(ToRegister(instr->object()).is(r1)); 1673 ASSERT(ToRegister(instr->object()).is(r1));
1596 ASSERT(ToRegister(instr->key()).is(r0)); 1674 ASSERT(ToRegister(instr->key()).is(r0));
1597 1675
1598 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 1676 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
1599 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1677 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1600 } 1678 }
1601 1679
1602 1680
1603 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 1681 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
1604 Abort("DoArgumentsElements unimplemented."); 1682 Register scratch = scratch0();
1683 Register result = ToRegister(instr->result());
1684
1685 // Check if the calling frame is an arguments adaptor frame.
1686 Label done, adapted;
1687 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1688 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
1689 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1690
1691 // Result is the frame pointer for the frame if not adapted and for the real
1692 // frame below the adaptor frame if adapted.
1693 __ mov(result, fp, LeaveCC, ne);
1694 __ mov(result, scratch, LeaveCC, eq);
1605 } 1695 }
1606 1696
1607 1697
1608 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { 1698 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
1609 Abort("DoArgumentsLength unimplemented."); 1699 Register elem = ToRegister(instr->input());
1700 Register result = ToRegister(instr->result());
1701
1702 Label done;
1703
1704 // If no arguments adaptor frame the number of arguments is fixed.
1705 __ cmp(fp, elem);
1706 __ mov(result, Operand(scope()->num_parameters()));
1707 __ b(eq, &done);
1708
1709 // Arguments adaptor frame present. Get argument length from there.
1710 __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1711 __ ldr(result,
1712 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
1713 __ SmiUntag(result);
1714
1715 // Argument length is in result register.
1716 __ bind(&done);
1610 } 1717 }
1611 1718
1612 1719
1613 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { 1720 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
1614 Abort("DoApplyArguments unimplemented."); 1721 Abort("DoApplyArguments unimplemented.");
1615 } 1722 }
1616 1723
1617 1724
1618 void LCodeGen::DoPushArgument(LPushArgument* instr) { 1725 void LCodeGen::DoPushArgument(LPushArgument* instr) {
1619 LOperand* argument = instr->input(); 1726 LOperand* argument = instr->input();
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
1711 DoMathSqrt(instr); 1818 DoMathSqrt(instr);
1712 break; 1819 break;
1713 default: 1820 default:
1714 Abort("Unimplemented type of LUnaryMathOperation."); 1821 Abort("Unimplemented type of LUnaryMathOperation.");
1715 UNREACHABLE(); 1822 UNREACHABLE();
1716 } 1823 }
1717 } 1824 }
1718 1825
1719 1826
1720 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 1827 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
1721 Abort("DoCallKeyed unimplemented."); 1828 ASSERT(ToRegister(instr->result()).is(r0));
1829
1830 int arity = instr->arity();
1831 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
1832 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1833 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1722 } 1834 }
1723 1835
1724 1836
1725 void LCodeGen::DoCallNamed(LCallNamed* instr) { 1837 void LCodeGen::DoCallNamed(LCallNamed* instr) {
1726 ASSERT(ToRegister(instr->result()).is(r0)); 1838 ASSERT(ToRegister(instr->result()).is(r0));
1727 1839
1728 int arity = instr->arity(); 1840 int arity = instr->arity();
1729 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); 1841 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
1730 __ mov(r2, Operand(instr->name())); 1842 __ mov(r2, Operand(instr->name()));
1731 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1843 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1732 // Restore context register. 1844 // Restore context register.
1733 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1845 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1734 } 1846 }
1735 1847
1736 1848
1737 void LCodeGen::DoCallFunction(LCallFunction* instr) { 1849 void LCodeGen::DoCallFunction(LCallFunction* instr) {
1738 ASSERT(ToRegister(instr->result()).is(r0)); 1850 ASSERT(ToRegister(instr->result()).is(r0));
1739 1851
1740 int arity = instr->arity(); 1852 int arity = instr->arity();
1741 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); 1853 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
1742 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1854 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1743 __ Drop(1); 1855 __ Drop(1);
1744 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1856 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1745 } 1857 }
1746 1858
1747 1859
1748 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 1860 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
1749 Abort("DoCallGlobal unimplemented."); 1861 ASSERT(ToRegister(instr->result()).is(r0));
1862
1863 int arity = instr->arity();
1864 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
1865 __ mov(r2, Operand(instr->name()));
1866 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
1867 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1750 } 1868 }
1751 1869
1752 1870
1753 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 1871 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
1754 ASSERT(ToRegister(instr->result()).is(r0)); 1872 ASSERT(ToRegister(instr->result()).is(r0));
1755 __ mov(r1, Operand(instr->target())); 1873 __ mov(r1, Operand(instr->target()));
1756 CallKnownFunction(instr->target(), instr->arity(), instr); 1874 CallKnownFunction(instr->target(), instr->arity(), instr);
1757 } 1875 }
1758 1876
1759 1877
1760 void LCodeGen::DoCallNew(LCallNew* instr) { 1878 void LCodeGen::DoCallNew(LCallNew* instr) {
1761 ASSERT(ToRegister(instr->input()).is(r1)); 1879 ASSERT(ToRegister(instr->input()).is(r1));
1762 ASSERT(ToRegister(instr->result()).is(r0)); 1880 ASSERT(ToRegister(instr->result()).is(r0));
1763 1881
1764 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall)); 1882 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
1765 __ mov(r0, Operand(instr->arity())); 1883 __ mov(r0, Operand(instr->arity()));
1766 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); 1884 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
1767 } 1885 }
1768 1886
1769 1887
1770 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 1888 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
1771 CallRuntime(instr->function(), instr->arity(), instr); 1889 CallRuntime(instr->function(), instr->arity(), instr);
1772 } 1890 }
1773 1891
1774 1892
1775 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 1893 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
1776 Abort("DoStoreNamedField unimplemented."); 1894 Register object = ToRegister(instr->object());
1895 Register value = ToRegister(instr->value());
1896 Register scratch = scratch0();
1897 int offset = instr->offset();
1898
1899 ASSERT(!object.is(value));
1900
1901 if (!instr->transition().is_null()) {
1902 __ mov(scratch, Operand(instr->transition()));
1903 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
1904 }
1905
1906 // Do the store.
1907 if (instr->is_in_object()) {
1908 __ str(value, FieldMemOperand(object, offset));
1909 if (instr->needs_write_barrier()) {
1910 // Update the write barrier for the object for in-object properties.
1911 __ RecordWrite(object, Operand(offset), value, scratch);
1912 }
1913 } else {
1914 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
1915 __ str(value, FieldMemOperand(scratch, offset));
1916 if (instr->needs_write_barrier()) {
1917 // Update the write barrier for the properties array.
1918 // object is used as a scratch register.
1919 __ RecordWrite(scratch, Operand(offset), value, object);
1920 }
1921 }
1777 } 1922 }
1778 1923
1779 1924
1780 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 1925 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
1781 ASSERT(ToRegister(instr->object()).is(r1)); 1926 ASSERT(ToRegister(instr->object()).is(r1));
1782 ASSERT(ToRegister(instr->value()).is(r0)); 1927 ASSERT(ToRegister(instr->value()).is(r0));
1783 1928
1784 // Name is always in r2. 1929 // Name is always in r2.
1785 __ mov(r2, Operand(instr->name())); 1930 __ mov(r2, Operand(instr->name()));
1786 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 1931 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1787 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1932 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1788 } 1933 }
1789 1934
1790 1935
1791 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 1936 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
1792 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); 1937 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
1793 DeoptimizeIf(hs, instr->environment()); 1938 DeoptimizeIf(hs, instr->environment());
1794 } 1939 }
1795 1940
1796 1941
1797 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { 1942 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
1798 Abort("DoStoreKeyedFastElement unimplemented."); 1943 Register value = ToRegister(instr->value());
1944 Register elements = ToRegister(instr->object());
1945 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
1946 Register scratch = scratch0();
1947
1948 // Do the store.
1949 if (instr->key()->IsConstantOperand()) {
1950 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
1951 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
1952 int offset =
1953 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
1954 __ str(value, FieldMemOperand(elements, offset));
1955 } else {
1956 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
1957 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1958 }
1959
1960 if (instr->hydrogen()->NeedsWriteBarrier()) {
1961 // Compute address of modified element and store it into key register.
1962 __ add(key, scratch, Operand(FixedArray::kHeaderSize));
1963 __ RecordWrite(elements, key, value);
1964 }
1799 } 1965 }
1800 1966
1801 1967
1802 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 1968 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
1803 ASSERT(ToRegister(instr->object()).is(r2)); 1969 ASSERT(ToRegister(instr->object()).is(r2));
1804 ASSERT(ToRegister(instr->key()).is(r1)); 1970 ASSERT(ToRegister(instr->key()).is(r1));
1805 ASSERT(ToRegister(instr->value()).is(r0)); 1971 ASSERT(ToRegister(instr->value()).is(r0));
1806 1972
1807 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 1973 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1808 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1974 CallCode(ic, RelocInfo::CODE_TARGET, instr);
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after
1932 2098
1933 void LCodeGen::DoSmiTag(LSmiTag* instr) { 2099 void LCodeGen::DoSmiTag(LSmiTag* instr) {
1934 LOperand* input = instr->input(); 2100 LOperand* input = instr->input();
1935 ASSERT(input->IsRegister() && input->Equals(instr->result())); 2101 ASSERT(input->IsRegister() && input->Equals(instr->result()));
1936 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 2102 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
1937 __ SmiTag(ToRegister(input)); 2103 __ SmiTag(ToRegister(input));
1938 } 2104 }
1939 2105
1940 2106
1941 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { 2107 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
1942 Abort("DoSmiUntag unimplemented."); 2108 LOperand* input = instr->input();
2109 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2110 if (instr->needs_check()) {
2111 __ tst(ToRegister(input), Operand(kSmiTagMask));
2112 DeoptimizeIf(ne, instr->environment());
2113 }
2114 __ SmiUntag(ToRegister(input));
1943 } 2115 }
1944 2116
1945 2117
1946 void LCodeGen::EmitNumberUntagD(Register input_reg, 2118 void LCodeGen::EmitNumberUntagD(Register input_reg,
1947 DoubleRegister result_reg, 2119 DoubleRegister result_reg,
1948 LEnvironment* env) { 2120 LEnvironment* env) {
1949 Register scratch = scratch0(); 2121 Register scratch = scratch0();
1950 SwVfpRegister flt_scratch = s0; 2122 SwVfpRegister flt_scratch = s0;
1951 ASSERT(!result_reg.is(d0)); 2123 ASSERT(!result_reg.is(d0));
1952 2124
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
2101 2273
2102 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { 2274 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
2103 LOperand* input = instr->input(); 2275 LOperand* input = instr->input();
2104 ASSERT(input->IsRegister()); 2276 ASSERT(input->IsRegister());
2105 __ tst(ToRegister(input), Operand(kSmiTagMask)); 2277 __ tst(ToRegister(input), Operand(kSmiTagMask));
2106 DeoptimizeIf(instr->condition(), instr->environment()); 2278 DeoptimizeIf(instr->condition(), instr->environment());
2107 } 2279 }
2108 2280
2109 2281
2110 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { 2282 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
2111 Abort("DoCheckInstanceType unimplemented."); 2283 Register input = ToRegister(instr->input());
2284 Register scratch = scratch0();
2285 InstanceType first = instr->hydrogen()->first();
2286 InstanceType last = instr->hydrogen()->last();
2287
2288 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2289 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2290 __ cmp(scratch, Operand(first));
2291
2292 // If there is only one type in the interval, check for equality.
2293 if (first == last) {
2294 DeoptimizeIf(ne, instr->environment());
2295 } else {
2296 DeoptimizeIf(lo, instr->environment());
2297 // Omit check for the last type.
2298 if (last != LAST_TYPE) {
2299 __ cmp(scratch, Operand(last));
2300 DeoptimizeIf(hi, instr->environment());
2301 }
2302 }
2112 } 2303 }
2113 2304
2114 2305
2115 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { 2306 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
2116 ASSERT(instr->input()->IsRegister()); 2307 ASSERT(instr->input()->IsRegister());
2117 Register reg = ToRegister(instr->input()); 2308 Register reg = ToRegister(instr->input());
2118 __ cmp(reg, Operand(instr->hydrogen()->target())); 2309 __ cmp(reg, Operand(instr->hydrogen()->target()));
2119 DeoptimizeIf(ne, instr->environment()); 2310 DeoptimizeIf(ne, instr->environment());
2120 } 2311 }
2121 2312
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
2211 // Pick the right runtime function to call. 2402 // Pick the right runtime function to call.
2212 if (instr->hydrogen()->depth() > 1) { 2403 if (instr->hydrogen()->depth() > 1) {
2213 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 2404 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
2214 } else { 2405 } else {
2215 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); 2406 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
2216 } 2407 }
2217 } 2408 }
2218 2409
2219 2410
2220 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 2411 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
2221 Abort("DoRegExpLiteral unimplemented."); 2412 Label materialized;
2413 // Registers will be used as follows:
2414 // r3 = JS function.
2415 // r7 = literals array.
2416 // r1 = regexp literal.
2417 // r0 = regexp literal clone.
2418 // r2 and r4-r6 are used as temporaries.
2419 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2420 __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
2421 int literal_offset = FixedArray::kHeaderSize +
2422 instr->hydrogen()->literal_index() * kPointerSize;
2423 __ ldr(r1, FieldMemOperand(r7, literal_offset));
2424 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2425 __ cmp(r1, ip);
2426 __ b(ne, &materialized);
2427
2428 // Create regexp literal using runtime function
2429 // Result will be in r0.
2430 __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2431 __ mov(r5, Operand(instr->hydrogen()->pattern()));
2432 __ mov(r4, Operand(instr->hydrogen()->flags()));
2433 __ Push(r7, r6, r5, r4);
2434 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
2435 __ mov(r1, r0);
2436
2437 __ bind(&materialized);
2438 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
2439 Label allocated, runtime_allocate;
2440
2441 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
2442 __ jmp(&allocated);
2443
2444 __ bind(&runtime_allocate);
2445 __ mov(r0, Operand(Smi::FromInt(size)));
2446 __ Push(r1, r0);
2447 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
2448 __ pop(r1);
2449
2450 __ bind(&allocated);
2451 // Copy the content into the newly allocated memory.
2452 // (Unroll copy loop once for better throughput).
2453 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
2454 __ ldr(r3, FieldMemOperand(r1, i));
2455 __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
2456 __ str(r3, FieldMemOperand(r0, i));
2457 __ str(r2, FieldMemOperand(r0, i + kPointerSize));
2458 }
2459 if ((size % (2 * kPointerSize)) != 0) {
2460 __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
2461 __ str(r3, FieldMemOperand(r0, size - kPointerSize));
2462 }
2222 } 2463 }
2223 2464
2224 2465
2225 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 2466 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
2226 Abort("DoFunctionLiteral unimplemented."); 2467 // Use the fast case closure allocation code that allocates in new
2468 // space for nested functions that don't need literals cloning.
2469 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
2470 bool pretenure = !instr->hydrogen()->pretenure();
2471 if (shared_info->num_literals() == 0 && !pretenure) {
2472 FastNewClosureStub stub;
2473 __ mov(r1, Operand(shared_info));
2474 __ push(r1);
2475 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2476 } else {
2477 __ mov(r2, Operand(shared_info));
2478 __ mov(r1, Operand(pretenure
2479 ? Factory::true_value()
2480 : Factory::false_value()));
2481 __ Push(cp, r2, r1);
2482 CallRuntime(Runtime::kNewClosure, 3, instr);
2483 }
2227 } 2484 }
2228 2485
2229 2486
2230 void LCodeGen::DoTypeof(LTypeof* instr) { 2487 void LCodeGen::DoTypeof(LTypeof* instr) {
2231 Abort("DoTypeof unimplemented."); 2488 Register input = ToRegister(instr->input());
2489 __ push(input);
2490 CallRuntime(Runtime::kTypeof, 1, instr);
2232 } 2491 }
2233 2492
2234 2493
2235 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 2494 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
2236 Abort("DoTypeofIs unimplemented."); 2495 Register input = ToRegister(instr->input());
2496 Register result = ToRegister(instr->result());
2497 Label true_label;
2498 Label false_label;
2499 Label done;
2500
2501 Condition final_branch_condition = EmitTypeofIs(&true_label,
2502 &false_label,
2503 input,
2504 instr->type_literal());
2505 __ b(final_branch_condition, &true_label);
2506 __ bind(&false_label);
2507 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2508 __ b(&done);
2509
2510 __ bind(&true_label);
2511 __ LoadRoot(result, Heap::kTrueValueRootIndex);
2512
2513 __ bind(&done);
2237 } 2514 }
2238 2515
2239 2516
2240 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { 2517 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
2241 Register input = ToRegister(instr->input()); 2518 Register input = ToRegister(instr->input());
2242 int true_block = chunk_->LookupDestination(instr->true_block_id()); 2519 int true_block = chunk_->LookupDestination(instr->true_block_id());
2243 int false_block = chunk_->LookupDestination(instr->false_block_id()); 2520 int false_block = chunk_->LookupDestination(instr->false_block_id());
2244 Label* true_label = chunk_->GetAssemblyLabel(true_block); 2521 Label* true_label = chunk_->GetAssemblyLabel(true_block);
2245 Label* false_label = chunk_->GetAssemblyLabel(false_block); 2522 Label* false_label = chunk_->GetAssemblyLabel(false_block);
2246 2523
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
2364 2641
2365 2642
2366 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 2643 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
2367 Abort("DoOsrEntry unimplemented."); 2644 Abort("DoOsrEntry unimplemented.");
2368 } 2645 }
2369 2646
2370 2647
2371 #undef __ 2648 #undef __
2372 2649
2373 } } // namespace v8::internal 2650 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm/lithium-arm.cc ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698