Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 8139027: Version 3.6.5 (Closed)
Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 820 matching lines...)
 
   // Push the current return address before the C call. Return will be
   // through pop(pc) below.
   __ push(lr);
   __ PrepareCallCFunction(0, 2, scratch);
   if (masm->use_eabi_hardfloat()) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(d0, r0, r1);
     __ vmov(d1, r2, r3);
   }
-  // Call C routine that may not cause GC or other trouble.
-  __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()),
-                   0, 2);
+  {
+    AllowExternalCallThatCantCauseGC scope(masm);
+    __ CallCFunction(
+        ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
+  }
   // Store answer in the overwritable heap number. Double returned in
   // registers r0 and r1 or in d0.
   if (masm->use_eabi_hardfloat()) {
     CpuFeatures::Scope scope(VFP3);
     __ vstr(d0,
             FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
   } else {
     __ Strd(r0, r1, FieldMemOperand(heap_number_result,
                                     HeapNumber::kValueOffset));
   }
   // Place heap_number_result in r0 and return to the pushed return address.
   __ mov(r0, Operand(heap_number_result));
   __ pop(pc);
 }
 
 
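Note: AllowExternalCallThatCantCauseGC, used in the hunk above, is a scope object: it marks the C++ block that emits the CallCFunction as a region where the external call is known not to trigger a GC. A minimal sketch of the RAII shape involved, with an invented flag name for illustration only (the real class lives in the macro assembler and cooperates with frame tracking):

    // Sketch only: a scope that flags "the next external call cannot GC".
    class AllowExternalCallThatCantCauseGC {
     public:
      explicit AllowExternalCallThatCantCauseGC(MacroAssembler* masm)
          : masm_(masm) {
        masm_->set_external_call_cant_cause_gc(true);   // hypothetical setter
      }
      ~AllowExternalCallThatCantCauseGC() {
        masm_->set_external_call_cant_cause_gc(false);  // restore on scope exit
      }
     private:
      MacroAssembler* masm_;
    };

The point of the pattern is that the permission is tied to the block that emits the call, so it cannot leak past the CallCFunction it is meant to cover.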
+bool WriteInt32ToHeapNumberStub::IsPregenerated() {
+  // These variants are compiled ahead of time. See next method.
+  if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
+    return true;
+  }
+  if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) {
+    return true;
+  }
+  // Other register combinations are generated as and when they are needed,
+  // so it is unsafe to call them from stubs (we can't generate a stub while
+  // we are generating a stub).
+  return false;
+}
+
+
+void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
+  WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
+  WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
+  stub1.GetCode()->set_is_pregenerated(true);
+  stub2.GetCode()->set_is_pregenerated(true);
+}
+
+
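Note: only the two fixed register assignments above are compiled at startup, so code running inside another stub may only use those combinations; asking for any other combination would require compiling a new stub mid-codegen, which the comment rules out. A hedged sketch of what a safe call site must guarantee (the helper below is hypothetical, for illustration only):

    // Hypothetical call-site guard, for illustration only.
    void CallWriteInt32ToHeapNumber(MacroAssembler* masm,
                                    Register the_int,
                                    Register heap_number,
                                    Register scratch) {
      WriteInt32ToHeapNumberStub stub(the_int, heap_number, scratch);
      // From a stub, only pregenerated register combinations are legal,
      // because compiling a new stub here would recurse into codegen.
      ASSERT(stub.IsPregenerated());
      masm->CallStub(&stub);
    }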
 // See comment for class.
 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
   Label max_negative_int;
   // the_int_ has the answer which is a signed int32 but not a Smi.
   // We test for the special value that has a different exponent. This test
   // has the neat side effect of setting the flags according to the sign.
   STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
   __ cmp(the_int_, Operand(0x80000000u));
   __ b(eq, &max_negative_int);
   // Set up the correct exponent in scratch_. All non-Smi int32s have the same.
(...skipping 320 matching lines...)
   } else {
     // Call a native function to do a comparison between two non-NaNs.
     // Call C routine that may not cause GC or other trouble.
     __ push(lr);
     __ PrepareCallCFunction(0, 2, r5);
     if (masm->use_eabi_hardfloat()) {
       CpuFeatures::Scope scope(VFP3);
       __ vmov(d0, r0, r1);
       __ vmov(d1, r2, r3);
     }
+
+    AllowExternalCallThatCantCauseGC scope(masm);
     __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),
                      0, 2);
     __ pop(pc);  // Return.
   }
 }
 
 
 // See comment at call site.
 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                            Register lhs,
                                            Register rhs) {
   ASSERT((lhs.is(r0) && rhs.is(r1)) ||
          (lhs.is(r1) && rhs.is(r0)));
 
   // If either operand is a JS object or an oddball value, then they are
   // not equal since their pointers are different.
   // There is no test for undetectability in strict equality.
-  STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
+  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
   Label first_non_object;
   // Get the type of the first operand into r2 and compare it with
   // FIRST_SPEC_OBJECT_TYPE.
   __ CompareObjectType(rhs, r2, r2, FIRST_SPEC_OBJECT_TYPE);
   __ b(lt, &first_non_object);
 
   // Return non-zero (r0 is not zero)
   Label return_not_equal;
   __ bind(&return_not_equal);
   __ Ret();
(...skipping 371 matching lines...)
 
   // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
   // tagged as a small integer.
   __ InvokeBuiltin(native, JUMP_FUNCTION);
 }
 
 
 // The stub expects its argument in the tos_ register and returns its result in
 // it, too: zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
+  // This stub overrides SometimesSetsUpAFrame() to return false. That means
+  // we cannot call anything that could cause a GC from this stub.
   // This stub uses VFP3 instructions.
   CpuFeatures::Scope scope(VFP3);
 
   Label patch;
   const Register map = r9.is(tos_) ? r7 : r9;
 
   // undefined -> false.
   CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
 
   // Boolean -> its value.
(...skipping 87 matching lines...)
   __ Push(r3, r2, r1);
   // Patch the caller to an appropriate specialized stub and return the
   // operation result to the caller of the stub.
   __ TailCallExternalReference(
       ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
       3,
       1);
 }
 
 
+void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
+  // We don't allow a GC during a store buffer overflow so there is no need to
+  // store the registers in any particular way, but we do have to store and
+  // restore them.
+  __ stm(db_w, sp, kCallerSaved | lr.bit());
+  if (save_doubles_ == kSaveFPRegs) {
+    CpuFeatures::Scope scope(VFP3);
+    __ sub(sp, sp, Operand(kDoubleSize * DwVfpRegister::kNumRegisters));
+    for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) {
+      DwVfpRegister reg = DwVfpRegister::from_code(i);
+      __ vstr(reg, MemOperand(sp, i * kDoubleSize));
+    }
+  }
+  const int argument_count = 1;
+  const int fp_argument_count = 0;
+  const Register scratch = r1;
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
+  __ mov(r0, Operand(ExternalReference::isolate_address()));
+  __ CallCFunction(
+      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      argument_count);
+  if (save_doubles_ == kSaveFPRegs) {
+    CpuFeatures::Scope scope(VFP3);
+    for (int i = 0; i < DwVfpRegister::kNumRegisters; i++) {
+      DwVfpRegister reg = DwVfpRegister::from_code(i);
+      __ vldr(reg, MemOperand(sp, i * kDoubleSize));
+    }
+    __ add(sp, sp, Operand(kDoubleSize * DwVfpRegister::kNumRegisters));
+  }
+  __ ldm(ia_w, sp, kCallerSaved | pc.bit());  // Also pop pc to get Ret(0).
+}
+
+
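Note on the epilogue above: stm(db_w, ...) pushes lr above the caller-saved core registers, so the final ldm(ia_w, sp, kCallerSaved | pc.bit()) restores those registers and loads the saved lr value straight into pc, returning in the same instruction. A sketch of the stack while the C function runs, inferred from the code above and assuming the doubles are saved:

    // Stack layout (higher addresses at the top), sketch only:
    //
    //   saved lr                      <-- reloaded into pc: restore + return
    //   saved caller-saved core regs  <-- stm(db_w, sp, kCallerSaved | lr.bit())
    //   d15
    //   ...                           <-- only when save_doubles_ == kSaveFPRegs
    //   d0                            <-- sp during the C call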
 void UnaryOpStub::PrintName(StringStream* stream) {
   const char* op_name = Token::Name(op_);
   const char* overwrite_name = NULL;  // Make g++ happy.
   switch (mode_) {
     case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
     case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
   }
   stream->Add("UnaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
(...skipping 133 matching lines...)
   if (mode_ == UNARY_OVERWRITE) {
     __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
     __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
     __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   } else {
     Label slow_allocate_heapnumber, heapnumber_allocated;
     __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
     __ jmp(&heapnumber_allocated);
 
     __ bind(&slow_allocate_heapnumber);
-    __ EnterInternalFrame();
-    __ push(r0);
-    __ CallRuntime(Runtime::kNumberAlloc, 0);
-    __ mov(r1, Operand(r0));
-    __ pop(r0);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ push(r0);
+      __ CallRuntime(Runtime::kNumberAlloc, 0);
+      __ mov(r1, Operand(r0));
+      __ pop(r0);
+    }
 
     __ bind(&heapnumber_allocated);
     __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
     __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
     __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
     __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
     __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
     __ mov(r0, Operand(r1));
   }
   __ Ret();
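Note: the hunk above shows the patch's recurring frame idiom. The explicit EnterInternalFrame()/LeaveInternalFrame() pair becomes a block-scoped FrameScope, so the frame teardown is emitted exactly where the C++ block ends and cannot be forgotten on a new early-out path. A simplified sketch of the idea (constructor and destructor bodies are assumptions, not the real implementation):

    // Sketch of an RAII frame helper of this shape.
    class FrameScope {
     public:
      FrameScope(MacroAssembler* masm, StackFrame::Type type)
          : masm_(masm), type_(type) {
        // MANUAL means the stub emits its own frame setup (see CEntryStub).
        if (type != StackFrame::MANUAL) masm_->EnterFrame(type);
      }
      ~FrameScope() {
        if (type_ != StackFrame::MANUAL) masm_->LeaveFrame(type_);
      }
     private:
      MacroAssembler* masm_;
      StackFrame::Type type_;
    };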
(...skipping 20 matching lines...)
 
   // Try to store the result in a heap number.
   __ bind(&try_float);
   if (mode_ == UNARY_NO_OVERWRITE) {
     Label slow_allocate_heapnumber, heapnumber_allocated;
     // Allocate a new heap number without zapping r0, which we need if it fails.
     __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber);
     __ jmp(&heapnumber_allocated);
 
     __ bind(&slow_allocate_heapnumber);
-    __ EnterInternalFrame();
-    __ push(r0);  // Push the heap number, not the untagged int32.
-    __ CallRuntime(Runtime::kNumberAlloc, 0);
-    __ mov(r2, r0);  // Move the new heap number into r2.
-    // Get the heap number into r0, now that the new heap number is in r2.
-    __ pop(r0);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ push(r0);  // Push the heap number, not the untagged int32.
+      __ CallRuntime(Runtime::kNumberAlloc, 0);
+      __ mov(r2, r0);  // Move the new heap number into r2.
+      // Get the heap number into r0, now that the new heap number is in r2.
+      __ pop(r0);
+    }
 
     // Convert the heap number in r0 to an untagged integer in r1.
     // This can't go slow-case because it's the same number we already
     // converted once.
     __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible);
     __ mvn(r1, Operand(r1));
 
     __ bind(&heapnumber_allocated);
     __ mov(r0, r2);  // Move newly allocated heap number to r0.
   }
(...skipping 89 matching lines...)
 }
 
 
 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
     MacroAssembler* masm) {
   UNIMPLEMENTED();
 }
 
 
 void BinaryOpStub::Generate(MacroAssembler* masm) {
+  // Explicitly allow generation of nested stubs. It is safe here because
+  // generation code does not use any raw pointers.
+  AllowStubCallsScope allow_stub_calls(masm, true);
+
   switch (operands_type_) {
     case BinaryOpIC::UNINITIALIZED:
       GenerateTypeTransition(masm);
       break;
     case BinaryOpIC::SMI:
       GenerateSmiStub(masm);
       break;
     case BinaryOpIC::INT32:
       GenerateInt32Stub(masm);
       break;
(...skipping 1085 matching lines...)
   __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
   __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit());
   __ Ret();
 
   __ bind(&invalid_cache);
   // The cache is invalid. Call runtime which will recreate the
   // cache.
   __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
   __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);
   __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
-  __ EnterInternalFrame();
-  __ push(r0);
-  __ CallRuntime(RuntimeFunction(), 1);
-  __ LeaveInternalFrame();
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ push(r0);
+    __ CallRuntime(RuntimeFunction(), 1);
+  }
   __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
   __ Ret();
 
   __ bind(&skip_cache);
   // Call C function to calculate the result and answer directly
   // without updating the cache.
   GenerateCallCFunction(masm, scratch0);
   __ GetCFunctionDoubleResult(d2);
   __ bind(&no_update);
 
   // We return the value in d2 without adding it to the cache, but
   // we cause a scavenging GC so that future allocations will succeed.
-  __ EnterInternalFrame();
-
-  // Allocate an aligned object larger than a HeapNumber.
-  ASSERT(4 * kPointerSize >= HeapNumber::kSize);
-  __ mov(scratch0, Operand(4 * kPointerSize));
-  __ push(scratch0);
-  __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
-  __ LeaveInternalFrame();
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Allocate an aligned object larger than a HeapNumber.
+    ASSERT(4 * kPointerSize >= HeapNumber::kSize);
+    __ mov(scratch0, Operand(4 * kPointerSize));
+    __ push(scratch0);
+    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
+  }
   __ Ret();
 }
 }
 
 
 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
                                                     Register scratch) {
   Isolate* isolate = masm->isolate();
 
   __ push(lr);
   __ PrepareCallCFunction(0, 1, scratch);
   if (masm->use_eabi_hardfloat()) {
     __ vmov(d0, d2);
   } else {
     __ vmov(r0, r1, d2);
   }
+  AllowExternalCallThatCantCauseGC scope(masm);
   switch (type_) {
     case TranscendentalCache::SIN:
       __ CallCFunction(ExternalReference::math_sin_double_function(isolate),
                        0, 1);
       break;
     case TranscendentalCache::COS:
       __ CallCFunction(ExternalReference::math_cos_double_function(isolate),
                        0, 1);
       break;
     case TranscendentalCache::LOG:
(...skipping 75 matching lines...)
   // C function for integer exponents. The register containing
   // the heap number is callee-saved.
   __ AllocateHeapNumber(heapnumber,
                         scratch,
                         scratch2,
                         heapnumbermap,
                         &call_runtime);
   __ push(lr);
   __ PrepareCallCFunction(1, 1, scratch);
   __ SetCallCDoubleArguments(double_base, exponent);
-  __ CallCFunction(
-      ExternalReference::power_double_int_function(masm->isolate()),
-      1, 1);
-  __ pop(lr);
-  __ GetCFunctionDoubleResult(double_result);
+  {
+    AllowExternalCallThatCantCauseGC scope(masm);
+    __ CallCFunction(
+        ExternalReference::power_double_int_function(masm->isolate()),
+        1, 1);
+    __ pop(lr);
+    __ GetCFunctionDoubleResult(double_result);
+  }
   __ vstr(double_result,
           FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
   __ mov(r0, heapnumber);
   __ Ret(2 * kPointerSize);
 
   __ bind(&exponent_not_smi);
   __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
   __ cmp(scratch, heapnumbermap);
   __ b(ne, &call_runtime);
   // Exponent is a heapnumber. Load it into double register.
   __ vldr(double_exponent,
           FieldMemOperand(exponent, HeapNumber::kValueOffset));
 
   // The base and the exponent are in double registers.
   // Allocate a heap number and call a C function for
   // double exponents. The register containing
   // the heap number is callee-saved.
   __ AllocateHeapNumber(heapnumber,
                         scratch,
                         scratch2,
                         heapnumbermap,
                         &call_runtime);
   __ push(lr);
   __ PrepareCallCFunction(0, 2, scratch);
   __ SetCallCDoubleArguments(double_base, double_exponent);
-  __ CallCFunction(
-      ExternalReference::power_double_double_function(masm->isolate()),
-      0, 2);
-  __ pop(lr);
-  __ GetCFunctionDoubleResult(double_result);
+  {
+    AllowExternalCallThatCantCauseGC scope(masm);
+    __ CallCFunction(
+        ExternalReference::power_double_double_function(masm->isolate()),
+        0, 2);
+    __ pop(lr);
+    __ GetCFunctionDoubleResult(double_result);
+  }
   __ vstr(double_result,
           FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
   __ mov(r0, heapnumber);
   __ Ret(2 * kPointerSize);
   }
 
   __ bind(&call_runtime);
   __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
 }
 
 
 bool CEntryStub::NeedsImmovableCode() {
   return true;
 }
 
 
+bool CEntryStub::IsPregenerated() {
+  return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
+          result_size_ == 1;
+}
+
+
+void CodeStub::GenerateStubsAheadOfTime() {
+  CEntryStub::GenerateAheadOfTime();
+  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
+  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
+  RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
+}
+
+
+void CodeStub::GenerateFPStubs() {
+  CEntryStub save_doubles(1, kSaveFPRegs);
+  Handle<Code> code = save_doubles.GetCode();
+  code->set_is_pregenerated(true);
+  StoreBufferOverflowStub stub(kSaveFPRegs);
+  stub.GetCode()->set_is_pregenerated(true);
+  code->GetIsolate()->set_fp_stubs_generated(true);
+}
+
+
+void CEntryStub::GenerateAheadOfTime() {
+  CEntryStub stub(1, kDontSaveFPRegs);
+  Handle<Code> code = stub.GetCode();
+  code->set_is_pregenerated(true);
+}
+
+
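Note: these additions split stub pregeneration into two phases. GenerateStubsAheadOfTime() builds the variants every isolate needs unconditionally, while GenerateFPStubs() is deferred until a register-saving stub is actually wanted and then flips fp_stubs_generated() on the isolate, which is exactly what CEntryStub::IsPregenerated() consults. A hypothetical initialization order, for illustration only (not code from this patch):

    // Hypothetical driver, for illustration.
    void InitializeStubCaches() {
      CodeStub::GenerateStubsAheadOfTime();  // fixed-register, no-FP variants
      if (CpuFeatures::IsSupported(VFP3)) {
        CodeStub::GenerateFPStubs();         // sets fp_stubs_generated()
      }
    }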
 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
   __ Throw(r0);
 }
 
 
 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                           UncatchableExceptionType type) {
   __ ThrowUncatchable(type, r0);
 }
 
(...skipping 91 matching lines...)
   STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
   __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
   __ b(eq, &retry);
 
   // Special handling of out of memory exceptions.
   Failure* out_of_memory = Failure::OutOfMemoryException();
   __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
   __ b(eq, throw_out_of_memory_exception);
 
   // Retrieve the pending exception and clear the variable.
-  __ mov(ip, Operand(ExternalReference::the_hole_value_location(isolate)));
-  __ ldr(r3, MemOperand(ip));
+  __ mov(r3, Operand(isolate->factory()->the_hole_value()));
   __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                        isolate)));
   __ ldr(r0, MemOperand(ip));
   __ str(r3, MemOperand(ip));
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
   __ cmp(r0, Operand(isolate->factory()->termination_exception()));
   __ b(eq, throw_termination_exception);
 
(...skipping 17 matching lines...)
   // NOTE: Invocations of builtins may return failure objects
   // instead of a proper result. The builtin entry handles
   // this by performing a garbage collection and retrying the
   // builtin once.
 
   // Compute the argv pointer in a callee-saved register.
   __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
   __ sub(r6, r6, Operand(kPointerSize));
 
   // Enter the exit frame that transitions from JavaScript to C++.
+  FrameScope scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(save_doubles_);
 
   // Setup argc and the builtin function in callee-saved registers.
   __ mov(r4, Operand(r0));
   __ mov(r5, Operand(r1));
 
   // r4: number of arguments (C callee-saved)
   // r5: pointer to builtin function (C callee-saved)
   // r6: pointer to first argument (C callee-saved)
 
(...skipping 124 matching lines...)
   // Invoke: Link this frame into the handler chain.
   __ bind(&invoke);
   // Must preserve r0-r4, r5-r7 are available.
   __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
   // If an exception not caught by another handler occurs, this handler
   // returns control to the code after the bl(&invoke) above, which
   // restores all kCalleeSaved registers (including cp and fp) to their
   // saved values before returning a failure to C.
 
   // Clear any pending exceptions.
-  __ mov(ip, Operand(ExternalReference::the_hole_value_location(isolate)));
-  __ ldr(r5, MemOperand(ip));
+  __ mov(r5, Operand(isolate->factory()->the_hole_value()));
   __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                        isolate)));
   __ str(r5, MemOperand(ip));
 
   // Invoke the function by calling through JS entry trampoline builtin.
   // Notice that we cannot store a reference to the trampoline code directly in
   // this stub, because runtime stubs are not traversed when doing GC.
 
   // Expected registers by Builtins::JSEntryTrampoline
   // r0: code entry
(...skipping 216 matching lines...)
   __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   // Slow-case. Tail call builtin.
   __ bind(&slow);
   if (!ReturnTrueFalseObject()) {
     if (HasArgsInRegisters()) {
       __ Push(r0, r1);
     }
     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   } else {
-    __ EnterInternalFrame();
-    __ Push(r0, r1);
-    __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(r0, r1);
+      __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
+    }
     __ cmp(r0, Operand::Zero());
     __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
     __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }
 
 
 Register InstanceofStub::left() { return r0; }
 
(...skipping 605 matching lines...)
   __ add(r0, r0, Operand(r2));
   __ str(r0, MemOperand(sp, 2 * kPointerSize));
 
   // Argument 5 (sp[4]): static offsets vector buffer.
   __ mov(r0,
          Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
   __ str(r0, MemOperand(sp, 1 * kPointerSize));
 
   // For arguments 4 and 3 get string length, calculate start of string data and
   // calculate the shift of the index (0 for ASCII and 1 for two byte).
-  STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
-  __ add(r8, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+  __ add(r8, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
   __ eor(r3, r3, Operand(1));
   // Load the length from the original subject string from the previous stack
   // frame. Therefore we have to use fp, which points exactly to two pointer
   // sizes below the previous sp. (Because creating a new stack frame pushes
   // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
   __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
   // If slice offset is not 0, load the length from the original sliced string.
   // Argument 4, r3: End of string data
   // Argument 3, r2: Start of string data
   // Prepare start and end index of the input.
(...skipping 30 matching lines...)
   Label failure;
   __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
   __ b(eq, &failure);
   __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
   // If not exception it can only be retry. Handle that in the runtime system.
   __ b(ne, &runtime);
   // Result must now be exception. If there is no pending exception already a
   // stack overflow (on the backtrack stack) was detected in RegExp code but
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
-  __ mov(r1, Operand(ExternalReference::the_hole_value_location(isolate)));
-  __ ldr(r1, MemOperand(r1, 0));
+  __ mov(r1, Operand(isolate->factory()->the_hole_value()));
   __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                        isolate)));
   __ ldr(r0, MemOperand(r2, 0));
   __ cmp(r0, r1);
   __ b(eq, &runtime);
 
   __ str(r1, MemOperand(r2, 0));  // Clear pending exception.
 
   // Check if the exception is a termination. If so, throw as uncatchable.
   __ CompareRoot(r0, Heap::kTerminationExceptionRootIndex);
(...skipping 21 matching lines...)
   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
   __ add(r1, r1, Operand(2));  // r1 was a smi.
 
   // r1: number of capture registers
   // r4: subject string
   // Store the capture count.
   __ mov(r2, Operand(r1, LSL, kSmiTagSize + kSmiShiftSize));  // To smi.
   __ str(r2, FieldMemOperand(last_match_info_elements,
                              RegExpImpl::kLastCaptureCountOffset));
   // Store last subject and last input.
-  __ mov(r3, last_match_info_elements);  // Moved up to reduce latency.
   __ str(subject,
          FieldMemOperand(last_match_info_elements,
                          RegExpImpl::kLastSubjectOffset));
-  __ RecordWrite(r3, Operand(RegExpImpl::kLastSubjectOffset), r2, r7);
+  __ mov(r2, subject);
+  __ RecordWriteField(last_match_info_elements,
+                      RegExpImpl::kLastSubjectOffset,
+                      r2,
+                      r7,
+                      kLRHasNotBeenSaved,
+                      kDontSaveFPRegs);
   __ str(subject,
          FieldMemOperand(last_match_info_elements,
                          RegExpImpl::kLastInputOffset));
-  __ mov(r3, last_match_info_elements);
-  __ RecordWrite(r3, Operand(RegExpImpl::kLastInputOffset), r2, r7);
+  __ RecordWriteField(last_match_info_elements,
+                      RegExpImpl::kLastInputOffset,
+                      subject,
+                      r7,
+                      kLRHasNotBeenSaved,
+                      kDontSaveFPRegs);
 
   // Get the static offsets vector filled by the native regexp code.
   ExternalReference address_of_static_offsets_vector =
       ExternalReference::address_of_static_offsets_vector(isolate);
   __ mov(r2, Operand(address_of_static_offsets_vector));
 
   // r1: number of capture registers
   // r2: offsets vector
   Label next_capture, done;
   // Capture register counter starts from number of capture registers and
(...skipping 107 matching lines...)
 
   __ bind(&done);
   __ add(sp, sp, Operand(3 * kPointerSize));
   __ Ret();
 
   __ bind(&slowcase);
   __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
 }
 
 
+void CallFunctionStub::FinishCode(Code* code) {
+  code->set_has_function_cache(false);
+}
+
+
+void CallFunctionStub::Clear(Heap* heap, Address address) {
+  UNREACHABLE();
+}
+
+
+Object* CallFunctionStub::GetCachedValue(Address address) {
+  UNREACHABLE();
+  return NULL;
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   Label slow, non_function;
 
   // The receiver might implicitly be the global object. This is
   // indicated by passing the hole as the receiver to the call
   // function stub.
   if (ReceiverMightBeImplicit()) {
     Label call;
     // Get the receiver from the stack.
     // function, receiver [, arguments]
(...skipping 1693 matching lines...)
 }
 
 
 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   __ Push(r1, r0);
   __ push(lr);
 
   // Call the runtime system in a fresh internal frame.
   ExternalReference miss =
       ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
-  __ EnterInternalFrame();
-  __ Push(r1, r0);
-  __ mov(ip, Operand(Smi::FromInt(op_)));
-  __ push(ip);
-  __ CallExternalReference(miss, 3);
-  __ LeaveInternalFrame();
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ Push(r1, r0);
+    __ mov(ip, Operand(Smi::FromInt(op_)));
+    __ push(ip);
+    __ CallExternalReference(miss, 3);
+  }
   // Compute the entry point of the rewritten stub.
   __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
   // Restore registers.
   __ pop(lr);
   __ pop(r0);
   __ pop(r1);
   __ Jump(r2);
 }
 
 
(...skipping 162 matching lines...)
   __ tst(r0, Operand(r0));
   __ mov(scratch2, Operand(r2));
   __ ldm(ia_w, sp, spill_mask);
 
   __ b(ne, done);
   __ b(eq, miss);
 }
 
 
 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
+  // This stub overrides SometimesSetsUpAFrame() to return false. That means
+  // we cannot call anything that could cause a GC from this stub.
   // Registers:
   //  result: StringDictionary to probe
   //  r1: key
   //  : StringDictionary to probe.
   //  index_: will hold an index of entry if lookup is successful.
   //          might alias with result_.
   // Returns:
   //  result_ is zero if lookup failed, non zero otherwise.
 
   Register result = r0;
(...skipping 69 matching lines...)
   __ bind(&in_dictionary);
   __ mov(result, Operand(1));
   __ Ret();
 
   __ bind(&not_in_dictionary);
   __ mov(result, Operand::Zero());
   __ Ret();
 }
 
 
+struct AheadOfTimeWriteBarrierStubList {
+  Register object, value, address;
+  RememberedSetAction action;
+};
+
+
+struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
+  // Used in RegExpExecStub.
+  { r6, r4, r7, EMIT_REMEMBERED_SET },
+  { r6, r2, r7, EMIT_REMEMBERED_SET },
+  // Used in CompileArrayPushCall.
+  // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
+  // Also used in KeyedStoreIC::GenerateGeneric.
+  { r3, r4, r5, EMIT_REMEMBERED_SET },
+  // Used in CompileStoreGlobal.
+  { r4, r1, r2, OMIT_REMEMBERED_SET },
+  // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
+  { r1, r2, r3, EMIT_REMEMBERED_SET },
+  { r3, r2, r1, EMIT_REMEMBERED_SET },
+  // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
+  { r2, r1, r3, EMIT_REMEMBERED_SET },
+  { r3, r1, r2, EMIT_REMEMBERED_SET },
+  // KeyedStoreStubCompiler::GenerateStoreFastElement.
+  { r4, r2, r3, EMIT_REMEMBERED_SET },
+  // Null termination.
+  { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
+};
+
+
+bool RecordWriteStub::IsPregenerated() {
+  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+       !entry->object.is(no_reg);
+       entry++) {
+    if (object_.is(entry->object) &&
+        value_.is(entry->value) &&
+        address_.is(entry->address) &&
+        remembered_set_action_ == entry->action &&
+        save_fp_regs_mode_ == kDontSaveFPRegs) {
+      return true;
+    }
+  }
+  return false;
+}
+
+
+bool StoreBufferOverflowStub::IsPregenerated() {
+  return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated();
+}
+
+
+void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
+  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+  stub1.GetCode()->set_is_pregenerated(true);
+}
+
+
+void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
+  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+       !entry->object.is(no_reg);
+       entry++) {
+    RecordWriteStub stub(entry->object,
+                         entry->value,
+                         entry->address,
+                         entry->action,
+                         kDontSaveFPRegs);
+    stub.GetCode()->set_is_pregenerated(true);
+  }
+}
+
+
+// Takes the input in 3 registers: address_, value_ and object_. A pointer to
+// the value has just been written into the object, now this stub makes sure
+// we keep the GC informed. The word in the object where the value has been
+// written is in the address register.
+void RecordWriteStub::Generate(MacroAssembler* masm) {
+  Label skip_to_incremental_noncompacting;
+  Label skip_to_incremental_compacting;
+
+  // The first two instructions are generated with labels so as to get the
+  // offset fixed up correctly by the bind(Label*) call. We patch it back and
+  // forth between a compare instruction (a nop in this position) and the
+  // real branch when we start and stop incremental heap marking.
+  // See RecordWriteStub::Patch for details.
+  __ b(&skip_to_incremental_noncompacting);
+  __ b(&skip_to_incremental_compacting);
+
+  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+    __ RememberedSetHelper(object_,
+                           address_,
+                           value_,
+                           save_fp_regs_mode_,
+                           MacroAssembler::kReturnAtEnd);
+  }
+  __ Ret();
+
+  __ bind(&skip_to_incremental_noncompacting);
+  GenerateIncremental(masm, INCREMENTAL);
+
+  __ bind(&skip_to_incremental_compacting);
+  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
+
+  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
+  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
+  ASSERT(Assembler::GetBranchOffset(masm->instr_at(0)) < (1 << 12));
+  ASSERT(Assembler::GetBranchOffset(masm->instr_at(4)) < (1 << 12));
+  PatchBranchIntoNop(masm, 0);
+  PatchBranchIntoNop(masm, Assembler::kInstrSize);
+}
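Note: the two leading branches make the stub self-patching. In the default STORE_BUFFER_ONLY mode both are rewritten into compares that execute as nops, so the stub falls through to the cheap remembered-set path; activating incremental marking patches one of them back into a real branch. Schematically, a sketch of the three patched states, inferred from the comment and the PatchBranchIntoNop calls above:

    // Instruction 0 / instruction Assembler::kInstrSize, sketch only:
    //
    //   STORE_BUFFER_ONLY:       nop            nop        (fall through)
    //   INCREMENTAL:             b incremental  nop
    //   INCREMENTAL_COMPACTION:  nop            b compacting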
+
+
+void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
+  regs_.Save(masm);
+
+  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+    Label dont_need_remembered_set;
+
+    __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
+    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
+                           regs_.scratch0(),
+                           &dont_need_remembered_set);
+
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch0(),
+                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                     ne,
+                     &dont_need_remembered_set);
+
+    // First notify the incremental marker if necessary, then update the
+    // remembered set.
+    CheckNeedsToInformIncrementalMarker(
+        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
+    InformIncrementalMarker(masm, mode);
+    regs_.Restore(masm);
+    __ RememberedSetHelper(object_,
+                           address_,
+                           value_,
+                           save_fp_regs_mode_,
+                           MacroAssembler::kReturnAtEnd);
+
+    __ bind(&dont_need_remembered_set);
+  }
+
+  CheckNeedsToInformIncrementalMarker(
+      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
+  InformIncrementalMarker(masm, mode);
+  regs_.Restore(masm);
+  __ Ret();
+}
+
+
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
+  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
+  int argument_count = 3;
+  __ PrepareCallCFunction(argument_count, regs_.scratch0());
+  Register address =
+      r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
+  ASSERT(!address.is(regs_.object()));
+  ASSERT(!address.is(r0));
+  __ Move(address, regs_.address());
+  __ Move(r0, regs_.object());
+  if (mode == INCREMENTAL_COMPACTION) {
+    __ Move(r1, address);
+  } else {
+    ASSERT(mode == INCREMENTAL);
+    __ ldr(r1, MemOperand(address, 0));
+  }
+  __ mov(r2, Operand(ExternalReference::isolate_address()));
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+  if (mode == INCREMENTAL_COMPACTION) {
+    __ CallCFunction(
+        ExternalReference::incremental_evacuation_record_write_function(
+            masm->isolate()),
+        argument_count);
+  } else {
+    ASSERT(mode == INCREMENTAL);
+    __ CallCFunction(
+        ExternalReference::incremental_marking_record_write_function(
+            masm->isolate()),
+        argument_count);
+  }
+  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
+}
+
+
+void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
+    MacroAssembler* masm,
+    OnNoNeedToInformIncrementalMarker on_no_need,
+    Mode mode) {
+  Label on_black;
+  Label need_incremental;
+  Label need_incremental_pop_scratch;
+
+  // Let's look at the color of the object: If it is not black we don't have
+  // to inform the incremental marker.
+  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
+
+  regs_.Restore(masm);
+  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
+    __ RememberedSetHelper(object_,
+                           address_,
+                           value_,
+                           save_fp_regs_mode_,
+                           MacroAssembler::kReturnAtEnd);
+  } else {
+    __ Ret();
+  }
+
+  __ bind(&on_black);
+
+  // Get the value from the slot.
+  __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
+
+  if (mode == INCREMENTAL_COMPACTION) {
+    Label ensure_not_white;
+
+    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kEvacuationCandidateMask,
+                     eq,
+                     &ensure_not_white);
+
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
+                     eq,
+                     &need_incremental);
+
+    __ bind(&ensure_not_white);
+  }
+
+  // We need extra registers for this, so we push the object and the address
+  // register temporarily.
+  __ Push(regs_.object(), regs_.address());
+  __ EnsureNotWhite(regs_.scratch0(),  // The value.
+                    regs_.scratch1(),  // Scratch.
+                    regs_.object(),    // Scratch.
+                    regs_.address(),   // Scratch.
+                    &need_incremental_pop_scratch);
+  __ Pop(regs_.object(), regs_.address());
+
+  regs_.Restore(masm);
+  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
+    __ RememberedSetHelper(object_,
+                           address_,
+                           value_,
+                           save_fp_regs_mode_,
+                           MacroAssembler::kReturnAtEnd);
+  } else {
+    __ Ret();
+  }
+
+  __ bind(&need_incremental_pop_scratch);
+  __ Pop(regs_.object(), regs_.address());
+
+  __ bind(&need_incremental);
+
+  // Fall through when we need to inform the incremental marker.
+}
+
+
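Note: CheckNeedsToInformIncrementalMarker encodes the incremental write-barrier policy: a store into an object the marker has not yet visited (non-black) needs no notification, while a store into a black object must grey the stored value so it still gets scanned. A pseudocode sketch of the emitted control flow, simplified and ignoring the register shuffling (names from the code above):

    // Sketch of the generated logic, not literal code.
    if (!IsBlack(object)) {
      // Marker will still scan this object; just update the remembered set
      // (or return, depending on on_no_need) and we are done.
    } else {
      value = *address;       // the freshly written pointer
      // Under INCREMENTAL_COMPACTION, a value on an evacuation-candidate
      // page whose slot is not recorded forces the need_incremental path.
      EnsureNotWhite(value);  // grey the value so it gets marked
    }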
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM