OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1710 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1721 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. | 1721 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. |
1722 __ sll(t5, t2, 1); | 1722 __ sll(t5, t2, 1); |
1723 __ addiu(t5, t5, kParameterMapHeaderSize); | 1723 __ addiu(t5, t5, kParameterMapHeaderSize); |
1724 __ bind(&param_map_size); | 1724 __ bind(&param_map_size); |
1725 | 1725 |
1726 // 2. Backing store. | 1726 // 2. Backing store. |
1727 __ Lsa(t5, t5, t1, 1); | 1727 __ Lsa(t5, t5, t1, 1); |
1728 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); | 1728 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); |
1729 | 1729 |
1730 // 3. Arguments object. | 1730 // 3. Arguments object. |
1731 __ Addu(t5, t5, Operand(Heap::kSloppyArgumentsObjectSize)); | 1731 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize)); |
1732 | 1732 |
1733 // Do the allocation of all three objects in one go. | 1733 // Do the allocation of all three objects in one go. |
1734 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT); | 1734 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT); |
1735 | 1735 |
1736 // v0 = address of new object(s) (tagged) | 1736 // v0 = address of new object(s) (tagged) |
1737 // a2 = argument count (smi-tagged) | 1737 // a2 = argument count (smi-tagged) |
1738 // Get the arguments boilerplate from the current native context into t0. | 1738 // Get the arguments boilerplate from the current native context into t0. |
1739 const int kNormalOffset = | 1739 const int kNormalOffset = |
1740 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); | 1740 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); |
1741 const int kAliasedOffset = | 1741 const int kAliasedOffset = |
(...skipping 12 matching lines...) Expand all Loading... |
1754 // v0 = address of new object (tagged) | 1754 // v0 = address of new object (tagged) |
1755 // a2 = argument count (smi-tagged) | 1755 // a2 = argument count (smi-tagged) |
1756 // t0 = address of arguments map (tagged) | 1756 // t0 = address of arguments map (tagged) |
1757 // t2 = mapped parameter count (tagged) | 1757 // t2 = mapped parameter count (tagged) |
1758 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); | 1758 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); |
1759 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); | 1759 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); |
1760 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 1760 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
1761 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1761 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); |
1762 | 1762 |
1763 // Set up the callee in-object property. | 1763 // Set up the callee in-object property. |
1764 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | |
1765 __ AssertNotSmi(a1); | 1764 __ AssertNotSmi(a1); |
1766 const int kCalleeOffset = JSObject::kHeaderSize + | 1765 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); |
1767 Heap::kArgumentsCalleeIndex * kPointerSize; | |
1768 __ sw(a1, FieldMemOperand(v0, kCalleeOffset)); | |
1769 | 1766 |
1770 // Use the length (smi tagged) and set that as an in-object property too. | 1767 // Use the length (smi tagged) and set that as an in-object property too. |
1771 __ AssertSmi(t1); | 1768 __ AssertSmi(t1); |
1772 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1769 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
1773 const int kLengthOffset = JSObject::kHeaderSize + | |
1774 Heap::kArgumentsLengthIndex * kPointerSize; | |
1775 __ sw(t1, FieldMemOperand(v0, kLengthOffset)); | |
1776 | 1770 |
1777 // Set up the elements pointer in the allocated arguments object. | 1771 // Set up the elements pointer in the allocated arguments object. |
1778 // If we allocated a parameter map, t0 will point there, otherwise | 1772 // If we allocated a parameter map, t0 will point there, otherwise |
1779 // it will point to the backing store. | 1773 // it will point to the backing store. |
1780 __ Addu(t0, v0, Operand(Heap::kSloppyArgumentsObjectSize)); | 1774 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize)); |
1781 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1775 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
1782 | 1776 |
1783 // v0 = address of new object (tagged) | 1777 // v0 = address of new object (tagged) |
1784 // a2 = argument count (tagged) | 1778 // a2 = argument count (tagged) |
1785 // t0 = address of parameter map or backing store (tagged) | 1779 // t0 = address of parameter map or backing store (tagged) |
1786 // t2 = mapped parameter count (tagged) | 1780 // t2 = mapped parameter count (tagged) |
1787 // Initialize parameter map. If there are no mapped arguments, we're done. | 1781 // Initialize parameter map. If there are no mapped arguments, we're done. |
1788 Label skip_parameter_map; | 1782 Label skip_parameter_map; |
1789 Label skip3; | 1783 Label skip3; |
1790 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); | 1784 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1834 __ Addu(t6, t0, a0); | 1828 __ Addu(t6, t0, a0); |
1835 __ sw(t5, MemOperand(t6)); | 1829 __ sw(t5, MemOperand(t6)); |
1836 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); | 1830 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); |
1837 __ Addu(t6, a1, a0); | 1831 __ Addu(t6, a1, a0); |
1838 __ sw(t3, MemOperand(t6)); | 1832 __ sw(t3, MemOperand(t6)); |
1839 __ Addu(t5, t5, Operand(Smi::FromInt(1))); | 1833 __ Addu(t5, t5, Operand(Smi::FromInt(1))); |
1840 __ bind(&parameters_test); | 1834 __ bind(&parameters_test); |
1841 __ Branch(&parameters_loop, ne, t1, Operand(Smi::FromInt(0))); | 1835 __ Branch(&parameters_loop, ne, t1, Operand(Smi::FromInt(0))); |
1842 | 1836 |
1843 // t1 = argument count (tagged). | 1837 // t1 = argument count (tagged). |
1844 __ lw(t1, FieldMemOperand(v0, kLengthOffset)); | 1838 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
1845 | 1839 |
1846 __ bind(&skip_parameter_map); | 1840 __ bind(&skip_parameter_map); |
1847 // v0 = address of new object (tagged) | 1841 // v0 = address of new object (tagged) |
1848 // a1 = address of backing store (tagged) | 1842 // a1 = address of backing store (tagged) |
1849 // t1 = argument count (tagged) | 1843 // t1 = argument count (tagged) |
1850 // t2 = mapped parameter count (tagged) | 1844 // t2 = mapped parameter count (tagged) |
1851 // t5 = scratch | 1845 // t5 = scratch |
1852 // Copy arguments header and remaining slots (if there are any). | 1846 // Copy arguments header and remaining slots (if there are any). |
1853 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); | 1847 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); |
1854 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); | 1848 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1896 | 1890 |
1897 // Perform tail call to the entry. | 1891 // Perform tail call to the entry. |
1898 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); | 1892 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); |
1899 | 1893 |
1900 __ bind(&slow); | 1894 __ bind(&slow); |
1901 PropertyAccessCompiler::TailCallBuiltin( | 1895 PropertyAccessCompiler::TailCallBuiltin( |
1902 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1896 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
1903 } | 1897 } |
1904 | 1898 |
1905 | 1899 |
1906 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | |
1907 // a1 : function | |
1908 // a2 : number of parameters (tagged) | |
1909 // a3 : parameters pointer | |
1910 | |
1911 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | |
1912 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | |
1913 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | |
1914 | |
1915 // Check if the calling frame is an arguments adaptor frame. | |
1916 Label try_allocate, runtime; | |
1917 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
1918 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); | |
1919 __ Branch(&try_allocate, ne, a0, | |
1920 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
1921 | |
1922 // Patch the arguments.length and the parameters pointer. | |
1923 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
1924 __ Lsa(t0, t0, a2, kPointerSizeLog2 - kSmiTagSize); | |
1925 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); | |
1926 | |
1927 // Try the new space allocation. Start out with computing the size | |
1928 // of the arguments object and the elements array in words. | |
1929 Label add_arguments_object; | |
1930 __ bind(&try_allocate); | |
1931 __ SmiUntag(t5, a2); | |
1932 __ Branch(&add_arguments_object, eq, a2, Operand(zero_reg)); | |
1933 | |
1934 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize / kPointerSize)); | |
1935 __ bind(&add_arguments_object); | |
1936 __ Addu(t5, t5, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); | |
1937 | |
1938 // Do the allocation of both objects in one go. | |
1939 __ Allocate(t5, v0, t0, t1, &runtime, | |
1940 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | |
1941 | |
1942 // Get the arguments boilerplate from the current native context. | |
1943 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, t0); | |
1944 | |
1945 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); | |
1946 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex); | |
1947 __ sw(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
1948 __ sw(t1, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
1949 | |
1950 // Get the length (smi tagged) and set that as an in-object property too. | |
1951 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | |
1952 __ AssertSmi(a2); | |
1953 __ sw(a2, | |
1954 FieldMemOperand(v0, JSObject::kHeaderSize + | |
1955 Heap::kArgumentsLengthIndex * kPointerSize)); | |
1956 | |
1957 Label done; | |
1958 __ Branch(&done, eq, a2, Operand(zero_reg)); | |
1959 | |
1960 // Set up the elements pointer in the allocated arguments object and | |
1961 // initialize the header in the elements fixed array. | |
1962 __ Addu(t0, v0, Operand(Heap::kStrictArgumentsObjectSize)); | |
1963 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
1964 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); | |
1965 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); | |
1966 __ sw(a2, FieldMemOperand(t0, FixedArray::kLengthOffset)); | |
1967 __ SmiUntag(a2); | |
1968 | |
1969 // Copy the fixed array slots. | |
1970 Label loop; | |
1971 // Set up t0 to point to the first array slot. | |
1972 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
1973 __ bind(&loop); | |
1974 // Pre-decrement a3 with kPointerSize on each iteration. | |
1975 // Pre-decrement in order to skip receiver. | |
1976 __ Addu(a3, a3, Operand(-kPointerSize)); | |
1977 __ lw(t1, MemOperand(a3)); | |
1978 // Post-increment t0 with kPointerSize on each iteration. | |
1979 __ sw(t1, MemOperand(t0)); | |
1980 __ Addu(t0, t0, Operand(kPointerSize)); | |
1981 __ Subu(a2, a2, Operand(1)); | |
1982 __ Branch(&loop, ne, a2, Operand(zero_reg)); | |
1983 | |
1984 // Return. | |
1985 __ bind(&done); | |
1986 __ Ret(); | |
1987 | |
1988 // Do the runtime call to allocate the arguments object. | |
1989 __ bind(&runtime); | |
1990 __ Push(a1, a3, a2); | |
1991 __ TailCallRuntime(Runtime::kNewStrictArguments); | |
1992 } | |
1993 | |
1994 | |
1995 void RegExpExecStub::Generate(MacroAssembler* masm) { | 1900 void RegExpExecStub::Generate(MacroAssembler* masm) { |
1996 // Just jump directly to runtime if native RegExp is not selected at compile | 1901 // Just jump directly to runtime if native RegExp is not selected at compile |
1997 // time or if regexp entry in generated code is turned off runtime switch or | 1902 // time or if regexp entry in generated code is turned off runtime switch or |
1998 // at compilation. | 1903 // at compilation. |
1999 #ifdef V8_INTERPRETED_REGEXP | 1904 #ifdef V8_INTERPRETED_REGEXP |
2000 __ TailCallRuntime(Runtime::kRegExpExec); | 1905 __ TailCallRuntime(Runtime::kRegExpExec); |
2001 #else // V8_INTERPRETED_REGEXP | 1906 #else // V8_INTERPRETED_REGEXP |
2002 | 1907 |
2003 // Stack frame on entry. | 1908 // Stack frame on entry. |
2004 // sp[0]: last_match_info (expected JSArray) | 1909 // sp[0]: last_match_info (expected JSArray) |
(...skipping 3248 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5253 { | 5158 { |
5254 Label loop, done_loop; | 5159 Label loop, done_loop; |
5255 __ sll(at, a0, kPointerSizeLog2 - 1); | 5160 __ sll(at, a0, kPointerSizeLog2 - 1); |
5256 __ Addu(a1, a3, at); | 5161 __ Addu(a1, a3, at); |
5257 __ bind(&loop); | 5162 __ bind(&loop); |
5258 __ Branch(&done_loop, eq, a1, Operand(a3)); | 5163 __ Branch(&done_loop, eq, a1, Operand(a3)); |
5259 __ lw(at, MemOperand(a2, 0 * kPointerSize)); | 5164 __ lw(at, MemOperand(a2, 0 * kPointerSize)); |
5260 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize)); | 5165 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize)); |
5261 __ Subu(a2, a2, Operand(1 * kPointerSize)); | 5166 __ Subu(a2, a2, Operand(1 * kPointerSize)); |
5262 __ Addu(a3, a3, Operand(1 * kPointerSize)); | 5167 __ Addu(a3, a3, Operand(1 * kPointerSize)); |
5263 __ b(&loop); | 5168 __ jmp(&loop); |
5264 __ bind(&done_loop); | 5169 __ bind(&done_loop); |
5265 } | 5170 } |
5266 | 5171 |
5267 // Setup the rest parameter array in a3. | 5172 // Setup the rest parameter array in a3. |
5268 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at); | 5173 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at); |
5269 __ sw(at, FieldMemOperand(a3, JSArray::kMapOffset)); | 5174 __ sw(at, FieldMemOperand(a3, JSArray::kMapOffset)); |
5270 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | 5175 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
5271 __ sw(at, FieldMemOperand(a3, JSArray::kPropertiesOffset)); | 5176 __ sw(at, FieldMemOperand(a3, JSArray::kPropertiesOffset)); |
5272 __ sw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); | 5177 __ sw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); |
5273 __ sw(a0, FieldMemOperand(a3, JSArray::kLengthOffset)); | 5178 __ sw(a0, FieldMemOperand(a3, JSArray::kLengthOffset)); |
5274 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); | 5179 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); |
5275 __ Ret(USE_DELAY_SLOT); | 5180 __ Ret(USE_DELAY_SLOT); |
5276 __ mov(v0, a3); // In delay slot | 5181 __ mov(v0, a3); // In delay slot |
5277 | 5182 |
5278 // Fall back to %AllocateInNewSpace. | 5183 // Fall back to %AllocateInNewSpace. |
5279 __ bind(&allocate); | 5184 __ bind(&allocate); |
5280 { | 5185 { |
5281 FrameScope scope(masm, StackFrame::INTERNAL); | 5186 FrameScope scope(masm, StackFrame::INTERNAL); |
5282 __ SmiTag(a1); | 5187 __ SmiTag(a1); |
5283 __ Push(a0, a2, a1); | 5188 __ Push(a0, a2, a1); |
5284 __ CallRuntime(Runtime::kAllocateInNewSpace); | 5189 __ CallRuntime(Runtime::kAllocateInNewSpace); |
5285 __ Pop(a0, a2); | 5190 __ Pop(a0, a2); |
5286 } | 5191 } |
5287 __ jmp(&done_allocate); | 5192 __ jmp(&done_allocate); |
5288 } | 5193 } |
5289 } | 5194 } |
5290 | 5195 |
5291 | 5196 |
| 5197 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { |
| 5198 // ----------- S t a t e ------------- |
| 5199 // -- a1 : function |
| 5200 // -- cp : context |
| 5201 // -- fp : frame pointer |
| 5202 // -- ra : return address |
| 5203 // ----------------------------------- |
| 5204 __ AssertFunction(a1); |
| 5205 |
| 5206 // For Ignition we need to skip all possible handler/stub frames until |
| 5207 // we reach the JavaScript frame for the function (similar to what the |
| 5208 // runtime fallback implementation does). So make a2 point to that |
| 5209 // JavaScript frame. |
| 5210 { |
| 5211 Label loop, loop_entry; |
| 5212 __ Branch(USE_DELAY_SLOT, &loop_entry); |
| 5213 __ mov(a2, fp); // In delay slot. |
| 5214 __ bind(&loop); |
| 5215 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); |
| 5216 __ bind(&loop_entry); |
| 5217 __ lw(a3, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); |
| 5218 __ Branch(&loop, ne, a1, Operand(a3)); |
| 5219 } |
| 5220 |
| 5221 // Check if we have an arguments adaptor frame below the function frame. |
| 5222 Label arguments_adaptor, arguments_done; |
| 5223 __ lw(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); |
| 5224 __ lw(a0, MemOperand(a3, StandardFrameConstants::kContextOffset)); |
| 5225 __ Branch(&arguments_adaptor, eq, a0, |
| 5226 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 5227 { |
| 5228 __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 5229 __ lw(a0, |
| 5230 FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 5231 __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1); |
| 5232 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - |
| 5233 1 * kPointerSize)); |
| 5234 } |
| 5235 __ Branch(&arguments_done); |
| 5236 __ bind(&arguments_adaptor); |
| 5237 { |
| 5238 __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 5239 __ Lsa(a2, a3, a0, kPointerSizeLog2 - 1); |
| 5240 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - |
| 5241 1 * kPointerSize)); |
| 5242 } |
| 5243 __ bind(&arguments_done); |
| 5244 |
| 5245 // ----------- S t a t e ------------- |
| 5246 // -- cp : context |
| 5247 // -- a0 : number of rest parameters (tagged) |
| 5248 // -- a2 : pointer to first rest parameters |
| 5249 // -- ra : return address |
| 5250 // ----------------------------------- |
| 5251 |
| 5252 // Allocate space for the strict arguments object plus the backing store. |
| 5253 Label allocate, done_allocate; |
| 5254 __ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); |
| 5255 __ Lsa(a1, a1, a0, kPointerSizeLog2 - 1); |
| 5256 __ Allocate(a1, v0, a3, t0, &allocate, TAG_OBJECT); |
| 5257 __ bind(&done_allocate); |
| 5258 |
| 5259 // Setup the elements array in v0. |
| 5260 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
| 5261 __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset)); |
| 5262 __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
| 5263 __ Addu(a3, v0, Operand(FixedArray::kHeaderSize)); |
| 5264 { |
| 5265 Label loop, done_loop; |
| 5266 __ sll(at, a0, kPointerSizeLog2 - 1); |
| 5267 __ Addu(a1, a3, at); |
| 5268 __ bind(&loop); |
| 5269 __ Branch(&done_loop, eq, a1, Operand(a3)); |
| 5270 __ lw(at, MemOperand(a2, 0 * kPointerSize)); |
| 5271 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize)); |
| 5272 __ Subu(a2, a2, Operand(1 * kPointerSize)); |
| 5273 __ Addu(a3, a3, Operand(1 * kPointerSize)); |
| 5274 __ Branch(&loop); |
| 5275 __ bind(&done_loop); |
| 5276 } |
| 5277 |
| 5278 // Setup the strict arguments object in a3. |
| 5279 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at); |
| 5280 __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset)); |
| 5281 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
| 5282 __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset)); |
| 5283 __ sw(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset)); |
| 5284 __ sw(a0, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset)); |
| 5285 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize); |
| 5286 __ Ret(USE_DELAY_SLOT); |
| 5287 __ mov(v0, a3); // In delay slot |
| 5288 |
| 5289 // Fall back to %AllocateInNewSpace. |
| 5290 __ bind(&allocate); |
| 5291 { |
| 5292 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5293 __ SmiTag(a1); |
| 5294 __ Push(a0, a2, a1); |
| 5295 __ CallRuntime(Runtime::kAllocateInNewSpace); |
| 5296 __ Pop(a0, a2); |
| 5297 } |
| 5298 __ jmp(&done_allocate); |
| 5299 } |
| 5300 |
| 5301 |
5292 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5302 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5293 Register context_reg = cp; | 5303 Register context_reg = cp; |
5294 Register slot_reg = a2; | 5304 Register slot_reg = a2; |
5295 Register result_reg = v0; | 5305 Register result_reg = v0; |
5296 Label slow_case; | 5306 Label slow_case; |
5297 | 5307 |
5298 // Go up context chain to the script context. | 5308 // Go up context chain to the script context. |
5299 for (int i = 0; i < depth(); ++i) { | 5309 for (int i = 0; i < depth(); ++i) { |
5300 __ lw(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX)); | 5310 __ lw(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX)); |
5301 context_reg = result_reg; | 5311 context_reg = result_reg; |
(...skipping 440 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5742 return_value_operand, NULL); | 5752 return_value_operand, NULL); |
5743 } | 5753 } |
5744 | 5754 |
5745 | 5755 |
5746 #undef __ | 5756 #undef __ |
5747 | 5757 |
5748 } // namespace internal | 5758 } // namespace internal |
5749 } // namespace v8 | 5759 } // namespace v8 |
5750 | 5760 |
5751 #endif // V8_TARGET_ARCH_MIPS | 5761 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |