| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 1711 matching lines...) |
| 1722 __ SmiScale(t1, a6, kPointerSizeLog2); | 1722 __ SmiScale(t1, a6, kPointerSizeLog2); |
| 1723 __ daddiu(t1, t1, kParameterMapHeaderSize); | 1723 __ daddiu(t1, t1, kParameterMapHeaderSize); |
| 1724 __ bind(&param_map_size); | 1724 __ bind(&param_map_size); |
| 1725 | 1725 |
| 1726 // 2. Backing store. | 1726 // 2. Backing store. |
| 1727 __ SmiScale(t2, a5, kPointerSizeLog2); | 1727 __ SmiScale(t2, a5, kPointerSizeLog2); |
| 1728 __ Daddu(t1, t1, Operand(t2)); | 1728 __ Daddu(t1, t1, Operand(t2)); |
| 1729 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize)); | 1729 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize)); |
| 1730 | 1730 |
| 1731 // 3. Arguments object. | 1731 // 3. Arguments object. |
| 1732 __ Daddu(t1, t1, Operand(Heap::kSloppyArgumentsObjectSize)); | 1732 __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize)); |
| 1733 | 1733 |
| 1734 // Do the allocation of all three objects in one go. | 1734 // Do the allocation of all three objects in one go. |
| 1735 __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT); | 1735 __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT); |
| 1736 | 1736 |
| 1737 // v0 = address of new object(s) (tagged) | 1737 // v0 = address of new object(s) (tagged) |
| 1738 // a2 = argument count (smi-tagged) | 1738 // a2 = argument count (smi-tagged) |
| 1739 // Get the arguments boilerplate from the current native context into a4. | 1739 // Get the arguments boilerplate from the current native context into a4. |
| 1740 const int kNormalOffset = | 1740 const int kNormalOffset = |
| 1741 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); | 1741 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); |
| 1742 const int kAliasedOffset = | 1742 const int kAliasedOffset = |
| (...skipping 12 matching lines...) |
| 1755 // v0 = address of new object (tagged) | 1755 // v0 = address of new object (tagged) |
| 1756 // a2 = argument count (smi-tagged) | 1756 // a2 = argument count (smi-tagged) |
| 1757 // a4 = address of arguments map (tagged) | 1757 // a4 = address of arguments map (tagged) |
| 1758 // a6 = mapped parameter count (tagged) | 1758 // a6 = mapped parameter count (tagged) |
| 1759 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset)); | 1759 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset)); |
| 1760 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex); | 1760 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex); |
| 1761 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 1761 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 1762 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1762 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1763 | 1763 |
| 1764 // Set up the callee in-object property. | 1764 // Set up the callee in-object property. |
| 1765 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | |
| 1766 __ AssertNotSmi(a1); | 1765 __ AssertNotSmi(a1); |
| 1767 const int kCalleeOffset = JSObject::kHeaderSize + | 1766 __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); |
| 1768 Heap::kArgumentsCalleeIndex * kPointerSize; | |
| 1769 __ sd(a1, FieldMemOperand(v0, kCalleeOffset)); | |
| 1770 | 1767 |
| 1771 // Use the length (smi tagged) and set that as an in-object property too. | 1768 // Use the length (smi tagged) and set that as an in-object property too. |
| 1772 __ AssertSmi(a5); | 1769 __ AssertSmi(a5); |
| 1773 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1770 __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
| 1774 const int kLengthOffset = JSObject::kHeaderSize + | |
| 1775 Heap::kArgumentsLengthIndex * kPointerSize; | |
| 1776 __ sd(a5, FieldMemOperand(v0, kLengthOffset)); | |
| 1777 | 1771 |
| 1778 // Set up the elements pointer in the allocated arguments object. | 1772 // Set up the elements pointer in the allocated arguments object. |
| 1779 // If we allocated a parameter map, a4 will point there, otherwise | 1773 // If we allocated a parameter map, a4 will point there, otherwise |
| 1780 // it will point to the backing store. | 1774 // it will point to the backing store. |
| 1781 __ Daddu(a4, v0, Operand(Heap::kSloppyArgumentsObjectSize)); | 1775 __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize)); |
| 1782 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1776 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1783 | 1777 |
| 1784 // v0 = address of new object (tagged) | 1778 // v0 = address of new object (tagged) |
| 1785 // a2 = argument count (tagged) | 1779 // a2 = argument count (tagged) |
| 1786 // a4 = address of parameter map or backing store (tagged) | 1780 // a4 = address of parameter map or backing store (tagged) |
| 1787 // a6 = mapped parameter count (tagged) | 1781 // a6 = mapped parameter count (tagged) |
| 1788 // Initialize parameter map. If there are no mapped arguments, we're done. | 1782 // Initialize parameter map. If there are no mapped arguments, we're done. |
| 1789 Label skip_parameter_map; | 1783 Label skip_parameter_map; |
| 1790 Label skip3; | 1784 Label skip3; |
| 1791 __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0))); | 1785 __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0))); |
| (...skipping 45 matching lines...) |
| 1837 __ Daddu(t2, a4, a0); | 1831 __ Daddu(t2, a4, a0); |
| 1838 __ sd(t1, MemOperand(t2)); | 1832 __ sd(t1, MemOperand(t2)); |
| 1839 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); | 1833 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); |
| 1840 __ Daddu(t2, a1, a0); | 1834 __ Daddu(t2, a1, a0); |
| 1841 __ sd(a7, MemOperand(t2)); | 1835 __ sd(a7, MemOperand(t2)); |
| 1842 __ Daddu(t1, t1, Operand(Smi::FromInt(1))); | 1836 __ Daddu(t1, t1, Operand(Smi::FromInt(1))); |
| 1843 __ bind(&parameters_test); | 1837 __ bind(&parameters_test); |
| 1844 __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0))); | 1838 __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0))); |
| 1845 | 1839 |
| 1846 // Restore a5 = argument count (tagged). | 1840 // Restore a5 = argument count (tagged). |
| 1847 __ ld(a5, FieldMemOperand(v0, kLengthOffset)); | 1841 __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
| 1848 | 1842 |
| 1849 __ bind(&skip_parameter_map); | 1843 __ bind(&skip_parameter_map); |
| 1850 // v0 = address of new object (tagged) | 1844 // v0 = address of new object (tagged) |
| 1851 // a1 = address of backing store (tagged) | 1845 // a1 = address of backing store (tagged) |
| 1852 // a5 = argument count (tagged) | 1846 // a5 = argument count (tagged) |
| 1853 // a6 = mapped parameter count (tagged) | 1847 // a6 = mapped parameter count (tagged) |
| 1854 // t1 = scratch | 1848 // t1 = scratch |
| 1855 // Copy arguments header and remaining slots (if there are any). | 1849 // Copy arguments header and remaining slots (if there are any). |
| 1856 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); | 1850 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); |
| 1857 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset)); | 1851 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset)); |
| (...skipping 42 matching lines...) |
| 1900 | 1894 |
| 1901 // Perform tail call to the entry. | 1895 // Perform tail call to the entry. |
| 1902 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); | 1896 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); |
| 1903 | 1897 |
| 1904 __ bind(&slow); | 1898 __ bind(&slow); |
| 1905 PropertyAccessCompiler::TailCallBuiltin( | 1899 PropertyAccessCompiler::TailCallBuiltin( |
| 1906 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1900 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
| 1907 } | 1901 } |
| 1908 | 1902 |
| 1909 | 1903 |
| 1910 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | |
| 1911 // a1 : function | |
| 1912 // a2 : number of parameters (tagged) | |
| 1913 // a3 : parameters pointer | |
| 1914 | |
| 1915 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | |
| 1916 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | |
| 1917 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | |
| 1918 | |
| 1919 // Check if the calling frame is an arguments adaptor frame. | |
| 1920 Label try_allocate, runtime; | |
| 1921 __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
| 1922 __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset)); | |
| 1923 __ Branch(&try_allocate, ne, a0, | |
| 1924 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
| 1925 | |
| 1926 // Patch the arguments.length and the parameters pointer. | |
| 1927 __ ld(a2, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
| 1928 __ SmiScale(at, a2, kPointerSizeLog2); | |
| 1929 __ Daddu(a4, a4, Operand(at)); | |
| 1930 __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset)); | |
| 1931 | |
| 1932 // Try the new space allocation. Start out with computing the size | |
| 1933 // of the arguments object and the elements array in words. | |
| 1934 Label add_arguments_object; | |
| 1935 __ bind(&try_allocate); | |
| 1936 __ SmiUntag(t1, a2); | |
| 1937 __ Branch(&add_arguments_object, eq, a2, Operand(zero_reg)); | |
| 1938 | |
| 1939 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize / kPointerSize)); | |
| 1940 __ bind(&add_arguments_object); | |
| 1941 __ Daddu(t1, t1, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); | |
| 1942 | |
| 1943 // Do the allocation of both objects in one go. | |
| 1944 __ Allocate(t1, v0, a4, a5, &runtime, | |
| 1945 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | |
| 1946 | |
| 1947 // Get the arguments boilerplate from the current native context. | |
| 1948 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, a4); | |
| 1949 | |
| 1950 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset)); | |
| 1951 __ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex); | |
| 1952 __ sd(a5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
| 1953 __ sd(a5, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
| 1954 | |
| 1955 // Get the length (smi tagged) and set that as an in-object property too. | |
| 1956 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | |
| 1957 __ AssertSmi(a2); | |
| 1958 __ sd(a2, | |
| 1959 FieldMemOperand(v0, JSObject::kHeaderSize + | |
| 1960 Heap::kArgumentsLengthIndex * kPointerSize)); | |
| 1961 | |
| 1962 Label done; | |
| 1963 __ Branch(&done, eq, a2, Operand(zero_reg)); | |
| 1964 | |
| 1965 // Set up the elements pointer in the allocated arguments object and | |
| 1966 // initialize the header in the elements fixed array. | |
| 1967 __ Daddu(a4, v0, Operand(Heap::kStrictArgumentsObjectSize)); | |
| 1968 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
| 1969 __ LoadRoot(a5, Heap::kFixedArrayMapRootIndex); | |
| 1970 __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset)); | |
| 1971 __ sd(a2, FieldMemOperand(a4, FixedArray::kLengthOffset)); | |
| 1972 __ SmiUntag(a2); | |
| 1973 | |
| 1974 // Copy the fixed array slots. | |
| 1975 Label loop; | |
| 1976 // Set up a4 to point to the first array slot. | |
| 1977 __ Daddu(a4, a4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 1978 __ bind(&loop); | |
| 1979 // Pre-decrement a3 with kPointerSize on each iteration. | |
| 1980 // Pre-decrement in order to skip receiver. | |
| 1981 __ Daddu(a3, a3, Operand(-kPointerSize)); | |
| 1982 __ ld(a5, MemOperand(a3)); | |
| 1983 // Post-increment a4 with kPointerSize on each iteration. | |
| 1984 __ sd(a5, MemOperand(a4)); | |
| 1985 __ Daddu(a4, a4, Operand(kPointerSize)); | |
| 1986 __ Dsubu(a2, a2, Operand(1)); | |
| 1987 __ Branch(&loop, ne, a2, Operand(zero_reg)); | |
| 1988 | |
| 1989 // Return. | |
| 1990 __ bind(&done); | |
| 1991 __ Ret(); | |
| 1992 | |
| 1993 // Do the runtime call to allocate the arguments object. | |
| 1994 __ bind(&runtime); | |
| 1995 __ Push(a1, a3, a2); | |
| 1996 __ TailCallRuntime(Runtime::kNewStrictArguments); | |
| 1997 } | |
| 1998 | |
| 1999 | |
| 2000 void RegExpExecStub::Generate(MacroAssembler* masm) { | 1904 void RegExpExecStub::Generate(MacroAssembler* masm) { |
| 2001 // Just jump directly to runtime if native RegExp is not selected at compile | 1905 // Just jump directly to runtime if native RegExp is not selected at compile |
| 2002 // time, or if the regexp entry in generated code is turned off by a runtime | 1906 // time, or if the regexp entry in generated code is turned off by a runtime |
| 2003 // switch or at compilation. | 1907 // switch or at compilation. |
| 2004 #ifdef V8_INTERPRETED_REGEXP | 1908 #ifdef V8_INTERPRETED_REGEXP |
| 2005 __ TailCallRuntime(Runtime::kRegExpExec); | 1909 __ TailCallRuntime(Runtime::kRegExpExec); |
| 2006 #else // V8_INTERPRETED_REGEXP | 1910 #else // V8_INTERPRETED_REGEXP |
| 2007 | 1911 |
| 2008 // Stack frame on entry. | 1912 // Stack frame on entry. |
| 2009 // sp[0]: last_match_info (expected JSArray) | 1913 // sp[0]: last_match_info (expected JSArray) |
| (...skipping 3292 matching lines...) |
| 5302 __ Push(a0, a2, a1); | 5206 __ Push(a0, a2, a1); |
| 5303 __ CallRuntime(Runtime::kAllocateInNewSpace); | 5207 __ CallRuntime(Runtime::kAllocateInNewSpace); |
| 5304 __ Pop(a0, a2); | 5208 __ Pop(a0, a2); |
| 5305 __ SmiUntag(a0); | 5209 __ SmiUntag(a0); |
| 5306 } | 5210 } |
| 5307 __ jmp(&done_allocate); | 5211 __ jmp(&done_allocate); |
| 5308 } | 5212 } |
| 5309 } | 5213 } |
| 5310 | 5214 |
| 5311 | 5215 |
| 5216 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { |
| 5217 // ----------- S t a t e ------------- |
| 5218 // -- a1 : function |
| 5219 // -- cp : context |
| 5220 // -- fp : frame pointer |
| 5221 // -- ra : return address |
| 5222 // ----------------------------------- |
| 5223 __ AssertFunction(a1); |
| 5224 |
| 5225 // For Ignition we need to skip all possible handler/stub frames until |
| 5226 // we reach the JavaScript frame for the function (similar to what the |
| 5227 // runtime fallback implementation does). So make a2 point to that |
| 5228 // JavaScript frame. |
| 5229 { |
| 5230 Label loop, loop_entry; |
| 5231 __ Branch(USE_DELAY_SLOT, &loop_entry); |
| 5232 __ mov(a2, fp); // In delay slot. |
| 5233 __ bind(&loop); |
| 5234 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); |
| 5235 __ bind(&loop_entry); |
| 5236 __ ld(a3, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); |
| 5237 __ Branch(&loop, ne, a1, Operand(a3)); |
| 5238 } |
| 5239 |
| 5240 // Check if we have an arguments adaptor frame below the function frame. |
| 5241 Label arguments_adaptor, arguments_done; |
| 5242 __ ld(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); |
| 5243 __ ld(a0, MemOperand(a3, StandardFrameConstants::kContextOffset)); |
| 5244 __ Branch(&arguments_adaptor, eq, a0, |
| 5245 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 5246 { |
| 5247 __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 5248 __ ld(a0, |
| 5249 FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 5250 __ Dlsa(a2, a2, a0, kPointerSizeLog2); |
| 5251 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - |
| 5252 1 * kPointerSize)); |
| 5253 } |
| 5254 __ Branch(&arguments_done); |
| 5255 __ bind(&arguments_adaptor); |
| 5256 { |
| 5257 __ SmiLoadUntag( |
| 5258 a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 5259 __ Dlsa(a2, a3, a0, kPointerSizeLog2); |
| 5260 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset - |
| 5261 1 * kPointerSize)); |
| 5262 } |
| 5263 __ bind(&arguments_done); |
| 5264 |
| 5265 // ----------- S t a t e ------------- |
| 5266 // -- cp : context |
| 5267 // -- a0 : number of rest parameters |
| 5268 // -- a2 : pointer to first rest parameters |
| 5269 // -- ra : return address |
| 5270 // ----------------------------------- |
| 5271 |
| 5272 // Allocate space for the rest parameter array plus the backing store. |
| 5273 Label allocate, done_allocate; |
| 5274 __ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize)); |
| 5275 __ Dlsa(a1, a1, a0, kPointerSizeLog2); |
| 5276 __ Allocate(a1, v0, a3, a4, &allocate, TAG_OBJECT); |
| 5277 __ bind(&done_allocate); |
| 5278 |
| 5279 // Compute arguments.length in a4. |
| 5280 __ SmiTag(a4, a0); |
| 5281 |
| 5282 // Set up the elements array in v0. |
| 5283 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); |
| 5284 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset)); |
| 5285 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
| 5286 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize)); |
| 5287 { |
| 5288 Label loop, done_loop; |
| 5289 __ Dlsa(a1, a3, a0, kPointerSizeLog2); |
| 5290 __ bind(&loop); |
| 5291 __ Branch(&done_loop, eq, a1, Operand(a3)); |
| 5292 __ ld(at, MemOperand(a2, 0 * kPointerSize)); |
| 5293 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize)); |
| 5294 __ Dsubu(a2, a2, Operand(1 * kPointerSize)); |
| 5295 __ Daddu(a3, a3, Operand(1 * kPointerSize)); |
| 5296 __ b(&loop); |
| 5297 __ bind(&done_loop); |
| 5298 } |
| 5299 |
| 5300 // Set up the strict arguments object in a3. |
| 5301 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at); |
| 5302 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset)); |
| 5303 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
| 5304 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset)); |
| 5305 __ sd(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset)); |
| 5306 __ sd(a4, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset)); |
| 5307 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize); |
| 5308 __ Ret(USE_DELAY_SLOT); |
| 5309 __ mov(v0, a3); // In delay slot |
| 5310 |
| 5311 // Fall back to %AllocateInNewSpace. |
| 5312 __ bind(&allocate); |
| 5313 { |
| 5314 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5315 __ SmiTag(a0); |
| 5316 __ SmiTag(a1); |
| 5317 __ Push(a0, a2, a1); |
| 5318 __ CallRuntime(Runtime::kAllocateInNewSpace); |
| 5319 __ Pop(a0, a2); |
| 5320 __ SmiUntag(a0); |
| 5321 } |
| 5322 __ jmp(&done_allocate); |
| 5323 } |
| 5324 |
| 5325 |
| 5312 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5326 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5313 Register context_reg = cp; | 5327 Register context_reg = cp; |
| 5314 Register slot_reg = a2; | 5328 Register slot_reg = a2; |
| 5315 Register result_reg = v0; | 5329 Register result_reg = v0; |
| 5316 Label slow_case; | 5330 Label slow_case; |
| 5317 | 5331 |
| 5318 // Go up context chain to the script context. | 5332 // Go up context chain to the script context. |
| 5319 for (int i = 0; i < depth(); ++i) { | 5333 for (int i = 0; i < depth(); ++i) { |
| 5320 __ ld(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX)); | 5334 __ ld(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX)); |
| 5321 context_reg = result_reg; | 5335 context_reg = result_reg; |
| (...skipping 446 matching lines...) |
| 5768 return_value_operand, NULL); | 5782 return_value_operand, NULL); |
| 5769 } | 5783 } |
| 5770 | 5784 |
| 5771 | 5785 |
| 5772 #undef __ | 5786 #undef __ |
| 5773 | 5787 |
| 5774 } // namespace internal | 5788 } // namespace internal |
| 5775 } // namespace v8 | 5789 } // namespace v8 |
| 5776 | 5790 |
| 5777 #endif // V8_TARGET_ARCH_MIPS64 | 5791 #endif // V8_TARGET_ARCH_MIPS64 |