Chromium Code Reviews

Side by Side Diff: src/builtins/mips64/builtins-mips64.cc

Issue 2507683003: Port improvements of some builtins. (Closed)
Patch Set: Another optimisation Created 4 years, 1 month ago
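Overview note: the recurring change in this patch replaces a 64-bit load of a tagged Smi followed by an explicit SmiUntag with a single 32-bit lw through UntagSmiMemOperand / UntagSmiFieldMemOperand. On MIPS64, V8 keeps the 32-bit Smi payload in the upper half of the 64-bit tagged slot (compare the dsll32 used for tagging in EnterArgumentsAdaptorFrame below), so loading only the word that holds the payload already yields the untagged value. A minimal standalone sketch of the idea in plain C++ (not V8 code; the +4 offset assumes a little-endian build, a big-endian build would read offset 0 instead):

  #include <cstdint>
  #include <cstdio>
  #include <cstring>

  int main() {
    // Model of a MIPS64 Smi: the 32-bit payload sits in the upper half of a
    // 64-bit slot; the lower half holds the (all-zero) tag bits.
    int32_t payload = 42;
    uint64_t tagged_slot =
        static_cast<uint64_t>(static_cast<uint32_t>(payload)) << 32;

    // Old pattern (ld + SmiUntag): 64-bit load, then arithmetic shift right by 32.
    int64_t loaded = static_cast<int64_t>(tagged_slot);
    int32_t untagged_old = static_cast<int32_t>(loaded >> 32);

    // New pattern (lw + UntagSmi*MemOperand): one 32-bit load of just the word
    // that holds the payload -- offset +4 on a little-endian target.
    int32_t untagged_new;
    std::memcpy(&untagged_new,
                reinterpret_cast<const char*>(&tagged_slot) + 4,
                sizeof(untagged_new));

    std::printf("%d %d\n", untagged_old, untagged_new);  // prints "42 42"
    return 0;
  }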
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS64 5 #if V8_TARGET_ARCH_MIPS64
6 6
7 #include "src/codegen.h" 7 #include "src/codegen.h"
8 #include "src/debug/debug.h" 8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h" 9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h" 10 #include "src/full-codegen/full-codegen.h"
(...skipping 796 matching lines...)
807 { 807 {
808 // Enter a new JavaScript frame, and initialize its slots as they were when 808 // Enter a new JavaScript frame, and initialize its slots as they were when
809 // the generator was suspended. 809 // the generator was suspended.
810 FrameScope scope(masm, StackFrame::MANUAL); 810 FrameScope scope(masm, StackFrame::MANUAL);
811 __ Push(ra, fp); 811 __ Push(ra, fp);
812 __ Move(fp, sp); 812 __ Move(fp, sp);
813 __ Push(cp, a4); 813 __ Push(cp, a4);
814 814
815 // Restore the operand stack. 815 // Restore the operand stack.
816 __ ld(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); 816 __ ld(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
817 __ ld(a3, FieldMemOperand(a0, FixedArray::kLengthOffset)); 817 __ lw(a3, UntagSmiFieldMemOperand(a0, FixedArray::kLengthOffset));
818 __ SmiUntag(a3);
819 __ Daddu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 818 __ Daddu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
820 __ Dlsa(a3, a0, a3, kPointerSizeLog2); 819 __ Dlsa(a3, a0, a3, kPointerSizeLog2);
821 { 820 {
822 Label done_loop, loop; 821 Label done_loop, loop;
823 __ bind(&loop); 822 __ bind(&loop);
824 __ Branch(&done_loop, eq, a0, Operand(a3)); 823 __ Branch(&done_loop, eq, a0, Operand(a3));
825 __ ld(a5, MemOperand(a0)); 824 __ ld(a5, MemOperand(a0));
826 __ Push(a5); 825 __ Push(a5);
827 __ Branch(USE_DELAY_SLOT, &loop); 826 __ Branch(USE_DELAY_SLOT, &loop);
828 __ daddiu(a0, a0, kPointerSize); // In delay slot. 827 __ daddiu(a0, a0, kPointerSize); // In delay slot.
829 __ bind(&done_loop); 828 __ bind(&done_loop);
830 } 829 }
831 830
832 // Reset operand stack so we don't leak. 831 // Reset operand stack so we don't leak.
833 __ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex); 832 __ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex);
834 __ sd(a5, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); 833 __ sd(a5, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
835 834
836 // Resume the generator function at the continuation. 835 // Resume the generator function at the continuation.
837 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); 836 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
838 __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset)); 837 __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
839 __ Daddu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag)); 838 __ Daddu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
840 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); 839 __ lw(a2,
841 __ SmiUntag(a2); 840 UntagSmiFieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
842 __ Daddu(a3, a3, Operand(a2)); 841 __ Daddu(a3, a3, Operand(a2));
843 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 842 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
844 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); 843 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
845 __ Move(v0, a1); // Continuation expects generator object in v0. 844 __ Move(v0, a1); // Continuation expects generator object in v0.
846 __ Jump(a3); 845 __ Jump(a3);
847 } 846 }
848 847
849 __ bind(&prepare_step_in_if_stepping); 848 __ bind(&prepare_step_in_if_stepping);
850 { 849 {
851 FrameScope scope(masm, StackFrame::INTERNAL); 850 FrameScope scope(masm, StackFrame::INTERNAL);
(...skipping 459 matching lines...)
1311 // Check function data field is actually a BytecodeArray object. 1310 // Check function data field is actually a BytecodeArray object.
1312 __ SmiTst(kInterpreterBytecodeArrayRegister, at); 1311 __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1313 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, 1312 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1314 Operand(zero_reg)); 1313 Operand(zero_reg));
1315 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); 1314 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1316 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, 1315 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1317 Operand(BYTECODE_ARRAY_TYPE)); 1316 Operand(BYTECODE_ARRAY_TYPE));
1318 } 1317 }
1319 1318
1320 // Get the target bytecode offset from the frame. 1319 // Get the target bytecode offset from the frame.
1321 __ ld(kInterpreterBytecodeOffsetRegister, 1320 __ lw(
1322 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); 1321 kInterpreterBytecodeOffsetRegister,
1323 __ SmiUntag(kInterpreterBytecodeOffsetRegister); 1322 UntagSmiMemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1324 1323
1325 // Dispatch to the target bytecode. 1324 // Dispatch to the target bytecode.
1326 __ Daddu(a1, kInterpreterBytecodeArrayRegister, 1325 __ Daddu(a1, kInterpreterBytecodeArrayRegister,
1327 kInterpreterBytecodeOffsetRegister); 1326 kInterpreterBytecodeOffsetRegister);
1328 __ lbu(a1, MemOperand(a1)); 1327 __ lbu(a1, MemOperand(a1));
1329 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2); 1328 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
1330 __ ld(a1, MemOperand(a1)); 1329 __ ld(a1, MemOperand(a1));
1331 __ Jump(a1); 1330 __ Jump(a1);
1332 } 1331 }
1333 1332
(...skipping 322 matching lines...)
1656 Deoptimizer::BailoutType type) { 1655 Deoptimizer::BailoutType type) {
1657 { 1656 {
1658 FrameScope scope(masm, StackFrame::INTERNAL); 1657 FrameScope scope(masm, StackFrame::INTERNAL);
1659 // Pass the function and deoptimization type to the runtime system. 1658 // Pass the function and deoptimization type to the runtime system.
1660 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); 1659 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1661 __ push(a0); 1660 __ push(a0);
1662 __ CallRuntime(Runtime::kNotifyDeoptimized); 1661 __ CallRuntime(Runtime::kNotifyDeoptimized);
1663 } 1662 }
1664 1663
1665 // Get the full codegen state from the stack and untag it -> a6. 1664 // Get the full codegen state from the stack and untag it -> a6.
1666 __ ld(a6, MemOperand(sp, 0 * kPointerSize)); 1665 __ lw(a6, UntagSmiMemOperand(sp, 0 * kPointerSize));
1667 __ SmiUntag(a6);
1668 // Switch on the state. 1666 // Switch on the state.
1669 Label with_tos_register, unknown_state; 1667 Label with_tos_register, unknown_state;
1670 __ Branch( 1668 __ Branch(
1671 &with_tos_register, ne, a6, 1669 &with_tos_register, ne, a6,
1672 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS))); 1670 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
1673 __ Ret(USE_DELAY_SLOT); 1671 __ Ret(USE_DELAY_SLOT);
1674 // Safe to fill delay slot Addu will emit one instruction. 1672 // Safe to fill delay slot Addu will emit one instruction.
1675 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state. 1673 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1676 1674
1677 __ bind(&with_tos_register); 1675 __ bind(&with_tos_register);
(...skipping 147 matching lines...)
1825 if (has_handler_frame) { 1823 if (has_handler_frame) {
1826 __ LeaveFrame(StackFrame::STUB); 1824 __ LeaveFrame(StackFrame::STUB);
1827 } 1825 }
1828 1826
1829 // Load deoptimization data from the code object. 1827 // Load deoptimization data from the code object.
1830 // <deopt_data> = <code>[#deoptimization_data_offset] 1828 // <deopt_data> = <code>[#deoptimization_data_offset]
1831 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); 1829 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1832 1830
1833 // Load the OSR entrypoint offset from the deoptimization data. 1831 // Load the OSR entrypoint offset from the deoptimization data.
1834 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] 1832 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1835 __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt( 1833 __ lw(a1,
1836 DeoptimizationInputData::kOsrPcOffsetIndex) - 1834 UntagSmiMemOperand(a1, FixedArray::OffsetOfElementAt(
1837 kHeapObjectTag)); 1835 DeoptimizationInputData::kOsrPcOffsetIndex) -
1838 __ SmiUntag(a1); 1836 kHeapObjectTag));
1839 1837
1840 // Compute the target address = code_obj + header_size + osr_offset 1838 // Compute the target address = code_obj + header_size + osr_offset
1841 // <entry_addr> = <code_obj> + #header_size + <osr_offset> 1839 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1842 __ daddu(v0, v0, a1); 1840 __ daddu(v0, v0, a1);
1843 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); 1841 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1844 1842
1845 // And "return" to the OSR entry point of the function. 1843 // And "return" to the OSR entry point of the function.
1846 __ Ret(); 1844 __ Ret();
1847 } 1845 }
1848 1846
1849 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1847 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1850 Generate_OnStackReplacementHelper(masm, false); 1848 Generate_OnStackReplacementHelper(masm, false);
1851 } 1849 }
1852 1850
1853 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { 1851 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1854 Generate_OnStackReplacementHelper(masm, true); 1852 Generate_OnStackReplacementHelper(masm, true);
1855 } 1853 }
1856 1854
1857 // static 1855 // static
1858 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { 1856 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1859 // ----------- S t a t e ------------- 1857 // ----------- S t a t e -------------
1860 // -- a0 : argc 1858 // -- a0 : argc
1861 // -- sp[0] : argArray 1859 // -- sp[0] : argArray
1862 // -- sp[4] : thisArg 1860 // -- sp[4] : thisArg
1863 // -- sp[8] : receiver 1861 // -- sp[8] : receiver
1864 // ----------------------------------- 1862 // -----------------------------------
1865 1863
1864 Register argc = a0;
1865 Register arg_array = a0;
1866 Register receiver = a1;
1867 Register this_arg = a2;
1868 Register undefined_value = a3;
1869 Register scratch = a4;
1870
1871 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1866 // 1. Load receiver into a1, argArray into a0 (if present), remove all 1872 // 1. Load receiver into a1, argArray into a0 (if present), remove all
1867 // arguments from the stack (including the receiver), and push thisArg (if 1873 // arguments from the stack (including the receiver), and push thisArg (if
1868 // present) instead. 1874 // present) instead.
1869 { 1875 {
1870 Label no_arg; 1876 // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
1871 Register scratch = a4; 1877 // consistent state for a simple pop operation.
1872 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 1878
1873 __ mov(a3, a2); 1879 __ Dsubu(sp, sp, Operand(2 * kPointerSize));
1874 // Dlsa() cannot be used here as the scratch value is used later. 1880 __ Dlsa(sp, sp, argc, kPointerSizeLog2);
1875 __ dsll(scratch, a0, kPointerSizeLog2); 1881 __ mov(scratch, argc);
1876 __ Daddu(a0, sp, Operand(scratch)); 1882 __ Pop(this_arg, arg_array); // Overwrite argc
1877 __ ld(a1, MemOperand(a0)); // receiver 1883 __ Movz(arg_array, undefined_value, scratch); // if argc == 0
1878 __ Dsubu(a0, a0, Operand(kPointerSize)); 1884 __ Movz(this_arg, undefined_value, scratch); // if argc == 0
1879 __ Branch(&no_arg, lt, a0, Operand(sp)); 1885 __ Dsubu(scratch, scratch, Operand(1));
1880 __ ld(a2, MemOperand(a0)); // thisArg 1886 __ Movz(arg_array, undefined_value, scratch); // if argc == 1
1881 __ Dsubu(a0, a0, Operand(kPointerSize)); 1887 __ ld(receiver, MemOperand(sp));
1882 __ Branch(&no_arg, lt, a0, Operand(sp)); 1888 __ sd(this_arg, MemOperand(sp));
1883 __ ld(a3, MemOperand(a0)); // argArray
1884 __ bind(&no_arg);
1885 __ Daddu(sp, sp, Operand(scratch));
1886 __ sd(a2, MemOperand(sp));
1887 __ mov(a0, a3);
1888 } 1889 }
1889 1890
1890 // ----------- S t a t e ------------- 1891 // ----------- S t a t e -------------
1891 // -- a0 : argArray 1892 // -- a0 : argArray
1892 // -- a1 : receiver 1893 // -- a1 : receiver
1894 // -- a3 : undefined root value
1893 // -- sp[0] : thisArg 1895 // -- sp[0] : thisArg
1894 // ----------------------------------- 1896 // -----------------------------------
1895 1897
1896 // 2. Make sure the receiver is actually callable. 1898 // 2. Make sure the receiver is actually callable.
1897 Label receiver_not_callable; 1899 Label receiver_not_callable;
1898 __ JumpIfSmi(a1, &receiver_not_callable); 1900 __ JumpIfSmi(receiver, &receiver_not_callable);
1899 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); 1901 __ ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset));
1900 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 1902 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1901 __ And(a4, a4, Operand(1 << Map::kIsCallable)); 1903 __ And(a4, a4, Operand(1 << Map::kIsCallable));
1902 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg)); 1904 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
1903 1905
1904 // 3. Tail call with no arguments if argArray is null or undefined. 1906 // 3. Tail call with no arguments if argArray is null or undefined.
1905 Label no_arguments; 1907 Label no_arguments;
1906 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments); 1908 __ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments);
1907 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments); 1909 __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));
1908 1910
1909 // 4a. Apply the receiver to the given argArray (passing undefined for 1911 // 4a. Apply the receiver to the given argArray (passing undefined for
1910 // new.target). 1912 // new.target).
1911 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); 1913 DCHECK(undefined_value.is(a3));
1912 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1914 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1913 1915
1914 // 4b. The argArray is either null or undefined, so we tail call without any 1916 // 4b. The argArray is either null or undefined, so we tail call without any
1915 // arguments to the receiver. 1917 // arguments to the receiver.
1916 __ bind(&no_arguments); 1918 __ bind(&no_arguments);
1917 { 1919 {
1918 __ mov(a0, zero_reg); 1920 __ mov(a0, zero_reg);
1921 DCHECK(receiver.is(a1));
1919 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1922 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1920 } 1923 }
1921 1924
1922 // 4c. The receiver is not callable, throw an appropriate TypeError. 1925 // 4c. The receiver is not callable, throw an appropriate TypeError.
1923 __ bind(&receiver_not_callable); 1926 __ bind(&receiver_not_callable);
1924 { 1927 {
1925 __ sd(a1, MemOperand(sp)); 1928 __ sd(receiver, MemOperand(sp));
1926 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 1929 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1927 } 1930 }
1928 } 1931 }
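Side note on the rewritten Generate_FunctionPrototypeApply above: the old bounds-checked walk over the arguments is replaced by a branch-free sequence. The stack is first padded so that a fixed two-slot Pop is always valid, and absent arguments are then defaulted to undefined with Movz, a conditional move that fires when its third operand is zero. A rough standalone model of that defaulting logic (plain C++, not V8 code; the constants are placeholders):

  #include <cstdint>
  #include <cstdio>

  // Model of the MIPS conditional move: Movz(dst, src, cond) copies src into
  // dst only when cond == 0, with no branch in the generated code.
  static void movz(uint64_t* dst, uint64_t src, int64_t cond) {
    if (cond == 0) *dst = src;
  }

  int main() {
    const uint64_t kUndefined = 0;  // stands in for the undefined root value
    for (int64_t argc = 0; argc <= 2; ++argc) {
      // Values popped from the over-claimed stack slots; junk when argc is small.
      uint64_t this_arg = 0x1111, arg_array = 0x2222;
      int64_t scratch = argc;
      movz(&arg_array, kUndefined, scratch);  // argc == 0: no thisArg, no argArray
      movz(&this_arg, kUndefined, scratch);   // argc == 0
      scratch -= 1;
      movz(&arg_array, kUndefined, scratch);  // argc == 1: only thisArg present
      std::printf("argc=%lld this_arg=%#llx arg_array=%#llx\n",
                  static_cast<long long>(argc),
                  static_cast<unsigned long long>(this_arg),
                  static_cast<unsigned long long>(arg_array));
    }
    return 0;
  }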
1929 1932
1930 // static 1933 // static
1931 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { 1934 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1932 // 1. Make sure we have at least one argument. 1935 // 1. Make sure we have at least one argument.
1933 // a0: actual number of arguments 1936 // a0: actual number of arguments
1934 { 1937 {
1935 Label done; 1938 Label done;
(...skipping 29 matching lines...)
1965 __ Pop(); 1968 __ Pop();
1966 } 1969 }
1967 1970
1968 // 4. Call the callable. 1971 // 4. Call the callable.
1969 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1972 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1970 } 1973 }
1971 1974
1972 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { 1975 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1973 // ----------- S t a t e ------------- 1976 // ----------- S t a t e -------------
1974 // -- a0 : argc 1977 // -- a0 : argc
1975 // -- sp[0] : argumentsList 1978 // -- sp[0] : argumentsList (if argc == 3)
1976 // -- sp[4] : thisArgument 1979 // -- sp[4] : thisArgument (if argc >= 2)
1977 // -- sp[8] : target 1980 // -- sp[8] : target (if argc >= 1)
1978 // -- sp[12] : receiver 1981 // -- sp[12] : receiver
1979 // ----------------------------------- 1982 // -----------------------------------
1980 1983
1984 Register argc = a0;
1985 Register arguments_list = a0;
1986 Register target = a1;
1987 Register this_argument = a2;
1988 Register undefined_value = a3;
1989 Register scratch = a4;
1990
1991 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1981 // 1. Load target into a1 (if present), argumentsList into a0 (if present), 1992 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
1982 // remove all arguments from the stack (including the receiver), and push 1993 // remove all arguments from the stack (including the receiver), and push
1983 // thisArgument (if present) instead. 1994 // thisArgument (if present) instead.
1984 { 1995 {
1985 Label no_arg; 1996 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1986 Register scratch = a4; 1997 // consistent state for a simple pop operation.
1987 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); 1998
1988 __ mov(a2, a1); 1999 __ Dsubu(sp, sp, Operand(3 * kPointerSize));
1989 __ mov(a3, a1); 2000 __ Dlsa(sp, sp, argc, kPointerSizeLog2);
1990 __ dsll(scratch, a0, kPointerSizeLog2); 2001 __ mov(scratch, argc);
1991 __ mov(a0, scratch); 2002 __ Pop(target, this_argument, arguments_list);
1992 __ Dsubu(a0, a0, Operand(kPointerSize)); 2003 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0
1993 __ Branch(&no_arg, lt, a0, Operand(zero_reg)); 2004 __ Movz(this_argument, undefined_value, scratch); // if argc == 0
1994 __ Daddu(a0, sp, Operand(a0)); 2005 __ Movz(target, undefined_value, scratch); // if argc == 0
1995 __ ld(a1, MemOperand(a0)); // target 2006 __ Dsubu(scratch, scratch, Operand(1));
1996 __ Dsubu(a0, a0, Operand(kPointerSize)); 2007 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1
1997 __ Branch(&no_arg, lt, a0, Operand(sp)); 2008 __ Movz(this_argument, undefined_value, scratch); // if argc == 1
1998 __ ld(a2, MemOperand(a0)); // thisArgument 2009 __ Dsubu(scratch, scratch, Operand(1));
1999 __ Dsubu(a0, a0, Operand(kPointerSize)); 2010 __ Movz(arguments_list, undefined_value, scratch); // if argc == 2
2000 __ Branch(&no_arg, lt, a0, Operand(sp)); 2011
2001 __ ld(a3, MemOperand(a0)); // argumentsList 2012 __ sd(this_argument, MemOperand(sp, 0)); // Overwrite receiver
2002 __ bind(&no_arg);
2003 __ Daddu(sp, sp, Operand(scratch));
2004 __ sd(a2, MemOperand(sp));
2005 __ mov(a0, a3);
2006 } 2013 }
2007 2014
2008 // ----------- S t a t e ------------- 2015 // ----------- S t a t e -------------
2009 // -- a0 : argumentsList 2016 // -- a0 : argumentsList
2010 // -- a1 : target 2017 // -- a1 : target
2018 // -- a3 : undefined root value
2011 // -- sp[0] : thisArgument 2019 // -- sp[0] : thisArgument
2012 // ----------------------------------- 2020 // -----------------------------------
2013 2021
2014 // 2. Make sure the target is actually callable. 2022 // 2. Make sure the target is actually callable.
2015 Label target_not_callable; 2023 Label target_not_callable;
2016 __ JumpIfSmi(a1, &target_not_callable); 2024 __ JumpIfSmi(target, &target_not_callable);
2017 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); 2025 __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
2018 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 2026 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
2019 __ And(a4, a4, Operand(1 << Map::kIsCallable)); 2027 __ And(a4, a4, Operand(1 << Map::kIsCallable));
2020 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg)); 2028 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
2021 2029
2022 // 3a. Apply the target to the given argumentsList (passing undefined for 2030 // 3a. Apply the target to the given argumentsList (passing undefined for
2023 // new.target). 2031 // new.target).
2024 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); 2032 DCHECK(undefined_value.is(a3));
2025 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 2033 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2026 2034
2027 // 3b. The target is not callable, throw an appropriate TypeError. 2035 // 3b. The target is not callable, throw an appropriate TypeError.
2028 __ bind(&target_not_callable); 2036 __ bind(&target_not_callable);
2029 { 2037 {
2030 __ sd(a1, MemOperand(sp)); 2038 __ sd(target, MemOperand(sp));
2031 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 2039 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2032 } 2040 }
2033 } 2041 }
2034 2042
2035 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { 2043 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2036 // ----------- S t a t e ------------- 2044 // ----------- S t a t e -------------
2037 // -- a0 : argc 2045 // -- a0 : argc
2038 // -- sp[0] : new.target (optional) 2046 // -- sp[0] : new.target (optional) (dummy value if argc <= 2)
2039 // -- sp[4] : argumentsList 2047 // -- sp[4] : argumentsList (dummy value if argc <= 1)
2040 // -- sp[8] : target 2048 // -- sp[8] : target (dummy value if argc == 0)
2041 // -- sp[12] : receiver 2049 // -- sp[12] : receiver
2042 // ----------------------------------- 2050 // -----------------------------------
2051 Register argc = a0;
2052 Register arguments_list = a0;
2053 Register target = a1;
2054 Register new_target = a3;
2055 Register undefined_value = a4;
2056 Register scratch = a5;
2043 2057
2044 // 1. Load target into a1 (if present), argumentsList into a0 (if present), 2058 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2045 // new.target into a3 (if present, otherwise use target), remove all 2059 // new.target into a3 (if present, otherwise use target), remove all
2046 // arguments from the stack (including the receiver), and push thisArgument 2060 // arguments from the stack (including the receiver), and push thisArgument
2047 // (if present) instead. 2061 // (if present) instead.
2048 { 2062 {
2049 Label no_arg; 2063 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
2050 Register scratch = a4; 2064 // consistent state for a simple pop operation.
2051 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); 2065
2052 __ mov(a2, a1); 2066 __ Dsubu(sp, sp, Operand(3 * kPointerSize));
2053 // Dlsa() cannot be used here as the scratch value is used later. 2067 __ Dlsa(sp, sp, argc, kPointerSizeLog2);
2054 __ dsll(scratch, a0, kPointerSizeLog2); 2068 __ mov(scratch, argc);
2055 __ Daddu(a0, sp, Operand(scratch)); 2069 __ Pop(target, arguments_list, new_target);
2056 __ sd(a2, MemOperand(a0)); // receiver 2070 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0
2057 __ Dsubu(a0, a0, Operand(kPointerSize)); 2071 __ Movz(new_target, undefined_value, scratch); // if argc == 0
2058 __ Branch(&no_arg, lt, a0, Operand(sp)); 2072 __ Movz(target, undefined_value, scratch); // if argc == 0
2059 __ ld(a1, MemOperand(a0)); // target 2073 __ Dsubu(scratch, scratch, Operand(1));
2060 __ mov(a3, a1); // new.target defaults to target 2074 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1
2061 __ Dsubu(a0, a0, Operand(kPointerSize)); 2075 __ Movz(new_target, target, scratch); // if argc == 1
2062 __ Branch(&no_arg, lt, a0, Operand(sp)); 2076 __ Dsubu(scratch, scratch, Operand(1));
2063 __ ld(a2, MemOperand(a0)); // argumentsList 2077 __ Movz(new_target, target, scratch); // if argc == 2
2064 __ Dsubu(a0, a0, Operand(kPointerSize)); 2078
2065 __ Branch(&no_arg, lt, a0, Operand(sp)); 2079 __ sd(undefined_value, MemOperand(sp, 0)); // Overwrite receiver
2066 __ ld(a3, MemOperand(a0)); // new.target
2067 __ bind(&no_arg);
2068 __ Daddu(sp, sp, Operand(scratch));
2069 __ mov(a0, a2);
2070 } 2080 }
2071 2081
2072 // ----------- S t a t e ------------- 2082 // ----------- S t a t e -------------
2073 // -- a0 : argumentsList 2083 // -- a0 : argumentsList
2084 // -- a1 : target
2074 // -- a3 : new.target 2085 // -- a3 : new.target
2075 // -- a1 : target
2076 // -- sp[0] : receiver (undefined) 2086 // -- sp[0] : receiver (undefined)
2077 // ----------------------------------- 2087 // -----------------------------------
2078 2088
2079 // 2. Make sure the target is actually a constructor. 2089 // 2. Make sure the target is actually a constructor.
2080 Label target_not_constructor; 2090 Label target_not_constructor;
2081 __ JumpIfSmi(a1, &target_not_constructor); 2091 __ JumpIfSmi(target, &target_not_constructor);
2082 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); 2092 __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
2083 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 2093 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
2084 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); 2094 __ And(a4, a4, Operand(1 << Map::kIsConstructor));
2085 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg)); 2095 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));
2086 2096
2087 // 3. Make sure the target is actually a constructor. 2097 // 3. Make sure the target is actually a constructor.
2088 Label new_target_not_constructor; 2098 Label new_target_not_constructor;
2089 __ JumpIfSmi(a3, &new_target_not_constructor); 2099 __ JumpIfSmi(new_target, &new_target_not_constructor);
2090 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); 2100 __ ld(a4, FieldMemOperand(new_target, HeapObject::kMapOffset));
2091 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 2101 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
2092 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); 2102 __ And(a4, a4, Operand(1 << Map::kIsConstructor));
2093 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg)); 2103 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));
2094 2104
2095 // 4a. Construct the target with the given new.target and argumentsList. 2105 // 4a. Construct the target with the given new.target and argumentsList.
2096 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 2106 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2097 2107
2098 // 4b. The target is not a constructor, throw an appropriate TypeError. 2108 // 4b. The target is not a constructor, throw an appropriate TypeError.
2099 __ bind(&target_not_constructor); 2109 __ bind(&target_not_constructor);
2100 { 2110 {
2101 __ sd(a1, MemOperand(sp)); 2111 __ sd(target, MemOperand(sp));
2102 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); 2112 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2103 } 2113 }
2104 2114
2105 // 4c. The new.target is not a constructor, throw an appropriate TypeError. 2115 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2106 __ bind(&new_target_not_constructor); 2116 __ bind(&new_target_not_constructor);
2107 { 2117 {
2108 __ sd(a3, MemOperand(sp)); 2118 __ sd(new_target, MemOperand(sp));
2109 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); 2119 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2110 } 2120 }
2111 } 2121 }
2112 2122
2113 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 2123 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2114 // __ sll(a0, a0, kSmiTagSize); 2124 // __ sll(a0, a0, kSmiTagSize);
2115 __ dsll32(a0, a0, 0); 2125 __ dsll32(a0, a0, 0);
2116 __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2126 __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2117 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit()); 2127 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
2118 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + 2128 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
(...skipping 18 matching lines...)
2137 2147
2138 // static 2148 // static
2139 void Builtins::Generate_Apply(MacroAssembler* masm) { 2149 void Builtins::Generate_Apply(MacroAssembler* masm) {
2140 // ----------- S t a t e ------------- 2150 // ----------- S t a t e -------------
2141 // -- a0 : argumentsList 2151 // -- a0 : argumentsList
2142 // -- a1 : target 2152 // -- a1 : target
2143 // -- a3 : new.target (checked to be constructor or undefined) 2153 // -- a3 : new.target (checked to be constructor or undefined)
2144 // -- sp[0] : thisArgument 2154 // -- sp[0] : thisArgument
2145 // ----------------------------------- 2155 // -----------------------------------
2146 2156
2157 Register arguments_list = a0;
2158 Register target = a1;
2159 Register new_target = a3;
2160
2161 Register args = a0;
2162 Register len = a2;
2163
2147 // Create the list of arguments from the array-like argumentsList. 2164 // Create the list of arguments from the array-like argumentsList.
2148 { 2165 {
2149 Label create_arguments, create_array, create_runtime, done_create; 2166 Label create_arguments, create_array, create_runtime, done_create;
2150 __ JumpIfSmi(a0, &create_runtime); 2167 __ JumpIfSmi(arguments_list, &create_runtime);
2151 2168
2152 // Load the map of argumentsList into a2. 2169 // Load the map of argumentsList into a2.
2153 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); 2170 Register arguments_list_map = a2;
2171 __ ld(arguments_list_map,
2172 FieldMemOperand(arguments_list, HeapObject::kMapOffset));
2154 2173
2155 // Load native context into a4. 2174 // Load native context into a4.
2156 __ ld(a4, NativeContextMemOperand()); 2175 Register native_context = a4;
2176 __ ld(native_context, NativeContextMemOperand());
2157 2177
2158 // Check if argumentsList is an (unmodified) arguments object. 2178 // Check if argumentsList is an (unmodified) arguments object.
2159 __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); 2179 __ ld(at, ContextMemOperand(native_context,
2160 __ Branch(&create_arguments, eq, a2, Operand(at)); 2180 Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2161 __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX)); 2181 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
2162 __ Branch(&create_arguments, eq, a2, Operand(at)); 2182 __ ld(at, ContextMemOperand(native_context,
2183 Context::STRICT_ARGUMENTS_MAP_INDEX));
2184 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
2163 2185
2164 // Check if argumentsList is a fast JSArray. 2186 // Check if argumentsList is a fast JSArray.
2165 __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); 2187 __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
2166 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); 2188 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
2167 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); 2189 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
2168 2190
2169 // Ask the runtime to create the list (actually a FixedArray). 2191 // Ask the runtime to create the list (actually a FixedArray).
2170 __ bind(&create_runtime); 2192 __ bind(&create_runtime);
2171 { 2193 {
2172 FrameScope scope(masm, StackFrame::INTERNAL); 2194 FrameScope scope(masm, StackFrame::INTERNAL);
2173 __ Push(a1, a3, a0); 2195 __ Push(target, new_target, arguments_list);
2174 __ CallRuntime(Runtime::kCreateListFromArrayLike); 2196 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2175 __ mov(a0, v0); 2197 __ mov(arguments_list, v0);
2176 __ Pop(a1, a3); 2198 __ Pop(target, new_target);
2177 __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); 2199 __ lw(len, UntagSmiFieldMemOperand(v0, FixedArray::kLengthOffset));
2178 __ SmiUntag(a2);
2179 } 2200 }
2180 __ Branch(&done_create); 2201 __ Branch(&done_create);
2181 2202
2182 // Try to create the list from an arguments object. 2203 // Try to create the list from an arguments object.
2183 __ bind(&create_arguments); 2204 __ bind(&create_arguments);
2184 __ ld(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset)); 2205 __ lw(len, UntagSmiFieldMemOperand(arguments_list,
2185 __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset)); 2206 JSArgumentsObject::kLengthOffset));
2186 __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset)); 2207 __ ld(a4, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
2187 __ Branch(&create_runtime, ne, a2, Operand(at)); 2208 __ lw(at, UntagSmiFieldMemOperand(a4, FixedArray::kLengthOffset));
2188 __ SmiUntag(a2); 2209 __ Branch(&create_runtime, ne, len, Operand(at));
2189 __ mov(a0, a4); 2210 __ mov(args, a4);
2211
2190 __ Branch(&done_create); 2212 __ Branch(&done_create);
2191 2213
2192 // Try to create the list from a JSArray object. 2214 // Try to create the list from a JSArray object.
2193 __ bind(&create_array); 2215 __ bind(&create_array);
2194 __ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset)); 2216 __ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset));
2195 __ DecodeField<Map::ElementsKindBits>(a2); 2217 __ DecodeField<Map::ElementsKindBits>(a2);
2196 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2218 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2197 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2198 STATIC_ASSERT(FAST_ELEMENTS == 2); 2219 STATIC_ASSERT(FAST_ELEMENTS == 2);
2199 __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS)); 2220 __ andi(a2, a2, uint16_t(~FAST_ELEMENTS)); // works if enum ElementsKind
2200 __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS)); 2221 // has less than 2^16 elements
2201 __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); 2222 __ Branch(&create_runtime, ne, a2, Operand(int64_t(0)));
2202 __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); 2223 __ lw(a2, UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
2203 __ SmiUntag(a2); 2224 __ ld(a0, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
2204 2225
2205 __ bind(&done_create); 2226 __ bind(&done_create);
2206 } 2227 }
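The fast-JSArray check above also folds two branches into one: instead of rejecting kinds above FAST_ELEMENTS and then FAST_HOLEY_SMI_ELEMENTS separately, the new code clears the FAST_ELEMENTS bit and rejects anything with other bits still set, which accepts exactly FAST_SMI_ELEMENTS (0) and FAST_ELEMENTS (2). A quick standalone check of that equivalence (plain C++; the constant values come from the kind values asserted in the surrounding code):

  #include <cstdint>
  #include <cstdio>

  int main() {
    const int kFastHoleySmiElements = 1;  // FAST_HOLEY_SMI_ELEMENTS == 1
    const int kFastElements = 2;          // FAST_ELEMENTS == 2
    for (int kind = 0; kind < 16; ++kind) {
      // Old check: two branches to the runtime path.
      bool old_reject = (kind > kFastElements) || (kind == kFastHoleySmiElements);
      // New check: one mask plus one branch; nonzero means "go to the runtime".
      bool new_reject = (kind & static_cast<uint16_t>(~kFastElements)) != 0;
      std::printf("kind=%2d old_reject=%d new_reject=%d\n", kind, old_reject,
                  new_reject);
    }
    return 0;
  }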
2207 2228
2208 // Check for stack overflow. 2229 // Check for stack overflow.
2209 { 2230 {
2210 // Check the stack for overflow. We are not trying to catch interruptions 2231 // Check the stack for overflow. We are not trying to catch interruptions
2211 // (i.e. debug break and preemption) here, so check the "real stack limit". 2232 // (i.e. debug break and preemption) here, so check the "real stack limit".
2212 Label done; 2233 Label done;
2213 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex); 2234 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
2214 // Make ip the space we have left. The stack might already be overflowed 2235 // Make ip the space we have left. The stack might already be overflowed
2215 // here which will cause ip to become negative. 2236 // here which will cause ip to become negative.
2216 __ Dsubu(a4, sp, a4); 2237 __ Dsubu(a4, sp, a4);
2217 // Check if the arguments will overflow the stack. 2238 // Check if the arguments will overflow the stack.
2218 __ dsll(at, a2, kPointerSizeLog2); 2239 __ dsll(at, len, kPointerSizeLog2);
2219 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison. 2240 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison.
2220 __ TailCallRuntime(Runtime::kThrowStackOverflow); 2241 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2221 __ bind(&done); 2242 __ bind(&done);
2222 } 2243 }
2223 2244
2224 // ----------- S t a t e ------------- 2245 // ----------- S t a t e -------------
2225 // -- a1 : target 2246 // -- a1 : target
2226 // -- a0 : args (a FixedArray built from argumentsList) 2247 // -- a0 : args (a FixedArray built from argumentsList)
2227 // -- a2 : len (number of elements to push from args) 2248 // -- a2 : len (number of elements to push from args)
2228 // -- a3 : new.target (checked to be constructor or undefined) 2249 // -- a3 : new.target (checked to be constructor or undefined)
2229 // -- sp[0] : thisArgument 2250 // -- sp[0] : thisArgument
2230 // ----------------------------------- 2251 // -----------------------------------
2231 2252
2232 // Push arguments onto the stack (thisArgument is already on the stack). 2253 // Push arguments onto the stack (thisArgument is already on the stack).
2233 { 2254 {
2234 __ mov(a4, zero_reg);
2235 Label done, loop; 2255 Label done, loop;
2256 Register src = a4;
2257 Register scratch = len;
2258
2259 __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
2260 __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
2261 __ mov(a0, len); // The 'len' argument for Call() or Construct().
2262 __ dsll(scratch, len, kPointerSizeLog2);
2263 __ Dsubu(scratch, sp, Operand(scratch));
2236 __ bind(&loop); 2264 __ bind(&loop);
2237 __ Branch(&done, eq, a4, Operand(a2)); 2265 __ ld(a5, MemOperand(src));
2238 __ Dlsa(at, a0, a4, kPointerSizeLog2); 2266 __ daddiu(src, src, kPointerSize);
2239 __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize)); 2267 __ Push(a5);
2240 __ Push(at); 2268 __ Branch(&loop, ne, scratch, Operand(sp));
2241 __ Daddu(a4, a4, Operand(1));
2242 __ Branch(&loop);
2243 __ bind(&done); 2269 __ bind(&done);
2244 __ Move(a0, a4);
2245 } 2270 }
2246 2271
2272 // ----------- S t a t e -------------
2273 // -- a0 : argument count (len)
2274 // -- a1 : target
2275 // -- a3 : new.target (checked to be constructor or undefined)
2276 // -- sp[0] : args[len-1]
2277 // -- sp[8] : args[len-2]
2278 // ... : ...
2279 // -- sp[8*(len-2)] : args[1]
2280 // -- sp[8*(len-1)] : args[0]
2281 // ----------------------------------
2282
2247 // Dispatch to Call or Construct depending on whether new.target is undefined. 2283 // Dispatch to Call or Construct depending on whether new.target is undefined.
2248 { 2284 {
2249 Label construct; 2285 Label construct;
2250 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 2286 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2251 __ Branch(&construct, ne, a3, Operand(at)); 2287 __ Branch(&construct, ne, a3, Operand(at));
2252 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 2288 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2253 __ bind(&construct); 2289 __ bind(&construct);
2254 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 2290 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2255 } 2291 }
2256 } 2292 }
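The argument-pushing loop in Generate_Apply above is restructured as well: rather than keeping a separate index and comparing it against len on every iteration, the new code bails out early when len is zero, precomputes the stack pointer's final value (sp minus len slots), and loops until sp reaches it. A standalone sketch of that loop shape (plain C++, names and sizes are illustrative only):

  #include <cstdio>

  int main() {
    const int kLen = 4;
    long args[kLen] = {10, 20, 30, 40};  // stands in for the FixedArray elements
    long stack[16];
    long* sp = stack + 16;               // the stack grows downwards

    // Precompute where sp must end up, then push until it gets there.
    long* end = sp - kLen;               // loop body is skipped if kLen == 0
    const long* src = args;
    while (sp != end) {
      *--sp = *src++;                    // push one element
    }

    for (int i = 0; i < kLen; ++i) std::printf("%ld ", sp[i]);
    std::printf("\n");  // prints "40 30 20 10": args[kLen-1] ends up on top
    return 0;
  }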
(...skipping 57 matching lines...)
2314 Register caller_args_count_reg = scratch1; 2350 Register caller_args_count_reg = scratch1;
2315 Label no_arguments_adaptor, formal_parameter_count_loaded; 2351 Label no_arguments_adaptor, formal_parameter_count_loaded;
2316 __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2352 __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2317 __ ld(scratch3, 2353 __ ld(scratch3,
2318 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); 2354 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2319 __ Branch(&no_arguments_adaptor, ne, scratch3, 2355 __ Branch(&no_arguments_adaptor, ne, scratch3,
2320 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2356 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2321 2357
2322 // Drop current frame and load arguments count from arguments adaptor frame. 2358 // Drop current frame and load arguments count from arguments adaptor frame.
2323 __ mov(fp, scratch2); 2359 __ mov(fp, scratch2);
2324 __ ld(caller_args_count_reg, 2360 __ lw(caller_args_count_reg,
2325 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2361 UntagSmiMemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2326 __ SmiUntag(caller_args_count_reg);
2327 __ Branch(&formal_parameter_count_loaded); 2362 __ Branch(&formal_parameter_count_loaded);
2328 2363
2329 __ bind(&no_arguments_adaptor); 2364 __ bind(&no_arguments_adaptor);
2330 // Load caller's formal parameter count 2365 // Load caller's formal parameter count
2331 __ ld(scratch1, 2366 __ ld(scratch1,
2332 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); 2367 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2333 __ ld(scratch1, 2368 __ ld(scratch1,
2334 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); 2369 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2335 __ lw(caller_args_count_reg, 2370 __ lw(caller_args_count_reg,
2336 FieldMemOperand(scratch1, 2371 FieldMemOperand(scratch1,
(...skipping 136 matching lines...)
2473 2508
2474 // Patch the receiver to [[BoundThis]]. 2509 // Patch the receiver to [[BoundThis]].
2475 { 2510 {
2476 __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); 2511 __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
2477 __ Dlsa(a4, sp, a0, kPointerSizeLog2); 2512 __ Dlsa(a4, sp, a0, kPointerSizeLog2);
2478 __ sd(at, MemOperand(a4)); 2513 __ sd(at, MemOperand(a4));
2479 } 2514 }
2480 2515
2481 // Load [[BoundArguments]] into a2 and length of that into a4. 2516 // Load [[BoundArguments]] into a2 and length of that into a4.
2482 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); 2517 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2483 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); 2518 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2484 __ SmiUntag(a4);
2485 2519
2486 // ----------- S t a t e ------------- 2520 // ----------- S t a t e -------------
2487 // -- a0 : the number of arguments (not including the receiver) 2521 // -- a0 : the number of arguments (not including the receiver)
2488 // -- a1 : the function to call (checked to be a JSBoundFunction) 2522 // -- a1 : the function to call (checked to be a JSBoundFunction)
2489 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) 2523 // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2490 // -- a4 : the number of [[BoundArguments]] 2524 // -- a4 : the number of [[BoundArguments]]
2491 // ----------------------------------- 2525 // -----------------------------------
2492 2526
2493 // Reserve stack space for the [[BoundArguments]]. 2527 // Reserve stack space for the [[BoundArguments]].
2494 { 2528 {
(...skipping 26 matching lines...)
2521 __ sd(at, MemOperand(a6)); 2555 __ sd(at, MemOperand(a6));
2522 __ Daddu(a4, a4, Operand(1)); 2556 __ Daddu(a4, a4, Operand(1));
2523 __ Daddu(a5, a5, Operand(1)); 2557 __ Daddu(a5, a5, Operand(1));
2524 __ Branch(&loop); 2558 __ Branch(&loop);
2525 __ bind(&done_loop); 2559 __ bind(&done_loop);
2526 } 2560 }
2527 2561
2528 // Copy [[BoundArguments]] to the stack (below the arguments). 2562 // Copy [[BoundArguments]] to the stack (below the arguments).
2529 { 2563 {
2530 Label loop, done_loop; 2564 Label loop, done_loop;
2531 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); 2565 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2532 __ SmiUntag(a4);
2533 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 2566 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2534 __ bind(&loop); 2567 __ bind(&loop);
2535 __ Dsubu(a4, a4, Operand(1)); 2568 __ Dsubu(a4, a4, Operand(1));
2536 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); 2569 __ Branch(&done_loop, lt, a4, Operand(zero_reg));
2537 __ Dlsa(a5, a2, a4, kPointerSizeLog2); 2570 __ Dlsa(a5, a2, a4, kPointerSizeLog2);
2538 __ ld(at, MemOperand(a5)); 2571 __ ld(at, MemOperand(a5));
2539 __ Dlsa(a5, sp, a0, kPointerSizeLog2); 2572 __ Dlsa(a5, sp, a0, kPointerSizeLog2);
2540 __ sd(at, MemOperand(a5)); 2573 __ sd(at, MemOperand(a5));
2541 __ Daddu(a0, a0, Operand(1)); 2574 __ Daddu(a0, a0, Operand(1));
2542 __ Branch(&loop); 2575 __ Branch(&loop);
(...skipping 92 matching lines...)
2635 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { 2668 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2636 // ----------- S t a t e ------------- 2669 // ----------- S t a t e -------------
2637 // -- a0 : the number of arguments (not including the receiver) 2670 // -- a0 : the number of arguments (not including the receiver)
2638 // -- a1 : the function to call (checked to be a JSBoundFunction) 2671 // -- a1 : the function to call (checked to be a JSBoundFunction)
2639 // -- a3 : the new target (checked to be a constructor) 2672 // -- a3 : the new target (checked to be a constructor)
2640 // ----------------------------------- 2673 // -----------------------------------
2641 __ AssertBoundFunction(a1); 2674 __ AssertBoundFunction(a1);
2642 2675
2643 // Load [[BoundArguments]] into a2 and length of that into a4. 2676 // Load [[BoundArguments]] into a2 and length of that into a4.
2644 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); 2677 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2645 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); 2678 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2646 __ SmiUntag(a4);
2647 2679
2648 // ----------- S t a t e ------------- 2680 // ----------- S t a t e -------------
2649 // -- a0 : the number of arguments (not including the receiver) 2681 // -- a0 : the number of arguments (not including the receiver)
2650 // -- a1 : the function to call (checked to be a JSBoundFunction) 2682 // -- a1 : the function to call (checked to be a JSBoundFunction)
2651 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) 2683 // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2652 // -- a3 : the new target (checked to be a constructor) 2684 // -- a3 : the new target (checked to be a constructor)
2653 // -- a4 : the number of [[BoundArguments]] 2685 // -- a4 : the number of [[BoundArguments]]
2654 // ----------------------------------- 2686 // -----------------------------------
2655 2687
2656 // Reserve stack space for the [[BoundArguments]]. 2688 // Reserve stack space for the [[BoundArguments]].
(...skipping 27 matching lines...)
2684 __ sd(at, MemOperand(a6)); 2716 __ sd(at, MemOperand(a6));
2685 __ Daddu(a4, a4, Operand(1)); 2717 __ Daddu(a4, a4, Operand(1));
2686 __ Daddu(a5, a5, Operand(1)); 2718 __ Daddu(a5, a5, Operand(1));
2687 __ Branch(&loop); 2719 __ Branch(&loop);
2688 __ bind(&done_loop); 2720 __ bind(&done_loop);
2689 } 2721 }
2690 2722
2691 // Copy [[BoundArguments]] to the stack (below the arguments). 2723 // Copy [[BoundArguments]] to the stack (below the arguments).
2692 { 2724 {
2693 Label loop, done_loop; 2725 Label loop, done_loop;
2694 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); 2726 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2695 __ SmiUntag(a4);
2696 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 2727 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2697 __ bind(&loop); 2728 __ bind(&loop);
2698 __ Dsubu(a4, a4, Operand(1)); 2729 __ Dsubu(a4, a4, Operand(1));
2699 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); 2730 __ Branch(&done_loop, lt, a4, Operand(zero_reg));
2700 __ Dlsa(a5, a2, a4, kPointerSizeLog2); 2731 __ Dlsa(a5, a2, a4, kPointerSizeLog2);
2701 __ ld(at, MemOperand(a5)); 2732 __ ld(at, MemOperand(a5));
2702 __ Dlsa(a5, sp, a0, kPointerSizeLog2); 2733 __ Dlsa(a5, sp, a0, kPointerSizeLog2);
2703 __ sd(at, MemOperand(a5)); 2734 __ sd(at, MemOperand(a5));
2704 __ Daddu(a0, a0, Operand(1)); 2735 __ Daddu(a0, a0, Operand(1));
2705 __ Branch(&loop); 2736 __ Branch(&loop);
(...skipping 252 matching lines...)
2958 __ break_(0xCC); 2989 __ break_(0xCC);
2959 } 2990 }
2960 } 2991 }
2961 2992
2962 #undef __ 2993 #undef __
2963 2994
2964 } // namespace internal 2995 } // namespace internal
2965 } // namespace v8 2996 } // namespace v8
2966 2997
2967 #endif // V8_TARGET_ARCH_MIPS64 2998 #endif // V8_TARGET_ARCH_MIPS64