| Index: src/mips64/builtins-mips64.cc
|
| diff --git a/src/mips64/builtins-mips64.cc b/src/mips64/builtins-mips64.cc
|
| index fecf3f7334fd70e7628ed14879df4e3d1619586d..47da867360cf9ab88c6b838a00cfe5ddc887bb34 100644
|
| --- a/src/mips64/builtins-mips64.cc
|
| +++ b/src/mips64/builtins-mips64.cc
|
| @@ -156,8 +156,7 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
|
| {
|
| __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
|
| __ Dsubu(a0, a0, Operand(1));
|
| - __ dsll(a0, a0, kPointerSizeLog2);
|
| - __ Daddu(sp, a0, sp);
|
| + __ Dlsa(sp, sp, a0, kPointerSizeLog2);
|
| __ ld(a0, MemOperand(sp));
|
| __ Drop(2);
|
| }
|
| @@ -192,8 +191,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
|
| Label no_arguments, done;
|
| __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
|
| __ Dsubu(a0, a0, Operand(1));
|
| - __ dsll(a0, a0, kPointerSizeLog2);
|
| - __ Daddu(sp, a0, sp);
|
| + __ Dlsa(sp, sp, a0, kPointerSizeLog2);
|
| __ ld(a0, MemOperand(sp));
|
| __ Drop(2);
|
| __ jmp(&done);
|
| @@ -257,8 +255,7 @@ void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
|
| {
|
| __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
|
| __ Dsubu(a0, a0, Operand(1));
|
| - __ dsll(a0, a0, kPointerSizeLog2);
|
| - __ Daddu(sp, a0, sp);
|
| + __ Dlsa(sp, sp, a0, kPointerSizeLog2);
|
| __ ld(a0, MemOperand(sp));
|
| __ Drop(2);
|
| }
|
| @@ -319,8 +316,7 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
|
| Label no_arguments, done;
|
| __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
|
| __ Dsubu(a0, a0, Operand(1));
|
| - __ dsll(a0, a0, kPointerSizeLog2);
|
| - __ Daddu(sp, a0, sp);
|
| + __ Dlsa(sp, sp, a0, kPointerSizeLog2);
|
| __ ld(a0, MemOperand(sp));
|
| __ Drop(2);
|
| __ jmp(&done);
|
| @@ -611,8 +607,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
|
| __ mov(t0, a0);
|
| __ jmp(&entry);
|
| __ bind(&loop);
|
| - __ dsll(a4, t0, kPointerSizeLog2);
|
| - __ Daddu(a4, a2, Operand(a4));
|
| + __ Dlsa(a4, a2, t0, kPointerSizeLog2);
|
| __ ld(a5, MemOperand(a4));
|
| __ push(a5);
|
| __ bind(&entry);
|
| @@ -798,8 +793,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
|
| // a3: argc
|
| // s0: argv, i.e. points to first arg
|
| Label loop, entry;
|
| - __ dsll(a4, a3, kPointerSizeLog2);
|
| - __ daddu(a6, s0, a4);
|
| + __ Dlsa(a6, s0, a3, kPointerSizeLog2);
|
| __ b(&entry);
|
| __ nop(); // Branch delay slot nop.
|
| // a6 points past last arg.
|
| @@ -955,8 +949,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
|
| __ Daddu(a0, kInterpreterBytecodeArrayRegister,
|
| kInterpreterBytecodeOffsetRegister);
|
| __ lbu(a0, MemOperand(a0));
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ Daddu(at, kInterpreterDispatchTableRegister, at);
|
| + __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
|
| __ ld(at, MemOperand(at));
|
| // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
|
| // and header removal.
|
| @@ -1109,8 +1102,7 @@ static void Generate_InterpreterNotifyDeoptimizedHelper(
|
| __ Daddu(a1, kInterpreterBytecodeArrayRegister,
|
| kInterpreterBytecodeOffsetRegister);
|
| __ lbu(a1, MemOperand(a1));
|
| - __ dsll(a1, a1, kPointerSizeLog2);
|
| - __ Daddu(a1, kInterpreterDispatchTableRegister, a1);
|
| + __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
|
| __ ld(a1, MemOperand(a1));
|
| __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
|
| __ Jump(a1);
|
| @@ -1397,8 +1389,7 @@ void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
|
|
|
| // Do the compatible receiver check
|
| Label receiver_check_failed;
|
| - __ sll(at, a0, kPointerSizeLog2);
|
| - __ Daddu(t8, sp, at);
|
| + __ Dlsa(t8, sp, a0, kPointerSizeLog2);
|
| __ ld(t0, MemOperand(t8));
|
| CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);
|
|
|
| @@ -1532,6 +1523,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
|
| Register scratch = a4;
|
| __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
|
| __ mov(a3, a2);
|
| + // Dlsa() cannot be used here, as the scratch value is used later.
|
| __ dsll(scratch, a0, kPointerSizeLog2);
|
| __ Daddu(a0, sp, Operand(scratch));
|
| __ ld(a1, MemOperand(a0)); // receiver
|
| @@ -1602,8 +1594,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
|
|
|
| // 2. Get the function to call (passed as receiver) from the stack.
|
| // a0: actual number of arguments
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ daddu(at, sp, at);
|
| + __ Dlsa(at, sp, a0, kPointerSizeLog2);
|
| __ ld(a1, MemOperand(at));
|
|
|
| // 3. Shift arguments and return address one slot down on the stack
|
| @@ -1614,8 +1605,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
|
| {
|
| Label loop;
|
| // Calculate the copy start address (destination). Copy end address is sp.
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ daddu(a2, sp, at);
|
| + __ Dlsa(a2, sp, a0, kPointerSizeLog2);
|
|
|
| __ bind(&loop);
|
| __ ld(at, MemOperand(a2, -kPointerSize));
|
| @@ -1715,6 +1705,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
|
| Register scratch = a4;
|
| __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
|
| __ mov(a2, a1);
|
| + // Dlsa() cannot be used here, as the scratch value is used later.
|
| __ dsll(scratch, a0, kPointerSizeLog2);
|
| __ Daddu(a0, sp, Operand(scratch));
|
| __ sd(a2, MemOperand(a0)); // receiver
|
| @@ -1926,8 +1917,7 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
|
| Label done, loop;
|
| __ bind(&loop);
|
| __ Branch(&done, eq, a4, Operand(a2));
|
| - __ dsll(at, a4, kPointerSizeLog2);
|
| - __ Daddu(at, a0, at);
|
| + __ Dlsa(at, a0, a4, kPointerSizeLog2);
|
| __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
|
| __ Push(at);
|
| __ Daddu(a4, a4, Operand(1));
|
| @@ -1990,8 +1980,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
|
| __ LoadGlobalProxy(a3);
|
| } else {
|
| Label convert_to_object, convert_receiver;
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ daddu(at, sp, at);
|
| + __ Dlsa(at, sp, a0, kPointerSizeLog2);
|
| __ ld(a3, MemOperand(at));
|
| __ JumpIfSmi(a3, &convert_to_object);
|
| STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
|
| @@ -2027,8 +2016,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
|
| __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
|
| __ bind(&convert_receiver);
|
| }
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ daddu(at, sp, at);
|
| + __ Dlsa(at, sp, a0, kPointerSizeLog2);
|
| __ sd(a3, MemOperand(at));
|
| }
|
| __ bind(&done_convert);
|
| @@ -2068,8 +2056,7 @@ void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
|
| // Patch the receiver to [[BoundThis]].
|
| {
|
| __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
|
| - __ dsll(a4, a0, kPointerSizeLog2);
|
| - __ daddu(a4, a4, sp);
|
| + __ Dlsa(a4, sp, a0, kPointerSizeLog2);
|
| __ sd(at, MemOperand(a4));
|
| }
|
|
|
| @@ -2110,11 +2097,9 @@ void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
|
| __ mov(a5, zero_reg);
|
| __ bind(&loop);
|
| __ Branch(&done_loop, gt, a5, Operand(a0));
|
| - __ dsll(a6, a4, kPointerSizeLog2);
|
| - __ daddu(a6, a6, sp);
|
| + __ Dlsa(a6, sp, a4, kPointerSizeLog2);
|
| __ ld(at, MemOperand(a6));
|
| - __ dsll(a6, a5, kPointerSizeLog2);
|
| - __ daddu(a6, a6, sp);
|
| + __ Dlsa(a6, sp, a5, kPointerSizeLog2);
|
| __ sd(at, MemOperand(a6));
|
| __ Daddu(a4, a4, Operand(1));
|
| __ Daddu(a5, a5, Operand(1));
|
| @@ -2131,11 +2116,9 @@ void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
|
| __ bind(&loop);
|
| __ Dsubu(a4, a4, Operand(1));
|
| __ Branch(&done_loop, lt, a4, Operand(zero_reg));
|
| - __ dsll(a5, a4, kPointerSizeLog2);
|
| - __ daddu(a5, a5, a2);
|
| + __ Dlsa(a5, a2, a4, kPointerSizeLog2);
|
| __ ld(at, MemOperand(a5));
|
| - __ dsll(a5, a0, kPointerSizeLog2);
|
| - __ daddu(a5, a5, sp);
|
| + __ Dlsa(a5, sp, a0, kPointerSizeLog2);
|
| __ sd(at, MemOperand(a5));
|
| __ Daddu(a0, a0, Operand(1));
|
| __ Branch(&loop);
|
| @@ -2186,8 +2169,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
|
| __ And(t1, t1, Operand(1 << Map::kIsCallable));
|
| __ Branch(&non_callable, eq, t1, Operand(zero_reg));
|
| // Overwrite the original receiver with the (original) target.
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ daddu(at, sp, at);
|
| + __ Dlsa(at, sp, a0, kPointerSizeLog2);
|
| __ sd(a1, MemOperand(at));
|
| // Let the "call_as_function_delegate" take care of the rest.
|
| __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
|
| @@ -2273,11 +2255,9 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
|
| __ mov(a5, zero_reg);
|
| __ bind(&loop);
|
| __ Branch(&done_loop, ge, a5, Operand(a0));
|
| - __ dsll(a6, a4, kPointerSizeLog2);
|
| - __ daddu(a6, a6, sp);
|
| + __ Dlsa(a6, sp, a4, kPointerSizeLog2);
|
| __ ld(at, MemOperand(a6));
|
| - __ dsll(a6, a5, kPointerSizeLog2);
|
| - __ daddu(a6, a6, sp);
|
| + __ Dlsa(a6, sp, a5, kPointerSizeLog2);
|
| __ sd(at, MemOperand(a6));
|
| __ Daddu(a4, a4, Operand(1));
|
| __ Daddu(a5, a5, Operand(1));
|
| @@ -2294,11 +2274,9 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
|
| __ bind(&loop);
|
| __ Dsubu(a4, a4, Operand(1));
|
| __ Branch(&done_loop, lt, a4, Operand(zero_reg));
|
| - __ dsll(a5, a4, kPointerSizeLog2);
|
| - __ daddu(a5, a5, a2);
|
| + __ Dlsa(a5, a2, a4, kPointerSizeLog2);
|
| __ ld(at, MemOperand(a5));
|
| - __ dsll(a5, a0, kPointerSizeLog2);
|
| - __ daddu(a5, a5, sp);
|
| + __ Dlsa(a5, sp, a0, kPointerSizeLog2);
|
| __ sd(at, MemOperand(a5));
|
| __ Daddu(a0, a0, Operand(1));
|
| __ Branch(&loop);
|
| @@ -2377,8 +2355,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
|
| // Called Construct on an exotic Object with a [[Construct]] internal method.
|
| {
|
| // Overwrite the original receiver with the (original) target.
|
| - __ dsll(at, a0, kPointerSizeLog2);
|
| - __ daddu(at, sp, at);
|
| + __ Dlsa(at, sp, a0, kPointerSizeLog2);
|
| __ sd(a1, MemOperand(at));
|
| // Let the "call_as_constructor_delegate" take care of the rest.
|
| __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
|
|
|
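Note on the transformation: every replaced pair computes base + (index << kPointerSizeLog2) through a temporary register; Dlsa() folds the shift and the add into one macro-instruction. The hunks above imply the operand order Dlsa(rd, rt, rs, sa) with the result rd = rt + (rs << sa). Below is a minimal C++ model of that equivalence, not V8 code: the register names in the comments are illustrative, and kPointerSizeLog2 = 3 reflects 8-byte pointers on MIPS64.

    #include <cstdint>

    // kPointerSizeLog2 is 3 on MIPS64 (8-byte pointers).
    constexpr int kPointerSizeLog2 = 3;

    // Old pattern: shift into a temporary, then add.
    uint64_t OldSequence(uint64_t base, uint64_t index) {
      uint64_t at = index << kPointerSizeLog2;  // __ dsll(at, index, kPointerSizeLog2);
      return base + at;                         // __ daddu(at, base, at);
    }

    // New pattern: Dlsa(rd, rt, rs, sa) == rt + (rs << sa) in one step.
    uint64_t NewSequence(uint64_t base, uint64_t index) {
      return base + (index << kPointerSizeLog2);  // __ Dlsa(at, base, index, kPointerSizeLog2);
    }

    int main() {
      // Both forms address the same slot, e.g. argument `index` above `base` on the stack.
      return OldSequence(0x1000, 4) == NewSequence(0x1000, 4) ? 0 : 1;
    }

The two sites left as dsll/Daddu (in Generate_FunctionPrototypeApply and Generate_ReflectConstruct) keep the shifted value live in an explicit scratch register for later use, which is why the comments added there rule Dlsa() out.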