| Index: src/mips/code-stubs-mips.cc
|
| diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
|
| index 0c3a7d6fde22f63bd254e4e7ea6577f97b84b002..cc1ff0f4824c3714554f8a90adf87ec0671c0149 100644
|
| --- a/src/mips/code-stubs-mips.cc
|
| +++ b/src/mips/code-stubs-mips.cc
|
| @@ -1591,236 +1591,6 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| -void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
|
| - // a1 : function
|
| - // a2 : number of parameters (tagged)
|
| - // a3 : parameters pointer
|
| -
|
| - DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
|
| - DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
|
| - DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
|
| -
|
| - // Check if the calling frame is an arguments adaptor frame.
|
| - Label runtime;
|
| - __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| - __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset));
|
| - __ Branch(&runtime, ne, a0,
|
| - Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| -
|
| - // Patch the arguments.length and the parameters pointer in the current frame.
|
| - __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
| - __ Lsa(t0, t0, a2, 1);
|
| - __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset);
|
| -
|
| - __ bind(&runtime);
|
| - __ Push(a1, a3, a2);
|
| - __ TailCallRuntime(Runtime::kNewSloppyArguments);
|
| -}
|
| -
|
| -
|
| -void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
|
| - // a1 : function
|
| - // a2 : number of parameters (tagged)
|
| - // a3 : parameters pointer
|
| - // Registers used over whole function:
|
| - // t1 : arguments count (tagged)
|
| - // t2 : mapped parameter count (tagged)
|
| -
|
| - DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
|
| - DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
|
| - DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
|
| -
|
| - // Check if the calling frame is an arguments adaptor frame.
|
| - Label adaptor_frame, try_allocate, runtime;
|
| - __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| - __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset));
|
| - __ Branch(&adaptor_frame, eq, a0,
|
| - Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| -
|
| - // No adaptor, parameter count = argument count.
|
| - __ mov(t1, a2);
|
| - __ Branch(USE_DELAY_SLOT, &try_allocate);
|
| - __ mov(t2, a2); // In delay slot.
|
| -
|
| - // We have an adaptor frame. Patch the parameters pointer.
|
| - __ bind(&adaptor_frame);
|
| - __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
| - __ Lsa(t0, t0, t1, 1);
|
| - __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset));
|
| -
|
| - // t1 = argument count (tagged)
|
| - // t2 = parameter count (tagged)
|
| - // Compute the mapped parameter count = min(t2, t1) in t2.
|
| - __ mov(t2, a2);
|
| - __ Branch(&try_allocate, le, t2, Operand(t1));
|
| - __ mov(t2, t1);
|
| -
|
| - __ bind(&try_allocate);
|
| -
|
| - // Compute the sizes of backing store, parameter map, and arguments object.
|
| - // 1. Parameter map, has 2 extra words containing context and backing store.
|
| - const int kParameterMapHeaderSize =
|
| - FixedArray::kHeaderSize + 2 * kPointerSize;
|
| - // If there are no mapped parameters, we do not need the parameter_map.
|
| - Label param_map_size;
|
| - DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
|
| - __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg));
|
| - __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0.
|
| - __ sll(t5, t2, 1);
|
| - __ addiu(t5, t5, kParameterMapHeaderSize);
|
| - __ bind(¶m_map_size);
|
| -
|
| - // 2. Backing store.
|
| - __ Lsa(t5, t5, t1, 1);
|
| - __ Addu(t5, t5, Operand(FixedArray::kHeaderSize));
|
| -
|
| - // 3. Arguments object.
|
| - __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize));
|
| -
|
| - // Do the allocation of all three objects in one go.
|
| - __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT);
|
| -
|
| - // v0 = address of new object(s) (tagged)
|
| - // a2 = argument count (smi-tagged)
|
| - // Get the arguments boilerplate from the current native context into t0.
|
| - const int kNormalOffset =
|
| - Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
|
| - const int kAliasedOffset =
|
| - Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
|
| -
|
| - __ lw(t0, NativeContextMemOperand());
|
| - Label skip2_ne, skip2_eq;
|
| - __ Branch(&skip2_ne, ne, t2, Operand(zero_reg));
|
| - __ lw(t0, MemOperand(t0, kNormalOffset));
|
| - __ bind(&skip2_ne);
|
| -
|
| - __ Branch(&skip2_eq, eq, t2, Operand(zero_reg));
|
| - __ lw(t0, MemOperand(t0, kAliasedOffset));
|
| - __ bind(&skip2_eq);
|
| -
|
| - // v0 = address of new object (tagged)
|
| - // a2 = argument count (smi-tagged)
|
| - // t0 = address of arguments map (tagged)
|
| - // t2 = mapped parameter count (tagged)
|
| - __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset));
|
| - __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex);
|
| - __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset));
|
| - __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset));
|
| -
|
| - // Set up the callee in-object property.
|
| - __ AssertNotSmi(a1);
|
| - __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
|
| -
|
| - // Use the length (smi tagged) and set that as an in-object property too.
|
| - __ AssertSmi(t1);
|
| - __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
|
| -
|
| - // Set up the elements pointer in the allocated arguments object.
|
| - // If we allocated a parameter map, t0 will point there, otherwise
|
| - // it will point to the backing store.
|
| - __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize));
|
| - __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
|
| -
|
| - // v0 = address of new object (tagged)
|
| - // a2 = argument count (tagged)
|
| - // t0 = address of parameter map or backing store (tagged)
|
| - // t2 = mapped parameter count (tagged)
|
| - // Initialize parameter map. If there are no mapped arguments, we're done.
|
| - Label skip_parameter_map;
|
| - Label skip3;
|
| - __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0)));
|
| - // Move backing store address to a1, because it is
|
| - // expected there when filling in the unmapped arguments.
|
| - __ mov(a1, t0);
|
| - __ bind(&skip3);
|
| -
|
| - __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0)));
|
| -
|
| - __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex);
|
| - __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset));
|
| - __ Addu(t1, t2, Operand(Smi::FromInt(2)));
|
| - __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
|
| - __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize));
|
| - __ Lsa(t1, t0, t2, 1);
|
| - __ Addu(t1, t1, Operand(kParameterMapHeaderSize));
|
| - __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize));
|
| -
|
| - // Copy the parameter slots and the holes in the arguments.
|
| - // We need to fill in mapped_parameter_count slots. They index the context,
|
| - // where parameters are stored in reverse order, at
|
| - // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
|
| - // The mapped parameter thus need to get indices
|
| - // MIN_CONTEXT_SLOTS+parameter_count-1 ..
|
| - // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
|
| - // We loop from right to left.
|
| - Label parameters_loop, parameters_test;
|
| - __ mov(t1, t2);
|
| - __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
|
| - __ Subu(t5, t5, Operand(t2));
|
| - __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
|
| - __ Lsa(a1, t0, t1, 1);
|
| - __ Addu(a1, a1, Operand(kParameterMapHeaderSize));
|
| -
|
| - // a1 = address of backing store (tagged)
|
| - // t0 = address of parameter map (tagged)
|
| - // a0 = temporary scratch (a.o., for address calculation)
|
| - // t1 = loop variable (tagged)
|
| - // t3 = the hole value
|
| - __ jmp(¶meters_test);
|
| -
|
| - __ bind(¶meters_loop);
|
| - __ Subu(t1, t1, Operand(Smi::FromInt(1)));
|
| - __ sll(a0, t1, 1);
|
| - __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
|
| - __ Addu(t6, t0, a0);
|
| - __ sw(t5, MemOperand(t6));
|
| - __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
|
| - __ Addu(t6, a1, a0);
|
| - __ sw(t3, MemOperand(t6));
|
| - __ Addu(t5, t5, Operand(Smi::FromInt(1)));
|
| - __ bind(¶meters_test);
|
| - __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0)));
|
| -
|
| - // t1 = argument count (tagged).
|
| - __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
|
| -
|
| - __ bind(&skip_parameter_map);
|
| - // v0 = address of new object (tagged)
|
| - // a1 = address of backing store (tagged)
|
| - // t1 = argument count (tagged)
|
| - // t2 = mapped parameter count (tagged)
|
| - // t5 = scratch
|
| - // Copy arguments header and remaining slots (if there are any).
|
| - __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
|
| - __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset));
|
| - __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset));
|
| -
|
| - Label arguments_loop, arguments_test;
|
| - __ sll(t6, t2, 1);
|
| - __ Subu(a3, a3, Operand(t6));
|
| - __ jmp(&arguments_test);
|
| -
|
| - __ bind(&arguments_loop);
|
| - __ Subu(a3, a3, Operand(kPointerSize));
|
| - __ lw(t0, MemOperand(a3, 0));
|
| - __ Lsa(t5, a1, t2, 1);
|
| - __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize));
|
| - __ Addu(t2, t2, Operand(Smi::FromInt(1)));
|
| -
|
| - __ bind(&arguments_test);
|
| - __ Branch(&arguments_loop, lt, t2, Operand(t1));
|
| -
|
| - // Return.
|
| - __ Ret();
|
| -
|
| - // Do the runtime call to allocate the arguments object.
|
| - // t1 = argument count (tagged)
|
| - __ bind(&runtime);
|
| - __ Push(a1, a3, t1);
|
| - __ TailCallRuntime(Runtime::kNewSloppyArguments);
|
| -}
|
| -
|
| -
|
| void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
|
| // Return address is in ra.
|
| Label slow;
|
| @@ -5141,6 +4911,220 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
|
| + // ----------- S t a t e -------------
|
| + // -- a1 : function
|
| + // -- cp : context
|
| + // -- fp : frame pointer
|
| + // -- ra : return address
|
| + // -----------------------------------
|
| + __ AssertFunction(a1);
|
| +
|
| + // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
|
| + __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
|
| + __ lw(a2,
|
| + FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
|
| + __ Lsa(a3, fp, a2, kPointerSizeLog2 - 1);
|
| + __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
|
| +
|
| + // a1 : function
|
| + // a2 : number of parameters (tagged)
|
| + // a3 : parameters pointer
|
| + // Registers used over whole function:
|
| + // t1 : arguments count (tagged)
|
| + // t2 : mapped parameter count (tagged)
|
| +
|
| + // Check if the calling frame is an arguments adaptor frame.
|
| + Label adaptor_frame, try_allocate, runtime;
|
| + __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| + __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset));
|
| + __ Branch(&adaptor_frame, eq, a0,
|
| + Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| +
|
| + // No adaptor, parameter count = argument count.
|
| + __ mov(t1, a2);
|
| + __ Branch(USE_DELAY_SLOT, &try_allocate);
|
| + __ mov(t2, a2); // In delay slot.
|
| +
|
| + // We have an adaptor frame. Patch the parameters pointer.
|
| + __ bind(&adaptor_frame);
|
| + __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
| + __ Lsa(t0, t0, t1, 1);
|
| + __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset));
|
| +
|
| + // t1 = argument count (tagged)
|
| + // t2 = parameter count (tagged)
|
| + // Compute the mapped parameter count = min(t2, t1) in t2.
|
| + __ mov(t2, a2);
|
| + __ Branch(&try_allocate, le, t2, Operand(t1));
|
| + __ mov(t2, t1);
|
| +
|
| + __ bind(&try_allocate);
|
| +
|
| + // Compute the sizes of backing store, parameter map, and arguments object.
|
| +  // 1. Parameter map, which has 2 extra words containing context and backing store.
|
| + const int kParameterMapHeaderSize =
|
| + FixedArray::kHeaderSize + 2 * kPointerSize;
|
| + // If there are no mapped parameters, we do not need the parameter_map.
|
| + Label param_map_size;
|
| + DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
|
| + __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg));
|
| + __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0.
|
| + __ sll(t5, t2, 1);
|
| + __ addiu(t5, t5, kParameterMapHeaderSize);
|
| + __ bind(¶m_map_size);
|
| +
|
| + // 2. Backing store.
|
| + __ Lsa(t5, t5, t1, 1);
|
| + __ Addu(t5, t5, Operand(FixedArray::kHeaderSize));
|
| +
|
| + // 3. Arguments object.
|
| + __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize));
|
| +
|
| + // Do the allocation of all three objects in one go.
|
| + __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT);
|
| +
|
| + // v0 = address of new object(s) (tagged)
|
| + // a2 = argument count (smi-tagged)
|
| + // Get the arguments boilerplate from the current native context into t0.
|
| + const int kNormalOffset =
|
| + Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
|
| + const int kAliasedOffset =
|
| + Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
|
| +
|
| + __ lw(t0, NativeContextMemOperand());
|
| + Label skip2_ne, skip2_eq;
|
| + __ Branch(&skip2_ne, ne, t2, Operand(zero_reg));
|
| + __ lw(t0, MemOperand(t0, kNormalOffset));
|
| + __ bind(&skip2_ne);
|
| +
|
| + __ Branch(&skip2_eq, eq, t2, Operand(zero_reg));
|
| + __ lw(t0, MemOperand(t0, kAliasedOffset));
|
| + __ bind(&skip2_eq);
|
| +
|
| + // v0 = address of new object (tagged)
|
| + // a2 = argument count (smi-tagged)
|
| + // t0 = address of arguments map (tagged)
|
| + // t2 = mapped parameter count (tagged)
|
| + __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset));
|
| + __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex);
|
| + __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset));
|
| + __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset));
|
| +
|
| + // Set up the callee in-object property.
|
| + __ AssertNotSmi(a1);
|
| + __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
|
| +
|
| + // Use the length (smi tagged) and set that as an in-object property too.
|
| + __ AssertSmi(t1);
|
| + __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
|
| +
|
| + // Set up the elements pointer in the allocated arguments object.
|
| + // If we allocated a parameter map, t0 will point there, otherwise
|
| + // it will point to the backing store.
|
| + __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize));
|
| + __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
|
| +
|
| + // v0 = address of new object (tagged)
|
| + // a2 = argument count (tagged)
|
| + // t0 = address of parameter map or backing store (tagged)
|
| + // t2 = mapped parameter count (tagged)
|
| + // Initialize parameter map. If there are no mapped arguments, we're done.
|
| + Label skip_parameter_map;
|
| + Label skip3;
|
| + __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0)));
|
| + // Move backing store address to a1, because it is
|
| + // expected there when filling in the unmapped arguments.
|
| + __ mov(a1, t0);
|
| + __ bind(&skip3);
|
| +
|
| + __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0)));
|
| +
|
| + __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex);
|
| + __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset));
|
| + __ Addu(t1, t2, Operand(Smi::FromInt(2)));
|
| + __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
|
| + __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize));
|
| + __ Lsa(t1, t0, t2, 1);
|
| + __ Addu(t1, t1, Operand(kParameterMapHeaderSize));
|
| + __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize));
|
| +
|
| + // Copy the parameter slots and the holes in the arguments.
|
| + // We need to fill in mapped_parameter_count slots. They index the context,
|
| + // where parameters are stored in reverse order, at
|
| + // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
|
| +  // The mapped parameters thus need to get indices
|
| + // MIN_CONTEXT_SLOTS+parameter_count-1 ..
|
| + // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
|
| + // We loop from right to left.
|
| + Label parameters_loop, parameters_test;
|
| + __ mov(t1, t2);
|
| + __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
|
| + __ Subu(t5, t5, Operand(t2));
|
| + __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
|
| + __ Lsa(a1, t0, t1, 1);
|
| + __ Addu(a1, a1, Operand(kParameterMapHeaderSize));
|
| +
|
| + // a1 = address of backing store (tagged)
|
| + // t0 = address of parameter map (tagged)
|
| + // a0 = temporary scratch (a.o., for address calculation)
|
| + // t1 = loop variable (tagged)
|
| + // t3 = the hole value
|
| + __ jmp(¶meters_test);
|
| +
|
| + __ bind(¶meters_loop);
|
| + __ Subu(t1, t1, Operand(Smi::FromInt(1)));
|
| + __ sll(a0, t1, 1);
|
| + __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
|
| + __ Addu(t6, t0, a0);
|
| + __ sw(t5, MemOperand(t6));
|
| + __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
|
| + __ Addu(t6, a1, a0);
|
| + __ sw(t3, MemOperand(t6));
|
| + __ Addu(t5, t5, Operand(Smi::FromInt(1)));
|
| + __ bind(¶meters_test);
|
| + __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0)));
|
| +
|
| + // t1 = argument count (tagged).
|
| + __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
|
| +
|
| + __ bind(&skip_parameter_map);
|
| + // v0 = address of new object (tagged)
|
| + // a1 = address of backing store (tagged)
|
| + // t1 = argument count (tagged)
|
| + // t2 = mapped parameter count (tagged)
|
| + // t5 = scratch
|
| + // Copy arguments header and remaining slots (if there are any).
|
| + __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
|
| + __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset));
|
| + __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset));
|
| +
|
| + Label arguments_loop, arguments_test;
|
| + __ sll(t6, t2, 1);
|
| + __ Subu(a3, a3, Operand(t6));
|
| + __ jmp(&arguments_test);
|
| +
|
| + __ bind(&arguments_loop);
|
| + __ Subu(a3, a3, Operand(kPointerSize));
|
| + __ lw(t0, MemOperand(a3, 0));
|
| + __ Lsa(t5, a1, t2, 1);
|
| + __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize));
|
| + __ Addu(t2, t2, Operand(Smi::FromInt(1)));
|
| +
|
| + __ bind(&arguments_test);
|
| + __ Branch(&arguments_loop, lt, t2, Operand(t1));
|
| +
|
| + // Return.
|
| + __ Ret();
|
| +
|
| + // Do the runtime call to allocate the arguments object.
|
| + // t1 = argument count (tagged)
|
| + __ bind(&runtime);
|
| + __ Push(a1, a3, t1);
|
| + __ TailCallRuntime(Runtime::kNewSloppyArguments);
|
| +}
|
| +
|
| +
|
| void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
|
| // ----------- S t a t e -------------
|
| // -- a1 : function
|
|
|