Chromium Code Reviews

Unified Diff: src/mips64/code-stubs-mips64.cc

Issue 1695633003: [runtime] Turn ArgumentAccessStub into FastNewSloppyArgumentsStub. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix MIPS dead code Created 4 years, 10 months ago
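
This patch set removes ArgumentsAccessStub::GenerateNewSloppySlow and GenerateNewSloppyFast and re-lands the fast path as FastNewSloppyArgumentsStub::Generate, which takes only the function in a1 and derives the formal parameter count and the parameters pointer from the SharedFunctionInfo itself. As a reading aid, here is a minimal standalone sketch (plain C++, not V8 code) of the size computation the new stub performs before its single Allocate() call; the concrete byte values are assumptions standing in for FixedArray::kHeaderSize, JSSloppyArgumentsObject::kSize, and friends.

// size_sketch.cc -- standalone illustration only, not V8 code.
#include <algorithm>
#include <cstdio>

int main() {
  // Assumed layout constants for a 64-bit target; the real values come from
  // kPointerSize, FixedArray::kHeaderSize and JSSloppyArgumentsObject::kSize.
  const int kPointerSize = 8;
  const int kFixedArrayHeaderSize = 2 * kPointerSize;          // map + length
  const int kParameterMapHeaderSize =
      kFixedArrayHeaderSize + 2 * kPointerSize;                // + context + backing store
  const int kJSSloppyArgumentsObjectSize = 5 * kPointerSize;   // assumed value

  const int argument_count = 3;   // actual argc (a5 in the stub)
  const int parameter_count = 5;  // formal parameter count (a2)
  const int mapped_count = std::min(parameter_count, argument_count);  // a6

  // 1. Parameter map -- omitted entirely when nothing is mapped.
  const int parameter_map_size =
      mapped_count == 0 ? 0
                        : kParameterMapHeaderSize + mapped_count * kPointerSize;
  // 2. Backing store holding one slot per actual argument.
  const int backing_store_size =
      kFixedArrayHeaderSize + argument_count * kPointerSize;
  // 3. The JSSloppyArgumentsObject itself.
  const int total_size =
      parameter_map_size + backing_store_size + kJSSloppyArgumentsObjectSize;

  std::printf("single Allocate() of %d bytes\n", total_size);
  return 0;
}
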
Index: src/mips64/code-stubs-mips64.cc
diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc
index 88e130f29277c086b59a417232d737bcb45f27a1..2de4fcc880bf9b0653095c4b55af61c6cfa0a76e 100644
--- a/src/mips64/code-stubs-mips64.cc
+++ b/src/mips64/code-stubs-mips64.cc
@@ -1587,242 +1587,6 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
}
-void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
- // a1 : function
- // a2 : number of parameters (tagged)
- // a3 : parameters pointer
-
- DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
-
- // Check if the calling frame is an arguments adaptor frame.
- Label runtime;
- __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
- __ Branch(&runtime, ne, a0,
- Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
-
- // Patch the arguments.length and the parameters pointer in the current frame.
- __ ld(a2, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ SmiScale(a7, a2, kPointerSizeLog2);
- __ Daddu(a4, a4, Operand(a7));
- __ daddiu(a3, a4, StandardFrameConstants::kCallerSPOffset);
-
- __ bind(&runtime);
- __ Push(a1, a3, a2);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
-}
-
-
-void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
- // a1 : function
- // a2 : number of parameters (tagged)
- // a3 : parameters pointer
- // Registers used over whole function:
- // a5 : arguments count (tagged)
- // a6 : mapped parameter count (tagged)
-
- DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
- DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
- DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
-
- // Check if the calling frame is an arguments adaptor frame.
- Label adaptor_frame, try_allocate, runtime;
- __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
- __ Branch(&adaptor_frame, eq, a0,
- Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
-
- // No adaptor, parameter count = argument count.
- __ mov(a5, a2);
- __ Branch(USE_DELAY_SLOT, &try_allocate);
- __ mov(a6, a2); // In delay slot.
-
- // We have an adaptor frame. Patch the parameters pointer.
- __ bind(&adaptor_frame);
- __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ SmiScale(t2, a5, kPointerSizeLog2);
- __ Daddu(a4, a4, Operand(t2));
- __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));
-
- // a5 = argument count (tagged)
- // a6 = parameter count (tagged)
- // Compute the mapped parameter count = min(a6, a5) in a6.
- __ mov(a6, a2);
- __ Branch(&try_allocate, le, a6, Operand(a5));
- __ mov(a6, a5);
-
- __ bind(&try_allocate);
-
- // Compute the sizes of backing store, parameter map, and arguments object.
- // 1. Parameter map, has 2 extra words containing context and backing store.
- const int kParameterMapHeaderSize =
- FixedArray::kHeaderSize + 2 * kPointerSize;
- // If there are no mapped parameters, we do not need the parameter_map.
- Label param_map_size;
- DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
- __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
- __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
- __ SmiScale(t1, a6, kPointerSizeLog2);
- __ daddiu(t1, t1, kParameterMapHeaderSize);
- __ bind(&param_map_size);
-
- // 2. Backing store.
- __ SmiScale(t2, a5, kPointerSizeLog2);
- __ Daddu(t1, t1, Operand(t2));
- __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));
-
- // 3. Arguments object.
- __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));
-
- // Do the allocation of all three objects in one go.
- __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT);
-
- // v0 = address of new object(s) (tagged)
- // a2 = argument count (smi-tagged)
- // Get the arguments boilerplate from the current native context into a4.
- const int kNormalOffset =
- Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
- const int kAliasedOffset =
- Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
-
- __ ld(a4, NativeContextMemOperand());
- Label skip2_ne, skip2_eq;
- __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
- __ ld(a4, MemOperand(a4, kNormalOffset));
- __ bind(&skip2_ne);
-
- __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
- __ ld(a4, MemOperand(a4, kAliasedOffset));
- __ bind(&skip2_eq);
-
- // v0 = address of new object (tagged)
- // a2 = argument count (smi-tagged)
- // a4 = address of arguments map (tagged)
- // a6 = mapped parameter count (tagged)
- __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
- __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
- __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
- __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));
-
- // Set up the callee in-object property.
- __ AssertNotSmi(a1);
- __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
-
- // Use the length (smi tagged) and set that as an in-object property too.
- __ AssertSmi(a5);
- __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
-
- // Set up the elements pointer in the allocated arguments object.
- // If we allocated a parameter map, a4 will point there, otherwise
- // it will point to the backing store.
- __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
- __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
-
- // v0 = address of new object (tagged)
- // a2 = argument count (tagged)
- // a4 = address of parameter map or backing store (tagged)
- // a6 = mapped parameter count (tagged)
- // Initialize parameter map. If there are no mapped arguments, we're done.
- Label skip_parameter_map;
- Label skip3;
- __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
- // Move backing store address to a1, because it is
- // expected there when filling in the unmapped arguments.
- __ mov(a1, a4);
- __ bind(&skip3);
-
- __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
-
- __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
- __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
- __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
- __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
- __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
- __ SmiScale(t2, a6, kPointerSizeLog2);
- __ Daddu(a5, a4, Operand(t2));
- __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
- __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));
-
- // Copy the parameter slots and the holes in the arguments.
- // We need to fill in mapped_parameter_count slots. They index the context,
- // where parameters are stored in reverse order, at
- // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
- // The mapped parameter thus need to get indices
- // MIN_CONTEXT_SLOTS+parameter_count-1 ..
- // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
- // We loop from right to left.
- Label parameters_loop, parameters_test;
- __ mov(a5, a6);
- __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
- __ Dsubu(t1, t1, Operand(a6));
- __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
- __ SmiScale(t2, a5, kPointerSizeLog2);
- __ Daddu(a1, a4, Operand(t2));
- __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));
-
- // a1 = address of backing store (tagged)
- // a4 = address of parameter map (tagged)
- // a0 = temporary scratch (a.o., for address calculation)
- // t1 = loop variable (tagged)
- // a7 = the hole value
- __ jmp(&parameters_test);
-
- __ bind(&parameters_loop);
- __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
- __ SmiScale(a0, a5, kPointerSizeLog2);
- __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
- __ Daddu(t2, a4, a0);
- __ sd(t1, MemOperand(t2));
- __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
- __ Daddu(t2, a1, a0);
- __ sd(a7, MemOperand(t2));
- __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
- __ bind(&parameters_test);
- __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
-
- // Restore t1 = argument count (tagged).
- __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
-
- __ bind(&skip_parameter_map);
- // v0 = address of new object (tagged)
- // a1 = address of backing store (tagged)
- // a5 = argument count (tagged)
- // a6 = mapped parameter count (tagged)
- // t1 = scratch
- // Copy arguments header and remaining slots (if there are any).
- __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
- __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
- __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));
-
- Label arguments_loop, arguments_test;
- __ SmiScale(t2, a6, kPointerSizeLog2);
- __ Dsubu(a3, a3, Operand(t2));
- __ jmp(&arguments_test);
-
- __ bind(&arguments_loop);
- __ Dsubu(a3, a3, Operand(kPointerSize));
- __ ld(a4, MemOperand(a3, 0));
- __ SmiScale(t2, a6, kPointerSizeLog2);
- __ Daddu(t1, a1, Operand(t2));
- __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
- __ Daddu(a6, a6, Operand(Smi::FromInt(1)));
-
- __ bind(&arguments_test);
- __ Branch(&arguments_loop, lt, a6, Operand(a5));
-
- // Return.
- __ Ret();
-
- // Do the runtime call to allocate the arguments object.
- // a5 = argument count (tagged)
- __ bind(&runtime);
- __ Push(a1, a3, a5);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
-}
-
-
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Return address is in ra.
Label slow;
@@ -5158,6 +4922,226 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
}
+void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a1 : function
+ // -- cp : context
+ // -- fp : frame pointer
+ // -- ra : return address
+ // -----------------------------------
+ __ AssertFunction(a1);
+
+ // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
+ __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(a2,
+ FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ Dlsa(a3, fp, a2, kPointerSizeLog2);
+ __ Daddu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
+ __ SmiTag(a2);
+
+ // a1 : function
+ // a2 : number of parameters (tagged)
+ // a3 : parameters pointer
+ // Registers used over whole function:
+ // a5 : arguments count (tagged)
+ // a6 : mapped parameter count (tagged)
+
+ // Check if the calling frame is an arguments adaptor frame.
+ Label adaptor_frame, try_allocate, runtime;
+ __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
+ __ Branch(&adaptor_frame, eq, a0,
+ Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+
+ // No adaptor, parameter count = argument count.
+ __ mov(a5, a2);
+ __ Branch(USE_DELAY_SLOT, &try_allocate);
+ __ mov(a6, a2); // In delay slot.
+
+ // We have an adaptor frame. Patch the parameters pointer.
+ __ bind(&adaptor_frame);
+ __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiScale(t2, a5, kPointerSizeLog2);
+ __ Daddu(a4, a4, Operand(t2));
+ __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));
+
+ // a5 = argument count (tagged)
+ // a6 = parameter count (tagged)
+ // Compute the mapped parameter count = min(a6, a5) in a6.
+ __ mov(a6, a2);
+ __ Branch(&try_allocate, le, a6, Operand(a5));
+ __ mov(a6, a5);
+
+ __ bind(&try_allocate);
+
+ // Compute the sizes of backing store, parameter map, and arguments object.
+ // 1. Parameter map, has 2 extra words containing context and backing store.
+ const int kParameterMapHeaderSize =
+ FixedArray::kHeaderSize + 2 * kPointerSize;
+ // If there are no mapped parameters, we do not need the parameter_map.
+ Label param_map_size;
+ DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
+ __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
+ __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
+ __ SmiScale(t1, a6, kPointerSizeLog2);
+ __ daddiu(t1, t1, kParameterMapHeaderSize);
+ __ bind(&param_map_size);
+
+ // 2. Backing store.
+ __ SmiScale(t2, a5, kPointerSizeLog2);
+ __ Daddu(t1, t1, Operand(t2));
+ __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));
+
+ // 3. Arguments object.
+ __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));
+
+ // Do the allocation of all three objects in one go.
+ __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT);
+
+ // v0 = address of new object(s) (tagged)
+ // a2 = argument count (smi-tagged)
+ // Get the arguments boilerplate from the current native context into a4.
+ const int kNormalOffset =
+ Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
+ const int kAliasedOffset =
+ Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
+
+ __ ld(a4, NativeContextMemOperand());
+ Label skip2_ne, skip2_eq;
+ __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
+ __ ld(a4, MemOperand(a4, kNormalOffset));
+ __ bind(&skip2_ne);
+
+ __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
+ __ ld(a4, MemOperand(a4, kAliasedOffset));
+ __ bind(&skip2_eq);
+
+ // v0 = address of new object (tagged)
+ // a2 = argument count (smi-tagged)
+ // a4 = address of arguments map (tagged)
+ // a6 = mapped parameter count (tagged)
+ __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
+ __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
+ __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
+ __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));
+
+ // Set up the callee in-object property.
+ __ AssertNotSmi(a1);
+ __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
+
+ // Use the length (smi tagged) and set that as an in-object property too.
+ __ AssertSmi(a5);
+ __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
+
+ // Set up the elements pointer in the allocated arguments object.
+ // If we allocated a parameter map, a4 will point there, otherwise
+ // it will point to the backing store.
+ __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
+ __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
+
+ // v0 = address of new object (tagged)
+ // a2 = argument count (tagged)
+ // a4 = address of parameter map or backing store (tagged)
+ // a6 = mapped parameter count (tagged)
+ // Initialize parameter map. If there are no mapped arguments, we're done.
+ Label skip_parameter_map;
+ Label skip3;
+ __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
+ // Move backing store address to a1, because it is
+ // expected there when filling in the unmapped arguments.
+ __ mov(a1, a4);
+ __ bind(&skip3);
+
+ __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
+
+ __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
+ __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
+ __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
+ __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
+ __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
+ __ SmiScale(t2, a6, kPointerSizeLog2);
+ __ Daddu(a5, a4, Operand(t2));
+ __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
+ __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));
+
+ // Copy the parameter slots and the holes in the arguments.
+ // We need to fill in mapped_parameter_count slots. They index the context,
+ // where parameters are stored in reverse order, at
+ // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
+ // The mapped parameters thus need to get indices
+ // MIN_CONTEXT_SLOTS+parameter_count-1 ..
+ // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
+ // We loop from right to left.
+ Label parameters_loop, parameters_test;
+ __ mov(a5, a6);
+ __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
+ __ Dsubu(t1, t1, Operand(a6));
+ __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
+ __ SmiScale(t2, a5, kPointerSizeLog2);
+ __ Daddu(a1, a4, Operand(t2));
+ __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));
+
+ // a1 = address of backing store (tagged)
+ // a4 = address of parameter map (tagged)
+ // a0 = temporary scratch (used, among other things, for address calculation)
+ // t1 = loop variable (tagged)
+ // a7 = the hole value
+ __ jmp(&parameters_test);
+
+ __ bind(&parameters_loop);
+ __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
+ __ SmiScale(a0, a5, kPointerSizeLog2);
+ __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
+ __ Daddu(t2, a4, a0);
+ __ sd(t1, MemOperand(t2));
+ __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
+ __ Daddu(t2, a1, a0);
+ __ sd(a7, MemOperand(t2));
+ __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
+ __ bind(&parameters_test);
+ __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
+
+ // Restore a5 = argument count (tagged).
+ __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
+
+ __ bind(&skip_parameter_map);
+ // v0 = address of new object (tagged)
+ // a1 = address of backing store (tagged)
+ // a5 = argument count (tagged)
+ // a6 = mapped parameter count (tagged)
+ // t1 = scratch
+ // Copy arguments header and remaining slots (if there are any).
+ __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
+ __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
+ __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));
+
+ Label arguments_loop, arguments_test;
+ __ SmiScale(t2, a6, kPointerSizeLog2);
+ __ Dsubu(a3, a3, Operand(t2));
+ __ jmp(&arguments_test);
+
+ __ bind(&arguments_loop);
+ __ Dsubu(a3, a3, Operand(kPointerSize));
+ __ ld(a4, MemOperand(a3, 0));
+ __ SmiScale(t2, a6, kPointerSizeLog2);
+ __ Daddu(t1, a1, Operand(t2));
+ __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
+ __ Daddu(a6, a6, Operand(Smi::FromInt(1)));
+
+ __ bind(&arguments_test);
+ __ Branch(&arguments_loop, lt, a6, Operand(a5));
+
+ // Return.
+ __ Ret();
+
+ // Do the runtime call to allocate the arguments object.
+ // a5 = argument count (tagged)
+ __ bind(&runtime);
+ __ Push(a1, a3, a5);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
+}
+
+
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a1 : function
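
For reference, a minimal standalone sketch (plain C++, not V8 code) of how the mapped ("fast aliased") sloppy arguments elements built by the stub above are read back: a mapped index aliases the context slot recorded in its parameter map entry, while unmapped indices, or entries whose map slot holds the hole, fall through to the backing store. The reverse-order context indices mirror the stub's parameters_loop; the value used here for Context::MIN_CONTEXT_SLOTS is an assumption.

// lookup_sketch.cc -- standalone illustration only, not V8 code.
#include <cstdio>
#include <vector>

const int kHole = -1;            // stands in for V8's "the hole" sentinel
const int kMinContextSlots = 4;  // assumed value of Context::MIN_CONTEXT_SLOTS

struct SloppyArgumentsElements {
  std::vector<int> parameter_map;  // context slot index per mapped argument, or kHole
  std::vector<int> backing_store;  // one slot per actual argument
};

int Lookup(const SloppyArgumentsElements& e, const std::vector<int>& context,
           int i) {
  if (i < static_cast<int>(e.parameter_map.size()) &&
      e.parameter_map[i] != kHole) {
    return context[e.parameter_map[i]];  // aliased with the parameter's context slot
  }
  return e.backing_store[i];             // plain, unmapped argument
}

int main() {
  // function f(x, y) called as f(1, 2, 3): parameter_count = 2,
  // argument_count = 3, so mapped_count = min(2, 3) = 2.
  std::vector<int> context(kMinContextSlots + 2);
  context[kMinContextSlots + 1] = 1;  // x (parameters sit in reverse order)
  context[kMinContextSlots + 0] = 2;  // y

  SloppyArgumentsElements e;
  e.parameter_map = {kMinContextSlots + 1, kMinContextSlots + 0};
  e.backing_store = {kHole, kHole, 3};  // mapped slots hold the hole until unmapped

  for (int i = 0; i < 3; ++i) {
    std::printf("arguments[%d] = %d\n", i, Lookup(e, context, i));
  }
  return 0;
}
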
